map: rename size to len

pull/5451/head
ka-weihe 2020-06-21 16:51:02 +02:00 committed by GitHub
parent fbe5599526
commit 7f225f2eaa
10 changed files with 116 additions and 116 deletions

View File

@ -55,7 +55,7 @@ fn main() {
app.compare_api(api_base, api_os, mname, base_os, oname)
}
}
howmany := app.api_differences.size
howmany := app.api_differences.len
eprintln('NB: please, do run `git clean -xf` after this tool, or at least `find thirdparty/ |grep .o$|xargs rm`')
eprintln('otherwise, `./v test-fixed` may show false positives, due to .o files compiled with a cross compiler.')
if howmany > 0 {

View File

@ -686,7 +686,7 @@ fn (mut cfg DocConfig) generate_docs_from_file() {
cfg.vprintln('Rendering docs...')
if cfg.output_path.len == 0 {
outputs := cfg.render()
if outputs.size == 0 {
if outputs.len == 0 {
println('No documentation for $dirs')
} else {
first := outputs.keys()[0]

View File

@ -68,7 +68,7 @@ const (
init_log_capicity = 5
// Initial number of buckets in the hashtable
init_capicity = 1 << init_log_capicity
// Maximum load-factor (size / capacity)
// Maximum load-factor (len / capacity)
max_load_factor = 0.8
// Initial highest even index in metas
init_cap = init_capicity - 2
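
Plugging in the values above: init_log_capicity = 5 gives 1 << 5 = 32 initial buckets, init_cap = 32 - 2 = 30 is the initial highest even index into the metas array, and max_load_factor = 0.8 is the len / capacity ratio past which the table is grown.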
@ -97,7 +97,7 @@ struct DenseArray {
value_bytes int
mut:
cap u32
size u32
len u32
deletes u32
keys &string
values byteptr
@ -109,7 +109,7 @@ fn new_dense_array(value_bytes int) DenseArray {
return DenseArray{
value_bytes: value_bytes
cap: 8
size: 0
len: 0
deletes: 0
keys: &string(malloc(int(8 * sizeof(string))))
values: malloc(8 * value_bytes)
@ -120,22 +120,22 @@ fn new_dense_array(value_bytes int) DenseArray {
// The growth-factor is roughly 1.125 `(x + (x >> 3))`
[inline]
fn (mut d DenseArray) push(key string, value voidptr) u32 {
if d.cap == d.size {
if d.cap == d.len {
d.cap += d.cap >> 3
d.keys = &string(C.realloc(d.keys, sizeof(string) * d.cap))
d.values = C.realloc(d.values, u32(d.value_bytes) * d.cap)
}
push_index := d.size
push_index := d.len
d.keys[push_index] = key
C.memcpy(d.values + push_index * u32(d.value_bytes), value, d.value_bytes)
d.size++
d.len++
return push_index
}
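
Worked through, `d.cap += d.cap >> 3` is the roughly 1.125 growth factor the comment above mentions: a cap of 8 grows to 9, 32 to 36, and 64 to 72, with both the keys and values buffers realloc'd to the new cap before the element is appended at index d.len.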
fn (d DenseArray) get(i int) voidptr {
$if !no_bounds_checking? {
if i < 0 || i >= int(d.size) {
panic('DenseArray.get: index out of range (i == $i, d.len == $d.size)')
if i < 0 || i >= int(d.len) {
panic('DenseArray.get: index out of range (i == $i, d.len == $d.len)')
}
}
return byteptr(d.keys) + i * int(sizeof(string))
@ -146,7 +146,7 @@ fn (d DenseArray) get(i int) voidptr {
fn (mut d DenseArray) zeros_to_end() {
mut tmp_value := malloc(d.value_bytes)
mut count := u32(0)
for i in 0 .. d.size {
for i in 0 .. d.len {
if d.keys[i].str != 0 {
// swap keys
tmp_key := d.keys[count]
@ -161,7 +161,7 @@ fn (mut d DenseArray) zeros_to_end() {
}
free(tmp_value)
d.deletes = 0
d.size = count
d.len = count
d.cap = if count < 8 { u32(8) } else { count }
d.keys = &string(C.realloc(d.keys, sizeof(string) * d.cap))
d.values = C.realloc(d.values, u32(d.value_bytes) * d.cap)
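
In effect zeros_to_end() compacts the live entries (keys whose str pointer is non-zero) to the front: an array holding, say, 3 live and 2 deleted entries out of len == 5 ends up with the 3 live pairs at indices 0..2, len == 3, deletes reset to 0, and cap clamped to the minimum of 8 before both buffers are realloc'd.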
@ -188,7 +188,7 @@ mut:
extra_metas u32
pub mut:
// Number of key-values currently in the hashmap
size int
len int
}
fn new_map_1(value_bytes int) map {
@ -200,7 +200,7 @@ fn new_map_1(value_bytes int) map {
key_values: new_dense_array(value_bytes)
metas: &u32(vcalloc(int(sizeof(u32) * (init_capicity + extra_metas_inc))))
extra_metas: extra_metas_inc
size: 0
len: 0
}
}
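
For user code the only visible change in this file is the public field name: what used to be read as m.size is now m.len. A minimal sketch of that, written in the same declaration style as the map test further down (illustrative only, not part of this commit):

	fn main() {
		mut m := map[string]int
		m['hi'] = 80
		m['hello'] = 101
		println(m.len) // 2; this field was m.size before this change
		m.delete('hi')
		println(m.len) // 1
	}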
@ -270,7 +270,7 @@ fn (mut m map) ensure_extra_metas(probe_count u32) {
fn (mut m map) set(k string, value voidptr) {
key := k.clone()
load_factor := f32(m.size << 1) / f32(m.cap)
load_factor := f32(m.len << 1) / f32(m.cap)
if load_factor > max_load_factor {
m.expand()
}
@ -288,7 +288,7 @@ fn (mut m map) set(k string, value voidptr) {
}
kv_index := m.key_values.push(key, value)
m.meta_greater(index, meta, kv_index)
m.size++
m.len++
}
// Doubles the size of the hashmap
@ -311,7 +311,7 @@ fn (mut m map) rehash() {
meta_bytes := sizeof(u32) * (m.cap + 2 + m.extra_metas)
m.metas = &u32(C.realloc(m.metas, meta_bytes))
C.memset(m.metas, 0, meta_bytes)
for i := u32(0); i < m.key_values.size; i++ {
for i := u32(0); i < m.key_values.len; i++ {
if m.key_values.keys[i].str == 0 {
continue
}
@ -387,17 +387,17 @@ pub fn (mut m map) delete(key string) {
m.metas[index + 1] = m.metas[index + 3]
index += 2
}
m.size--
m.len--
m.metas[index] = 0
m.key_values.deletes++
// Mark key as deleted
m.key_values.keys[kv_index].free()
C.memset(&m.key_values.keys[kv_index], 0, sizeof(string))
if m.key_values.size <= 32 {
if m.key_values.len <= 32 {
return
}
// Clean up key_values if too many have been deleted
if m.key_values.deletes >= (m.key_values.size >> 1) {
if m.key_values.deletes >= (m.key_values.len >> 1) {
m.key_values.zeros_to_end()
m.rehash()
m.key_values.deletes = 0
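
Concretely, the two checks above mean cleanup only happens for maps whose dense array has grown past 32 pushed key-values and where at least half of those slots are tombstones: with m.key_values.len == 40 (live plus deleted slots), zeros_to_end() and rehash() run once m.key_values.deletes reaches 20.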
@ -411,9 +411,9 @@ pub fn (mut m map) delete(key string) {
// TODO: add optimization in case of no deletes
pub fn (m &map) keys() []string {
mut keys := [''].repeat(m.size)
mut keys := [''].repeat(m.len)
mut j := 0
for i := u32(0); i < m.key_values.size; i++ {
for i := u32(0); i < m.key_values.len; i++ {
if m.key_values.keys[i].str == 0 {
continue
}
@ -428,7 +428,7 @@ pub fn (d DenseArray) clone() DenseArray {
res := DenseArray {
value_bytes: d.value_bytes
cap: d.cap
size: d.size
len: d.len
deletes: d.deletes
keys: &string(malloc(int(d.cap * sizeof(string))))
values: byteptr(malloc(int(d.cap * u32(d.value_bytes))))
@ -449,7 +449,7 @@ pub fn (m map) clone() map {
key_values: m.key_values.clone()
metas: &u32(malloc(int(metas_size)))
extra_metas: m.extra_metas
size: m.size
len: m.len
}
C.memcpy(res.metas, m.metas, metas_size)
return res
@ -458,7 +458,7 @@ pub fn (m map) clone() map {
[unsafe_fn]
pub fn (m &map) free() {
free(m.metas)
for i := u32(0); i < m.key_values.size; i++ {
for i := u32(0); i < m.key_values.len; i++ {
if m.key_values.keys[i].str == 0 {
continue
}
@ -469,7 +469,7 @@ pub fn (m &map) free() {
}
pub fn (m map_string) str() string {
if m.size == 0 {
if m.len == 0 {
return '{}'
}
mut sb := strings.new_builder(50)

View File

@ -16,12 +16,12 @@ fn (mut a Aaa) set(key string, val int) {
fn test_map() {
mut m := map[string]int
assert m.size == 0
assert m.len == 0
m['hi'] = 80
m['hello'] = 101
assert m['hi'] == 80
assert m['hello'] == 101
assert m.size == 2
assert m.len == 2
assert 'hi' in m
mut sum := 0
// Test `for in`
@ -35,9 +35,9 @@ fn test_map() {
assert 'hi' in keys
assert 'hello' in keys
m.delete('hi')
assert m.size == 1
assert m.len == 1
m.delete('aloha')
assert m.size == 1
assert m.len == 1
assert m['hi'] == 0
assert m.keys().len == 1
@ -200,8 +200,8 @@ fn test_delete_size() {
for i in 0..10 {
m[arr[i]] = i
}
assert(m.size == 10)
println(m.size)
assert(m.len == 10)
println(m.len)
for i in 0..10 {
m.delete(arr[i])
}

View File

@ -13,12 +13,12 @@ module builtin
// The number for `degree` has been picked through vigor-
// ous benchmarking but can be changed to any number > 1.
// `degree` determines the size of each node.
// `degree` determines the maximum length of each node.
const (
degree = 6
mid_index = degree - 1
max_size = 2 * degree - 1
children_bytes = sizeof(voidptr) * (max_size + 1)
max_len = 2 * degree - 1
children_bytes = sizeof(voidptr) * (max_len + 1)
)
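
With degree = 6 these work out to mid_index = 5, max_len = 2 * 6 - 1 = 11 (hence the hard-coded [11] arrays in mapnode below, per the TODOs), and children_bytes = sizeof(voidptr) * 12, i.e. 96 bytes per child-pointer block on a 64-bit target.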
pub struct SortedMap {
@ -26,22 +26,22 @@ pub struct SortedMap {
mut:
root &mapnode
pub mut:
size int
len int
}
struct mapnode {
mut:
children &voidptr
size int
keys [11]string // TODO: Should use `max_size`
values [11]voidptr // TODO: Should use `max_size`
len int
keys [11]string // TODO: Should use `max_len`
values [11]voidptr // TODO: Should use `max_len`
}
fn new_sorted_map(n, value_bytes int) SortedMap { // TODO: Remove `n`
return SortedMap {
value_bytes: value_bytes
root: new_node()
size: 0
len: 0
}
}
@ -59,7 +59,7 @@ fn new_sorted_map_init(n, value_bytes int, keys &string, values voidptr) SortedM
fn new_node() &mapnode {
return &mapnode {
children: 0
size: 0
len: 0
}
}
@ -70,7 +70,7 @@ fn (mut m SortedMap) set(key string, value voidptr) {
mut child_index := 0
mut parent := &mapnode(0)
for {
if node.size == max_size {
if node.len == max_len {
if isnil(parent) {
parent = new_node()
m.root = parent
@ -87,13 +87,13 @@ fn (mut m SortedMap) set(key string, value voidptr) {
}
}
mut i := 0
for i < node.size && key > node.keys[i] { i++ }
if i != node.size && key == node.keys[i] {
for i < node.len && key > node.keys[i] { i++ }
if i != node.len && key == node.keys[i] {
C.memcpy(node.values[i], value, m.value_bytes)
return
}
if isnil(node.children) {
mut j := node.size - 1
mut j := node.len - 1
for j >= 0 && key < node.keys[j] {
node.keys[j + 1] = node.keys[j]
node.values[j + 1] = node.values[j]
@ -102,8 +102,8 @@ fn (mut m SortedMap) set(key string, value voidptr) {
node.keys[j + 1] = key
node.values[j + 1] = malloc(m.value_bytes)
C.memcpy(node.values[j + 1], value, m.value_bytes)
node.size++
m.size++
node.len++
m.len++
return
}
parent = node
@ -114,8 +114,8 @@ fn (mut m SortedMap) set(key string, value voidptr) {
fn (mut n mapnode) split_child(child_index int, mut y mapnode) {
mut z := new_node()
z.size = mid_index
y.size = mid_index
z.len = mid_index
y.len = mid_index
for j := mid_index - 1; j >= 0; j-- {
z.keys[j] = y.keys[j + degree]
z.values[j] = y.values[j + degree]
@ -129,8 +129,8 @@ fn (mut n mapnode) split_child(child_index int, mut y mapnode) {
if isnil(n.children) {
n.children = &voidptr(malloc(int(children_bytes)))
}
n.children[n.size + 1] = n.children[n.size]
for j := n.size; j > child_index; j-- {
n.children[n.len + 1] = n.children[n.len]
for j := n.len; j > child_index; j-- {
n.keys[j] = n.keys[j - 1]
n.values[j] = n.values[j - 1]
n.children[j] = n.children[j - 1]
@ -139,13 +139,13 @@ fn (mut n mapnode) split_child(child_index int, mut y mapnode) {
n.values[child_index] = y.values[mid_index]
n.children[child_index] = voidptr(y)
n.children[child_index + 1] = voidptr(z)
n.size++
n.len++
}
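
Worked through with the constants above: split_child is invoked on a full child y (y.len == max_len == 11); the upper mid_index entries (indices 6..10) are copied into the fresh node z, the middle entry at index mid_index = 5 is lifted into n at child_index, and y and z are each left with len == 5.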
fn (m SortedMap) get(key string, out voidptr) bool {
mut node := m.root
for {
mut i := node.size - 1
mut i := node.len - 1
for i >= 0 && key < node.keys[i] { i-- }
if i != -1 && key == node.keys[i] {
C.memcpy(out, node.values[i], m.value_bytes)
@ -165,7 +165,7 @@ fn (m SortedMap) exists(key string) bool {
}
mut node := m.root
for {
mut i := node.size - 1
mut i := node.len - 1
for i >= 0 && key < node.keys[i] { i-- }
if i != -1 && key == node.keys[i] {
return true
@ -180,7 +180,7 @@ fn (m SortedMap) exists(key string) bool {
fn (n &mapnode) find_key(k string) int {
mut idx := 0
for idx < n.size && n.keys[idx] < k {
for idx < n.len && n.keys[idx] < k {
idx++
}
return idx
@ -188,7 +188,7 @@ fn (n &mapnode) find_key(k string) int {
fn (mut n mapnode) remove_key(k string) bool {
idx := n.find_key(k)
if idx < n.size && n.keys[idx] == k {
if idx < n.len && n.keys[idx] == k {
if isnil(n.children) {
n.remove_from_leaf(idx)
} else {
@ -199,12 +199,12 @@ fn (mut n mapnode) remove_key(k string) bool {
if isnil(n.children) {
return false
}
flag := if idx == n.size {true} else {false}
if (&mapnode(n.children[idx])).size < degree {
flag := if idx == n.len {true} else {false}
if (&mapnode(n.children[idx])).len < degree {
n.fill(idx)
}
if flag && idx > n.size {
if flag && idx > n.len {
return (&mapnode(n.children[idx - 1])).remove_key(k)
} else {
return (&mapnode(n.children[idx])).remove_key(k)
@ -213,25 +213,25 @@ fn (mut n mapnode) remove_key(k string) bool {
}
fn (mut n mapnode) remove_from_leaf(idx int) {
for i := idx + 1; i < n.size; i++ {
for i := idx + 1; i < n.len; i++ {
n.keys[i - 1] = n.keys[i]
n.values[i - 1] = n.values[i]
}
n.size--
n.len--
}
fn (mut n mapnode) remove_from_non_leaf(idx int) {
k := n.keys[idx]
if &mapnode(n.children[idx]).size >= degree {
if &mapnode(n.children[idx]).len >= degree {
mut current := &mapnode(n.children[idx])
for !isnil(current.children) {
current = &mapnode(current.children[current.size])
current = &mapnode(current.children[current.len])
}
predecessor := current.keys[current.size - 1]
predecessor := current.keys[current.len - 1]
n.keys[idx] = predecessor
n.values[idx] = current.values[current.size - 1]
n.values[idx] = current.values[current.len - 1]
(&mapnode(n.children[idx])).remove_key(predecessor)
} else if &mapnode(n.children[idx + 1]).size >= degree {
} else if &mapnode(n.children[idx + 1]).len >= degree {
mut current := &mapnode(n.children[idx + 1])
for !isnil(current.children) {
current = &mapnode(current.children[0])
@ -247,11 +247,11 @@ fn (mut n mapnode) remove_from_non_leaf(idx int) {
}
fn (mut n mapnode) fill(idx int) {
if idx != 0 && &mapnode(n.children[idx - 1]).size >= degree {
if idx != 0 && &mapnode(n.children[idx - 1]).len >= degree {
n.borrow_from_prev(idx)
} else if idx != n.size && &mapnode(n.children[idx + 1]).size >= degree {
} else if idx != n.len && &mapnode(n.children[idx + 1]).len >= degree {
n.borrow_from_next(idx)
} else if idx != n.size {
} else if idx != n.len {
n.merge(idx)
} else {
n.merge(idx - 1)
@ -261,47 +261,47 @@ fn (mut n mapnode) fill(idx int) {
fn (mut n mapnode) borrow_from_prev(idx int) {
mut child := &mapnode(n.children[idx])
mut sibling := &mapnode(n.children[idx - 1])
for i := child.size - 1; i >= 0; i-- {
for i := child.len - 1; i >= 0; i-- {
child.keys[i + 1] = child.keys[i]
child.values[i + 1] = child.values[i]
}
if !isnil(child.children) {
for i := child.size; i >= 0; i-- {
for i := child.len; i >= 0; i-- {
child.children[i + 1] = child.children[i]
}
}
child.keys[0] = n.keys[idx - 1]
child.values[0] = n.values[idx - 1]
if !isnil(child.children) {
child.children[0] = sibling.children[sibling.size]
child.children[0] = sibling.children[sibling.len]
}
n.keys[idx - 1] = sibling.keys[sibling.size - 1]
n.values[idx - 1] = sibling.values[sibling.size - 1]
child.size++
sibling.size--
n.keys[idx - 1] = sibling.keys[sibling.len - 1]
n.values[idx - 1] = sibling.values[sibling.len - 1]
child.len++
sibling.len--
}
fn (mut n mapnode) borrow_from_next(idx int) {
mut child := &mapnode(n.children[idx])
mut sibling := &mapnode(n.children[idx + 1])
child.keys[child.size] = n.keys[idx]
child.values[child.size] = n.values[idx]
child.keys[child.len] = n.keys[idx]
child.values[child.len] = n.values[idx]
if !isnil(child.children) {
child.children[child.size + 1] = sibling.children[0]
child.children[child.len + 1] = sibling.children[0]
}
n.keys[idx] = sibling.keys[0]
n.values[idx] = sibling.values[0]
for i := 1; i < sibling.size; i++ {
for i := 1; i < sibling.len; i++ {
sibling.keys[i - 1] = sibling.keys[i]
sibling.values[i - 1] = sibling.values[i]
}
if !isnil(sibling.children) {
for i := 1; i <= sibling.size; i++ {
for i := 1; i <= sibling.len; i++ {
sibling.children[i - 1] = sibling.children[i]
}
}
child.size++
sibling.size--
child.len++
sibling.len--
}
fn (mut n mapnode) merge(idx int) {
@ -309,38 +309,38 @@ fn (mut n mapnode) merge(idx int) {
sibling := &mapnode(n.children[idx + 1])
child.keys[mid_index] = n.keys[idx]
child.values[mid_index] = n.values[idx]
for i in 0..sibling.size {
for i in 0..sibling.len {
child.keys[i + degree] = sibling.keys[i]
child.values[i + degree] = sibling.values[i]
}
if !isnil(child.children) {
for i := 0; i <= sibling.size; i++ {
for i := 0; i <= sibling.len; i++ {
child.children[i + degree] = sibling.children[i]
}
}
for i := idx + 1; i < n.size; i++ {
for i := idx + 1; i < n.len; i++ {
n.keys[i - 1] = n.keys[i]
n.values[i - 1] = n.values[i]
}
for i := idx + 2; i <= n.size; i++ {
for i := idx + 2; i <= n.len; i++ {
n.children[i - 1] = n.children[i]
}
child.size += sibling.size + 1
n.size--
child.len += sibling.len + 1
n.len--
// free(sibling)
}
pub fn (mut m SortedMap) delete(key string) {
if m.root.size == 0 {
if m.root.len == 0 {
return
}
removed := m.root.remove_key(key)
if removed {
m.size--
m.len--
}
if m.root.size == 0 {
if m.root.len == 0 {
// tmp := t.root
if isnil(m.root.children) {
return
@ -358,29 +358,29 @@ fn (n &mapnode) subkeys(mut keys []string, at int) int {
if !isnil(n.children) {
// Traverse children and insert
// keys inbetween children
for i in 0..n.size {
for i in 0..n.len {
child := &mapnode(n.children[i])
position += child.subkeys(mut keys, position)
keys[position] = n.keys[i]
position++
}
// Insert the keys of the last child
child := &mapnode(n.children[n.size])
child := &mapnode(n.children[n.len])
position += child.subkeys(mut keys, position)
} else {
// If leaf, insert keys
for i in 0..n.size {
for i in 0..n.len {
keys[position + i] = n.keys[i]
}
position += n.size
position += n.len
}
// Return # of added keys
return position - at
}
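
subkeys() is an in-order walk: an internal node interleaves its child subtrees with its own keys (child[0], key[0], child[1], key[1], ..., child[len]) while a leaf copies its keys directly, so a root holding ['m'] with leaf children ['a', 'f'] and ['p', 'z'] yields ['a', 'f', 'm', 'p', 'z'].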
pub fn (m &SortedMap) keys() []string {
mut keys := [''].repeat(m.size)
if isnil(m.root) || m.root.size == 0 {
mut keys := [''].repeat(m.len)
if isnil(m.root) || m.root.len == 0 {
return keys
}
m.root.subkeys(mut keys, 0)
@ -403,7 +403,7 @@ pub fn (m SortedMap) print() {
}
// pub fn (m map_string) str() string {
// if m.size == 0 {
// if m.len == 0 {
// return '{}'
// }
// mut sb := strings.new_builder(50)

View File

@ -249,7 +249,7 @@ fn (req &Request) method_and_url_to_response(method string, url urllib.URL) ?Res
host_name := url.hostname()
scheme := url.scheme
p := url.path.trim_left('/')
path := if url.query().size > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
path := if url.query().len > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
mut nport := url.port().int()
if nport == 0 {
if scheme == 'http' {

View File

@ -879,7 +879,7 @@ fn parse_query_values(mut m Values, query string) ?bool {
// encode encodes the values into ``URL encoded'' form
// ('bar=baz&foo=quux') sorted by key.
pub fn (v Values) encode() string {
if v.size == 0 {
if v.len == 0 {
return ''
}
mut buf := strings.new_builder(200)

View File

@ -11,7 +11,7 @@ pub mut:
struct Values {
pub mut:
data map[string]Value
size int
len int
}
// new_values returns a new Values struct for creating
@ -35,7 +35,7 @@ pub fn (v &Value) all() []string {
// If there are no values associated with the key, get returns
// a empty string.
pub fn (v &Values) get(key string) string {
if v.data.size == 0 {
if v.data.len == 0 {
return ''
}
vs := v.data[key]
@ -49,7 +49,7 @@ pub fn (v &Values) get(key string) string {
// If there are no values associated with the key, get returns
// a empty []string.
pub fn (v &Values) get_all(key string) []string {
if v.data.size == 0 {
if v.data.len == 0 {
return []
}
vs := v.data[key]
@ -65,7 +65,7 @@ pub fn (mut v Values) set(key, value string) {
mut a := v.data[key]
a.data = [value]
v.data[key] = a
v.size = v.data.size
v.len = v.data.len
}
// add adds the value to key. It appends to any existing
@ -77,11 +77,11 @@ pub fn (mut v Values) add(key, value string) {
}
a.data << value
v.data[key] = a
v.size = v.data.size
v.len = v.data.len
}
// del deletes the values associated with key.
pub fn (mut v Values) del(key string) {
v.data.delete(key)
v.size = v.data.size
v.len = v.data.len
}
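
Putting the renamed field together with the encode() doc comment earlier, a small usage sketch of this module (it assumes new_values() is the public constructor the comment above describes; not everything here is shown in this diff):

	import net.urllib

	fn main() {
		mut v := urllib.new_values()
		v.add('foo', 'quux')
		v.add('bar', 'baz')
		println(v.len)      // 2, previously v.size
		println(v.encode()) // bar=baz&foo=quux, sorted by key per the doc comment
	}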

View File

@ -70,7 +70,7 @@ pub fn (mut o OrderedDepMap) apply_diff(name string, deps []string) {
}
pub fn (o &OrderedDepMap) size() int {
return o.data.size
return o.data.len
}
pub fn new_dep_graph() &DepGraph {

View File

@ -506,7 +506,7 @@ pub fn (mut g Gen) write_multi_return_types() {
}
pub fn (mut g Gen) write_variadic_types() {
if g.variadic_args.size > 0 {
if g.variadic_args.len > 0 {
g.type_definitions.writeln('// variadic structs')
}
for type_str, arg_len in g.variadic_args {
@ -4104,9 +4104,9 @@ fn (mut g Gen) gen_str_for_map(info table.Map, styp, str_fn_name string) {
zero := g.type_default(info.value_type)
g.type_definitions.writeln('string ${str_fn_name}($styp m); // auto')
g.auto_str_funcs.writeln('string ${str_fn_name}($styp m) { /* gen_str_for_map */')
g.auto_str_funcs.writeln('\tstrings__Builder sb = strings__new_builder(m.key_values.size*10);')
g.auto_str_funcs.writeln('\tstrings__Builder sb = strings__new_builder(m.key_values.len*10);')
g.auto_str_funcs.writeln('\tstrings__Builder_write(&sb, tos_lit("{"));')
g.auto_str_funcs.writeln('\tfor (unsigned int i = 0; i < m.key_values.size; i++) {')
g.auto_str_funcs.writeln('\tfor (unsigned int i = 0; i < m.key_values.len; i++) {')
g.auto_str_funcs.writeln('\t\tstring key = (*(string*)DenseArray_get(m.key_values, i));')
g.auto_str_funcs.writeln('\t\tstrings__Builder_write(&sb, _STR("\'%.*s\\000\'", 2, key));')
g.auto_str_funcs.writeln('\t\tstrings__Builder_write(&sb, tos_lit(": "));')
@ -4123,7 +4123,7 @@ fn (mut g Gen) gen_str_for_map(info table.Map, styp, str_fn_name string) {
} else {
g.auto_str_funcs.writeln('\t\tstrings__Builder_write(&sb, ${elem_str_fn_name}(it));')
}
g.auto_str_funcs.writeln('\t\tif (i != m.key_values.size-1) {')
g.auto_str_funcs.writeln('\t\tif (i != m.key_values.len-1) {')
g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write(&sb, tos_lit(", "));')
g.auto_str_funcs.writeln('\t\t}')
g.auto_str_funcs.writeln('\t}')
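
For reference, the generated C above backs the default string conversion of a map; a hedged sketch of the intended output shape from the V side (assuming a plain string-keyed map literal, and that the value type's str function prints integers as usual):

	m := {'one': 1, 'two': 2}
	println(m) // {'one': 1, 'two': 2}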