strings: simplify Builder (#10263)

pull/10285/head^2
Delyan Angelov 2021-05-31 14:21:06 +03:00 committed by GitHub
parent 8990114b4b
commit eac1e25c5d
14 changed files with 65 additions and 76 deletions

View File

@@ -102,6 +102,7 @@ pub mut:
 	vexename   string // v or v.exe
 	vexepath   string // the full absolute path to the prepared v/v.exe
 	vvlocation string // v.v or compiler/ or cmd/v, depending on v version
+	make_fresh_tcc bool // whether to do 'make fresh_tcc' before compiling an old V.
 }
 
 pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
@@ -141,6 +142,9 @@ pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
 	}
 	// Recompilation is needed. Just to be sure, clean up everything first.
 	scripting.run('git clean -xf')
+	if vgit_context.make_fresh_tcc {
+		scripting.run('make fresh_tcc')
+	}
 	scripting.run(command_for_building_v_from_c_source)
 	build_cmd := command_for_selfbuilding.replace('{SOURCE}', vgit_context.vvlocation)
 	scripting.run(build_cmd)

View File

@@ -39,6 +39,7 @@ mut:
 	cc        string = 'cc' // the C compiler to use for bootstrapping.
 	cleanup   bool // should the tool run a cleanup first
 	use_cache bool // use local cached copies for --vrepo and --vcrepo in
+	fresh_tcc bool // do use `make fresh_tcc`
 }
 
 fn (mut c Context) compile_oldv_if_needed() {
@@ -50,6 +51,7 @@ fn (mut c Context) compile_oldv_if_needed() {
 		commit_v: c.commit_v
 		path_v: c.path_v
 		path_vc: c.path_vc
+		make_fresh_tcc: c.fresh_tcc
 	}
 	c.vgcontext.compile_oldv_if_needed()
 	c.commit_v_hash = c.vgcontext.commit_v__hash
@@ -125,6 +127,7 @@ fn main() {
 	}
 	////
 	context.cleanup = fp.bool('clean', 0, false, 'Clean before running (slower).')
+	context.fresh_tcc = fp.bool('fresh_tcc', 0, true, 'Do `make fresh_tcc` when preparing a V compiler.')
 	context.cmd_to_run = fp.string('command', `c`, '', 'Command to run in the old V repo.\n')
 	commits := vgit.add_common_tool_options(mut context.vgo, mut fp)
 	if should_sync {
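
For context: the flag's default is `true`, so `make fresh_tcc` runs unless it is explicitly disabled on the command line. A minimal, standalone sketch (not part of this commit) of declaring such a flag with V's `flag` module, mirroring the `fp.bool(...)` call above:

```v
import os
import flag

fn main() {
	mut fp := flag.new_flag_parser(os.args)
	// third argument is the default value: true => `make fresh_tcc` is done
	// unless the user turns the flag off on the command line
	fresh_tcc := fp.bool('fresh_tcc', 0, true, 'Do `make fresh_tcc` when preparing a V compiler.')
	println('fresh_tcc: $fresh_tcc')
}
```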

View File

@@ -2,13 +2,6 @@ import net.http
 import net.html
 
 fn main() {
-	/*
-	user_agent = 'v.http'
-	resp := http.get('https://tuicool.com') or {
-		println('failed to fetch data from the server')
-		return
-	}
-	*/
 	// http.fetch() sends an HTTP request to the URL with the given method and configurations.
 	config := http.FetchConfig{
 		user_agent: 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:88.0) Gecko/20100101 Firefox/88.0'

View File

@@ -13,7 +13,7 @@ mut:
 	is_attribute     bool
 	opened_code_type string
 	line_count       int
-	lexeme_builder   strings.Builder = strings.Builder{}
+	lexeme_builder   strings.Builder = strings.new_builder(100)
 	code_tags        map[string]bool = map{
 		'script': true
 		'style':  true
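
Since `Builder` is now an alias of `[]byte` (see the `module strings` hunks below), builder-typed struct fields default to `strings.new_builder(n)` instead of a struct literal. A minimal sketch of that field-default pattern, using a hypothetical `Scanner` struct purely for illustration:

```v
import strings

struct Scanner {
mut:
	lexeme_builder strings.Builder = strings.new_builder(100)
}

fn main() {
	mut s := Scanner{}
	s.lexeme_builder.write_string('<div>')
	println(s.lexeme_builder.str()) // prints: <div>
}
```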

View File

@@ -7,76 +7,59 @@ module strings
 // dynamically growing buffer, then use the resulting large string. Using
 // a string builder is much better for performance/memory usage than doing
 // constantly string concatenation.
-pub struct Builder {
-pub mut:
-	buf          []byte
-	len          int
-	initial_size int = 1
-}
+pub type Builder = []byte
 
 // new_builder returns a new string builder, with an initial capacity of `initial_size`
 pub fn new_builder(initial_size int) Builder {
-	return Builder{
-		// buf: make(0, initial_size)
-		buf: []byte{cap: initial_size}
-		len: 0
-		initial_size: initial_size
-	}
-}
-
-// write_bytes appends `bytes` to the accumulated buffer
-[deprecated: 'use Builder.write_ptr() instead']
-[deprecated_after: '2021-04-18']
-[unsafe]
-pub fn (mut b Builder) write_bytes(bytes &byte, len int) {
-	unsafe { b.write_ptr(bytes, len) }
+	return Builder([]byte{cap: initial_size})
 }
 
 // write_ptr writes `len` bytes provided byteptr to the accumulated buffer
 [unsafe]
 pub fn (mut b Builder) write_ptr(ptr &byte, len int) {
-	unsafe { b.buf.push_many(ptr, len) }
-	b.len += len
+	unsafe { b.push_many(ptr, len) }
 }
 
 // write_b appends a single `data` byte to the accumulated buffer
 pub fn (mut b Builder) write_b(data byte) {
-	b.buf << data
-	b.len++
+	b << data
 }
 
 // write implements the Writer interface
 pub fn (mut b Builder) write(data []byte) ?int {
-	b.buf << data
-	b.len += data.len
+	b << data
 	return data.len
 }
 
+[inline]
+pub fn (b &Builder) byte_at(n int) byte {
+	return unsafe { (&[]byte(b))[n] }
+}
+
 // write appends the string `s` to the buffer
 [inline]
 pub fn (mut b Builder) write_string(s string) {
 	if s == '' {
 		return
 	}
-	unsafe { b.buf.push_many(s.str, s.len) }
+	unsafe { b.push_many(s.str, s.len) }
 	// for c in s {
 	// b.buf << c
 	// }
 	// b.buf << []byte(s) // TODO
-	b.len += s.len
 }
 
 // go_back discards the last `n` bytes from the buffer
 pub fn (mut b Builder) go_back(n int) {
-	b.buf.trim(b.buf.len - n)
-	b.len -= n
+	b.trim(b.len - n)
 }
 
 // cut_last cuts the last `n` bytes from the buffer and returns them
 pub fn (mut b Builder) cut_last(n int) string {
-	res := b.buf[b.len - n..].bytestr()
-	b.buf.trim(b.buf.len - n)
-	b.len -= n
+	cut_pos := b.len - n
+	x := unsafe { (&[]byte(b))[cut_pos..] }
+	res := x.bytestr()
+	b.trim(cut_pos)
 	return res
 }
@@ -87,14 +70,13 @@ pub fn (mut b Builder) cut_to(pos int) string {
 	if pos > b.len {
 		return ''
 	}
-	return b.cut_last(b.buf.len - pos)
+	return b.cut_last(b.len - pos)
 }
 
 // go_back_to resets the buffer to the given position `pos`
 // NB: pos should be < than the existing buffer length.
 pub fn (mut b Builder) go_back_to(pos int) {
-	b.buf.trim(pos)
-	b.len = pos
+	b.trim(pos)
 }
 
 // writeln appends the string `s`, and then a newline character.
@@ -103,10 +85,9 @@ pub fn (mut b Builder) writeln(s string) {
 	// for c in s {
 	// b.buf << c
 	// }
-	unsafe { b.buf.push_many(s.str, s.len) }
+	unsafe { b.push_many(s.str, s.len) }
 	// b.buf << []byte(s) // TODO
-	b.buf << byte(`\n`)
-	b.len += s.len + 1
+	b << byte(`\n`)
 }
 
 // buf == 'hello world'
@@ -115,7 +96,8 @@ pub fn (b &Builder) last_n(n int) string {
 	if n > b.len {
 		return ''
 	}
-	return b.buf[b.len - n..].bytestr()
+	x := unsafe { (&[]byte(b))[b.len - n..] }
+	return x.bytestr()
 }
 
 // buf == 'hello world'
@@ -124,7 +106,8 @@ pub fn (b &Builder) after(n int) string {
 	if n >= b.len {
 		return ''
 	}
-	return b.buf[n..].bytestr()
+	x := unsafe { (&[]byte(b))[n..] }
+	return x.bytestr()
 }
 
 // str returns a copy of all of the accumulated buffer content.
@@ -135,17 +118,15 @@ pub fn (b &Builder) after(n int) string {
 // accumulated data that was in the string builder, before the
 // .str() call.
 pub fn (mut b Builder) str() string {
-	b.buf << byte(0)
-	bcopy := unsafe { &byte(memdup(b.buf.data, b.buf.len)) }
-	s := unsafe { bcopy.vstring_with_len(b.len) }
-	b.len = 0
-	b.buf.trim(0)
+	b << byte(0)
+	bcopy := unsafe { &byte(memdup(b.data, b.len)) }
+	s := unsafe { bcopy.vstring_with_len(b.len - 1) }
+	b.trim(0)
 	return s
 }
 
 // free - manually free the contents of the buffer
 [unsafe]
 pub fn (mut b Builder) free() {
-	unsafe { free(b.buf.data) }
-	b.len = 0
+	unsafe { free(b.data) }
 }
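
With `Builder` reduced to a plain `[]byte` alias, the separate `len`/`initial_size` bookkeeping disappears and `b.len` is simply the array length. A small usage sketch (not part of this commit), exercising only functions that this diff keeps:

```v
import strings

fn main() {
	mut sb := strings.new_builder(64)
	sb.write_string('hello ')
	sb.writeln('world')
	println(sb.len) // 12: 'hello world' plus the '\n' from writeln
	println(sb.last_n(6)) // 'world' plus the trailing newline
	s := sb.str() // copies out the accumulated string and empties the builder
	print(s)
	println(sb.len) // 0
}
```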

View File

@@ -3,7 +3,7 @@ import strings
 type MyInt = int
 
 fn test_sb() {
-	mut sb := strings.Builder{}
+	mut sb := strings.new_builder(100)
 	sb.write_string('hi')
 	sb.write_string('!')
 	sb.write_string('hello')

View File

@@ -766,10 +766,11 @@ pub fn (mut c Checker) struct_init(mut struct_init ast.StructInit) ast.Type {
 			c.error('unknown struct: $type_sym.name', struct_init.pos)
 			return ast.void_type
 		}
-		if sym.kind != .struct_ {
+		if sym.kind == .struct_ {
+			info = sym.info as ast.Struct
+		} else {
 			c.error('alias type name: $sym.name is not struct type', struct_init.pos)
 		}
-		info = sym.info as ast.Struct
 	} else {
 		info = type_sym.info as ast.Struct
 	}

View File

@@ -126,7 +126,7 @@ pub fn (mut f Fmt) wrap_long_line(penalty_idx int, add_indent bool) bool {
 	if penalty_idx > 0 && f.line_len <= fmt.max_len[penalty_idx] {
 		return false
 	}
-	if f.out.buf[f.out.buf.len - 1] == ` ` {
+	if f.out[f.out.len - 1] == ` ` {
 		f.out.go_back(1)
 	}
 	f.write('\n')
@@ -149,7 +149,7 @@ pub fn (mut f Fmt) remove_new_line(cfg RemoveNewLineConfig) {
 	mut buffer := if cfg.imports_buffer { unsafe { &f.out_imports } } else { unsafe { &f.out } }
 	mut i := 0
 	for i = buffer.len - 1; i >= 0; i-- {
-		if !buffer.buf[i].is_space() { // != `\n` {
+		if !buffer.byte_at(i).is_space() { // != `\n` {
			break
 		}
 	}
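
The `byte_at()` accessor added in the `module strings` hunk above is what lets this code read single bytes back out of a builder without touching a `.buf` field. A minimal sketch (not from this commit) of the same kind of backwards scan over trailing whitespace:

```v
import strings

fn main() {
	mut sb := strings.new_builder(16)
	sb.write_string('abc   ')
	mut i := sb.len - 1
	// walk back from the end while the bytes are whitespace
	for i >= 0 && sb.byte_at(i).is_space() {
		i--
	}
	println(i) // 2, the index of `c`
}
```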

View File

@@ -306,7 +306,7 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
 	fields_start := f.out.len
 	fields_loop: for {
 		if !single_line_fields {
-			if use_short_args && f.out.buf[f.out.buf.len - 1] == ` ` {
+			if use_short_args && f.out[f.out.len - 1] == ` ` {
 				// v Remove space at tail of line
 				// f(a, b, c, \n
 				// f1: 0\n

View File

@@ -5837,9 +5837,9 @@ fn (mut g Gen) insert_before_stmt(s string) {
 }
 
 fn (mut g Gen) write_expr_to_string(expr ast.Expr) string {
-	pos := g.out.buf.len
+	pos := g.out.len
 	g.expr(expr)
-	return g.out.cut_last(g.out.buf.len - pos)
+	return g.out.cut_last(g.out.len - pos)
 }
 
 // fn (mut g Gen) start_tmp() {

View File

@@ -39,7 +39,7 @@ fn (mut g Gen) process_fn_decl(node ast.FnDecl) {
 	g.gen_attrs(node.attrs)
 	// g.tmp_count = 0 TODO
 	mut skip := false
-	pos := g.out.buf.len
+	pos := g.out.len
 	should_bundle_module := util.should_bundle_module(node.mod)
 	if g.pref.build_mode == .build_module {
 		// if node.name.contains('parse_text') {

View File

@@ -314,25 +314,25 @@ pub const (
 	keywords = build_keys()
 )
 
-pub fn key_to_token(key string) Kind {
-	return Kind(token.keywords[key])
-}
-
+[inline]
 pub fn is_key(key string) bool {
-	return int(key_to_token(key)) > 0
+	return int(token.keywords[key]) > 0
 }
 
+[inline]
 pub fn is_decl(t Kind) bool {
 	return t in [.key_enum, .key_interface, .key_fn, .key_struct, .key_type, .key_const, .key_pub,
 		.eof,
 	]
 }
 
+[inline]
 pub fn (t Kind) is_assign() bool {
 	return t in token.assign_tokens
 }
 
 // note: used for some code generation, so no quoting
+[inline]
 pub fn (t Kind) str() string {
 	return token.token_str[int(t)]
 }
@@ -434,34 +434,41 @@ const (
 )
 
 // precedence returns a tokens precedence if defined, otherwise lowest_prec
+[inline]
 pub fn (tok Token) precedence() int {
 	return int(token.precedences[tok.kind])
 }
 
 // is_scalar returns true if the token is a scalar
+[inline]
 pub fn (tok Token) is_scalar() bool {
 	return tok.kind in [.number, .string]
 }
 
 // is_unary returns true if the token can be in a unary expression
+[inline]
 pub fn (tok Token) is_unary() bool {
 	// `+` | `-` | `!` | `~` | `*` | `&` | `<-`
 	return tok.kind in [.plus, .minus, .not, .bit_not, .mul, .amp, .arrow]
 }
 
+[inline]
 pub fn (tok Kind) is_relational() bool {
 	// `<` | `<=` | `>` | `>=` | `==` | `!=`
 	return tok in [.lt, .le, .gt, .ge, .eq, .ne]
 }
 
+[inline]
 pub fn (k Kind) is_start_of_type() bool {
 	return k in [.name, .lpar, .amp, .lsbr, .question, .key_shared]
 }
 
+[inline]
 pub fn (kind Kind) is_prefix() bool {
 	return kind in [.minus, .amp, .mul, .not, .bit_not]
 }
 
+[inline]
 pub fn (kind Kind) is_infix() bool {
 	return kind in [.plus, .minus, .mod, .mul, .div, .eq, .ne, .gt, .lt, .key_in, .key_as, .ge,
 		.le, .logical_or, .xor, .not_in, .key_is, .not_is, .and, .dot, .pipe, .amp, .left_shift,
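
The new `is_key` body above relies on V map indexing returning the zero value for a missing key, so casting the looked-up `Kind` to `int` and checking `> 0` doubles as a membership test. A tiny standalone sketch of that idiom, with a hypothetical map used purely for illustration:

```v
fn main() {
	keywords := map{
		'fn':     1
		'struct': 2
	}
	println(keywords['fn'] > 0) // true
	println(keywords['while'] > 0) // false: a missing key yields the zero value
}
```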

View File

@@ -53,7 +53,7 @@ pub fn (mut sse SSEConnection) start() ? {
 		start_sb.write_string('\r\n$k: $v')
 	}
 	start_sb.write_string('\r\n')
-	sse.conn.write(start_sb.buf) or { return error('could not start sse response') }
+	sse.conn.write(start_sb) or { return error('could not start sse response') }
 }
 
 // send_message sends a single message to the http client that listens for SSE.
@@ -73,5 +73,5 @@ pub fn (mut sse SSEConnection) send_message(message SSEMessage) ? {
 		sb.write_string('retry: $message.retry\n')
 	}
 	sb.write_string('\n')
-	sse.conn.write(sb.buf) ?
+	sse.conn.write(sb) ?
 }
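
Because `Builder` is now just an alias of `[]byte`, a builder can be passed directly to APIs that expect a byte array, which is why the `.buf` accesses above go away. A minimal sketch with a hypothetical `send` function, assuming the alias-to-base coercion this commit relies on:

```v
import strings

// hypothetical sink that accepts a plain byte array
fn send(data []byte) {
	println('sending $data.len bytes')
}

fn main() {
	mut sb := strings.new_builder(32)
	sb.write_string('event: ping\n\n')
	send(sb) // no `sb.buf` indirection needed anymore
}
```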

View File

@@ -156,7 +156,7 @@ fn save_raw_data_as_array(buf_bin []byte, file_name string) {
 	for x in buf_bin {
 		buf.write_string('0x${x:02x},')
 	}
-	os.write_file_array(file_name, buf.buf) or { panic(err) }
+	os.write_file_array(file_name, buf) or { panic(err) }
 }
 
 fn test_main() {