strings: simplify Builder (#10263)

pull/10285/head^2
Delyan Angelov 2021-05-31 14:21:06 +03:00 committed by GitHub
parent 8990114b4b
commit eac1e25c5d
14 changed files with 65 additions and 76 deletions

View File

@ -102,6 +102,7 @@ pub mut:
vexename string // v or v.exe
vexepath string // the full absolute path to the prepared v/v.exe
vvlocation string // v.v or compiler/ or cmd/v, depending on v version
make_fresh_tcc bool // whether to do 'make fresh_tcc' before compiling an old V.
}
pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
@ -141,6 +142,9 @@ pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
}
// Recompilation is needed. Just to be sure, clean up everything first.
scripting.run('git clean -xf')
if vgit_context.make_fresh_tcc {
scripting.run('make fresh_tcc')
}
scripting.run(command_for_building_v_from_c_source)
build_cmd := command_for_selfbuilding.replace('{SOURCE}', vgit_context.vvlocation)
scripting.run(build_cmd)

View File

@ -39,6 +39,7 @@ mut:
cc string = 'cc' // the C compiler to use for bootstrapping.
cleanup bool // should the tool run a cleanup first
use_cache bool // use local cached copies for --vrepo and --vcrepo in
fresh_tcc bool // do use `make fresh_tcc`
}
fn (mut c Context) compile_oldv_if_needed() {
@ -50,6 +51,7 @@ fn (mut c Context) compile_oldv_if_needed() {
commit_v: c.commit_v
path_v: c.path_v
path_vc: c.path_vc
make_fresh_tcc: c.fresh_tcc
}
c.vgcontext.compile_oldv_if_needed()
c.commit_v_hash = c.vgcontext.commit_v__hash
@ -125,6 +127,7 @@ fn main() {
}
////
context.cleanup = fp.bool('clean', 0, false, 'Clean before running (slower).')
context.fresh_tcc = fp.bool('fresh_tcc', 0, true, 'Do `make fresh_tcc` when preparing a V compiler.')
context.cmd_to_run = fp.string('command', `c`, '', 'Command to run in the old V repo.\n')
commits := vgit.add_common_tool_options(mut context.vgo, mut fp)
if should_sync {
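For reference, a minimal, self-contained sketch of the default-true boolean flag pattern used above (the application name and the final println are illustrative, not part of oldv):

import flag
import os

fn main() {
	mut fp := flag.new_flag_parser(os.args)
	fp.application('flag_demo') // hypothetical demo name
	// a boolean flag that defaults to true, mirroring the new `fresh_tcc` option
	fresh_tcc := fp.bool('fresh_tcc', 0, true, 'Do `make fresh_tcc` when preparing a V compiler.')
	fp.finalize() or {
		eprintln(err)
		return
	}
	println('fresh_tcc: $fresh_tcc')
}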

View File

@ -2,13 +2,6 @@ import net.http
import net.html
fn main() {
/*
user_agent = 'v.http'
resp := http.get('https://tuicool.com') or {
println('failed to fetch data from the server')
return
}
*/
// http.fetch() sends an HTTP request to the URL with the given method and configurations.
config := http.FetchConfig{
user_agent: 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:88.0) Gecko/20100101 Firefox/88.0'
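The lines removed above were a commented-out `http.get` sketch; made runnable, that older approach would look roughly like this (the `status_code` print at the end is illustrative):

import net.http

fn main() {
	resp := http.get('https://tuicool.com') or {
		println('failed to fetch data from the server')
		return
	}
	println(resp.status_code)
}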

View File

@ -13,11 +13,11 @@ mut:
is_attribute bool
opened_code_type string
line_count int
lexeme_builder strings.Builder = strings.Builder{}
lexeme_builder strings.Builder = strings.new_builder(100)
code_tags map[string]bool = map{
'script': true
'style': true
}
}
// Parser is responsible for reading the HTML strings and converting them into a `DocumentObjectModel`.
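Since `Builder` is no longer a struct, the zero-value literal `strings.Builder{}` gives way to the `new_builder()` constructor, as the field default above shows. The same pattern in isolation (the `Lexer` struct is purely illustrative):

import strings

struct Lexer {
mut:
	// a field default now goes through the constructor
	lexeme_builder strings.Builder = strings.new_builder(100)
}

fn main() {
	mut l := Lexer{}
	l.lexeme_builder.write_string('token')
	println(l.lexeme_builder.str())
}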

View File

@ -7,76 +7,59 @@ module strings
// dynamically growing buffer, then use the resulting large string. Using
// a string builder is much better for performance/memory usage than doing
// repeated string concatenation.
pub struct Builder {
pub mut:
buf []byte
len int
initial_size int = 1
}
pub type Builder = []byte
// new_builder returns a new string builder, with an initial capacity of `initial_size`
pub fn new_builder(initial_size int) Builder {
return Builder{
// buf: make(0, initial_size)
buf: []byte{cap: initial_size}
len: 0
initial_size: initial_size
}
}
// write_bytes appends `bytes` to the accumulated buffer
[deprecated: 'use Builder.write_ptr() instead']
[deprecated_after: '2021-04-18']
[unsafe]
pub fn (mut b Builder) write_bytes(bytes &byte, len int) {
unsafe { b.write_ptr(bytes, len) }
return Builder([]byte{cap: initial_size})
}
// write_ptr writes `len` bytes from the provided pointer `ptr` to the accumulated buffer
[unsafe]
pub fn (mut b Builder) write_ptr(ptr &byte, len int) {
unsafe { b.buf.push_many(ptr, len) }
b.len += len
unsafe { b.push_many(ptr, len) }
}
// write_b appends a single `data` byte to the accumulated buffer
pub fn (mut b Builder) write_b(data byte) {
b.buf << data
b.len++
b << data
}
// write implements the Writer interface
pub fn (mut b Builder) write(data []byte) ?int {
b.buf << data
b.len += data.len
b << data
return data.len
}
[inline]
pub fn (b &Builder) byte_at(n int) byte {
return unsafe { (&[]byte(b))[n] }
}
// write_string appends the string `s` to the buffer
[inline]
pub fn (mut b Builder) write_string(s string) {
if s == '' {
return
}
unsafe { b.buf.push_many(s.str, s.len) }
unsafe { b.push_many(s.str, s.len) }
// for c in s {
// b.buf << c
// }
// b.buf << []byte(s) // TODO
b.len += s.len
}
// go_back discards the last `n` bytes from the buffer
pub fn (mut b Builder) go_back(n int) {
b.buf.trim(b.buf.len - n)
b.len -= n
b.trim(b.len - n)
}
// cut_last cuts the last `n` bytes from the buffer and returns them
pub fn (mut b Builder) cut_last(n int) string {
res := b.buf[b.len - n..].bytestr()
b.buf.trim(b.buf.len - n)
b.len -= n
cut_pos := b.len - n
x := unsafe { (&[]byte(b))[cut_pos..] }
res := x.bytestr()
b.trim(cut_pos)
return res
}
@ -87,14 +70,13 @@ pub fn (mut b Builder) cut_to(pos int) string {
if pos > b.len {
return ''
}
return b.cut_last(b.buf.len - pos)
return b.cut_last(b.len - pos)
}
// go_back_to resets the buffer to the given position `pos`
// NB: `pos` should be less than the existing buffer length.
pub fn (mut b Builder) go_back_to(pos int) {
b.buf.trim(pos)
b.len = pos
b.trim(pos)
}
// writeln appends the string `s`, and then a newline character.
@ -103,10 +85,9 @@ pub fn (mut b Builder) writeln(s string) {
// for c in s {
// b.buf << c
// }
unsafe { b.buf.push_many(s.str, s.len) }
unsafe { b.push_many(s.str, s.len) }
// b.buf << []byte(s) // TODO
b.buf << byte(`\n`)
b.len += s.len + 1
b << byte(`\n`)
}
// buf == 'hello world'
@ -115,7 +96,8 @@ pub fn (b &Builder) last_n(n int) string {
if n > b.len {
return ''
}
return b.buf[b.len - n..].bytestr()
x := unsafe { (&[]byte(b))[b.len - n..] }
return x.bytestr()
}
// buf == 'hello world'
@ -124,7 +106,8 @@ pub fn (b &Builder) after(n int) string {
if n >= b.len {
return ''
}
return b.buf[n..].bytestr()
x := unsafe { (&[]byte(b))[n..] }
return x.bytestr()
}
// str returns a copy of all of the accumulated buffer content.
@ -135,17 +118,15 @@ pub fn (b &Builder) after(n int) string {
// accumulated data that was in the string builder, before the
// .str() call.
pub fn (mut b Builder) str() string {
b.buf << byte(0)
bcopy := unsafe { &byte(memdup(b.buf.data, b.buf.len)) }
s := unsafe { bcopy.vstring_with_len(b.len) }
b.len = 0
b.buf.trim(0)
b << byte(0)
bcopy := unsafe { &byte(memdup(b.data, b.len)) }
s := unsafe { bcopy.vstring_with_len(b.len - 1) }
b.trim(0)
return s
}
// free - manually free the contents of the buffer
[unsafe]
pub fn (mut b Builder) free() {
unsafe { free(b.buf.data) }
b.len = 0
unsafe { free(b.data) }
}
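With `Builder` reduced to a `[]byte` alias, the separate `len` field and the `.buf` indirection disappear, but the public API stays the same. A quick sketch (not exhaustive) of the methods touched in this file:

import strings

fn main() {
	mut sb := strings.new_builder(32)
	sb.write_string('hello')
	sb.write_b(byte(` `))
	sb.writeln('world')
	assert sb.len == 12 // 'hello world' plus the trailing newline
	s := sb.str() // returns the accumulated text and empties the builder
	print(s)
	assert sb.len == 0
}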

View File

@ -3,7 +3,7 @@ import strings
type MyInt = int
fn test_sb() {
mut sb := strings.Builder{}
mut sb := strings.new_builder(100)
sb.write_string('hi')
sb.write_string('!')
sb.write_string('hello')

View File

@ -766,10 +766,11 @@ pub fn (mut c Checker) struct_init(mut struct_init ast.StructInit) ast.Type {
c.error('unknown struct: $type_sym.name', struct_init.pos)
return ast.void_type
}
if sym.kind != .struct_ {
if sym.kind == .struct_ {
info = sym.info as ast.Struct
} else {
c.error('alias type name: $sym.name is not struct type', struct_init.pos)
}
info = sym.info as ast.Struct
} else {
info = type_sym.info as ast.Struct
}
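The reordered branch above deals with struct initialization through a type alias. A minimal example of the kind of code it resolves (the names are illustrative):

struct Point {
	x int
	y int
}

type MyPoint = Point // an alias whose base type is a struct

fn main() {
	p := MyPoint{
		x: 1
		y: 2
	}
	println(p.x + p.y)
}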

View File

@ -126,7 +126,7 @@ pub fn (mut f Fmt) wrap_long_line(penalty_idx int, add_indent bool) bool {
if penalty_idx > 0 && f.line_len <= fmt.max_len[penalty_idx] {
return false
}
if f.out.buf[f.out.buf.len - 1] == ` ` {
if f.out[f.out.len - 1] == ` ` {
f.out.go_back(1)
}
f.write('\n')
@ -149,7 +149,7 @@ pub fn (mut f Fmt) remove_new_line(cfg RemoveNewLineConfig) {
mut buffer := if cfg.imports_buffer { unsafe { &f.out_imports } } else { unsafe { &f.out } }
mut i := 0
for i = buffer.len - 1; i >= 0; i-- {
if !buffer.buf[i].is_space() { // != `\n` {
if !buffer.byte_at(i).is_space() { // != `\n` {
break
}
}
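Both call sites above rely on the builder now being directly indexable, plus the new `byte_at()` helper for access through a `&Builder`. A small sketch of the two access styles:

import strings

fn main() {
	mut b := strings.new_builder(16)
	b.write_string('abc ')
	// direct indexing works because Builder is a []byte alias
	assert b[b.len - 1] == byte(` `)
	// byte_at() reads the same byte through a reference receiver
	assert b.byte_at(0) == byte(`a`)
	println(b.str())
}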

View File

@ -306,7 +306,7 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
fields_start := f.out.len
fields_loop: for {
if !single_line_fields {
if use_short_args && f.out.buf[f.out.buf.len - 1] == ` ` {
if use_short_args && f.out[f.out.len - 1] == ` ` {
// v Remove space at tail of line
// f(a, b, c, \n
// f1: 0\n

View File

@ -5837,9 +5837,9 @@ fn (mut g Gen) insert_before_stmt(s string) {
}
fn (mut g Gen) write_expr_to_string(expr ast.Expr) string {
pos := g.out.buf.len
pos := g.out.len
g.expr(expr)
return g.out.cut_last(g.out.buf.len - pos)
return g.out.cut_last(g.out.len - pos)
}
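`write_expr_to_string` leans on `cut_last`, which removes the last `n` bytes from the buffer and returns them. A small sketch of its behaviour in isolation:

import strings

fn main() {
	mut b := strings.new_builder(16)
	b.write_string('hello world')
	tail := b.cut_last(5) // removes and returns the last 5 bytes
	assert tail == 'world'
	assert b.str() == 'hello '
}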
// fn (mut g Gen) start_tmp() {

View File

@ -39,7 +39,7 @@ fn (mut g Gen) process_fn_decl(node ast.FnDecl) {
g.gen_attrs(node.attrs)
// g.tmp_count = 0 TODO
mut skip := false
pos := g.out.buf.len
pos := g.out.len
should_bundle_module := util.should_bundle_module(node.mod)
if g.pref.build_mode == .build_module {
// if node.name.contains('parse_text') {

View File

@ -314,25 +314,25 @@ pub const (
keywords = build_keys()
)
pub fn key_to_token(key string) Kind {
return Kind(token.keywords[key])
}
[inline]
pub fn is_key(key string) bool {
return int(key_to_token(key)) > 0
return int(token.keywords[key]) > 0
}
[inline]
pub fn is_decl(t Kind) bool {
return t in [.key_enum, .key_interface, .key_fn, .key_struct, .key_type, .key_const, .key_pub,
.eof,
]
}
[inline]
pub fn (t Kind) is_assign() bool {
return t in token.assign_tokens
}
// note: used for some code generation, so no quoting
[inline]
pub fn (t Kind) str() string {
return token.token_str[int(t)]
}
@ -434,34 +434,41 @@ const (
)
// precedence returns a token's precedence if defined, otherwise lowest_prec
[inline]
pub fn (tok Token) precedence() int {
return int(token.precedences[tok.kind])
}
// is_scalar returns true if the token is a scalar
[inline]
pub fn (tok Token) is_scalar() bool {
return tok.kind in [.number, .string]
}
// is_unary returns true if the token can be in a unary expression
[inline]
pub fn (tok Token) is_unary() bool {
// `+` | `-` | `!` | `~` | `*` | `&` | `<-`
return tok.kind in [.plus, .minus, .not, .bit_not, .mul, .amp, .arrow]
}
[inline]
pub fn (tok Kind) is_relational() bool {
// `<` | `<=` | `>` | `>=` | `==` | `!=`
return tok in [.lt, .le, .gt, .ge, .eq, .ne]
}
[inline]
pub fn (k Kind) is_start_of_type() bool {
return k in [.name, .lpar, .amp, .lsbr, .question, .key_shared]
}
[inline]
pub fn (kind Kind) is_prefix() bool {
return kind in [.minus, .amp, .mul, .not, .bit_not]
}
[inline]
pub fn (kind Kind) is_infix() bool {
return kind in [.plus, .minus, .mod, .mul, .div, .eq, .ne, .gt, .lt, .key_in, .key_as, .ge,
.le, .logical_or, .xor, .not_in, .key_is, .not_is, .and, .dot, .pipe, .amp, .left_shift,
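`is_key` now consults the `keywords` map directly instead of going through `key_to_token`; from a caller's point of view nothing changes, as this small sanity-check sketch shows:

import v.token

fn main() {
	assert token.is_key('fn')
	assert !token.is_key('banana')
	println(token.is_key('struct'))
}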

View File

@ -53,7 +53,7 @@ pub fn (mut sse SSEConnection) start() ? {
start_sb.write_string('\r\n$k: $v')
}
start_sb.write_string('\r\n')
sse.conn.write(start_sb.buf) or { return error('could not start sse response') }
sse.conn.write(start_sb) or { return error('could not start sse response') }
}
// send_message sends a single message to the http client that listens for SSE.
@ -73,5 +73,5 @@ pub fn (mut sse SSEConnection) send_message(message SSEMessage) ? {
sb.write_string('retry: $message.retry\n')
}
sb.write_string('\n')
sse.conn.write(sb.buf) ?
sse.conn.write(sb) ?
}
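Because `Builder` is an alias of `[]byte`, the whole buffer can now be handed to APIs that expect plain bytes, without the old `.buf` field in between. The same pattern outside vweb (`consume` is a stand-in for `conn.write`):

import strings

// a stand-in for an API that accepts plain bytes, like net.TcpConn.write
fn consume(data []byte) int {
	return data.len
}

fn main() {
	mut sb := strings.new_builder(64)
	sb.write_string('data: hello\n\n')
	n := consume(sb) // a Builder can be passed where []byte is expected
	println(n)
}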

View File

@ -156,7 +156,7 @@ fn save_raw_data_as_array(buf_bin []byte, file_name string) {
for x in buf_bin {
buf.write_string('0x${x:02x},')
}
os.write_file_array(file_name, buf.buf) or { panic(err) }
os.write_file_array(file_name, buf) or { panic(err) }
}
fn test_main() {