vfmt: handle comments

parent 51663520c8
commit ffa6bcfff5

@@ -65,12 +65,12 @@ struct K {
}

fn test_empty() {
mut chunks := []Chunk{}
mut chunks := []Chunk
a := Chunk{}
assert chunks.len == 0
chunks << a
assert chunks.len == 1
chunks = []Chunk{}
chunks = []Chunk
assert chunks.len == 0
chunks << a
assert chunks.len == 1

@@ -90,6 +90,11 @@ fn test_push() {
// i := 3
// a.insert(0, &i)
// ----------------------------
fn test_insert() {
// mut a := [1, 2]
// a.insert(0, 3)
// println(a)
}
// fn test_insert() {
// mut a := [1, 2]


@@ -168,7 +168,6 @@ fn (p mut Parser) name_expr() string {
// known_type := p.table.known_type(name)
orig_name := name
is_c := name == 'C' && p.peek() == .dot

if is_c {
p.check(.name)
p.check(.dot)

@@ -206,7 +206,6 @@ pub fn (v mut V) compile() {
for file in v.files {
v.parse(file, .decl)
}

// Main pass
cgen.pass = .main
if v.pref.is_debug {

@@ -268,7 +267,7 @@ pub fn (v mut V) compile() {
defs_pos = 0
}
cgen.nogen = q
for file in v.files {
for i, file in v.files {
v.parse(file, .main)
//if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
// Format all files (don't format automatically generated vlib headers)

@@ -10,7 +10,7 @@ import (
)

struct Parser {
file_path string // if parsing file will be path eg, "/home/user/hello.v"
file_path string // "/home/user/hello.v"
file_name string // "hello.v"
file_platform string // ".v", "_windows.v", "_nix.v", "_darwin.v", "_linux.v" ...
// When p.file_pcguard != '', it contains a

@@ -202,33 +202,41 @@ fn (p mut Parser) set_current_fn(f Fn) {
fn (p mut Parser) next() {
// Generate a formatted version of this token
// (only when vfmt compile time flag is enabled, otherwise this function
// is not even generatd)
// is not even generated)
p.fnext()

p.prev_tok2 = p.prev_tok
p.prev_tok = p.tok
p.scanner.prev_tok = p.tok
if p.token_idx >= p.tokens.len {
p.prev_tok2 = p.prev_tok
p.prev_tok = p.tok
p.scanner.prev_tok = p.tok
if p.token_idx >= p.tokens.len {
p.tok = .eof
p.lit = ''
return
}
res := p.tokens[p.token_idx]
p.token_idx++
p.tok = res.tok
p.lit = res.lit
p.scanner.line_nr = res.line_nr
p.cgen.line = res.line_nr


}
res := p.tokens[p.token_idx]
p.token_idx++
p.tok = res.tok
p.lit = res.lit
p.scanner.line_nr = res.line_nr
p.cgen.line = res.line_nr
}

fn (p & Parser) peek() TokenKind {
fn (p &Parser) peek() TokenKind {
if p.token_idx >= p.tokens.len - 2 {
return .eof
}
tok := p.tokens[p.token_idx]
return tok.tok
return p.tokens[p.token_idx].tok
/*
mut i := p.token_idx
for i < p.tokens.len {
tok := p.tokens[i]
if tok.tok != .mline_comment && tok.tok != .line_comment {
return tok.tok
}
i++
}
return .eof
*/
}

// TODO remove dups

@@ -461,11 +469,17 @@ fn (p mut Parser) imports() {
p.check(.key_import)
// `import ()`
if p.tok == .lpar {
p.fspace()
p.check(.lpar)
p.fmt_inc()
p.fgenln('')
for p.tok != .rpar && p.tok != .eof {
p.import_statement()
p.fgenln('')
}
p.fmt_dec()
p.check(.rpar)
p.fgenln('\n')
return
}
// `import foo`

@@ -615,7 +629,6 @@ fn (p mut Parser) const_decl() {
}
p.fmt_dec()
p.check(.rpar)
p.fgenln('\n')
p.inside_const = false
}


@@ -676,7 +689,6 @@ fn key_to_type_cat(tok TokenKind) TypeCategory {
.key_interface { return .interface_ }
.key_struct { return .struct_ }
.key_union { return .union_ }
//TokenKind.key_ => return .interface_
}
verror('Unknown token: $tok')
return .builtin

@@ -708,6 +720,9 @@ fn (p &Parser) strtok() string {
if p.tok == .number {
return p.lit
}
if p.tok == .chartoken {
return '`$p.lit`'
}
if p.tok == .str {
if p.lit.contains("'") {
return '"$p.lit"'

@@ -1166,6 +1181,9 @@ fn (p mut Parser) statement(add_semi bool) string {
}
.lcbr {// {} block
p.check(.lcbr)
if p.tok == .rcbr {
p.error('empty statements block')
}
p.genln('{')
p.statements()
return ''

@@ -2075,6 +2093,7 @@ fn (p mut Parser) assoc() string {
if p.tok != .rcbr {
p.check(.comma)
}
p.fgenln('')
}
// Copy the rest of the fields
T := p.table.find_type(var.typ)

@@ -2083,7 +2102,7 @@ fn (p mut Parser) assoc() string {
if f in fields {
continue
}
p.gen('.$f = $name . $f,')
p.gen('.$f = ${name}.$f,')
}
p.check(.rcbr)
p.gen('}')

@@ -2266,6 +2285,7 @@ fn (p mut Parser) map_init() string {
keys_gen += 'tos3("$key"), '
p.check(.str)
p.check(.colon)
p.fspace()
t, val_expr := p.tmp_expr()
if i == 0 {
val_type = t

@@ -2278,12 +2298,14 @@ fn (p mut Parser) map_init() string {
}
vals_gen += '$val_expr, '
if p.tok == .rcbr {
p.fgenln('')
p.check(.rcbr)
break
}
if p.tok == .comma {
p.check(.comma)
}
p.fgenln('')
}
p.gen('new_map_init($i, sizeof($val_type), ' +
'(string[$i]){ $keys_gen }, ($val_type [$i]){ $vals_gen } )')

@@ -588,6 +588,9 @@ fn (s mut Scanner) scan() ScanRes {
s.ignore_line()
s.line_comment = s.text[start + 1..s.pos]
s.line_comment = s.line_comment.trim_space()
if s.is_fmt {
return scan_res(.line_comment, s.line_comment)
}
//s.fgenln('// ${s.prev_tok.str()} "$s.line_comment"')
// Skip the comment (return the next token)
return s.scan()

@@ -617,8 +620,11 @@ fn (s mut Scanner) scan() ScanRes {
}
s.pos++
end := s.pos + 1
comm := s.text[start..end]
s.fgenln(comm)
comment := s.text[start..end]
if s.is_fmt {
s.line_comment = comment
return scan_res(.mline_comment, s.line_comment)
}
// Skip if not in fmt mode
return s.scan()
}

@@ -31,7 +31,7 @@ fn (p mut Parser) struct_decl() {
p.error('type names cannot contain `_`')
}
if !p.builtin_mod && !name[0].is_capital() {
p.error('struct names must be capitalized: use `struct ${name.capitalize()}`')
p.error('mod=$p.mod struct names must be capitalized: use `struct ${name.capitalize()}`')
}
if is_interface && !name.ends_with('er') {
p.error('interface names temporarily have to end with `er` (e.g. `Speaker`, `Reader`)')

@@ -112,15 +112,18 @@ fn (p mut Parser) struct_decl() {
mut is_pub_field := false
mut is_mut := false
mut names := []string// to avoid dup names TODO alloc perf
/*
mut fmt_max_len := 0
for field in typ.fields {
if field.name.len > max_len {
fmt_max_len = field.name.len
// TODO why is typ.fields == 0?
if p.scanner.is_fmt && p.pass == .main {
for field in typ.fields {
println(field.name)
if field.name.len > fmt_max_len {
fmt_max_len = field.name.len
}
}
}
println('fmt max len = $max_len nrfields=$typ.fields.len pass=$p.pass')
*/
//println('fmt max len = $max_len nrfields=$typ.fields.len pass=$p.pass')


if !is_ph && p.first_pass() {
p.table.register_type2(typ)

@@ -164,6 +167,11 @@ fn (p mut Parser) struct_decl() {
// Check if reserved name
field_name_token_idx := p.cur_tok_index()
field_name := if name != 'Option' { p.table.var_cgen_name(p.check_name()) } else { p.check_name() }
/*
if !p.first_pass() {
p.fgen(strings.repeat(` `, fmt_max_len - field_name.len))
}
*/
// Check dups
if field_name in names {
p.error('duplicate field `$field_name`')

@@ -71,8 +71,8 @@ enum TokenKind {
ge
le
// comments
//line_com
//mline_com
line_comment
mline_comment
nl
dot
dotdot

@@ -192,7 +192,8 @@ fn build_token_str() []string {
s[TokenKind.question] = '?'
s[TokenKind.left_shift] = '<<'
s[TokenKind.righ_shift] = '>>'
//s[TokenKind.line_com] = '//'
s[TokenKind.line_comment] = '// line comment'
s[TokenKind.mline_comment] = '/* mline comment */'
s[TokenKind.nl] = 'NLL'
s[TokenKind.dollar] = '$'
s[TokenKind.key_assert] = 'assert'

@@ -22,7 +22,7 @@ fn (scanner mut Scanner) fgen(s_ string) {
[if vfmt]
fn (scanner mut Scanner) fgenln(s_ string) {
mut s := s_
if scanner.fmt_line_empty {
if scanner.fmt_line_empty && scanner.fmt_indent > 0 {
s = strings.repeat(`\t`, scanner.fmt_indent) + s
}
//scanner.fmt_out << s

@@ -88,24 +88,63 @@ fn (p mut Parser) fmt_dec() {

[if vfmt]
fn (p mut Scanner) init_fmt() {
// Right now we can't do `$if vfmt {`, so I'm using
// a conditional function init_fmt to set this flag.
// This function will only be called if `-d vfmt` is passed.
p.is_fmt = true
}

[if vfmt]
fn (p mut Parser) fnext() {
if p.tok == .eof {
return
}
//if p.tok == .eof {
//println('eof ret')
//return
//}
if p.tok == .rcbr && !p.inside_if_expr && p.prev_tok != .lcbr {
p.fmt_dec()
}
mut s := p.strtok()
if p.tok != .eof {
p.fgen(s)
}
// vfmt: increase indentation on `{` unless it's `{}`
if p.tok == .lcbr && !p.inside_if_expr && p.peek() != .rcbr {
p.fgenln('')
p.fmt_inc()
}

// Skip comments and add them to vfmt output
if p.tokens[p.token_idx].tok in [.line_comment, .mline_comment] {
// Newline before the comment and after consts and closing }
if p.inside_const {
p.fgenln('\n')
}
if p.tok == .rcbr {
p.fgenln('')
}
for p.token_idx < p.tokens.len - 1 {
tok := p.tokens[p.token_idx].tok
if tok != .line_comment && tok != .mline_comment {
break
}
comment_token := p.tokens[p.token_idx]
comment := comment_token.lit
if p.token_idx > 0 && comment_token.line_nr > p.tokens[p.token_idx-1].line_nr {
//p.fgenln('')
}
if tok == .line_comment {
p.fgen('// ' + comment)
} else {
p.fgen(comment)
}
if p.token_idx > 0 &&
comment_token.line_nr < p.tokens[p.token_idx+1].line_nr
{
p.fgenln('')
}
p.token_idx++
}
}
}


@@ -117,6 +156,7 @@ fn (p mut Parser) gen_fmt() {
if p.file_name == '' {
return
}
//s := p.scanner.fmt_out.str().replace('\n\n\n', '\n').trim_space()
s := p.scanner.fmt_out.str().trim_space()
//s := p.scanner.fmt_out.join('').trim_space()
if s == '' {

@@ -191,7 +191,7 @@ pub fn new_context(cfg gg.Cfg) &Context {
C.glPixelStorei(C.GL_UNPACK_ALIGNMENT, 1)
// Gen texture
// Load first 128 characters of ASCII set
mut chars := []Character{}
mut chars := []Character
for c := 0; c < 128; c++ {
mut ch := ft_load_char(face, i64(c))
// s := utf32_to_str(uint(0x043f))

@@ -52,8 +52,6 @@ pub:
pub struct GG {
shader gl.Shader
// use_ortho bool
width int
height int
vao u32
rect_vao u32
rect_vbo u32

@@ -61,7 +59,10 @@ pub struct GG {
line_vbo u32
vbo u32
scale int // retina = 2 , normal = 1
//pub:
pub mut:
width int
height int
window &glfw.Window
render_fn fn()
}

@@ -12,23 +12,22 @@ pub:
}

pub const (
// Primary colors
Blue = Color { r: 0, g: 0, b: 255 }
Red = Color { r: 255, g: 0, b: 0 }
Green = Color { r: 0, g: 255, b: 0 }
green = Color { r: 0, g: 255, b: 0 }
Yellow = Color { r: 255, g: 255, b: 0 }

// Secondary colors
Green = Color { r: 0, g: 255, b: 0 }
Orange = Color { r: 255, g: 165, b: 0 }
Purple = Color { r: 128, g: 0, b: 128 }

// Other
Black = Color { r: 0, g: 0, b: 0 }
Gray = Color { r: 128, g: 128, b: 128 }
Indigo = Color { r: 75, g: 0, b: 130 }
Pink = Color { r: 255, g: 192, b: 203 }
Violet = Color { r: 238, g: 130, b: 238 }
White = Color { r: 255, g: 255, b: 255 }
white = Color { r: 255, g: 255, b: 255 }

// Shades
DarkBlue = Color { r: 0, g: 0, b: 139 }

@@ -57,7 +57,13 @@ pub fn dir_exists(path string) bool {

// mkdir creates a new directory with the specified path.
pub fn mkdir(path string) {
C.mkdir(path.str, 511)// S_IRWXU | S_IRWXG | S_IRWXO
/*
$if linux {
C.syscall(C.SYS_mkdir, path.str)
} $else {
*/
C.mkdir(path.str, 511)// S_IRWXU | S_IRWXG | S_IRWXO
//}
}

// exec starts the specified command, waits for it to complete, and returns its output.

@@ -184,7 +184,15 @@ pub fn unix(abs int) Time {

month++ // because January is 1
day = day - begin + 1
return Time{year:year, month: month, day:day, hour:hour, minute: minute, second: second}
return Time{
year:year
month: month
day:day
hour:hour
minute: minute
second: second
uni: abs
}
}

pub fn convert_ctime(t tm) Time {

@@ -224,8 +232,6 @@ pub fn (t Time) smonth() string {
}

// hhmm returns a string for t in the given format HH:MM in 24h notation
// @param
// @return string
// @example 21:04
pub fn (t Time) hhmm() string {
return t.get_fmt_time_str(.hhmm24)

@@ -238,41 +244,26 @@ fn (t Time) hhmm_tmp() string {
*/

// hhmm12 returns a string for t in the given format HH:MM in 12h notation
// @param
// @return string
// @example 9:04 p.m.
pub fn (t Time) hhmm12() string {
return t.get_fmt_time_str(.hhmm12)
}

// hhmmss returns a string for t in the given format HH:MM:SS in 24h notation
// @param
// @return string
// @example 21:04:03
pub fn (t Time) hhmmss() string {
return t.get_fmt_time_str(.hhmmss24)
}

// ymmdd returns a string for t in the given format YYYY-MM-DD
// @param
// @return string
// @example 2012-01-05
pub fn (t Time) ymmdd() string {
return t.get_fmt_date_str(.hyphen, .yyyymmdd)
}

// ddmmy returns a string for t in the given format DD.MM.YYYY
// @param
// @return string
// @example 05.02.2012
pub fn (t Time) ddmmy() string {
return t.get_fmt_date_str(.dot, .ddmmyyyy)
}

// md returns a string for t in the given format MMM D
// @param
// @return string
// @example Jul 3
pub fn (t Time) md() string {
return t.get_fmt_date_str(.space, .mmmd)
}

@@ -373,6 +364,10 @@ pub fn (t Time) add_seconds(seconds int) Time {
return unix(t.uni + seconds)
}

pub fn (t Time) add_days(days int) Time {
return unix(t.uni + days * 3600 * 24)
}

// TODO use time.Duration instead of seconds
fn since(t Time) int {
return 0