parser: array init: `[]string` => `[]string{}`
parent b898970031
commit 3ab8dc0092
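The change below is mechanical: every bare empty-array initializer is rewritten to the explicit `{}` form, and the parser gains a warning pointing at the new spelling (see the `vlib/v/parser/containers.v` hunk). As a quick illustration of the two spellings, a made-up V snippet that is not part of this commit:

```v
fn main() {
	// Old spelling, which the parser now warns about:
	//     mut names := []string
	// New spelling, with an explicit empty initializer:
	mut names := []string{}
	names << 'alice'
	names << 'bob'
	println(names.len) // 2
}
```

The `{}` initializer is the same syntax that also accepts `len`/`cap` fields, e.g. `[]int{ len: 10, cap: 100}`, which the parser comments in this diff mention.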
@@ -531,7 +531,7 @@ pub fn compare_f32(a, b &f32) int {
 // a.pointers() returns a new array, where each element
 // is the address of the corresponding element in a.
 pub fn (a array) pointers() []voidptr {
-    mut res := []voidptr
+    mut res := []voidptr{}
     for i in 0..a.len {
         res << byteptr(a.data) + i * a.element_size
     }
@@ -90,7 +90,7 @@ fn print_backtrace_skipping_top_frames_linux(skipframes int) bool {
     buffer := [100]byteptr
     nr_ptrs := backtrace(buffer, 100)
     nr_actual_frames := nr_ptrs - skipframes
-    mut sframes := []string
+    mut sframes := []string{}
     //////csymbols := backtrace_symbols(*voidptr(&buffer[skipframes]), nr_actual_frames)
     csymbols := backtrace_symbols(&buffer[skipframes], nr_actual_frames)
     for i in 0 .. nr_actual_frames {
@@ -144,7 +144,7 @@ pub fn (s string) replace(rep, with string) string {
     }
     // TODO PERF Allocating ints is expensive. Should be a stack array
     // Get locations of all reps within this string
-    mut idxs := []int
+    mut idxs := []int{}
     mut idx := 0
     for {
         idx = s.index_after(rep, idx)
@@ -231,7 +231,7 @@ pub fn (s string) replace_each(vals []string) string {
     // Remember positions of all rep strings, and calculate the length
     // of the new string to do just one allocation.
     mut new_len := s.len
-    mut idxs := []RepIndex
+    mut idxs := []RepIndex{}
     mut idx := 0
     for rep_i := 0; rep_i < vals.len; rep_i += 2 {
         // vals: ['rep1, 'with1', 'rep2', 'with2']
@@ -407,7 +407,7 @@ The last returned element has the remainder of the string, even if
 the remainder contains more `delim` substrings.
 */
 pub fn (s string) split_nth(delim string, nth int) []string {
-    mut res := []string
+    mut res := []string{}
     mut i := 0
     if delim.len == 0 {
         i = 1
@@ -458,7 +458,7 @@ pub fn (s string) split_nth(delim string, nth int) []string {
 }

 pub fn (s string) split_into_lines() []string {
-    mut res := []string
+    mut res := []string{}
     if s.len == 0 {
         return res
     }
@@ -788,7 +788,7 @@ pub fn (s string) is_capital() bool {

 pub fn (s string) title() string {
     words := s.split(' ')
-    mut tit := []string
+    mut tit := []string{}
     for word in words {
         tit << word.capitalize()
     }
@@ -5,7 +5,7 @@ module cmdline
 // param: '-d'
 // ret: ['aa', 'bb', 'cc']
 pub fn options(args []string, param string) []string {
-    mut flags := []string
+    mut flags := []string{}
     for i, v in args {
         if v == param {
             if i + 1 < args.len {
@@ -39,7 +39,7 @@ pub fn option(args []string, param string, def string) string {
 // what: ['test']
 // ret: ['-stat']
 pub fn options_before(args []string, what []string) []string {
-    mut args_before := []string
+    mut args_before := []string {}
     for a in args {
         if a in what {
             break
@@ -55,7 +55,7 @@ pub fn options_before(args []string, what []string) []string {
 // ret: ['aaa.v']
 pub fn options_after(args []string, what []string) []string {
     mut found := false
-    mut args_after := []string
+    mut args_after := []string{}
     for a in args {
         if a in what {
             found = true
vlib/os/os.v
@@ -24,7 +24,7 @@ fn C.readdir(voidptr) C.dirent


 pub const (
-    args = []string
+    args = []string{}
     MAX_PATH = 4096
 )

@@ -245,7 +245,7 @@ fn read_ulines(path string) ?[]ustring {
         return error(err)
     }
     // mut ulines := new_array(0, lines.len, sizeof(ustring))
-    mut ulines := []ustring
+    mut ulines := []ustring{}
     for myline in lines {
         // ulines[i] = ustr
         ulines << myline.ustring()
@@ -722,7 +722,7 @@ pub fn get_raw_line() string {

 pub fn get_lines() []string {
     mut line := ''
-    mut inputstr := []string
+    mut inputstr := []string{}
     for {
         line = get_line()
         if line.len <= 0 {
@@ -1043,7 +1043,7 @@ pub fn is_abs_path(path string) bool {

 // join returns path as string from string parameter(s).
 pub fn join_path(base string, dirs ...string) string {
-    mut result := []string
+    mut result := []string{}
     result << base.trim_right('\\/')
     for d in dirs {
         result << d
@@ -1059,7 +1059,7 @@ pub fn walk_ext(path, ext string) []string {
     mut files := os.ls(path) or {
         return []
     }
-    mut res := []string
+    mut res := []string{}
     separator := if path.ends_with(os.path_separator) { '' } else { os.path_separator }
     for i, file in files {
         if file.starts_with('.') {
@@ -19,7 +19,7 @@ const (
 fn C.symlink(charptr, charptr) int

 fn init_os_args(argc int, argv &byteptr) []string {
-    mut args := []string
+    mut args := []string{}
     //mut args := []string(make(0, argc, sizeof(string)))
     //mut args := []string{len:argc}
     for i in 0 .. argc {
@@ -31,7 +31,7 @@ fn init_os_args(argc int, argv &byteptr) []string {
 }

 pub fn ls(path string) ?[]string {
-    mut res := []string
+    mut res := []string{}
     dir := C.opendir(path.str)
     if isnil(dir) {
         return error('ls() couldnt open dir "$path"')
@@ -58,6 +58,8 @@ pub fn (s &Scope) is_known(name string) bool {
     if _ := s.find(name) {
         return true
     }
+    //
+    else{}
     return false
 }

@@ -130,7 +132,7 @@ pub fn (s mut Scope) remove_unused_var(name string) {
 }

 pub fn (s mut Scope) unused_vars() []UnusedVar {
-    ret := []UnusedVar
+    ret := []UnusedVar{}
     for _, v in s.unused_vars {
         ret << v
     }
@@ -121,7 +121,7 @@ pub fn (x Expr) str() string {
             return '${it.expr.str()}.${it.field}'
         }
         StringInterLiteral {
-            res := []string
+            res := []string{}
             res << "'"
             for i, val in it.vals {
                 res << val
@@ -161,7 +161,7 @@ pub fn (a CallArg) str() string {
 }

 pub fn args2str(args []CallArg) string {
-    mut res := []string
+    mut res := []string{}
     for a in args {
         res << a.str()
     }
@@ -47,7 +47,7 @@ pub fn new_builder(pref &pref.Preferences) Builder {

 // parse all deps from already parsed files
 pub fn (mut b Builder) parse_imports() {
-    mut done_imports := []string
+    mut done_imports := []string{}
     // NB: b.parsed_files is appended in the loop,
     // so we can not use the shorter `for in` form.
     for i := 0; i < b.parsed_files.len; i++ {
@@ -97,7 +97,7 @@ pub fn (mut b Builder) resolve_deps() {
         eprintln(deps_resolved.display())
         eprintln('------------------------------------------')
     }
-    mut mods := []string
+    mut mods := []string{}
     for node in deps_resolved.nodes {
         mods << node.name
     }
@@ -106,7 +106,7 @@ pub fn (mut b Builder) resolve_deps() {
         eprintln(mods.str())
         eprintln('-------------------------------')
     }
-    mut reordered_parsed_files := []ast.File
+    mut reordered_parsed_files := []ast.File{}
     for m in mods {
         for pf in b.parsed_files {
             if m == pf.mod.name {
@@ -124,7 +124,7 @@ pub fn (b &Builder) import_graph() &depgraph.DepGraph {
     builtins << 'builtin'
     mut graph := depgraph.new_dep_graph()
     for p in b.parsed_files {
-        mut deps := []string
+        mut deps := []string{}
         if p.mod.name !in builtins {
             deps << 'builtin'
         }
@@ -137,7 +137,7 @@ pub fn (b &Builder) import_graph() &depgraph.DepGraph {
 }

 pub fn (b Builder) v_files_from_dir(dir string) []string {
-    mut res := []string
+    mut res := []string{}
     if !os.exists(dir) {
         if dir == 'compiler' && os.is_dir('vlib') {
             println('looks like you are trying to build V with an old command')
@@ -246,7 +246,7 @@ pub fn (b Builder) find_module_path(mod, fpath string) ?string {
     // support @VROOT/v.mod relative paths:
     vmod_file_location := vmod.mod_file_cacher.get(fpath)
     mod_path := module_path(mod)
-    mut module_lookup_paths := []string
+    mut module_lookup_paths := []string{}
     if vmod_file_location.vmod_file.len != 0 && vmod_file_location.vmod_folder !in b.module_search_paths {
         module_lookup_paths << vmod_file_location.vmod_folder
     }
@@ -4,8 +4,8 @@ import v.cflag

 // get flags for current os
 fn (v &Builder) get_os_cflags() []cflag.CFlag {
-    mut flags := []cflag.CFlag
-    mut ctimedefines := []string
+    mut flags := []cflag.CFlag{}
+    mut ctimedefines := []string{}
     if v.pref.compile_defines.len > 0 {
         ctimedefines << v.pref.compile_defines
     }
@@ -24,7 +24,7 @@ fn (v &Builder) get_os_cflags() []cflag.CFlag {
 }

 fn (v &Builder) get_rest_of_module_cflags(c &cflag.CFlag) []cflag.CFlag {
-    mut flags := []cflag.CFlag
+    mut flags := []cflag.CFlag{}
     cflags := v.get_os_cflags()
     for flag in cflags {
         if c.mod == flag.mod {
@@ -33,15 +33,9 @@ pub fn compile(command string, pref &pref.Preferences) {
     }
     mut tmark := benchmark.new_benchmark()
     match pref.backend {
-        .c {
-            b.compile_c()
-        }
-        .js {
-            b.compile_js()
-        }
-        .x64 {
-            b.compile_x64()
-        }
+        .c { b.compile_c() }
+        .js { b.compile_js() }
+        .x64 { b.compile_x64() }
     }
     if pref.is_stats {
         tmark.stop()
@@ -153,7 +147,7 @@ pub fn (v Builder) get_user_files() []string {
     v.log('get_v_files($dir)')
     // Need to store user files separately, because they have to be added after
     // libs, but we dont know which libs need to be added yet
-    mut user_files := []string
+    mut user_files := []string{}
     // See cmd/tools/preludes/README.md for more info about what preludes are
     vroot := os.dir(pref.vexe_path())
     preludes_path := os.join_path(vroot, 'cmd', 'tools', 'preludes')
@@ -22,13 +22,15 @@ struct MsvcResult {
 // shell32 for RegOpenKeyExW etc
 // Mimics a HKEY
 type RegKey voidptr
+
 // Taken from the windows SDK
 const (
-    HKEY_LOCAL_MACHINE = RegKey(0x80000002)// as RegKey
-    KEY_QUERY_VALUE = (0x0001)
-    KEY_WOW64_32KEY = (0x0200)
+    HKEY_LOCAL_MACHINE = RegKey(0x80000002)
+    KEY_QUERY_VALUE = (0x0001)
+    KEY_WOW64_32KEY = (0x0200)
     KEY_ENUMERATE_SUB_KEYS = (0x0008)
 )
+
 // Given a root key look for one of the subkeys in 'versions' and get the path
 fn find_windows_kit_internal(key RegKey, versions []string) ?string {
     $if windows {
@@ -41,7 +43,7 @@ fn find_windows_kit_internal(key RegKey, versions []string) ?string {
             continue
         }
         alloc_length := (required_bytes + 2)
-        mut value := &u16(malloc(alloc_length))
+        mut value := &&u16(malloc(alloc_length))
         if isnil(value) {
             continue
         }
@@ -75,7 +77,8 @@ fn find_windows_kit_root(host_arch string) ?WindowsKit {
     $if windows {
         root_key := RegKey(0)
         path := 'SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots'
-        rc := C.RegOpenKeyEx(HKEY_LOCAL_MACHINE, path.to_wide(), 0, KEY_QUERY_VALUE | KEY_WOW64_32KEY | KEY_ENUMERATE_SUB_KEYS, &root_key)
+        rc := C.RegOpenKeyEx(HKEY_LOCAL_MACHINE, path.to_wide(), 0, KEY_QUERY_VALUE | KEY_WOW64_32KEY |
+            KEY_ENUMERATE_SUB_KEYS, &root_key)
         defer {
             C.RegCloseKey(root_key)
         }
@@ -83,12 +86,12 @@ fn find_windows_kit_root(host_arch string) ?WindowsKit {
             return error('Unable to open root key')
         }
         // Try and find win10 kit
-        kit_root := find_windows_kit_internal(root_key, ['KitsRoot10', 'KitsRoot81'])or{
+        kit_root := find_windows_kit_internal(root_key, ['KitsRoot10', 'KitsRoot81']) or {
             return error('Unable to find a windows kit')
         }
         kit_lib := kit_root + 'Lib'
         // println(kit_lib)
-        files := os.ls(kit_lib)or{
+        files := os.ls(kit_lib) or {
             panic(err)
         }
         mut highest_path := ''
@@ -121,7 +124,7 @@ struct VsInstallation {
     exe_path string
 }

-fn find_vs(vswhere_dir string, host_arch string) ?VsInstallation {
+fn find_vs(vswhere_dir, host_arch string) ?VsInstallation {
     $if !windows {
         return error('Host OS does not support finding a Vs installation')
     }
@@ -129,17 +132,17 @@ fn find_vs(vswhere_dir string, host_arch string) ?VsInstallation {
     // VSWhere is guaranteed to be installed at this location now
     // If its not there then end user needs to update their visual studio
     // installation!
-    res := os.exec('"$vswhere_dir\\Microsoft Visual Studio\\Installer\\vswhere.exe" -latest -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath')or{
+    res := os.exec('"$vswhere_dir\\Microsoft Visual Studio\\Installer\\vswhere.exe" -latest -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath') or {
         return error(err)
     }
     // println('res: "$res"')
-    version := os.read_file('$res.output\\VC\\Auxiliary\\Build\\Microsoft.VCToolsVersion.default.txt')or{
+    version := os.read_file('$res.output\\VC\\Auxiliary\\Build\\Microsoft.VCToolsVersion.default.txt') or {
         println('Unable to find msvc version')
         return error('Unable to find vs installation')
     }
     version2 := version // TODO remove. cgen option bug if expr
     // println('version: $version')
-    v := if version.ends_with('\n') { version2[..version.len - 2] } else {version2 }
+    v := if version.ends_with('\n') { version2[..version.len - 2] } else { version2 }
     lib_path := '$res.output\\VC\\Tools\\MSVC\\$v\\lib\\$host_arch'
     include_path := '$res.output\\VC\\Tools\\MSVC\\$v\\include'
     if os.exists('$lib_path\\vcruntime.lib') {
@@ -160,10 +163,10 @@ fn find_msvc() ?MsvcResult {
     processor_architecture := os.getenv('PROCESSOR_ARCHITECTURE')
     vswhere_dir := if processor_architecture == 'x86' { '%ProgramFiles%' } else { '%ProgramFiles(x86)%' }
     host_arch := if processor_architecture == 'x86' { 'X86' } else { 'X64' }
-    wk := find_windows_kit_root(host_arch)or{
+    wk := find_windows_kit_root(host_arch) or {
         return error('Unable to find windows sdk')
     }
-    vs := find_vs(vswhere_dir, host_arch)or{
+    vs := find_vs(vswhere_dir, host_arch) or {
         return error('Unable to find visual studio')
     }
     return MsvcResult{
@@ -183,8 +186,8 @@ fn find_msvc() ?MsvcResult {
     }
 }

-pub fn (v mut Builder) cc_msvc() {
-    r := find_msvc()or{
+pub fn (mut v Builder) cc_msvc() {
+    r := find_msvc() or {
         // TODO: code reuse
         if !v.pref.is_keep_c && v.out_name_c != 'v.c' && v.out_name_c != 'v_macos.c' {
             os.rm(v.out_name_c)
@@ -204,8 +207,7 @@ pub fn (v mut Builder) cc_msvc() {
         a << '/MD'
         a << '/Zi'
         a << '/DNDEBUG'
-    }
-    else {
+    } else {
         a << '/Zi'
         a << '/MDd'
     }
@@ -215,8 +217,7 @@ pub fn (v mut Builder) cc_msvc() {
         }
         // Build dll
         a << '/LD'
-    }
-    else if !v.pref.out_name.ends_with('.exe') {
+    } else if !v.pref.out_name.ends_with('.exe') {
         v.pref.out_name += '.exe'
     }
     v.pref.out_name = os.real_path(v.pref.out_name)
@@ -224,8 +225,7 @@ pub fn (v mut Builder) cc_msvc() {
     if v.pref.build_mode == .build_module {
         // Compile only
         a << '/c'
-    }
-    else if v.pref.build_mode == .default_mode {
+    } else if v.pref.build_mode == .default_mode {
         /*
         b := os.real_path( '${pref.default_module_path}/vlib/builtin.obj' )
         alibs << '"$b"'
@@ -288,7 +288,7 @@ pub fn (v mut Builder) cc_msvc() {
         println('==========\n')
     }
     // println('$cmd')
-    res := os.exec(cmd)or{
+    res := os.exec(cmd) or {
         println(err)
         verror('msvc error')
         return
@@ -306,7 +306,7 @@ pub fn (v mut Builder) cc_msvc() {
 }

 fn build_thirdparty_obj_file_with_msvc(path string, moduleflags []cflag.CFlag) {
-    msvc := find_msvc()or{
+    msvc := find_msvc() or {
         println('Could not find visual studio')
         return
     }
@@ -319,7 +319,7 @@ fn build_thirdparty_obj_file_with_msvc(path string, moduleflags []cflag.CFlag) {
     }
     println('$obj_path not found, building it (with msvc)...')
     parent := os.dir(obj_path)
-    files := os.ls(parent)or{
+    files := os.ls(parent) or {
         panic(err)
     }
     mut cfiles := ''
@@ -335,7 +335,7 @@ fn build_thirdparty_obj_file_with_msvc(path string, moduleflags []cflag.CFlag) {
     cmd := '"$msvc.full_cl_exe_path" /volatile:ms /Zi /DNDEBUG $include_string /c $btarget $cfiles $atarget /Fo"$obj_path"'
     // NB: the quotes above ARE balanced.
     println('thirdparty cmd line: $cmd')
-    res := os.exec(cmd)or{
+    res := os.exec(cmd) or {
         println('msvc: failed thirdparty object build cmd: $cmd')
         verror(err)
         return
@@ -357,10 +357,10 @@ mut:
 }

 fn (cflags []cflag.CFlag) msvc_string_flags() MsvcStringFlags {
-    mut real_libs := []string
-    mut inc_paths := []string
-    mut lib_paths := []string
-    mut other_flags := []string
+    mut real_libs := []string{}
+    mut inc_paths := []string{}
+    mut lib_paths := []string{}
+    mut other_flags := []string{}
     for flag in cflags {
         // println('fl: $flag.name | flag arg: $flag.value')
         // We need to see if the flag contains -l
@@ -374,11 +374,9 @@ fn (cflags []cflag.CFlag) msvc_string_flags() MsvcStringFlags {
             // TODO: we should look for .defs aswell
            lib_lib := flag.value + '.lib'
            real_libs << lib_lib
-        }
-        else if flag.name == '-I' {
+        } else if flag.name == '-I' {
             inc_paths << flag.format()
-        }
-        else if flag.name == '-L' {
+        } else if flag.name == '-L' {
             lib_paths << flag.value
             lib_paths << flag.value + os.path_separator + 'msvc'
             // The above allows putting msvc specific .lib files in a subfolder msvc/ ,
@@ -386,24 +384,21 @@ fn (cflags []cflag.CFlag) msvc_string_flags() MsvcStringFlags {
             // NB: gcc is smart enough to not need .lib files at all in most cases, the .dll is enough.
             // When both a msvc .lib file and .dll file are present in the same folder,
             // as for example for glfw3, compilation with gcc would fail.
-        }
-        else if flag.value.ends_with('.o') {
+        } else if flag.value.ends_with('.o') {
             // msvc expects .obj not .o
             other_flags << '"${flag.value}bj"'
-        }
-        else {
+        } else {
             other_flags << flag.value
         }
     }
-    mut lpaths := []string
+    mut lpaths := []string{}
     for l in lib_paths {
         lpaths << '/LIBPATH:"' + os.real_path(l) + '"'
     }
     return MsvcStringFlags{
-        real_libs:real_libs
-        inc_paths:inc_paths
-        lib_paths:lpaths
-        other_flags:other_flags
+        real_libs: real_libs
+        inc_paths: inc_paths
+        lib_paths: lpaths
+        other_flags: other_flags
     }
-
 }
@@ -41,7 +41,7 @@ fn (cflags []CFlag) c_options_after_target_msvc() string {

 fn (cflags []CFlag) c_options_before_target() string {
     // -I flags, optimization flags and so on
-    mut args := []string
+    mut args := []string{}
     for flag in cflags {
         if flag.name != '-l' {
             args << flag.format()
@@ -52,7 +52,7 @@ fn (cflags []CFlag) c_options_before_target() string {

 fn (cflags []CFlag) c_options_after_target() string {
     // -l flags (libs)
-    mut args := []string
+    mut args := []string{}
     for flag in cflags {
         if flag.name == '-l' {
             args << flag.format()
@@ -62,7 +62,7 @@ fn (cflags []CFlag) c_options_after_target() string {
 }

 fn (cflags []CFlag) c_options_without_object_files() string {
-    mut args := []string
+    mut args := []string{}
     for flag in cflags {
         if flag.value.ends_with('.o') || flag.value.ends_with('.obj') {
             continue
@@ -73,7 +73,7 @@ fn (cflags []CFlag) c_options_without_object_files() string {
 }

 fn (cflags []CFlag) c_options_only_object_files() string {
-    mut args := []string
+    mut args := []string{}
     for flag in cflags {
         if flag.value.ends_with('.o') || flag.value.ends_with('.obj') {
             args << flag.format()
@@ -246,7 +246,7 @@ pub fn (mut c Checker) struct_init(struct_init mut ast.StructInit) table.Type {
     if struct_init.is_short && struct_init.fields.len > info.fields.len {
         c.error('too many fields', struct_init.pos)
     }
-    mut inited_fields := []string
+    mut inited_fields := []string{}
     for i, field in struct_init.fields {
         mut info_field := table.Field{}
         mut field_name := ''
@@ -318,11 +318,11 @@ pub fn (mut c Checker) infix_expr(infix_expr mut ast.InfixExpr) table.Type {
     right := c.table.get_type_symbol(right_type)
     left := c.table.get_type_symbol(left_type)
     // Single side check
+    // Place these branches according to ops' usage frequency to accelerate.
+    // TODO: First branch includes ops where single side check is not needed, or needed but hasn't been implemented.
+    // TODO: Some of the checks are not single side. Should find a better way to organize them.
     match infix_expr.op {
-        // Place these branches according to ops' usage frequency to accelerate.
-        // TODO: First branch includes ops where single side check is not needed, or needed but hasn't been implemented.
-        .eq, .ne, .gt, .lt, .ge, .le, .and, .logical_or, .dot, .key_as, .right_shift { }
-        // TODO: Some of the checks are not single side. Should find a better way to organize them.
+        .eq, .ne, .gt, .lt, .ge, .le, .and, .logical_or, .dot, .key_as, .right_shift {}
         .key_in, .not_in {
             match right.kind {
                 .array {
@@ -409,13 +409,14 @@ pub fn (mut c Checker) infix_expr(infix_expr mut ast.InfixExpr) table.Type {
                 c.error('mismatched types `$left.name` and `$right.name`', infix_expr.right.position())
             }
         }
-        else { }
+        else {}
     }
     // TODO: Absorb this block into the above single side check block to accelerate.
     if left_type == table.bool_type && infix_expr.op !in [.eq, .ne, .logical_or, .and] {
         c.error('bool types only have the following operators defined: `==`, `!=`, `||`, and `&&`',
             infix_expr.pos)
-    } else if left_type == table.string_type && infix_expr.op !in [.plus, .eq, .ne, .lt, .gt, .le, .ge] {
+    } else if left_type == table.string_type && infix_expr.op !in [.plus, .eq, .ne, .lt, .gt,
+        .le, .ge] {
         // TODO broken !in
         c.error('string types only have the following operators defined: `==`, `!=`, `<`, `>`, `<=`, `>=`, and `&&`',
             infix_expr.pos)
@@ -429,7 +430,11 @@ pub fn (mut c Checker) infix_expr(infix_expr mut ast.InfixExpr) table.Type {
         c.error('infix expr: cannot use `$right.name` (right expression) as `$left.name`',
             infix_expr.pos)
     }
-    return if infix_expr.op.is_relational() { table.bool_type } else { left_type }
+    return if infix_expr.op.is_relational() {
+        table.bool_type
+    } else {
+        left_type
+    }
 }

 fn (mut c Checker) assign_expr(assign_expr mut ast.AssignExpr) {
@@ -860,7 +865,7 @@ pub fn (mut c Checker) return_stmt(return_stmt mut ast.Return) {
         mr_info := expected_type_sym.info as table.MultiReturn
         expected_types = mr_info.types
     }
-    mut got_types := []table.Type
+    mut got_types := []table.Type{}
     for expr in return_stmt.exprs {
         typ := c.expr(expr)
         got_types << typ
@@ -1032,11 +1037,11 @@ pub fn (mut c Checker) array_init(array_init mut ast.ArrayInit) table.Type {
     }
     // [1,2,3]
     if array_init.exprs.len > 0 && array_init.elem_type == table.void_type {
-        expecting_interface_array := c.expected_type != 0 &&
-            c.table.get_type_symbol( c.table.value_type(c.expected_type) ).kind == .interface_
-        //if expecting_interface_array {
-        //println('ex $c.expected_type')
-        //}
+        expecting_interface_array := c.expected_type != 0 && c.table.get_type_symbol(c.table.value_type(c.expected_type)).kind ==
+            .interface_
+        // if expecting_interface_array {
+        // println('ex $c.expected_type')
+        // }
         for i, expr in array_init.exprs {
             typ := c.expr(expr)
             // The first element's type
@@ -1144,14 +1149,14 @@ fn (mut c Checker) stmt(node ast.Stmt) {
            }
        }
        ast.ConstDecl {
-            mut field_names := []string
-            mut field_order := []int
+            mut field_names := []string{}
+            mut field_order := []int{}
            for i, field in it.fields {
                field_names << field.name
                field_order << i
            }
            mut needs_order := false
-            mut done_fields := []int
+            mut done_fields := []int{}
            for i, field in it.fields {
                c.const_decl = field.name
                c.const_deps << field.name
@@ -1172,7 +1177,7 @@ fn (mut c Checker) stmt(node ast.Stmt) {
                c.const_deps = []
            }
            if needs_order {
-                mut ordered_fields := []ast.ConstField
+                mut ordered_fields := []ast.ConstField{}
                for order in field_order {
                    ordered_fields << it.fields[order]
                }
@@ -1322,7 +1327,12 @@ pub fn (mut c Checker) expr(node ast.Expr) table.Type {
                c.error('cannot cast `$expr_type_sym.name` to `$type_sym.name`', it.pos)
                // c.error('only $info.variants can be casted to `$typ`', it.pos)
            }
-        }
-        //
-        else {
+        } else {
+            //
            c.error('cannot cast non sum type `$type_sym.name` using `as`', it.pos)
        }
        return it.typ.to_ptr()
-        //return it.typ
+        // return it.typ
     }
     ast.AssignExpr {
         c.assign_expr(mut it)
@@ -1642,28 +1646,22 @@ fn (mut c Checker) match_exprs(node mut ast.MatchExpr, type_sym table.TypeSymbol
     // or, when the match is on a sum type or an enum
     // by listing all variants or values
     mut is_exhaustive := true
-    mut unhandled := []string
+    mut unhandled := []string{}
     match type_sym.info {
-        table.SumType {
-            for v in it.variants {
+        table.SumType { for v in it.variants {
                 v_str := c.table.type_to_str(v)
                 if v_str !in branch_exprs {
                     is_exhaustive = false
                     unhandled << '`$v_str`'
                 }
-            }
-        }
-        table.Enum {
-            for v in it.vals {
+            } }
+        table.Enum { for v in it.vals {
                 if v !in branch_exprs {
                     is_exhaustive = false
                     unhandled << '`.$v`'
                 }
-            }
-        }
-        else {
-            is_exhaustive = false
-        }
+            } }
+        else { is_exhaustive = false }
     }
     mut else_branch := node.branches[node.branches.len - 1]
     mut has_else := else_branch.is_else
@@ -23,19 +23,21 @@ mut:
     data map[string][]string
 }

-pub fn (o mut OrderedDepMap) set(name string, deps []string) {
+pub fn (mut o OrderedDepMap) set(name string, deps []string) {
     if name !in o.data {
         o.keys << name
     }
     o.data[name] = deps
 }

-pub fn (o mut OrderedDepMap) add(name string, deps []string) {
+pub fn (mut o OrderedDepMap) add(name string, deps []string) {
     mut d := o.data[name]
     for dep in deps {
         if dep !in d {
             d << dep
         }
+        //
+        else{}
     }
     o.set(name, d)
 }
@@ -44,7 +46,7 @@ pub fn (o &OrderedDepMap) get(name string) []string {
     return o.data[name]
 }

-pub fn (o mut OrderedDepMap) delete(name string) {
+pub fn (mut o OrderedDepMap) delete(name string) {
     if name !in o.data {
         panic('delete: no such key: $name')
     }
@@ -57,8 +59,8 @@ pub fn (o mut OrderedDepMap) delete(name string) {
     o.data.delete(name)
 }

-pub fn (o mut OrderedDepMap) apply_diff(name string, deps []string) {
-    mut diff := []string
+pub fn (mut o OrderedDepMap) apply_diff(name string, deps []string) {
+    mut diff := []string{}
     for dep in o.data[name] {
         if dep !in deps {
             diff << dep
@@ -77,7 +79,7 @@ pub fn new_dep_graph() &DepGraph {
     }
 }

-pub fn (graph mut DepGraph) add(mod string, deps []string) {
+pub fn (mut graph DepGraph) add(mod string, deps []string) {
     graph.nodes << DepGraphNode{
         name: mod
         deps: deps.clone()
@@ -93,7 +95,7 @@ pub fn (graph &DepGraph) resolve() &DepGraph {
     }
     mut resolved := new_dep_graph()
     for node_deps.size() != 0 {
-        mut ready_set := []string
+        mut ready_set := []string{}
         for name in node_deps.keys {
             deps := node_deps.data[name]
             if deps.len == 0 {
@@ -88,7 +88,7 @@ fn (mut d Doc) write_fn_signatures(fn_signatures []string) {
 }

 fn (d Doc) get_fn_signatures(filter_fn FilterFn) []string {
-    mut fn_signatures := []string
+    mut fn_signatures := []string{}
     for stmt in d.stmts {
         match stmt {
             ast.FnDecl {
@@ -418,7 +418,7 @@ fn (mut f Fmt) type_decl(node ast.TypeDecl) {
         f.write('pub ')
     }
     f.write('type $it.name = ')
-    mut sum_type_names := []string
+    mut sum_type_names := []string{}
     for t in it.sub_types {
         sum_type_names << f.type_to_str(t)
     }
@@ -989,6 +989,7 @@ fn (mut f Fmt) array_init(it ast.ArrayInit) {
     if it.exprs.len == 0 && it.typ != 0 && it.typ != table.void_type {
         // `x := []string`
         f.write(f.type_to_str(it.typ))
+        f.write('{}')
         return
     }
     // `[1,2,3]`
@@ -768,7 +768,7 @@ fn (mut g Gen) gen_assign_stmt(assign_stmt ast.AssignStmt) {
     }
     if assign_stmt.left.len > assign_stmt.right.len {
         // multi return
-        mut or_stmts := []ast.Stmt
+        mut or_stmts := []ast.Stmt{}
         mut return_type := table.void_type
         if assign_stmt.right[0] is ast.CallExpr {
             it := assign_stmt.right[0] as ast.CallExpr
@@ -810,7 +810,7 @@ fn (mut g Gen) gen_assign_stmt(assign_stmt ast.AssignStmt) {
         ident_var_info := ident.var_info()
         styp := g.typ(ident_var_info.typ)
         mut is_call := false
-        mut or_stmts := []ast.Stmt
+        mut or_stmts := []ast.Stmt{}
         mut return_type := table.void_type
         match val {
             ast.CallExpr {
@@ -1244,7 +1244,7 @@ fn (mut g Gen) enum_expr(node ast.Expr) {
 fn (mut g Gen) assign_expr(node ast.AssignExpr) {
     // g.write('/*assign_expr*/')
     mut is_call := false
-    mut or_stmts := []ast.Stmt
+    mut or_stmts := []ast.Stmt{}
     mut return_type := table.void_type
     match node.val {
         ast.CallExpr {
@@ -2011,7 +2011,7 @@ fn (mut g Gen) struct_init(struct_init ast.StructInit) {
         g.writeln('($styp){')
     }
     // mut fields := []string
-    mut inited_fields := []string // TODO this is done in checker, move to ast node
+    mut inited_fields := []string{} // TODO this is done in checker, move to ast node
     /*
     if struct_init.fields.len == 0 && struct_init.exprs.len > 0 {
         // Get fields for {a,b} short syntax. Fields array wasn't set in the parser.
@@ -2182,7 +2182,7 @@ const (
 )

 fn (mut g Gen) write_builtin_types() {
-    mut builtin_types := []table.TypeSymbol // builtin types
+    mut builtin_types := []table.TypeSymbol{} // builtin types
     // builtin types need to be on top
     // everything except builtin will get sorted
     for builtin_name in builtins {
@@ -2195,7 +2195,7 @@ fn (mut g Gen) write_builtin_types() {
 // Sort the types, make sure types that are referenced by other types
 // are added before them.
 fn (mut g Gen) write_sorted_types() {
-    mut types := []table.TypeSymbol // structs that need to be sorted
+    mut types := []table.TypeSymbol{} // structs that need to be sorted
     for typ in g.table.types {
         if typ.name !in builtins {
             types << typ
@@ -2268,7 +2268,7 @@ int typ;
 fn (g Gen) sort_structs(typesa []table.TypeSymbol) []table.TypeSymbol {
     mut dep_graph := depgraph.new_dep_graph()
     // types name list
-    mut type_names := []string
+    mut type_names := []string{}
     for typ in typesa {
         type_names << typ.name
     }
@@ -2278,7 +2278,7 @@ fn (g Gen) sort_structs(typesa []table.TypeSymbol) []table.TypeSymbol {
             continue
         }
         // create list of deps
-        mut field_deps := []string
+        mut field_deps := []string{}
         match t.info {
             table.ArrayFixed {
                 dep := g.table.get_type_symbol(it.elem_type).name
@@ -2314,7 +2314,7 @@ fn (g Gen) sort_structs(typesa []table.TypeSymbol) []table.TypeSymbol {
             '\nif you feel this is an error, please create a new issue here: https://github.com/vlang/v/issues and tag @joe-conigliaro')
     }
     // sort types
-    mut types_sorted := []table.TypeSymbol
+    mut types_sorted := []table.TypeSymbol{}
     for node in dep_graph_sorted.nodes {
         types_sorted << g.table.types[g.table.type_idxs[node.name]]
     }
@@ -2815,7 +2815,7 @@ pub fn (mut g Gen) write_tests_main() {
 }

 fn (g Gen) get_all_test_function_names() []string {
-    mut tfuncs := []string
+    mut tfuncs := []string{}
     mut tsuite_begin := ''
     mut tsuite_end := ''
     for _, f in g.table.fns {
@@ -2846,7 +2846,7 @@ fn (g Gen) get_all_test_function_names() []string {
             continue
         }
     }
-    mut all_tfuncs := []string
+    mut all_tfuncs := []string{}
     if tsuite_begin.len > 0 {
         all_tfuncs << tsuite_begin
     }
@@ -2854,7 +2854,7 @@ fn (g Gen) get_all_test_function_names() []string {
     if tsuite_end.len > 0 {
         all_tfuncs << tsuite_end
     }
-    mut all_tfuncs_c := []string
+    mut all_tfuncs_c := []string{}
     for f in all_tfuncs {
         all_tfuncs_c << f.replace('.', '__')
     }
@@ -73,7 +73,7 @@ pub fn (mut p Parser) assign_expr(left ast.Expr) ast.AssignExpr {
 }

 fn (mut p Parser) parse_assign_lhs() []ast.Ident {
-    mut idents := []ast.Ident
+    mut idents := []ast.Ident{}
     for {
         is_mut := p.tok.kind == .key_mut || p.tok.kind == .key_var
         if is_mut {
@@ -101,7 +101,7 @@ fn (mut p Parser) parse_assign_lhs() []ast.Ident {

 // right hand side of `=` or `:=` in `a,b,c := 1,2,3`
 fn (mut p Parser) parse_assign_rhs() []ast.Expr {
-    mut exprs := []ast.Expr
+    mut exprs := []ast.Expr{}
     for {
         expr := p.expr(0)
         exprs << expr
@@ -62,7 +62,7 @@ fn (mut p Parser) comp_if() ast.CompIf {
         p.next()
     }
     val := p.check_name()
-    mut stmts := []ast.Stmt
+    mut stmts := []ast.Stmt{}
     mut skip_os := false
     if val in supported_platforms {
         os := os_from_string(val)
@@ -14,9 +14,10 @@ fn (mut p Parser) array_init() ast.ArrayInit {
     // p.warn('array_init() exp=$p.expected_type')
     mut array_type := table.void_type
     mut elem_type := table.void_type
-    mut exprs := []ast.Expr
+    mut exprs := []ast.Expr{}
     mut is_fixed := false
     mut has_val := false
+    mut has_type := false
     if p.tok.kind == .rsbr {
         // []typ => `[]` and `typ` must be on the same line
         line_nr := p.tok.line_nr
@@ -28,6 +29,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
             // result of expr so we do those in checker
             idx := p.table.find_or_register_array(elem_type, 1)
             array_type = table.new_type(idx)
+            has_type = true
         }
     } else {
         // [1,2,3] or [const]byte
@@ -65,6 +67,9 @@ fn (mut p Parser) array_init() ast.ArrayInit {
             }
         }
     }
+    if exprs.len == 0 && p.tok.kind != .lcbr && has_type {
+        p.warn('use `x := []Type{}` instead of `x := []Type`')
+    }
     if p.tok.kind == .lcbr && exprs.len == 0 {
         // `[]int{ len: 10, cap: 100}` syntax
         p.next()
@@ -99,8 +104,8 @@ fn (mut p Parser) array_init() ast.ArrayInit {

 fn (mut p Parser) map_init() ast.MapInit {
     pos := p.tok.position()
-    mut keys := []ast.Expr
-    mut vals := []ast.Expr
+    mut keys := []ast.Expr{}
+    mut vals := []ast.Expr{}
     for p.tok.kind != .rcbr && p.tok.kind != .eof {
         // p.check(.str)
         key := p.expr(0)
@@ -30,7 +30,7 @@ pub fn (mut p Parser) call_expr(is_c, is_js bool, mod string) ast.CallExpr {
         pos: first_pos.pos
         len: last_pos.pos - first_pos.pos + last_pos.len
     }
-    mut or_stmts := []ast.Stmt
+    mut or_stmts := []ast.Stmt{}
     mut is_or_block_used := false
     if p.tok.kind == .key_orelse {
         p.next()
@@ -63,7 +63,7 @@ pub fn (mut p Parser) call_expr(is_c, is_js bool, mod string) ast.CallExpr {
 }

 pub fn (mut p Parser) call_args() []ast.CallArg {
-    mut args := []ast.CallArg
+    mut args := []ast.CallArg{}
     for p.tok.kind != .rpar {
         mut is_mut := false
         if p.tok.kind == .key_mut {
@@ -103,7 +103,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
     mut is_method := false
     mut rec_type := table.void_type
     mut rec_mut := false
-    mut args := []table.Arg
+    mut args := []table.Arg{}
     if p.tok.kind == .lpar {
         p.next() // (
         is_method = true
@@ -208,7 +208,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
         })
     }
     // Body
-    mut stmts := []ast.Stmt
+    mut stmts := []ast.Stmt{}
     no_body := p.tok.kind != .lcbr
     if p.tok.kind == .lcbr {
         stmts = p.parse_block_no_scope()
@@ -255,7 +255,7 @@ fn (mut p Parser) anon_fn() ast.AnonFn {
     if p.tok.kind.is_start_of_type() {
         return_type = p.parse_type()
     }
-    mut stmts := []ast.Stmt
+    mut stmts := []ast.Stmt{}
     no_body := p.tok.kind != .lcbr
     if p.tok.kind == .lcbr {
         stmts = p.parse_block_no_scope()
@@ -289,7 +289,7 @@ fn (mut p Parser) anon_fn() ast.AnonFn {

 fn (mut p Parser) fn_args() ([]table.Arg, bool) {
     p.check(.lpar)
-    mut args := []table.Arg
+    mut args := []table.Arg{}
     mut is_variadic := false
     // `int, int, string` (no names, just types)
     types_only := p.tok.kind in [.amp, .and] || (p.peek_tok.kind == .comma && p.table.known_type(p.tok.lit)) ||
@@ -9,7 +9,7 @@ import v.token

 fn (mut p Parser) if_expr() ast.IfExpr {
     pos := p.tok.position()
-    mut branches := []ast.IfBranch
+    mut branches := []ast.IfBranch{}
     mut has_else := false
     for p.tok.kind in [.key_if, .key_else] {
         p.inside_if = true
@@ -92,11 +92,11 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
     cond := p.expr(0)
     p.inside_match = false
     p.check(.lcbr)
-    mut branches := []ast.MatchBranch
+    mut branches := []ast.MatchBranch{}
     for {
         branch_first_pos := p.tok.position()
         comment := p.check_comment() // comment before {}
-        mut exprs := []ast.Expr
+        mut exprs := []ast.Expr{}
         p.open_scope()
         // final else
         mut is_else := false
@@ -49,7 +49,7 @@ pub fn (mut p Parser) parse_map_type() table.Type {

 pub fn (mut p Parser) parse_multi_return_type() table.Type {
     p.check(.lpar)
-    mut mr_types := []table.Type
+    mut mr_types := []table.Type{}
     for {
         mr_type := p.parse_type()
         mr_types << mr_type
@@ -68,7 +68,7 @@ pub fn parse_file(path string, table &table.Table, comments_mode scanner.Comment
     // text := os.read_file(path) or {
     // panic(err)
     // }
-    mut stmts := []ast.Stmt
+    mut stmts := []ast.Stmt{}
     mut p := Parser{
         scanner: scanner.new_scanner_file(path, comments_mode)
         table: table
@@ -166,7 +166,7 @@ pub fn parse_files(paths []string, table &table.Table, pref &pref.Preferences, g
     return q.parsed_ast_files
     */
     // ///////////////
-    mut files := []ast.File
+    mut files := []ast.File{}
     for path in paths {
         // println('parse_files $path')
         files << parse_file(path, table, .skip_comments, pref, global_scope)
@@ -221,7 +221,7 @@ pub fn (mut p Parser) parse_block() []ast.Stmt {

 pub fn (mut p Parser) parse_block_no_scope() []ast.Stmt {
     p.check(.lcbr)
-    mut stmts := []ast.Stmt
+    mut stmts := []ast.Stmt{}
     if p.tok.kind != .rcbr {
         for {
             stmts << p.stmt()
@@ -775,7 +775,7 @@ fn (mut p Parser) dot_expr(left ast.Expr) ast.Expr {
         p.next()
         args := p.call_args()
         p.check(.rpar)
-        mut or_stmts := []ast.Stmt
+        mut or_stmts := []ast.Stmt{}
         mut is_or_block_used := false
         if p.tok.kind == .key_orelse {
             p.next()
@@ -859,9 +859,9 @@ fn (mut p Parser) string_expr() ast.Expr {
         }
         return node
     }
-    mut exprs := []ast.Expr
-    mut vals := []string
-    mut efmts := []string
+    mut exprs := []ast.Expr{}
+    mut vals := []string{}
+    mut efmts := []string{}
     // Handle $ interpolation
     for p.tok.kind == .string {
         vals << p.tok.lit
@@ -871,7 +871,7 @@ fn (mut p Parser) string_expr() ast.Expr {
         }
         p.check(.str_dollar)
         exprs << p.expr(0)
-        mut efmt := []string
+        mut efmt := []string{}
         if p.tok.kind == .colon {
             efmt << ':'
             p.next()
@@ -961,7 +961,7 @@ fn (mut p Parser) parse_import() ast.Import {

 fn (mut p Parser) import_stmt() []ast.Import {
     p.check(.key_import)
-    mut imports := []ast.Import
+    mut imports := []ast.Import{}
     if p.tok.kind == .lpar {
         p.warn('`import()` has been deprecated, use `import x` instead. run `v fmt` to handle the transition')
         p.check(.lpar)
@@ -991,7 +991,7 @@ fn (mut p Parser) const_decl() ast.ConstDecl {
         p.error('consts must be grouped, e.g.\nconst (\n\ta = 1\n)')
     }
     p.next() // (
-    mut fields := []ast.ConstField
+    mut fields := []ast.ConstField{}
     for p.tok.kind != .rpar {
         if p.tok.kind == .comment {
             p.comment()
@@ -1021,7 +1021,7 @@ fn (mut p Parser) return_stmt() ast.Return {
     first_pos := p.tok.position()
     p.next()
     // return expressions
-    mut exprs := []ast.Expr
+    mut exprs := []ast.Expr{}
     if p.tok.kind == .rcbr {
         return ast.Return{
             pos: first_pos
@@ -1101,9 +1101,9 @@ fn (mut p Parser) enum_decl() ast.EnumDecl {
     }
     name := p.prepend_mod(enum_name)
     p.check(.lcbr)
-    mut vals := []string
+    mut vals := []string{}
     // mut default_exprs := []ast.Expr
-    mut fields := []ast.EnumField
+    mut fields := []ast.EnumField{}
     for p.tok.kind != .eof && p.tok.kind != .rcbr {
         pos := p.tok.position()
         val := p.check_name()
@@ -1156,7 +1156,7 @@ fn (mut p Parser) type_decl() ast.TypeDecl {
     end_pos := p.tok.position()
     decl_pos := start_pos.extend(end_pos)
     name := p.check_name()
-    mut sum_variants := []table.Type
+    mut sum_variants := []table.Type{}
     if p.tok.kind == .assign {
         p.next() // TODO require `=`
     }
@@ -1225,8 +1225,8 @@ fn (mut p Parser) assoc() ast.Assoc {
         return ast.Assoc{}
     }
     // println('assoc var $name typ=$var.typ')
-    mut fields := []string
-    mut vals := []ast.Expr
+    mut fields := []string{}
+    mut vals := []ast.Expr{}
     p.check(.pipe)
     for {
         fields << p.check_name()
@@ -33,8 +33,8 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
     end_pos := p.tok.position()
     mut name := p.check_name()
     // println('struct decl $name')
-    mut ast_fields := []ast.StructField
-    mut fields := []table.Field
+    mut ast_fields := []ast.StructField{}
+    mut fields := []table.Field{}
     mut mut_pos := -1
     mut pub_pos := -1
     mut pub_mut_pos := -1
@@ -165,7 +165,7 @@ fn (mut p Parser) struct_init(short_syntax bool) ast.StructInit {
     if !short_syntax {
         p.check(.lcbr)
     }
-    mut fields := []ast.StructInitField
+    mut fields := []ast.StructInitField{}
     mut i := 0
     is_short_syntax := p.peek_tok.kind != .colon && p.tok.kind != .rcbr // `Vec{a,b,c}
     // p.warn(is_short_syntax.str())
@@ -240,7 +240,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
     typ := table.new_type(p.table.register_type_symbol(t))
     ts := p.table.get_type_symbol(typ) // TODO t vs ts
     // Parse methods
-    mut methods := []ast.FnDecl
+    mut methods := []ast.FnDecl{}
     for p.tok.kind != .rcbr && p.tok.kind != .eof {
         line_nr := p.tok.line_nr
         name := p.check_name()
@@ -314,7 +314,7 @@ pub enum Precedence {
 }

 pub fn build_precedences() []Precedence {
-    mut p := []Precedence
+    mut p := []Precedence{}
     p = make(100, 100, sizeof(Precedence))
     p[Kind.assign] = .assign
     p[Kind.eq] = .eq
@@ -68,7 +68,7 @@ pub fn formatted_error(kind string /*error or warn*/, emsg string, filepath stri
     //
     bline := util.imax(0, pos.line_nr - error_context_before)
     aline := util.imin(source_lines.len-1, pos.line_nr + error_context_after)
-    mut clines := []string
+    mut clines := []string{}
     tab_spaces := ' '
     for iline := bline; iline <= aline; iline++ {
         sline := source_lines[iline]
@@ -83,7 +83,7 @@ pub fn formatted_error(kind string /*error or warn*/, emsg string, filepath stri
         // line, so that it prints the ^ character exactly on the *same spot*
         // where it is needed. That is the reason we can not just
         // use strings.repeat(` `, col) to form it.
-        mut pointerline := []string
+        mut pointerline := []string{}
         for i, c in sline {
             if i < column {
                 mut x := c
@@ -219,7 +219,7 @@ pub fn join_env_vflags_and_os_args() []string {
     if vosargs != '' {
         return non_empty(vosargs.split(' '))
     }
-    mut args := []string
+    mut args := []string{}
     vflags := os.getenv('VFLAGS')
     if vflags != '' {
         args << os.args[0]
@@ -91,7 +91,7 @@ fn (mcache mut ModFileCacher) traverse(mfolder string) ([]string, ModFileAndFold
         }else{
             mcache.mark_folders_with_vmod( folders_so_far, res )
         }
-        return []string, res
+        return []string{}, res
         }
         files := mcache.get_files( cfolder )
         if 'v.mod' in files {
@@ -99,7 +99,7 @@ fn (mcache mut ModFileCacher) traverse(mfolder string) ([]string, ModFileAndFold
             // if its source folder is different
             res := ModFileAndFolder{ vmod_file: os.join_path( cfolder, 'v.mod'), vmod_folder: cfolder }
             return folders_so_far, res
-            }
+        }
         if mcache.check_for_stop( cfolder, files ) {
             break
         }
@@ -139,7 +139,7 @@ fn (mcache mut ModFileCacher) get_files(cfolder string) []string {
     if cfolder in mcache.folder_files {
         return mcache.folder_files[ cfolder ]
     }
-    mut files := []string
+    mut files := []string{}
     if os.exists( cfolder ) && os.is_dir(cfolder) {
         if listing := os.ls(cfolder) {
             files = listing