Compare commits

..

No commits in common. "0bd8d872d1e48659f31d6a29f051485eb5fa3728" and "3afb88c4e84ff3f9cb2df08ce8a2ee3a7fa0f152" have entirely different histories.

56 changed files with 263 additions and 904 deletions

View File

@ -217,7 +217,7 @@ jobs:
cd gitly
../v .
# ./gitly -ci_run
../v -autofree .
# ../v -autofree .
../v -o x tests/first_run.v
./x
cd ..
@ -248,8 +248,8 @@ jobs:
run: git clone --depth=1 https://github.com/vlang/go2v go2v/
- name: Build go2v
run: ./v go2v/
## - name: Run tests for go2v
## run: VJOBS=1 ./v -stats test go2v/
- name: Run tests for go2v
run: VJOBS=1 ./v -stats test go2v/
## vlang/pdf
- name: Clone & Build vlang/pdf

View File

@ -35,35 +35,35 @@ The main files are:
- Creates a parser object for each file and runs `parse()` on them.
- The correct backend is called (C, JS, native), and a binary is compiled.
2. `vlib/v/scanner` The scanner's job is to parse a list of characters and convert
2. `v/scanner` The scanner's job is to parse a list of characters and convert
them to tokens.
3. `vlib/v/token` This is simply a list of all tokens, their string values, and a
3. `v/token` This is simply a list of all tokens, their string values, and a
couple of helper functions.
4. `vlib/v/parser` The parser. It converts a list of tokens into an AST.
4. `v/parser` The parser. It converts a list of tokens into an AST.
In V, objects can be used before declaration, so unknown types are marked as
unresolved. They are resolved later in the type checker.
5. `vlib/v/table` V creates one table object that is shared by all parsers. It
5. `v/table` V creates one table object that is shared by all parsers. It
contains all types, consts, and functions, as well as several helpers to search
for objects by name, register new objects, modify types' fields, etc.
6. `vlib/v/checker` Type checker and resolver. It processes the AST and makes sure
6. `v/checker` Type checker and resolver. It processes the AST and makes sure
the types are correct. Unresolved types are resolved, type information is added
to the AST.
7. `vlib/v/gen/c` C backend. It simply walks the AST and generates C code that can be
7. `v/gen/c` C backend. It simply walks the AST and generates C code that can be
compiled with Clang, GCC, Visual Studio, and TCC.
8. `vlib/v/gen/js` JavaScript backend. It simply walks the AST and generates JS code that can be
8. `v/gen/js` JavaScript backend. It simply walks the AST and generates JS code that can be
executed on the browser or in NodeJS/Deno.
9. `vlib/v/gen/c/json.v` defines the json code generation. This file will be removed once V
9. `v/gen/c/json.v` defines the json code generation. This file will be removed once V
supports comptime code generation, and it will be possible to do this using the
language's tools.
10. `vlib/v/gen/native` is the directory with all the machine code generation logic. It
10. `v/gen/native` is the directory with all the machine code generation logic. It
defines a set of functions that translate assembly instructions to machine code
and build the binary from scratch byte by byte. It manually builds all headers,
segments, sections, symtable, relocations, etc. Right now it only has basic

View File

@ -4,8 +4,7 @@ fn main() {
exit(0)
}
$if !macos {
println('
println('
Note: `tcc` was not used, so unless you install it yourself, your backend
C compiler will be `cc`, which is usually either `clang`, `gcc` or `msvc`.
@ -13,5 +12,4 @@ These C compilers, are several times slower at compiling C source code,
compared to `tcc`. They do produce more optimised executables, but that
is done at the cost of compilation speed.
')
}
}

View File

@ -2422,7 +2422,8 @@ V supports closures too.
This means that anonymous functions can inherit variables from the scope they were created in.
They must do so explicitly by listing all variables that are inherited.
> Warning: currently works on x64 and arm64 architectures only.
> Warning: currently works on Unix-based, x64 architectures only.
Some work is in progress to make closures work on Windows, then other architectures.
```v oksyntax
my_int := 1
@ -6025,7 +6026,7 @@ a nested loop, and those do not risk violating memory-safety.
## Appendix I: Keywords
V has 42 reserved keywords (3 are literals):
V has 41 reserved keywords (3 are literals):
```v ignore
as
@ -6037,6 +6038,7 @@ const
continue
defer
else
embed
enum
false
fn
@ -6048,7 +6050,6 @@ import
in
interface
is
isreftype
lock
match
module
@ -6096,15 +6097,15 @@ This lists operators for [primitive types](#primitive-types) only.
<< left shift integer << unsigned integer
>> right shift integer >> unsigned integer
>>> unsigned right shift integer >> unsigned integer
>>> unsigned right shift integer >> unsigned integer
Precedence Operator
5 * / % << >> >>> &
4 + - | ^
3 == != < <= > >=
2 &&
1 ||
5 * / % << >> >>> &
4 + - | ^
3 == != < <= > >=
2 &&
1 ||
Assignment Operators

View File

@ -1,90 +0,0 @@
// https://en.wikipedia.org/wiki/Topological_sorting
// A DFS RECURSIVE ALGORITHM ....
// An alternative algorithm for topological sorting is based on depth-first search. The algorithm loops through each node of the graph, in an arbitrary order, initiating a depth-first search that terminates when it hits any node that has already been visited since the beginning
// of the topological sort or the node has no outgoing edges (i.e. a leaf node)
// Discussion: https://www.gatevidyalay.com/topological-sort-topological-sorting/
// $ v run dfs_topological_ordering.v
// Author: CCS
// THE DFS RECURSIVE .... classical searchig for leaves nodes
// the arguments are used in the function to avoid global variables....
fn dfs_recursive(u string, mut visited map[string]bool, graph map[string][]string, mut top_sorting []string) {
print(' Visiting: $u -> ')
visited[u] = true
for v in graph[u] {
if visited[v] == false {
dfs_recursive(v, mut visited, graph, mut top_sorting)
}
}
top_sorting << u
}
// Creating aa map to initialize with of visited nodes .... all with false in the init
// so these nodes are NOT VISITED YET
fn visited_init(a_graph map[string][]string) map[string]bool {
mut array_of_keys := a_graph.keys() // get all keys of this map
mut temp := map[string]bool{} // attention in these initializations with maps
for i in array_of_keys {
temp[i] = false
}
return temp
}
// attention here a map STRING ---> ONE BOOLEAN ... not a string
fn main() {
// A map illustration to use in a graph
// the graph: adjacency matrix
graph_01 := {
'A': ['C', 'B']
'B': ['D']
'C': ['D']
'D': []
}
graph_02 := {
'A': ['B', 'C', 'D']
'B': ['E']
'C': ['F']
'D': ['G']
'E': ['H']
'F': ['H']
'G': ['H']
'H': [] // no cycles
}
// from: https://en.wikipedia.org/wiki/Topological_sorting
graph_03 := {
'5': ['11']
'7': ['11', '8']
'3': ['8', '10']
'11': ['2', '9', '10']
'8': ['9']
'2': []
'9': []
'10': []
}
mut graph := map[string][]string{} // the graph: adjacency matrix
for index, g_value in [graph_01, graph_02, graph_03] {
println('Topological sorting for the graph $index using a DFS recursive')
graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
// mut n_nodes := graph.len
mut visited := visited_init(graph) // a map with nodes not visited
// mut start := (graph.keys()).first() // arbitrary, any node if you wish
mut top_sorting := []string{}
// advantages of map ... getting all nodes
for i in graph.keys() {
if visited[i] != true {
dfs_recursive(i, mut visited, graph, mut top_sorting)
}
}
print('\n A topological sorting of graph $index : ')
// println(g_value)
println(top_sorting.reverse())
println('')
} // End of for
}

View File

@ -1,146 +0,0 @@
// The idea of this algorithm follow :
// https://www.gatevidyalay.com/topological-sort-topological-sorting/ (GREEDY)
// (no cycles are detected)
// https://en.wikipedia.org/wiki/Topological_sorting ... just the input data
// and the Kahn algorithm
// Author: CCS
// the idea is rude: https://www.gatevidyalay.com/topological-sort-topological-sorting/
fn topog_sort_greedy(graph map[string][]string) []string {
n_nodes := graph.len // numbers of nodes of this graph
mut top_order := []string{} // a vector with sequence of nodes visited
mut count := 0
/*
IDEA ( a greedy algorythm ):
1. choose allways the node with smallest input degree
2. visit it
3. put it in the output vector
4. remove it from graph
5. update the graph (a new graph)
6. find a new vector degree
7. until all nodes has been visited
Back to step 1 (used the variable count)
Maybe it seems the Kahn's algorithm
*/
mut v_degree := in_degree(graph) // return: map [string] int
print('V Degree $v_degree')
mut small_degree := min_degree(v_degree)
mut new_graph := remove_node_from_graph(small_degree, graph)
top_order << small_degree
count++
for (count < n_nodes) {
v_degree = in_degree(new_graph) // return: map [string] int
print('\nV Degree $v_degree')
small_degree = min_degree(v_degree)
new_graph = remove_node_from_graph(small_degree, new_graph)
top_order << small_degree
count++
}
// print("\n New Graph ${new_graph}")
return top_order
}
// Give a node, return a list with all nodes incidents or fathers of this node
fn all_fathers(node string, a_map map[string][]string) []string {
mut array_of_keys := a_map.keys() // get a key of this map
mut all_incident := []string{}
for i in array_of_keys {
// in : function
if node in a_map[i] {
all_incident << i // a queue of this search
}
}
return all_incident
}
// Input: a map with input degree values, return the key with smallest value
fn min_degree(a_map map[string]int) string {
mut array_of_keys := a_map.keys() // get a key of this map
mut key_min := array_of_keys.first()
mut val_min := a_map[key_min]
// print("\n MIN: ${val_min} \t key_min: ${key_min} \n the map inp_degree: ${a_map}")
for i in array_of_keys {
// there is a smaller
if val_min > a_map[i] {
val_min = a_map[i]
key_min = i
}
}
return key_min // the key with smallest value
}
// Given a graph ... return a list of integer with degree of each node
fn in_degree(a_map map[string][]string) map[string]int {
mut array_of_keys := a_map.keys() // get a key of this map
// print(array_of_keys)
mut degree := map[string]int{}
for i in array_of_keys {
degree[i] = all_fathers(i, a_map).len
}
// print("\n Degree ${in_degree}" )
return degree // a vector of the indegree graph
}
// REMOVE A NODE FROM A GRAPH AND RETURN ANOTHER GRAPH
fn remove_node_from_graph(node string, a_map map[string][]string) map[string][]string {
// mut new_graph := map [string] string {}
mut new_graph := a_map.clone() // copy the graph
new_graph.delete(node)
mut all_nodes := new_graph.keys() // get all nodes of this graph
// FOR THE FUTURE with filter
// for i in all_nodes {
// new_graph[i] = new_graph[i].filter(index(it) != node)
// }
// A HELP FROM V discussion GITHUB - thread
for key in all_nodes {
i := new_graph[key].index(node)
if i >= 0 {
new_graph[key].delete(i)
}
}
// print("\n NEW ${new_graph}" )
return new_graph
}
fn main() {
// A map illustration to use in a graph
// adjacency matrix
graph_01 := {
'A': ['C', 'B']
'B': ['D']
'C': ['D']
'D': []
}
graph_02 := {
'A': ['B', 'C', 'D']
'B': ['E']
'C': ['F']
'D': ['G']
'E': ['H']
'F': ['H']
'G': ['H']
'H': []
}
// from: https://en.wikipedia.org/wiki/Topological_sorting
graph_03 := {
'5': ['11']
'7': ['11', '8']
'3': ['8', '10']
'11': ['2', '9', '10']
'8': ['9']
'2': []
'9': []
'10': []
}
println('\nA Topological Sort of G1: ${topog_sort_greedy(graph_01)}')
println('\nA Topological Sort of G2: ${topog_sort_greedy(graph_02)}')
println('\nA Topological Sort of G3: ${topog_sort_greedy(graph_03)}')
// ['2', '9', '10', '11', '5', '8', '7', '3']
}

View File

@ -601,7 +601,7 @@ pub fn (a &array) clone_to_depth(depth int) array {
}
return arr
} else {
if a.data != 0 {
if !isnil(a.data) {
unsafe { vmemcpy(&u8(arr.data), a.data, u64(a.cap) * u64(a.element_size)) }
}
return arr
@ -637,15 +637,16 @@ fn (mut a array) push(val voidptr) {
[unsafe]
pub fn (mut a3 array) push_many(val voidptr, size int) {
a3.ensure_cap(a3.len + size)
if a3.data == val && a3.data != 0 {
if a3.data == val && !isnil(a3.data) {
// handle `arr << arr`
copy := a3.clone()
unsafe {
vmemcpy(&u8(a3.data) + u64(a3.element_size) * u64(a3.len), copy.data, u64(a3.element_size) * u64(size))
// vmemcpy(a.data, copy.data, copy.element_size * copy.len)
vmemcpy(a3.get_unsafe(a3.len), copy.data, u64(a3.element_size) * u64(size))
}
} else {
if a3.data != 0 && val != 0 {
unsafe { vmemcpy(&u8(a3.data) + u64(a3.element_size) * u64(a3.len), val, u64(a3.element_size) * u64(size)) }
if !isnil(a3.data) && !isnil(val) {
unsafe { vmemcpy(a3.get_unsafe(a3.len), val, u64(a3.element_size) * u64(size)) }
}
}
a3.len += size

View File

@ -210,7 +210,7 @@ fn (a &array) clone_to_depth_noscan(depth int) array {
}
return arr
} else {
if a.data != 0 {
if !isnil(a.data) {
unsafe { vmemcpy(&u8(arr.data), a.data, u64(a.cap) * u64(a.element_size)) }
}
return arr
@ -227,7 +227,7 @@ fn (mut a array) push_noscan(val voidptr) {
// `val` is array.data and user facing usage is `a << [1,2,3]`
[unsafe]
fn (mut a3 array) push_many_noscan(val voidptr, size int) {
if a3.data == val && a3.data != 0 {
if a3.data == val && !isnil(a3.data) {
// handle `arr << arr`
copy := a3.clone()
a3.ensure_cap_noscan(a3.len + size)
@ -236,7 +236,7 @@ fn (mut a3 array) push_many_noscan(val voidptr, size int) {
}
} else {
a3.ensure_cap_noscan(a3.len + size)
if a3.data != 0 && val != 0 {
if !isnil(a3.data) && !isnil(val) {
unsafe { vmemcpy(a3.get_unsafe(a3.len), val, u64(a3.element_size) * u64(size)) }
}
}

View File

@ -71,9 +71,6 @@ fn C.fclose(stream &C.FILE) int
fn C.pclose(stream &C.FILE) int
fn C.strrchr(s &char, c int) &char
fn C.strchr(s &char, c int) &char
// process execution, os.process:
[trusted]
fn C.getpid() int

View File

@ -104,22 +104,6 @@ pub fn (o Option) str() string {
return 'Option{ error: "$o.err" }'
}
pub struct _option {
state u8
err IError = none__
}
// str returns the Option type: ok, none, or error
pub fn (o _option) str() string {
if o.state == 0 {
return 'Option{ ok }'
}
if o.state == 1 {
return 'Option{ none }'
}
return 'Option{ error: "$o.err" }'
}
// trace_error prints to stderr a string and a backtrace of the error
fn trace_error(x string) {
eprintln('> ${@FN} | $x')

View File

@ -86,8 +86,8 @@ mut:
// array allocated (with `cap` bytes) on first deletion
// has non-zero element when key deleted
all_deleted &u8
keys &u8
values &u8
keys &u8
}
[inline]
@ -126,8 +126,8 @@ fn (d &DenseArray) has_index(i int) bool {
[inline]
fn (mut d DenseArray) expand() int {
old_cap := d.cap
old_key_size := d.key_bytes * old_cap
old_value_size := d.value_bytes * old_cap
old_key_size := d.key_bytes * old_cap
if d.cap == d.len {
d.cap += d.cap >> 3
unsafe {
@ -628,31 +628,6 @@ pub fn (m &map) keys() array {
return keys
}
// Returns all values in the map.
pub fn (m &map) values() array {
mut values := __new_array(m.len, 0, m.value_bytes)
mut item := unsafe { &u8(values.data) }
if m.key_values.deletes == 0 {
unsafe {
vmemcpy(item, m.key_values.values, m.value_bytes * m.key_values.len)
}
return values
}
for i := 0; i < m.key_values.len; i++ {
if !m.key_values.has_index(i) {
continue
}
unsafe {
pvalue := m.key_values.value(i)
vmemcpy(item, pvalue, m.value_bytes)
item = item + m.value_bytes
}
}
return values
}
// warning: only copies keys, does not clone
[unsafe]
fn (d &DenseArray) clone() DenseArray {

View File

@ -47,16 +47,6 @@ fn test_keys_many() {
assert keys == strings
}
fn test_values_many() {
mut m := map[string]int{}
for i, s in strings {
m[s] = i
}
values := m.values()
assert values.len == strings.len
assert values.len == m.len
}
fn test_deletes_many() {
mut m := map[string]int{}
for i, s in strings {
@ -69,7 +59,6 @@ fn test_deletes_many() {
}
assert m.len == 0
assert m.keys().len == 0
assert m.values().len == 0
}
struct User {
@ -114,13 +103,6 @@ fn test_map() {
assert m['hi'] == 0
assert m.keys().len == 1
assert m.keys()[0] == 'hello'
// Test `.values()`
values := m.values()
assert values.len == 1
assert 80 !in values
assert 101 in values
assert m.values().len == 1
assert m.values()[0] == 101
// //
mut users := map[string]User{}
users['1'] = User{'Peter'}
@ -598,7 +580,6 @@ fn test_int_keys() {
4: 16
5: 25
}
assert m2.values() == [9, 16, 25]
assert m2.len == 3
// clone
@ -655,16 +636,6 @@ fn test_voidptr_keys() {
assert m.len == 2
}
fn test_voidptr_values() {
mut m := map[string]voidptr{}
v := 5
m['var'] = &v
m['map'] = &m
assert m['var'] == &v
assert m['map'] == &m
assert m.values().len == 2
}
fn test_rune_keys() {
mut m := {
`!`: 2

View File

@ -121,23 +121,6 @@ fn opt_ok(data voidptr, mut option Option, size int) {
}
}
// option is the base of V's internal optional return system.
struct _option {
state u8
err IError = none__
// Data is trailing after err
// and is not included in here but in the
// derived _option_xxx types
}
fn opt_ok2(data voidptr, mut option _option, size int) {
unsafe {
*option = _option{}
// use err to get the end of OptionBase and then memcpy into it
vmemcpy(&u8(&option.err) + sizeof(IError), data, size)
}
}
struct _result {
is_error bool
err IError = none__

View File

@ -550,15 +550,6 @@ pub fn (s string) u64() u64 {
return strconv.common_parse_uint(s, 0, 64, false, false) or { 0 }
}
// parse_uint is like `parse_int` but for unsigned numbers
//
// This method directly exposes the `parse_int` function from `strconv`
// as a method on `string`. For more advanced features,
// consider calling `strconv.common_parse_int` directly.
pub fn (s string) parse_uint(_base int, _bit_size int) ?u64 {
return strconv.parse_uint(s, _base, _bit_size)
}
// parse_int interprets a string s in the given base (0, 2 to 36) and
// bit size (0 to 64) and returns the corresponding value i.
//
@ -575,6 +566,15 @@ pub fn (s string) parse_uint(_base int, _bit_size int) ?u64 {
// This method directly exposes the `parse_uint` function from `strconv`
// as a method on `string`. For more advanced features,
// consider calling `strconv.common_parse_uint` directly.
pub fn (s string) parse_uint(_base int, _bit_size int) ?u64 {
return strconv.parse_uint(s, _base, _bit_size)
}
// parse_uint is like `parse_int` but for unsigned numbers
//
// This method directly exposes the `parse_int` function from `strconv`
// as a method on `string`. For more advanced features,
// consider calling `strconv.common_parse_int` directly.
pub fn (s string) parse_int(_base int, _bit_size int) ?i64 {
return strconv.parse_int(s, _base, _bit_size)
}
@ -1521,42 +1521,42 @@ pub fn (c u8) is_space() bool {
}
// is_digit returns `true` if the byte is in range 0-9 and `false` otherwise.
// Example: assert u8(`9`).is_digit() == true
// Example: assert u8(`9`) == true
[inline]
pub fn (c u8) is_digit() bool {
return c >= `0` && c <= `9`
}
// is_hex_digit returns `true` if the byte is either in range 0-9, a-f or A-F and `false` otherwise.
// Example: assert u8(`F`).is_hex_digit() == true
// Example: assert u8(`F`) == true
[inline]
pub fn (c u8) is_hex_digit() bool {
return (c >= `0` && c <= `9`) || (c >= `a` && c <= `f`) || (c >= `A` && c <= `F`)
}
// is_oct_digit returns `true` if the byte is in range 0-7 and `false` otherwise.
// Example: assert u8(`7`).is_oct_digit() == true
// Example: assert u8(`7`) == true
[inline]
pub fn (c u8) is_oct_digit() bool {
return c >= `0` && c <= `7`
}
// is_bin_digit returns `true` if the byte is a binary digit (0 or 1) and `false` otherwise.
// Example: assert u8(`0`).is_bin_digit() == true
// Example: assert u8(`0`) == true
[inline]
pub fn (c u8) is_bin_digit() bool {
return c == `0` || c == `1`
}
// is_letter returns `true` if the byte is in range a-z or A-Z and `false` otherwise.
// Example: assert u8(`V`).is_letter() == true
// Example: assert u8(`V`) == true
[inline]
pub fn (c u8) is_letter() bool {
return (c >= `a` && c <= `z`) || (c >= `A` && c <= `Z`)
}
// is_alnum returns `true` if the byte is in range a-z, A-Z, 0-9 and `false` otherwise.
// Example: assert u8(`V`).is_alnum() == true
// Example: assert u8(`V`) == true
[inline]
pub fn (c u8) is_alnum() bool {
return (c >= `a` && c <= `z`) || (c >= `A` && c <= `Z`) || (c >= `0` && c <= `9`)

View File

@ -26,5 +26,4 @@ println(stack)
- [x] Stack (LIFO)
- [x] Queue (FIFO)
- [x] Min heap (priority queue)
- [ ] Set
- [ ] ...

View File

@ -244,22 +244,16 @@ fn parse_request_line(s string) ?(Method, urllib.URL, Version) {
//
// a possible solution is to use the a list of QueryValue
pub fn parse_form(body string) map[string]string {
words := body.split('&')
mut form := map[string]string{}
if body.match_glob('{*}') {
form['json'] = body
} else {
words := body.split('&')
for word in words {
kv := word.split_nth('=', 2)
if kv.len != 2 {
continue
}
key := urllib.query_unescape(kv[0]) or { continue }
val := urllib.query_unescape(kv[1]) or { continue }
form[key] = val
for word in words {
kv := word.split_nth('=', 2)
if kv.len != 2 {
continue
}
key := urllib.query_unescape(kv[0]) or { continue }
val := urllib.query_unescape(kv[1]) or { continue }
form[key] = val
}
return form
// }

View File

@ -95,30 +95,6 @@ fn test_parse_form() {
'a': 'b'
'c': ' d '
}
assert parse_form('{json}') == {
'json': '{json}'
}
assert parse_form('{
"_id": "76c",
"friends": [
{
"id": 0,
"name": "Mason Luna"
}
],
"greeting": "Hello."
}') == {
'json': '{
"_id": "76c",
"friends": [
{
"id": 0,
"name": "Mason Luna"
}
],
"greeting": "Hello."
}'
}
}
fn test_parse_multipart_form() {

View File

@ -227,7 +227,7 @@ fn (mut c Client) send_data() ? {
fn (mut c Client) send_body(cfg Mail) ? {
is_html := cfg.body_type == .html
date := cfg.date.custom_format('ddd, D MMM YYYY HH:mm ZZ')
date := cfg.date.utc_string().trim_right(' UTC') // TODO
mut sb := strings.new_builder(200)
sb.write_string('From: $cfg.from\r\n')
sb.write_string('To: <$cfg.to>\r\n')
@ -236,9 +236,7 @@ fn (mut c Client) send_body(cfg Mail) ? {
sb.write_string('Date: $date\r\n')
sb.write_string('Subject: $cfg.subject\r\n')
if is_html {
sb.write_string('Content-Type: text/html; charset=UTF-8')
} else {
sb.write_string('Content-Type: text/plain; charset=UTF-8')
sb.write_string('Content-Type: text/html; charset=ISO-8859-1')
}
sb.write_string('\r\n\r\n')
sb.write_string(cfg.body)

View File

@ -215,9 +215,6 @@ fn str_to_primitive(str string, typ int) ?orm.Primitive {
16 {
return orm.Primitive(str.i8() == 1)
}
18 {
return orm.Primitive(str == 't')
}
// i8
5 {
return orm.Primitive(str.i8())

View File

@ -249,14 +249,7 @@ pub fn (mut re RE) find_from(in_txt string, start int) (int, int) {
return -1, -1
}
// find_all find all the non overlapping occurrences of the match pattern and return the start and end index of the match
//
// Usage:
// ```v
// blurb := 'foobar boo steelbar toolbox foot tooooot'
// mut re := regex.regex_opt('f|t[eo]+')?
// res := re.find_all(blurb) // [0, 3, 12, 15, 20, 23, 28, 31, 33, 39]
// ```
// find_all find all the non overlapping occurrences of the match pattern
[direct_array_access]
pub fn (mut re RE) find_all(in_txt string) []int {
// old_flag := re.flag

View File

@ -156,7 +156,7 @@ pub fn stbi_write_tga(path string, w int, h int, comp int, buf &u8) ? {
}
// stbi_write_png write on path a JPG file
// quality select the compression quality of the JPG
// quality select teh compression quality of the JPG
// quality is between 1 and 100. Higher quality looks better but results in a bigger image.
pub fn stbi_write_jpg(path string, w int, h int, comp int, buf &u8, quality int) ? {
if 0 == C.stbi_write_jpg(&char(path.str), w, h, comp, buf, quality) {

View File

@ -12,9 +12,7 @@ pub type Builder = []u8
// new_builder returns a new string builder, with an initial capacity of `initial_size`
pub fn new_builder(initial_size int) Builder {
mut res := Builder([]u8{cap: initial_size})
unsafe {
res.flags = .noslices
}
unsafe { res.flags.set(.noslices) }
return res
}
@ -50,12 +48,14 @@ pub fn (mut b Builder) write_runes(runes []rune) {
}
}
// clear clears the buffer contents
pub fn (mut b Builder) clear() {
b = []u8{cap: b.cap}
// write_b appends a single `data` byte to the accumulated buffer
[deprecated: 'Use write_u8() instead']
[deprecated_after: '2022-02-11']
pub fn (mut b Builder) write_b(data u8) {
b << data
}
// write_u8 appends a single `data` byte to the accumulated buffer
// write_byte appends a single `data` byte to the accumulated buffer
pub fn (mut b Builder) write_u8(data u8) {
b << data
}
@ -87,7 +87,7 @@ pub fn (mut b Builder) drain_builder(mut other Builder, other_new_cap int) {
// Note: it can panic, if there are not enough bytes in the strings builder yet.
[inline]
pub fn (b &Builder) byte_at(n int) u8 {
return unsafe { &u8(b.data)[n] }
return unsafe { (&[]u8(b))[n] }
}
// write appends the string `s` to the buffer
@ -145,15 +145,14 @@ pub fn (mut b Builder) go_back_to(pos int) {
// writeln appends the string `s`, and then a newline character.
[inline]
pub fn (mut b Builder) writeln(s string) {
new_len := b.len + s.len + 1
b.ensure_cap(new_len)
unsafe {
&u8(b.data)[new_len - 1] = u8(`\n`)
if s.len > 0 {
vmemcpy(&u8(b.data) + b.len, s.str, s.len)
}
b.len = new_len
// for c in s {
// b.buf << c
// }
if s.len > 0 {
unsafe { b.push_many(s.str, s.len) }
}
// b.buf << []u8(s) // TODO
b << u8(`\n`)
}
// last_n(5) returns 'world'
@ -191,13 +190,18 @@ pub fn (mut b Builder) str() string {
// ensure_cap ensures that the buffer has enough space for at least `n` bytes by growing the buffer if necessary
pub fn (mut b Builder) ensure_cap(n int) {
// code adapted from vlib/builtin/array.v
if n <= b.cap {
return
}
new_data := unsafe { malloc(u64(n) * u64(b.element_size)) }
if b.data != 0 {
unsafe { vmemcpy(new_data, b.data, b.len) }
unsafe { free(b.data) }
new_data := vcalloc(n * b.element_size)
if b.data != voidptr(0) {
unsafe { vmemcpy(new_data, b.data, b.len * b.element_size) }
// TODO: the old data may be leaked when no GC is used (ref-counting?)
if b.flags.has(.noslices) {
unsafe { free(b.data) }
}
}
unsafe {
b.data = new_data

View File

@ -18,12 +18,13 @@ pub fn new_builder(initial_size int) Builder {
return []u8{cap: initial_size}
}
pub fn (mut b Builder) write_byte(data byte) {
[deprecated: 'Use write_u8() instead']
pub fn (mut b Builder) write_b(data u8) {
b << data
}
pub fn (mut b Builder) clear() {
b = []u8{cap: b.cap}
pub fn (mut b Builder) write_byte(data byte) {
b << data
}
pub fn (mut b Builder) write_u8(data u8) {

View File

@ -39,9 +39,6 @@ fn test_sb() {
assert last_2 == '56'
final_sb := sb.str()
assert final_sb == '1234'
sb.clear()
assert sb.str() == ''
//}
}

View File

@ -40,8 +40,6 @@ fn test_sb() {
final_sb := sb.str()
assert final_sb == '1234'
//}
sb.clear()
assert sb.str() == ''
}
fn test_big_sb() {

View File

@ -193,46 +193,24 @@ fn parse_iso8601_time(s string) ?(int, int, int, int, i64, bool) {
hour_ := 0
minute_ := 0
second_ := 0
mut microsecond_ := 0
mut nanosecond_ := 0
microsecond_ := 0
plus_min_z := `a`
offset_hour := 0
offset_minute := 0
mut count := 0
count = unsafe {
C.sscanf(&char(s.str), c'%2d:%2d:%2d.%9d%c', &hour_, &minute_, &second_, &nanosecond_,
&char(&plus_min_z))
mut count := unsafe {
C.sscanf(&char(s.str), c'%2d:%2d:%2d.%6d%c%2d:%2d', &hour_, &minute_, &second_,
&microsecond_, &char(&plus_min_z), &offset_hour, &offset_minute)
}
if count == 5 && plus_min_z == `Z` {
// normalise the nanoseconds:
mut ndigits := 0
if mut pos := s.index('.') {
pos++
for ; pos < s.len && s[pos].is_digit(); pos++ {
ndigits++
}
}
for ndigits < 9 {
nanosecond_ *= 10
ndigits++
}
microsecond_ = nanosecond_ / 1000
} else {
// Missread microsecond ([Sec Hour Minute].len == 3 < 4)
if count < 4 {
count = unsafe {
C.sscanf(&char(s.str), c'%2d:%2d:%2d.%6d%c%2d:%2d', &hour_, &minute_, &second_,
&microsecond_, &char(&plus_min_z), &offset_hour, &offset_minute)
}
// Missread microsecond ([Sec Hour Minute].len == 3 < 4)
if count < 4 {
count = unsafe {
C.sscanf(&char(s.str), c'%2d:%2d:%2d%c%2d:%2d', &hour_, &minute_, &second_,
&char(&plus_min_z), &offset_hour, &offset_minute)
}
count++ // Increment count because skipped microsecond
}
if count < 4 {
return error_invalid_time(10)
C.sscanf(&char(s.str), c'%2d:%2d:%2d%c%2d:%2d', &hour_, &minute_, &second_,
&char(&plus_min_z), &offset_hour, &offset_minute)
}
count++ // Increment count because skipped microsecond
}
if count < 4 {
return error_invalid_time(10)
}
is_local_time := plus_min_z == `a` && count == 4
is_utc := plus_min_z == `Z` && count == 5

View File

@ -3,7 +3,6 @@ import time
fn test_parse() {
s := '2018-01-27 12:48:34'
t := time.parse(s) or {
eprintln('> failing format: $s | err: $err')
assert false
return
}
@ -24,7 +23,6 @@ fn test_parse_invalid() {
fn test_parse_rfc2822() {
s1 := 'Thu, 12 Dec 2019 06:07:45 GMT'
t1 := time.parse_rfc2822(s1) or {
eprintln('> failing format: $s1 | err: $err')
assert false
return
}
@ -33,7 +31,6 @@ fn test_parse_rfc2822() {
assert t1.unix == 1576130865
s2 := 'Thu 12 Dec 2019 06:07:45 +0800'
t2 := time.parse_rfc2822(s2) or {
eprintln('> failing format: $s2 | err: $err')
assert false
return
}
@ -70,7 +67,6 @@ fn test_parse_iso8601() {
]
for i, format in formats {
t := time.parse_iso8601(format) or {
eprintln('>>> failing format: $format | err: $err')
assert false
continue
}
@ -94,7 +90,6 @@ fn test_parse_iso8601() {
fn test_parse_iso8601_local() {
format := '2020-06-05T15:38:06.015959'
t := time.parse_iso8601(format) or {
eprintln('> failing format: $format | err: $err')
assert false
return
}
@ -132,7 +127,6 @@ fn test_parse_iso8601_invalid() {
fn test_parse_iso8601_date_only() {
format := '2020-06-05'
t := time.parse_iso8601(format) or {
eprintln('> failing format: $format | err: $err')
assert false
return
}
@ -167,20 +161,3 @@ fn test_invalid_dates_should_error_during_parse() {
check_invalid_date('2008-12-01 00:60:00')
check_invalid_date('2008-12-01 00:01:60')
}
fn test_parse_rfc3339() {
pairs := [
['2015-01-06T15:47:32.080254511Z', '2015-01-06 15:47:32.080254'],
['2015-01-06T15:47:32.072697474Z', '2015-01-06 15:47:32.072697'],
]
for pair in pairs {
input, expected := pair[0], pair[1]
res := time.parse_rfc3339(input) or {
eprintln('>>> failing input: $input | err: $err')
assert false
return
}
output := res.format_ss_micro()
assert expected == output
}
}

View File

@ -997,7 +997,7 @@ pub fn (t &Table) thread_cname(return_type Type) string {
}
return_type_sym := t.sym(return_type)
suffix := if return_type.is_ptr() { '_ptr' } else { '' }
prefix := if return_type.has_flag(.optional) { '_option_' } else { '' }
prefix := if return_type.has_flag(.optional) { 'Option_' } else { '' }
return '__v_thread_$prefix$return_type_sym.cname$suffix'
}
@ -1519,9 +1519,6 @@ pub fn (t Table) does_type_implement_interface(typ Type, inter_typ Type) bool {
// Even map[string]map[string]T can be resolved.
// This is used for resolving the generic return type of CallExpr white `unwrap_generic` is used to resolve generic usage in FnDecl.
pub fn (mut t Table) resolve_generic_to_concrete(generic_type Type, generic_names []string, concrete_types []Type) ?Type {
if generic_names.len != concrete_types.len {
return none
}
mut sym := t.sym(generic_type)
if sym.name in generic_names {
index := generic_names.index(sym.name)

View File

@ -418,21 +418,6 @@ fn (v &Builder) all_args(ccoptions CcompilerOptions) []string {
}
all << ccoptions.args
all << ccoptions.o_args
$if windows {
// Adding default options for tcc, gcc and clang as done in msvc.v.
// This is done before pre_args is added so that it can be overwritten if needed.
// -Wl,-stack=16777216 == /F 16777216
// -Werror=implicit-function-declaration == /we4013
// /volatile:ms - there seems to be no equivalent,
// normally msvc should use /volatile:iso
// but it could have an impact on vinix if it is created with msvc.
if !ccoptions.is_cc_msvc {
all << '-Wl,-stack=16777216'
if !v.pref.is_cstrict {
all << '-Werror=implicit-function-declaration'
}
}
}
all << ccoptions.pre_args
all << ccoptions.source_args
all << ccoptions.post_args

View File

@ -22,8 +22,7 @@ pub fn (mut c Checker) check_types(got ast.Type, expected ast.Type) bool {
if expected == ast.voidptr_type {
return true
}
if (expected == ast.bool_type && (got.is_any_kind_of_pointer() || got.is_int()))
|| ((expected.is_any_kind_of_pointer() || expected.is_int()) && got == ast.bool_type) {
if expected == ast.bool_type && (got.is_any_kind_of_pointer() || got.is_int()) {
return true
}
@ -51,7 +50,9 @@ pub fn (mut c Checker) check_types(got ast.Type, expected ast.Type) bool {
}
} else if got_sym.kind == .array_fixed {
// Allow fixed arrays as `&i8` etc
if expected_sym.is_number() || expected.is_any_kind_of_pointer() {
if expected_sym.is_number() {
return true
} else if expected.is_any_kind_of_pointer() {
return true
}
} else if expected_sym.kind == .array_fixed {
@ -64,14 +65,6 @@ pub fn (mut c Checker) check_types(got ast.Type, expected ast.Type) bool {
return true
}
}
} else if got_sym.kind == .array {
if expected_sym.is_number() || expected.is_any_kind_of_pointer() {
return true
}
} else if expected_sym.kind == .array {
if got_sym.is_number() && got.is_any_kind_of_pointer() {
return true
}
}
if expected_sym.kind == .enum_ && got_sym.is_number() {
// Allow enums as numbers

View File

@ -2670,7 +2670,6 @@ pub fn (mut c Checker) expr(node_ ast.Expr) ast.Type {
}
ast.DumpExpr {
node.expr_type = c.expr(node.expr)
c.check_expr_opt_call(node.expr, node.expr_type)
etidx := node.expr_type.idx()
if etidx == ast.void_type_idx {
c.error('dump expression can not be void', node.expr.pos())

View File

@ -496,7 +496,8 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
}
panic('unreachable')
} else if fn_name == 'json.encode' {
}
if fn_name == 'json.encode' {
} else if fn_name == 'json.decode' && node.args.len > 0 {
if node.args.len != 2 {
c.error("json.decode expects 2 arguments, a type and a string (e.g `json.decode(T, '')`)",
@ -505,20 +506,12 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
expr := node.args[0].expr
if expr is ast.TypeNode {
sym := c.table.sym(c.unwrap_generic(expr.typ))
if c.table.known_type(sym.name) && sym.kind != .placeholder {
mut kind := sym.kind
if sym.info is ast.Alias {
kind = c.table.sym(sym.info.parent_type).kind
}
if kind !in [.struct_, .sum_type, .map, .array] {
c.error('json.decode: expected sum type, struct, map or array, found $kind',
expr.pos)
}
} else {
sym := c.table.sym(expr.typ)
if !c.table.known_type(sym.name) {
c.error('json.decode: unknown type `$sym.name`', node.pos)
}
} else {
// if expr !is ast.TypeNode {
typ := expr.type_name()
c.error('json.decode: first argument needs to be a type, got `$typ`', node.pos)
return ast.void_type
@ -882,7 +875,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
mut final_param_sym := param_typ_sym
mut final_param_typ := param.typ
if func.is_variadic && param_typ_sym.info is ast.Array {
final_param_typ = param_typ_sym.info.elem_type
final_param_typ = param_typ_sym.array_info().elem_type
final_param_sym = c.table.sym(final_param_typ)
}
// Note: Casting to voidptr is used as an escape mechanism, so:
@ -966,10 +959,10 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
continue
}
// Allow `[32]i8` as `&i8` etc
if ((arg_typ_sym.kind == .array_fixed || arg_typ_sym.kind == .array)
&& (param_is_number || param.typ.is_any_kind_of_pointer()))
|| ((param_typ_sym.kind == .array_fixed || param_typ_sym.kind == .array)
&& (typ_is_number || arg_typ.is_any_kind_of_pointer())) {
if (arg_typ_sym.kind == .array_fixed && (param_is_number
|| param.typ.is_any_kind_of_pointer()))
|| (param_typ_sym.kind == .array_fixed && (typ_is_number
|| arg_typ.is_any_kind_of_pointer())) {
continue
}
// Allow `int` as `&i8`
@ -1118,7 +1111,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if left_sym.kind == .array && method_name in array_builtin_methods {
return c.array_builtin_method_call(mut node, left_type, c.table.sym(left_type))
} else if (left_sym.kind == .map || final_left_sym.kind == .map)
&& method_name in ['clone', 'keys', 'values', 'move', 'delete'] {
&& method_name in ['clone', 'keys', 'move', 'delete'] {
if left_sym.kind == .map {
return c.map_builtin_method_call(mut node, left_type, left_sym)
} else {
@ -1229,11 +1222,8 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
// x is Bar<T>, x.foo() -> x.foo<T>()
rec_sym := c.table.sym(node.left_type)
rec_is_generic := left_type.has_flag(.generic)
mut rec_concrete_types := []ast.Type{}
if rec_sym.info is ast.Struct {
rec_concrete_types = rec_sym.info.concrete_types.clone()
if rec_is_generic && node.concrete_types.len == 0
&& method.generic_names.len == rec_sym.info.generic_types.len {
if rec_is_generic && node.concrete_types.len == 0 {
node.concrete_types = rec_sym.info.generic_types
} else if !rec_is_generic && rec_sym.info.concrete_types.len > 0
&& node.concrete_types.len > 0
@ -1313,7 +1303,11 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
no_type_promotion = true
}
}
// if method_name == 'clone' {
// println('CLONE nr args=$method.args.len')
// }
// node.args << method.args[0].typ
// node.exp_arg_types << method.args[0].typ
for i, mut arg in node.args {
if i > 0 || exp_arg_typ == ast.Type(0) {
exp_arg_typ = if method.is_variadic && i >= method.params.len - 1 {
@ -1346,13 +1340,8 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
final_arg_sym = c.table.sym(final_arg_typ)
}
if exp_arg_typ.has_flag(.generic) {
method_concrete_types := if method.generic_names.len == rec_concrete_types.len {
rec_concrete_types
} else {
concrete_types
}
if exp_utyp := c.table.resolve_generic_to_concrete(exp_arg_typ, method.generic_names,
method_concrete_types)
concrete_types)
{
exp_arg_typ = exp_utyp
} else {
@ -1361,7 +1350,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if got_arg_typ.has_flag(.generic) {
if got_utyp := c.table.resolve_generic_to_concrete(got_arg_typ, method.generic_names,
method_concrete_types)
concrete_types)
{
got_arg_typ = got_utyp
} else {
@ -1831,16 +1820,12 @@ fn (mut c Checker) map_builtin_method_call(mut node ast.CallExpr, left_type ast.
}
ret_type = ret_type.clear_flag(.shared_f)
}
'keys', 'values' {
'keys' {
if node.args.len != 0 {
c.error('`.${method_name}()` does not have any arguments', node.args[0].pos)
c.error('`.keys()` does not have any arguments', node.args[0].pos)
}
info := left_sym.info as ast.Map
typ := if method_name == 'keys' {
c.table.find_or_register_array(info.key_type)
} else {
c.table.find_or_register_array(info.value_type)
}
typ := c.table.find_or_register_array(info.key_type)
ret_type = ast.Type(typ)
}
'delete' {

View File

@ -24,7 +24,6 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
return
}
exp_is_optional := expected_type.has_flag(.optional)
exp_is_result := expected_type.has_flag(.result)
mut expected_types := [expected_type]
if expected_type_sym.info is ast.MultiReturn {
expected_types = expected_type_sym.info.types
@ -74,12 +73,10 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
}
}
// allow `none` & `error` return types for function that returns optional
option_type_idx := c.table.type_idxs['_option']
result_type_idx := c.table.type_idxs['_result']
option_type_idx := c.table.type_idxs['Option']
got_types_0_idx := got_types[0].idx()
if (exp_is_optional
&& got_types_0_idx in [ast.none_type_idx, ast.error_type_idx, option_type_idx])
|| (exp_is_result && got_types_0_idx in [ast.error_type_idx, result_type_idx]) {
if exp_is_optional
&& got_types_0_idx in [ast.none_type_idx, ast.error_type_idx, option_type_idx] {
if got_types_0_idx == ast.none_type_idx && expected_type == ast.ovoid_type {
c.error('returning `none` in functions, that have a `?` result type is not allowed anymore, either `return error(message)` or just `return` instead',
node.pos)

View File

@ -0,0 +1,7 @@
vlib/v/checker/tests/generic_parameter_on_method.vv:15:15: error: cannot use `&Type<int>` as `Type<>` in argument 1 to `ContainerType<int>.contains`
13 | fn main() {
14 | con := ContainerType<int>{typ: &Type<int>{0}}
15 | con.contains(con.typ)
| ~~~~~~~
16 | println(con)
17 | }

View File

@ -0,0 +1,17 @@
struct Type<T> {
value T
}
struct ContainerType<T> {
typ &Type<T>
}
fn (instance &ContainerType<T>) contains(typ Type<T>) {
println(typ)
}
fn main() {
con := ContainerType<int>{typ: &Type<int>{0}}
con.contains(con.typ)
println(con)
}

View File

@ -26,16 +26,16 @@ vlib/v/checker/tests/generics_fn_arguments_count_err.vv:22:22: error: expected 2
| ~~~~~~~~~~~~~~~~~~
23 | println(ret4)
24 | }
vlib/v/checker/tests/generics_fn_arguments_count_err.vv:2:11: error: no known default format for type `A`
vlib/v/checker/tests/generics_fn_arguments_count_err.vv:2:15: error: no known default format for type `B`
1 | fn get_name<A, B>(a A, b B) string {
2 | return '$a, $b'
| ^
| ^
3 | }
4 |
vlib/v/checker/tests/generics_fn_arguments_count_err.vv:8:11: error: no known default format for type `A`
vlib/v/checker/tests/generics_fn_arguments_count_err.vv:8:15: error: no known default format for type `B`
6 |
7 | fn (f Foo) get_name<A, B>(a A, b B) string {
8 | return '$a, $b'
| ^
| ^
9 | }
10 |

View File

@ -1,34 +0,0 @@
vlib/v/checker/tests/json_decode.vv:11:7: error: json.decode: unknown type `St2`
9 | fn main() {
10 | json.decode(St, '{a: ""}') ? // OK
11 | json.decode(St2, '{a: ""}') ? // BAD
| ~~~~~~~~~~~~~~~~~~~~~~
12 | json.decode(St) ? // BAD
13 | json.decode(string, '""') ? // BAD
vlib/v/checker/tests/json_decode.vv:12:7: error: json.decode expects 2 arguments, a type and a string (e.g `json.decode(T, '')`)
10 | json.decode(St, '{a: ""}') ? // OK
11 | json.decode(St2, '{a: ""}') ? // BAD
12 | json.decode(St) ? // BAD
| ~~~~~~~~~~
13 | json.decode(string, '""') ? // BAD
14 | json.decode(Num, '5') ? // BAD
vlib/v/checker/tests/json_decode.vv:13:14: error: json.decode: expected sum type, struct, map or array, found string
11 | json.decode(St2, '{a: ""}') ? // BAD
12 | json.decode(St) ? // BAD
13 | json.decode(string, '""') ? // BAD
| ~~~~~~
14 | json.decode(Num, '5') ? // BAD
15 | json.decode(St, 6) ? // BAD
vlib/v/checker/tests/json_decode.vv:14:14: error: json.decode: expected sum type, struct, map or array, found u8
12 | json.decode(St) ? // BAD
13 | json.decode(string, '""') ? // BAD
14 | json.decode(Num, '5') ? // BAD
| ~~~
15 | json.decode(St, 6) ? // BAD
16 | }
vlib/v/checker/tests/json_decode.vv:15:7: error: json.decode: second argument needs to be a string
13 | json.decode(string, '""') ? // BAD
14 | json.decode(Num, '5') ? // BAD
15 | json.decode(St, 6) ? // BAD
| ~~~~~~~~~~~~~
16 | }

View File

@ -1,16 +0,0 @@
import json
struct St {
a string
}
type Num = u8
fn main() {
json.decode(St, '{a: ""}') ? // OK
json.decode(St2, '{a: ""}') ? // BAD
json.decode(St) ? // BAD
json.decode(string, '""') ? // BAD
json.decode(Num, '5') ? // BAD
json.decode(St, 6) ? // BAD
}

View File

@ -1,6 +0,0 @@
vlib/v/checker/tests/optional_in_dump_err.vv:10:7: error: create() returns an option, so it should have either an `or {}` block, or `?` at the end
8 |
9 | fn main() {
10 | dump(create())
| ~~~~~~~~
11 | }

View File

@ -1,11 +0,0 @@
struct AStruct {
field1 int
}
fn create() ?AStruct {
return AStruct{123}
}
fn main() {
dump(create())
}

View File

@ -32,7 +32,6 @@ const (
// when operands are switched
cmp_rev = ['eq', 'ne', 'lt', 'gt', 'le', 'ge']
result_name = '_result'
option_name = '_option'
)
fn string_array_to_map(a []string) map[string]bool {
@ -973,7 +972,7 @@ fn (mut g Gen) expr_string_surround(prepend string, expr ast.Expr, append string
// if one location changes
fn (mut g Gen) optional_type_name(t ast.Type) (string, string) {
base := g.base_type(t)
mut styp := '_option_$base'
mut styp := 'Option_$base'
if t.is_ptr() {
styp = styp.replace('*', '_ptr')
}
@ -1188,11 +1187,11 @@ fn (mut g Gen) write_chan_push_optional_fns() {
done << styp
g.register_optional(ast.void_type.set_flag(.optional))
g.channel_definitions.writeln('
static inline ${c.option_name}_void __Option_${styp}_pushval($styp ch, $el_type e) {
static inline Option_void __Option_${styp}_pushval($styp ch, $el_type e) {
if (sync__Channel_try_push_priv(ch, &e, false)) {
return (${c.option_name}_void){ .state = 2, .err = _v_error(_SLIT("channel closed")), .data = {EMPTY_STRUCT_INITIALIZATION} };
return (Option_void){ .state = 2, .err = _v_error(_SLIT("channel closed")), .data = {EMPTY_STRUCT_INITIALIZATION} };
}
return (${c.option_name}_void){0};
return (Option_void){0};
}')
}
}
@ -1564,9 +1563,9 @@ fn (mut g Gen) stmts_with_tmp_var(stmts []ast.Stmt, tmp_var string) {
styp = 'f64'
}
}
g.write('opt_ok2(&($styp[]) { ')
g.write('opt_ok(&($styp[]) { ')
g.stmt(stmt)
g.writeln(' }, ($c.option_name*)(&$tmp_var), sizeof($styp));')
g.writeln(' }, (Option*)(&$tmp_var), sizeof($styp));')
}
}
} else {
@ -1751,7 +1750,34 @@ fn (mut g Gen) stmt(node ast.Stmt) {
g.defer_stmts << defer_stmt
}
ast.EnumDecl {
g.enum_decl(node)
enum_name := util.no_dots(node.name)
is_flag := node.is_flag
g.enum_typedefs.writeln('typedef enum {')
mut cur_enum_expr := ''
mut cur_enum_offset := 0
for i, field in node.fields {
g.enum_typedefs.write_string('\t${enum_name}__$field.name')
if field.has_expr {
g.enum_typedefs.write_string(' = ')
expr_str := g.expr_string(field.expr)
g.enum_typedefs.write_string(expr_str)
cur_enum_expr = expr_str
cur_enum_offset = 0
} else if is_flag {
g.enum_typedefs.write_string(' = ')
cur_enum_expr = '1 << $i'
g.enum_typedefs.write_string((1 << i).str())
cur_enum_offset = 0
}
cur_value := if cur_enum_offset > 0 {
'$cur_enum_expr+$cur_enum_offset'
} else {
cur_enum_expr
}
g.enum_typedefs.writeln(', // $cur_value')
cur_enum_offset++
}
g.enum_typedefs.writeln('} $enum_name;\n')
}
ast.ExprStmt {
g.write_v_source_line_info(node.pos)
@ -2238,8 +2264,7 @@ fn (mut g Gen) expr_with_cast(expr ast.Expr, got_type_raw ast.Type, expected_typ
deref_sym := g.table.sym(got_deref_type)
deref_will_match := expected_type in [got_type, got_deref_type, deref_sym.parent_idx]
got_is_opt := got_type.has_flag(.optional)
if deref_will_match || got_is_opt || expr.is_auto_deref_var()
|| expected_type.has_flag(.generic) {
if deref_will_match || got_is_opt || expr.is_auto_deref_var() {
g.write('*')
}
}
@ -3392,37 +3417,6 @@ fn (mut g Gen) selector_expr(node ast.SelectorExpr) {
}
}
fn (mut g Gen) enum_decl(node ast.EnumDecl) {
enum_name := util.no_dots(node.name)
is_flag := node.is_flag
g.enum_typedefs.writeln('typedef enum {')
mut cur_enum_expr := ''
mut cur_enum_offset := 0
for i, field in node.fields {
g.enum_typedefs.write_string('\t${enum_name}__$field.name')
if field.has_expr {
g.enum_typedefs.write_string(' = ')
expr_str := g.expr_string(field.expr)
g.enum_typedefs.write_string(expr_str)
cur_enum_expr = expr_str
cur_enum_offset = 0
} else if is_flag {
g.enum_typedefs.write_string(' = ')
cur_enum_expr = '1 << $i'
g.enum_typedefs.write_string((1 << i).str())
cur_enum_offset = 0
}
cur_value := if cur_enum_offset > 0 {
'$cur_enum_expr+$cur_enum_offset'
} else {
cur_enum_expr
}
g.enum_typedefs.writeln(', // $cur_value')
cur_enum_offset++
}
g.enum_typedefs.writeln('} $enum_name;\n')
}
fn (mut g Gen) enum_expr(node ast.Expr) {
match node {
ast.EnumVal {
@ -4017,11 +4011,11 @@ fn (mut g Gen) return_stmt(node ast.Return) {
ret_typ := g.typ(g.unwrap_generic(g.fn_decl.return_type))
mut use_tmp_var := g.defer_stmts.len > 0 || g.defer_profile_code.len > 0
|| g.cur_lock.lockeds.len > 0
// handle promoting none/error/function returning _option'
// handle promoting none/error/function returning 'Option'
if fn_return_is_optional {
optional_none := node.exprs[0] is ast.None
ftyp := g.typ(node.types[0])
mut is_regular_option := ftyp == '_option'
mut is_regular_option := ftyp == 'Option'
if optional_none || is_regular_option || node.types[0] == ast.error_type_idx {
if !isnil(g.fn_decl) && g.fn_decl.is_test {
test_error_var := g.new_tmp_var()
@ -4091,7 +4085,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
if fn_return_is_optional || fn_return_is_result {
g.writeln('$ret_typ $tmpvar;')
styp = g.base_type(g.fn_decl.return_type)
g.write('opt_ok2(&($styp/*X*/[]) { ')
g.write('opt_ok(&($styp/*X*/[]) { ')
} else {
if use_tmp_var {
g.write('$ret_typ $tmpvar = ')
@ -4160,7 +4154,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
}
g.write('}')
if fn_return_is_optional || fn_return_is_result {
g.writeln(' }, ($c.option_name*)(&$tmpvar), sizeof($styp));')
g.writeln(' }, (Option*)(&$tmpvar), sizeof($styp));')
g.write_defer_stmts_when_needed()
g.write('return $tmpvar')
}
@ -4193,10 +4187,10 @@ fn (mut g Gen) return_stmt(node ast.Return) {
node.types[0].has_flag(.optional)
}
}
if fn_return_is_optional && !expr_type_is_opt && return_sym.name != c.option_name {
if fn_return_is_optional && !expr_type_is_opt && return_sym.name != 'Option' {
styp := g.base_type(g.fn_decl.return_type)
g.writeln('$ret_typ $tmpvar;')
g.write('opt_ok2(&($styp[]) { ')
g.write('opt_ok(&($styp[]) { ')
if !g.fn_decl.return_type.is_ptr() && node.types[0].is_ptr() {
if !(node.exprs[0] is ast.Ident && !g.is_amp) {
g.write('*')
@ -4208,7 +4202,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
g.write(', ')
}
}
g.writeln(' }, ($c.option_name*)(&$tmpvar), sizeof($styp));')
g.writeln(' }, (Option*)(&$tmpvar), sizeof($styp));')
g.write_defer_stmts_when_needed()
g.autofree_scope_vars(node.pos.pos - 1, node.pos.line_nr, true)
g.writeln('return $tmpvar;')
@ -4495,15 +4489,8 @@ fn (mut g Gen) const_decl_simple_define(name string, val string) {
} else {
x = '_const_$x'
}
if g.pref.translated {
g.definitions.write_string('const int $x = ')
} else {
g.definitions.write_string('#define $x ')
}
g.definitions.write_string('#define $x ')
g.definitions.writeln(val)
if g.pref.translated {
g.definitions.write_string(';')
}
}
fn (mut g Gen) const_decl_init_later(mod string, name string, expr ast.Expr, typ ast.Type, unwrap_option bool) {
@ -4747,7 +4734,7 @@ fn (mut g Gen) write_init_function() {
}
const (
builtins = ['string', 'array', 'DenseArray', 'map', 'Error', 'IError', option_name, result_name]
builtins = ['string', 'array', 'DenseArray', 'map', 'Error', 'IError', 'Option', result_name]
)
fn (mut g Gen) write_builtin_types() {
@ -5134,7 +5121,9 @@ fn (mut g Gen) or_block(var_name string, or_block ast.OrExpr, return_type ast.Ty
g.inside_opt_data = true
g.expr_with_cast(expr_stmt.expr, expr_stmt.typ, return_type.clear_flag(.optional))
g.inside_opt_data = old_inside_opt_data
g.writeln(';')
if g.inside_ternary == 0 {
g.writeln(';')
}
g.stmt_path_pos.delete_last()
} else {
g.stmt(stmt)

View File

@ -1044,8 +1044,6 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
} else if final_left_sym.kind == .map {
if node.name == 'keys' {
name = 'map_keys'
} else if node.name == 'values' {
name = 'map_values'
}
}
if g.pref.obfuscate && g.cur_mod.name == 'main' && name.starts_with('main__')
@ -1284,7 +1282,7 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
g.call_args(node)
g.writeln(');')
tmp2 = g.new_tmp_var()
g.writeln('${option_name}_$typ $tmp2 = ${fn_name}($json_obj);')
g.writeln('Option_$typ $tmp2 = ${fn_name}($json_obj);')
}
if !g.is_autofree {
g.write('cJSON_Delete($json_obj); // del')
@ -1800,7 +1798,7 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
if node.call_expr.return_type == ast.void_type {
gohandle_name = if is_opt { '__v_thread_Option_void' } else { '__v_thread' }
} else {
opt := if is_opt { '${option_name}_' } else { '' }
opt := if is_opt { 'Option_' } else { '' }
gohandle_name = '__v_thread_$opt${g.table.sym(g.unwrap_generic(node.call_expr.return_type)).cname}'
}
if g.pref.os == .windows {

View File

@ -64,7 +64,7 @@ fn (mut g Gen) infix_expr_arrow_op(node ast.InfixExpr) {
if gen_or {
elem_styp := g.typ(elem_type)
g.register_chan_push_optional_fn(elem_styp, styp)
g.write('${option_name}_void $tmp_opt = __Option_${styp}_pushval(')
g.write('Option_void $tmp_opt = __Option_${styp}_pushval(')
} else {
g.write('__${styp}_pushval(')
}

View File

@ -45,7 +45,7 @@ fn (mut g Gen) gen_jsons() {
// cJSON_Parse(str) call is added by the compiler
// Code gen decoder
dec_fn_name := js_dec_name(styp)
dec_fn_dec := '${option_name}_$styp ${dec_fn_name}(cJSON* root)'
dec_fn_dec := 'Option_$styp ${dec_fn_name}(cJSON* root)'
mut init_styp := '$styp res'
if sym.kind == .struct_ {
@ -96,7 +96,7 @@ $dec_fn_dec {
int maxchars = vstrlen_char(prevline_ptr);
vmemcpy(buf, prevline_ptr, (maxchars < maxcontext_chars ? maxchars : maxcontext_chars));
}
return (${option_name}_$styp){.state = 2,.err = _v_error(tos2(buf)),.data = {0}};
return (Option_$styp){.state = 2,.err = _v_error(tos2(buf)),.data = {0}};
}
}
')
@ -157,8 +157,8 @@ $enc_fn_dec {
}
// cJSON_delete
// p.cgen.fns << '$dec return opt_ok(res); \n}'
dec.writeln('\t${option_name}_$styp ret;')
dec.writeln('\topt_ok2(&res, ($option_name*)&ret, sizeof(res));')
dec.writeln('\tOption_$styp ret;')
dec.writeln('\topt_ok(&res, (Option*)&ret, sizeof(res));')
dec.writeln('\treturn ret;\n}')
enc.writeln('\treturn o;\n}')
g.definitions.writeln(dec.str())
@ -255,9 +255,9 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
dec.writeln('\t\t\t}')
} else if !is_js_prim(variant_typ) && variant_sym.kind != .enum_ {
dec.writeln('\t\t\tif (strcmp("$unmangled_variant_name", $type_var) == 0) {')
dec.writeln('\t\t\t\t${option_name}_$variant_typ $tmp = ${js_dec_name(variant_typ)}(root);')
dec.writeln('\t\t\t\tOption_$variant_typ $tmp = ${js_dec_name(variant_typ)}(root);')
dec.writeln('\t\t\t\tif (${tmp}.state != 0) {')
dec.writeln('\t\t\t\t\treturn (${option_name}_$sym.cname){ .state = ${tmp}.state, .err = ${tmp}.err, .data = {0} };')
dec.writeln('\t\t\t\t\treturn (Option_$sym.cname){ .state = ${tmp}.state, .err = ${tmp}.err, .data = {0} };')
dec.writeln('\t\t\t\t}')
dec.writeln('\t\t\t\tres = ${variant_typ}_to_sumtype_${sym.cname}(($variant_typ*)${tmp}.data);')
dec.writeln('\t\t\t}')
@ -321,9 +321,9 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
'cJSON_IsNumber(root->child)'
}
dec.writeln('\t\tif (cJSON_IsArray(root) && $judge_elem_typ) {')
dec.writeln('\t\t\t${option_name}_$var_t $tmp = ${js_dec_name(var_t)}(root);')
dec.writeln('\t\t\tOption_$var_t $tmp = ${js_dec_name(var_t)}(root);')
dec.writeln('\t\t\tif (${tmp}.state != 0) {')
dec.writeln('\t\t\t\treturn (${option_name}_$sym.cname){ .state = ${tmp}.state, .err = ${tmp}.err, .data = {0} };')
dec.writeln('\t\t\t\treturn (Option_$sym.cname){ .state = ${tmp}.state, .err = ${tmp}.err, .data = {0} };')
dec.writeln('\t\t\t}')
dec.writeln('\t\t\tres = ${var_t}_to_sumtype_${sym.cname}(($var_t*)${tmp}.data);')
dec.writeln('\t\t}')
@ -456,16 +456,16 @@ fn gen_js_get(styp string, tmp string, name string, mut dec strings.Builder, is_
dec.writeln('\tcJSON *jsonroot_$tmp = js_get(root,"$name");')
if is_required {
dec.writeln('\tif(jsonroot_$tmp == 0) {')
dec.writeln('\t\treturn (${option_name}_$styp){ .state = 2, .err = _v_error(_SLIT("expected field \'$name\' is missing")), .data = {0} };')
dec.writeln('\t\treturn (Option_$styp){ .state = 2, .err = _v_error(_SLIT("expected field \'$name\' is missing")), .data = {0} };')
dec.writeln('\t}')
}
}
fn gen_js_get_opt(dec_name string, field_type string, styp string, tmp string, name string, mut dec strings.Builder, is_required bool) {
gen_js_get(styp, tmp, name, mut dec, is_required)
dec.writeln('\t${option_name}_$field_type $tmp = $dec_name (jsonroot_$tmp);')
dec.writeln('\tOption_$field_type $tmp = $dec_name (jsonroot_$tmp);')
dec.writeln('\tif(${tmp}.state != 0) {')
dec.writeln('\t\treturn (${option_name}_$styp){ .state = ${tmp}.state, .err = ${tmp}.err, .data = {0} };')
dec.writeln('\t\treturn (Option_$styp){ .state = ${tmp}.state, .err = ${tmp}.err, .data = {0} };')
dec.writeln('\t}')
}
@ -493,10 +493,10 @@ fn (mut g Gen) decode_array(value_type ast.Type) string {
s = '$styp val = ${fn_name}((cJSON *)jsval); '
} else {
s = '
${option_name}_$styp val2 = $fn_name ((cJSON *)jsval);
Option_$styp val2 = $fn_name ((cJSON *)jsval);
if(val2.state != 0) {
array_free(&res);
return *(${option_name}_Array_$styp*)&val2;
return *(Option_Array_$styp*)&val2;
}
$styp val = *($styp*)val2.data;
'
@ -504,7 +504,7 @@ fn (mut g Gen) decode_array(value_type ast.Type) string {
noscan := g.check_noscan(value_type)
return '
if(root && !cJSON_IsArray(root) && !cJSON_IsNull(root)) {
return (${option_name}_Array_$styp){.state = 2, .err = _v_error(string__plus(_SLIT("Json element is not an array: "), tos2((byteptr)cJSON_PrintUnformatted(root)))), .data = {0}};
return (Option_Array_$styp){.state = 2, .err = _v_error(string__plus(_SLIT("Json element is not an array: "), tos2((byteptr)cJSON_PrintUnformatted(root)))), .data = {0}};
}
res = __new_array${noscan}(0, 0, sizeof($styp));
const cJSON *jsval = NULL;
@ -538,17 +538,17 @@ fn (mut g Gen) decode_map(key_type ast.Type, value_type ast.Type) string {
s = '$styp_v val = $fn_name_v (js_get(root, jsval->string));'
} else {
s = '
${option_name}_$styp_v val2 = $fn_name_v (js_get(root, jsval->string));
Option_$styp_v val2 = $fn_name_v (js_get(root, jsval->string));
if(val2.state != 0) {
map_free(&res);
return *(${option_name}_Map_${styp}_$styp_v*)&val2;
return *(Option_Map_${styp}_$styp_v*)&val2;
}
$styp_v val = *($styp_v*)val2.data;
'
}
return '
if(!cJSON_IsObject(root) && !cJSON_IsNull(root)) {
return (${option_name}_Map_${styp}_$styp_v){ .state = 2, .err = _v_error(string__plus(_SLIT("Json element is not an object: "), tos2((byteptr)cJSON_PrintUnformatted(root)))), .data = {0}};
return (Option_Map_${styp}_$styp_v){ .state = 2, .err = _v_error(string__plus(_SLIT("Json element is not an object: "), tos2((byteptr)cJSON_PrintUnformatted(root)))), .data = {0}};
}
res = new_map(sizeof($styp), sizeof($styp_v), $hash_fn, $key_eq_fn, $clone_fn, $free_fn);
cJSON *jsval = NULL;

View File

@ -77,11 +77,11 @@ fn (mut g Gen) sql_stmt_line(nd ast.SqlStmtLine, expr string) {
}
if node.kind == .create {
g.write('${option_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.write('Option_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.sql_create_table(node, expr, table_name)
subs = true
} else if node.kind == .drop {
g.write('${option_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.write('Option_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.writeln('drop(${expr}._object, _SLIT("$table_name"));')
subs = true
} else if node.kind == .insert {
@ -90,10 +90,10 @@ fn (mut g Gen) sql_stmt_line(nd ast.SqlStmtLine, expr string) {
g.sql_insert(node, expr, table_name, arr, res, '', false, '')
dcheck = true
} else if node.kind == .update {
g.write('${option_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.write('Option_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.sql_update(node, expr, table_name)
} else if node.kind == .delete {
g.write('${option_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.write('Option_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.sql_delete(node, expr, table_name)
}
if !dcheck {
@ -185,7 +185,7 @@ fn (mut g Gen) sql_insert(node ast.SqlStmtLine, expr string, table_name string,
g.writeln('array_push(&$last_ids_arr, _MOV((orm__Primitive[]){orm__Connection_name_table[${expr}._typ]._method_last_id(${expr}._object)}));')
}
g.write('${option_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.write('Option_void $res = orm__Connection_name_table[${expr}._typ]._method_')
g.write('insert(${expr}._object, _SLIT("$table_name"), (orm__QueryData){')
g.write('.fields = new_array_from_c_array($fields.len, $fields.len, sizeof(string),')
@ -551,7 +551,7 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string) {
res := g.new_tmp_var()
table_name := g.get_table_name(node.table_expr)
g.sql_table_name = g.table.sym(node.table_expr.typ).name
g.write('${option_name}_Array_Array_orm__Primitive _o$res = orm__Connection_name_table[${expr}._typ]._method_select(${expr}._object, ')
g.write('Option_Array_Array_orm__Primitive _o$res = orm__Connection_name_table[${expr}._typ]._method_select(${expr}._object, ')
g.write('(orm__SelectConfig){')
g.write('.table = _SLIT("$table_name"),')
g.write('.is_count = $node.is_count,')

View File

@ -3,7 +3,7 @@
{1, { .str=(byteptr)("embed.vv"), .len=8, .is_lit=1 }, { .str=(byteptr)("none"), .len=4, .is_lit=1 }, _v_embed_blob_1},
VV_LOCAL_SYMBOL void v__preludes__embed_file__zlib__init(void);
VV_LOCAL_SYMBOL _option_Array_u8 v__preludes__embed_file__zlib__ZLibDecoder_decompress(v__preludes__embed_file__zlib__ZLibDecoder _d1, Array_u8 data) {
VV_LOCAL_SYMBOL Option_Array_u8 v__preludes__embed_file__zlib__ZLibDecoder_decompress(v__preludes__embed_file__zlib__ZLibDecoder _d1, Array_u8 data) {
= compress__zlib__decompress(data);
res.compressed = v__embed_file__find_index_entry_by_path((voidptr)_v_embed_file_index, _SLIT("embed.vv"), _SLIT("zlib"))->data;

View File

@ -23,7 +23,6 @@ const (
'int_literal', 'float_literal', 'bool', 'string', 'map', 'array', 'rune', 'any', 'voidptr']
shallow_equatables = [ast.Kind.i8, .i16, .int, .i64, .u8, .u16, .u32, .u64, .f32, .f64,
.int_literal, .float_literal, .bool, .string]
option_name = '_option'
)
struct SourcemapHelper {
@ -900,7 +899,7 @@ fn (mut g JsGen) expr(node_ ast.Expr) {
// TODO
}
ast.CharLiteral {
if node.val.len_utf8() < node.val.len {
if utf8_str_len(node.val) < node.val.len {
g.write("new rune('$node.val'.charCodeAt())")
} else {
g.write("new u8('$node.val')")
@ -1871,7 +1870,7 @@ fn (mut g JsGen) gen_return_stmt(it ast.Return) {
if fn_return_is_optional {
optional_none := node.exprs[0] is ast.None
ftyp := g.typ(node.types[0])
mut is_regular_option := ftyp == js.option_name
mut is_regular_option := ftyp == 'Option'
if optional_none || is_regular_option || node.types[0] == ast.error_type_idx {
if !isnil(g.fn_decl) && g.fn_decl.is_test {
test_error_var := g.new_tmp_var()
@ -1896,7 +1895,7 @@ fn (mut g JsGen) gen_return_stmt(it ast.Return) {
tmp := g.new_tmp_var()
g.write('const $tmp = new ')
g.writeln('${js.option_name}({});')
g.writeln('Option({});')
g.write('${tmp}.state = new u8(0);')
g.write('${tmp}.data = ')
if it.exprs.len == 1 {

View File

@ -35,7 +35,6 @@ pub fn mark_used(mut table ast.Table, pref &pref.Preferences, ast_files []&ast.F
'tos3',
'isnil',
'opt_ok',
'opt_ok2',
'error',
// utf8_str_visible_length is used by c/str.v
'utf8_str_visible_length',

View File

@ -223,7 +223,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
p.tok.pos())
}
'_fastcall' {
p.note_with_pos('the tag [_fastcall] has been deprecated, it will be an error after 2022-06-01, use `[callconv: fastcall]` instead',
p.note_with_pos('teh tag [_fastcall] has been deprecated, it will be an error after 2022-06-01, use `[callconv: fastcall]` instead',
p.tok.pos())
}
'callconv' {

View File

@ -122,7 +122,7 @@ pub fn new_scanner_file(file_path string, comments_mode CommentsMode, pref &pref
file_path: file_path
file_base: os.base(file_path)
}
s.scan_all_tokens_in_buffer()
s.init_scanner()
return s
}
@ -139,10 +139,14 @@ pub fn new_scanner(text string, comments_mode CommentsMode, pref &pref.Preferenc
file_path: 'internal_memory'
file_base: 'internal_memory'
}
s.scan_all_tokens_in_buffer()
s.init_scanner()
return s
}
fn (mut s Scanner) init_scanner() {
s.scan_all_tokens_in_buffer(s.comments_mode)
}
[unsafe]
pub fn (mut s Scanner) free() {
unsafe {
@ -552,7 +556,7 @@ fn (mut s Scanner) end_of_file() token.Token {
return s.new_eof_token()
}
pub fn (mut s Scanner) scan_all_tokens_in_buffer() {
pub fn (mut s Scanner) scan_all_tokens_in_buffer(mode CommentsMode) {
mut timers := util.get_timers()
timers.measure_pause('PARSE')
util.timing_start('SCAN')
@ -560,9 +564,12 @@ pub fn (mut s Scanner) scan_all_tokens_in_buffer() {
util.timing_measure_cumulative('SCAN')
timers.measure_resume('PARSE')
}
oldmode := s.comments_mode
s.comments_mode = mode
// preallocate space for tokens
s.all_tokens = []token.Token{cap: s.text.len / 3}
s.scan_remaining_text()
s.comments_mode = oldmode
s.tidx = 0
$if debugscanner ? {
for t in s.all_tokens {
@ -584,8 +591,12 @@ pub fn (mut s Scanner) scan_remaining_text() {
}
}
[direct_array_access]
pub fn (mut s Scanner) scan() token.Token {
return s.buffer_scan()
}
[direct_array_access]
pub fn (mut s Scanner) buffer_scan() token.Token {
for {
cidx := s.tidx
s.tidx++
@ -630,6 +641,13 @@ fn (mut s Scanner) text_scan() token.Token {
// That optimization mostly matters for long sections
// of comments and string literals.
for {
// if s.comments_mode == .parse_comments {
// println('\nscan()')
// }
// if s.line_comment != '' {
// s.fgenln('// LC "$s.line_comment"')
// s.line_comment = ''
// }
if s.is_started {
s.pos++
} else {
@ -1139,6 +1157,10 @@ fn (mut s Scanner) ident_string() string {
s.quote = q
}
}
// if s.file_path.contains('string_test') {
// println('\nident_string() at char=${s.text[s.pos].str()}')
// println('linenr=$s.line_nr quote= $qquote ${qquote.str()}')
// }
mut n_cr_chars := 0
mut start := s.pos
start_char := s.text[start]

View File

@ -1,25 +0,0 @@
// Type is a minimal generic wrapper around a single value, used to
// exercise generic reference arguments.
struct Type<T> {
// the wrapped payload
value T
}
// ContainerType holds a reference to a generic Type<T> instance.
struct ContainerType<T> {
// reference (not value) — the point of this test file
typ &Type<T>
}
// contains reports whether the receiver's stored `typ` compares equal to
// the given argument. The debug println of the argument is preserved from
// the original implementation.
// NOTE(review): `instance.typ` is a &Type<T> compared against a Type<T>
// value — presumably V dereferences for the struct comparison; confirm
// the intended equality semantics.
fn (instance &ContainerType<T>) contains(typ Type<T>) bool {
	println(typ)
	// `if cond { return true } else { return false }` collapsed to a
	// direct boolean return — same behavior, idiomatic form.
	return instance.typ == typ
}
// Verifies that a generic method can be called with a reference argument
// (con.typ is &Type<int>) where the parameter is declared by value.
fn test_generic_fn_call_with_reference_argument() {
	container := ContainerType<int>{
		typ: &Type<int>{0}
	}
	found := container.contains(container.typ)
	println(container)
	assert found
}

View File

@ -1,38 +0,0 @@
// Outer wraps an Inner<T>; used to exercise a generic method that
// forwards to a generic method on a nested generic struct.
struct Outer<T> {
mut:
// the wrapped inner struct holding the payload
inner Inner<T>
}
// Inner holds the innermost generic payload.
struct Inner<T> {
val T
}
// next returns 32 when the method-level type parameter S is f32; any
// other instantiation panics at runtime.
// NOTE(review): the `return 0` after panic is unreachable — presumably
// kept so the checker sees a return in every $else branch; confirm it is
// still required by the compiler.
fn (mut i Inner<T>) next<S>(input S) f64 {
$if S is f32 {
return 32
} $else {
panic('"$S.name" is not supported')
return 0
}
}
// next forwards to the nested Inner's next method when S is f32; any
// other instantiation panics at runtime.
// NOTE(review): the `return 0` after panic is unreachable — presumably
// kept to satisfy the checker's every-branch-returns rule; confirm.
fn (mut o Outer<T>) next<S>(input S) f64 {
$if S is f32 {
return o.inner.next(input)
} $else {
panic('"$S.name" is not supported')
return 0
}
}
// Verifies that a generic method on an outer struct correctly dispatches
// to the generic method of its nested inner struct (f32 path returns 32).
fn test_generics_method_on_nested_struct() {
	mut wrapper := Outer<f64>{
		inner: Inner<f64>{
			val: 1.1
		}
	}
	result := wrapper.next(f32(99.0))
	println(result)
	assert result == 32.0
}

View File

@ -1,23 +0,0 @@
// Foo is an empty generic struct; it exists only to host the generic
// `do` method that takes an anonymous-function argument.
struct Foo<T> {
}
// do logs before and after invoking the callback `d` with value `v`,
// then returns `v` unchanged so callers can assert round-tripping.
fn (f Foo<T>) do(name string, d fn (T), v T) T {
	start_msg := 'running ' + name
	println(start_msg)
	d(v)
	done_msg := 'ran ' + name
	println(done_msg)
	return v
}
// Exercises passing anonymous functions as callbacks to a generic method,
// for both string and int instantiations of Foo.
fn test_generics_method_with_generic_anon_fn_argument() {
	str_foo := Foo<string>{}
	str_res := str_foo.do('foo', fn (s string) {
		println('s value is ' + s)
	}, 'bar')
	assert str_res == 'bar'
	int_foo := Foo<int>{}
	int_res := int_foo.do('bar', fn (s int) {
		println('s value is $s')
	}, 22)
	assert int_res == 22
}

View File

@ -1,43 +0,0 @@
module main
import rand
// Sample is a stateful sequence generator: each get_next call mutates the
// implementing struct's state and returns the next value.
interface Sample {
mut:
get_next() int
}
// SampleA counts upward by 1 from an initial state of 0, so its first
// get_next call returns 1.
struct SampleA {
mut:
state int
}
// get_next advances the counter by one and returns the new value.
fn (mut sample SampleA) get_next() int {
	next_state := sample.state + 1
	sample.state = next_state
	return next_state
}
// SampleB counts upward by 2 from an initial state of 1, so its first
// get_next call returns 3.
struct SampleB {
mut:
state int = 1
}
// get_next advances the counter by two and returns the new value.
fn (mut sample SampleB) get_next() int {
	next_state := sample.state + 2
	sample.state = next_state
	return next_state
}
// create_sampler builds a Sample behind the interface type.
// NOTE(review): rand.intn(1) can only yield 0 (and an error is mapped to
// 0 by the `or` block), so the condition is always true and this always
// returns SampleA — the SampleB branch is unreachable. That appears
// deliberate: the test below relies on SampleA's deterministic 1, 2
// sequence while still exercising an optional call inside an
// if-expression condition.
fn create_sampler() Sample {
return if rand.intn(1) or { 0 } == 0 { Sample(SampleA{}) } else { Sample(SampleB{}) }
}
// Verifies that an optional (`or`-handled) call works inside an
// if-expression condition; create_sampler deterministically yields a
// SampleA, whose sequence is 1, 2.
fn test_if_cond_with_optional() {
	mut sampler := create_sampler()
	mut value := sampler.get_next()
	println(value)
	assert value == 1
	value = sampler.get_next()
	println(value)
	assert value == 2
}

View File

@ -73,11 +73,3 @@ fn unsafe_return_error() !int {
// Checks that the error returned by unsafe_return_error() propagates to
// the caller with message 'abc'.
// NOTE(review): if the call unexpectedly succeeds, the `or` block (and
// its assert) is skipped and the test silently passes — confirm that is
// acceptable for this test.
fn test_unsafe_return_error() {
unsafe_return_error() or { assert err.msg() == 'abc' }
}
// return_reference_type returns a reference to an empty string, or an
// error when `path` is empty — mirroring vfopen's error message. Used to
// test returning reference types from result functions.
fn return_reference_type(path string) !&string {
	// guard clause: reject empty paths
	if path.len == 0 {
		return error('vfopen called with ""')
	}
	empty := ''
	return &empty
}

View File

@ -33,7 +33,6 @@ const (
'\t\t\t\t\t\t\t',
'\t\t\t\t\t\t\t\t',
'\t\t\t\t\t\t\t\t\t',
'\t\t\t\t\t\t\t\t\t\t',
]
)
@ -294,9 +293,6 @@ pub fn cached_read_source_file(path string) ?string {
cache = &SourceCache{}
}
$if trace_cached_read_source_file ? {
println('cached_read_source_file $path')
}
if path.len == 0 {
unsafe { cache.sources.free() }
unsafe { free(cache) }
@ -307,15 +303,9 @@ pub fn cached_read_source_file(path string) ?string {
// eprintln('>> cached_read_source_file path: $path')
if res := cache.sources[path] {
// eprintln('>> cached')
$if trace_cached_read_source_file_cached ? {
println('cached_read_source_file cached $path')
}
return res
}
// eprintln('>> not cached | cache.sources.len: $cache.sources.len')
$if trace_cached_read_source_file_not_cached ? {
println('cached_read_source_file not cached $path')
}
raw_text := os.read_file(path) or { return error('failed to open $path') }
res := skip_bom(raw_text)
cache.sources[path] = res