all: fix typos (#19634)

Turiiya 2023-10-23 20:21:15 +02:00 committed by GitHub
parent 407adaa3c1
commit 9051ac8921
108 changed files with 235 additions and 214 deletions

View file

@@ -1,4 +1,4 @@
-name: Bootstraping works
+name: Bootstrapping works
 on:
 schedule:

View file

@@ -154,7 +154,7 @@ endif
 fresh_tcc:
 rm -rf $(TMPTCC)
 ifndef local
-# Check wether a TCC branch exists for the user's system configuration.
+# Check whether a TCC branch exists for the user's system configuration.
 ifneq (,$(findstring thirdparty-$(TCCOS)-$(TCCARCH), $(shell git ls-remote --heads $(TCCREPO) | sed 's/^[a-z0-9]*\trefs.heads.//')))
 $(GITFASTCLONE) --branch thirdparty-$(TCCOS)-$(TCCARCH) $(TCCREPO) $(TMPTCC)
 @$(MAKE) --quiet check_for_working_tcc 2> /dev/null

View file

@@ -59,7 +59,7 @@ fn main() {
 Prints lines periodically (-period_ms), to stdout/stderr (-target).
 After a while (-timeout_ms), exit with (-exitcode).
 This program is useful for platform independent testing
-of child process/standart input/output control.
+of child process/standard input/output control.
 It is used in V's `os` module tests.
 ")
 return

View file

@@ -15,7 +15,7 @@ fn funky()
 - bar
 # test
 ########### deep test
-#a shouldnt have a newline test
+#a shouldn't have a newline test
 | foo bar | yes |
 |-----------|--------|

View file

@@ -12,7 +12,7 @@
 // - bar
 // # test
 // ########### deep test
-// #a shouldnt have a newline
+// #a shouldn't have a newline
 // test
 //
 // | foo bar | yes |

View file

@@ -11,7 +11,7 @@ http://rascunhointeligente.blogspot.com/2010/10/o-algoritmo-de-bellman-ford-um.h
 code by CCS
 */
-const large = 999999 // almost inifinity
+const large = 999999 // almost infinity
 // a structure to represent a weighted edge in graph
 struct EDGE {

View file

@@ -125,7 +125,7 @@ fn dijkstra(g [][]int, s int) {
 push_pq(mut pq_queue, s, 0) // goes s with priority 0
 mut n := g.len
-mut dist := []int{len: n, init: -1} // dist with -1 instead of INIFINITY
+mut dist := []int{len: n, init: -1} // dist with -1 instead of INFINITE
 mut path := []int{len: n, init: -1} // previous node of each shortest path
 // Distance of source vertex from itself is always 0
@@ -133,7 +133,7 @@ fn dijkstra(g [][]int, s int) {
 for pq_queue.len != 0 {
 mut v := departure_priority(mut pq_queue)
-// for all W adjcents vertices of v
+// for all W adjacents vertices of v
 mut adjs_of_v := all_adjacents(g, v) // all_ADJ of v ....
 // print('\n ADJ ${v} is ${adjs_of_v}')
 mut new_dist := 0

View file

@@ -1,5 +1,5 @@
 /*
 Exploring PRIMS,
 The data example is from
 https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/
@@ -117,7 +117,7 @@ fn prim_mst(g [][]int, s int) {
 push_pq(mut pq_queue, s, 0) // goes s with priority 0
 mut n := g.len
-mut dist := []int{len: n, init: -1} // dist with -1 instead of INIFINITY
+mut dist := []int{len: n, init: -1} // dist with -1 instead of INFINITE
 mut path := []int{len: n, init: -1} // previous node of each shortest path
 // Distance of source vertex from itself is always 0
@@ -125,7 +125,7 @@ fn prim_mst(g [][]int, s int) {
 for pq_queue.len != 0 {
 mut v := departure_priority(mut pq_queue)
-// for all W adjcents vertices of v
+// for all W adjacents vertices of v
 mut adjs_of_v := all_adjacents(g, v) // all_ADJ of v ....
 // print('\n :${dist} :: ${pq_queue}')
 // print('\n ADJ ${v} is ${adjs_of_v}')
@@ -209,7 +209,7 @@ fn main() {
 [5, 15, 4, 0],
 ]
-// To find number of coluns
+// To find number of columns
 // mut cols := an_array[0].len
 mut graph := [][]int{} // the graph: adjacency matrix
 // for index, g_value in [graph_01, graph_02, graph_03] {

View file

@@ -278,12 +278,12 @@ fn draw_cube_glsl(app App) {
 tr_matrix := calc_tr_matrices(dw, dh, 0, 0, 2.3)
 gfx.apply_viewport(0, 0, ws.width, ws.height, true)
-// apply the pipline and bindings
+// apply the pipeline and bindings
 gfx.apply_pipeline(app.cube_pip_glsl)
 gfx.apply_bindings(app.cube_bind)
 // Uniforms
-// *** vertex shadeer uniforms ***
+// *** vertex shader uniforms ***
 // passing the view matrix as uniform
 // res is a 4x4 matrix of f32 thus: 4*16 byte of size
 vs_uniforms_range := gfx.Range{
@@ -292,20 +292,18 @@ fn draw_cube_glsl(app App) {
 }
 gfx.apply_uniforms(.vs, C.SLOT_vs_params, &vs_uniforms_range)
-// vfmt off
 // *** fragment shader uniforms ***
 time_ticks := f32(time.ticks() - app.ticks) / 1000
 mut tmp_fs_params := [
 f32(ws.width),
 ws.height * ratio, // x,y resolution to pass to FS
 app.mouse_x, // mouse x
 ws.height - app.mouse_y * 2, // mouse y scaled
 time_ticks, // time as f32
 app.frame_count, // frame count
 0,
-0 // padding bytes , see "fs_params" struct paddings in rt_glsl.h
+0, // padding bytes , see "fs_params" struct paddings in rt_glsl.h
 ]!
-// vfmt on
 fs_uniforms_range := gfx.Range{
 ptr: unsafe { &tmp_fs_params }
 size: usize(sizeof(tmp_fs_params))

View file

@@ -17,7 +17,7 @@ pub fn map_of_indexes[T](array []T) map[T][]int {
 }
 // map_of_counts returns a map, where each key is an unique value in `array`,
-// and each value for that key is how many times that value occures in `array`.
+// and each value for that key is how many times that value occurs in `array`.
 // It can be useful for building histograms of discrete measurements.
 // Example: arrays.map_of_counts([1,2,3,4,4,2,1,4,4]) == {1: 2, 2: 2, 3: 1, 4: 4}
 pub fn map_of_counts[T](array []T) map[T]int {

View file

@@ -203,7 +203,7 @@ fn (mut a array) ensure_cap(required int) {
 }
 // repeat returns a new array with the given array elements repeated given times.
-// `cgen` will replace this with an apropriate call to `repeat_to_depth()`
+// `cgen` will replace this with an appropriate call to `repeat_to_depth()`
 //
 // This is a dummy placeholder that will be overridden by `cgen` with an appropriate
 // call to `repeat_to_depth()`. However the `checker` needs it here.
@@ -627,7 +627,7 @@ fn (a array) clone_static_to_depth(depth int) array {
 }
 // clone returns an independent copy of a given array.
-// this will be overwritten by `cgen` with an apropriate call to `.clone_to_depth()`
+// this will be overwritten by `cgen` with an appropriate call to `.clone_to_depth()`
 // However the `checker` needs it here.
 pub fn (a &array) clone() array {
 return unsafe { a.clone_to_depth(0) }
@@ -827,7 +827,7 @@ pub fn (a array) map(callback fn (voidptr) voidptr) array
 // being compared.
 //
 // Example: array.sort() // will sort the array in ascending order
-// Example: array.sort(b < a) // will sort the array in decending order
+// Example: array.sort(b < a) // will sort the array in descending order
 // Example: array.sort(b.name < a.name) // will sort descending by the .name field
 pub fn (mut a array) sort(callback fn (voidptr, voidptr) int)

View file

@@ -111,7 +111,7 @@ fn (mut a array) ensure_cap_noscan(required int) {
 }
 // repeat returns a new array with the given array elements repeated given times.
-// `cgen` will replace this with an apropriate call to `repeat_to_depth()`
+// `cgen` will replace this with an appropriate call to `repeat_to_depth()`
 // version of `repeat()` that handles multi dimensional arrays
 // `unsafe` to call directly because `depth` is not checked

View file

@@ -39,7 +39,7 @@ fn test_str_methods() {
 assert charptr(1).str() == '0x1'
 }
-fn test_and_precendence() {
+fn test_and_precedence() {
 assert (2 & 0 == 0) == ((2 & 0) == 0)
 assert (2 & 0 != 0) == ((2 & 0) != 0)
 assert (0 & 0 >= 0) == ((0 & 0) >= 0)
@@ -48,7 +48,7 @@ fn test_and_precendence() {
 assert (1 & 2 > 0) == ((1 & 2) > 0)
 }
-fn test_or_precendence() {
+fn test_or_precedence() {
 assert (1 | 0 == 0) == ((1 | 0) == 0)
 assert (1 | 0 != 1) == ((1 | 0) != 1)
 assert (1 | 0 >= 2) == ((1 | 0) >= 2)
@@ -57,7 +57,7 @@ fn test_or_precendence() {
 assert (1 | 0 > 1) == ((1 | 0) > 1)
 }
-fn test_xor_precendence() {
+fn test_xor_precedence() {
 assert (1 ^ 0 == 2) == ((1 ^ 0) == 2)
 assert (1 ^ 0 != 2) == ((1 ^ 0) != 2)
 assert (1 ^ 0 >= 0) == ((1 ^ 0) >= 0)
@@ -66,12 +66,12 @@ fn test_xor_precendence() {
 assert (1 ^ 0 > 1) == ((1 ^ 0) > 1)
 }
-fn test_left_shift_precendence() {
+fn test_left_shift_precedence() {
 assert (2 << 4 | 3) == ((2 << 4) | 3)
 assert (2 << 4 | 3) != (2 << (4 | 3))
 }
-fn test_right_shift_precendence() {
+fn test_right_shift_precedence() {
 assert (256 >> 4 | 3) == ((256 >> 4) | 3)
 assert (256 >> 4 | 3) != (256 >> (4 | 3))
 }

View file

@@ -432,7 +432,7 @@ pub fn (mut s []string) sort() {
 s.sort_with_compare(compare_strings)
 }
-// sort_ignore_case sorts the string array using case insesitive comparing.
+// sort_ignore_case sorts the string array using case insensitive comparing.
 pub fn (mut s []string) sort_ignore_case() {
 s.sort_with_compare(compare_lower_strings)
 }
@@ -484,7 +484,7 @@ pub fn (s string) repeat(count int) string {
 // TODO: Make these functions actually work.
 // strip_margin allows multi-line strings to be formatted in a way that removes white-space
-// before a delimeter. by default `|` is used.
+// before a delimiter. By default `|` is used.
 // Note: the delimiter has to be a byte at this time. That means surrounding
 // the value in ``.
 //

View file

@@ -77,7 +77,7 @@ pub fn (s string) runes() []rune {
 // cstring_to_vstring creates a new V string copy of the C style string,
 // pointed by `s`. This function is most likely what you want to use when
 // working with C style pointers to 0 terminated strings (i.e. `char*`).
-// It is recomended to use it, unless you *do* understand the implications of
+// It is recommended to use it, unless you *do* understand the implications of
 // tos/tos2/tos3/tos4/tos5 in terms of memory management and interactions with
 // -autofree and `[manualfree]`.
 // It will panic, if the pointer `s` is 0.
@@ -745,7 +745,7 @@ pub fn (s string) split_any(delim string) []string {
 mut i := 0
 // check empty source string
 if s.len > 0 {
-// if empty delimiter string using defautl split
+// if empty delimiter string using default split
 if delim.len <= 0 {
 return s.split('')
 }
@@ -813,7 +813,7 @@ pub fn (s string) rsplit(delim string) []string {
 return s.rsplit_nth(delim, 0)
 }
-// split_once devides string into pair of string by `delim`.
+// split_once divides string into pair of string by `delim`.
 // Example:
 // ```v
 // path, ext := 'file.ts.dts'.splice_once('.')?
@@ -832,7 +832,7 @@ pub fn (s string) split_once(delim string) ?(string, string) {
 return result[0], result[1]
 }
-// rsplit_once devides string into pair of string by `delim`.
+// rsplit_once divides string into pair of string by `delim`.
 // Example:
 // ```v
 // path, ext := 'file.ts.dts'.splice_once('.')?
@@ -1592,7 +1592,7 @@ pub fn (s string) trim(cutset string) string {
 return s.substr(left, right)
 }
-// trim_indexes gets the new start and end indicies of a string when any of the characters given in `cutset` were stripped from the start and end of the string. Should be used as an input to `substr()`. If the string contains only the characters in `cutset`, both values returned are zero.
+// trim_indexes gets the new start and end indices of a string when any of the characters given in `cutset` were stripped from the start and end of the string. Should be used as an input to `substr()`. If the string contains only the characters in `cutset`, both values returned are zero.
 // Example: left, right := '-hi-'.trim_indexes('-')
 [direct_array_access]
 pub fn (s string) trim_indexes(cutset string) (int, int) {
@@ -1719,7 +1719,7 @@ fn compare_lower_strings(a &string, b &string) int {
 return compare_strings(&aa, &bb)
 }
-// sort_ignore_case sorts the string array using case insesitive comparing.
+// sort_ignore_case sorts the string array using case insensitive comparing.
 [inline]
 pub fn (mut s []string) sort_ignore_case() {
 s.sort_with_compare(compare_lower_strings)
@@ -2096,7 +2096,7 @@ pub fn (s string) fields() []string {
 }
 // strip_margin allows multi-line strings to be formatted in a way that removes white-space
-// before a delimeter. by default `|` is used.
+// before a delimiter. By default `|` is used.
 // Note: the delimiter has to be a byte at this time. That means surrounding
 // the value in ``.
 //

View file

@@ -147,7 +147,7 @@ pub fn get_str_intp_u32_format(fmt_type StrIntpType, in_width int, in_precision
 return res
 }
-// convert from struct to formated string
+// convert from struct to formatted string
 [manualfree]
 fn (data &StrIntpData) process_str_intp_data(mut sb strings.Builder) {
 x := data.fmt

View file

@@ -326,7 +326,7 @@ pub fn (mut v Point) vartime_multiscalar_mult(scalars []Scalar, points []Point)
 // at each iteration and checking whether there is a nonzero
 // coefficient to look up a multiple of.
 //
-// Skip trying to find the first nonzero coefficent, because
+// Skip trying to find the first nonzero coefficient, because
 // searching might be more work than a few extra doublings.
 // k == i, l == j
 for k := 255; k >= 0; k-- {

View file

@@ -38,7 +38,7 @@ fn (mut v ProjLookupTable) from_p3(q Point) {
 for i := 0; i < 7; i++ {
 // Compute (i+1)*Q as Q + i*Q and convert to a ProjCached
 // This is needlessly complicated because the API has explicit
-// recievers instead of creating stack objects and relying on RVO
+// receivers instead of creating stack objects and relying on RVO
 v.points[i + 1].from_p3(tmp_p3.from_p1(tmp_p1.add(q, v.points[i])))
 }
 }

View file

@@ -4,7 +4,7 @@
 // This is the generic version with no architecture optimizations.
 // In its own file so that an architecture
-// optimized verision can be substituted
+// optimized version can be substituted
 module md5

View file

@@ -54,20 +54,20 @@ fn parse_headers(block string) ?(map[string][]string, string) {
 return map[string][]string{}, block
 }
-// seperate lines instead of iterating over them,
+// separate lines instead of iterating over them,
 // so that we can manually index them
-headers_seperated := headers_str.split_into_lines()
+headers_separated := headers_str.split_into_lines()
 // index the key/value separator ':', otherwise
 // return none because it should exist
 // the initialisation of this function already tells us headers are present
-mut colon_index := headers_seperated[0].index(colon) or { return none }
+mut colon_index := headers_separated[0].index(colon) or { return none }
 mut headers := map[string][]string{}
 mut index := 0
-for index < headers_seperated.len - 1 {
+for index < headers_separated.len - 1 {
-line := headers_seperated[index]
+line := headers_separated[index]
 if line.len == 0 {
 break
 }
@@ -75,10 +75,10 @@ fn parse_headers(block string) ?(map[string][]string, string) {
 key := line[..colon_index].trim_space()
 mut val := line[colon_index + 1..].trim_space()
-for colon_index = 0; index < headers_seperated.len - 1 && colon_index == 0; {
+for colon_index = 0; index < headers_separated.len - 1 && colon_index == 0; {
 index++
-colon_index = headers_seperated[index].index(colon) or {
+colon_index = headers_separated[index].index(colon) or {
-val += headers_seperated[index].trim_space()
+val += headers_separated[index].trim_space()
 0
 }
 }

View file

@@ -3,7 +3,7 @@
 // that can be found in the LICENSE file.
 // This is the generic version with no architecture optimizations.
 // In its own file so that an architecture
-// optimized verision can be substituted
+// optimized version can be substituted
 module sha1
 import math.bits

View file

@@ -4,7 +4,7 @@
 // SHA256 block step.
 // This is the generic version with no architecture optimizations.
 // In its own file so that an architecture
-// optimized verision can be substituted
+// optimized version can be substituted
 module sha256
 import math.bits

View file

@@ -4,7 +4,7 @@
 // SHA512 block step.
 // This is the generic version with no architecture optimizations.
 // In its own file so that an architecture
-// optimized verision can be substituted
+// optimized version can be substituted
 module sha512
 import math.bits

View file

@@ -5,7 +5,7 @@ pub struct Eof {
 Error
 }
-// NotExpected is a generic error that means that we receave a not expecte error.
+// NotExpected is a generic error that means that we receave a not expected error.
 pub struct NotExpected {
 cause string
 code int

View file

@@ -148,7 +148,7 @@ pub fn (c &Cookie) str() string {
 b.write_string('; expires=')
 b.write_string(time_str)
 }
-// TODO: Fix this. Techically a max age of 0 or less should be 0
+// TODO: Fix this. Technically a max age of 0 or less should be 0
 // We need a way to not have a max age.
 if c.max_age > 0 {
 b.write_string('; Max-Age=')

View file

@@ -398,7 +398,7 @@ fn new_tcp_socket(family AddrFamily) !TcpSocket {
 }
 // TODO(emily):
-// we shouldnt be using ioctlsocket in the 21st century
+// we shouldn't be using ioctlsocket in the 21st century
 // use the non-blocking socket option instead please :)
 // TODO(emily):

View file

@@ -5,7 +5,7 @@ pub struct Eof {
 Error
 }
-// NotExpected is a generic error that means that we receave a not expecte error.
+// NotExpected is a generic error that means that we receave a not expected error.
 pub struct NotExpected {
 cause string
 code int
@@ -841,7 +841,7 @@ pub enum SeekMode {
 // .start -> the origin is the start of the file
 // .current -> the current position/cursor in the file
 // .end -> the end of the file
-// If the file is not seek-able, or an error occures, the error will
+// If the file is not seek-able, or an error occurs, the error will
 // be returned to the caller.
 // A successful call to the fseek() function clears the end-of-file
 // indicator for the file.

View file

@@ -151,7 +151,7 @@ fn test_read_eof_last_read_partial_buffer_fill() {
 // test_read_eof_last_read_full_buffer_fill tests that when reading a file the
 // end-of-file is detected and results in a none error being returned. This test
-// simulates file reading where the end-of-file is reached at the beinning of an
+// simulates file reading where the end-of-file is reached at the beginning of an
 // fread that returns no data.
 fn test_read_eof_last_read_full_buffer_fill() {
 mut f := os.open_file(tfile, 'w')!

View file

@@ -203,7 +203,7 @@ fn clean_path(path string) string {
 }
 continue
 }
-// turn foward slash into a back slash on a Windows system
+// turn forward slash into a back slash on a Windows system
 $if windows {
 if curr == os.fslash {
 sb.write_u8(os.bslash)

View file

@@ -1011,7 +1011,7 @@ pub fn chown(path string, owner int, group int) ! {
 }
 // open_append tries to open a file from a given path.
-// If successfull, it and returns a `File` for appending.
+// If successful, it and returns a `File` for appending.
 pub fn open_append(path string) !File {
 mut file := File{}
 $if windows {
@@ -1036,7 +1036,7 @@ pub fn open_append(path string) !File {
 // execvp - loads and executes a new child process, *in place* of the current process.
 // The child process executable is located in `cmdpath`.
 // The arguments, that will be passed to it are in `args`.
-// Note: this function will NOT return when successfull, since
+// Note: this function will NOT return when successful, since
 // the child process will take control over execution.
 pub fn execvp(cmdpath string, cmdargs []string) ! {
 mut cargs := []&char{}
@@ -1063,7 +1063,7 @@ pub fn execvp(cmdpath string, cmdargs []string) ! {
 // The child process executable is located in `cmdpath`.
 // The arguments, that will be passed to it are in `args`.
 // You can pass environment variables to through `envs`.
-// Note: this function will NOT return when successfull, since
+// Note: this function will NOT return when successful, since
 // the child process will take control over execution.
 pub fn execve(cmdpath string, cmdargs []string, envs []string) ! {
 mut cargv := []&char{}

View file

@@ -55,8 +55,8 @@ fn executable_fallback() string {
 }
 }
 if !is_abs_path(exepath) {
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
+other_separator := if path_separator == '/' { '\\' } else { '/' }
-rexepath := exepath.replace(other_seperator, path_separator)
+rexepath := exepath.replace(other_separator, path_separator)
 if rexepath.contains(path_separator) {
 exepath = join_path_single(os.wd_at_startup, exepath)
 } else {
@@ -227,7 +227,7 @@ pub fn is_dir_empty(path string) bool {
 return res
 }
-// file_ext will return the part after the last occurence of `.` in `path`.
+// file_ext will return the part after the last occurrence of `.` in `path`.
 // The `.` is included.
 // Examples:
 // ```v
@@ -256,8 +256,8 @@ pub fn dir(opath string) string {
 if opath == '' {
 return '.'
 }
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
+other_separator := if path_separator == '/' { '\\' } else { '/' }
-path := opath.replace(other_seperator, path_separator)
+path := opath.replace(other_separator, path_separator)
 pos := path.last_index(path_separator) or { return '.' }
 if pos == 0 && path_separator == '/' {
 return '/'
@@ -273,8 +273,8 @@ pub fn base(opath string) string {
 if opath == '' {
 return '.'
 }
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
+other_separator := if path_separator == '/' { '\\' } else { '/' }
-path := opath.replace(other_seperator, path_separator)
+path := opath.replace(other_separator, path_separator)
 if path == path_separator {
 return path_separator
 }
@@ -287,11 +287,11 @@ pub fn base(opath string) string {
 return path[pos + 1..]
 }
-// file_name will return all characters found after the last occurence of `path_separator`.
+// file_name will return all characters found after the last occurrence of `path_separator`.
 // file extension is included.
 pub fn file_name(opath string) string {
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
+other_separator := if path_separator == '/' { '\\' } else { '/' }
-path := opath.replace(other_seperator, path_separator)
+path := opath.replace(other_separator, path_separator)
 return path.all_after_last(path_separator)
 }
@@ -610,7 +610,7 @@ fn impl_walk_ext(path string, ext string, mut out []string) {
 }
 // walk traverses the given directory `path`.
-// When a file is encountred, it will call the callback `f` with current file as argument.
+// When a file is encountered, it will call the callback `f` with current file as argument.
 // Note: walk can be called even for deeply nested folders,
 // since it does not recurse, but processes them iteratively.
 pub fn walk(path string, f fn (string)) {
@@ -645,7 +645,7 @@ pub fn walk(path string, f fn (string)) {
 pub type FnWalkContextCB = fn (voidptr, string)
 // walk_with_context traverses the given directory `path`.
-// For each encountred file *and* directory, it will call your `fcb` callback,
+// For each encountered file *and* directory, it will call your `fcb` callback,
 // passing it the arbitrary `context` in its first parameter,
 // and the path to the file in its second parameter.
 // Note: walk_with_context can be called even for deeply nested folders,
@@ -695,8 +695,8 @@ pub struct MkdirParams {
 // mkdir_all will create a valid full path of all directories given in `path`.
 pub fn mkdir_all(opath string, params MkdirParams) ! {
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
+other_separator := if path_separator == '/' { '\\' } else { '/' }
-path := opath.replace(other_seperator, path_separator)
+path := opath.replace(other_separator, path_separator)
 mut p := if path.starts_with(path_separator) { path_separator } else { '' }
 path_parts := path.trim_left(path_separator).split(path_separator)
 for subdir in path_parts {

View file

@@ -67,7 +67,7 @@ pub fn (a &Asset) get_length() int {
 fn C.AAsset_getLength64(&C.AAsset) i64
 // get_length_64 returns the total size of the asset data using
-// a 64-bit number insted of 32-bit as `get_length`.
+// a 64-bit number instead of 32-bit as `get_length`.
 pub fn (a &Asset) get_length_64() i64 {
 return C.AAsset_getLength64(a)
 }

View file

@@ -764,7 +764,7 @@ fn test_posix_set_bit() {
 }
 mode = u32(s.st_mode) & 0o0777
 assert mode == 0o0777
-// Note: setting the sticky bit is platform dependend
+// Note: setting the sticky bit is platform dependent
 // `chmod -s -g -t`
 os.posix_set_permission_bit(fpath, os.s_isuid, false)
 os.posix_set_permission_bit(fpath, os.s_isgid, false)

View file

@@ -35,7 +35,7 @@ fn (mut p Process) unix_spawn_process() int {
 C.setpgid(0, 0)
 }
 if p.use_stdio_ctl {
-// Redirect the child standart in/out/err to the pipes that
+// Redirect the child standard in/out/err to the pipes that
 // were created in the parent.
 // Close the parent's pipe fds, the child do not need them:
 fd_close(pipeset[1])

View file

@@ -1004,7 +1004,7 @@ fn (mut re RE) impl_compile(in_txt string) (int, int) {
 // manage negation groups
 if negate_flag == true {
 re.prog[pc].group_neg = true
-re.prog[pc].rep_min = 0 // may be not catched, but it is ok
+re.prog[pc].rep_min = 0 // may not be caught, but it is ok
 }
 // set the group id

View file

@@ -1,7 +1,7 @@
 module stdatomic
 // Implement the atomic operations. For now TCC does not support the atomic
-// versions on nix so it uses locks to simulate the same behavor.
+// versions on nix so it uses locks to simulate the same behavior.
 //
 // On windows tcc can simulate with other atomic operations.
 //

View file

@@ -1,7 +1,7 @@
 // tests that use and test private functions
 module time
-// test the old behavor is same as new, the unix time should always be local time
+// test the old behavior is same as new, the unix time should always be local time
 fn test_new_is_same_as_old_for_all_platforms() {
 t := C.time(0)
 tm := C.localtime(&t)

View file

@@ -224,7 +224,7 @@ pub fn (t Time) relative() string {
 return '${prefix}${y} years${suffix}'
 }
-// relative_short returns a string saying how long ago a time occured as follows:
+// relative_short returns a string saying how long ago a time occurred as follows:
 // 0-30 seconds: `"now"`; 30-60 seconds: `"1m"`; anything else is rounded to the
 // nearest minute, hour, day, or year
 //

View file

@@ -21,7 +21,7 @@ struct C.tm {
 fn C.timegm(&C.tm) C.time_t
-// prefering localtime_r over the localtime because
+// preferring localtime_r over the localtime because
 // from docs localtime_r is thread safe,
 fn C.localtime_r(t &C.time_t, tm &C.tm)

View file

@@ -128,7 +128,7 @@ pub fn (t Time) local() Time {
 // win_now calculates current time using winapi to get higher resolution on windows
 // GetSystemTimeAsFileTime is used and converted to local time. It can resolve time
-// down to millisecond. Other more precice methods can be implemented in the future
+// down to millisecond. Other more precise methods can be implemented in the future
 fn win_now() Time {
 ft_utc := C._FILETIME{}
 C.GetSystemTimeAsFileTime(&ft_utc)
@@ -152,7 +152,7 @@ fn win_now() Time {
 // win_utc calculates current time using winapi to get higher resolution on windows
 // GetSystemTimeAsFileTime is used. It can resolve time down to millisecond
-// other more precice methods can be implemented in the future
+// other more precise methods can be implemented in the future
 fn win_utc() Time {
 ft_utc := C._FILETIME{}
 C.GetSystemTimeAsFileTime(&ft_utc)

View file

@@ -235,8 +235,8 @@ pub fn (m map[string]Any) to_inline_toml() string {
 mut i := 1
 for k, v in m {
 key := if k.contains(' ') { '"${k}"' } else { k }
-delimeter := if i < m.len { ',' } else { '' }
+delimiter := if i < m.len { ',' } else { '' }
-toml_text += ' ${key} = ${v.to_toml()}${delimeter}'
+toml_text += ' ${key} = ${v.to_toml()}${delimiter}'
 i++
 }
 return toml_text + ' }'

View file

@@ -559,11 +559,11 @@ pub fn (c Checker) check_comment(comment ast.Comment) ! {
 break
 }
 ch_byte := u8(ch)
-// Check for carrige return
+// Check for carriage return
 if ch_byte == 0x0D {
 st := s.state()
 return error(@MOD + '.' + @STRUCT + '.' + @FN +
-' carrige return character `${ch_byte.hex()}` is not allowed in comments (${st.line_nr},${st.col}).')
+' carriage return character `${ch_byte.hex()}` is not allowed in comments (${st.line_nr},${st.col}).')
 }
 // Check for control characters (allow TAB)
 if util.is_illegal_ascii_control_character(ch_byte) {

View file

@@ -13,7 +13,7 @@ pub const (
 )
 // Scanner contains the necessary fields for the state of the scan process.
-// the task the scanner does is also refered to as "lexing" or "tokenizing".
+// the task the scanner does is also referred to as "lexing" or "tokenizing".
 // The Scanner methods are based on much of the work in `vlib/strings/textscanner`.
 pub struct Scanner {
 pub:

View file

@@ -178,7 +178,7 @@ fn test_iarna_toml_spec_tests() {
 "'import sys, yaml, json; json.dump(yaml.load(sys.stdin, Loader=yaml.FullLoader), sys.stdout, indent=4)'",
 '<', iarna_yaml_path, '>', converted_json_path]) or {
 contents := os.read_file(iarna_yaml_path)!
-// NOTE there's known errors with the python convertion method.
+// NOTE there's known errors with the python convention method.
 // For now we just ignore them as it's a broken tool - not a wrong test-case.
 // Uncomment this print to see/check them.
 // eprintln(err.msg() + '\n$contents')

View file

@@ -7,7 +7,7 @@ module token
 pub struct Pos {
 pub:
 len int // length of the literal in the source
-line_nr int // the line number in the source where the token occured
+line_nr int // the line number in the source where the token occurred
 pos int // the position of the token in scanner text
-col int // the column in the source where the token occured
+col int // the column in the source where the token occurred
 }

View file

@@ -8,8 +8,8 @@ pub struct Token {
 pub:
 kind Kind // the token number/enum; for quick comparisons
 lit string // literal representation of the token
-col int // the column in the source where the token occured
+col int // the column in the source where the token occurred
-line_nr int // the line number in the source where the token occured
+line_nr int // the line number in the source where the token occurred
 pos int // the position of the token in scanner text
 len int // length of the literal
 }
@@ -34,7 +34,7 @@ pub enum Kind {
 lsbr // [
 rsbr // ]
 nl // \n linefeed / newline character
-cr // \r carrige return
+cr // \r carriage return
 tab // \t character
 whitespace // ` `
 period // .

View file

@@ -161,7 +161,7 @@ fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
 mut right := if i < node.right.len { node.right[i] } else { node.right[0] }
 mut right_type := node.right_types[i]
 if mut right is ast.Ident {
-// resolve shared right vairable
+// resolve shared right variable
 if right_type.has_flag(.shared_f) {
 if c.fail_if_unreadable(right, right_type, 'right-hand side of assignment') {
 return

View file

@@ -1219,7 +1219,7 @@ fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool) ast.
 // ... but 2. disallow passing non-pointers - that is very rarely what the user wanted,
 // it can lead to codegen errors (except for 'magic' functions like `json.encode` that,
 // the compiler has special codegen support for), so it should be opt in, that is it
-// shoould require an explicit voidptr(x) cast (and probably unsafe{} ?) .
+// should require an explicit voidptr(x) cast (and probably unsafe{} ?) .
 if call_arg.typ != param.typ && (param.typ == ast.voidptr_type
 || final_param_sym.idx == ast.voidptr_type_idx
 || param.typ == ast.nil_type || final_param_sym.idx == ast.nil_type_idx)

View file

@@ -3,5 +3,5 @@ vlib/v/checker/tests/infix_sumtype_in_array_err.vv:15:7: error: left operand to
 14 | RuneAlias {
 15 | if x in whitespace {
 | ~~~~~~~~~~~~~~~
-16 | // doing `if x as RuneAlias in whitepsace` here
+16 | // doing `if x as RuneAlias in whitespace` here
 17 | // works but it should be doing that automatically

View file

@@ -13,7 +13,7 @@ fn main() {
 match x {
 RuneAlias {
 if x in whitespace {
-// doing `if x as RuneAlias in whitepsace` here
+// doing `if x as RuneAlias in whitespace` here
 // works but it should be doing that automatically
 // since I'm inside the RuneAlias match condition.
 }

View file

@@ -83,7 +83,7 @@ pub fn (mut e Eval) eval(mut files []&ast.File) {
 e.run_func(e.mods['main']['main'] or { ast.FnDecl{} } as ast.FnDecl)
 }
-// first arg is reciever (if method)
+// first arg is receiver (if method)
 pub fn (mut e Eval) run_func(func ast.FnDecl, _args ...Object) {
 e.back_trace << EvalTrace{func.idx, func.source_file.idx, func.pos.line_nr}
 old_mod := e.cur_mod

View file

@@ -85,10 +85,10 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
 // // }
 // }
 // // println((e.local_vars['s'].val as string).str == voidptr_args[1])
-// println('helo?$voidptr_args')
+// println('hello?$voidptr_args')
 // // println((byteptr(voidptr_args[1])[0]))
 // x := strconv.v_sprintf(args[0] as string, ...voidptr_args)
-// // println('helo!')
+// // println('hello!')
 // // println(x.len)
 // y := C.write(1, x.str, x.len)
 // println('aft')

View file

@@ -5,7 +5,7 @@ fn grouped_cond_single_line() {
 }
 fn unwrap_grouped_conds() {
-// ...but sometimes they have to be splitted
+// ...but sometimes they have to be split
 _ := one_condition && before_condition && (conds_inside_paren
 || are_kept_together || if_possible || but_this_is_really_too_much
 || for_one_line)

View file

@@ -4,7 +4,7 @@ fn grouped_cond_single_line() {
 }
 fn unwrap_grouped_conds() {
-// ...but sometimes they have to be splitted
+// ...but sometimes they have to be split
 _ := one_condition && before_condition && (conds_inside_paren || are_kept_together || if_possible || but_this_is_really_too_much || for_one_line)
 _ := (also_inside_parens || just_as_above || but_this_is_also_more || than_a_single_line_could_fit) && end_cond
 fields = fields.filter((it.typ in [string_type, int_type, bool_type] || c.table.types[int(it.typ)].kind == .struct_) && !it.attrs.contains('skip'))

View file

@@ -82,17 +82,17 @@ mut:
 sorted_global_const_names []string
 file &ast.File = unsafe { nil }
 table &ast.Table = unsafe { nil }
-unique_file_path_hash u64 // a hash of file.path, used for making auxilary fn generation unique (like `compare_xyz`)
+unique_file_path_hash u64 // a hash of file.path, used for making auxiliary fn generation unique (like `compare_xyz`)
 fn_decl &ast.FnDecl = unsafe { nil } // pointer to the FnDecl we are currently inside otherwise 0
 last_fn_c_name string
 tmp_count int // counter for unique tmp vars (_tmp1, _tmp2 etc); resets at the start of each fn.
 tmp_count_af int // a separate tmp var counter for autofree fn calls
 tmp_count_declarations int // counter for unique tmp names (_d1, _d2 etc); does NOT reset, used for C declarations
-global_tmp_count int // like tmp_count but global and not resetted in each function
+global_tmp_count int // like tmp_count but global and not reset in each function
 discard_or_result bool // do not safe last ExprStmt of `or` block in tmp variable to defer ongoing expr usage
 is_direct_array_access bool // inside a `[direct_array_access fn a() {}` function
 is_assign_lhs bool // inside left part of assign expr (for array_set(), etc)
-is_void_expr_stmt bool // ExprStmt whos result is discarded
+is_void_expr_stmt bool // ExprStmt whose result is discarded
 is_arraymap_set bool // map or array set value state
 is_amp bool // for `&Foo{}` to merge PrefixExpr `&` and StructInit `Foo{}`; also for `&u8(0)` etc
 is_sql bool // Inside `sql db{}` statement, generating sql instead of C (e.g. `and` instead of `&&` etc)
@@ -1067,9 +1067,9 @@ fn (mut g Gen) expr_string_surround(prepend string, expr ast.Expr, append string
 return g.out.cut_to(pos)
 }
-// TODO this really shouldnt be seperate from typ
+// TODO this really shouldn't be separate from typ
 // but I(emily) would rather have this generation
-// all unified in one place so that it doesnt break
+// all unified in one place so that it doesn't break
 // if one location changes
 fn (mut g Gen) option_type_name(t ast.Type) (string, string) {
 mut base := g.base_type(t)
@@ -2591,7 +2591,7 @@ fn cescape_nonascii(original string) string {
 return res
 }
-// cestring returns a V string, properly escaped for embeddeding in a C string literal.
+// cestring returns a V string, properly escaped for embedding in a C string literal.
 fn cestring(s string) string {
 return s.replace('\\', '\\\\').replace('"', "'")
 }
@@ -2632,7 +2632,7 @@ fn (mut g Gen) asm_stmt(stmt ast.AsmStmt) {
 } else {
 g.write(' ')
 }
-// swap destionation and operands for att syntax
+// swap destination and operands for att syntax
 if template.args.len != 0 && !template.is_directive {
 template.args.prepend(template.args.last())
 template.args.delete(template.args.len - 1)
@@ -5045,7 +5045,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
 g.write('(${styp}){')
 mut arg_idx := 0
 for i, expr in node.exprs {
-// Check if we are dealing with a multi return and handle it seperately
+// Check if we are dealing with a multi return and handle it separately
 if g.expr_is_multi_return_call(expr) {
 call_expr := expr as ast.CallExpr
 expr_sym := g.table.sym(call_expr.return_type)
@@ -5803,7 +5803,7 @@ fn (mut g Gen) write_init_function() {
 g.writeln('\t_closure_mtx_init();')
 }
-// reflection bootstraping
+// reflection bootstrapping
 if g.has_reflection {
 if var := g.global_const_defs['g_reflection'] {
 g.writeln(var.init)
@@ -6185,7 +6185,7 @@ fn (mut g Gen) sort_structs(typesa []&ast.TypeSymbol) []&ast.TypeSymbol {
 // sort graph
 dep_graph_sorted := dep_graph.resolve()
 if !dep_graph_sorted.acyclic {
-// this should no longer be called since it's catched in the parser
+// this should no longer be called since it's in the parser
 // TODO: should it be removed?
 verror('cgen.sort_structs(): the following structs form a dependency cycle:\n' +
 dep_graph_sorted.display_cycles() +
@@ -6257,7 +6257,7 @@ fn (mut g Gen) gen_or_block_stmts(cvar_name string, cast_typ string, stmts []ast
 g.indent--
 }
-// If user is accessing the return value eg. in assigment, pass the variable name.
+// If user is accessing the return value eg. in assignment, pass the variable name.
 // If the user is not using the option return value. We need to pass a temp var
 // to access its fields (`.ok`, `.error` etc)
 // `os.cp(...)` => `Option bool tmp = os__cp(...); if (tmp.state != 0) { ... }`
@@ -6768,7 +6768,7 @@ fn (mut g Gen) interface_table() string {
 cast_struct.writeln('\t\t.${cname} = (${field_styp}*)((char*)x + __offsetof_ptr(x, ${cctype}, ${cname})),')
 } else if st_sym.kind == .array
 && field.name in ['element_size', 'data', 'offset', 'len', 'cap', 'flags'] {
-// Manaully checking, we already knows array contains above fields
+// Manually checking, we already knows array contains above fields
 cast_struct.writeln('\t\t.${cname} = (${field_styp}*)((char*)x + __offsetof_ptr(x, ${cctype}, ${cname})),')
 } else {
 // the field is embedded in another struct

View file

@@ -2018,7 +2018,7 @@ fn (mut g Gen) call_args(node ast.CallExpr) {
 }
 }
 }
-// only v variadic, C variadic args will be appeneded like normal args
+// only v variadic, C variadic args will be appended like normal args
 is_variadic := expected_types.len > 0 && expected_types.last().has_flag(.variadic)
 && node.language == .v
 for i, arg in args {


@@ -603,7 +603,7 @@ fn (mut g Gen) infix_expr_in_op(node ast.InfixExpr) {
} }
// infix_expr_in_optimization optimizes `<var> in <array>` expressions, // infix_expr_in_optimization optimizes `<var> in <array>` expressions,
// and transform them in a serie of equality comparison // and transforms them into a series of equality comparisons
// i.e. `a in [1,2,3]` => `a == 1 || a == 2 || a == 3` // i.e. `a in [1,2,3]` => `a == 1 || a == 2 || a == 3`
fn (mut g Gen) infix_expr_in_optimization(left ast.Expr, right ast.ArrayInit) { fn (mut g Gen) infix_expr_in_optimization(left ast.Expr, right ast.ArrayInit) {
mut elem_sym := g.table.sym(right.elem_type) mut elem_sym := g.table.sym(right.elem_type)
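As a quick illustration of the optimization described in the comment above, a sketch of what the lowering is equivalent to at the source level (not actual compiler output):

fn is_small_prime(x int) bool {
	// `x in [2, 3, 5, 7]` is turned into the equality chain below
	return x == 2 || x == 3 || x == 5 || x == 7
}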


@@ -202,7 +202,7 @@ fn (mut g Gen) gen_reflection_sym_info(tsym ast.TypeSymbol) string {
} }
} }
// gen_reflection_data generates code to initilized V reflection metadata // gen_reflection_data generates code to initialize V reflection metadata
fn (mut g Gen) gen_reflection_data() { fn (mut g Gen) gen_reflection_data() {
// modules declaration // modules declaration
for mod_name in g.table.modules { for mod_name in g.table.modules {


@@ -48,7 +48,7 @@ fn (mut g Gen) get_default_fmt(ftyp ast.Type, typ ast.Type) u8 {
fn (mut g Gen) str_format(node ast.StringInterLiteral, i int, fmts []u8) (u64, string) { fn (mut g Gen) str_format(node ast.StringInterLiteral, i int, fmts []u8) (u64, string) {
mut base := 0 // numeric base mut base := 0 // numeric base
mut upper_case := false // set upercase for the result string mut upper_case := false // set uppercase for the result string
mut typ := g.unwrap_generic(node.expr_types[i]) mut typ := g.unwrap_generic(node.expr_types[i])
if node.exprs[i].is_auto_deref_var() { if node.exprs[i].is_auto_deref_var() {
typ = typ.deref() typ = typ.deref()
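For context on what `base` and `upper_case` end up controlling, a small usage-level sketch, assuming the usual V interpolation format specifiers `x`/`X`:

fn main() {
	n := 255
	println('${n:x}') // expected: ff (lower-case hexadecimal)
	println('${n:X}') // expected: FF (upper-case hexadecimal)
}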


@@ -1,4 +1,4 @@
type ParseRes = Result<[]Token, ParseErr> type ParseRes = Result[[]Token, ParseErr]
struct Token {} struct Token {}
@@ -26,7 +26,7 @@ fn main() {
r := Opt[ParseRes](None[ParseRes]{}) r := Opt[ParseRes](None[ParseRes]{})
match r { match r {
Some[ParseRes] { Some[ParseRes] {
// make possible cast fo the same type! // make it possible to cast to the same type!
rx := Result[[]Token, ParseErr](r.value) rx := Result[[]Token, ParseErr](r.value)
} }
None[ParseRes] {} None[ParseRes] {}


@@ -64,7 +64,7 @@ struct Type {
typ ast.Type [required] typ ast.Type [required]
sym &ast.TypeSymbol [required] sym &ast.TypeSymbol [required]
// unaliased is `typ` once aliases have been resolved // unaliased is `typ` once aliases have been resolved
// it may not contain informations such as flags and nr_muls // it may not contain information such as flags and nr_muls
unaliased ast.Type [required] unaliased ast.Type [required]
unaliased_sym &ast.TypeSymbol [required] unaliased_sym &ast.TypeSymbol [required]
} }


@@ -3507,7 +3507,7 @@ fn (mut g JsGen) gen_typeof_expr(it ast.TypeOf) {
} }
fn (mut g JsGen) gen_cast_tmp(tmp string, typ_ ast.Type) { fn (mut g JsGen) gen_cast_tmp(tmp string, typ_ ast.Type) {
// Skip cast if type is the same as the parrent caster // Skip cast if type is the same as the parent caster
tsym := g.table.final_sym(typ_) tsym := g.table.final_sym(typ_)
if !g.pref.output_es5 && (tsym.kind == .i64 || tsym.kind == .u64) { if !g.pref.output_es5 && (tsym.kind == .i64 || tsym.kind == .u64) {
g.write('new ') g.write('new ')
@@ -3586,7 +3586,7 @@ fn (mut g JsGen) gen_type_cast_expr(it ast.CastExpr) {
return return
} }
// Skip cast if type is the same as the parrent caster // Skip cast if type is the same as the parent caster
tsym := to_type_sym tsym := to_type_sym
if tsym.kind == .sum_type { if tsym.kind == .sum_type {
g.expr(it.expr) g.expr(it.expr)
@@ -3651,7 +3651,7 @@ fn (mut g JsGen) gen_integer_literal_expr(it ast.IntegerLiteral) {
} }
} }
// Skip cast if type is the same as the parrent caster // Skip cast if type is the same as the parent caster
if g.cast_stack.len > 0 { if g.cast_stack.len > 0 {
if g.cast_stack.last() in ast.integer_type_idxs { if g.cast_stack.last() in ast.integer_type_idxs {
g.write('new ') g.write('new ')
@@ -3688,7 +3688,7 @@ fn (mut g JsGen) gen_float_literal_expr(it ast.FloatLiteral) {
} }
} }
// Skip cast if type is the same as the parrent caster // Skip cast if type is the same as the parent caster
if g.cast_stack.len > 0 { if g.cast_stack.len > 0 {
if g.cast_stack.last() in ast.float_type_idxs { if g.cast_stack.last() in ast.float_type_idxs {
g.write('new f32(${it.val})') g.write('new f32(${it.val})')


@@ -30,7 +30,7 @@ fn test_example_compilation() {
println('activate -sourcemap creation') println('activate -sourcemap creation')
v_options_file += ' -sourcemap' // activate sourcemap generation v_options_file += ' -sourcemap' // activate sourcemap generation
println('add node option: --enable-source-maps') // requieres node >=12.12.0 println('add node option: --enable-source-maps') // requires node >=12.12.0
node_options_file += ' --enable-source-maps' // activate sourcemap generation node_options_file += ' --enable-source-maps' // activate sourcemap generation
} }
jsfile := os.join_path_single(output_dir, '${file}.js') jsfile := os.join_path_single(output_dir, '${file}.js')


@@ -7,7 +7,7 @@ struct Type {
typ ast.Type [required] typ ast.Type [required]
sym &ast.TypeSymbol [required] = unsafe { nil } sym &ast.TypeSymbol [required] = unsafe { nil }
// unaliased is `typ` once aliases have been resolved // unaliased is `typ` once aliases have been resolved
// it may not contain informations such as flags and nr_muls // it may not contain information such as flags and nr_muls
unaliased ast.Type [required] unaliased ast.Type [required]
unaliased_sym &ast.TypeSymbol [required] = unsafe { nil } unaliased_sym &ast.TypeSymbol [required] = unsafe { nil }
} }


@@ -925,7 +925,7 @@ fn (mut c Amd64) mov_extend_reg(a Amd64Register, b Amd64Register, typ ast.Type)
fn (mut c Amd64) call_addr_at(addr i32, at i64) i64 { fn (mut c Amd64) call_addr_at(addr i32, at i64) i64 {
// Need to calculate the difference between current position (position after the e8 call) // Need to calculate the difference between current position (position after the e8 call)
// and the function to call.f // and the function to call.f
// +5 is to get the posistion "e8 xx xx xx xx" // +5 is to get the position "e8 xx xx xx xx"
// Not sure about the -1. // Not sure about the -1.
return 0xffffffff - (at + 5 - i64(addr) - 1) return 0xffffffff - (at + 5 - i64(addr) - 1)
} }
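The arithmetic alluded to here is the standard x86-64 near-call encoding: `e8` is followed by a 32-bit displacement measured from the end of the 5-byte instruction. A hedged sketch with illustrative names (not the generator's actual helper):

// displacement for a near call emitted at `call_site` that should reach `target`;
// the CPU adds it to the address of the *next* instruction, i.e. call_site + 5
fn rel32_for_call(call_site i64, target i64) i64 {
	return target - (call_site + 5)
}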
@@ -3466,7 +3466,7 @@ fn (mut c Amd64) convert_int_to_string(a Register, b Register) {
} }
c.g.println('; jump to label ${skip_zero_label}') c.g.println('; jump to label ${skip_zero_label}')
// handle zeros seperately // handle zeros separately
// c.mov_int_to_var(LocalVar{buffer, ast.u8_type_idx, ''}, '0'[0]) // c.mov_int_to_var(LocalVar{buffer, ast.u8_type_idx, ''}, '0'[0])
c.g.write8(0xc6) c.g.write8(0xc6)


@@ -19,7 +19,7 @@ mut:
calls []i64 // call addresses calls []i64 // call addresses
} }
pub const inline_builtins = ['assert', 'print', 'eprint', 'println', 'eprintln', 'exit', 'C.syscall'] // classic V builtin functios accessible to the user get inlined pub const inline_builtins = ['assert', 'print', 'eprint', 'println', 'eprintln', 'exit', 'C.syscall'] // classic V builtin functions accessible to the user get inlined
pub fn (mut g Gen) init_builtins() { pub fn (mut g Gen) init_builtins() {
g.builtins = { g.builtins = {


@@ -375,7 +375,7 @@ mut:
typ i32 // Type of the node typ i32 // Type of the node
name []u8 // Name string of the note. name []u8 // Name string of the note.
desc []u8 // Descripition string of the node, must be aligned by 4 bytes desc []u8 // Description string of the node, must be aligned by 4 bytes
} }
fn (mut g Gen) create_note_section(typ i32, name string, desc string) NoteSection { fn (mut g Gen) create_note_section(typ i32, name string, desc string) NoteSection {


@@ -348,7 +348,7 @@ fn (mut g Gen) gen_print_from_expr(expr ast.Expr, typ ast.Type, name string) {
} }
ast.BoolLiteral { ast.BoolLiteral {
// register 'true' and 'false' strings // g.expr(expr) // register 'true' and 'false' strings // g.expr(expr)
// XXX mov64 shuoldnt be used for addressing // XXX mov64 shouldn't be used for addressing
nl := if newline { '\n' } else { '' } nl := if newline { '\n' } else { '' }
if expr.val { if expr.val {


@@ -83,7 +83,7 @@ mut:
address_size() i32 address_size() i32
adr(r Arm64Register, delta i32) // Note: Temporary! adr(r Arm64Register, delta i32) // Note: Temporary!
allocate_var(name string, size i32, initial_val i32) i32 allocate_var(name string, size i32, initial_val i32) i32
assign_stmt(node ast.AssignStmt) // TODO: make platform-independant assign_stmt(node ast.AssignStmt) // TODO: make platform-independent
builtin_decl(builtin BuiltinFn) builtin_decl(builtin BuiltinFn)
call_addr_at(addr i32, at i64) i64 call_addr_at(addr i32, at i64) i64
call_builtin(name Builtin) i64 call_builtin(name Builtin) i64
@@ -107,7 +107,7 @@ mut:
gen_print(s string, fd i32) gen_print(s string, fd i32)
gen_syscall(node ast.CallExpr) gen_syscall(node ast.CallExpr)
inc_var(var Var, config VarConfig) inc_var(var Var, config VarConfig)
infix_expr(node ast.InfixExpr) // TODO: make platform-independant infix_expr(node ast.InfixExpr) // TODO: make platform-independent
infloop() infloop()
init_struct(var Var, init ast.StructInit) init_struct(var Var, init ast.StructInit)
init_array(var Var, init ast.ArrayInit) init_array(var Var, init ast.ArrayInit)
@@ -520,7 +520,7 @@ pub fn (mut g Gen) link(obj_name string) {
g.link_elf_file(obj_name) g.link_elf_file(obj_name)
} }
.windows { .windows {
// windows linking is alredy done before codegen // windows linking is already done before codegen
} }
.macos { .macos {
// TODO: implement linking for macos! // TODO: implement linking for macos!


@@ -29,7 +29,7 @@ const (
pe_heap_size = 0x100000 // gcc default on windows pe_heap_size = 0x100000 // gcc default on windows
// tcc defaults // tcc defaults
pe_major_linker_version = 6 pe_major_linker_version = 6
pe_minor_linker_verion = 0 pe_minor_linker_version = 0
pe_major_os_version = 4 pe_major_os_version = 4
pe_minor_os_version = 0 pe_minor_os_version = 0
pe_major_subsystem_version = 4 pe_major_subsystem_version = 4
@@ -236,7 +236,7 @@ fn (mut g Gen) get_pe32_plus_optional_header() Pe32PlusOptionalHeader {
return Pe32PlusOptionalHeader{ return Pe32PlusOptionalHeader{
magic: .pe32plus magic: .pe32plus
major_linker_version: native.pe_major_linker_version major_linker_version: native.pe_major_linker_version
minor_linker_version: native.pe_minor_linker_verion minor_linker_version: native.pe_minor_linker_version
image_base: native.image_base image_base: native.image_base
section_alignment: native.pe_section_align section_alignment: native.pe_section_align
file_alignment: native.pe_file_align file_alignment: native.pe_file_align
@@ -262,7 +262,7 @@ enum Pe32PlusOPtionalHeaderField {
number_of_rva_and_sizes = 108 number_of_rva_and_sizes = 108
} }
// implemented because __offsetof() + [packed] structs wasn't consistend across OSs // implemented because __offsetof() + [packed] structs wasn't consistent across OSs
[inline] [inline]
fn pe32_plus_optional_header_offsetof(field Pe32PlusOPtionalHeaderField) i64 { fn pe32_plus_optional_header_offsetof(field Pe32PlusOPtionalHeaderField) i64 {
return i64(field) return i64(field)
@@ -449,7 +449,7 @@ enum PeSectionHeaderField {
pointer_to_raw_data = 20 pointer_to_raw_data = 20
} }
// implemented because __offsetof() + [packed] structs wasn't consistend across OSs // implemented because __offsetof() + [packed] structs wasn't consistent across OSs
[inline] [inline]
fn pe_section_header_offsetof(field PeSectionHeaderField) i64 { fn pe_section_header_offsetof(field PeSectionHeaderField) i64 {
return i64(field) return i64(field)
@@ -560,7 +560,7 @@ enum PeImportDirectoryTableField {
import_address_table_rva = 16 import_address_table_rva = 16
} }
// implemented because __offsetof() + [packed] structs wasn't consistend across OSs // implemented because __offsetof() + [packed] structs wasn't consistent across OSs
[inline] [inline]
fn pe_idt_offsetof(field PeImportDirectoryTableField) i64 { fn pe_idt_offsetof(field PeImportDirectoryTableField) i64 {
return i64(field) return i64(field)


@@ -423,7 +423,7 @@ pub fn (mut g Gen) set_set(v Var) {
// set pointers with value, get local, store value // set pointers with value, get local, store value
// set value, set local // set value, set local
// -- set works with a single value present on the stack beforehand // -- set works with a single value present on the stack beforehand
// -- not optimial for copying stack memory or shuffling structs // -- not optimal for copying stack memory or shuffling structs
// -- use mov instead // -- use mov instead
pub fn (mut g Gen) set(v Var) { pub fn (mut g Gen) set(v Var) {
if !v.is_address { if !v.is_address {


@@ -22,7 +22,7 @@ If everything works fine, the output of the generated program would have changed
which then is detected by the test program (the histogram checks). which then is detected by the test program (the histogram checks).
Since this test program is sensitive to coordination (or lack of) of several processes, Since this test program is sensitive to coordination (or lack of) of several processes,
it tries to sidestep the coordination issue by polling the file system for the existance it tries to sidestep the coordination issue by polling the file system for the existence
of files, ORIGINAL.txt ... STOP.txt , which are appended to by the generated program. of files, ORIGINAL.txt ... STOP.txt , which are appended to by the generated program.
Note: That approach of monitoring the state of the running generated program, is clearly not ideal, Note: That approach of monitoring the state of the running generated program, is clearly not ideal,

View file

@@ -94,7 +94,7 @@ pub fn (mut p Preferences) fill_with_defaults() {
// compilers. // compilers.
// //
// If you do decide to break it, please *at the very least*, test it // If you do decide to break it, please *at the very least*, test it
// extensively, and make a PR about it, instead of commiting directly // extensively, and make a PR about it, instead of committing directly
// and breaking the CI, VC, and users doing `v up`. // and breaking the CI, VC, and users doing `v up`.
if rpath == '${p.vroot}/cmd/v' && os.is_dir('vlib/compiler') { if rpath == '${p.vroot}/cmd/v' && os.is_dir('vlib/compiler') {
// Building V? Use v2, since we can't overwrite a running // Building V? Use v2, since we can't overwrite a running
@@ -129,7 +129,7 @@ pub fn (mut p Preferences) fill_with_defaults() {
p.parse_define('emscripten') p.parse_define('emscripten')
} }
if p.os == ._auto { if p.os == ._auto {
// No OS specifed? Use current system // No OS specified? Use current system
p.os = if p.backend != .wasm { get_host_os() } else { .wasi } p.os = if p.backend != .wasm { get_host_os() } else { .wasi }
} }
// //


@@ -29,9 +29,9 @@ pub enum GarbageCollectionMode {
unknown unknown
no_gc no_gc
boehm_full // full garbage collection mode boehm_full // full garbage collection mode
boehm_incr // incremental garbage colletion mode boehm_incr // incremental garbage collection mode
boehm_full_opt // full garbage collection mode boehm_full_opt // full garbage collection mode
boehm_incr_opt // incremental garbage colletion mode boehm_incr_opt // incremental garbage collection mode
boehm_leak // leak detection mode (makes `gc_check_leaks()` work) boehm_leak // leak detection mode (makes `gc_check_leaks()` work)
} }
@@ -133,7 +133,7 @@ pub mut:
is_help bool // -h, -help or --help was passed is_help bool // -h, -help or --help was passed
is_quiet bool // do not show the repetitive explanatory messages like the one for `v -prod run file.v` . is_quiet bool // do not show the repetitive explanatory messages like the one for `v -prod run file.v` .
is_cstrict bool // turn on more C warnings; slightly slower is_cstrict bool // turn on more C warnings; slightly slower
eval_argument string // `println(2+2)` on `v -e "println(2+2)"`. Note that this souce code, will be evaluated in vsh mode, so 'v -e 'println(ls(".")!)' is valid. eval_argument string // `println(2+2)` on `v -e "println(2+2)"`. Note that this source code, will be evaluated in vsh mode, so 'v -e 'println(ls(".")!)' is valid.
test_runner string // can be 'simple' (fastest, but much less detailed), 'tap', 'normal' test_runner string // can be 'simple' (fastest, but much less detailed), 'tap', 'normal'
profile_file string // the profile results will be stored inside profile_file profile_file string // the profile results will be stored inside profile_file
profile_no_inline bool // when true, [inline] functions would not be profiled profile_no_inline bool // when true, [inline] functions would not be profiled
@@ -189,7 +189,7 @@ pub mut:
prealloc bool prealloc bool
vroot string vroot string
vlib string // absolute path to the vlib/ folder vlib string // absolute path to the vlib/ folder
vmodules_paths []string // absolute paths to the vmodules folders, by default ['/home/user/.vmodules'], can be overriden by setting VMODULES vmodules_paths []string // absolute paths to the vmodules folders, by default ['/home/user/.vmodules'], can be overridden by setting VMODULES
out_name_c string // full os.real_path to the generated .tmp.c file; set by builder. out_name_c string // full os.real_path to the generated .tmp.c file; set by builder.
out_name string out_name string
path string // Path to file/folder to compile path string // Path to file/folder to compile


@@ -108,8 +108,8 @@ pub fn change_test_runner(x &TestRunner) {
test_runner = *x test_runner = *x
} }
// vtest_init will be caled *before* the normal _vinit() function, // vtest_init will be called *before* the normal _vinit() function,
// to give a chance to the test runner implemenation to change the // to give a chance to the test runner implementation to change the
// test_runner global variable. The reason vtest_init is called before // test_runner global variable. The reason vtest_init is called before
// _vinit, is because a _test.v file can define consts, and they in turn // _vinit, is because a _test.v file can define consts, and they in turn
// may use function calls in their declaration, which may do assertions. // may use function calls in their declaration, which may do assertions.


@@ -74,7 +74,7 @@ fn test_inline_asm() {
// assert loops == 1 // assert loops == 1
// assert k == 5 // assert k == 5
// not marked as mut because we derefernce m to change l // not marked as mut because we dereference m to change l
l := 5 l := 5
m := &l m := &l
asm amd64 { asm amd64 {


@@ -74,7 +74,7 @@ fn test_inline_asm() {
// assert loops == 1 // assert loops == 1
// assert k == 5 // assert k == 5
// not marked as mut because we derefernce m to change l // not marked as mut because we dereference m to change l
l := 5 l := 5
m := &l m := &l
asm i386 { asm i386 {


@@ -83,7 +83,7 @@ fn test_self_slice_push() {
fn test_slice_push_child() { fn test_slice_push_child() {
mut a := [1.0, 2.0625, 3.5, -7.75, 7.125, 8.4375, 0.5] mut a := [1.0, 2.0625, 3.5, -7.75, 7.125, 8.4375, 0.5]
mut b := unsafe { a[2..6] } // `b` is initially created as reference mut b := unsafe { a[2..6] } // `b` is initially created as reference
mut c := unsafe { b[1..3] } // `c` is initiall reference to `a` and `b` mut c := unsafe { b[1..3] } // `c` is initially created as a reference to `a` and `b`
b << -2.25 // `b` should be reallocated, so `a` doesn't change b << -2.25 // `b` should be reallocated, so `a` doesn't change
c[1] = -13.5 // this should change `c` and `a` but not `b` c[1] = -13.5 // this should change `c` and `a` but not `b`
assert c == [-7.75, -13.5] assert c == [-7.75, -13.5]


@@ -8,7 +8,7 @@ fn inc_elements(shared foo []int, n int, mut sem sync.Semaphore) {
for _ in 0 .. iterations_per_thread2 { for _ in 0 .. iterations_per_thread2 {
foo[n]++ foo[n]++
} }
sem.post() // indicat that thread is finished sem.post() // indicate that thread is finished
} }
fn test_autolocked_array_2() { fn test_autolocked_array_2() {
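The `sem.post()` above is one half of the usual wait/post pairing; a minimal sketch of that pattern, mirroring the test's use of `sync.Semaphore` (illustration only, not part of the diff):

import sync

fn worker(mut sem sync.Semaphore) {
	// ... do some work ...
	sem.post() // signal that this thread is finished
}

fn main() {
	mut sem := sync.new_semaphore()
	spawn worker(mut sem)
	sem.wait() // block until the worker posts
}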


@@ -3,7 +3,7 @@ struct MyHeapStruct {
name string name string
} }
// make sure dereferencing of heap stucts works in selector expr (in tmpl), // make sure dereferencing of heap structs works in selector expr (in tmpl),
fn test_heap_struct_dereferencing_in_selector_expr() { fn test_heap_struct_dereferencing_in_selector_expr() {
a := MyHeapStruct{ a := MyHeapStruct{
name: 'my_heap_struct_a' name: 'my_heap_struct_a'


@@ -74,7 +74,7 @@ fn min[T](tree Tree[T]) T {
} }
} }
// delete a value in BST (if nonexistant do nothing) // delete a value in BST (if nonexistent do nothing)
fn delete[T](tree Tree[T], x T) Tree[T] { fn delete[T](tree Tree[T], x T) Tree[T] {
return match tree { return match tree {
Empty { Empty {


@@ -69,7 +69,7 @@ fn test_generics_method_return_generics_struct() {
assert foo.get[int](o) == 42 assert foo.get[int](o) == 42
} }
// test genrics struct str() // test generic struct str()
pub struct ArrayIterator[T] { pub struct ArrayIterator[T] {
data []T data []T
mut: mut:


@@ -53,7 +53,7 @@ fn test_propagate() {
assert z == 31.0 assert z == 31.0
} }
fn test_array_void_interate() { fn test_array_void_iterate() {
mut r := []thread ?{} mut r := []thread ?{}
for i in 0 .. 3 { for i in 0 .. 3 {
r << spawn g(i) r << spawn g(i)
@@ -67,7 +67,7 @@ fn test_array_void_interate() {
assert res[2] == 2 assert res[2] == 2
} }
fn test_array_val_interate() { fn test_array_val_iterate() {
mut r := []thread ?f64{} mut r := []thread ?f64{}
for i in -1 .. 2 { for i in -1 .. 2 {
r << spawn f(i) r << spawn f(i)


@@ -43,7 +43,7 @@ pub struct ContainerBase {
ViewBase ViewBase
} }
// want to excute this method // want to execute this method
pub fn (mut cb ContainerBase) init(window &Window) { pub fn (mut cb ContainerBase) init(window &Window) {
dump(@METHOD) dump(@METHOD)
assert true assert true


@@ -74,7 +74,7 @@ fn min(tree Tree) f64 {
} }
} }
// delete a value in BST (if nonexistant do nothing) // delete a value in BST (if nonexistent do nothing)
fn delete(tree Tree, x f64) Tree { fn delete(tree Tree, x f64) Tree {
return match tree { return match tree {
Empty { Empty {


@@ -74,7 +74,7 @@ fn min(tree Tree) f64 {
} }
} }
// delete a value in BST (if nonexistant do nothing) // delete a value in BST (if nonexistent do nothing)
fn delete(tree Tree, x f64) Tree { fn delete(tree Tree, x f64) Tree {
return match tree { return match tree {
Empty { Empty {


@@ -1,11 +1,11 @@
module mod1 module mod1
// Note: the function here, should be overriden by the one in the wrapper.c.v file with the same name // Note: the function here, should be overridden by the one in the wrapper.c.v file with the same name
pub fn vadd(a int, b int) int { pub fn vadd(a int, b int) int {
return 123456 return 123456
} }
// this should NOT be overriden by the different wrapper.X.v files: // this should NOT be overridden by the different wrapper.X.v files:
pub fn a_common_pure_v_fn() int { pub fn a_common_pure_v_fn() int {
return 987654 return 987654
} }


@@ -7,7 +7,7 @@ import os
// The comment above, should make it try re-running the same test, // The comment above, should make it try re-running the same test,
// a maximum of 2 times. It will fail for all, but the last retry. // a maximum of 2 times. It will fail for all, but the last retry.
// This is useful for reducing false positives on the CI, due to // This is useful for reducing false positives on the CI, due to
// flakyness of specific tests like `vlib/v/live/live_test.v` for example. // flakiness of specific tests like `vlib/v/live/live_test.v` for example.
// Note: this test is supposed to be run with `v test retry_test.v`. // Note: this test is supposed to be run with `v test retry_test.v`.
// Running just `v retry_test.v` WILL fail. // Running just `v retry_test.v` WILL fail.


@@ -44,7 +44,7 @@ fn doub_large(shared a Large, shared b Large, shared c Large, shared d Large, sh
} }
fn test_mixed_order_lock_rlock() { fn test_mixed_order_lock_rlock() {
// initialze objects so that their sum = 1 // initialize objects so that their sum = 1
shared a := Large{ shared a := Large{
l: 4 l: 4
} }


@@ -2,7 +2,7 @@
// a) an array of custom structs, // a) an array of custom structs,
// b) also for the custom struct itself (when the .str() for it is missing). // b) also for the custom struct itself (when the .str() for it is missing).
// //
// Note: this is very simillar to string_interpolation_struct_test.v // Note: this is very similar to string_interpolation_struct_test.v
// but they should NOT be merged into 1 file. If you merge it with // but they should NOT be merged into 1 file. If you merge it with
// string_interpolation_struct_test.v, which tests whether the compiler // string_interpolation_struct_test.v, which tests whether the compiler
// can generate the default method for a struct, then the b) case of // can generate the default method for a struct, then the b) case of


@@ -312,19 +312,19 @@ fn test_levels() {
} }
} }
// Struct where an inizialized field is after a non-initilized field. // Struct where an initialized field is after a non-initialized field.
struct StructWithDefaultValues1 { struct StructWithDefaultValues1 {
field_uninitialized int field_uninitialized int
field_initialized int = 5 field_initialized int = 5
} }
// Struct where an inizialized field is before a non-initilized field. // Struct where an initialized field is before a non-initialized field.
struct StructWithDefaultValues2 { struct StructWithDefaultValues2 {
field_initialized int = 3 field_initialized int = 3
field_uninitialized int field_uninitialized int
} }
// Struct where an inizialized field is before several non-initilized fields. // Struct where an initialized field is before several non-initialized fields.
struct StructWithDefaultValues3 { struct StructWithDefaultValues3 {
field_initialized int = 2 field_initialized int = 2
field_uninitialized int field_uninitialized int


@@ -21,7 +21,7 @@ fn test_sumtype_equality() {
assert u1 != u2 assert u1 != u2
assert u1 != s1 assert u1 != s1
// Same value, defferent type // Same value, different type
foo := FooBar(Foo{ foo := FooBar(Foo{
v: 0 v: 0
}) })


@@ -45,5 +45,5 @@ fn test_main() {
fn test_opt_call() { fn test_opt_call() {
mut t := ?Test{} mut t := ?Test{}
fn_a(none) // returns none fn_a(none) // returns none
fn_b(t) // returs none fn_b(t) // returns none
} }


@@ -1,6 +1,6 @@
module token module token
// KeywordsMatcherTrie provides a faster way of determinining whether a given name // KeywordsMatcherTrie provides a faster way of determining whether a given name
// is a reserved word (belongs to a given set of previously known words `R`). // is a reserved word (belongs to a given set of previously known words `R`).
// See the module description for more details. // See the module description for more details.
[heap] [heap]


@@ -6,9 +6,9 @@ module token
pub struct Pos { pub struct Pos {
pub: pub:
len int // length of the literal in the source len int // length of the literal in the source
line_nr int // the line number in the source where the token occured line_nr int // the line number in the source where the token occurred
pos int // the position of the token in scanner text pos int // the position of the token in scanner text
col int // the column in the source where the token occured col int // the column in the source where the token occurred
pub mut: pub mut:
last_line int // the line number where the ast object ends (used by vfmt) last_line int // the line number where the ast object ends (used by vfmt)
} }


@@ -70,7 +70,7 @@ fn (mut i IndexState) safe_access(key string, new int) bool {
return true return true
} }
// safe_offset returns for a previvous array what was the highest // safe_offset returns for a previous array what was the highest
// offset we ever accessed for that identifier // offset we ever accessed for that identifier
fn (mut i IndexState) safe_offset(key string) int { fn (mut i IndexState) safe_offset(key string) int {
$if no_bounds_checking { $if no_bounds_checking {


@@ -494,7 +494,7 @@ pub fn (mut t Transformer) for_stmt(mut node ast.ForStmt) ast.Stmt {
node.cond = t.expr(mut node.cond) node.cond = t.expr(mut node.cond)
match node.cond { match node.cond {
ast.BoolLiteral { ast.BoolLiteral {
if !(node.cond as ast.BoolLiteral).val { // for false { ... } should be eleminated if !(node.cond as ast.BoolLiteral).val { // for false { ... } should be eliminated
return ast.empty_stmt return ast.empty_stmt
} }
} }


@@ -111,7 +111,7 @@ pub fn short_module_name(name string) string {
} }
// highlight_suggestion returns a colorful/highlighted version of `message`, // highlight_suggestion returns a colorful/highlighted version of `message`,
// but only if the standart error output allows for color messages, otherwise // but only if the standard error output allows for color messages, otherwise
// the plain message will be returned. // the plain message will be returned.
pub fn highlight_suggestion(message string) string { pub fn highlight_suggestion(message string) string {
return term.ecolorize(term.bright_blue, message) return term.ecolorize(term.bright_blue, message)


@@ -493,7 +493,7 @@ pub fn recompile_file(vexe string, file string) {
} }
// get_vtmp_folder returns the path to a folder, that is writable to V programs, // get_vtmp_folder returns the path to a folder, that is writable to V programs,
// and specific to the user. It can be overriden by setting the env variable `VTMP`. // and specific to the user. It can be overridden by setting the env variable `VTMP`.
pub fn get_vtmp_folder() string { pub fn get_vtmp_folder() string {
return os.vtmp_dir() return os.vtmp_dir()
} }


@@ -441,7 +441,7 @@ struct User {
} }
fn get_session(mut ctx vweb.Context) bool { fn get_session(mut ctx vweb.Context) bool {
// impelement your own logic to get the user // implement your own logic to get the user
user := User{ user := User{
session_id: '123456' session_id: '123456'
name: 'Vweb' name: 'Vweb'
