Mirror of https://github.com/vlang/v.git, synced 2025-09-13 14:32:26 +03:00
all: fix typos (#19634)
parent 407adaa3c1
commit 9051ac8921
108 changed files with 235 additions and 214 deletions
.github/workflows/bootstrapping_works_ci.yml (vendored): 2 changes
@@ -1,4 +1,4 @@
-name: Bootstraping works
+name: Bootstrapping works

 on:
 schedule:
@@ -154,7 +154,7 @@ endif
 fresh_tcc:
 rm -rf $(TMPTCC)
 ifndef local
-# Check wether a TCC branch exists for the user's system configuration.
+# Check whether a TCC branch exists for the user's system configuration.
 ifneq (,$(findstring thirdparty-$(TCCOS)-$(TCCARCH), $(shell git ls-remote --heads $(TCCREPO) | sed 's/^[a-z0-9]*\trefs.heads.//')))
 $(GITFASTCLONE) --branch thirdparty-$(TCCOS)-$(TCCARCH) $(TCCREPO) $(TMPTCC)
 @$(MAKE) --quiet check_for_working_tcc 2> /dev/null
@@ -59,7 +59,7 @@ fn main() {
 Prints lines periodically (-period_ms), to stdout/stderr (-target).
 After a while (-timeout_ms), exit with (-exitcode).
 This program is useful for platform independent testing
-of child process/standart input/output control.
+of child process/standard input/output control.
 It is used in V's `os` module tests.
 ")
 return
@@ -15,7 +15,7 @@ fn funky()
 - bar
 # test
 ########### deep test
-#a shouldnt have a newline test
+#a shouldn't have a newline test

 | foo bar | yes |
 |-----------|--------|
@@ -12,7 +12,7 @@
 // - bar
 // # test
 // ########### deep test
-// #a shouldnt have a newline
+// #a shouldn't have a newline
 // test
 //
 // | foo bar | yes |
@@ -11,7 +11,7 @@ http://rascunhointeligente.blogspot.com/2010/10/o-algoritmo-de-bellman-ford-um.h
 code by CCS
 */

-const large = 999999 // almost inifinity
+const large = 999999 // almost infinity

 // a structure to represent a weighted edge in graph
 struct EDGE {
@@ -125,7 +125,7 @@ fn dijkstra(g [][]int, s int) {
 push_pq(mut pq_queue, s, 0) // goes s with priority 0
 mut n := g.len

-mut dist := []int{len: n, init: -1} // dist with -1 instead of INIFINITY
+mut dist := []int{len: n, init: -1} // dist with -1 instead of INFINITE
 mut path := []int{len: n, init: -1} // previous node of each shortest path

 // Distance of source vertex from itself is always 0
@@ -133,7 +133,7 @@ fn dijkstra(g [][]int, s int) {

 for pq_queue.len != 0 {
 mut v := departure_priority(mut pq_queue)
-// for all W adjcents vertices of v
+// for all W adjacents vertices of v
 mut adjs_of_v := all_adjacents(g, v) // all_ADJ of v ....
 // print('\n ADJ ${v} is ${adjs_of_v}')
 mut new_dist := 0
@@ -1,5 +1,5 @@
 /*
-Exploring PRIMS,
+Exploring PRIMS,
 The data example is from
 https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/

@@ -117,7 +117,7 @@ fn prim_mst(g [][]int, s int) {
 push_pq(mut pq_queue, s, 0) // goes s with priority 0
 mut n := g.len

-mut dist := []int{len: n, init: -1} // dist with -1 instead of INIFINITY
+mut dist := []int{len: n, init: -1} // dist with -1 instead of INFINITE
 mut path := []int{len: n, init: -1} // previous node of each shortest path

 // Distance of source vertex from itself is always 0
@@ -125,7 +125,7 @@ fn prim_mst(g [][]int, s int) {

 for pq_queue.len != 0 {
 mut v := departure_priority(mut pq_queue)
-// for all W adjcents vertices of v
+// for all W adjacents vertices of v
 mut adjs_of_v := all_adjacents(g, v) // all_ADJ of v ....
 // print('\n :${dist} :: ${pq_queue}')
 // print('\n ADJ ${v} is ${adjs_of_v}')
@@ -209,7 +209,7 @@ fn main() {
 [5, 15, 4, 0],
 ]

-// To find number of coluns
+// To find number of columns
 // mut cols := an_array[0].len
 mut graph := [][]int{} // the graph: adjacency matrix
 // for index, g_value in [graph_01, graph_02, graph_03] {
@@ -278,12 +278,12 @@ fn draw_cube_glsl(app App) {
 tr_matrix := calc_tr_matrices(dw, dh, 0, 0, 2.3)
 gfx.apply_viewport(0, 0, ws.width, ws.height, true)

-// apply the pipline and bindings
+// apply the pipeline and bindings
 gfx.apply_pipeline(app.cube_pip_glsl)
 gfx.apply_bindings(app.cube_bind)

 // Uniforms
-// *** vertex shadeer uniforms ***
+// *** vertex shader uniforms ***
 // passing the view matrix as uniform
 // res is a 4x4 matrix of f32 thus: 4*16 byte of size
 vs_uniforms_range := gfx.Range{
@@ -292,20 +292,18 @@ fn draw_cube_glsl(app App) {
 }
 gfx.apply_uniforms(.vs, C.SLOT_vs_params, &vs_uniforms_range)

-// vfmt off
 // *** fragment shader uniforms ***
 time_ticks := f32(time.ticks() - app.ticks) / 1000
 mut tmp_fs_params := [
 f32(ws.width),
-ws.height * ratio, // x,y resolution to pass to FS
-app.mouse_x, // mouse x
+ws.height * ratio, // x,y resolution to pass to FS
+app.mouse_x, // mouse x
 ws.height - app.mouse_y * 2, // mouse y scaled
-time_ticks, // time as f32
-app.frame_count, // frame count
+time_ticks, // time as f32
+app.frame_count, // frame count
 0,
-0 // padding bytes , see "fs_params" struct paddings in rt_glsl.h
+0, // padding bytes , see "fs_params" struct paddings in rt_glsl.h
 ]!
-// vfmt on
 fs_uniforms_range := gfx.Range{
 ptr: unsafe { &tmp_fs_params }
 size: usize(sizeof(tmp_fs_params))
@@ -17,7 +17,7 @@ pub fn map_of_indexes[T](array []T) map[T][]int {
 }

 // map_of_counts returns a map, where each key is an unique value in `array`,
-// and each value for that key is how many times that value occures in `array`.
+// and each value for that key is how many times that value occurs in `array`.
 // It can be useful for building histograms of discrete measurements.
 // Example: arrays.map_of_counts([1,2,3,4,4,2,1,4,4]) == {1: 2, 2: 2, 3: 1, 4: 4}
 pub fn map_of_counts[T](array []T) map[T]int {
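Editorial note, not part of the diff: a minimal usage sketch of `arrays.map_of_counts`, based only on the doc comment above (the standalone program is hypothetical):

```v
import arrays

fn main() {
	// how many times each distinct value occurs in the input array
	counts := arrays.map_of_counts([1, 2, 3, 4, 4, 2, 1, 4, 4])
	println(counts) // {1: 2, 2: 2, 3: 1, 4: 4}
}
```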
@@ -203,7 +203,7 @@ fn (mut a array) ensure_cap(required int) {
 }

 // repeat returns a new array with the given array elements repeated given times.
-// `cgen` will replace this with an apropriate call to `repeat_to_depth()`
+// `cgen` will replace this with an appropriate call to `repeat_to_depth()`
 //
 // This is a dummy placeholder that will be overridden by `cgen` with an appropriate
 // call to `repeat_to_depth()`. However the `checker` needs it here.
@@ -627,7 +627,7 @@ fn (a array) clone_static_to_depth(depth int) array {
 }

 // clone returns an independent copy of a given array.
-// this will be overwritten by `cgen` with an apropriate call to `.clone_to_depth()`
+// this will be overwritten by `cgen` with an appropriate call to `.clone_to_depth()`
 // However the `checker` needs it here.
 pub fn (a &array) clone() array {
 return unsafe { a.clone_to_depth(0) }
@@ -827,7 +827,7 @@ pub fn (a array) map(callback fn (voidptr) voidptr) array
 // being compared.
 //
 // Example: array.sort() // will sort the array in ascending order
-// Example: array.sort(b < a) // will sort the array in decending order
+// Example: array.sort(b < a) // will sort the array in descending order
 // Example: array.sort(b.name < a.name) // will sort descending by the .name field
 pub fn (mut a array) sort(callback fn (voidptr, voidptr) int)

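Editorial note, not part of the diff: the examples in the doc comment above use V's special `a`/`b` sort arguments; a short illustrative sketch:

```v
fn main() {
	mut numbers := [3, 1, 2]
	numbers.sort() // ascending order
	println(numbers) // [1, 2, 3]
	numbers.sort(b < a) // descending order
	println(numbers) // [3, 2, 1]
}
```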
@@ -111,7 +111,7 @@ fn (mut a array) ensure_cap_noscan(required int) {
 }

 // repeat returns a new array with the given array elements repeated given times.
-// `cgen` will replace this with an apropriate call to `repeat_to_depth()`
+// `cgen` will replace this with an appropriate call to `repeat_to_depth()`

 // version of `repeat()` that handles multi dimensional arrays
 // `unsafe` to call directly because `depth` is not checked
@@ -39,7 +39,7 @@ fn test_str_methods() {
 assert charptr(1).str() == '0x1'
 }

-fn test_and_precendence() {
+fn test_and_precedence() {
 assert (2 & 0 == 0) == ((2 & 0) == 0)
 assert (2 & 0 != 0) == ((2 & 0) != 0)
 assert (0 & 0 >= 0) == ((0 & 0) >= 0)
@@ -48,7 +48,7 @@ fn test_and_precendence() {
 assert (1 & 2 > 0) == ((1 & 2) > 0)
 }

-fn test_or_precendence() {
+fn test_or_precedence() {
 assert (1 | 0 == 0) == ((1 | 0) == 0)
 assert (1 | 0 != 1) == ((1 | 0) != 1)
 assert (1 | 0 >= 2) == ((1 | 0) >= 2)
@@ -57,7 +57,7 @@ fn test_or_precendence() {
 assert (1 | 0 > 1) == ((1 | 0) > 1)
 }

-fn test_xor_precendence() {
+fn test_xor_precedence() {
 assert (1 ^ 0 == 2) == ((1 ^ 0) == 2)
 assert (1 ^ 0 != 2) == ((1 ^ 0) != 2)
 assert (1 ^ 0 >= 0) == ((1 ^ 0) >= 0)
@@ -66,12 +66,12 @@ fn test_xor_precendence() {
 assert (1 ^ 0 > 1) == ((1 ^ 0) > 1)
 }

-fn test_left_shift_precendence() {
+fn test_left_shift_precedence() {
 assert (2 << 4 | 3) == ((2 << 4) | 3)
 assert (2 << 4 | 3) != (2 << (4 | 3))
 }

-fn test_right_shift_precendence() {
+fn test_right_shift_precedence() {
 assert (256 >> 4 | 3) == ((256 >> 4) | 3)
 assert (256 >> 4 | 3) != (256 >> (4 | 3))
 }
@@ -432,7 +432,7 @@ pub fn (mut s []string) sort() {
 s.sort_with_compare(compare_strings)
 }

-// sort_ignore_case sorts the string array using case insesitive comparing.
+// sort_ignore_case sorts the string array using case insensitive comparing.
 pub fn (mut s []string) sort_ignore_case() {
 s.sort_with_compare(compare_lower_strings)
 }
@@ -484,7 +484,7 @@ pub fn (s string) repeat(count int) string {

 // TODO: Make these functions actually work.
 // strip_margin allows multi-line strings to be formatted in a way that removes white-space
-// before a delimeter. by default `|` is used.
+// before a delimiter. By default `|` is used.
 // Note: the delimiter has to be a byte at this time. That means surrounding
 // the value in ``.
 //
@@ -77,7 +77,7 @@ pub fn (s string) runes() []rune {
 // cstring_to_vstring creates a new V string copy of the C style string,
 // pointed by `s`. This function is most likely what you want to use when
 // working with C style pointers to 0 terminated strings (i.e. `char*`).
-// It is recomended to use it, unless you *do* understand the implications of
+// It is recommended to use it, unless you *do* understand the implications of
 // tos/tos2/tos3/tos4/tos5 in terms of memory management and interactions with
 // -autofree and `[manualfree]`.
 // It will panic, if the pointer `s` is 0.
@@ -745,7 +745,7 @@ pub fn (s string) split_any(delim string) []string {
 mut i := 0
 // check empty source string
 if s.len > 0 {
-// if empty delimiter string using defautl split
+// if empty delimiter string using default split
 if delim.len <= 0 {
 return s.split('')
 }
@@ -813,7 +813,7 @@ pub fn (s string) rsplit(delim string) []string {
 return s.rsplit_nth(delim, 0)
 }

-// split_once devides string into pair of string by `delim`.
+// split_once divides string into pair of string by `delim`.
 // Example:
 // ```v
 // path, ext := 'file.ts.dts'.splice_once('.')?
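Editorial note, not part of the diff: a hedged sketch of how the documented `split_once` is typically used (splitting at the first delimiter; the optional result must be handled by the caller):

```v
fn main() {
	// split at the first `.` only; the `or` branch runs when the delimiter is absent
	name, rest := 'file.ts.dts'.split_once('.') or { return }
	println(name) // file
	println(rest) // ts.dts
}
```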
@@ -832,7 +832,7 @@ pub fn (s string) split_once(delim string) ?(string, string) {
 return result[0], result[1]
 }

-// rsplit_once devides string into pair of string by `delim`.
+// rsplit_once divides string into pair of string by `delim`.
 // Example:
 // ```v
 // path, ext := 'file.ts.dts'.splice_once('.')?
@@ -1592,7 +1592,7 @@ pub fn (s string) trim(cutset string) string {
 return s.substr(left, right)
 }

-// trim_indexes gets the new start and end indicies of a string when any of the characters given in `cutset` were stripped from the start and end of the string. Should be used as an input to `substr()`. If the string contains only the characters in `cutset`, both values returned are zero.
+// trim_indexes gets the new start and end indices of a string when any of the characters given in `cutset` were stripped from the start and end of the string. Should be used as an input to `substr()`. If the string contains only the characters in `cutset`, both values returned are zero.
 // Example: left, right := '-hi-'.trim_indexes('-')
 [direct_array_access]
 pub fn (s string) trim_indexes(cutset string) (int, int) {
@@ -1719,7 +1719,7 @@ fn compare_lower_strings(a &string, b &string) int {
 return compare_strings(&aa, &bb)
 }

-// sort_ignore_case sorts the string array using case insesitive comparing.
+// sort_ignore_case sorts the string array using case insensitive comparing.
 [inline]
 pub fn (mut s []string) sort_ignore_case() {
 s.sort_with_compare(compare_lower_strings)
@@ -2096,7 +2096,7 @@ pub fn (s string) fields() []string {
 }

 // strip_margin allows multi-line strings to be formatted in a way that removes white-space
-// before a delimeter. by default `|` is used.
+// before a delimiter. By default `|` is used.
 // Note: the delimiter has to be a byte at this time. That means surrounding
 // the value in ``.
 //
@@ -147,7 +147,7 @@ pub fn get_str_intp_u32_format(fmt_type StrIntpType, in_width int, in_precision
 return res
 }

-// convert from struct to formated string
+// convert from struct to formatted string
 [manualfree]
 fn (data &StrIntpData) process_str_intp_data(mut sb strings.Builder) {
 x := data.fmt
@@ -326,7 +326,7 @@ pub fn (mut v Point) vartime_multiscalar_mult(scalars []Scalar, points []Point)
 // at each iteration and checking whether there is a nonzero
 // coefficient to look up a multiple of.
 //
-// Skip trying to find the first nonzero coefficent, because
+// Skip trying to find the first nonzero coefficient, because
 // searching might be more work than a few extra doublings.
 // k == i, l == j
 for k := 255; k >= 0; k-- {
@@ -38,7 +38,7 @@ fn (mut v ProjLookupTable) from_p3(q Point) {
 for i := 0; i < 7; i++ {
 // Compute (i+1)*Q as Q + i*Q and convert to a ProjCached
 // This is needlessly complicated because the API has explicit
-// recievers instead of creating stack objects and relying on RVO
+// receivers instead of creating stack objects and relying on RVO
 v.points[i + 1].from_p3(tmp_p3.from_p1(tmp_p1.add(q, v.points[i])))
 }
 }
@@ -4,7 +4,7 @@

 // This is the generic version with no architecture optimizations.
 // In its own file so that an architecture
-// optimized verision can be substituted
+// optimized version can be substituted

 module md5

@@ -54,20 +54,20 @@ fn parse_headers(block string) ?(map[string][]string, string) {
 return map[string][]string{}, block
 }

-// seperate lines instead of iterating over them,
+// separate lines instead of iterating over them,
 // so that we can manually index them
-headers_seperated := headers_str.split_into_lines()
+headers_separated := headers_str.split_into_lines()

 // index the key/value separator ':', otherwise
 // return none because it should exist
 // the initialisation of this function already tells us headers are present
-mut colon_index := headers_seperated[0].index(colon) or { return none }
+mut colon_index := headers_separated[0].index(colon) or { return none }

 mut headers := map[string][]string{}
 mut index := 0

-for index < headers_seperated.len - 1 {
-line := headers_seperated[index]
+for index < headers_separated.len - 1 {
+line := headers_separated[index]
 if line.len == 0 {
 break
 }
@@ -75,10 +75,10 @@ fn parse_headers(block string) ?(map[string][]string, string) {
 key := line[..colon_index].trim_space()
 mut val := line[colon_index + 1..].trim_space()

-for colon_index = 0; index < headers_seperated.len - 1 && colon_index == 0; {
+for colon_index = 0; index < headers_separated.len - 1 && colon_index == 0; {
 index++
-colon_index = headers_seperated[index].index(colon) or {
-val += headers_seperated[index].trim_space()
+colon_index = headers_separated[index].index(colon) or {
+val += headers_separated[index].trim_space()
 0
 }
 }
@@ -3,7 +3,7 @@
 // that can be found in the LICENSE file.
 // This is the generic version with no architecture optimizations.
 // In its own file so that an architecture
-// optimized verision can be substituted
+// optimized version can be substituted
 module sha1

 import math.bits
@@ -4,7 +4,7 @@
 // SHA256 block step.
 // This is the generic version with no architecture optimizations.
 // In its own file so that an architecture
-// optimized verision can be substituted
+// optimized version can be substituted
 module sha256

 import math.bits
@@ -4,7 +4,7 @@
 // SHA512 block step.
 // This is the generic version with no architecture optimizations.
 // In its own file so that an architecture
-// optimized verision can be substituted
+// optimized version can be substituted
 module sha512

 import math.bits
@@ -5,7 +5,7 @@ pub struct Eof {
 Error
 }

-// NotExpected is a generic error that means that we receave a not expecte error.
+// NotExpected is a generic error that means that we receave a not expected error.
 pub struct NotExpected {
 cause string
 code int
@@ -148,7 +148,7 @@ pub fn (c &Cookie) str() string {
 b.write_string('; expires=')
 b.write_string(time_str)
 }
-// TODO: Fix this. Techically a max age of 0 or less should be 0
+// TODO: Fix this. Technically a max age of 0 or less should be 0
 // We need a way to not have a max age.
 if c.max_age > 0 {
 b.write_string('; Max-Age=')
@@ -398,7 +398,7 @@ fn new_tcp_socket(family AddrFamily) !TcpSocket {
 }

 // TODO(emily):
-// we shouldnt be using ioctlsocket in the 21st century
+// we shouldn't be using ioctlsocket in the 21st century
 // use the non-blocking socket option instead please :)

 // TODO(emily):
@@ -5,7 +5,7 @@ pub struct Eof {
 Error
 }

-// NotExpected is a generic error that means that we receave a not expecte error.
+// NotExpected is a generic error that means that we receave a not expected error.
 pub struct NotExpected {
 cause string
 code int
@@ -841,7 +841,7 @@ pub enum SeekMode {
 // .start -> the origin is the start of the file
 // .current -> the current position/cursor in the file
 // .end -> the end of the file
-// If the file is not seek-able, or an error occures, the error will
+// If the file is not seek-able, or an error occurs, the error will
 // be returned to the caller.
 // A successful call to the fseek() function clears the end-of-file
 // indicator for the file.
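Editorial note, not part of the diff: a hedged sketch of seeking in a file with the `SeekMode` values documented above (assumes the `seek`/`tell` methods on `os.File` and a hypothetical `example.txt`):

```v
import os

fn main() {
	mut f := os.open('example.txt') or { return }
	defer {
		f.close()
	}
	// move the cursor to the end of the file, then read the resulting position
	f.seek(0, .end) or { return }
	size := f.tell() or { return }
	println('file size: ${size} bytes')
}
```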
@@ -151,7 +151,7 @@ fn test_read_eof_last_read_partial_buffer_fill() {

 // test_read_eof_last_read_full_buffer_fill tests that when reading a file the
 // end-of-file is detected and results in a none error being returned. This test
-// simulates file reading where the end-of-file is reached at the beinning of an
+// simulates file reading where the end-of-file is reached at the beginning of an
 // fread that returns no data.
 fn test_read_eof_last_read_full_buffer_fill() {
 mut f := os.open_file(tfile, 'w')!
@@ -203,7 +203,7 @@ fn clean_path(path string) string {
 }
 continue
 }
-// turn foward slash into a back slash on a Windows system
+// turn forward slash into a back slash on a Windows system
 $if windows {
 if curr == os.fslash {
 sb.write_u8(os.bslash)
@@ -1011,7 +1011,7 @@ pub fn chown(path string, owner int, group int) ! {
 }

 // open_append tries to open a file from a given path.
-// If successfull, it and returns a `File` for appending.
+// If successful, it and returns a `File` for appending.
 pub fn open_append(path string) !File {
 mut file := File{}
 $if windows {
@@ -1036,7 +1036,7 @@ pub fn open_append(path string) !File {
 // execvp - loads and executes a new child process, *in place* of the current process.
 // The child process executable is located in `cmdpath`.
 // The arguments, that will be passed to it are in `args`.
-// Note: this function will NOT return when successfull, since
+// Note: this function will NOT return when successful, since
 // the child process will take control over execution.
 pub fn execvp(cmdpath string, cmdargs []string) ! {
 mut cargs := []&char{}
@@ -1063,7 +1063,7 @@ pub fn execvp(cmdpath string, cmdargs []string) ! {
 // The child process executable is located in `cmdpath`.
 // The arguments, that will be passed to it are in `args`.
 // You can pass environment variables to through `envs`.
-// Note: this function will NOT return when successfull, since
+// Note: this function will NOT return when successful, since
 // the child process will take control over execution.
 pub fn execve(cmdpath string, cmdargs []string, envs []string) ! {
 mut cargv := []&char{}
vlib/os/os.v: 28 changes
@@ -55,8 +55,8 @@ fn executable_fallback() string {
 }
 }
 if !is_abs_path(exepath) {
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
-rexepath := exepath.replace(other_seperator, path_separator)
+other_separator := if path_separator == '/' { '\\' } else { '/' }
+rexepath := exepath.replace(other_separator, path_separator)
 if rexepath.contains(path_separator) {
 exepath = join_path_single(os.wd_at_startup, exepath)
 } else {
@@ -227,7 +227,7 @@ pub fn is_dir_empty(path string) bool {
 return res
 }

-// file_ext will return the part after the last occurence of `.` in `path`.
+// file_ext will return the part after the last occurrence of `.` in `path`.
 // The `.` is included.
 // Examples:
 // ```v
@@ -256,8 +256,8 @@ pub fn dir(opath string) string {
 if opath == '' {
 return '.'
 }
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
-path := opath.replace(other_seperator, path_separator)
+other_separator := if path_separator == '/' { '\\' } else { '/' }
+path := opath.replace(other_separator, path_separator)
 pos := path.last_index(path_separator) or { return '.' }
 if pos == 0 && path_separator == '/' {
 return '/'
@@ -273,8 +273,8 @@ pub fn base(opath string) string {
 if opath == '' {
 return '.'
 }
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
-path := opath.replace(other_seperator, path_separator)
+other_separator := if path_separator == '/' { '\\' } else { '/' }
+path := opath.replace(other_separator, path_separator)
 if path == path_separator {
 return path_separator
 }
@@ -287,11 +287,11 @@ pub fn base(opath string) string {
 return path[pos + 1..]
 }

-// file_name will return all characters found after the last occurence of `path_separator`.
+// file_name will return all characters found after the last occurrence of `path_separator`.
 // file extension is included.
 pub fn file_name(opath string) string {
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
-path := opath.replace(other_seperator, path_separator)
+other_separator := if path_separator == '/' { '\\' } else { '/' }
+path := opath.replace(other_separator, path_separator)
 return path.all_after_last(path_separator)
 }

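Editorial note, not part of the diff: taken together, the path helpers touched in this file behave roughly as below (illustrative sketch with a made-up path):

```v
import os

fn main() {
	p := '/abc/def/ghi.txt'
	println(os.dir(p)) // /abc/def
	println(os.base(p)) // ghi.txt
	println(os.file_name(p)) // ghi.txt
	println(os.file_ext(p)) // .txt
}
```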
@@ -610,7 +610,7 @@ fn impl_walk_ext(path string, ext string, mut out []string) {
 }

 // walk traverses the given directory `path`.
-// When a file is encountred, it will call the callback `f` with current file as argument.
+// When a file is encountered, it will call the callback `f` with current file as argument.
 // Note: walk can be called even for deeply nested folders,
 // since it does not recurse, but processes them iteratively.
 pub fn walk(path string, f fn (string)) {
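Editorial note, not part of the diff: a minimal sketch of the `os.walk` callback style described above (the starting directory is an assumption):

```v
import os

fn main() {
	// calls the callback once for every file found under the given directory
	os.walk('.', fn (file string) {
		println(file)
	})
}
```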
@@ -645,7 +645,7 @@ pub fn walk(path string, f fn (string)) {
 pub type FnWalkContextCB = fn (voidptr, string)

 // walk_with_context traverses the given directory `path`.
-// For each encountred file *and* directory, it will call your `fcb` callback,
+// For each encountered file *and* directory, it will call your `fcb` callback,
 // passing it the arbitrary `context` in its first parameter,
 // and the path to the file in its second parameter.
 // Note: walk_with_context can be called even for deeply nested folders,
@@ -695,8 +695,8 @@ pub struct MkdirParams {

 // mkdir_all will create a valid full path of all directories given in `path`.
 pub fn mkdir_all(opath string, params MkdirParams) ! {
-other_seperator := if path_separator == '/' { '\\' } else { '/' }
-path := opath.replace(other_seperator, path_separator)
+other_separator := if path_separator == '/' { '\\' } else { '/' }
+path := opath.replace(other_separator, path_separator)
 mut p := if path.starts_with(path_separator) { path_separator } else { '' }
 path_parts := path.trim_left(path_separator).split(path_separator)
 for subdir in path_parts {
@@ -67,7 +67,7 @@ pub fn (a &Asset) get_length() int {
 fn C.AAsset_getLength64(&C.AAsset) i64

 // get_length_64 returns the total size of the asset data using
-// a 64-bit number insted of 32-bit as `get_length`.
+// a 64-bit number instead of 32-bit as `get_length`.
 pub fn (a &Asset) get_length_64() i64 {
 return C.AAsset_getLength64(a)
 }
@@ -764,7 +764,7 @@ fn test_posix_set_bit() {
 }
 mode = u32(s.st_mode) & 0o0777
 assert mode == 0o0777
-// Note: setting the sticky bit is platform dependend
+// Note: setting the sticky bit is platform dependent
 // `chmod -s -g -t`
 os.posix_set_permission_bit(fpath, os.s_isuid, false)
 os.posix_set_permission_bit(fpath, os.s_isgid, false)
@@ -35,7 +35,7 @@ fn (mut p Process) unix_spawn_process() int {
 C.setpgid(0, 0)
 }
 if p.use_stdio_ctl {
-// Redirect the child standart in/out/err to the pipes that
+// Redirect the child standard in/out/err to the pipes that
 // were created in the parent.
 // Close the parent's pipe fds, the child do not need them:
 fd_close(pipeset[1])
@@ -1004,7 +1004,7 @@ fn (mut re RE) impl_compile(in_txt string) (int, int) {
 // manage negation groups
 if negate_flag == true {
 re.prog[pc].group_neg = true
-re.prog[pc].rep_min = 0 // may be not catched, but it is ok
+re.prog[pc].rep_min = 0 // may not be caught, but it is ok
 }

 // set the group id
@@ -1,7 +1,7 @@
 module stdatomic

 // Implement the atomic operations. For now TCC does not support the atomic
-// versions on nix so it uses locks to simulate the same behavor.
+// versions on nix so it uses locks to simulate the same behavior.
 //
 // On windows tcc can simulate with other atomic operations.
 //
@@ -1,7 +1,7 @@
 // tests that use and test private functions
 module time

-// test the old behavor is same as new, the unix time should always be local time
+// test the old behavior is same as new, the unix time should always be local time
 fn test_new_is_same_as_old_for_all_platforms() {
 t := C.time(0)
 tm := C.localtime(&t)
@@ -224,7 +224,7 @@ pub fn (t Time) relative() string {
 return '${prefix}${y} years${suffix}'
 }

-// relative_short returns a string saying how long ago a time occured as follows:
+// relative_short returns a string saying how long ago a time occurred as follows:
 // 0-30 seconds: `"now"`; 30-60 seconds: `"1m"`; anything else is rounded to the
 // nearest minute, hour, day, or year
 //
@@ -21,7 +21,7 @@ struct C.tm {

 fn C.timegm(&C.tm) C.time_t

-// prefering localtime_r over the localtime because
+// preferring localtime_r over the localtime because
 // from docs localtime_r is thread safe,
 fn C.localtime_r(t &C.time_t, tm &C.tm)

@@ -128,7 +128,7 @@ pub fn (t Time) local() Time {

 // win_now calculates current time using winapi to get higher resolution on windows
 // GetSystemTimeAsFileTime is used and converted to local time. It can resolve time
-// down to millisecond. Other more precice methods can be implemented in the future
+// down to millisecond. Other more precise methods can be implemented in the future
 fn win_now() Time {
 ft_utc := C._FILETIME{}
 C.GetSystemTimeAsFileTime(&ft_utc)
@@ -152,7 +152,7 @@ fn win_now() Time {

 // win_utc calculates current time using winapi to get higher resolution on windows
 // GetSystemTimeAsFileTime is used. It can resolve time down to millisecond
-// other more precice methods can be implemented in the future
+// other more precise methods can be implemented in the future
 fn win_utc() Time {
 ft_utc := C._FILETIME{}
 C.GetSystemTimeAsFileTime(&ft_utc)
@@ -235,8 +235,8 @@ pub fn (m map[string]Any) to_inline_toml() string {
 mut i := 1
 for k, v in m {
 key := if k.contains(' ') { '"${k}"' } else { k }
-delimeter := if i < m.len { ',' } else { '' }
-toml_text += ' ${key} = ${v.to_toml()}${delimeter}'
+delimiter := if i < m.len { ',' } else { '' }
+toml_text += ' ${key} = ${v.to_toml()}${delimiter}'
 i++
 }
 return toml_text + ' }'
@@ -559,11 +559,11 @@ pub fn (c Checker) check_comment(comment ast.Comment) ! {
 break
 }
 ch_byte := u8(ch)
-// Check for carrige return
+// Check for carriage return
 if ch_byte == 0x0D {
 st := s.state()
 return error(@MOD + '.' + @STRUCT + '.' + @FN +
-' carrige return character `${ch_byte.hex()}` is not allowed in comments (${st.line_nr},${st.col}).')
+' carriage return character `${ch_byte.hex()}` is not allowed in comments (${st.line_nr},${st.col}).')
 }
 // Check for control characters (allow TAB)
 if util.is_illegal_ascii_control_character(ch_byte) {
@@ -13,7 +13,7 @@ pub const (
 )

 // Scanner contains the necessary fields for the state of the scan process.
-// the task the scanner does is also refered to as "lexing" or "tokenizing".
+// the task the scanner does is also referred to as "lexing" or "tokenizing".
 // The Scanner methods are based on much of the work in `vlib/strings/textscanner`.
 pub struct Scanner {
 pub:
@@ -178,7 +178,7 @@ fn test_iarna_toml_spec_tests() {
 "'import sys, yaml, json; json.dump(yaml.load(sys.stdin, Loader=yaml.FullLoader), sys.stdout, indent=4)'",
 '<', iarna_yaml_path, '>', converted_json_path]) or {
 contents := os.read_file(iarna_yaml_path)!
-// NOTE there's known errors with the python convertion method.
+// NOTE there's known errors with the python convention method.
 // For now we just ignore them as it's a broken tool - not a wrong test-case.
 // Uncomment this print to see/check them.
 // eprintln(err.msg() + '\n$contents')
@@ -7,7 +7,7 @@ module token
 pub struct Pos {
 pub:
 len int // length of the literal in the source
-line_nr int // the line number in the source where the token occured
+line_nr int // the line number in the source where the token occurred
 pos int // the position of the token in scanner text
-col int // the column in the source where the token occured
+col int // the column in the source where the token occurred
 }
@@ -8,8 +8,8 @@ pub struct Token {
 pub:
 kind Kind // the token number/enum; for quick comparisons
 lit string // literal representation of the token
-col int // the column in the source where the token occured
-line_nr int // the line number in the source where the token occured
+col int // the column in the source where the token occurred
+line_nr int // the line number in the source where the token occurred
 pos int // the position of the token in scanner text
 len int // length of the literal
 }
@@ -34,7 +34,7 @@ pub enum Kind {
 lsbr // [
 rsbr // ]
 nl // \n linefeed / newline character
-cr // \r carrige return
+cr // \r carriage return
 tab // \t character
 whitespace // ` `
 period // .
@@ -161,7 +161,7 @@ fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
 mut right := if i < node.right.len { node.right[i] } else { node.right[0] }
 mut right_type := node.right_types[i]
 if mut right is ast.Ident {
-// resolve shared right vairable
+// resolve shared right variable
 if right_type.has_flag(.shared_f) {
 if c.fail_if_unreadable(right, right_type, 'right-hand side of assignment') {
 return
@@ -1219,7 +1219,7 @@ fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool) ast.
 // ... but 2. disallow passing non-pointers - that is very rarely what the user wanted,
 // it can lead to codegen errors (except for 'magic' functions like `json.encode` that,
 // the compiler has special codegen support for), so it should be opt in, that is it
-// shoould require an explicit voidptr(x) cast (and probably unsafe{} ?) .
+// should require an explicit voidptr(x) cast (and probably unsafe{} ?) .
 if call_arg.typ != param.typ && (param.typ == ast.voidptr_type
 || final_param_sym.idx == ast.voidptr_type_idx
 || param.typ == ast.nil_type || final_param_sym.idx == ast.nil_type_idx)
@@ -3,5 +3,5 @@ vlib/v/checker/tests/infix_sumtype_in_array_err.vv:15:7: error: left operand to
 14 | RuneAlias {
 15 | if x in whitespace {
 | ~~~~~~~~~~~~~~~
-16 | // doing `if x as RuneAlias in whitepsace` here
+16 | // doing `if x as RuneAlias in whitespace` here
 17 | // works but it should be doing that automatically
@@ -13,7 +13,7 @@ fn main() {
 match x {
 RuneAlias {
 if x in whitespace {
-// doing `if x as RuneAlias in whitepsace` here
+// doing `if x as RuneAlias in whitespace` here
 // works but it should be doing that automatically
 // since I'm inside the RuneAlias match condition.
 }
@@ -83,7 +83,7 @@ pub fn (mut e Eval) eval(mut files []&ast.File) {
 e.run_func(e.mods['main']['main'] or { ast.FnDecl{} } as ast.FnDecl)
 }

-// first arg is reciever (if method)
+// first arg is receiver (if method)
 pub fn (mut e Eval) run_func(func ast.FnDecl, _args ...Object) {
 e.back_trace << EvalTrace{func.idx, func.source_file.idx, func.pos.line_nr}
 old_mod := e.cur_mod
@@ -85,10 +85,10 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
 // // }
 // }
 // // println((e.local_vars['s'].val as string).str == voidptr_args[1])
-// println('helo?$voidptr_args')
+// println('hello?$voidptr_args')
 // // println((byteptr(voidptr_args[1])[0]))
 // x := strconv.v_sprintf(args[0] as string, ...voidptr_args)
-// // println('helo!')
+// // println('hello!')
 // // println(x.len)
 // y := C.write(1, x.str, x.len)
 // println('aft')
@@ -5,7 +5,7 @@ fn grouped_cond_single_line() {
 }

 fn unwrap_grouped_conds() {
-// ...but sometimes they have to be splitted
+// ...but sometimes they have to be split
 _ := one_condition && before_condition && (conds_inside_paren
 || are_kept_together || if_possible || but_this_is_really_too_much
 || for_one_line)
@@ -4,7 +4,7 @@ fn grouped_cond_single_line() {
 }

 fn unwrap_grouped_conds() {
-// ...but sometimes they have to be splitted
+// ...but sometimes they have to be split
 _ := one_condition && before_condition && (conds_inside_paren || are_kept_together || if_possible || but_this_is_really_too_much || for_one_line)
 _ := (also_inside_parens || just_as_above || but_this_is_also_more || than_a_single_line_could_fit) && end_cond
 fields = fields.filter((it.typ in [string_type, int_type, bool_type] || c.table.types[int(it.typ)].kind == .struct_) && !it.attrs.contains('skip'))
@@ -82,17 +82,17 @@ mut:
 sorted_global_const_names []string
 file &ast.File = unsafe { nil }
 table &ast.Table = unsafe { nil }
-unique_file_path_hash u64 // a hash of file.path, used for making auxilary fn generation unique (like `compare_xyz`)
+unique_file_path_hash u64 // a hash of file.path, used for making auxiliary fn generation unique (like `compare_xyz`)
 fn_decl &ast.FnDecl = unsafe { nil } // pointer to the FnDecl we are currently inside otherwise 0
 last_fn_c_name string
 tmp_count int // counter for unique tmp vars (_tmp1, _tmp2 etc); resets at the start of each fn.
 tmp_count_af int // a separate tmp var counter for autofree fn calls
 tmp_count_declarations int // counter for unique tmp names (_d1, _d2 etc); does NOT reset, used for C declarations
-global_tmp_count int // like tmp_count but global and not resetted in each function
+global_tmp_count int // like tmp_count but global and not reset in each function
 discard_or_result bool // do not safe last ExprStmt of `or` block in tmp variable to defer ongoing expr usage
 is_direct_array_access bool // inside a `[direct_array_access fn a() {}` function
 is_assign_lhs bool // inside left part of assign expr (for array_set(), etc)
-is_void_expr_stmt bool // ExprStmt whos result is discarded
+is_void_expr_stmt bool // ExprStmt whose result is discarded
 is_arraymap_set bool // map or array set value state
 is_amp bool // for `&Foo{}` to merge PrefixExpr `&` and StructInit `Foo{}`; also for `&u8(0)` etc
 is_sql bool // Inside `sql db{}` statement, generating sql instead of C (e.g. `and` instead of `&&` etc)
@@ -1067,9 +1067,9 @@ fn (mut g Gen) expr_string_surround(prepend string, expr ast.Expr, append string
 return g.out.cut_to(pos)
 }

-// TODO this really shouldnt be seperate from typ
+// TODO this really shouldn't be separate from typ
 // but I(emily) would rather have this generation
-// all unified in one place so that it doesnt break
+// all unified in one place so that it doesn't break
 // if one location changes
 fn (mut g Gen) option_type_name(t ast.Type) (string, string) {
 mut base := g.base_type(t)
@@ -2591,7 +2591,7 @@ fn cescape_nonascii(original string) string {
 return res
 }

-// cestring returns a V string, properly escaped for embeddeding in a C string literal.
+// cestring returns a V string, properly escaped for embedding in a C string literal.
 fn cestring(s string) string {
 return s.replace('\\', '\\\\').replace('"', "'")
 }
@@ -2632,7 +2632,7 @@ fn (mut g Gen) asm_stmt(stmt ast.AsmStmt) {
 } else {
 g.write(' ')
 }
-// swap destionation and operands for att syntax
+// swap destination and operands for att syntax
 if template.args.len != 0 && !template.is_directive {
 template.args.prepend(template.args.last())
 template.args.delete(template.args.len - 1)
@@ -5045,7 +5045,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
 g.write('(${styp}){')
 mut arg_idx := 0
 for i, expr in node.exprs {
-// Check if we are dealing with a multi return and handle it seperately
+// Check if we are dealing with a multi return and handle it separately
 if g.expr_is_multi_return_call(expr) {
 call_expr := expr as ast.CallExpr
 expr_sym := g.table.sym(call_expr.return_type)
@@ -5803,7 +5803,7 @@ fn (mut g Gen) write_init_function() {
 g.writeln('\t_closure_mtx_init();')
 }

-// reflection bootstraping
+// reflection bootstrapping
 if g.has_reflection {
 if var := g.global_const_defs['g_reflection'] {
 g.writeln(var.init)
@@ -6185,7 +6185,7 @@ fn (mut g Gen) sort_structs(typesa []&ast.TypeSymbol) []&ast.TypeSymbol {
 // sort graph
 dep_graph_sorted := dep_graph.resolve()
 if !dep_graph_sorted.acyclic {
-// this should no longer be called since it's catched in the parser
+// this should no longer be called since it's in the parser
 // TODO: should it be removed?
 verror('cgen.sort_structs(): the following structs form a dependency cycle:\n' +
 dep_graph_sorted.display_cycles() +
@@ -6257,7 +6257,7 @@ fn (mut g Gen) gen_or_block_stmts(cvar_name string, cast_typ string, stmts []ast
 g.indent--
 }

-// If user is accessing the return value eg. in assigment, pass the variable name.
+// If user is accessing the return value eg. in assignment, pass the variable name.
 // If the user is not using the option return value. We need to pass a temp var
 // to access its fields (`.ok`, `.error` etc)
 // `os.cp(...)` => `Option bool tmp = os__cp(...); if (tmp.state != 0) { ... }`
@@ -6768,7 +6768,7 @@ fn (mut g Gen) interface_table() string {
 cast_struct.writeln('\t\t.${cname} = (${field_styp}*)((char*)x + __offsetof_ptr(x, ${cctype}, ${cname})),')
 } else if st_sym.kind == .array
 && field.name in ['element_size', 'data', 'offset', 'len', 'cap', 'flags'] {
-// Manaully checking, we already knows array contains above fields
+// Manually checking, we already knows array contains above fields
 cast_struct.writeln('\t\t.${cname} = (${field_styp}*)((char*)x + __offsetof_ptr(x, ${cctype}, ${cname})),')
 } else {
 // the field is embedded in another struct
@@ -2018,7 +2018,7 @@ fn (mut g Gen) call_args(node ast.CallExpr) {
 }
 }
 }
-// only v variadic, C variadic args will be appeneded like normal args
+// only v variadic, C variadic args will be appended like normal args
 is_variadic := expected_types.len > 0 && expected_types.last().has_flag(.variadic)
 && node.language == .v
 for i, arg in args {
@@ -603,7 +603,7 @@ fn (mut g Gen) infix_expr_in_op(node ast.InfixExpr) {
 }

 // infix_expr_in_optimization optimizes `<var> in <array>` expressions,
-// and transform them in a serie of equality comparison
+// and transform them in a series of equality comparison
 // i.e. `a in [1,2,3]` => `a == 1 || a == 2 || a == 3`
 fn (mut g Gen) infix_expr_in_optimization(left ast.Expr, right ast.ArrayInit) {
 mut elem_sym := g.table.sym(right.elem_type)
@@ -202,7 +202,7 @@ fn (mut g Gen) gen_reflection_sym_info(tsym ast.TypeSymbol) string {
 }
 }

-// gen_reflection_data generates code to initilized V reflection metadata
+// gen_reflection_data generates code to initialized V reflection metadata
 fn (mut g Gen) gen_reflection_data() {
 // modules declaration
 for mod_name in g.table.modules {
@@ -48,7 +48,7 @@ fn (mut g Gen) get_default_fmt(ftyp ast.Type, typ ast.Type) u8 {

 fn (mut g Gen) str_format(node ast.StringInterLiteral, i int, fmts []u8) (u64, string) {
 mut base := 0 // numeric base
-mut upper_case := false // set upercase for the result string
+mut upper_case := false // set uppercase for the result string
 mut typ := g.unwrap_generic(node.expr_types[i])
 if node.exprs[i].is_auto_deref_var() {
 typ = typ.deref()
@@ -1,4 +1,4 @@
-type ParseRes = Result<[]Token, ParseErr>
+type ParseRes = Result[[]Token, ParseErr]

 struct Token {}

@@ -26,7 +26,7 @@ fn main() {
 r := Opt[ParseRes](None[ParseRes]{})
 match r {
 Some[ParseRes] {
-// make possible cast fo the same type!
+// make possible cast of the same type!
 rx := Result[[]Token, ParseErr](r.value)
 }
 None[ParseRes] {}
@@ -64,7 +64,7 @@ struct Type {
 typ ast.Type [required]
 sym &ast.TypeSymbol [required]
 // unaliased is `typ` once aliased have been resolved
-// it may not contain informations such as flags and nr_muls
+// it may not contain information such as flags and nr_muls
 unaliased ast.Type [required]
 unaliased_sym &ast.TypeSymbol [required]
 }
@@ -3507,7 +3507,7 @@ fn (mut g JsGen) gen_typeof_expr(it ast.TypeOf) {
 }

 fn (mut g JsGen) gen_cast_tmp(tmp string, typ_ ast.Type) {
-// Skip cast if type is the same as the parrent caster
+// Skip cast if type is the same as the parent caster
 tsym := g.table.final_sym(typ_)
 if !g.pref.output_es5 && (tsym.kind == .i64 || tsym.kind == .u64) {
 g.write('new ')
@@ -3586,7 +3586,7 @@ fn (mut g JsGen) gen_type_cast_expr(it ast.CastExpr) {
 return
 }

-// Skip cast if type is the same as the parrent caster
+// Skip cast if type is the same as the parent caster
 tsym := to_type_sym
 if tsym.kind == .sum_type {
 g.expr(it.expr)
@@ -3651,7 +3651,7 @@ fn (mut g JsGen) gen_integer_literal_expr(it ast.IntegerLiteral) {
 }
 }

-// Skip cast if type is the same as the parrent caster
+// Skip cast if type is the same as the parent caster
 if g.cast_stack.len > 0 {
 if g.cast_stack.last() in ast.integer_type_idxs {
 g.write('new ')
@@ -3688,7 +3688,7 @@ fn (mut g JsGen) gen_float_literal_expr(it ast.FloatLiteral) {
 }
 }

-// Skip cast if type is the same as the parrent caster
+// Skip cast if type is the same as the parent caster
 if g.cast_stack.len > 0 {
 if g.cast_stack.last() in ast.float_type_idxs {
 g.write('new f32(${it.val})')
@@ -30,7 +30,7 @@ fn test_example_compilation() {
 println('activate -sourcemap creation')
 v_options_file += ' -sourcemap' // activate souremap generation

-println('add node option: --enable-source-maps') // requieres node >=12.12.0
+println('add node option: --enable-source-maps') // requires node >=12.12.0
 node_options_file += ' --enable-source-maps' // activate souremap generation
 }
 jsfile := os.join_path_single(output_dir, '${file}.js')
@@ -7,7 +7,7 @@ struct Type {
 typ ast.Type [required]
 sym &ast.TypeSymbol [required] = unsafe { nil }
 // unaliased is `typ` once aliased have been resolved
-// it may not contain informations such as flags and nr_muls
+// it may not contain information such as flags and nr_muls
 unaliased ast.Type [required]
 unaliased_sym &ast.TypeSymbol [required] = unsafe { nil }
 }
@@ -925,7 +925,7 @@ fn (mut c Amd64) mov_extend_reg(a Amd64Register, b Amd64Register, typ ast.Type)
 fn (mut c Amd64) call_addr_at(addr i32, at i64) i64 {
 // Need to calculate the difference between current position (position after the e8 call)
 // and the function to call.f
-// +5 is to get the posistion "e8 xx xx xx xx"
+// +5 is to get the position "e8 xx xx xx xx"
 // Not sure about the -1.
 return 0xffffffff - (at + 5 - i64(addr) - 1)
 }
@@ -3466,7 +3466,7 @@ fn (mut c Amd64) convert_int_to_string(a Register, b Register) {
 }
 c.g.println('; jump to label ${skip_zero_label}')

-// handle zeros seperately
+// handle zeros separately
 // c.mov_int_to_var(LocalVar{buffer, ast.u8_type_idx, ''}, '0'[0])

 c.g.write8(0xc6)
@@ -19,7 +19,7 @@ mut:
 calls []i64 // call addresses
 }

-pub const inline_builtins = ['assert', 'print', 'eprint', 'println', 'eprintln', 'exit', 'C.syscall'] // classic V builtin functios accessible to the user get inlined
+pub const inline_builtins = ['assert', 'print', 'eprint', 'println', 'eprintln', 'exit', 'C.syscall'] // classic V builtin functions accessible to the user get inlined

 pub fn (mut g Gen) init_builtins() {
 g.builtins = {
@@ -375,7 +375,7 @@ mut:
 typ i32 // Type of the node

 name []u8 // Name string of the note.
-desc []u8 // Descripition string of the node, must be aligned by 4 bytes
+desc []u8 // Description string of the node, must be aligned by 4 bytes
 }

 fn (mut g Gen) create_note_section(typ i32, name string, desc string) NoteSection {
@@ -348,7 +348,7 @@ fn (mut g Gen) gen_print_from_expr(expr ast.Expr, typ ast.Type, name string) {
 }
 ast.BoolLiteral {
 // register 'true' and 'false' strings // g.expr(expr)
-// XXX mov64 shuoldnt be used for addressing
+// XXX mov64 shouldn't be used for addressing
 nl := if newline { '\n' } else { '' }

 if expr.val {
@@ -83,7 +83,7 @@ mut:
 address_size() i32
 adr(r Arm64Register, delta i32) // Note: Temporary!
 allocate_var(name string, size i32, initial_val i32) i32
-assign_stmt(node ast.AssignStmt) // TODO: make platform-independant
+assign_stmt(node ast.AssignStmt) // TODO: make platform-independent
 builtin_decl(builtin BuiltinFn)
 call_addr_at(addr i32, at i64) i64
 call_builtin(name Builtin) i64
@@ -107,7 +107,7 @@ mut:
 gen_print(s string, fd i32)
 gen_syscall(node ast.CallExpr)
 inc_var(var Var, config VarConfig)
-infix_expr(node ast.InfixExpr) // TODO: make platform-independant
+infix_expr(node ast.InfixExpr) // TODO: make platform-independent
 infloop()
 init_struct(var Var, init ast.StructInit)
 init_array(var Var, init ast.ArrayInit)
@@ -520,7 +520,7 @@ pub fn (mut g Gen) link(obj_name string) {
 g.link_elf_file(obj_name)
 }
 .windows {
-// windows linking is alredy done before codegen
+// windows linking is already done before codegen
 }
 .macos {
 // TODO: implement linking for macos!
@@ -29,7 +29,7 @@ const (
 pe_heap_size = 0x100000 // gcc default on windows
 // tcc defaults
 pe_major_linker_version = 6
-pe_minor_linker_verion = 0
+pe_minor_linker_version = 0
 pe_major_os_version = 4
 pe_minor_os_version = 0
 pe_major_subsystem_version = 4
@@ -236,7 +236,7 @@ fn (mut g Gen) get_pe32_plus_optional_header() Pe32PlusOptionalHeader {
 return Pe32PlusOptionalHeader{
 magic: .pe32plus
 major_linker_version: native.pe_major_linker_version
-minor_linker_version: native.pe_minor_linker_verion
+minor_linker_version: native.pe_minor_linker_version
 image_base: native.image_base
 section_alignment: native.pe_section_align
 file_alignment: native.pe_file_align
@@ -262,7 +262,7 @@ enum Pe32PlusOPtionalHeaderField {
 number_of_rva_and_sizes = 108
 }

-// implemented because __offsetof() + [packed] structs wasn't consistend across OSs
+// implemented because __offsetof() + [packed] structs wasn't consistent across OSs
 [inline]
 fn pe32_plus_optional_header_offsetof(field Pe32PlusOPtionalHeaderField) i64 {
 return i64(field)
@@ -449,7 +449,7 @@ enum PeSectionHeaderField {
 pointer_to_raw_data = 20
 }

-// implemented because __offsetof() + [packed] structs wasn't consistend across OSs
+// implemented because __offsetof() + [packed] structs wasn't consistent across OSs
 [inline]
 fn pe_section_header_offsetof(field PeSectionHeaderField) i64 {
 return i64(field)
@@ -560,7 +560,7 @@ enum PeImportDirectoryTableField {
 import_address_table_rva = 16
 }

-// implemented because __offsetof() + [packed] structs wasn't consistend across OSs
+// implemented because __offsetof() + [packed] structs wasn't consistent across OSs
 [inline]
 fn pe_idt_offsetof(field PeImportDirectoryTableField) i64 {
 return i64(field)
@@ -667,7 +667,7 @@ fn (mut g Gen) gen_pe_idata() {
 g.zeroes(4)
 g.println('; null entry')

-// dll names
+// dll names
 for imp in imports {
 g.write32_at(imp.idt_pos + pe_idt_offsetof(.name_rva), i32(g.pos() - idata_pos) +
 idata_section.header.virtual_address)
@@ -423,7 +423,7 @@ pub fn (mut g Gen) set_set(v Var) {
 // set pointers with value, get local, store value
 // set value, set local
 // -- set works with a single value present on the stack beforehand
-// -- not optimial for copying stack memory or shuffling structs
+// -- not optimal for copying stack memory or shuffling structs
 // -- use mov instead
 pub fn (mut g Gen) set(v Var) {
 if !v.is_address {
@@ -22,7 +22,7 @@ If everything works fine, the output of the generated program would have changed
 which then is detected by the test program (the histogram checks).

 Since this test program is sensitive to coordination (or lack of) of several processes,
-it tries to sidestep the coordination issue by polling the file system for the existance
+it tries to sidestep the coordination issue by polling the file system for the existence
 of files, ORIGINAL.txt ... STOP.txt , which are appended to by the generated program.

 Note: That approach of monitoring the state of the running generated program, is clearly not ideal,
@@ -94,7 +94,7 @@ pub fn (mut p Preferences) fill_with_defaults() {
 // compilers.
 //
 // If you do decide to break it, please *at the very least*, test it
-// extensively, and make a PR about it, instead of commiting directly
+// extensively, and make a PR about it, instead of committing directly
 // and breaking the CI, VC, and users doing `v up`.
 if rpath == '${p.vroot}/cmd/v' && os.is_dir('vlib/compiler') {
 // Building V? Use v2, since we can't overwrite a running
@@ -129,7 +129,7 @@ pub fn (mut p Preferences) fill_with_defaults() {
 p.parse_define('emscripten')
 }
 if p.os == ._auto {
-// No OS specifed? Use current system
+// No OS specified? Use current system
 p.os = if p.backend != .wasm { get_host_os() } else { .wasi }
 }
 //
@ -29,9 +29,9 @@ pub enum GarbageCollectionMode {
unknown
no_gc
boehm_full // full garbage collection mode
boehm_incr // incremental garbage colletion mode
boehm_incr // incremental garbage collection mode
boehm_full_opt // full garbage collection mode
boehm_incr_opt // incremental garbage colletion mode
boehm_incr_opt // incremental garbage collection mode
boehm_leak // leak detection mode (makes `gc_check_leaks()` work)
}
@ -133,7 +133,7 @@ pub mut:
is_help bool // -h, -help or --help was passed
is_quiet bool // do not show the repetitive explanatory messages like the one for `v -prod run file.v` .
is_cstrict bool // turn on more C warnings; slightly slower
eval_argument string // `println(2+2)` on `v -e "println(2+2)"`. Note that this souce code, will be evaluated in vsh mode, so 'v -e 'println(ls(".")!)' is valid.
eval_argument string // `println(2+2)` on `v -e "println(2+2)"`. Note that this source code, will be evaluated in vsh mode, so 'v -e 'println(ls(".")!)' is valid.
test_runner string // can be 'simple' (fastest, but much less detailed), 'tap', 'normal'
profile_file string // the profile results will be stored inside profile_file
profile_no_inline bool // when true, [inline] functions would not be profiled
@ -151,7 +151,7 @@ pub mut:
show_callgraph bool // -show-callgraph, print the program callgraph, in a Graphviz DOT format to stdout
show_depgraph bool // -show-depgraph, print the program module dependency graph, in a Graphviz DOT format to stdout
dump_c_flags string // `-dump-c-flags file.txt` - let V store all C flags, passed to the backend C compiler in `file.txt`, one C flag/value per line.
dump_modules string // `-dump-modules modules.txt` - let V store all V modules, that were used by the compiled program in `modules.txt`, one module per line.
dump_modules string // `-dump-modules modules.txt` - let V store all V modules, that were used by the compiled program in `modules.txt`, one module per line.
dump_files string // `-dump-files files.txt` - let V store all V or .template file paths, that were used by the compiled program in `files.txt`, one path per line.
dump_defines string // `-dump-defines defines.txt` - let V store all the defines that affect the current program and their values, one define per line + `,` + its value.
use_cache bool // when set, use cached modules to speed up subsequent compilations, at the cost of slower initial ones (while the modules are cached)
@ -189,7 +189,7 @@ pub mut:
prealloc bool
vroot string
vlib string // absolute path to the vlib/ folder
vmodules_paths []string // absolute paths to the vmodules folders, by default ['/home/user/.vmodules'], can be overriden by setting VMODULES
vmodules_paths []string // absolute paths to the vmodules folders, by default ['/home/user/.vmodules'], can be overridden by setting VMODULES
out_name_c string // full os.real_path to the generated .tmp.c file; set by builder.
out_name string
path string // Path to file/folder to compile

@ -108,8 +108,8 @@ pub fn change_test_runner(x &TestRunner) {
test_runner = *x
}

// vtest_init will be caled *before* the normal _vinit() function,
// to give a chance to the test runner implemenation to change the
// vtest_init will be called *before* the normal _vinit() function,
// to give a chance to the test runner implementation to change the
// test_runner global variable. The reason vtest_init is called before
// _vinit, is because a _test.v file can define consts, and they in turn
// may use function calls in their declaration, which may do assertions.

@ -74,7 +74,7 @@ fn test_inline_asm() {
// assert loops == 1
// assert k == 5

// not marked as mut because we derefernce m to change l
// not marked as mut because we dereference m to change l
l := 5
m := &l
asm amd64 {

@ -74,7 +74,7 @@ fn test_inline_asm() {
// assert loops == 1
// assert k == 5

// not marked as mut because we derefernce m to change l
// not marked as mut because we dereference m to change l
l := 5
m := &l
asm i386 {

@ -83,7 +83,7 @@ fn test_self_slice_push() {
fn test_slice_push_child() {
mut a := [1.0, 2.0625, 3.5, -7.75, 7.125, 8.4375, 0.5]
mut b := unsafe { a[2..6] } // `b` is initially created as reference
mut c := unsafe { b[1..3] } // `c` is initiall reference to `a` and `b`
mut c := unsafe { b[1..3] } // `c` is initially created as references to `a` and `b`
b << -2.25 // `b` should be reallocated, so `a` doesn't change
c[1] = -13.5 // this should change `c` and `a` but not `b`
assert c == [-7.75, -13.5]

@ -8,7 +8,7 @@ fn inc_elements(shared foo []int, n int, mut sem sync.Semaphore) {
for _ in 0 .. iterations_per_thread2 {
foo[n]++
}
sem.post() // indicat that thread is finished
sem.post() // indicate that thread is finished
}

fn test_autolocked_array_2() {

@ -3,7 +3,7 @@ struct MyHeapStruct {
name string
}

// make sure dereferencing of heap stucts works in selector expr (in tmpl),
// make sure dereferencing of heap structs works in selector expr (in tmpl),
fn test_heap_struct_dereferencing_in_selector_expr() {
a := MyHeapStruct{
name: 'my_heap_struct_a'

@ -74,7 +74,7 @@ fn min[T](tree Tree[T]) T {
}
}

// delete a value in BST (if nonexistant do nothing)
// delete a value in BST (if nonexistent do nothing)
fn delete[T](tree Tree[T], x T) Tree[T] {
return match tree {
Empty {

@ -69,7 +69,7 @@ fn test_generics_method_return_generics_struct() {
assert foo.get[int](o) == 42
}

// test genrics struct str()
// test generic struct str()
pub struct ArrayIterator[T] {
data []T
mut:

@ -53,7 +53,7 @@ fn test_propagate() {
assert z == 31.0
}

fn test_array_void_interate() {
fn test_array_void_iter() {
mut r := []thread ?{}
for i in 0 .. 3 {
r << spawn g(i)
@ -67,7 +67,7 @@ fn test_array_void_interate() {
assert res[2] == 2
}

fn test_array_val_interate() {
fn test_array_val_iter() {
mut r := []thread ?f64{}
for i in -1 .. 2 {
r << spawn f(i)

@ -43,7 +43,7 @@ pub struct ContainerBase {
ViewBase
}

// want to excute this method
// want to execute this method
pub fn (mut cb ContainerBase) init(window &Window) {
dump(@METHOD)
assert true

@ -74,7 +74,7 @@ fn min(tree Tree) f64 {
}
}

// delete a value in BST (if nonexistant do nothing)
// delete a value in BST (if nonexistent do nothing)
fn delete(tree Tree, x f64) Tree {
return match tree {
Empty {

@ -74,7 +74,7 @@ fn min(tree Tree) f64 {
}
}

// delete a value in BST (if nonexistant do nothing)
// delete a value in BST (if nonexistent do nothing)
fn delete(tree Tree, x f64) Tree {
return match tree {
Empty {

@ -1,11 +1,11 @@
module mod1

// Note: the function here, should be overriden by the one in the wrapper.c.v file with the same name
// Note: the function here, should be overridden by the one in the wrapper.c.v file with the same name
pub fn vadd(a int, b int) int {
return 123456
}

// this should NOT be overriden by the different wrapper.X.v files:
// this should NOT be overridden by the different wrapper.X.v files:
pub fn a_common_pure_v_fn() int {
return 987654
}

@ -7,7 +7,7 @@ import os
// The comment above, should make it try re-running the same test,
// a maximum of 2 times. It will fail for all, but the last retry.
// This is useful for reducing false positives on the CI, due to
// flakyness of specific tests like `vlib/v/live/live_test.v` for example.
// flakiness of specific tests like `vlib/v/live/live_test.v` for example.

// Note: this test is supposed to be run with `v test retry_test.v`.
// Running just `v retry_test.v` WILL fail.

@ -44,7 +44,7 @@ fn doub_large(shared a Large, shared b Large, shared c Large, shared d Large, sh
}

fn test_mixed_order_lock_rlock() {
// initialze objects so that their sum = 1
// initialize objects so that their sum = 1
shared a := Large{
l: 4
}

@ -2,7 +2,7 @@
// a) an array of custom structs,
// b) also for the custom struct itself (when the .str() for it is missing).
//
// Note: this is very simillar to string_interpolation_struct_test.v
// Note: this is very similar to string_interpolation_struct_test.v
// but they should NOT be merged into 1 file. If you merge it with
// string_interpolation_struct_test.v, which tests whether the compiler
// can generate the default method for a struct, then the b) case of

@ -312,19 +312,19 @@ fn test_levels() {
}
}

// Struct where an inizialized field is after a non-initilized field.
// Struct where an initialized field is after a non-initialized field.
struct StructWithDefaultValues1 {
field_uninitialized int
field_initialized int = 5
}

// Struct where an inizialized field is before a non-initilized field.
// Struct where an initialized field is before a non-initialized field.
struct StructWithDefaultValues2 {
field_initialized int = 3
field_uninitialized int
}

// Struct where an inizialized field is before several non-initilized fields.
// Struct where an initialized field is before several non-initialized fields.
struct StructWithDefaultValues3 {
field_initialized int = 2
field_uninitialized int

@ -21,7 +21,7 @@ fn test_sumtype_equality() {
assert u1 != u2
assert u1 != s1

// Same value, defferent type
// Same value, different type
foo := FooBar(Foo{
v: 0
})

@ -45,5 +45,5 @@ fn test_main() {
fn test_opt_call() {
mut t := ?Test{}
fn_a(none) // returns none
fn_b(t) // returs none
fn_b(t) // returns none
}

@ -1,6 +1,6 @@
module token

// KeywordsMatcherTrie provides a faster way of determinining whether a given name
// KeywordsMatcherTrie provides a faster way of determining whether a given name
// is a reserved word (belongs to a given set of previously known words `R`).
// See the module description for more details.
[heap]

@ -6,9 +6,9 @@ module token
pub struct Pos {
pub:
len int // length of the literal in the source
line_nr int // the line number in the source where the token occured
line_nr int // the line number in the source where the token occurred
pos int // the position of the token in scanner text
col int // the column in the source where the token occured
col int // the column in the source where the token occurred
pub mut:
last_line int // the line number where the ast object ends (used by vfmt)
}

@ -70,7 +70,7 @@ fn (mut i IndexState) safe_access(key string, new int) bool {
return true
}

// safe_offset returns for a previvous array what was the highest
// safe_offset returns for a previous array what was the highest
// offset we ever accessed for that identifier
fn (mut i IndexState) safe_offset(key string) int {
$if no_bounds_checking {

@ -494,7 +494,7 @@ pub fn (mut t Transformer) for_stmt(mut node ast.ForStmt) ast.Stmt {
node.cond = t.expr(mut node.cond)
match node.cond {
ast.BoolLiteral {
if !(node.cond as ast.BoolLiteral).val { // for false { ... } should be eleminated
if !(node.cond as ast.BoolLiteral).val { // for false { ... } should be eliminated
return ast.empty_stmt
}
}

@ -111,7 +111,7 @@ pub fn short_module_name(name string) string {
}

// highlight_suggestion returns a colorfull/highlighted version of `message`,
// but only if the standart error output allows for color messages, otherwise
// but only if the standard error output allows for color messages, otherwise
// the plain message will be returned.
pub fn highlight_suggestion(message string) string {
return term.ecolorize(term.bright_blue, message)

@ -493,7 +493,7 @@ pub fn recompile_file(vexe string, file string) {
}

// get_vtmp_folder returns the path to a folder, that is writable to V programs,
// and specific to the user. It can be overriden by setting the env variable `VTMP`.
// and specific to the user. It can be overridden by setting the env variable `VTMP`.
pub fn get_vtmp_folder() string {
return os.vtmp_dir()
}

@ -441,7 +441,7 @@ struct User {
}

fn get_session(mut ctx vweb.Context) bool {
// impelement your own logic to get the user
// implement your own logic to get the user
user := User{
session_id: '123456'
name: 'Vweb'

Some files were not shown because too many files have changed in this diff.