mirror of
https://github.com/vlang/v.git
synced 2025-09-13 14:32:26 +03:00
all: change single blank comment to blank line (#22016)
This commit is contained in:
parent
793b66d8d5
commit
19f080ffb8
147 changed files with 319 additions and 339 deletions
|
@ -93,7 +93,7 @@ pub mut:
|
|||
failed_cmds shared []string
|
||||
reporter Reporter = Reporter(NormalReporter{})
|
||||
hash string // used as part of the name of the temporary directory created for tests, to ease cleanup
|
||||
//
|
||||
|
||||
exec_mode ActionMode = .compile // .compile_and_run only for `v test`
|
||||
}
|
||||
|
||||
|
@ -368,7 +368,7 @@ pub fn (mut ts TestSession) test() {
|
|||
if current_wd == os.wd_at_startup && current_wd == ts.vroot {
|
||||
ts.root_relative = true
|
||||
}
|
||||
//
|
||||
|
||||
ts.init()
|
||||
mut remaining_files := []string{}
|
||||
for dot_relative_file in ts.files {
|
||||
|
|
|
@ -7,14 +7,14 @@ pub enum MessageKind {
|
|||
compile_end // sent right after *each* _test.v file compilation, the message contains the output of that compilation
|
||||
cmd_begin // sent right before *each* _test.v file execution, the resulting status is not known yet, but the _test.v file itself is
|
||||
cmd_end // sent right after *each* _test.v file execution, the message contains the output of that execution
|
||||
//
|
||||
|
||||
ok // success of a _test.v file
|
||||
fail // failed _test.v file, one or more assertions failed
|
||||
skip // the _test.v file was skipped for some reason
|
||||
info // a generic information message, detailing the actions of the `v test` program (some tests could be repeated for example, and the details are sent with an .info status)
|
||||
//
|
||||
|
||||
cannot_compile // when the _test.v file compiled with errors
|
||||
//
|
||||
|
||||
sentinel // send just once after all executions are done; it signals that the reporting/printing thread should stop the loop and exit
|
||||
}
|
||||
|
||||
|
|
|
@ -56,7 +56,7 @@ pub fn prepare_vc_source(vcdir string, cdir string, commit string) (string, stri
|
|||
check_v_commit_timestamp_before_self_rebuilding(v_timestamp)
|
||||
scripting.chdir(vcdir)
|
||||
scripting.run('git checkout --quiet master')
|
||||
//
|
||||
|
||||
mut vccommit := ''
|
||||
mut partial_hash := v_commithash[0..7]
|
||||
if '5b7a1e8'.starts_with(partial_hash) {
|
||||
|
|
|
@ -124,7 +124,7 @@ fn json(file string) string {
|
|||
pref_.fill_with_defaults()
|
||||
pref_.enable_globals = true
|
||||
pref_.is_fmt = true
|
||||
//
|
||||
|
||||
mut t := Tree{
|
||||
root: new_object()
|
||||
table: ast.new_table()
|
||||
|
|
|
@ -11,7 +11,7 @@ struct CounterLine {
|
|||
mut:
|
||||
file string // retrieved based on the loaded meta
|
||||
line int // retrieved based on the loaded meta
|
||||
//
|
||||
|
||||
meta string // A filename in the sibling meta/ folder, should exist, to match the value from this field. The filename is a hash of both the path and the used build options, to facilitate merging coverage data from different builds/programs
|
||||
point int // The index of a source point. Note that it is not a line number, but an index in the meta data file, keyed by the field `meta` above.
|
||||
hits u64 // How many times the coverage point was executed. Only counters that are != 0 are recorded.
|
||||
|
|
|
@ -21,11 +21,11 @@ mut:
|
|||
be_verbose bool
|
||||
filter string
|
||||
working_folder string
|
||||
//
|
||||
|
||||
targets []string
|
||||
meta map[string]MetaData // aggregated meta data, read from all .json files
|
||||
all_lines_per_file map[string][]int // aggregated by load_meta
|
||||
//
|
||||
|
||||
counters map[string]u64 // incremented by process_target, based on each .csv file
|
||||
lines_per_file map[string]map[int]int // incremented by process_target, based on each .csv file
|
||||
processed_points u64
|
||||
|
|
|
@ -112,7 +112,7 @@ fn process_cli_args() &Context {
|
|||
context.cut_index = fp.int('cut_index', `c`, 1, 'worker specific flag - cut index in the source file, everything before that will be parsed, the rest - ignored.')
|
||||
context.timeout_ms = fp.int('timeout_ms', `t`, 250, 'worker specific flag - timeout in ms; a worker taking longer, will self terminate.')
|
||||
context.path = fp.string('path', `p`, '', 'worker specific flag - path to the current source file, which will be parsed.')
|
||||
//
|
||||
|
||||
if context.is_help {
|
||||
println(fp.usage())
|
||||
exit(0)
|
||||
|
|
|
@ -13,7 +13,7 @@ struct App {
|
|||
is_prod bool
|
||||
vexe string
|
||||
vroot string
|
||||
//
|
||||
|
||||
skip_v_self bool // do not run `v self`, effectively enforcing the running of `make` or `make.bat`
|
||||
skip_current bool // skip the current hash check, enabling easier testing on the same commit, without using docker etc
|
||||
}
|
||||
|
|
|
@ -60,7 +60,7 @@ fn (mut state AppState) update() {
|
|||
for t in 0 .. state.ntasks {
|
||||
threads << spawn state.worker(t, chunk_channel, chunk_ready_channel)
|
||||
}
|
||||
//
|
||||
|
||||
mut oview := ViewRect{}
|
||||
mut sw := time.new_stopwatch()
|
||||
for {
|
||||
|
|
|
@ -167,18 +167,18 @@ fn (mut a App) draw_menu() {
|
|||
y025 := int(f32(a.height) * 0.25)
|
||||
y075 := int(f32(a.height) * 0.75)
|
||||
cy := int(f32(a.height) * 0.5)
|
||||
//
|
||||
|
||||
a.tui.set_color(white)
|
||||
a.tui.bold()
|
||||
a.tui.draw_text(cx - 2, y025, 'VONG')
|
||||
a.tui.reset()
|
||||
a.tui.draw_text(cx - 13, y025 + 1, '(A game of Pong written in V)')
|
||||
//
|
||||
|
||||
a.tui.set_color(white)
|
||||
a.tui.bold()
|
||||
a.tui.draw_text(cx - 3, cy + 1, 'START')
|
||||
a.tui.reset()
|
||||
//
|
||||
|
||||
a.tui.draw_text(cx - 9, y075 + 1, 'Press SPACE to start')
|
||||
a.tui.reset()
|
||||
a.tui.draw_text(cx - 5, y075 + 3, 'ESC to Quit')
|
||||
|
|
|
@ -436,7 +436,7 @@ fn test_map_of_indexes() {
|
|||
assert arrays.map_of_indexes([1, 2, 3, 999]) == {1: [0], 2: [1], 3: [2], 999: [3]}
|
||||
assert arrays.map_of_indexes([999, 1, 2, 3]) == {1: [1], 2: [2], 3: [3], 999: [0]}
|
||||
assert arrays.map_of_indexes([1, 2, 3, 4, 4, 2, 1, 4, 4, 999]) == {1: [0, 6], 2: [1, 5], 3: [2], 4: [3, 4, 7, 8], 999: [9]}
|
||||
//
|
||||
|
||||
assert arrays.map_of_indexes([]string{}) == {}
|
||||
assert arrays.map_of_indexes(['abc']) == {'abc': [0]}
|
||||
assert arrays.map_of_indexes(['abc', 'abc']) == {'abc': [0, 1]}
|
||||
|
@ -451,7 +451,7 @@ fn test_map_of_counts() {
|
|||
assert map_of_counts([1, 2, 3, 999]) == {1: 1, 2: 1, 3: 1, 999: 1}
|
||||
assert map_of_counts([999, 1, 2, 3]) == {1: 1, 2: 1, 3: 1, 999: 1}
|
||||
assert map_of_counts([1, 2, 3, 4, 4, 2, 1, 4, 4, 999]) == {1: 2, 2: 2, 3: 1, 4: 4, 999: 1}
|
||||
//
|
||||
|
||||
assert map_of_counts([]string{}) == {}
|
||||
assert map_of_counts(['abc']) == {'abc': 1}
|
||||
assert map_of_counts(['abc', 'abc']) == {'abc': 2}
|
||||
|
@ -550,7 +550,7 @@ fn test_each() {
|
|||
for x in a {
|
||||
control_sum += x
|
||||
}
|
||||
//
|
||||
|
||||
each(a, fn (x int) {
|
||||
println(x)
|
||||
})
|
||||
|
@ -573,7 +573,7 @@ fn test_each_indexed() {
|
|||
for idx, x in a {
|
||||
control_sum += f(idx, x)
|
||||
}
|
||||
//
|
||||
|
||||
each_indexed(a, fn (idx int, x int) {
|
||||
println('idx: ${idx}, x: ${x}')
|
||||
})
|
||||
|
|
|
@ -19,7 +19,7 @@ fn test_record_measure() {
|
|||
assert x > 50_000
|
||||
// assert x < 200_000
|
||||
flush_stdout()
|
||||
//
|
||||
|
||||
println('step 2')
|
||||
flush_stdout()
|
||||
time.sleep(150 * time.millisecond)
|
||||
|
@ -27,7 +27,7 @@ fn test_record_measure() {
|
|||
assert y > 100_000
|
||||
// assert y < 200_000
|
||||
flush_stdout()
|
||||
//
|
||||
|
||||
res := b.all_recorded_measures()
|
||||
println('All recorded measurements:')
|
||||
println(res)
|
||||
|
|
|
@ -1279,7 +1279,7 @@ fn test_push_arr_string_free() {
|
|||
lines << s
|
||||
// make sure the data in the array is valid after freeing the string
|
||||
unsafe { s.free() }
|
||||
//
|
||||
|
||||
println(lines)
|
||||
assert lines.len == 2
|
||||
assert lines[0] == 'hi'
|
||||
|
|
|
@ -125,22 +125,22 @@ pub:
|
|||
name string // the name of the field f
|
||||
typ int // the internal TypeID of the field f,
|
||||
unaliased_typ int // if f's type was an alias of int, this will be TypeID(int)
|
||||
//
|
||||
|
||||
attrs []string // the attributes of the field f
|
||||
is_pub bool // f is in a `pub:` section
|
||||
is_mut bool // f is in a `mut:` section
|
||||
//
|
||||
|
||||
is_shared bool // `f shared Abc`
|
||||
is_atomic bool // `f atomic int` , TODO
|
||||
is_option bool // `f ?string` , TODO
|
||||
//
|
||||
|
||||
is_array bool // `f []string` , TODO
|
||||
is_map bool // `f map[string]int` , TODO
|
||||
is_chan bool // `f chan int` , TODO
|
||||
is_enum bool // `f Enum` where Enum is an enum
|
||||
is_struct bool // `f Abc` where Abc is a struct , TODO
|
||||
is_alias bool // `f MyInt` where `type MyInt = int`, TODO
|
||||
//
|
||||
|
||||
indirections u8 // 0 for `f int`, 1 for `f &int`, 2 for `f &&int` , TODO
|
||||
}
|
||||
|
||||
|
|
|
@ -156,8 +156,7 @@ fn test_float_point_formatting_rounding() {
|
|||
assert '${float_2:0.0f}' == '45'
|
||||
assert '${float_3:0.0f}' == '239'
|
||||
assert '${float_4:0.0f}' == '240'
|
||||
//
|
||||
//
|
||||
|
||||
assert '${239.5555551:0.0f}' == '240'
|
||||
assert '${239.5555551:0.1f}' == '239.6'
|
||||
assert '${239.5555551:0.2f}' == '239.56'
|
||||
|
@ -170,7 +169,7 @@ fn test_float_point_formatting_rounding() {
|
|||
assert '${239.5555551:0.9f}' == '239.555555100'
|
||||
assert '${239.5555551:0.10f}' == '239.5555551000'
|
||||
assert '${239.5555551:0.11f}' == '239.55555510000'
|
||||
//
|
||||
|
||||
assert '${239.5:0.0f}' == '240'
|
||||
assert '${239.55:0.1f}' == '239.6'
|
||||
assert '${239.555:0.2f}' == '239.56'
|
||||
|
@ -182,7 +181,7 @@ fn test_float_point_formatting_rounding() {
|
|||
assert '${239.555555555:0.8f}' == '239.55555556'
|
||||
assert '${239.5555555555:0.9f}' == '239.555555556'
|
||||
assert '${239.55555555555:0.10f}' == '239.5555555556'
|
||||
//
|
||||
|
||||
assert '${239.5550:0.3f}' == '239.555'
|
||||
assert '${239.5551:0.3f}' == '239.555'
|
||||
assert '${239.5552:0.3f}' == '239.555'
|
||||
|
@ -193,7 +192,7 @@ fn test_float_point_formatting_rounding() {
|
|||
assert '${239.5557:0.3f}' == '239.556'
|
||||
assert '${239.5558:0.3f}' == '239.556'
|
||||
assert '${239.5559:0.3f}' == '239.556'
|
||||
//
|
||||
|
||||
assert '${239.5555551:0.6f}' == '239.555555'
|
||||
assert '${239.5555552:0.6f}' == '239.555555'
|
||||
assert '${239.5555553:0.6f}' == '239.555555'
|
||||
|
@ -203,7 +202,7 @@ fn test_float_point_formatting_rounding() {
|
|||
assert '${239.5555557:0.6f}' == '239.555556'
|
||||
assert '${239.5555558:0.6f}' == '239.555556'
|
||||
assert '${239.5555559:0.6f}' == '239.555556'
|
||||
//
|
||||
|
||||
assert '${239.55555555555:0.10f}' == '239.5555555556'
|
||||
assert '${239.55555555555:0.9f}' == '239.555555556'
|
||||
assert '${239.55555555555:0.8f}' == '239.55555556'
|
||||
|
@ -215,7 +214,7 @@ fn test_float_point_formatting_rounding() {
|
|||
assert '${239.55555555555:0.2f}' == '239.56'
|
||||
assert '${239.55555555555:0.1f}' == '239.6'
|
||||
assert '${239.55555555555:0.0f}' == '240'
|
||||
//
|
||||
|
||||
assert '${-239.55555555555:0.10f}' == '-239.5555555556'
|
||||
assert '${-239.55555555555:0.9f}' == '-239.555555556'
|
||||
assert '${-239.55555555555:0.8f}' == '-239.55555556'
|
||||
|
|
|
@ -1241,7 +1241,7 @@ fn test_push_arr_string_free() {
|
|||
lines << s
|
||||
// make sure the data in the array is valid after freeing the string
|
||||
unsafe { s.free() }
|
||||
//
|
||||
|
||||
println(lines)
|
||||
assert lines.len == 2
|
||||
assert lines[0] == 'hi'
|
||||
|
|
|
@ -3,7 +3,7 @@ const a = [4, 5, 1, 2, 5, 9]
|
|||
fn test_map() {
|
||||
assert a.map(it) == a
|
||||
assert a.map(it * 10) == [40, 50, 10, 20, 50, 90]
|
||||
//
|
||||
|
||||
assert a.map(|x| x) == a
|
||||
assert a.map(|x| x * 10) == [40, 50, 10, 20, 50, 90]
|
||||
assert a.map(|x| 'x: ${x}') == ['x: 4', 'x: 5', 'x: 1', 'x: 2', 'x: 5', 'x: 9']
|
||||
|
@ -13,7 +13,7 @@ fn test_map() {
|
|||
fn test_filter() {
|
||||
assert a.filter(it > 4) == [5, 5, 9]
|
||||
assert a.filter(it < 4) == [1, 2]
|
||||
//
|
||||
|
||||
assert a.filter(|x| x > 4) == [5, 5, 9]
|
||||
assert a.filter(|x| x < 4) == [1, 2]
|
||||
}
|
||||
|
|
|
@ -16,23 +16,23 @@ fn test_length_in_bytes() {
|
|||
assert rune(0x0).length_in_bytes() == 1
|
||||
assert `A`.length_in_bytes() == 1 // latin letter
|
||||
assert rune(0x7F).length_in_bytes() == 1
|
||||
//
|
||||
|
||||
assert rune(0x80).length_in_bytes() == 2
|
||||
assert `Д`.length_in_bytes() == 2 // cyrillic letter
|
||||
assert rune(0x7FF).length_in_bytes() == 2
|
||||
//
|
||||
|
||||
assert rune(0x800).length_in_bytes() == 3
|
||||
assert `喂`.length_in_bytes() == 3 // hey
|
||||
assert rune(0xFFFF).length_in_bytes() == 3
|
||||
//
|
||||
|
||||
assert rune(0xD800).length_in_bytes() == -1 // min for surrogates
|
||||
assert rune(0xD866).length_in_bytes() == -1 // invalid
|
||||
assert rune(0xDFFF).length_in_bytes() == -1 // max for surrogates
|
||||
//
|
||||
|
||||
assert rune(0x100000).length_in_bytes() == 4
|
||||
assert rune(0x10FFD7).length_in_bytes() == 4 // "Supplementary Private Use Area-B" ¯\_(ツ)_/¯
|
||||
assert rune(0x10FFFF).length_in_bytes() == 4
|
||||
//
|
||||
|
||||
assert rune(0x110000).length_in_bytes() == -1
|
||||
}
|
||||
|
||||
|
|
|
@ -267,7 +267,6 @@ fn test_interpolation_of_negative_numbers_padding_and_width() {
|
|||
assert '-000000000000000004d' == '${a:020x}'
|
||||
assert '-0000000000001001101' == '${a:020b}'
|
||||
|
||||
//
|
||||
assert ' -77' == '${a:8}'
|
||||
assert ' -77' == '${a:8d}'
|
||||
assert ' -4d' == '${a:8x}'
|
||||
|
@ -278,7 +277,6 @@ fn test_interpolation_of_negative_numbers_padding_and_width() {
|
|||
assert '-1001101' == '${a:08b}'
|
||||
assert '-000004d' == '${a:08x}'
|
||||
|
||||
//
|
||||
assert ' -77' == '${a:4}'
|
||||
assert ' -77' == '${a:4d}'
|
||||
assert '-1001101' == '${a:4b}'
|
||||
|
@ -289,7 +287,6 @@ fn test_interpolation_of_negative_numbers_padding_and_width() {
|
|||
assert '-1001101' == '${a:04b}'
|
||||
assert '-04d' == '${a:04x}'
|
||||
|
||||
//
|
||||
assert '-77' == '${a:2}'
|
||||
assert '-77' == '${a:2d}'
|
||||
assert '-1001101' == '${a:2b}'
|
||||
|
@ -300,14 +297,12 @@ fn test_interpolation_of_negative_numbers_padding_and_width() {
|
|||
assert '-1001101' == '${a:02b}'
|
||||
assert '-4d' == '${a:02x}'
|
||||
|
||||
//
|
||||
bin0 := ~6
|
||||
assert bin0 == -7
|
||||
assert '-0000111' == '${bin0:08b}' // a minimum of 8 characters for the whole number, including the padding and the sign
|
||||
assert '-0000111' == '${~6:08b}'
|
||||
assert ' -111' == '${~6:8b}'
|
||||
|
||||
//
|
||||
assert '-0000110' == '${-6:08b}'
|
||||
assert ' -110' == '${-6:8b}'
|
||||
}
|
||||
|
|
|
@ -15,20 +15,20 @@ fn test_match_glob_on_x() {
|
|||
fn test_match_glob_on_abc() {
|
||||
assert !'abc'.match_glob('')
|
||||
assert 'abc'.match_glob('*')
|
||||
//
|
||||
|
||||
assert !'abc'.match_glob('ab')
|
||||
assert 'abc'.match_glob('abc')
|
||||
assert 'abc'.match_glob('abc*')
|
||||
//
|
||||
|
||||
assert 'abc'.match_glob('*c')
|
||||
assert !'abc'.match_glob('*b')
|
||||
assert 'abc'.match_glob('*bc')
|
||||
assert 'abc'.match_glob('*abc')
|
||||
//
|
||||
|
||||
assert 'abc'.match_glob('a*')
|
||||
assert !'abc'.match_glob('b*')
|
||||
assert 'abc'.match_glob('a*c')
|
||||
//
|
||||
|
||||
assert 'abc'.match_glob('ab?')
|
||||
assert 'abc'.match_glob('a??')
|
||||
assert 'abc'.match_glob('???')
|
||||
|
@ -49,13 +49,13 @@ fn test_match_glob_with_any_charset_patterns() {
|
|||
assert 'axbxcxdxe'.match_glob('*c[xyz]d*')
|
||||
assert 'axbxcxdxe'.match_glob('*c[yxz]d*')
|
||||
assert 'axbxcxdxe'.match_glob('*c[zyx]d*')
|
||||
//
|
||||
|
||||
assert 'axbxcxdxe'.match_glob('*dx[QeW]')
|
||||
assert 'axbxcxdxe'.match_glob('*dx[QeW]*')
|
||||
//
|
||||
|
||||
assert !'axbxcxdxe'.match_glob('*bx[QcW]')
|
||||
assert 'axbxcxdxe'.match_glob('*bx[QcW]*')
|
||||
//
|
||||
|
||||
assert !'axbxcxdxe'.match_glob('*zx[QeW]')
|
||||
assert !'axbxcxdxe'.match_glob('*zx[QeW]*')
|
||||
}
|
||||
|
|
|
@ -1506,12 +1506,11 @@ fn test_index_u8() {
|
|||
assert 'abcabca'.index_u8(`a`) == 0
|
||||
assert 'abcabca'.index_u8(`b`) == 1
|
||||
assert 'abcabca'.index_u8(`c`) == 2
|
||||
//
|
||||
|
||||
assert 'abc'.index_u8(`d`) == -1
|
||||
assert 'abc'.index_u8(`A`) == -1
|
||||
assert 'abc'.index_u8(`B`) == -1
|
||||
assert 'abc'.index_u8(`C`) == -1
|
||||
//
|
||||
}
|
||||
|
||||
fn test_last_index() {
|
||||
|
|
|
@ -25,7 +25,7 @@ fn test_constant_time_select() {
|
|||
assert constant_time_select(1, 2, 0) == 2
|
||||
assert constant_time_select(1, 2, 255) == 2
|
||||
assert constant_time_select(1, 2, 255 * 255) == 2
|
||||
//
|
||||
|
||||
assert constant_time_select(0, 1, 0) == 0
|
||||
assert constant_time_select(0, 1, 255) == 255
|
||||
assert constant_time_select(0, 1, 255 * 255) == 255 * 255
|
||||
|
|
|
@ -18,7 +18,7 @@ fn test_long_encoding() {
|
|||
unsafe { free(ebuffer) }
|
||||
unsafe { free(dbuffer) }
|
||||
}
|
||||
//
|
||||
|
||||
encoded_size := base64.encode_in_buffer(s_original, ebuffer)
|
||||
mut encoded_in_buf := []u8{len: encoded_size}
|
||||
unsafe { vmemcpy(encoded_in_buf.data, ebuffer, encoded_size) }
|
||||
|
|
|
@ -67,15 +67,15 @@ pub:
|
|||
native_frame_fn FNCb = unsafe { nil }
|
||||
cleanup_fn FNCb = unsafe { nil } // Called once, after Sokol determines that the application is finished/closed. Put your app specific cleanup/free actions here.
|
||||
fail_fn FNFail = unsafe { nil } // Called once per Sokol error/log message. TODO: currently it does nothing with latest Sokol, reimplement using Sokol's new sapp_logger APIs.
|
||||
//
|
||||
|
||||
event_fn FNEvent = unsafe { nil } // Called once per each user initiated event, received by Sokol/GG.
|
||||
on_event FNEvent2 = unsafe { nil } // Called once per each user initiated event, received by Sokol/GG. Same as event_fn, just the parameter order is different. TODO: deprecate this, in favor of event_fn
|
||||
quit_fn FNEvent = unsafe { nil } // Called when the user closes the app window.
|
||||
//
|
||||
|
||||
keydown_fn FNKeyDown = unsafe { nil } // Called once per key press, no matter how long the key is held down. Note that here you can access the scan code/physical key, but not the logical character.
|
||||
keyup_fn FNKeyUp = unsafe { nil } // Called once per key press, when the key is released.
|
||||
char_fn FNChar = unsafe { nil } // Called once per character (after the key is pressed down, and then released). Note that you can access the character/utf8 rune here, not just the scan code.
|
||||
//
|
||||
|
||||
move_fn FNMove = unsafe { nil } // Called while the mouse/touch point is moving.
|
||||
click_fn FNClick = unsafe { nil } // Called once when the mouse/touch button is clicked.
|
||||
unclick_fn FNUnClick = unsafe { nil } // Called once when the mouse/touch button is released.
|
||||
|
@ -103,7 +103,7 @@ pub:
|
|||
enable_dragndrop bool // enable file dropping (drag'n'drop), default is false
|
||||
max_dropped_files int = 1 // max number of dropped files to process (default: 1)
|
||||
max_dropped_file_path_length int = 2048 // max length in bytes of a dropped UTF-8 file path (default: 2048)
|
||||
//
|
||||
|
||||
min_width int // desired minimum width of the window
|
||||
min_height int // desired minimum height of the window
|
||||
}
|
||||
|
@ -169,7 +169,7 @@ pub mut:
|
|||
font_inited bool
|
||||
ui_mode bool // do not redraw everything 60 times/second, but only when the user requests
|
||||
frame u64 // the current frame counted from the start of the application; always increasing
|
||||
//
|
||||
|
||||
mbtn_mask u8
|
||||
mouse_buttons MouseButtons // typed version of mbtn_mask; easier to use for user programs
|
||||
mouse_pos_x int
|
||||
|
@ -178,7 +178,7 @@ pub mut:
|
|||
mouse_dy int
|
||||
scroll_x int
|
||||
scroll_y int
|
||||
//
|
||||
|
||||
key_modifiers Modifier // the current key modifiers
|
||||
key_repeat bool // whether the pressed key was an autorepeated one
|
||||
pressed_keys [key_code_max]bool // an array representing all currently pressed keys
|
||||
|
|
|
@ -217,14 +217,14 @@ pub:
|
|||
native_frame_fn FNCb = unsafe { nil }
|
||||
cleanup_fn FNCb = unsafe { nil }
|
||||
fail_fn FNFail = unsafe { nil }
|
||||
//
|
||||
|
||||
event_fn FNEvent = unsafe { nil }
|
||||
quit_fn FNEvent = unsafe { nil }
|
||||
//
|
||||
|
||||
keydown_fn FNKeyDown = unsafe { nil }
|
||||
keyup_fn FNKeyUp = unsafe { nil }
|
||||
char_fn FNChar = unsafe { nil }
|
||||
//
|
||||
|
||||
move_fn FNMove = unsafe { nil }
|
||||
click_fn FNClick = unsafe { nil }
|
||||
unclick_fn FNUnClick = unsafe { nil }
|
||||
|
@ -284,7 +284,7 @@ pub mut:
|
|||
mouse_dy int
|
||||
scroll_x int
|
||||
scroll_y int
|
||||
//
|
||||
|
||||
key_modifiers Modifier // the current key modifiers
|
||||
key_repeat bool // whether the pressed key was an autorepeated one
|
||||
pressed_keys [key_code_max]bool // an array representing all currently pressed keys
|
||||
|
|
|
@ -18,7 +18,7 @@ pub mut:
|
|||
z f32
|
||||
color gx.Color = gx.white
|
||||
effect ImageEffect = .alpha
|
||||
//
|
||||
|
||||
rotation f32 // the amount to rotate the image in degrees, counterclockwise. Use a negative value, to rotate it clockwise.
|
||||
}
|
||||
|
||||
|
|
|
@ -236,7 +236,7 @@ pub fn (a Color) over(b Color) Color {
|
|||
aa := f32(a.a) / 255
|
||||
ab := f32(b.a) / 255
|
||||
ar := aa + ab * (1 - aa)
|
||||
//
|
||||
|
||||
rr := (f32(a.r) * aa + f32(b.r) * ab * (1 - aa)) / ar
|
||||
gr := (f32(a.g) * aa + f32(b.g) * ab * (1 - aa)) / ar
|
||||
br := (f32(a.b) * aa + f32(b.b) * ab * (1 - aa)) / ar
|
||||
|
|
|
@ -16,7 +16,7 @@ fn test_add() {
|
|||
b := gx.rgba(100, 100, 100, 100)
|
||||
r := gx.rgba(200, 200, 200, 200)
|
||||
assert (a + b) == r
|
||||
//
|
||||
|
||||
assert gx.red + gx.green == gx.yellow
|
||||
assert gx.red + gx.blue == gx.magenta
|
||||
assert gx.green + gx.blue == gx.cyan
|
||||
|
@ -27,7 +27,7 @@ fn test_sub() {
|
|||
b := gx.rgba(100, 100, 100, 100)
|
||||
r := gx.rgba(0, 0, 0, 100)
|
||||
assert (a - b) == r
|
||||
//
|
||||
|
||||
assert gx.white - gx.green == gx.magenta
|
||||
assert gx.white - gx.blue == gx.yellow
|
||||
assert gx.white - gx.red == gx.cyan
|
||||
|
|
|
@ -24,7 +24,7 @@ fn test_fnv1a_sum32() {
|
|||
assert b.hex() == ahash
|
||||
assert c.hex() == ahash
|
||||
assert d.hex() == ahash
|
||||
//
|
||||
|
||||
mut aa := Abc{}
|
||||
x := fnv1a.sum32_struct(aa)
|
||||
aa.a[3] = 5
|
||||
|
@ -51,7 +51,7 @@ fn test_fnv1a_sum64() {
|
|||
assert b.hex() == ahash
|
||||
assert c.hex() == ahash
|
||||
assert d.hex() == ahash
|
||||
//
|
||||
|
||||
mut aa := Abc{}
|
||||
x := fnv1a.sum64_struct(aa)
|
||||
aa.a[3] = 5
|
||||
|
|
|
@ -29,7 +29,7 @@ fn read_from_string(text string, capacity int) []u8 {
|
|||
text: text
|
||||
}
|
||||
mut stream := io.new_buffered_reader(reader: str, cap: capacity)
|
||||
//
|
||||
|
||||
mut buf := []u8{len: 1}
|
||||
mut res := []u8{}
|
||||
mut i := 0
|
||||
|
|
|
@ -18,9 +18,9 @@ pub:
|
|||
next &C.cJSON // next/prev allow you to walk array/object chains. Alternatively, use GetArraySize/GetArrayItem/GetObjectItem
|
||||
prev &C.cJSON
|
||||
child &C.cJSON // An array or object item will have a child pointer pointing to a chain of the items in the array/object
|
||||
//
|
||||
|
||||
@type int // The type of the item, as above
|
||||
//
|
||||
|
||||
valueint int // writing to valueint is DEPRECATED, use cJSON_SetNumberValue instead
|
||||
valuedouble f64 // The item's number, if type==cJSON_Number
|
||||
valuestring &char // The item's string, if type==cJSON_String and type == cJSON_Raw
|
||||
|
|
|
@ -127,7 +127,7 @@ fn test_merge_in_place() {
|
|||
'xyz': 'zyx'
|
||||
'aa': 'dd'
|
||||
}
|
||||
//
|
||||
|
||||
mut im1 := {
|
||||
11: 22
|
||||
33: 44
|
||||
|
@ -171,7 +171,7 @@ fn test_merge() {
|
|||
'xyz': 'zyx'
|
||||
'aa': 'dd'
|
||||
}
|
||||
//
|
||||
|
||||
mut im1 := {
|
||||
11: 22
|
||||
33: 44
|
||||
|
|
|
@ -474,7 +474,7 @@ fn test_signi() {
|
|||
assert signi(-0.000000000001) == -1
|
||||
assert signi(-0.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001) == -1
|
||||
assert signi(-0.0) == -1
|
||||
//
|
||||
|
||||
assert signi(inf(1)) == 1
|
||||
assert signi(72234878292.4586129) == 1
|
||||
assert signi(10) == 1
|
||||
|
@ -495,7 +495,7 @@ fn test_sign() {
|
|||
assert sign(-0.000000000001) == -1.0
|
||||
assert sign(-0.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001) == -1.0
|
||||
assert sign(-0.0) == -1.0
|
||||
//
|
||||
|
||||
assert sign(inf(1)) == 1.0
|
||||
assert sign(72234878292.4586129) == 1
|
||||
assert sign(10) == 1.0
|
||||
|
@ -1098,7 +1098,7 @@ fn test_count_digits() {
|
|||
assert count_digits(99) == 2
|
||||
assert count_digits(100) == 3
|
||||
assert count_digits(999) == 3
|
||||
//
|
||||
|
||||
assert count_digits(12345) == 5
|
||||
assert count_digits(123456789012345) == 15
|
||||
assert count_digits(-67345) == 5
|
||||
|
|
|
@ -82,7 +82,7 @@ fn (mut h StaticHttpHandler) handle(req http.Request) http.Response {
|
|||
res.header.add(.content_type, 'text/html; charset=utf-8')
|
||||
return res
|
||||
}
|
||||
//
|
||||
|
||||
mut body := ''
|
||||
mut content_type := 'text/html; charset=utf-8'
|
||||
if os.is_dir(requested_file_path) {
|
||||
|
|
|
@ -24,7 +24,7 @@ pub mut:
|
|||
user_ptr voidptr = unsafe { nil }
|
||||
verbose bool
|
||||
proxy &HttpProxy = unsafe { nil }
|
||||
//
|
||||
|
||||
validate bool // set this to true, if you want to stop requests, when their certificates are found to be invalid
|
||||
verify string // the path to a rootca.pem file, containing trusted CA certificate(s)
|
||||
cert string // the path to a cert.pem file, containing client certificate(s) for the request
|
||||
|
@ -37,7 +37,7 @@ pub mut:
|
|||
on_progress RequestProgressFn = unsafe { nil }
|
||||
on_progress_body RequestProgressBodyFn = unsafe { nil }
|
||||
on_finish RequestFinishFn = unsafe { nil }
|
||||
//
|
||||
|
||||
stop_copying_limit i64 = -1 // after this many bytes are received, stop copying to the response. Note that on_progress and on_progress_body callbacks, will continue to fire normally, until the full response is read, which allows you to implement streaming downloads, without keeping the whole big response in memory
|
||||
stop_receiving_limit i64 = -1 // after this many bytes are received, break out of the loop that reads the response, effectively stopping the request early. No more on_progress callbacks will be fired. The on_finish callback will fire.
|
||||
}
|
||||
|
|
|
@ -37,7 +37,7 @@ pub mut:
|
|||
// time = -1 for no timeout
|
||||
read_timeout i64 = 30 * time.second
|
||||
write_timeout i64 = 30 * time.second
|
||||
//
|
||||
|
||||
validate bool // when true, certificate failures will stop further processing
|
||||
verify string
|
||||
cert string
|
||||
|
@ -50,7 +50,7 @@ pub mut:
|
|||
on_progress RequestProgressFn = unsafe { nil }
|
||||
on_progress_body RequestProgressBodyFn = unsafe { nil }
|
||||
on_finish RequestFinishFn = unsafe { nil }
|
||||
//
|
||||
|
||||
stop_copying_limit i64 = -1 // after this many bytes are received, stop copying to the response. Note that on_progress and on_progress_body callbacks, will continue to fire normally, until the full response is read, which allows you to implement streaming downloads, without keeping the whole big response in memory
|
||||
stop_receiving_limit i64 = -1 // after this many bytes are received, break out of the loop that reads the response, effectively stopping the request early. No more on_progress callbacks will be fired. The on_finish callback will fire.
|
||||
}
|
||||
|
|
|
@ -38,11 +38,11 @@ pub mut:
|
|||
pool_channel_slots int = 1024
|
||||
worker_num int = runtime.nr_jobs()
|
||||
listener net.TcpListener
|
||||
//
|
||||
|
||||
on_running fn (mut s Server) = unsafe { nil } // Blocking cb. If set, ran by the web server on transitions to its .running state.
|
||||
on_stopped fn (mut s Server) = unsafe { nil } // Blocking cb. If set, ran by the web server on transitions to its .stopped state.
|
||||
on_closed fn (mut s Server) = unsafe { nil } // Blocking cb. If set, ran by the web server on transitions to its .closed state.
|
||||
//
|
||||
|
||||
show_startup_message bool = true // set to false, to remove the default `Listening on ...` message.
|
||||
}
|
||||
|
||||
|
|
|
@ -136,9 +136,9 @@ fn test_server_custom_handler() {
|
|||
assert x.status_msg == 'OK'
|
||||
assert y.status() == .ok
|
||||
assert y.http_version == '1.1'
|
||||
//
|
||||
|
||||
http.fetch(url: 'http://${server.addr}/something/else')!
|
||||
//
|
||||
|
||||
big_url := 'http://${server.addr}/redirect_to_big'
|
||||
mut progress_calls := &ProgressCalls{}
|
||||
z := http.fetch(
|
||||
|
@ -173,10 +173,10 @@ fn test_server_custom_handler() {
|
|||
assert progress_calls.chunks[1].bytestr().starts_with('HTTP/1.1 200 OK')
|
||||
assert progress_calls.chunks.last().bytestr().contains('xyz def')
|
||||
assert progress_calls.redirected_to == ['http://${server.addr}/big']
|
||||
//
|
||||
|
||||
server.stop()
|
||||
t.wait()
|
||||
//
|
||||
|
||||
assert handler.counter == 5
|
||||
assert handler.oks == 3
|
||||
assert handler.not_founds == 1
|
||||
|
|
|
@ -9,16 +9,16 @@ struct Context {
|
|||
mut:
|
||||
ok_client_dials int
|
||||
fail_client_dials int
|
||||
//
|
||||
|
||||
ok_client_close int
|
||||
fail_client_close int
|
||||
////
|
||||
|
||||
ok_server_accepts int
|
||||
fail_server_accepts int
|
||||
//
|
||||
|
||||
ok_server_close int
|
||||
fail_server_close int
|
||||
//
|
||||
|
||||
received []int
|
||||
}
|
||||
|
||||
|
|
|
@ -92,7 +92,7 @@ fn test_socket_read_line() {
|
|||
message := '${message1}\n${message2}\n'
|
||||
socket.write_string(message) or { assert false }
|
||||
assert true
|
||||
//
|
||||
|
||||
line1 := reader.read_line() or {
|
||||
// println(reader.buf)
|
||||
assert false
|
||||
|
|
|
@ -21,12 +21,12 @@ fn test_that_net_and_net_unix_can_be_imported_together_without_conflicts() {
|
|||
defer {
|
||||
l.close() or {}
|
||||
}
|
||||
//
|
||||
|
||||
mut c := unix.connect_stream(test_port)!
|
||||
defer {
|
||||
c.close() or {}
|
||||
}
|
||||
//
|
||||
|
||||
data := 'Hello from vlib/net!'
|
||||
c.write_string(data)!
|
||||
mut buf := []u8{len: 100}
|
||||
|
|
|
@ -151,7 +151,7 @@ fn test_on_close_when_server_closing_connection() ! {
|
|||
res.nr_closes++
|
||||
}, test_results)
|
||||
start_server_in_thread_and_wait_till_it_is_ready_to_accept_connections(mut ws)
|
||||
//
|
||||
|
||||
mut client := websocket.new_client('ws://localhost:30003')!
|
||||
client.connect()!
|
||||
spawn client.listen()
|
||||
|
@ -164,7 +164,7 @@ fn test_on_close_when_server_closing_connection() ! {
|
|||
fn test_on_close_when_client_closing_connection() ! {
|
||||
mut ws := websocket.new_server(.ip, 30004, '')
|
||||
start_server_in_thread_and_wait_till_it_is_ready_to_accept_connections(mut ws)
|
||||
//
|
||||
|
||||
mut client := websocket.new_client('ws://localhost:30004')!
|
||||
mut test_results := WebsocketTestResults{}
|
||||
client.on_close_ref(fn (mut cli websocket.Client, code int, reason string, mut res WebsocketTestResults) ! {
|
||||
|
|
|
@ -24,7 +24,7 @@ fn test_ensure_folder_is_writable() {
|
|||
fn test_expand_tilde_to_home() {
|
||||
os.setenv('HOME', '/tmp/home/folder', true)
|
||||
os.setenv('USERPROFILE', r'\tmp\home\folder', true)
|
||||
//
|
||||
|
||||
home_test := os.join_path(os.home_dir(), 'test', 'tilde', 'expansion')
|
||||
home_expansion_test := os.expand_tilde_to_home(os.join_path('~', 'test', 'tilde',
|
||||
'expansion'))
|
||||
|
|
|
@ -30,7 +30,7 @@ fn test_set_buffer_line_buffered() {
|
|||
unsafe { buf.reset() }
|
||||
}
|
||||
wfile.close()
|
||||
//
|
||||
|
||||
content := os.read_lines('text.txt')!
|
||||
dump(content)
|
||||
assert content == ['----------------------------------', 'hello', 'world', 'hi']
|
||||
|
@ -57,7 +57,7 @@ fn test_set_buffer_fully_buffered() {
|
|||
dump(buf)
|
||||
// assert buf.bytestr().starts_with('---\nhello\nworld\nhi\n') // works on GLIBC, fails on MUSL
|
||||
assert buf.bytestr().contains('---\nhello\nworld\n')
|
||||
//
|
||||
|
||||
content := os.read_lines('text.txt')!
|
||||
dump(content)
|
||||
assert content == ['S---', 'hello', 'world', 'hi']
|
||||
|
@ -80,7 +80,7 @@ fn test_set_unbuffered() {
|
|||
wfile.close()
|
||||
// dump(buf.bytestr())
|
||||
assert buf.all(it == 0)
|
||||
//
|
||||
|
||||
content := os.read_lines('text.txt')!
|
||||
dump(content)
|
||||
assert content == ['S---', 'hello', 'world', 'hi']
|
||||
|
|
|
@ -334,7 +334,7 @@ fn test_seek() {
|
|||
|
||||
// println('> ${sizeof(Point)} ${sizeof(byte)} ${sizeof(Color)} ${sizeof(Permissions)}')
|
||||
f = os.open_file(tfile, 'r')!
|
||||
//
|
||||
|
||||
f.seek(i64(sizeof(Point)), .start)!
|
||||
assert f.tell()! == sizeof(Point)
|
||||
b := f.read_raw[u8]()!
|
||||
|
@ -343,7 +343,7 @@ fn test_seek() {
|
|||
f.seek(i64(sizeof(Color)), .current)!
|
||||
x := f.read_raw[Permissions]()!
|
||||
assert x == another_permission
|
||||
//
|
||||
|
||||
f.close()
|
||||
}
|
||||
|
||||
|
@ -411,7 +411,7 @@ fn test_open_file_wb_ab() {
|
|||
wfile.write_string('hello')!
|
||||
wfile.close()
|
||||
assert os.read_file('text.txt')! == 'hello'
|
||||
//
|
||||
|
||||
mut afile := os.open_file('text.txt', 'ab', 0o666)!
|
||||
afile.write_string('hello')!
|
||||
afile.close()
|
||||
|
@ -424,12 +424,12 @@ fn test_open_append() {
|
|||
f1.write_string('abc\n')!
|
||||
f1.close()
|
||||
assert os.read_lines(tfile)! == ['abc']
|
||||
//
|
||||
|
||||
mut f2 := os.open_append(tfile)!
|
||||
f2.write_string('abc\n')!
|
||||
f2.close()
|
||||
assert os.read_lines(tfile)! == ['abc', 'abc']
|
||||
//
|
||||
|
||||
mut f3 := os.open_append(tfile)!
|
||||
f3.write_string('def\n')!
|
||||
f3.close()
|
||||
|
|
|
@ -8,21 +8,21 @@ fn test_find_abs_path_of_executable() {
|
|||
defer {
|
||||
os.rmdir_all(tfolder) or {}
|
||||
}
|
||||
//
|
||||
|
||||
original_path := os.getenv('PATH')
|
||||
original_wdir := os.getwd()
|
||||
defer {
|
||||
os.chdir(original_wdir) or {}
|
||||
}
|
||||
//
|
||||
|
||||
new_path := tfolder + os.path_delimiter + original_path
|
||||
os.setenv('PATH', new_path, true)
|
||||
//
|
||||
|
||||
mut myclang_file := 'myclang'
|
||||
$if windows {
|
||||
myclang_file += '.bat'
|
||||
}
|
||||
//
|
||||
|
||||
os.chdir(tfolder)!
|
||||
os.write_file(myclang_file, 'echo hello')!
|
||||
os.chmod(myclang_file, 0o0777)!
|
||||
|
@ -31,13 +31,13 @@ fn test_find_abs_path_of_executable() {
|
|||
defer {
|
||||
os.rm(myclang_file) or {}
|
||||
}
|
||||
//
|
||||
|
||||
fpath := os.find_abs_path_of_executable('myclang') or {
|
||||
assert false
|
||||
return
|
||||
}
|
||||
dump(fpath)
|
||||
//
|
||||
|
||||
os.setenv('PATH', original_path, true)
|
||||
os.chdir(os.home_dir())! // change to a *completely* different folder, to avoid the original PATH containing `.`
|
||||
if x := os.find_abs_path_of_executable('myclang') {
|
||||
|
|
|
@ -983,7 +983,7 @@ fn move_across_partitions_using_function(f fn (src string, dst string, opts os.M
|
|||
os.mkdir_all(mfolder)!
|
||||
os.mkdir_all(cfolder)!
|
||||
os.mkdir_all(cdeepfolder)!
|
||||
//
|
||||
|
||||
original_path := os.join_path(pfolder, 'original.txt')
|
||||
target_path := os.join_path(cdeepfolder, 'target.txt')
|
||||
os.write_file(original_path, 'text')!
|
||||
|
|
|
@ -88,7 +88,7 @@ fn test_run() {
|
|||
p.wait()
|
||||
assert p.code == 0
|
||||
assert p.status == .exited
|
||||
//
|
||||
|
||||
eprintln('polling iterations: ${i}')
|
||||
assert i < 50
|
||||
p.close()
|
||||
|
@ -126,7 +126,7 @@ fn test_slurping_output() {
|
|||
assert output.contains('stdout, 2')
|
||||
assert output.contains('stdout, 3')
|
||||
assert output.contains('stdout, 4')
|
||||
//
|
||||
|
||||
// dump(errors)
|
||||
assert errors.contains('stderr, 1')
|
||||
assert errors.contains('stderr, 2')
|
||||
|
|
|
@ -50,10 +50,10 @@ pub mut:
|
|||
proc_info ProcessInformation
|
||||
command_line [65536]u8
|
||||
child_stdin &u32 = unsafe { nil }
|
||||
//
|
||||
|
||||
child_stdout_read &u32 = unsafe { nil }
|
||||
child_stdout_write &u32 = unsafe { nil }
|
||||
//
|
||||
|
||||
child_stderr_read &u32 = unsafe { nil }
|
||||
child_stderr_write &u32 = unsafe { nil }
|
||||
}
|
||||
|
|
|
@ -15,12 +15,12 @@ pub enum Backend {
|
|||
pub enum PixelFormat as u32 {
|
||||
_default // value 0 reserved for default-init
|
||||
@none
|
||||
//
|
||||
|
||||
r8
|
||||
r8sn
|
||||
r8ui
|
||||
r8si
|
||||
//
|
||||
|
||||
r16
|
||||
r16sn
|
||||
r16ui
|
||||
|
@ -30,7 +30,7 @@ pub enum PixelFormat as u32 {
|
|||
rg8sn
|
||||
rg8ui
|
||||
rg8si
|
||||
//
|
||||
|
||||
r32ui
|
||||
r32si
|
||||
r32f
|
||||
|
@ -48,7 +48,7 @@ pub enum PixelFormat as u32 {
|
|||
rgb10a2
|
||||
rg11b10f
|
||||
rgb9e5
|
||||
//
|
||||
|
||||
rg32ui
|
||||
rg32si
|
||||
rg32f
|
||||
|
@ -57,16 +57,14 @@ pub enum PixelFormat as u32 {
|
|||
rgba16ui
|
||||
rgba16si
|
||||
rgba16f
|
||||
//
|
||||
|
||||
rgba32ui
|
||||
rgba32si
|
||||
rgba32f
|
||||
//
|
||||
//
|
||||
//
|
||||
|
||||
depth
|
||||
depth_stencil
|
||||
//
|
||||
|
||||
bc1_rgba
|
||||
bc2_rgba
|
||||
bc3_rgba
|
||||
|
@ -94,7 +92,7 @@ pub enum PixelFormat as u32 {
|
|||
eac_rg11sn
|
||||
astc_4x4_rgba
|
||||
astc_4x4_srgba
|
||||
//
|
||||
|
||||
_num
|
||||
_force_u32 = 0x7FFFFFFF
|
||||
}
|
||||
|
|
|
@ -532,7 +532,7 @@ pub type FrameStatsWGPU = C.sg_frame_stats_wgpu
|
|||
@[typedef]
|
||||
pub struct C.sg_frame_stats {
|
||||
frame_index u32 // current frame counter, starts at 0
|
||||
//
|
||||
|
||||
num_passes u32
|
||||
num_apply_viewport u32
|
||||
num_apply_scissor_rect u32
|
||||
|
@ -543,12 +543,12 @@ pub struct C.sg_frame_stats {
|
|||
num_update_buffer u32
|
||||
num_append_buffer u32
|
||||
num_update_image u32
|
||||
//
|
||||
|
||||
size_apply_uniforms u32
|
||||
size_update_buffer u32
|
||||
size_append_buffer u32
|
||||
size_update_image u32
|
||||
//
|
||||
|
||||
gl FrameStatsGL
|
||||
d3d11 FrameStatsD3D11
|
||||
metal FrameStatsMetal
|
||||
|
|
|
@ -236,7 +236,7 @@ fn converter(mut pn PrepNumber) u64 {
|
|||
mut r2 := u32(0)
|
||||
mut r1 := u32(0)
|
||||
mut r0 := u32(0)
|
||||
//
|
||||
|
||||
mask28 := u32(u64(0xF) << 28)
|
||||
mut result := u64(0)
|
||||
// working on 3 u32 to have 96 bit precision
|
||||
|
|
|
@ -28,7 +28,7 @@ fn test_sb() {
|
|||
assert res[res.len - 1] == `\n`
|
||||
println('"${res}"')
|
||||
assert res.trim_space() == 'x = 10 y = 20'
|
||||
//
|
||||
|
||||
sb = strings.new_builder(10)
|
||||
sb.write_string('x = ${x} y = ${y}')
|
||||
assert sb.str() == 'x = 10 y = 20'
|
||||
|
|
|
@ -28,7 +28,7 @@ fn test_sb() {
|
|||
assert res[res.len - 1] == `\n`
|
||||
println('"${res}"')
|
||||
assert res.trim_space() == 'x = 10 y = 20'
|
||||
//
|
||||
|
||||
sb = strings.new_builder(10)
|
||||
sb.write_string('x = ${x} y = ${y}')
|
||||
assert sb.str() == 'x = 10 y = 20'
|
||||
|
|
|
@ -71,7 +71,7 @@ fn test_peek() {
|
|||
assert s.peek() == `a`
|
||||
assert s.peek() == `a`
|
||||
assert s.peek() == `a`
|
||||
//
|
||||
|
||||
assert s.next() == `a`
|
||||
assert s.next() == `b`
|
||||
assert s.next() == `c`
|
||||
|
@ -85,7 +85,7 @@ fn test_peek_n() {
|
|||
assert s.peek_n(2) == `c`
|
||||
assert s.peek_n(3) == -1
|
||||
assert s.peek_n(4) == -1
|
||||
//
|
||||
|
||||
assert s.next() == `a`
|
||||
assert s.next() == `b`
|
||||
assert s.next() == `c`
|
||||
|
|
|
@ -71,7 +71,7 @@ fn test_peek() {
|
|||
assert s.peek() == `a`
|
||||
assert s.peek() == `a`
|
||||
assert s.peek() == `a`
|
||||
//
|
||||
|
||||
assert s.next() == `a`
|
||||
assert s.next() == `b`
|
||||
assert s.next() == `c`
|
||||
|
@ -85,7 +85,7 @@ fn test_peek_n() {
|
|||
assert s.peek_n(2) == `c`
|
||||
assert s.peek_n(3) == -1
|
||||
assert s.peek_n(4) == -1
|
||||
//
|
||||
|
||||
assert s.next() == `a`
|
||||
assert s.next() == `b`
|
||||
assert s.next() == `c`
|
||||
|
|
|
@ -26,10 +26,10 @@ mut:
|
|||
n_iters int
|
||||
n_readers int
|
||||
n_writers int
|
||||
//
|
||||
|
||||
pops_wg &sync.WaitGroup
|
||||
pops []Event
|
||||
//
|
||||
|
||||
pushes_wg &sync.WaitGroup
|
||||
pushes []Event
|
||||
}
|
||||
|
@ -90,7 +90,6 @@ fn do_send(ch chan int, id int, mut ctx Context) {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
//
|
||||
args := os.args[1..]
|
||||
if '-h' in args || '--help' in args {
|
||||
eprintln('Usage:\n many_writers_and_receivers_on_1_channel [-readers 1] [-writers 4] [-chan_cap 100] [-iterations 25000]')
|
||||
|
@ -101,7 +100,7 @@ fn main() {
|
|||
n_writers := cmdline.option(args, '-writers', '4').int()
|
||||
chan_cap := cmdline.option(args, '-chan_cap', '100').int()
|
||||
eprintln('> n_iters, ${n_iters}, n_writers, ${n_writers}, n_readers, ${n_readers}, chan_cap, ${chan_cap}')
|
||||
//
|
||||
|
||||
ch := chan int{cap: chan_cap}
|
||||
max_number_of_pushes := n_writers * (n_iters + 2)
|
||||
max_number_of_pops := max_number_of_pushes * n_readers
|
||||
|
|
|
@ -62,19 +62,19 @@ fn test_get_cursor_position() {
|
|||
cursor_position_1 := term.get_cursor_position()!
|
||||
assert original_position.x == cursor_position_1.x
|
||||
assert original_position.y == cursor_position_1.y
|
||||
//
|
||||
|
||||
term.set_cursor_position(
|
||||
x: 10
|
||||
y: 11
|
||||
)
|
||||
cursor_position_2 := term.get_cursor_position()!
|
||||
//
|
||||
|
||||
term.set_cursor_position(
|
||||
x: 5
|
||||
y: 6
|
||||
)
|
||||
cursor_position_3 := term.get_cursor_position()!
|
||||
//
|
||||
|
||||
term.set_cursor_position(original_position)
|
||||
eprintln('original_position: ${original_position}')
|
||||
eprintln('cursor_position_2: ${cursor_position_2}')
|
||||
|
|
|
@ -4,29 +4,29 @@ fn test_duration_str() {
|
|||
assert time.Duration(1 * time.nanosecond).str() == '1ns'
|
||||
assert time.Duration(999 * time.nanosecond).str() == '999ns'
|
||||
assert time.Duration(1000 * time.nanosecond).str() == '1.000us'
|
||||
//
|
||||
|
||||
assert time.Duration(1 * time.microsecond).str() == '1.000us'
|
||||
assert time.Duration(999 * time.microsecond).str() == '999.000us'
|
||||
assert time.Duration(1000 * time.microsecond).str() == '1.000ms'
|
||||
//
|
||||
|
||||
assert time.Duration(1 * time.second).str() == '1.000s'
|
||||
assert time.Duration(999 * time.second).str() == '16:39.000'
|
||||
assert time.Duration(1000 * time.second).str() == '16:40.000'
|
||||
//
|
||||
|
||||
assert time.Duration(1 * time.minute).str() == '1:00.000'
|
||||
assert time.Duration(999 * time.minute).str() == '16:39:00'
|
||||
assert time.Duration(1000 * time.minute).str() == '16:40:00'
|
||||
//
|
||||
|
||||
assert time.Duration(1 * time.hour).str() == '1:00:00'
|
||||
assert time.Duration(999 * time.hour).str() == '999:00:00'
|
||||
assert time.Duration(1000 * time.hour).str() == '1000:00:00'
|
||||
//
|
||||
|
||||
assert time.Duration(1 * time.microsecond + 7 * time.nanosecond).str() == '1.007us'
|
||||
//
|
||||
|
||||
assert time.Duration(1 * time.second + 5 * time.nanosecond).str() == '1.000s'
|
||||
assert time.Duration(1 * time.second + 5 * time.microsecond).str() == '1.000s'
|
||||
assert time.Duration(1 * time.second + 5 * time.millisecond).str() == '1.005s'
|
||||
//
|
||||
|
||||
assert time.Duration(1 * time.hour + 5 * time.millisecond).str() == '1:00:00'
|
||||
assert time.Duration(1 * time.hour + 5 * time.second).str() == '1:00:05'
|
||||
assert time.Duration(168 * time.hour + 5 * time.minute + 7 * time.second).str() == '168:05:07'
|
||||
|
|
|
@ -80,7 +80,7 @@ pub fn parse(s string) !Time {
|
|||
hour_ := hms[0][1..]
|
||||
minute_ := hms[1]
|
||||
second_ := hms[2]
|
||||
//
|
||||
|
||||
iyear := strconv.atoi(ymd[0]) or {
|
||||
return error_invalid_time(0, 'invalid year format: ${ymd[0]}')
|
||||
}
|
||||
|
|
|
@ -159,13 +159,13 @@ fn check_invalid_date(s string) {
|
|||
fn test_invalid_dates_should_error_during_parse() {
|
||||
check_invalid_date('-99999-12-20 00:00:00')
|
||||
check_invalid_date('99999-12-20 00:00:00')
|
||||
//
|
||||
|
||||
check_invalid_date('2008-00-20 00:00:00')
|
||||
check_invalid_date('2008-25-20 00:00:00')
|
||||
//
|
||||
|
||||
check_invalid_date('2008-12-00 00:00:00')
|
||||
check_invalid_date('2008-12-32 00:00:00')
|
||||
//
|
||||
|
||||
check_invalid_date('2008-12-01 30:00:00')
|
||||
check_invalid_date('2008-12-01 00:60:00')
|
||||
check_invalid_date('2008-12-01 00:01:60')
|
||||
|
|
|
@ -8,7 +8,7 @@ fn test_stopwatch_works_as_intended() {
|
|||
// sample code that you want to measure:
|
||||
println('Hello world')
|
||||
time.sleep(1 * time.millisecond)
|
||||
//
|
||||
|
||||
println('Greeting the world took: ${sw.elapsed().nanoseconds()}ns')
|
||||
assert sw.elapsed().nanoseconds() > 0
|
||||
}
|
||||
|
|
|
@ -46,7 +46,7 @@ pub:
|
|||
second int
|
||||
nanosecond int
|
||||
is_local bool // used to make time.now().local().local() == time.now().local()
|
||||
//
|
||||
|
||||
microsecond int @[deprecated: 'use t.nanosecond / 1000 instead'; deprecated_after: '2023-08-05']
|
||||
}
|
||||
|
||||
|
|
|
@ -237,14 +237,14 @@ fn test_add() {
|
|||
assert t2.nanosecond == t1.nanosecond + d_nanoseconds
|
||||
assert t2.unix() == t1.unix() + d_seconds
|
||||
assert t2.is_local == t1.is_local
|
||||
//
|
||||
|
||||
t3 := local_time_to_test.add(-duration)
|
||||
// dump(t3.debug())
|
||||
assert t3.second == t1.second - d_seconds
|
||||
assert t3.nanosecond == t1.nanosecond - d_nanoseconds
|
||||
assert t3.unix() == t1.unix() - d_seconds
|
||||
assert t3.is_local == t1.is_local
|
||||
//
|
||||
|
||||
t4 := local_time_to_test.as_local()
|
||||
// dump(t4.debug())
|
||||
t5 := t4.add(duration)
|
||||
|
@ -319,14 +319,14 @@ fn test_unix_time() {
|
|||
eprintln(' ut1: ${ut1}')
|
||||
eprintln(' ut2: ${ut2}')
|
||||
assert ut2 - ut1 < 2
|
||||
//
|
||||
|
||||
utm1 := t1.unix_milli()
|
||||
utm2 := t2.unix_milli()
|
||||
eprintln('utm1: ${utm1}')
|
||||
eprintln('utm2: ${utm2}')
|
||||
assert (utm1 - ut1 * 1000) < 1000
|
||||
assert (utm2 - ut2 * 1000) < 1000
|
||||
//
|
||||
|
||||
assert utm2 - utm1 > 2
|
||||
assert utm2 - utm1 < 999
|
||||
}
|
||||
|
@ -334,7 +334,7 @@ fn test_unix_time() {
|
|||
fn test_offset() {
|
||||
u := time.utc()
|
||||
n := time.now()
|
||||
//
|
||||
|
||||
mut diff_seconds := 0
|
||||
if u.day != n.day {
|
||||
if u.day > n.day {
|
||||
|
|
|
@ -50,7 +50,7 @@ fn test_at() {
|
|||
assert s.at() == `a`
|
||||
assert s.at() == `a`
|
||||
assert s.at() == `a`
|
||||
//
|
||||
|
||||
assert s.next() == `a`
|
||||
assert s.next() == `b`
|
||||
assert s.next() == `c`
|
||||
|
@ -64,7 +64,7 @@ fn test_peek() {
|
|||
assert s.peek(2) == `c`
|
||||
assert s.peek(3) == scanner.end_of_text
|
||||
assert s.peek(4) == scanner.end_of_text
|
||||
//
|
||||
|
||||
assert s.next() == `a`
|
||||
assert s.next() == `b`
|
||||
assert s.next() == `c`
|
||||
|
|
|
@ -121,7 +121,7 @@ fn test_burnt_sushi_tomltest() {
|
|||
invalid_folder := 'invalid'
|
||||
invalid_test_files := os.walk_ext('${test_root}/invalid', '.toml').map(it.replace('\\',
|
||||
'/')).sorted()
|
||||
//
|
||||
|
||||
println('\nTesting ${valid_test_files.len} valid TOML files...')
|
||||
mut valid := 0
|
||||
mut e := 0
|
||||
|
|
|
@ -22,7 +22,7 @@ fn test_keys() {
|
|||
out_file_json := os.read_file(path_by_extension('out'))!
|
||||
println(toml_json)
|
||||
assert toml_json == out_file_json
|
||||
//
|
||||
|
||||
if x := toml_doc.value_opt('unknown key') {
|
||||
assert false
|
||||
} else {
|
||||
|
|
|
@ -504,7 +504,7 @@ pub enum StructInitKind {
|
|||
pub struct Import {
|
||||
pub:
|
||||
source_name string // The original name in the source, `import abc.def` -> 'abc.def', *no matter* how the module is resolved
|
||||
//
|
||||
|
||||
mod string // the module name of the import
|
||||
alias string // the `x` in `import xxx as x`
|
||||
pos token.Pos
|
||||
|
|
|
@ -302,7 +302,7 @@ fn shorten_full_name_based_on_aliases(input string, m2a map[string]string) strin
|
|||
if replacements.len == 0 {
|
||||
return input
|
||||
}
|
||||
//
|
||||
|
||||
mut res := input
|
||||
if replacements.len > 1 {
|
||||
replacements.sort(a.weight > b.weight)
|
||||
|
|
|
@ -37,7 +37,6 @@ pub mut:
|
|||
//}
|
||||
table &ast.Table = unsafe { nil }
|
||||
ccoptions CcompilerOptions
|
||||
//
|
||||
// Note: changes in mod `builtin` force invalidation of every other .v file
|
||||
mod_invalidates_paths map[string][]string // changes in mod `os`, invalidate only .v files, that do `import os`
|
||||
mod_invalidates_mods map[string][]string // changes in mod `os`, force invalidation of mods, that do `import os`
|
||||
|
@ -141,7 +140,7 @@ pub fn (mut b Builder) middle_stages() ! {
|
|||
util.timing_start('TRANSFORM')
|
||||
b.transformer.transform_files(b.parsed_files)
|
||||
util.timing_measure('TRANSFORM')
|
||||
//
|
||||
|
||||
b.table.complete_interface_check()
|
||||
if b.pref.skip_unused {
|
||||
markused.mark_used(mut b.table, mut b.pref, b.parsed_files)
|
||||
|
@ -565,7 +564,7 @@ pub fn (mut b Builder) print_warnings_and_errors() {
|
|||
util.show_compiler_message(kind, err.CompilerMessage)
|
||||
}
|
||||
}
|
||||
//
|
||||
|
||||
if b.pref.is_verbose && b.checker.nr_errors > 1 {
|
||||
println('${b.checker.nr_errors} errors')
|
||||
}
|
||||
|
|
|
@ -49,7 +49,7 @@ fn parallel_cc(mut b builder.Builder, header string, res string, out_str string,
|
|||
for i in 0 .. c_files {
|
||||
out_files[i].close()
|
||||
}
|
||||
//
|
||||
|
||||
sw := time.new_stopwatch()
|
||||
mut o_postfixes := ['0', 'x']
|
||||
for i in 0 .. c_files {
|
||||
|
|
|
@ -113,13 +113,13 @@ struct CcompilerOptions {
|
|||
mut:
|
||||
guessed_compiler string
|
||||
shared_postfix string // .so, .dll
|
||||
//
|
||||
|
||||
debug_mode bool
|
||||
cc CC
|
||||
//
|
||||
|
||||
env_cflags string // prepended *before* everything else
|
||||
env_ldflags string // appended *after* everything else
|
||||
//
|
||||
|
||||
args []string // ordinary C options like `-O2`
|
||||
wargs []string // for `-Wxyz` *exclusively*
|
||||
pre_args []string // options that should go before .o_args
|
||||
|
@ -132,7 +132,7 @@ mut:
|
|||
|
||||
fn (mut v Builder) setup_ccompiler_options(ccompiler string) {
|
||||
mut ccoptions := CcompilerOptions{}
|
||||
//
|
||||
|
||||
mut debug_options := ['-g']
|
||||
mut optimization_options := ['-O2']
|
||||
// arguments for the C compiler
|
||||
|
@ -263,7 +263,7 @@ fn (mut v Builder) setup_ccompiler_options(ccompiler string) {
|
|||
}
|
||||
optimization_options = ['-Ofast']
|
||||
}
|
||||
//
|
||||
|
||||
if ccoptions.debug_mode {
|
||||
ccoptions.args << debug_options
|
||||
}
|
||||
|
@ -287,7 +287,7 @@ fn (mut v Builder) setup_ccompiler_options(ccompiler string) {
|
|||
if v.pref.is_o {
|
||||
ccoptions.args << '-c'
|
||||
}
|
||||
//
|
||||
|
||||
ccoptions.shared_postfix = '.so'
|
||||
if v.pref.os == .macos {
|
||||
ccoptions.shared_postfix = '.dylib'
|
||||
|
@ -606,7 +606,7 @@ pub fn (mut v Builder) cc() {
|
|||
return
|
||||
}
|
||||
}
|
||||
//
|
||||
|
||||
vexe := pref.vexe_path()
|
||||
vdir := os.dir(vexe)
|
||||
mut tried_compilation_commands := []string{}
|
||||
|
@ -948,7 +948,7 @@ fn (mut c Builder) cc_windows_cross() {
|
|||
eprintln('See https://github.com/vlang/v/blob/master/doc/docs.md#cross-compilation for instructions on how to fix that.')
|
||||
exit(1)
|
||||
}
|
||||
//
|
||||
|
||||
c.setup_ccompiler_options(c.pref.ccompiler)
|
||||
c.build_thirdparty_obj_files()
|
||||
c.setup_output_name()
|
||||
|
@ -956,7 +956,7 @@ fn (mut c Builder) cc_windows_cross() {
|
|||
args << '${c.pref.cflags}'
|
||||
args << '-o ${os.quoted_path(c.pref.out_name)}'
|
||||
args << '-w -L.'
|
||||
//
|
||||
|
||||
cflags := c.get_os_cflags()
|
||||
// -I flags
|
||||
if c.pref.ccompiler == 'msvc' {
|
||||
|
@ -999,14 +999,14 @@ fn (mut c Builder) cc_windows_cross() {
|
|||
println(builder.current_os)
|
||||
panic('your platform is not supported yet')
|
||||
}
|
||||
//
|
||||
|
||||
mut all_args := []string{}
|
||||
all_args << '-std=gnu11'
|
||||
all_args << optimization_options
|
||||
all_args << debug_options
|
||||
//
|
||||
|
||||
all_args << args
|
||||
//
|
||||
|
||||
all_args << '-municode'
|
||||
all_args << c.ccoptions.linker_flags
|
||||
all_args << '${c.pref.ldflags}'
|
||||
|
@ -1077,7 +1077,7 @@ fn (mut v Builder) build_thirdparty_obj_file(mod string, path string, moduleflag
|
|||
// prepare for tcc, it needs relative paths to thirdparty/tcc to work:
|
||||
current_folder := os.getwd()
|
||||
os.chdir(v.pref.vroot) or {}
|
||||
//
|
||||
|
||||
mut all_options := []string{}
|
||||
all_options << v.pref.third_party_option
|
||||
all_options << moduleflags.c_options_before_target()
|
||||
|
|
|
@ -14,7 +14,7 @@ fn interp_test(expression string, expected string) ! {
|
|||
defer {
|
||||
os.rmdir_all(tmpdir) or {}
|
||||
}
|
||||
//
|
||||
|
||||
tmpfile := os.join_path(tmpdir, 'input.v')
|
||||
outfile := os.join_path(tmpdir, 'output.txt')
|
||||
os.write_file(tmpfile, interpreter_wrap(expression))!
|
||||
|
|
|
@ -258,13 +258,13 @@ pub fn (mut v Builder) cc_msvc() {
|
|||
out_name_pdb := os.real_path(v.out_name_c + '.pdb')
|
||||
out_name_cmd_line := os.real_path(v.out_name_c + '.rsp')
|
||||
mut a := []string{}
|
||||
//
|
||||
|
||||
env_cflags := os.getenv('CFLAGS')
|
||||
mut all_cflags := '${env_cflags} ${v.pref.cflags}'
|
||||
if all_cflags != ' ' {
|
||||
a << all_cflags
|
||||
}
|
||||
//
|
||||
|
||||
// Default arguments
|
||||
// `-w` no warnings
|
||||
// `/we4013` 2 unicode defines, see https://docs.microsoft.com/en-us/cpp/error-messages/compiler-warnings/compiler-warning-level-3-c4013?redirectedfrom=MSDN&view=msvc-170
|
||||
|
@ -406,7 +406,7 @@ fn (mut v Builder) build_thirdparty_obj_file_with_msvc(mod string, path string,
|
|||
flags := msvc_string_flags(moduleflags)
|
||||
inc_dirs := flags.inc_paths.join(' ')
|
||||
defines := flags.defines.join(' ')
|
||||
//
|
||||
|
||||
mut oargs := []string{}
|
||||
env_cflags := os.getenv('CFLAGS')
|
||||
mut all_cflags := '${env_cflags} ${v.pref.cflags}'
|
||||
|
@ -415,7 +415,7 @@ fn (mut v Builder) build_thirdparty_obj_file_with_msvc(mod string, path string,
|
|||
}
|
||||
oargs << '/NOLOGO'
|
||||
oargs << '/volatile:ms'
|
||||
//
|
||||
|
||||
if v.pref.is_prod {
|
||||
oargs << '/O2'
|
||||
oargs << '/MD'
|
||||
|
|
|
@ -34,7 +34,7 @@ pub fn (mut b Builder) find_invalidated_modules_by_files(all_files []string) []s
|
|||
mut new_hashes := map[string]string{}
|
||||
mut old_hashes := map[string]string{}
|
||||
mut sb_new_hashes := strings.new_builder(1024)
|
||||
//
|
||||
|
||||
mut cm := vcache.new_cache_manager(all_files)
|
||||
sold_hashes := cm.load('.hashes', 'all_files') or { ' ' }
|
||||
// eprintln(sold_hashes)
|
||||
|
|
|
@ -42,10 +42,10 @@ pub const vroot_is_deprecated_message = '@VROOT is deprecated, use @VMODROOT or
|
|||
pub struct Checker {
|
||||
pub mut:
|
||||
pref &pref.Preferences = unsafe { nil } // Preferences shared from V struct
|
||||
//
|
||||
|
||||
table &ast.Table = unsafe { nil }
|
||||
file &ast.File = unsafe { nil }
|
||||
//
|
||||
|
||||
nr_errors int
|
||||
nr_warnings int
|
||||
nr_notices int
|
||||
|
@ -57,7 +57,7 @@ pub mut:
|
|||
notice_lines map[string]bool // dedup notices
|
||||
error_details []string
|
||||
should_abort bool // when too many errors/warnings/notices are accumulated, .should_abort becomes true. It is checked in statement/expression loops, so the checker can return early, instead of wasting time.
|
||||
//
|
||||
|
||||
expected_type ast.Type
|
||||
expected_or_type ast.Type // fn() or { 'this type' } eg. string. expected or block type
|
||||
expected_expr_type ast.Type // if/match is_expr: expected_type
|
||||
|
@ -135,7 +135,7 @@ mut:
|
|||
variant_data_type ast.Type
|
||||
fn_return_type ast.Type
|
||||
orm_table_fields map[string][]ast.StructField // known table structs
|
||||
//
|
||||
|
||||
v_current_commit_hash string // same as old C.V_CURRENT_COMMIT_HASH
|
||||
assign_stmt_attr string // for `x := [1,2,3] @[freed]`
|
||||
}
|
||||
|
|
|
@ -87,7 +87,7 @@ fn test_all() {
	global_run_dir := '${checker_dir}/globals_run'
	run_dir := '${checker_dir}/run'
	skip_unused_dir := 'vlib/v/tests/skip_unused'
	//

	checker_tests := get_tests_in_dir(checker_dir, false).filter(!it.contains('with_check_option'))
	parser_tests := get_tests_in_dir(parser_dir, false)
	scanner_tests := get_tests_in_dir(scanner_dir, false)

@ -115,7 +115,7 @@ fn test_all() {
	tasks.add('', checker_with_check_option_dir, '-check', '.out', checker_with_check_option_tests,
		false)
	tasks.run()
	//

	if os.user_os() == 'linux' {
		mut skip_unused_tasks := Tasks{
			vexe: vexe

@ -128,7 +128,7 @@ fn test_all() {
			'.skip_unused.run.out', skip_unused_dir_tests, false)
		skip_unused_tasks.run()
	}
	//

	if github_job == 'ubuntu-tcc' {
		// This is done with tcc only, because the error output is compiler specific.
		// Note: the tasks should be run serially, since they depend on

@ -66,7 +66,7 @@ pub mut:
	return_values []Object
	cur_mod string
	cur_file string
	//

	trace_file_paths []string
	trace_function_names []string
	back_trace []EvalTrace

@ -97,7 +97,7 @@ pub fn (mut e Eval) run_func(func ast.FnDecl, _args ...Object) {
		e.back_trace.pop()
	}
	is_main := func.name == 'main.main'
	//

	mut args := _args.clone()
	if !is_main && func.params.len != args.len && !func.is_variadic {
		e.error('mismatched parameter length for ${func.name}: got `${args.len}`, expected `${func.params.len}`')

@ -11,7 +11,7 @@ fn test_interpret() {
	os.chdir(vroot)!
	dir := os.join_path(vroot, 'vlib/v/eval/testdata')
	files := os.ls(dir)!
	//

	tests := files.filter(it.ends_with('.vv'))
	if tests.len == 0 {
		println('no interpreter tests found')

@ -507,7 +507,7 @@ pub fn gen(files []&ast.File, mut table ast.Table, pref_ &pref.Preferences) (str
			g.definitions.writeln('int _v_type_idx_${sym.cname}() { return ${idx}; };')
		}
	}
	//

	// v files are finished, what remains is pure C code
	g.gen_vlines_reset()
	if g.pref.build_mode != .build_module {

@ -301,7 +301,7 @@ pub fn (mut g Gen) gen_c_main_for_tests() {
	g.writeln('\t_vinit(___argc, (voidptr)___argv);')
	g.writeln('\tmain__vtest_init();')
	g.gen_c_main_profile_hook()
	//

	mut all_tfuncs := g.get_all_test_function_names()
	all_tfuncs = g.filter_only_matching_fn_names(all_tfuncs)
	g.writeln('\tstring v_test_file = ${ctoslit(g.pref.path)};')

@ -350,7 +350,7 @@ pub fn (mut g Gen) gen_c_main_for_tests() {
	g.writeln('')
	g.writeln('\t_vtrunner._method_finish(_vtobj);')
	g.writeln('\tint test_exit_code = _vtrunner._method_exit_code(_vtobj);')
	//

	g.writeln('\t_vtrunner._method__v_free(_vtobj);')
	g.writeln('')
	g.writeln('\t_vcleanup();')

@ -345,7 +345,6 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
		}
	}

	//
	if is_live_wrap {
		if is_livemain {
			g.definitions.write_string('${type_name} (* ${impl_fn_name})(')

@ -84,7 +84,7 @@ fn (mut g Gen) generate_hotcode_reloading_main_caller() {
	ccompiler := '-cc ${ccpath}'
	so_debug_flag := if g.pref.is_debug { '-cg' } else { '' }
	vopts := '${ccompiler} ${so_debug_flag} -sharedlive -shared'
	//

	g.writeln('\t\t// start background reloading thread')
	if g.pref.os == .windows {
		g.writeln('\t\tlive_fn_mutex = CreateMutexA(0, 0, 0);')

@ -156,7 +156,7 @@ fn (mut g Gen) str_format(node ast.StringInterLiteral, i int, fmts []u8) (u64, s
	}
	res := get_str_intp_u32_format(fmt_type, node.fwidths[i], node.precisions[i], remove_tail_zeros,
		node.pluss[i], u8(pad_ch), base, upper_case)
	//

	return res, fmt_type.str()
}

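Note (illustrative, not part of the diff): `str_format` above compiles the width/precision/padding part of V string interpolation. For reference, the kind of source syntax it handles:

fn main() {
	x := 123.4567
	n := 255
	println('[${x:10.2f}]') // width 10, 2 decimals, right aligned
	println('[${n:-10}]') // left aligned in a field of width 10
	println('[${n:010}]') // zero padded to 10 digits
	println('[${n:X}]') // upper case hexadecimal
}
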
@ -18,7 +18,7 @@ fn test_golang() {
	vroot := os.dir(vexe)
	dir := os.join_path(vroot, 'vlib/v/gen/golang/tests')
	files := os.ls(dir) or { panic(err) }
	//

	wrkdir := os.join_path(os.vtmp_dir(), 'golang_tests')
	os.mkdir_all(wrkdir) or { panic(err) }
	defer {

@ -1091,7 +1091,7 @@ fn (mut g Gen) println(comment string) {
		sb.write_string(hexstr)
	}
	g.debug_pos = i32(g.buf.len)
	//

	colored := sb.str()
	plain := term.strip_ansi(colored)
	padding := ' '.repeat(mu.max(1, 40 - plain.len))

@ -326,7 +326,7 @@ pub fn (mut g Gen) generate_macho_object_header() {
	g.write32(0)
	// lc_symtab
	g.sym_table_command()
	//

	g.macho_add_loadcommand(native.lc_dysymtab, native.macho_d_size)
	g.write32(0)
	g.write32(2)

@ -20,7 +20,7 @@ fn test_native() {
	vroot := os.dir(vexe)
	dir := os.join_path(vroot, 'vlib', 'v', 'gen', 'native', 'tests')
	files := os.ls(dir) or { panic(err) }
	//

	wrkdir := os.join_path(os.vtmp_dir(), 'native_tests')
	os.mkdir_all(wrkdir) or { panic(err) }
	defer {

@ -25,7 +25,7 @@ mut:
	table &ast.Table = unsafe { nil }
	eval eval.Eval
	enum_vals map[string]Enum
	//

	mod wasm.Module
	pool serialise.Pool
	func wasm.Function

@ -31,7 +31,7 @@ fn test_wasm() {
	vroot := os.dir(vexe)
	dir := os.join_path(vroot, 'vlib/v/gen/wasm/tests')
	files := os.ls(dir)!
	//

	wrkdir := os.join_path(os.vtmp_dir(), 'wasm_tests')
	os.mkdir_all(wrkdir)!
	defer {

@ -103,18 +103,18 @@ fn load_lib(mut r live.LiveReloadInfo, new_lib_path string) {
	elog(r, 'live mutex locking...')
	C.pthread_mutex_lock(r.live_fn_mutex)
	elog(r, 'live mutex locked')
	//

	if r.cb_locked_before != unsafe { nil } {
		r.cb_locked_before(r)
	}
	//

	protected_load_lib(mut r, new_lib_path)
	//

	r.reloads_ok++
	if r.cb_locked_after != unsafe { nil } {
		r.cb_locked_after(r)
	}
	//

	elog(r, 'live mutex unlocking...')
	C.pthread_mutex_unlock(r.live_fn_mutex)
	elog(r, 'live mutex unlocked')

@ -175,7 +175,7 @@ fn test_live_program_can_be_compiled() {
	compile_cmd := '${os.quoted_path(vexe)} -cg -keepc -nocolor -live -o ${os.quoted_path(genexe_file)} ${os.quoted_path(main_source_file)}'
	eprintln('> compile_cmd: ${compile_cmd}')
	os.system(compile_cmd)
	//

	cmd := '${os.quoted_path(genexe_file)} > /dev/null &'
	eprintln('Running with: ${cmd}')
	res := os.system(cmd)

@ -530,7 +530,7 @@ pub fn (mut w Walker) call_expr(mut node ast.CallExpr) {
	}
	w.expr(node.left)
	w.or_block(node.or_block)
	//

	fn_name := node.fkey()
	if w.used_fns[fn_name] {
		return

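Note (illustrative, not part of the diff): the walker above records every visited function in `used_fns`, so each function is processed only once during dead code elimination. A simplified sketch of that walk over a plain call graph; the types and names here are hypothetical, not the real `markused` API:

struct CallGraph {
mut:
	calls    map[string][]string // function name -> callees
	used_fns map[string]bool
}

fn (mut g CallGraph) mark_used(fkey string) {
	if g.used_fns[fkey] {
		// already visited: avoids repeated work and infinite recursion on cycles
		return
	}
	g.used_fns[fkey] = true
	for callee in g.calls[fkey] {
		g.mark_used(callee)
	}
}
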
@ -232,13 +232,13 @@ fn vweb_tmpl_${fn_name}() string {

')
	source.write_string(tmpl_str_start)
	//

	mut state := State.simple
	template_ext := os.file_ext(template_file)
	if template_ext.to_lower() == '.html' {
		state = .html
	}
	//

	mut in_span := false
	mut end_of_line_pos := 0
	mut start_of_line_pos := 0

@ -21,7 +21,7 @@ fn (mut p Preferences) expand_lookup_paths() {
	}
	p.vlib = os.join_path(p.vroot, 'vlib')
	p.vmodules_paths = os.vmodules_paths()
	//

	if p.lookup_path.len == 0 {
		p.lookup_path = ['@vlib', '@vmodules']
	}

@ -143,7 +143,7 @@ pub fn (mut p Preferences) fill_with_defaults() {
		// No OS specified? Use current system
		p.os = if p.backend != .wasm { get_host_os() } else { .wasi }
	}
	//

	p.try_to_use_tcc_by_default()
	if p.ccompiler == '' {
		p.default_c_compiler()

@ -22,20 +22,20 @@ fn test_cflags() {
		debug_arg = '/MDd /D_DEBUG'
		optimised_arg = '/O1'
	}
	//

	println('> test whether -cflags is passed to the backend C compiler')
	fail := custom_compile('failing.exe', 'NONSENSE_OPTION')
	assert fail.compilation.exit_code != 0
	println('> NONSENSE_OPTION failed the C build, OK')
	//

	dbg := custom_compile('debug_hw.exe', debug_arg)
	assert dbg.compilation.exit_code == 0
	assert dbg.file_size > 0
	//

	opt := custom_compile('optimised_hw.exe', optimised_arg)
	assert opt.compilation.exit_code == 0
	assert opt.file_size > 0
	//

	$if !tinyc {
		// tcc does almost no optimisations, so the differences are very insignificant
		// optimised_file_size should be smaller in general, but not on the Ubuntu CI for some reason :-|

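Note (illustrative, not part of the diff): the test above verifies that whatever is passed via `-cflags` actually reaches the backend C compiler: a nonsense option must fail the C build, while valid debug/optimisation options must succeed. A minimal sketch of driving such a compilation from V, assuming a `v` executable on PATH and hypothetical file names:

import os

fn compile_with_cflags(cflags string, output string, source string) os.Result {
	// forward the given flags to the backend C compiler via -cflags
	cmd := 'v -cflags "${cflags}" -o ${os.quoted_path(output)} ${os.quoted_path(source)}'
	return os.execute(cmd)
}

fn main() {
	res := compile_with_cflags('-O2', 'hw.exe', 'hello.v')
	println('exit code: ${res.exit_code}')
}
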
@ -185,22 +185,21 @@ pub mut:
	path string // Path to file/folder to compile
	line_info string // `-line-info="file.v:28"`: for "mini VLS" (shows information about objects on provided line)
	linfo LineInfo
	//

	run_only []string // VTEST_ONLY_FN and -run-only accept comma separated glob patterns.
	exclude []string // glob patterns for excluding .v files from the list of .v files that otherwise would have been used for a compilation, example: `-exclude @vlib/math/*.c.v`
	// Only test_ functions that match these patterns will be run. -run-only is valid only for _test.v files.
	//
	// -d vfmt and -d another=0 for `$if vfmt { will execute }` and `$if another ? { will NOT get here }`
	compile_defines []string // just ['vfmt']
	compile_defines_all []string // contains both: ['vfmt','another']
	compile_values map[string]string // the map will contain for `-d key=value`: compile_values['key'] = 'value', and for `-d ident`, it will be: compile_values['ident'] = 'true'
	//

	run_args []string // `v run x.v 1 2 3` => `1 2 3`
	printfn_list []string // a list of generated function names, whose source should be shown, for debugging
	//

	print_v_files bool // when true, just print the list of all parsed .v files then stop.
	print_watched_files bool // when true, just print the list of all parsed .v files + all the compiled $tmpl files, then stop. Used by `v watch run webserver.v`
	//

	skip_running bool // when true, do no try to run the produced file (set by b.cc(), when -o x.c or -o x.js)
	skip_warnings bool // like C's "-w", forces warnings to be ignored.
	skip_notes bool // force notices to be ignored/not shown.

@ -211,14 +210,14 @@ pub mut:
	reuse_tmpc bool // do not use random names for .tmp.c and .tmp.c.rsp files, and do not remove them
	no_rsp bool // when true, pass C backend options directly on the CLI (do not use `.rsp` files for them, some older C compilers do not support them)
	no_std bool // when true, do not pass -std=gnu99(linux)/-std=c99 to the C backend
	//

	no_parallel bool // do not use threads when compiling; slower, but more portable and sometimes less buggy
	parallel_cc bool // whether to split the resulting .c file into many .c files + a common .h file, that are then compiled in parallel, then linked together.
	only_check_syntax bool // when true, just parse the files, then stop, before running checker
	check_only bool // same as only_check_syntax, but also runs the checker
	experimental bool // enable experimental features
	skip_unused bool // skip generating C code for functions, that are not used
	//

	use_color ColorOutput // whether the warnings/errors should use ANSI color escapes.
	cleanup_files []string // list of temporary *.tmp.c and *.tmp.c.rsp files. Cleaned up on successful builds.
	build_options []string // list of options, that should be passed down to `build-module`, if needed for -usecache

@ -289,11 +288,11 @@ fn run_code_in_tmp_vfile_and_exit(args []string, mut res Preferences, option_nam
	command_options := cmdline.options_after(args, [option_name])[1..].join(' ')
	vexe := vexe_path()
	tmp_cmd := '${os.quoted_path(vexe)} ${output_option} ${run_options} run ${os.quoted_path(tmp_v_file_path)} ${command_options}'
	//

	res.vrun_elog('tmp_cmd: ${tmp_cmd}')
	tmp_result := os.system(tmp_cmd)
	res.vrun_elog('exit code: ${tmp_result}')
	//

	if output_option != '' {
		res.vrun_elog('remove tmp exe file: ${tmp_exe_file_path}')
		os.rm(tmp_exe_file_path) or {}

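Note (illustrative, not part of the diff): the `compile_defines`/`compile_values` fields above back the `-d` flag. A small usage example of the documented `$if ident ? {}` form (compile with e.g. `v -d mydebug run example.v`; the define name `mydebug` is arbitrary):

fn main() {
	$if mydebug ? {
		println('compiled with -d mydebug')
	} $else {
		println('compiled without -d mydebug')
	}
}
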
@ -85,7 +85,7 @@ pub fn (prefs &Preferences) should_compile_filtered_files(dir string, files_ []s
		}
		all_v_files << os.join_path(dir, file)
	}
	//

	mut defaults := []string{}
	mut fnames_no_postfixes := map[string][]string{}
	for file in all_v_files {

@ -183,18 +183,18 @@ pub fn (prefs &Preferences) should_compile_c(file string) bool {
	if prefs.os != .windows && (file.ends_with('_windows.c.v') || file.ends_with('_windows.v')) {
		return false
	}
	//

	if prefs.os != .linux && (file.ends_with('_linux.c.v') || file.ends_with('_linux.v')) {
		return false
	}
	//

	if prefs.os != .macos && (file.ends_with('_darwin.c.v') || file.ends_with('_darwin.v')) {
		return false
	}
	if prefs.os != .macos && (file.ends_with('_macos.c.v') || file.ends_with('_macos.v')) {
		return false
	}
	//

	if prefs.os != .ios && (file.ends_with('_ios.c.v') || file.ends_with('_ios.v')) {
		return false
	}

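Note (illustrative, not part of the diff): `should_compile_c` above filters platform specific sources purely by filename suffix (`_windows.c.v`, `_linux.v`, `_darwin.c.v`, ...). A simplified sketch of the same suffix rule, with a hypothetical helper name (the real code additionally maps both `_darwin` and `_macos` files to the macOS target):

// report whether a source file should be built for the given target OS name
fn is_file_for_os(file string, target_os string) bool {
	suffixes := ['windows', 'linux', 'darwin', 'macos', 'ios']
	for s in suffixes {
		if file.ends_with('_${s}.c.v') || file.ends_with('_${s}.v') {
			return s == target_os
		}
	}
	// no platform suffix => compile everywhere
	return true
}

fn main() {
	println(is_file_for_os('net_windows.c.v', 'linux')) // false
	println(is_file_for_os('util.v', 'linux')) // true
}
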
@ -16,13 +16,13 @@ mut:
	fname string
	plan_tests int
	test_counter int
	//

	file_test_info VTestFileMetaInfo
	fn_test_info VTestFnMetaInfo
	fn_assert_passes u64
	fn_passes u64
	fn_fails u64
	//

	total_assert_passes u64
	total_assert_fails u64
}

@ -20,14 +20,13 @@ pub mut:
	use_color bool
	use_relative_paths bool
	all_assertsions []&VAssertMetaInfo
	//
mut:
	file_test_info VTestFileMetaInfo
	fn_test_info VTestFnMetaInfo
	fn_assert_passes u64
	fn_passes u64
	fn_fails u64
	//

	total_assert_passes u64
	total_assert_fails u64
}

Some files were not shown because too many files have changed in this diff.