Merge branch 'vlang:master' into opt-math-big-64bit-ops

Commit afbdc9eb7e in the mirror of https://github.com/vlang/v.git (synced 2025-09-13 22:42:26 +03:00)
24 changed files with 379 additions and 258 deletions
thirdparty/build_scripts/thirdparty-linux-armv7_bdwgc.sh (new executable file, vendored, 61 lines)
@ -0,0 +1,61 @@
+#!/usr/bin/env bash
+
+# NOTE: The configure step may require:
+# sudo apt-get install libatomic-ops-dev
+
+set -e
+
+if ! test -f vlib/v/compiler_errors_test.v; then
+	echo "this script should be run in V's main repo folder!"
+	exit 1
+fi
+
+export CURRENT_SCRIPT_PATH=$(realpath "$0")
+
+export CC="${CC:-gcc}"
+export TCC_FOLDER="${TCC_FOLDER:-thirdparty/tcc}"
+export LIBGC_COMMIT="${LIBGC_COMMIT:-master}"
+mkdir -p $TCC_FOLDER/lib/
+
+echo " CC: $CC"
+echo " TCC_FOLDER: $TCC_FOLDER"
+echo "LIBGC_COMMIT: $LIBGC_COMMIT"
+echo ===============================================================
+
+rm -rf bdwgc/
+
+pushd .
+git clone https://github.com/ivmai/bdwgc
+cd bdwgc/
+
+git checkout $LIBGC_COMMIT
+export LIBGC_COMMIT_FULL_HASH=$(git rev-parse HEAD)
+
+./autogen.sh
+
+CC=$CC CFLAGS='-fPIC' LDFLAGS='-fPIC' ./configure \
+	--disable-dependency-tracking \
+	--disable-docs \
+	--enable-handle-fork=yes \
+	--enable-rwlock \
+	--enable-threads=pthreads \
+	--enable-static \
+	--enable-shared=no \
+	--enable-parallel-mark \
+	--enable-single-obj-compilation \
+	--enable-gc-debug \
+	--with-libatomic-ops=yes \
+	--enable-sigrt-signals
+
+make
+
+popd
+
+cp bdwgc/.libs/libgc.a $TCC_FOLDER/lib/libgc.a
+
+date > $TCC_FOLDER/lib/libgc_build_on_date.txt
+echo $LIBGC_COMMIT_FULL_HASH > $TCC_FOLDER/lib/libgc_build_source_hash.txt
+uname -a > $TCC_FOLDER/lib/libgc_build_machine_uname.txt
+
+ls -la $TCC_FOLDER/lib/libgc.a
+echo "Done compiling libgc, at commit $LIBGC_COMMIT , full hash: $LIBGC_COMMIT_FULL_HASH . The static library is in $TCC_FOLDER/lib/libgc.a "
@ -1,4 +1,4 @@
-// vtest retry: 2
+// vtest retry: 5
 import log
 import net
 import net.http
@ -29,35 +29,20 @@ const stderr_value = 2
 // (Must be realized in Syscall) (Must be specified)
 // ref: http://www.ccfit.nsu.ru/~deviv/courses/unix/unix/ng7c229.html
 pub const s_ifmt = 0xF000 // type of file
-
 pub const s_ifdir = 0x4000 // directory
-
 pub const s_ifreg = 0x8000 // regular file
-
 pub const s_iflnk = 0xa000 // link
-
 pub const s_isuid = 0o4000 // SUID
-
 pub const s_isgid = 0o2000 // SGID
-
 pub const s_isvtx = 0o1000 // Sticky
-
 pub const s_irusr = 0o0400 // Read by owner
-
 pub const s_iwusr = 0o0200 // Write by owner
-
 pub const s_ixusr = 0o0100 // Execute by owner
-
 pub const s_irgrp = 0o0040 // Read by group
-
 pub const s_iwgrp = 0o0020 // Write by group
-
 pub const s_ixgrp = 0o0010 // Execute by group
-
 pub const s_iroth = 0o0004 // Read by others
-
 pub const s_iwoth = 0o0002 // Write by others
-
 pub const s_ixoth = 0o0001
 
 fn C.utime(&char, &C.utimbuf) int
vlib/os/util/util.v (new file, 24 lines)
@ -0,0 +1,24 @@
+module util
+
+import os
+
+// TODO `select` doesn't work with time.Duration for some reason
+pub fn execute_with_timeout(cmd string, timeout i64) ?os.Result {
+	ch := chan os.Result{cap: 1}
+	spawn fn [cmd] (c chan os.Result) {
+		res := os.execute(cmd)
+		c <- res
+	}(ch)
+	select {
+		a := <-ch {
+			return a
+		}
+		// timeout {
+		// 1000 * time.millisecond {
+		// timeout * time.millisecond {
+		timeout * 1_000_000 {
+			return none
+		}
+	}
+	return os.Result{}
+}
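The new os.util helper above races os.execute against a timeout using a buffered channel and select. A minimal usage sketch (the command string and the 2000 ms budget are illustrative, not part of the change):

	import os.util

	fn main() {
		// give the external command at most 2000 ms to complete
		res := util.execute_with_timeout('echo hello', 2000) or {
			eprintln('command did not finish in time')
			return
		}
		println(res.output)
	}

Note that on timeout the spawned worker keeps running and will still push its result into the buffered channel; the caller merely stops waiting for it.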
@ -14,6 +14,7 @@ import v.markused
 import v.depgraph
 import v.callgraph
 import v.dotgraph
+// import x.json2
 
 pub struct Builder {
 pub:
@ -475,7 +476,8 @@ pub fn (b &Builder) show_total_warns_and_errors_stats() {
 		println('checker summary: ${estring} V errors, ${wstring} V warnings, ${nstring} V notices')
 	}
 }
-	if b.checker.nr_errors > 0 && b.pref.path.ends_with('.v') && os.is_file(b.pref.path) {
+	if !b.pref.is_vls && b.checker.nr_errors > 0 && b.pref.path.ends_with('.v')
+		&& os.is_file(b.pref.path) {
 		if b.checker.errors.any(it.message.starts_with('unknown ')) {
 			// Sometimes users try to `v main.v`, when they have several .v files in their project.
 			// Then, they encounter puzzling errors about missing or unknown types. In this case,
@ -519,6 +521,7 @@ pub fn (mut b Builder) print_warnings_and_errors() {
 		}
 	}
 
+	mut json_errors := []util.JsonError{}
 	for file in b.parsed_files {
 		for err in file.errors {
 			kind := if b.pref.is_verbose {
@ -526,9 +529,24 @@ pub fn (mut b Builder) print_warnings_and_errors() {
 			} else {
 				'error:'
 			}
-			util.show_compiler_message(kind, err.CompilerMessage)
+			if b.pref.json_errors {
+				json_errors << util.JsonError{
+					message: err.message
+					path: err.file_path
+					line_nr: err.pos.line_nr + 1
+					col: err.pos.col + 1
+				}
+				// util.print_json_error(kind, err.CompilerMessage)
+			} else {
+				util.show_compiler_message(kind, err.CompilerMessage)
+			}
 		}
 	}
+	if b.pref.json_errors {
+		util.print_json_errors(json_errors)
+		// eprintln(json2.encode_pretty(json_errors))
+	}
 
 	if !b.pref.skip_warnings {
 		for file in b.parsed_files {
@ -1881,6 +1881,10 @@ fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
 		}
 	} else {
 		if unknown_field_msg == '' {
+			if field_name == '' && c.pref.is_vls {
+				// VLS will often have `foo.`, skip the no field error
+				return ast.void_type
+			}
 			unknown_field_msg = 'type `${sym.name}` has no field named `${field_name}`'
 		}
 		if sym.info is ast.Struct {
@ -508,12 +508,14 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
 	unwrapped_val_type := g.unwrap_generic(val_type)
 	right_sym := g.table.sym(unwrapped_val_type)
 	unaliased_right_sym := g.table.final_sym(unwrapped_val_type)
-	is_fixed_array_var := !g.pref.translated && unaliased_right_sym.kind == .array_fixed
-		&& val !is ast.ArrayInit
+	unaliased_left_sym := g.table.final_sym(g.unwrap_generic(var_type))
+	is_fixed_array_var := unaliased_right_sym.kind == .array_fixed && val !is ast.ArrayInit
 		&& (val in [ast.Ident, ast.IndexExpr, ast.CallExpr, ast.SelectorExpr, ast.DumpExpr, ast.InfixExpr]
 		|| (val is ast.CastExpr && val.expr !is ast.ArrayInit)
 		|| (val is ast.PrefixExpr && val.op == .arrow)
 		|| (val is ast.UnsafeExpr && val.expr in [ast.SelectorExpr, ast.Ident, ast.CallExpr]))
+		&& !((g.pref.translated || g.file.is_translated)
+		&& unaliased_left_sym.kind != .array_fixed)
 	g.is_assign_lhs = true
 	g.assign_op = node.op
@ -553,10 +555,18 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
 			}
 			g.writeln(';}')
 		}
-	} else if node.op == .assign && !g.pref.translated && (is_fixed_array_init
+	} else if node.op == .assign && (is_fixed_array_init
 		|| (unaliased_right_sym.kind == .array_fixed && val in [ast.Ident, ast.CastExpr])) {
 		// Fixed arrays
-		if is_fixed_array_init && var_type.has_flag(.option) {
+		if unaliased_left_sym.kind != .array_fixed && unaliased_right_sym.kind == .array_fixed
+			&& (g.pref.translated || g.file.is_translated) {
+			// translated:
+			// arr = [5]u8{}
+			// ptr = arr => ptr = &arr[0]
+			g.expr(left)
+			g.write(' = ')
+			g.expr(val)
+		} else if is_fixed_array_init && var_type.has_flag(.option) {
 			g.expr(left)
 			g.write(' = ')
 			g.expr_with_opt(val, val_type, var_type)
@ -7106,7 +7106,7 @@ fn (mut g Gen) gen_or_block_stmts(cvar_name string, cast_typ string, stmts []ast
 			g.writeln(' }, (${option_name}*)&${cvar_name}, sizeof(${cast_typ}));')
 			g.indent--
 			return
-		} else {
+		} else if return_type.clear_option_and_result() != ast.void_type {
 			g.write('*(${cast_typ}*) ${cvar_name}${tmp_op}data = ')
 		}
 	}
@ -2707,6 +2707,8 @@ fn (mut g Gen) ref_or_deref_arg(arg ast.CallArg, expected_type ast.Type, lang as
 		}
 		if arg.expr.is_as_cast() {
 			g.inside_smartcast = true
+		} else if arg_typ_sym.is_int() && arg.expr !is ast.CastExpr {
+			g.write('(voidptr)&')
 		} else {
 			g.write('ADDR(${g.styp(atype)}, ')
 			needs_closing = true
@ -249,9 +249,12 @@ pub fn parse_file(path string, mut table ast.Table, comments_mode scanner.Commen
 		eprintln('> ${@MOD}.${@FN} comments_mode: ${comments_mode:-20} | path: ${path}')
 	}
 	mut p := Parser{
 		scanner: scanner.new_scanner_file(path, comments_mode, pref_) or { panic(err) }
 		table: table
 		pref: pref_
+		// Only set vls mode if it's the file the user requested via `v -vls-mode file.v`
+		// Otherwise we'd be parsing entire stdlib in vls mode
+		is_vls: pref_.is_vls && path == pref_.path
 		scope: &ast.Scope{
 			start_pos: 0
 			parent: table.global_scope
@ -257,8 +257,9 @@ pub mut:
 	// forwards compatibility settings:
 	relaxed_gcc14 bool = true // turn on the generated pragmas, that make gcc versions > 14 a lot less pedantic. The default is to have those pragmas in the generated C output, so that gcc-14 can be used on Arch etc.
 	//
 	subsystem Subsystem // the type of the window app, that is going to be generated; has no effect on !windows
 	is_vls bool
+	json_errors bool // -json-errors, for VLS and other tools
 }
 
 pub struct LineInfo {
@ -406,6 +407,9 @@ pub fn parse_args_and_show_errors(known_external_commands []string, args []strin
 		'-check' {
 			res.check_only = true
 		}
+		'-vls-mode' {
+			res.is_vls = true
+		}
 		'-?', '-h', '-help', '--help' {
 			// Note: help is *very important*, just respond to all variations:
 			res.is_help = true
@ -561,6 +565,9 @@ pub fn parse_args_and_show_errors(known_external_commands []string, args []strin
 		'-repl' {
 			res.is_repl = true
 		}
+		'-json-errors' {
+			res.json_errors = true
+		}
 		'-live' {
 			res.is_livemain = true
 		}
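Taken together, the two new switches are aimed at editor tooling: an invocation along the lines of `v -vls-mode -json-errors file.v` (shown only for illustration) relaxes checking for the requested file while emitting machine-readable errors; `-json-errors` can also be used on its own.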
vlib/v/tests/concurrency/chan_try_push_int_test.v (new file, 15 lines)
@ -0,0 +1,15 @@
+struct Foo {
+mut:
+	value int = 123
+}
+
+fn test_main() {
+	shared m := Foo{}
+	ch := chan int{cap: 1}
+	ch.try_push(rlock m {
+		m.value
+	})
+	mut tmp := int(0)
+	ch.try_pop(mut tmp)
+	assert tmp == 123
+}
vlib/v/tests/options/option_or_expr_dump_test.v (new file, 8 lines)
@ -0,0 +1,8 @@
+fn f() ? {
+	println('hello')
+	return none
+}
+
+fn test_main() {
+	f() or { dump(err) }
+}
@ -12,3 +12,36 @@ const ssf = [1, 2, 3]!
 fn test_const_name_without_main_prefix() {
 	assert ssf[0] == 1
 }
+
+struct ASMOperand {
+	constraint [2]i8
+}
+
+fn test_pointer_assign_without_memcpy() {
+	op := ASMOperand{
+		constraint: [i8(5), 4]!
+	}
+	str := &i8(0)
+	str = op.constraint
+	assert !isnil(str)
+
+	ops := [3]ASMOperand{}
+	pop := &ASMOperand(0)
+	pop = ops
+	assert pop[1].constraint[0] == 0
+}
+
+struct StubIndex {
+pub mut:
+	data [5][5]map[string]string
+}
+
+fn test_pointer_assign_with_memcpy() {
+	mut s := StubIndex{}
+	s.data[1] = [5]map[string]string{}
+
+	mut data := s.data[1]
+	data[0]['abc'] = '123'
+	k := data[0]['abc']
+	assert k == '123'
+}
@ -210,3 +210,44 @@ pub fn show_compiler_message(kind string, err errors.CompilerMessage) {
 		eprintln(bold('Details: ') + color('details', err.details))
 	}
 }
+
+pub struct JsonError {
+pub:
+	path string
+	message string
+	line_nr int
+	col int
+	len int
+}
+
+pub fn print_json_errors(errs []JsonError) {
+	// Can't import x.json2 or json, so have to manually generate json
+	eprintln('[')
+	for i, e in errs {
+		msg := e.message.replace('"', '\\"').replace('\n', '\\n')
+		eprintln('{
+	"path":"${e.path}",
+	"message":"${msg}",
+	"line_nr":${e.line_nr},
+	"col":${e.col},
+	"len":${e.len}
+}')
+		if i < errs.len - 1 {
+			eprintln(',')
+		}
+	}
+	eprintln(']')
+}
+
+/*
+pub fn print_json_error(kind string, err errors.CompilerMessage) {
+	e := JsonError{
+		message: err.message
+		path: err.file_path
+		line_nr: err.pos.line_nr + 1
+		col: err.pos.col + 1
+		len: err.pos.len
+	}
+	eprintln(json2.encode_pretty(e))
+}
+*/
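A small driver sketch for the new error helpers, assuming they live in v.util as in the hunk above (the error values here are invented for illustration):

	import v.util

	fn main() {
		errs := [
			util.JsonError{
				path: 'main.v'
				message: 'unexpected token'
				line_nr: 3
				col: 5
				len: 1
			},
		]
		// prints a JSON array of error objects to stderr,
		// mirroring what `v -json-errors` emits for compile errors
		util.print_json_errors(errs)
	}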
@ -1,14 +1,28 @@
 module decoder2
 
+// increment checks eof and increments checker by one
+@[inline]
+fn (mut checker Decoder) increment(message string) ! {
+	if checker.checker_idx + 1 == checker.json.len {
+		if message == '' {
+			return Error{}
+		}
+		checker.checker_error('EOF: ' + message)!
+	}
+	checker.checker_idx++
+}
+
+// skip_whitespace checks eof and increments checker until next non whitespace character
+@[inline]
+fn (mut checker Decoder) skip_whitespace(message string) ! {
+	for checker.json[checker.checker_idx] in whitespace_chars {
+		checker.increment(message)!
+	}
+}
+
 // check_json_format checks if the JSON string is valid and updates the decoder state.
 fn (mut checker Decoder) check_json_format() ! {
-	// skip whitespace
-	for checker.json[checker.checker_idx] in whitespace_chars {
-		if checker.checker_idx == checker.json.len {
-			break
-		}
-		checker.checker_idx++
-	}
+	checker.skip_whitespace('empty json')!
 
 	start_idx_position := checker.checker_idx
@ -18,7 +32,7 @@ fn (mut checker Decoder) check_json_format() ! {
 		`"` {
 			checker.values_info.push(ValueInfo{
 				position: checker.checker_idx
-				value_kind: .string_
+				value_kind: .string
 			})
 
 			actual_value_info_pointer = checker.values_info.last()
@ -82,54 +96,36 @@ fn (mut checker Decoder) check_json_format() ! {
 
 	actual_value_info_pointer.length = checker.checker_idx + 1 - start_idx_position
 
-	if checker.checker_idx < checker.json.len {
-		checker.checker_idx++
-	}
+	checker.increment('') or { return }
+	checker.skip_whitespace('') or { return }
 
-	for checker.checker_idx < checker.json.len
-		&& checker.json[checker.checker_idx] !in [`,`, `:`, `}`, `]`] {
-		// get trash characters after the value
-		if checker.json[checker.checker_idx] !in whitespace_chars {
-			checker.checker_error('invalid value. Unexpected character after ${actual_value_info_pointer.value_kind} end')!
-		} else {
-			// whitespace
-		}
-		checker.checker_idx++
+	if checker.json[checker.checker_idx] !in [`,`, `:`, `}`, `]`] {
+		checker.checker_error('invalid value. Unexpected character after ${actual_value_info_pointer.value_kind} end')!
 	}
 }
 
 fn (mut checker Decoder) check_string() ! {
-	// check if the JSON string is a valid string
-	if checker.checker_idx == checker.json.len {
-		checker.checker_idx--
-		return checker.checker_error('EOF error: string not closed')
-	}
-
-	checker.checker_idx++
+	checker.increment('string not closed')!
 
 	// check if the JSON string is a valid escape sequence
 	for checker.json[checker.checker_idx] != `"` {
 		if checker.json[checker.checker_idx] == `\\` {
-			if checker.checker_idx + 1 >= checker.json.len - 1 {
-				return checker.checker_error('invalid escape sequence')
-			}
-			escaped_char := checker.json[checker.checker_idx + 1]
+			checker.increment('invalid escape sequence')!
+			escaped_char := checker.json[checker.checker_idx]
 			match escaped_char {
-				`/`, `b`, `f`, `n`, `r`, `t`, `"`, `\\` {
-					checker.checker_idx++ // make sure escaped quotation marks are skipped
-				}
+				`/`, `b`, `f`, `n`, `r`, `t`, `"`, `\\` {}
 				`u` {
 					// check if the JSON string is a valid unicode escape sequence
-					escaped_char_last_index := checker.checker_idx + 5
+					escaped_char_last_index := checker.checker_idx + 4
 
 					if escaped_char_last_index < checker.json.len {
 						// 2 bytes for the unicode escape sequence `\u`
-						checker.checker_idx += 2
+						checker.increment('invalid escape sequence')!
 
 						for checker.checker_idx < escaped_char_last_index {
 							match checker.json[checker.checker_idx] {
 								`0`...`9`, `a`...`f`, `A`...`F` {
-									checker.checker_idx++
+									checker.increment('invalid unicode escape sequence')!
 								}
 								else {
 									return checker.checker_error('invalid unicode escape sequence')
@ -138,7 +134,7 @@ fn (mut checker Decoder) check_string() ! {
 							}
 						}
 						continue
 					} else {
-						return checker.checker_error('short unicode escape sequence ${checker.json[checker.checker_idx..escaped_char_last_index]}')
+						return checker.checker_error('short unicode escape sequence ${checker.json[checker.checker_idx - 1..checker.json.len - 1]}')
 					}
 				}
 				else {
@ -146,81 +142,57 @@ fn (mut checker Decoder) check_string() ! {
 				}
 			}
 		}
-		checker.checker_idx++
+		checker.increment('string not closed')!
 	}
 }
 
 fn (mut checker Decoder) check_number() ! {
 	// check if the JSON string is a valid float or integer
 	if checker.json[checker.checker_idx] == `-` {
-		checker.checker_idx++
-	}
-
-	if checker.checker_idx == checker.json.len {
-		checker.checker_idx--
-		return checker.checker_error('expected digit got EOF')
+		checker.increment('expected digit')!
 	}
 
 	// integer part
 	if checker.json[checker.checker_idx] == `0` {
-		checker.checker_idx++
+		checker.increment('') or { return }
 	} else if checker.json[checker.checker_idx] >= `1` && checker.json[checker.checker_idx] <= `9` {
-		checker.checker_idx++
-
-		for checker.checker_idx < checker.json.len && checker.json[checker.checker_idx] >= `0`
-			&& checker.json[checker.checker_idx] <= `9` {
-			checker.checker_idx++
+		checker.increment('') or { return }
+		for checker.json[checker.checker_idx] >= `0` && checker.json[checker.checker_idx] <= `9` {
+			checker.increment('') or { return }
 		}
 	} else {
 		return checker.checker_error('expected digit got ${checker.json[checker.checker_idx].ascii_str()}')
 	}
 
 	// fraction part
-	if checker.checker_idx != checker.json.len && checker.json[checker.checker_idx] == `.` {
-		checker.checker_idx++
-
-		if checker.checker_idx == checker.json.len {
-			checker.checker_idx--
-			return checker.checker_error('expected digit got EOF')
+	if checker.json[checker.checker_idx] == `.` {
+		checker.increment('expected digit')!
+
+		if !(checker.json[checker.checker_idx] >= `0` && checker.json[checker.checker_idx] <= `9`) {
+			return checker.checker_error('expected digit got ${checker.json[checker.checker_idx].ascii_str()}')
 		}
 
-		if checker.json[checker.checker_idx] >= `0` && checker.json[checker.checker_idx] <= `9` {
-			for checker.checker_idx < checker.json.len && checker.json[checker.checker_idx] >= `0`
-				&& checker.json[checker.checker_idx] <= `9` {
-				checker.checker_idx++
-			}
-		} else {
-			return checker.checker_error('expected digit got ${checker.json[checker.checker_idx].ascii_str()}')
+		for checker.json[checker.checker_idx] >= `0` && checker.json[checker.checker_idx] <= `9` {
+			checker.increment('') or { return }
 		}
 	}
 
 	// exponent part
-	if checker.checker_idx != checker.json.len
-		&& (checker.json[checker.checker_idx] == `e` || checker.json[checker.checker_idx] == `E`) {
-		checker.checker_idx++
-
-		if checker.checker_idx == checker.json.len {
-			checker.checker_idx--
-			return checker.checker_error('expected digit got EOF')
-		}
+	if checker.json[checker.checker_idx] == `e` || checker.json[checker.checker_idx] == `E` {
+		checker.increment('expected digit')!
 
 		if checker.json[checker.checker_idx] == `-` || checker.json[checker.checker_idx] == `+` {
-			checker.checker_idx++
-
-			if checker.checker_idx == checker.json.len {
-				checker.checker_idx--
-				return checker.checker_error('expected digit got EOF')
-			}
+			checker.increment('expected digit')!
 		}
 
-		if checker.json[checker.checker_idx] >= `0` && checker.json[checker.checker_idx] <= `9` {
-			for checker.checker_idx < checker.json.len && checker.json[checker.checker_idx] >= `0`
-				&& checker.json[checker.checker_idx] <= `9` {
-				checker.checker_idx++
-			}
-		} else {
+		if !(checker.json[checker.checker_idx] >= `0` && checker.json[checker.checker_idx] <= `9`) {
 			return checker.checker_error('expected digit got ${checker.json[checker.checker_idx].ascii_str()}')
 		}
+
+		for checker.json[checker.checker_idx] >= `0` && checker.json[checker.checker_idx] <= `9` {
+			checker.increment('') or { return }
+		}
 	}
 
 	checker.checker_idx--
@ -284,160 +256,58 @@ fn (mut checker Decoder) check_null() ! {
 }
 
 fn (mut checker Decoder) check_array() ! {
-	// check if the JSON string is an empty array
-	if checker.json.len >= checker.checker_idx + 2 {
-		checker.checker_idx++
-	} else {
-		return checker.checker_error('EOF error: There are not enough length for an array')
-	}
+	checker.increment('expected array end')!
+	checker.skip_whitespace('expected array end')!
 
 	for checker.json[checker.checker_idx] != `]` {
-		// skip whitespace
-		for checker.json[checker.checker_idx] in whitespace_chars {
-			if checker.checker_idx == checker.json.len {
-				checker.checker_idx--
-				break
-			}
-			checker.checker_idx++
-		}
-
-		if checker.json[checker.checker_idx] == `]` {
-			break
-		}
-
-		if checker.checker_idx == checker.json.len {
-			checker.checker_idx--
-			return checker.checker_error('EOF error: array not closed')
-		}
-
 		checker.check_json_format()!
 
-		// whitespace
-		for checker.json[checker.checker_idx] in whitespace_chars {
-			checker.checker_idx++
-		}
-		if checker.json[checker.checker_idx] == `]` {
-			break
-		}
-		if checker.checker_idx == checker.json.len {
-			checker.checker_idx--
-			return checker.checker_error('EOF error: braces are not closed')
-		}
+		checker.skip_whitespace('expected array end')!
 
 		if checker.json[checker.checker_idx] == `,` {
-			checker.checker_idx++
-			for checker.json[checker.checker_idx] in whitespace_chars {
-				checker.checker_idx++
-			}
+			checker.increment('expected array value')!
+			checker.skip_whitespace('') or {}
 			if checker.json[checker.checker_idx] == `]` {
 				return checker.checker_error('Cannot use `,`, before `]`')
 			}
-			continue
-		} else {
-			if checker.json[checker.checker_idx] == `]` {
-				break
-			} else {
-				return checker.checker_error('`]` after value')
-			}
 		}
 	}
 }
 
 fn (mut checker Decoder) check_object() ! {
-	if checker.json.len - checker.checker_idx < 2 {
-		return checker.checker_error('EOF error: expecting a complete object after `{`')
-	}
-	checker.checker_idx++
+	checker.increment('expected object end')!
+	checker.skip_whitespace('expected object end')!
+
 	for checker.json[checker.checker_idx] != `}` {
-		// skip whitespace
-		for checker.json[checker.checker_idx] in whitespace_chars {
-			if checker.checker_idx == checker.json.len {
-				checker.checker_idx--
-				break
-			}
-			checker.checker_idx++
-		}
-
-		if checker.json[checker.checker_idx] == `}` {
-			continue
-		}
-
 		if checker.json[checker.checker_idx] != `"` {
-			return checker.checker_error('Expecting object key')
+			checker.checker_error('Expecting object key')!
 		}
 
-		// Object key
 		checker.check_json_format()!
 
-		for checker.json[checker.checker_idx] != `:` {
-			if checker.checker_idx == checker.json.len {
-				checker.checker_idx--
-				return checker.checker_error('EOF error: key colon not found')
-			}
-			if checker.json[checker.checker_idx] !in whitespace_chars {
-				return checker.checker_error('invalid value after object key')
-			}
-			checker.checker_idx++
-		}
+		checker.skip_whitespace('expected `:`')!
 
 		if checker.json[checker.checker_idx] != `:` {
-			return checker.checker_error('Expecting `:` after object key')
+			checker.checker_error('expected `:`, got `${checker.json[checker.checker_idx].ascii_str()}`')!
 		}
 
-		// skip `:`
-		checker.checker_idx++
-
-		// skip whitespace
-		for checker.json[checker.checker_idx] in whitespace_chars {
-			checker.checker_idx++
-		}
-
-		match checker.json[checker.checker_idx] {
-			`"`, `[`, `{`, `0`...`9`, `-`, `n`, `t`, `f` {
-				checker.check_json_format()!
-
-				if checker.checker_idx == checker.json.len {
-					checker.checker_idx--
-					return checker.checker_error('EOF error: braces are not closed')
-				}
-
-				// whitespace
-				for checker.json[checker.checker_idx] in whitespace_chars {
-					checker.checker_idx++
-				}
-				if checker.json[checker.checker_idx] == `}` {
-					break
-				}
-
-				if checker.checker_idx == checker.json.len {
-					checker.checker_idx--
-					return checker.checker_error('EOF error: braces are not closed')
-				}
-
-				if checker.json[checker.checker_idx] == `,` {
-					checker.checker_idx++
-
-					if checker.checker_idx == checker.json.len {
-						checker.checker_idx--
-						return checker.checker_error('EOF error: Expecting object key after `,`')
-					}
-
-					for checker.json[checker.checker_idx] in whitespace_chars {
-						checker.checker_idx++
-					}
-					if checker.json[checker.checker_idx] != `"` {
-						return checker.checker_error('Expecting object key after `,`')
-					}
-				} else {
-					if checker.json[checker.checker_idx] == `}` {
-						break
-					} else {
-						return checker.checker_error('invalid object value')
-					}
-				}
-			}
-			else {
-				return checker.checker_error('invalid object value')
-			}
+		checker.increment('expected object value')!
+
+		checker.skip_whitespace('expected object value')!
+
+		checker.check_json_format()!
+
+		checker.skip_whitespace('expected object end')!
+
+		if checker.json[checker.checker_idx] == `,` {
+			checker.increment('expected object key')!
+			checker.skip_whitespace('') or {}
+
+			if checker.json[checker.checker_idx] == `}` {
+				return checker.checker_error('Cannot use `,`, before `}`')
+			}
 		}
 	}
 }
@ -126,7 +126,7 @@ fn (list &LinkedList[T]) free() {
 enum ValueKind {
 	array
 	object
-	string_
+	string
 	number
 	boolean
 	null
@ -316,7 +316,7 @@ fn (mut decoder Decoder) decode_value[T](mut val T) ! {
 	$if val is StringDecoder {
 		struct_info := decoder.current_node.value
 
-		if struct_info.value_kind == .string_ {
+		if struct_info.value_kind == .string {
 			val.from_json_string(decoder.json[struct_info.position + 1..struct_info.position +
 				struct_info.length - 1]) or {
 				decoder.decode_error('${typeof(*val).name}: ${err.msg()}')!
@ -370,7 +370,7 @@ fn (mut decoder Decoder) decode_value[T](mut val T) ! {
 	$if T.unaliased_typ is string {
 		string_info := decoder.current_node.value
 
-		if string_info.value_kind == .string_ {
+		if string_info.value_kind == .string {
 			mut string_buffer := []u8{cap: string_info.length} // might be too long but most json strings don't contain many escape characters anyways
 
 			mut buffer_index := 1
|
||||||
current_field_info = current_field_info.next
|
current_field_info = current_field_info.next
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
.string_ {
|
.string {
|
||||||
if decoder.current_node.next.value.length == 2 {
|
if decoder.current_node.next.value.length == 2 {
|
||||||
current_field_info = current_field_info.next
|
current_field_info = current_field_info.next
|
||||||
continue
|
continue
|
||||||
|
@ -705,7 +705,7 @@ fn (mut decoder Decoder) decode_value[T](mut val T) ! {
|
||||||
|
|
||||||
if value_info.value_kind == .number {
|
if value_info.value_kind == .number {
|
||||||
unsafe { decoder.decode_number(&val)! }
|
unsafe { decoder.decode_number(&val)! }
|
||||||
} else if value_info.value_kind == .string_ {
|
} else if value_info.value_kind == .string {
|
||||||
// recheck if string contains number
|
// recheck if string contains number
|
||||||
decoder.checker_idx = value_info.position + 1
|
decoder.checker_idx = value_info.position + 1
|
||||||
decoder.check_number()!
|
decoder.check_number()!
|
||||||
|
|
|
@ -42,7 +42,7 @@ fn (mut decoder Decoder) check_element_type_valid[T](element T, current_node &No
 	}
 
 	match current_node.value.value_kind {
-		.string_ {
+		.string {
 			$if element is string {
 				return true
 			} $else $if element is time.Time {
@ -220,7 +220,7 @@ fn (mut decoder Decoder) init_sumtype_by_value_kind[T](mut val T, value_info Val
 	mut failed_struct := false
 
 	match value_info.value_kind {
-		.string_ {
+		.string {
 			$for v in val.variants {
 				$if v.typ is string {
 					val = T(v)
@ -20,7 +20,7 @@ fn test_check_if_json_match() {
 	if err is json.JsonDecodeError {
 		assert err.line == 1
 		assert err.character == 1
-		assert err.message == 'Data: Expected object, but got string_'
+		assert err.message == 'Data: Expected object, but got string'
 	}
 	has_error = true
 }
@ -115,20 +115,60 @@ fn test_check_json_format() {
 		},
 		{
 			'json': '{"key": 123'
-			'error': 'Syntax: EOF error: braces are not closed'
+			'error': 'Syntax: Expecting object key' // improve message
 		},
 		{
 			'json': '{"key": 123,'
-			'error': 'Syntax: EOF error: Expecting object key after `,`'
+			'error': 'Syntax: EOF: expected object key'
 		},
 		{
 			'json': '{"key": 123, "key2": 456,}'
-			'error': 'Syntax: Expecting object key after `,`'
+			'error': 'Syntax: Cannot use `,`, before `}`'
 		},
 		{
 			'json': '[[1, 2, 3], [4, 5, 6],]'
 			'error': 'Syntax: Cannot use `,`, before `]`'
 		},
+		{
+			'json': ' '
+			'error': 'Syntax: EOF: empty json'
+		},
+		{
+			'json': '"'
+			'error': 'Syntax: EOF: string not closed'
+		},
+		{
+			'json': '"not closed'
+			'error': 'Syntax: EOF: string not closed'
+		},
+		{
+			'json': '"\\"'
+			'error': 'Syntax: EOF: string not closed'
+		},
+		{
+			'json': '"\\u8"'
+			'error': 'Syntax: short unicode escape sequence \\u8'
+		},
+		{
+			'json': '['
+			'error': 'Syntax: EOF: expected array end'
+		},
+		{
+			'json': '[ '
+			'error': 'Syntax: EOF: expected array end'
+		},
+		{
+			'json': '{'
+			'error': 'Syntax: EOF: expected object end'
+		},
+		{
+			'json': '{ '
+			'error': 'Syntax: EOF: expected object end'
+		},
+		{
+			'json': '{"key": "value" '
+			'error': 'Syntax: EOF: expected object end'
+		},
 	]
 
 	for json_and_error in json_and_error_message {
@ -36,7 +36,7 @@ fn test_json_string_invalid_escapes() {
 	json.decode[string](r'"\x"') or {
 		if err is json.JsonDecodeError {
 			assert err.line == 1
-			assert err.character == 2
+			assert err.character == 3
 			assert err.message == 'Syntax: unknown escape sequence'
 		}
 		has_error = true
@ -48,7 +48,7 @@ fn test_json_string_invalid_escapes() {
 	json.decode[string](r'"\u123"') or {
 		if err is json.JsonDecodeError {
 			assert err.line == 1
-			assert err.character == 2
+			assert err.character == 3
 			assert err.message == 'Syntax: short unicode escape sequence \\u123'
 		}
 		has_error = true
|
@ -60,7 +60,7 @@ fn test_raw_decode_map_invalid() {
|
||||||
if err is json.JsonDecodeError {
|
if err is json.JsonDecodeError {
|
||||||
assert err.line == 1
|
assert err.line == 1
|
||||||
assert err.character == 8
|
assert err.character == 8
|
||||||
assert err.message == 'Syntax: invalid value after object key'
|
assert err.message == 'Syntax: expected `:`, got `,`'
|
||||||
}
|
}
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
|
@ -58,7 +58,7 @@ fn test_decode_error_message_should_have_enough_context_just_brace() {
|
||||||
if err is json.JsonDecodeError {
|
if err is json.JsonDecodeError {
|
||||||
assert err.line == 1
|
assert err.line == 1
|
||||||
assert err.character == 1
|
assert err.character == 1
|
||||||
assert err.message == 'Syntax: EOF error: expecting a complete object after `{`'
|
assert err.message == 'Syntax: EOF: expected object end'
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
@ -76,7 +76,7 @@ fn test_decode_error_message_should_have_enough_context_trailing_comma_at_end()
 	if err is json.JsonDecodeError {
 		assert err.line == 5
 		assert err.character == 1
-		assert err.message == 'Syntax: Expecting object key after `,`'
+		assert err.message == 'Syntax: Cannot use `,`, before `}`'
 	}
 
 	return
@ -90,7 +90,7 @@ fn test_decode_error_message_should_have_enough_context_in_the_middle() {
 	if err is json.JsonDecodeError {
 		assert err.line == 1
 		assert err.character == 40
-		assert err.message == 'Syntax: invalid value. Unexpected character after string_ end'
+		assert err.message == 'Syntax: invalid value. Unexpected character after string end'
 	}
 	return
 }
@ -2,10 +2,10 @@ import x.json2 as json
 import time
 
 const fixed_time = time.new(
 	year: 2022
 	month: 3
 	day: 11
 	hour: 13
 	minute: 54
 	second: 25
 )
@ -44,7 +44,7 @@ enum ValueKind {
 	unknown
 	array
 	object
-	string_
+	string
 	number
 	boolean
 	null
@ -56,7 +56,7 @@ fn (k ValueKind) str() string {
 		.unknown { 'unknown' }
 		.array { 'array' }
 		.object { 'object' }
-		.string_ { 'string' }
+		.string { 'string' }
 		.number { 'number' }
 		.boolean { 'boolean' }
 		.null { 'null' }