Mirror of https://github.com/vlang/v.git (synced 2025-09-13 14:32:26 +03:00)

Compare commits: 24f91280d9 ... 7c780ed8fa (4 commits)

Commits:
- 7c780ed8fa
- 9fb8aae2d7
- cb2756e39c
- 2ac3478296

24 changed files with 270 additions and 225 deletions
@@ -1018,8 +1018,12 @@ pub mut:
 	stmts            []Stmt   // all the statements in the source file
 	imports          []Import // all the imports
 	auto_imports     []string // imports that were implicitly added
+	used_imports     []string
+	implied_imports  []string // imports that the user's code uses but omitted to import explicitly, used by `vfmt`
 	embedded_files   []EmbeddedFile // list of files to embed in the binary
 	imported_symbols map[string]string // used for `import {symbol}`, it maps symbol => module.symbol
+	imported_symbols_trie token.KeywordsMatcherTrie // constructed from imported_symbols, to accelerate presence checks
+	imported_symbols_used map[string]bool
 	errors   []errors.Error   // all the checker errors in the file
 	warnings []errors.Warning // all the checker warnings in the file
 	notices  []errors.Notice  // all the checker notices in the file
@@ -4261,7 +4261,7 @@ fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
 	}
 	mut name := node.name
 	// check for imported symbol
-	if name in c.file.imported_symbols {
+	if c.file.imported_symbols_trie.matches(name) {
 		name = c.file.imported_symbols[name]
 	}
 	// prepend mod to look for fn call or const
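For context on the replacement above: `imported_symbols_trie` is built by the parser from `imported_symbols.keys()` via `token.new_keywords_matcher_from_array_trie` (see the parser hunks below), so the presence check no longer has to hash the identifier against the map on every lookup. A minimal, hedged sketch of that flow, with a made-up symbol table:

import v.token

fn main() {
	// hypothetical result of `import term { bright_cyan }`
	imported_symbols := {
		'bright_cyan': 'term.bright_cyan'
	}
	// build the matcher once from the known keys ...
	matcher := token.new_keywords_matcher_from_array_trie(imported_symbols.keys())
	// ... then presence checks go through the trie
	assert matcher.matches('bright_cyan')
	assert !matcher.matches('unknown_symbol')
}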
@@ -254,6 +254,9 @@ fn (mut c Checker) for_in_stmt(mut node ast.ForInStmt) {
 	}
 	if node.val_is_mut {
 		value_type = value_type.ref()
+		if value_type.has_flag(.option) {
+			value_type = value_type.set_flag(.option_mut_param_t)
+		}
 		match mut node.cond {
 			ast.Ident {
 				if mut node.cond.obj is ast.Var {
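The user-visible effect of the new `option_mut_param_t` flag is exercised by the new `vlib/v/tests/options/option_for_mut_test.v` further down; in essence, a `mut` loop variable over an array of options can now be written through:

fn main() {
	mut data := [3]?int{}
	for mut d in data {
		// the assignment through the mut option reference reaches the array element
		d = ?int(1)
		assert '${d}' == 'Option(1)'
	}
	println(data)
}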
@@ -117,7 +117,7 @@ pub fn (mut f Fmt) import_comments(comments []ast.Comment, options CommentsOptio
 		return
 	}
 	if options.same_line {
-		f.remove_new_line(imports_buffer: true)
+		f.remove_new_line()
 	}
 	for c in comments {
 		ctext := c.text.trim_left('\x01')
@@ -129,7 +129,7 @@ pub fn (mut f Fmt) import_comments(comments []ast.Comment, options CommentsOptio
 			out_s += ' '
 		}
 		out_s += ctext
-		f.out_imports.writeln(out_s)
+		f.writeln(out_s)
 	}
 }
vlib/v/fmt/fmt.v (173)
@@ -22,7 +22,6 @@ pub mut:
 	table       &ast.Table = unsafe { nil }
 	is_debug    bool
 	out         strings.Builder
-	out_imports strings.Builder
 	indent      int
 	empty_line  bool
 	line_len    int // the current line length, Note: it counts \t as 4 spaces, and starts at 0 after f.writeln
@@ -32,13 +31,12 @@ pub mut:
 	array_init_depth   int // current level of hierarchy in array init
 	single_line_if     bool
 	cur_mod            string
-	did_imports        bool
-	import_pos         int // position of the imports in the resulting string
+	import_pos         int // position of the last import in the resulting string
-	auto_imports       map[string]bool // potentially hidden imports(`sync` when using channels) and preludes(when embedding files)
-	used_imports       map[string]bool // to remove unused imports
-	import_syms_used   map[string]bool // to remove unused import symbols
 	mod2alias          map[string]string // for `import time as t`, will contain: 'time'=>'t'
 	mod2syms           map[string]string // import time { now } 'time.now'=>'now'
+	implied_import_str string // imports that the user's code uses but omitted to import explicitly
+	processed_imports  []string
+	has_import_stmt    bool
 	use_short_fn_args  bool
 	single_line_fields bool // should struct fields be on a single line
 	in_lambda_depth    int
@@ -68,7 +66,6 @@ pub fn fmt(file ast.File, mut table ast.Table, pref_ &pref.Preferences, is_debug
 		pref: pref_
 		is_debug: is_debug
 		out: strings.new_builder(1000)
-		out_imports: strings.new_builder(200)
 	}
 	f.source_text = options.source_text
 	f.process_file_imports(file)
@@ -76,16 +73,15 @@ pub fn fmt(file ast.File, mut table ast.Table, pref_ &pref.Preferences, is_debug
 	f.indent--
 	f.stmts(file.stmts)
 	f.indent++
-	// Format after file import symbols are processed.
-	f.imports(f.file.imports)
 	res := f.out.str().trim_space() + '\n'

+	// `implied_imports` should append to end of `import` block
 	if res.len == 1 {
-		return f.out_imports.str().trim_space() + '\n'
+		return f.implied_import_str + '\n'
 	}
 	if res.len <= f.import_pos {
-		imp_str := f.out_imports.str().trim_space()
-		if imp_str.len > 0 {
-			return res + '\n' + imp_str + '\n'
+		if f.implied_import_str.len > 0 {
+			return res + '\n' + f.implied_import_str + '\n'
 		}
 		return res
 	}
@@ -98,7 +94,11 @@ pub fn fmt(file ast.File, mut table ast.Table, pref_ &pref.Preferences, is_debug
 			import_start_pos = stmt.pos.len
 		}
 	}
-	return res[..import_start_pos] + f.out_imports.str() + res[import_start_pos..]
+	if f.has_import_stmt || f.implied_import_str.len == 0 {
+		return res[..import_start_pos] + f.implied_import_str + res[import_start_pos..]
+	} else {
+		return res[..import_start_pos] + f.implied_import_str + '\n' + res[import_start_pos..]
+	}
 }

 /*
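To make the splice concrete: the implied import string is inserted into the already formatted output at `import_start_pos`, with an extra separating newline when the file had no `import` statements of its own. A small hedged illustration with made-up values:

fn main() {
	res := 'module main\n\nfn show() {\n\tprintln(time.now())\n}\n'
	import_start_pos := 'module main'.len
	implied_import_str := 'import time\n'
	// no `import` block in the source, so a separating newline is added before the splice
	println(res[..import_start_pos] + '\n' + implied_import_str + res[import_start_pos..])
}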
@@ -121,6 +121,12 @@ pub fn (f &Fmt) type_to_str(typ ast.Type) string {
 */

 pub fn (mut f Fmt) process_file_imports(file &ast.File) {
+	mut sb := strings.new_builder(128)
+	for imp in file.implied_imports {
+		sb.writeln('import ${imp}')
+	}
+	f.implied_import_str = sb.str()
+
 	for imp in file.imports {
 		f.mod2alias[imp.mod] = imp.alias
 		f.mod2alias[imp.mod.all_after('${file.mod.name}.')] = imp.alias
@@ -131,12 +137,8 @@ pub fn (mut f Fmt) process_file_imports(file &ast.File) {
 			f.mod2syms['${imp.mod}.${sym.name}'] = sym.name
 			f.mod2syms['${imp.mod.all_after_last('.')}.${sym.name}'] = sym.name
 			f.mod2syms[sym.name] = sym.name
-			f.import_syms_used[sym.name] = false
 		}
 	}
-	for mod in f.file.auto_imports {
-		f.auto_imports[mod] = true
-	}
 }

 //=== Basic buffer write operations ===//
@@ -186,15 +188,9 @@ pub fn (mut f Fmt) wrap_long_line(penalty_idx int, add_indent bool) bool {
 	return true
 }

-@[params]
-pub struct RemoveNewLineConfig {
-pub:
-	imports_buffer bool // Work on f.out_imports instead of f.out
-}
-
 // When the removal action actually occurs, the string of the last line after the removal is returned
-pub fn (mut f Fmt) remove_new_line(cfg RemoveNewLineConfig) string {
-	mut buffer := if cfg.imports_buffer { unsafe { &f.out_imports } } else { unsafe { &f.out } }
+pub fn (mut f Fmt) remove_new_line() string {
+	mut buffer := unsafe { &f.out }
 	mut i := 0
 	for i = buffer.len - 1; i >= 0; i-- {
 		if !buffer.byte_at(i).is_space() { // != `\n` {
@@ -314,76 +310,30 @@ pub fn (mut f Fmt) short_module(name string) string {

 //=== Import-related methods ===//

-pub fn (mut f Fmt) mark_types_import_as_used(typ ast.Type) {
-	sym := f.table.sym(typ)
-	match sym.info {
-		ast.Map {
-			map_info := sym.map_info()
-			f.mark_types_import_as_used(map_info.key_type)
-			f.mark_types_import_as_used(map_info.value_type)
-			return
-		}
-		ast.Array, ast.ArrayFixed {
-			f.mark_types_import_as_used(sym.info.elem_type)
-			return
-		}
-		ast.GenericInst {
-			for concrete_typ in sym.info.concrete_types {
-				f.mark_types_import_as_used(concrete_typ)
-			}
-		}
-		else {}
-	}
-	// `Type[T]` -> `Type` || `[]thread Type` -> `Type`.
-	name := sym.name.all_before('[').all_after(' ')
-	f.mark_import_as_used(name)
-}
-
-pub fn (mut f Fmt) mark_import_as_used(name string) {
-	parts := name.split('.')
-	sym := parts.last()
-	if sym in f.import_syms_used {
-		f.import_syms_used[sym] = true
-	}
-	if parts.len == 1 {
-		return
-	}
-	mod := parts[..parts.len - 1].join('.')
-	f.used_imports[mod] = true
-}
-
-pub fn (mut f Fmt) imports(imports []ast.Import) {
-	if f.did_imports || imports.len == 0 {
-		return
-	}
-	f.did_imports = true
-	mut processed_imports := map[string]bool{}
-	for imp in imports {
-		if imp.mod in f.auto_imports && imp.mod !in f.used_imports {
-			// Skip hidden imports like preludes.
-			continue
-		}
-		imp_stmt := f.imp_stmt_str(imp)
-		if imp_stmt in processed_imports {
-			// Skip duplicates.
-			f.import_comments(imp.next_comments)
-			continue
-		}
-		processed_imports[imp_stmt] = true
-		if !f.format_state.is_vfmt_on {
-			original_imp_line := f.get_source_lines()#[imp.pos.line_nr..imp.pos.last_line + 1].join('\n')
-			// Same line comments(`imp.comments`) are included in the `original_imp_line`.
-			f.out_imports.writeln(original_imp_line)
-			f.import_comments(imp.next_comments)
-		} else {
-			f.out_imports.writeln('import ${imp_stmt}')
-			f.import_comments(imp.comments, same_line: true)
-			f.import_comments(imp.next_comments)
-		}
-	}
-	if processed_imports.len > 0 {
-		f.out_imports.writeln('')
-	}
-}
+pub fn (mut f Fmt) import_stmt(imp ast.Import) {
+	f.has_import_stmt = true
+	if imp.mod in f.file.auto_imports && imp.mod !in f.file.used_imports {
+		// Skip hidden imports like preludes.
+		return
+	}
+	imp_stmt := f.imp_stmt_str(imp)
+	if imp_stmt in f.processed_imports {
+		// Skip duplicates.
+		f.import_comments(imp.next_comments)
+		return
+	}
+	f.processed_imports << imp_stmt
+	if !f.format_state.is_vfmt_on {
+		original_imp_line := f.get_source_lines()#[imp.pos.line_nr..imp.pos.last_line + 1].join('\n')
+		// Same line comments(`imp.comments`) are included in the `original_imp_line`.
+		f.writeln(original_imp_line)
+		f.import_comments(imp.next_comments)
+	} else {
+		f.writeln('import ${imp_stmt}')
+		f.import_comments(imp.comments, same_line: true)
+		f.import_comments(imp.next_comments)
+	}
+	f.import_pos = f.out.len
+}

 pub fn (f &Fmt) imp_stmt_str(imp ast.Import) string {
@@ -391,7 +341,7 @@ pub fn (f &Fmt) imp_stmt_str(imp ast.Import) string {
 	// E.g.: `import foo { Foo }` || `import foo as f { Foo }`
 	has_alias := imp.alias != imp.source_name.all_after_last('.')
 	mut suffix := if has_alias { ' as ${imp.alias}' } else { '' }
-	mut syms := imp.syms.map(it.name).filter(f.import_syms_used[it])
+	mut syms := imp.syms.map(it.name).filter(f.file.imported_symbols_used[it])
 	syms.sort()
 	if syms.len > 0 {
 		suffix += if imp.syms[0].pos.line_nr == imp.pos.line_nr {
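Because `imp_stmt_str` now filters symbols through the parser-populated `imported_symbols_used` map, vfmt can drop imported symbols that the file never references. A hedged before/after illustration (the unused `bold` symbol is hypothetical):

// before formatting, the file imported `bold` without ever using it:
//     import term { bold, bright_cyan, colorize }
// after formatting, the import shrinks to the symbols that were actually used:
import term { bright_cyan, colorize }

fn main() {
	println(colorize(bright_cyan, 'hello'))
}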
@@ -440,9 +390,9 @@ fn (f &Fmt) should_insert_newline_before_node(node ast.Node, prev_node ast.Node)
 			return true
 		}
 	}
-		// Imports are handled special hence they are ignored here
+		// Force a newline after imports
 		ast.Import {
-			return false
+			return node !is ast.Import
 		}
 		ast.ConstDecl {
 			if node !is ast.ConstDecl && !(node is ast.ExprStmt && node.expr is ast.Comment) {
@@ -572,9 +522,7 @@ pub fn (mut f Fmt) stmt(node ast.Stmt) {
 			f.hash_stmt(node)
 		}
 		ast.Import {
-			// Imports are handled after the file is formatted, to automatically add necessary modules
-			// Just remember the position of the imports for now
-			f.import_pos = f.out.len
+			f.import_stmt(node)
 		}
 		ast.InterfaceDecl {
 			f.interface_decl(node)
@@ -932,9 +880,6 @@ pub fn (mut f Fmt) comptime_for(node ast.ComptimeFor) {
 		(node.expr as ast.Ident).name
 	}
 	f.write('\$for ${node.val_var} in ${typ}.${node.kind.str()} {')
-	if node.typ != ast.void_type {
-		f.mark_types_import_as_used(node.typ)
-	}
 	if node.stmts.len > 0 || node.pos.line_nr < node.pos.last_line {
 		f.writeln('')
 		f.stmts(node.stmts)
@@ -1202,11 +1147,6 @@ fn (mut f Fmt) fn_body(node ast.FnDecl) {
 	} else {
 		f.writeln('')
 	}
-	// Mark all function's used type so that they are not removed from imports
-	for arg in node.params {
-		f.mark_types_import_as_used(arg.typ)
-	}
-	f.mark_types_import_as_used(node.return_type)
 }

 pub fn (mut f Fmt) for_c_stmt(node ast.ForCStmt) {
@@ -1355,7 +1295,6 @@ pub fn (mut f Fmt) global_decl(node ast.GlobalDecl) {
 		if node.is_block {
 			f.writeln('')
 		}
-		f.mark_types_import_as_used(field.typ)
 	}
 	f.comments_after_last_field(node.end_comments)
 	if node.is_block {
@@ -1568,7 +1507,6 @@ pub fn (mut f Fmt) interface_field(field ast.StructField, mut type_align FieldAlign) {
 	if next_line_cmts.len > 0 {
 		f.comments(next_line_cmts, level: .indent)
 	}
-	f.mark_types_import_as_used(field.typ)
 }

 pub fn (mut f Fmt) interface_method(method ast.FnDecl, mut comment_align FieldAlign) {
@@ -1587,10 +1525,6 @@ pub fn (mut f Fmt) interface_method(method ast.FnDecl, mut comment_align FieldAlign) {
 		f.writeln('')
 	}
 	f.comments(method.next_comments, level: .indent)
-	for param in method.params {
-		f.mark_types_import_as_used(param.typ)
-	}
-	f.mark_types_import_as_used(method.return_type)
 }

 pub fn (mut f Fmt) module_stmt(mod ast.Module) {
@@ -1655,7 +1589,6 @@ pub fn (mut f Fmt) sql_stmt_line(node ast.SqlStmtLine) {
 		table_name = f.no_cur_mod(f.short_module(sym.name)) // TODO: f.type_to_str?
 	}

-	f.mark_types_import_as_used(node.table_expr.typ)
 	f.write('\t')
 	match node.kind {
 		.insert {
@@ -1712,7 +1645,6 @@ pub fn (mut f Fmt) alias_type_decl(node ast.AliasTypeDecl) {
 			f.write('type ${node.name} = ')
 			f.struct_decl(ast.StructDecl{ fields: sym.info.fields }, true)
 			f.comments(node.comments, has_nl: false)
-			f.mark_types_import_as_used(node.parent_type)
 			return
 		}
 	}
@@ -1720,7 +1652,6 @@ pub fn (mut f Fmt) alias_type_decl(node ast.AliasTypeDecl) {
 	f.write('type ${node.name} = ${ptype}')

 	f.comments(node.comments, has_nl: false)
-	f.mark_types_import_as_used(node.parent_type)
 }

 pub fn (mut f Fmt) fn_type_decl(node ast.FnTypeDecl) {
@@ -1743,7 +1674,6 @@ pub fn (mut f Fmt) fn_type_decl(node ast.FnTypeDecl) {
 			f.write(arg.typ.share().str() + ' ')
 		}
 		f.write(arg.name)
-		f.mark_types_import_as_used(arg.typ)
 		mut s := f.no_cur_mod(f.table.type_to_str_using_aliases(arg.typ, f.mod2alias))
 		if arg.is_mut {
 			if s.starts_with('&') {
@@ -1769,7 +1699,6 @@ pub fn (mut f Fmt) fn_type_decl(node ast.FnTypeDecl) {
 	}
 	f.write(')')
 	if fn_info.return_type.idx() != ast.void_type_idx {
-		f.mark_types_import_as_used(fn_info.return_type)
 		ret_str := f.no_cur_mod(f.table.type_to_str_using_aliases(fn_info.return_type,
 			f.mod2alias))
 		f.write(' ${ret_str}')
@@ -1801,7 +1730,6 @@ pub fn (mut f Fmt) sum_type_decl(node ast.SumTypeDecl) {
 	mut variants := []Variant{cap: node.variants.len}
 	for i, variant in node.variants {
 		variants << Variant{f.table.type_to_str_using_aliases(variant.typ, f.mod2alias), i}
-		f.mark_types_import_as_used(variant.typ)
 	}
 	// The first variant is now used as the default variant when doing `a:= Sumtype{}`, i.e. a change in semantics.
 	// Sorting is disabled, because it is no longer a cosmetic change - it can change the default variant.
@@ -1851,7 +1779,6 @@ pub fn (mut f Fmt) array_init(node ast.ArrayInit) {
 	}
 	if node.exprs.len == 0 && node.typ != 0 && node.typ != ast.void_type {
 		// `x := []string{}`
-		f.mark_types_import_as_used(node.typ)
 		if node.alias_type != ast.void_type {
 			f.write(f.table.type_to_str_using_aliases(node.alias_type, f.mod2alias))
 		} else {
@@ -2054,7 +1981,6 @@ pub fn (mut f Fmt) array_init(node ast.ArrayInit) {
 }

 pub fn (mut f Fmt) as_cast(node ast.AsCast) {
-	f.mark_types_import_as_used(node.typ)
 	type_str := f.table.type_to_str_using_aliases(node.typ, f.mod2alias)
 	f.expr(node.expr)
 	f.write(' as ${type_str}')
@@ -2078,7 +2004,6 @@ pub fn (mut f Fmt) at_expr(node ast.AtExpr) {
 }

 fn (mut f Fmt) write_static_method(name string, short_name string) {
-	f.mark_import_as_used(name.split('__static__')[0])
 	if short_name.contains('.') {
 		indx := short_name.index('.') or { -1 } + 1
 		f.write(short_name[0..indx] + short_name[indx..].replace('__static__', '.').capitalize())
@@ -2094,20 +2019,6 @@ pub fn (mut f Fmt) call_expr(node ast.CallExpr) {
 			f.in_lambda_depth++
 			defer { f.in_lambda_depth-- }
 		}
-		if node.left is ast.Ident {
-			// `time.now()` without `time imported` is processed as a method call with `time` being
-			// a `node.left` expression. Import `time` automatically.
-			// TODO: fetch all available modules
-			if node.left.name in ['time', 'os', 'strings', 'math', 'json', 'base64']
-				&& !node.left.scope.known_var(node.left.name) {
-				f.file.imports << ast.Import{
-					source_name: node.left.name
-					mod: node.left.name
-					alias: node.left.name
-				}
-				f.used_imports[node.left.name] = true
-			}
-		}
 		f.expr(node.left)
 		is_method_newline = node.left.pos().last_line != node.name_pos.line_nr
 		if is_method_newline {
@@ -2126,7 +2037,6 @@ pub fn (mut f Fmt) call_expr(node ast.CallExpr) {
 		if node.is_static_method {
 			f.write_static_method(node.name, name)
 		} else {
-			f.mark_import_as_used(name)
 			f.write(name)
 		}
 	}
@@ -2164,7 +2074,6 @@ fn (mut f Fmt) write_generic_call_if_require(node ast.CallExpr) {
 			name = 'C.' + name
 		}
 		f.write(name)
-		f.mark_types_import_as_used(concrete_type)
 		if i != node.concrete_types.len - 1 {
 			f.write(', ')
 		}
@@ -2227,7 +2136,6 @@ pub fn (mut f Fmt) cast_expr(node ast.CastExpr) {
 		}
 	}
 	f.write('${typ}(')
-	f.mark_types_import_as_used(node.typ)
 	f.expr(node.expr)
 	if node.has_arg {
 		f.write(', ')
@@ -2370,7 +2278,6 @@ pub fn (mut f Fmt) dump_expr(node ast.DumpExpr) {
 pub fn (mut f Fmt) enum_val(node ast.EnumVal) {
 	name := f.short_module(node.enum_name)
 	f.write(name + '.' + node.val)
-	f.mark_import_as_used(name)
 }

 pub fn (mut f Fmt) ident(node ast.Ident) {
@@ -2414,7 +2321,6 @@ pub fn (mut f Fmt) ident(node ast.Ident) {
 		if node.name.contains('__static__') {
 			f.write_static_method(node.name, name)
 		} else {
-			f.mark_import_as_used(name)
 			f.write(name)
 		}
 		if node.concrete_types.len > 0 {
@@ -2433,7 +2339,6 @@ pub fn (mut f Fmt) ident(node ast.Ident) {
 		} else if node.or_expr.kind == .block {
 			f.or_expr(node.or_expr)
 		}
-		f.mark_import_as_used(name)
 	}
 }
@@ -2791,10 +2696,6 @@ pub fn (mut f Fmt) lock_expr(node ast.LockExpr) {
 pub fn (mut f Fmt) map_init(node ast.MapInit) {
 	if node.keys.len == 0 && !node.has_update_expr {
 		if node.typ > ast.void_type {
-			sym := f.table.sym(node.typ)
-			info := sym.info as ast.Map
-			f.mark_types_import_as_used(info.key_type)
-			f.mark_types_import_as_used(info.value_type)
 			f.write(f.table.type_to_str_using_aliases(node.typ, f.mod2alias))
 		}
 		if node.pos.line_nr == node.pos.last_line {
@@ -2929,7 +2830,6 @@ pub fn (mut f Fmt) match_expr(node ast.MatchExpr) {

 pub fn (mut f Fmt) offset_of(node ast.OffsetOf) {
 	f.write('__offsetof(${f.table.type_to_str_using_aliases(node.struct_type, f.mod2alias)}, ${node.field})')
-	f.mark_types_import_as_used(node.struct_type)
 }

 pub fn (mut f Fmt) or_expr(node ast.OrExpr) {
@@ -3272,7 +3172,6 @@ pub fn (mut f Fmt) string_inter_literal(node ast.StringInterLiteral) {

 pub fn (mut f Fmt) type_expr(node ast.TypeNode) {
 	if node.stmt == ast.empty_stmt {
-		f.mark_types_import_as_used(node.typ)
 		f.write(f.table.type_to_str_using_aliases(node.typ, f.mod2alias))
 	} else {
 		f.struct_decl(ast.StructDecl{ fields: (node.stmt as ast.StructDecl).fields },
@@ -41,7 +41,6 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl, is_anon bool) {
 			if i < node.implements_types.len - 1 {
 				f.write(', ')
 			}
-			f.mark_types_import_as_used(t.typ)
 		}
 	}
 	// Calculate the alignments first
@@ -52,7 +51,6 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl, is_anon bool) {
 		f.comments_before_field(node.pre_comments)
 	}
 	for embed in node.embeds {
-		f.mark_types_import_as_used(embed.typ)
 		styp := f.table.type_to_str_using_aliases(embed.typ, f.mod2alias)

 		pre_comments := embed.comments.filter(it.pos.pos < embed.pos.pos)
@@ -101,7 +99,6 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl, is_anon bool) {
 		if !f.write_anon_struct_field_decl(field.typ, field.anon_struct_decl) {
 			f.write(field_types[i])
 		}
-		f.mark_types_import_as_used(field.typ)
 		attrs_len := inline_attrs_len(field.attrs)
 		if field.has_default_expr {
 			f.write(' '.repeat(default_expr_align.max_len(field.pos.line_nr) - field_types[i].len))
@@ -202,7 +199,6 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
 	defer {
 		f.is_struct_init = struct_init_save
 	}
-	f.mark_types_import_as_used(node.typ)
 	sym_name := f.table.sym(node.typ).name
 	// f.write('<old name: $type_sym.name>')
 	mut name := if !sym_name.starts_with('C.') && !sym_name.starts_with('JS.') {
@@ -228,11 +224,9 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
 			f.comments(node.pre_comments, same_line: true, has_nl: true, level: .indent)
 			f.write('}')
 		}
-		f.mark_import_as_used(name)
 	} else if node.no_keys {
 		// `Foo{1,2,3}` (short syntax, no keys)
 		f.write('${name}{')
-		f.mark_import_as_used(name)
 		if node.has_update_expr {
 			f.write('...')
 			f.expr(node.update_expr)
@@ -255,7 +249,6 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
 	}
 	if !use_short_args || node.is_anon {
 		f.write('${name}{')
-		f.mark_import_as_used(name)
 		if single_line_fields {
 			f.write(' ')
 		}
@@ -0,0 +1,8 @@ (new file)
+module main
+
+import term { bright_cyan, colorize }
+
+fn main() {
+	n := colorize(bright_cyan, 'hello')
+	println(n)
+}
@@ -866,9 +866,19 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
 			g.write('*')
 		}
 		if node_.op == .assign && var_type.has_flag(.option_mut_param_t) {
+			if val is ast.CastExpr {
+				g.expr(left)
+				g.write('->state = ')
+				g.expr(val)
+				g.writeln('.state;')
+			}
 			g.write('memcpy(&')
 			g.expr(left)
+			if val is ast.CastExpr {
+				g.write('->data, ')
+			} else {
 				g.write('->data, *(${g.styp(val_type)}**)&')
+			}
 		} else if var_type.has_flag(.option_mut_param_t) {
 			g.expr(left)
 			g.write(' = ')
@@ -62,7 +62,7 @@ fn (mut g Gen) autofree_scope_vars2(scope &ast.Scope, start_pos int, end_pos int
 		match obj {
 			ast.Var {
 				g.trace_autofree('// var "${obj.name}" var.pos=${obj.pos.pos} var.line_nr=${obj.pos.line_nr}')
-				if obj.name == g.returned_var_name {
+				if obj.name in g.returned_var_names {
 					g.print_autofree_var(obj, 'returned from function')
 					g.trace_autofree('// skipping returned var')
 					continue
@@ -247,3 +247,17 @@ fn (mut g Gen) autofree_var_call(free_fn_name string, v ast.Var) {
 	}
 	g.autofree_scope_stmts << af.str()
 }
+
+fn (mut g Gen) detect_used_var_on_return(expr ast.Expr) {
+	match expr {
+		ast.Ident {
+			g.returned_var_names[expr.name] = true
+		}
+		ast.StructInit {
+			for field_expr in expr.init_fields {
+				g.detect_used_var_on_return(field_expr.expr)
+			}
+		}
+		else {}
+	}
+}
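The new walk matters for `-autofree`: a local that escapes through a returned struct initializer must not be freed at the return site. A hedged example of the pattern this protects (names are illustrative, not from the diff):

struct Config {
	text string
}

fn make_config() Config {
	s := 'tab_title'.repeat(2)
	// `s` escapes via the returned struct literal; detect_used_var_on_return()
	// records it in returned_var_names, so autofree skips freeing it here
	return Config{
		text: s
	}
}

fn main() {
	println(make_config().text)
}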
@@ -233,7 +233,7 @@ mut:
 	aggregate_type_idx  int
 	arg_no_auto_deref   bool // smartcast must not be dereferenced
 	branch_parent_pos   int  // used in BranchStmt (continue/break) for autofree stop position
-	returned_var_name   string // to detect that a var doesn't need to be freed since it's being returned
+	returned_var_names  map[string]bool // to detect that vars don't need to be freed since they are being returned
 	infix_left_var_name string // a && if expr
 	curr_var_name       []string // curr var name on assignment
 	called_fn_name      string
@@ -6241,6 +6241,9 @@ fn (mut g Gen) return_stmt(node ast.Return) {
 			g.writeln(' }, (${option_name}*)(&${tmpvar}), sizeof(${styp}));')
 		}
 		g.write_defer_stmts_when_needed()
+		if g.is_autofree {
+			g.detect_used_var_on_return(expr0)
+		}
 		g.autofree_scope_vars(node.pos.pos - 1, node.pos.line_nr, true)
 		g.writeln('return ${tmpvar};')
 		return
@@ -6286,6 +6289,9 @@ fn (mut g Gen) return_stmt(node ast.Return) {
 			g.writeln(' }, (${result_name}*)(&${tmpvar}), sizeof(${styp}));')
 		}
 		g.write_defer_stmts_when_needed()
+		if g.is_autofree {
+			g.detect_used_var_on_return(expr0)
+		}
 		g.autofree_scope_vars(node.pos.pos - 1, node.pos.line_nr, true)
 		g.writeln('return ${tmpvar};')
 		return
@@ -6294,9 +6300,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
 	// set free_parent_scopes to true, since all variables defined in parent
 	// scopes need to be freed before the return
 	if g.is_autofree {
-		if expr0 is ast.Ident {
-			g.returned_var_name = expr0.name
-		}
+		g.detect_used_var_on_return(expr0)
 		if !use_tmp_var && !g.is_builtin_mod {
 			use_tmp_var = expr0 is ast.CallExpr
 		}
@@ -196,7 +196,7 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
 	}
 	*/

-	g.returned_var_name = ''
+	g.returned_var_names.clear()
 	old_g_autofree := g.is_autofree
 	if node.is_manualfree {
 		g.is_autofree = false
vlib/v/gen/c/testdata/autofree_toml.c.must_have (9, vendored, new file)
@@ -0,0 +1,9 @@
+_result_toml__scanner__Scanner_ptr toml__scanner__new_scanner(toml__scanner__Config config) {
+	_result_toml__scanner__Scanner_ptr _t3 = {0};
+	_result_ok(&(toml__scanner__Scanner*[]) { s }, (_result*)(&_t3), sizeof(toml__scanner__Scanner*));
+	return _t3;
+}
+toml__ast__Quoted toml__parser__Parser_quoted(toml__parser__Parser* p) {
+	return ((toml__ast__Quoted){.text = string_clone_static(lit),.pos = toml__token__Token_pos(&p->tok),.is_multiline = is_multiline,.quote = quote,});
+}
+
vlib/v/gen/c/testdata/autofree_toml.out (0, vendored, new file)
vlib/v/gen/c/testdata/autofree_toml.vv (15, vendored, new file)
@@ -0,0 +1,15 @@
+// vtest vflags: -autofree
+import toml
+import os
+
+fn main() {
+	config_fname := 'config.toml'
+	tab_title := 'test tab title'
+	if !os.exists(config_fname) {
+		mut f := os.create(config_fname) or { panic(err) }
+		f.writeln('tab_title = "${tab_title}"') or { panic(err) }
+		f.close()
+	}
+	doc := toml.parse_file(config_fname) or { panic(err) }
+	assert doc.value('tab_title').string() == tab_title
+}
@@ -1300,6 +1300,10 @@ fn (mut w Walker) mark_resource_dependencies() {
 		w.fn_by_name(builderptr_idx + '.write_string')
 		w.fn_by_name('strings.new_builder')
 		w.uses_free[ast.string_type] = true
+
+		if w.table.dumps.keys().any(ast.Type(u32(it)).has_flag(.option)) {
+			w.fn_by_name('str_intp')
+		}
 	}
 	if w.features.auto_str_ptr {
 		w.fn_by_name('isnil')
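The extra check keeps `str_intp` alive in `-skip-unused` builds whenever some `dump()` call involves an option type, since formatting the dumped option goes through string interpolation at runtime. A hedged example of the triggering pattern:

fn main() {
	x := ?int(123)
	dump(x) // dumping an option value relies on str_intp at runtime
}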
@@ -61,7 +61,7 @@ fn (mut p Parser) enum_decl() ast.EnumDecl {
 			end_pos)
 		return ast.EnumDecl{}
 	}
-	if enum_name in p.imported_symbols {
+	if p.is_imported_symbol(enum_name) {
 		p.error_with_pos('cannot register enum `${enum_name}`, this type was already imported',
 			end_pos)
 		return ast.EnumDecl{}
@@ -89,10 +89,13 @@ fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr {
 		}
 		or_kind = if is_not { .propagate_result } else { .propagate_option }
 	}
-	if fn_name in p.imported_symbols {
+	if p.is_imported_symbol(fn_name) {
+		check := !p.imported_symbols_used[fn_name]
 		fn_name = p.imported_symbols[fn_name]
+		if check {
 			p.register_used_import_for_symbol_name(fn_name)
 		}
+	}
 	comments := p.eat_comments(same_line: true)
 	pos.update_last_line(p.prev_tok.line_nr)
 	return ast.CallExpr{
@@ -148,6 +151,13 @@ fn (mut p Parser) call_args() []ast.CallArg {
 			expr = p.struct_init('void_type', .short_syntax, false)
 		} else {
 			expr = p.expr(0)
+			if mut expr is ast.Ident {
+				if p.is_imported_symbol(expr.name) && !p.imported_symbols_used[expr.name] {
+					// func call arg is another function call
+					// import term { bright_cyan, colorize } ... colorize(bright_cyan, 'hello')
+					p.register_used_import_for_symbol_name(p.imported_symbols[expr.name])
+				}
+			}
 		}
 		if array_decompose {
 			expr = ast.ArrayDecompose{
@@ -383,7 +393,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
 		}
 	}
 	if !p.pref.is_fmt {
-		if name in p.imported_symbols {
+		if p.is_imported_symbol(name) {
 			p.error_with_pos('cannot redefine imported function `${name}`', name_pos)
 			return ast.FnDecl{
 				scope: unsafe { nil }
@@ -35,6 +35,10 @@ fn (mut p Parser) register_used_import(alias string) {

 fn (mut p Parser) register_used_import_for_symbol_name(sym_name string) {
 	short_import_name := sym_name.all_before_last('.').all_after_last('.')
+	short_symbol_name := sym_name.all_after_last('.')
+	if p.is_imported_symbol(short_symbol_name) {
+		p.imported_symbols_used[short_symbol_name] = true
+	}
 	for alias, mod in p.imports {
 		if mod == short_import_name {
 			p.register_used_import(alias)
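For concreteness, here is what those two string operations yield for a fully qualified symbol name such as the one registered by `import term { bright_cyan }` (a hedged, self-contained check):

fn main() {
	sym_name := 'term.bright_cyan'
	short_import_name := sym_name.all_before_last('.').all_after_last('.')
	short_symbol_name := sym_name.all_after_last('.')
	assert short_import_name == 'term'
	assert short_symbol_name == 'bright_cyan'
	println('ok')
}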
@@ -62,7 +66,14 @@ fn (mut p Parser) register_auto_import(alias string) {
 	if alias !in p.auto_imports {
 		p.auto_imports << alias
 	}
-	p.register_used_import(alias)
+	// do not call `register_used_import()` here as it may not be used by the code.
+	// for example, when using `chan`, but we have no `sync.xx()` call in the code.
+}
+
+fn (mut p Parser) register_implied_import(alias string) {
+	if alias !in p.implied_imports {
+		p.implied_imports << alias
+	}
 }

 fn (mut p Parser) check_unused_imports() {
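The comment refers to cases like the following: using a channel makes the parser auto-import `sync` behind the scenes, but that alone should no longer count the module as "used". A hedged illustration of such code:

fn main() {
	ch := chan int{cap: 1}
	ch <- 1
	println(<-ch) // works without any explicit `sync.xx()` call; `sync` is only an auto import
}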
@@ -75,7 +86,8 @@ fn (mut p Parser) check_unused_imports() {
 	for import_m in p.ast_imports {
 		alias := import_m.alias
 		mod := import_m.mod
-		if !(alias.len == 1 && alias[0] == `_`) && !p.is_used_import(alias) {
+		if !(alias.len == 1 && alias[0] == `_`) && !p.is_used_import(alias)
+			&& alias !in p.auto_imports {
 			mod_alias := if alias == mod { alias } else { '${alias} (${mod})' }
 			p.warn_with_pos("module '${mod_alias}' is imported but never used", import_m.mod_pos)
 		}
@@ -310,12 +322,13 @@ fn (mut p Parser) import_syms(mut parent ast.Import) {
 	for p.tok.kind == .name {
 		pos := p.tok.pos()
 		alias := p.check_name()
-		if alias in p.imported_symbols {
+		if p.is_imported_symbol(alias) {
 			p.error_with_pos('cannot register symbol `${alias}`, it was already imported',
 				pos)
 			return
 		}
 		p.imported_symbols[alias] = parent.mod + '.' + alias
+		p.rebuild_imported_symbols_matcher(alias)
 		// so we can work with this in fmt+checker
 		parent.syms << ast.ImportSymbol{
 			pos: pos
@@ -335,3 +348,12 @@ fn (mut p Parser) import_syms(mut parent ast.Import) {
 	}
 	p.next()
 }
+
+fn (mut p Parser) rebuild_imported_symbols_matcher(name string) {
+	p.imported_symbols_trie = token.new_keywords_matcher_from_array_trie(p.imported_symbols.keys())
+}
+
+@[inline]
+fn (mut p Parser) is_imported_symbol(name string) bool {
+	return p.imported_symbols_trie.matches(name)
+}
@@ -636,9 +636,12 @@ fn (mut p Parser) parse_any_type(language ast.Language, is_ptr bool, check_dot b
 	} else if p.expr_mod != '' && !p.inside_generic_params {
 		// p.expr_mod is from the struct and not from the generic parameter
 		name = p.expr_mod + '.' + name
-	} else if name in p.imported_symbols {
+	} else if p.is_imported_symbol(name) {
+		check := !p.imported_symbols_used[name]
 		name = p.imported_symbols[name]
+		if check {
 			p.register_used_import_for_symbol_name(name)
+		}
 	} else if !p.builtin_mod && name.len > 1 && name !in p.table.type_idxs {
 		// `Foo` in module `mod` means `mod.Foo`
 		name = p.mod + '.' + name
@@ -81,9 +81,12 @@ mut:
 	last_enum_mod    string // saves the last enum mod name on an array initialization
 	imports          map[string]string // alias => mod_name
 	ast_imports      []ast.Import // mod_names
-	used_imports     []string // alias
+	used_imports     []string
 	auto_imports     []string // imports, the user does not need to specify
+	implied_imports  []string // imports that the user's code uses but omitted to import explicitly, used by `vfmt`
 	imported_symbols map[string]string
+	imported_symbols_used map[string]bool
+	imported_symbols_trie token.KeywordsMatcherTrie
 	is_amp           bool // for generating the right code for `&Foo{}`
 	returns          bool
 	is_stmt_ident    bool // true while the beginning of a statement is an ident/selector
@@ -341,7 +344,11 @@ pub fn (mut p Parser) parse() &ast.File {
 		mod: module_decl
 		imports: p.ast_imports
 		imported_symbols: p.imported_symbols
+		imported_symbols_trie: token.new_keywords_matcher_from_array_trie(p.imported_symbols.keys())
+		imported_symbols_used: p.imported_symbols_used
 		auto_imports: p.auto_imports
+		used_imports: p.used_imports
+		implied_imports: p.implied_imports
 		stmts: stmts
 		scope: p.scope
 		global_scope: p.table.global_scope
@@ -587,7 +594,7 @@ fn (mut p Parser) check_name() string {
 	name := p.tok.lit
 	if p.tok.kind != .name && p.peek_tok.kind == .dot && name in p.imports {
 		p.register_used_import(name)
-	} else if p.tok.kind == .name && p.peek_tok.kind == .dot && name in p.imported_symbols {
+	} else if p.tok.kind == .name && p.is_imported_symbol(name) && !p.imported_symbols_used[name] {
 		// symbols like Enum.field_name
 		p.register_used_import_for_symbol_name(p.imported_symbols[name])
 	}
@@ -2085,6 +2092,17 @@ fn (mut p Parser) dot_expr(left ast.Expr) ast.Expr {
 		if mut left_node is ast.CallExpr {
 			left_node.is_return_used = true
 		}
+		if p.pref.is_fmt {
+			if mut left_node is ast.Ident {
+				// `time.now()` without `time imported` is processed as a method call with `time` being
+				// a `left_node` expression. Import `time` automatically.
+				// TODO: fetch all available modules
+				if left_node.name in ['time', 'os', 'strings', 'math', 'json', 'base64']
+					&& !left_node.scope.known_var(left_node.name) {
+					p.register_implied_import(left_node.name)
+				}
+			}
+		}
 		mcall_expr := ast.CallExpr{
 			left: left
 			name: field_name
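For vfmt users, the net effect is that a call through one of the well-known modules listed above, made without importing the module, gets an implied `import` added when the file is formatted. A hedged sketch of the resulting file (the helper function is illustrative):

module main

import time // added as an implied import, because `time.now()` was used without importing `time`

fn show_time() {
	println(time.now())
}

fn main() {
	show_time()
}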
@@ -2681,7 +2699,7 @@ fn (mut p Parser) type_decl() ast.TypeDecl {
 			return ast.FnTypeDecl{}
 		}
 	}
-	if name in p.imported_symbols {
+	if p.is_imported_symbol(name) {
 		p.error_with_pos('cannot register alias `${name}`, this type was already imported',
 			end_pos)
 		return ast.AliasTypeDecl{}
|
@ -73,7 +73,7 @@ fn (mut p Parser) struct_decl(is_anon bool) ast.StructDecl {
|
||||||
p.error_with_pos('struct names must have more than one character', name_pos)
|
p.error_with_pos('struct names must have more than one character', name_pos)
|
||||||
return ast.StructDecl{}
|
return ast.StructDecl{}
|
||||||
}
|
}
|
||||||
if name in p.imported_symbols {
|
if p.is_imported_symbol(name) {
|
||||||
p.error_with_pos('cannot register struct `${name}`, this type was already imported',
|
p.error_with_pos('cannot register struct `${name}`, this type was already imported',
|
||||||
name_pos)
|
name_pos)
|
||||||
return ast.StructDecl{}
|
return ast.StructDecl{}
|
||||||
|
@@ -651,7 +651,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
 	mut pre_comments := p.eat_comments()
 	p.check(.lcbr)
 	pre_comments << p.eat_comments()
-	if modless_name in p.imported_symbols {
+	if p.is_imported_symbol(modless_name) {
 		p.error_with_pos('cannot register interface `${interface_name}`, this type was already imported',
 			name_pos)
 		return ast.InterfaceDecl{}
|
@ -398,6 +398,7 @@ fn advanced_options() {
|
||||||
s2 := parse_header1('foo:bar') or { return }
|
s2 := parse_header1('foo:bar') or { return }
|
||||||
_ := s.len + s2.len // avoid warning for unused variables
|
_ := s.len + s2.len // avoid warning for unused variables
|
||||||
// TODO: fix -autofree, so that it adds this free automatically:
|
// TODO: fix -autofree, so that it adds this free automatically:
|
||||||
|
unsafe { s.free() }
|
||||||
unsafe { s2.free() }
|
unsafe { s2.free() }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
vlib/v/tests/options/option_for_mut_test.v (15, new file)
@@ -0,0 +1,15 @@
+module main
+
+fn test_main() {
+	mut data := [3]?int{}
+
+	for mut d in data {
+		d = ?int(1)
+		assert '${d}' == 'Option(1)'
+	}
+
+	for i in 0 .. data.len {
+		data[i] = ?int(3)
+	}
+	assert '${data}' == '[Option(3), Option(3), Option(3)]'
+}
@@ -11,6 +11,11 @@ pub mut:
 	max_len int
 }

+// str returns a short representation of matcher
+pub fn (km &KeywordsMatcherTrie) str() string {
+	return 'KeywordsMatcherTrie{ /* nodes.len: ${km.nodes.len} */ min_len: ${km.min_len}, max_len: ${km.max_len} }'
+}
+
 // TrieNode is a single node from a trie, used by KeywordsMatcherTrie
 pub struct TrieNode {
 pub mut:
@@ -18,6 +23,14 @@ pub mut:
 	value int = -1 // when != -1, it is a leaf node representing a match
 }

+// str returns a string representation of the node content
+pub fn (node &TrieNode) str() string {
+	if isnil(node) {
+		return '&TrieNode(nil)'
+	}
+	return '&TrieNode{value: ${node.value}}'
+}
+
 // find tries to find the given `word` in the set of all previously added words
 // to the KeywordsMatcherTrie instance. It returns -1 if the word was NOT found
 // there at all. If the word was found, find will return the `value` (value => 0),
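Since V's `println` and string interpolation pick up a user-defined `str()`, the two methods above make the matcher and its nodes printable while debugging the new import-symbol tracking. A hedged usage sketch:

import v.token

fn main() {
	m := token.new_keywords_matcher_from_array_trie(['foo', 'bar', 'baz'])
	// prints something like: KeywordsMatcherTrie{ /* nodes.len: ... */ min_len: 3, max_len: 3 }
	println(m)
}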