Compare commits

...

4 commits

Author SHA1 Message Date
Felipe Pena
7c780ed8fa
cgen, markused, checker: fix iteration over mutable option (fix #24860) (#25199)
2025-08-30 23:40:47 +03:00
Felipe Pena
9fb8aae2d7
cgen: fix autofree used vars on return (fix #25196) (#25198) 2025-08-30 23:39:34 +03:00
Delyan Angelov
cb2756e39c
parser,checker: use keywords matcher trie for imported symbol presence checks, instead of x in p.imported_symbol (#25201) 2025-08-30 22:56:30 +03:00
kbkpbot
2ac3478296
vfmt,parser: move mark used from vfmt to parser (#25190) 2025-08-30 18:42:12 +03:00
24 changed files with 270 additions and 225 deletions

View file

@ -1018,8 +1018,12 @@ pub mut:
stmts []Stmt // all the statements in the source file
imports []Import // all the imports
auto_imports []string // imports that were implicitly added
used_imports []string
implied_imports []string // imports that the user's code uses but omitted to import explicitly, used by `vfmt`
embedded_files []EmbeddedFile // list of files to embed in the binary
imported_symbols map[string]string // used for `import {symbol}`, it maps symbol => module.symbol
imported_symbols_trie token.KeywordsMatcherTrie // constructed from imported_symbols, to accelerate presence checks
imported_symbols_used map[string]bool
errors []errors.Error // all the checker errors in the file
warnings []errors.Warning // all the checker warnings in the file
notices []errors.Notice // all the checker notices in the file
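The new `imported_symbols_trie` field lets the checker and parser replace repeated `name in file.imported_symbols` map lookups with a query against a prebuilt trie. A minimal sketch of that pattern, using only the `token` API names that appear in this diff (the map contents are illustrative):

import v.token

fn main() {
	imported_symbols := {
		'bright_cyan': 'term.bright_cyan'
		'colorize':    'term.colorize'
	}
	// built once per file, then reused for every identifier check
	matcher := token.new_keywords_matcher_from_array_trie(imported_symbols.keys())
	println(matcher.matches('colorize')) // true, replaces `'colorize' in imported_symbols`
	println(matcher.matches('now')) // false
}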

View file

@ -4261,7 +4261,7 @@ fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
}
mut name := node.name
// check for imported symbol
if name in c.file.imported_symbols {
if c.file.imported_symbols_trie.matches(name) {
name = c.file.imported_symbols[name]
}
// prepend mod to look for fn call or const

View file

@ -254,6 +254,9 @@ fn (mut c Checker) for_in_stmt(mut node ast.ForInStmt) {
}
if node.val_is_mut {
value_type = value_type.ref()
if value_type.has_flag(.option) {
value_type = value_type.set_flag(.option_mut_param_t)
}
match mut node.cond {
ast.Ident {
if mut node.cond.obj is ast.Var {
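For reference, the construct this branch handles is a mutable iteration variable over an array of option values, mirroring the new test added near the end of this compare:

mut data := [3]?int{}
for mut d in data {
	// `d` is a mutable reference to an option element; the checker now tags
	// its type with .option_mut_param_t so cgen assigns through ->data/->state
	d = ?int(1)
}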

View file

@ -117,7 +117,7 @@ pub fn (mut f Fmt) import_comments(comments []ast.Comment, options CommentsOptio
return
}
if options.same_line {
f.remove_new_line(imports_buffer: true)
f.remove_new_line()
}
for c in comments {
ctext := c.text.trim_left('\x01')
@ -129,7 +129,7 @@ pub fn (mut f Fmt) import_comments(comments []ast.Comment, options CommentsOptio
out_s += ' '
}
out_s += ctext
f.out_imports.writeln(out_s)
f.writeln(out_s)
}
}

View file

@ -22,7 +22,6 @@ pub mut:
table &ast.Table = unsafe { nil }
is_debug bool
out strings.Builder
out_imports strings.Builder
indent int
empty_line bool
line_len int // the current line length, Note: it counts \t as 4 spaces, and starts at 0 after f.writeln
@ -32,13 +31,12 @@ pub mut:
array_init_depth int // current level of hierarchy in array init
single_line_if bool
cur_mod string
did_imports bool
import_pos int // position of the imports in the resulting string
auto_imports map[string]bool // potentially hidden imports(`sync` when using channels) and preludes(when embedding files)
used_imports map[string]bool // to remove unused imports
import_syms_used map[string]bool // to remove unused import symbols
import_pos int // position of the last import in the resulting string
mod2alias map[string]string // for `import time as t`, will contain: 'time'=>'t'
mod2syms map[string]string // import time { now } 'time.now'=>'now'
implied_import_str string // imports that the user's code uses but omitted to import explicitly
processed_imports []string
has_import_stmt bool
use_short_fn_args bool
single_line_fields bool // should struct fields be on a single line
in_lambda_depth int
@ -68,7 +66,6 @@ pub fn fmt(file ast.File, mut table ast.Table, pref_ &pref.Preferences, is_debug
pref: pref_
is_debug: is_debug
out: strings.new_builder(1000)
out_imports: strings.new_builder(200)
}
f.source_text = options.source_text
f.process_file_imports(file)
@ -76,16 +73,15 @@ pub fn fmt(file ast.File, mut table ast.Table, pref_ &pref.Preferences, is_debug
f.indent--
f.stmts(file.stmts)
f.indent++
// Format after file import symbols are processed.
f.imports(f.file.imports)
res := f.out.str().trim_space() + '\n'
// `implied_imports` should append to end of `import` block
if res.len == 1 {
return f.out_imports.str().trim_space() + '\n'
return f.implied_import_str + '\n'
}
if res.len <= f.import_pos {
imp_str := f.out_imports.str().trim_space()
if imp_str.len > 0 {
return res + '\n' + imp_str + '\n'
if f.implied_import_str.len > 0 {
return res + '\n' + f.implied_import_str + '\n'
}
return res
}
@ -98,7 +94,11 @@ pub fn fmt(file ast.File, mut table ast.Table, pref_ &pref.Preferences, is_debug
import_start_pos = stmt.pos.len
}
}
return res[..import_start_pos] + f.out_imports.str() + res[import_start_pos..]
if f.has_import_stmt || f.implied_import_str.len == 0 {
return res[..import_start_pos] + f.implied_import_str + res[import_start_pos..]
} else {
return res[..import_start_pos] + f.implied_import_str + '\n' + res[import_start_pos..]
}
}
/*
@ -121,6 +121,12 @@ pub fn (f &Fmt) type_to_str(typ ast.Type) string {
*/
pub fn (mut f Fmt) process_file_imports(file &ast.File) {
mut sb := strings.new_builder(128)
for imp in file.implied_imports {
sb.writeln('import ${imp}')
}
f.implied_import_str = sb.str()
for imp in file.imports {
f.mod2alias[imp.mod] = imp.alias
f.mod2alias[imp.mod.all_after('${file.mod.name}.')] = imp.alias
@ -131,12 +137,8 @@ pub fn (mut f Fmt) process_file_imports(file &ast.File) {
f.mod2syms['${imp.mod}.${sym.name}'] = sym.name
f.mod2syms['${imp.mod.all_after_last('.')}.${sym.name}'] = sym.name
f.mod2syms[sym.name] = sym.name
f.import_syms_used[sym.name] = false
}
}
for mod in f.file.auto_imports {
f.auto_imports[mod] = true
}
}
//=== Basic buffer write operations ===//
@ -186,15 +188,9 @@ pub fn (mut f Fmt) wrap_long_line(penalty_idx int, add_indent bool) bool {
return true
}
@[params]
pub struct RemoveNewLineConfig {
pub:
imports_buffer bool // Work on f.out_imports instead of f.out
}
// When the removal action actually occurs, the string of the last line after the removal is returned
pub fn (mut f Fmt) remove_new_line(cfg RemoveNewLineConfig) string {
mut buffer := if cfg.imports_buffer { unsafe { &f.out_imports } } else { unsafe { &f.out } }
pub fn (mut f Fmt) remove_new_line() string {
mut buffer := unsafe { &f.out }
mut i := 0
for i = buffer.len - 1; i >= 0; i-- {
if !buffer.byte_at(i).is_space() { // != `\n` {
@ -314,76 +310,30 @@ pub fn (mut f Fmt) short_module(name string) string {
//=== Import-related methods ===//
pub fn (mut f Fmt) mark_types_import_as_used(typ ast.Type) {
sym := f.table.sym(typ)
match sym.info {
ast.Map {
map_info := sym.map_info()
f.mark_types_import_as_used(map_info.key_type)
f.mark_types_import_as_used(map_info.value_type)
return
}
ast.Array, ast.ArrayFixed {
f.mark_types_import_as_used(sym.info.elem_type)
return
}
ast.GenericInst {
for concrete_typ in sym.info.concrete_types {
f.mark_types_import_as_used(concrete_typ)
}
}
else {}
}
// `Type[T]` -> `Type` || `[]thread Type` -> `Type`.
name := sym.name.all_before('[').all_after(' ')
f.mark_import_as_used(name)
}
pub fn (mut f Fmt) mark_import_as_used(name string) {
parts := name.split('.')
sym := parts.last()
if sym in f.import_syms_used {
f.import_syms_used[sym] = true
}
if parts.len == 1 {
return
}
mod := parts[..parts.len - 1].join('.')
f.used_imports[mod] = true
}
pub fn (mut f Fmt) imports(imports []ast.Import) {
if f.did_imports || imports.len == 0 {
return
}
f.did_imports = true
mut processed_imports := map[string]bool{}
for imp in imports {
if imp.mod in f.auto_imports && imp.mod !in f.used_imports {
pub fn (mut f Fmt) import_stmt(imp ast.Import) {
f.has_import_stmt = true
if imp.mod in f.file.auto_imports && imp.mod !in f.file.used_imports {
// Skip hidden imports like preludes.
continue
return
}
imp_stmt := f.imp_stmt_str(imp)
if imp_stmt in processed_imports {
if imp_stmt in f.processed_imports {
// Skip duplicates.
f.import_comments(imp.next_comments)
continue
return
}
processed_imports[imp_stmt] = true
f.processed_imports << imp_stmt
if !f.format_state.is_vfmt_on {
original_imp_line := f.get_source_lines()#[imp.pos.line_nr..imp.pos.last_line + 1].join('\n')
// Same line comments(`imp.comments`) are included in the `original_imp_line`.
f.out_imports.writeln(original_imp_line)
f.writeln(original_imp_line)
f.import_comments(imp.next_comments)
} else {
f.out_imports.writeln('import ${imp_stmt}')
f.writeln('import ${imp_stmt}')
f.import_comments(imp.comments, same_line: true)
f.import_comments(imp.next_comments)
}
}
if processed_imports.len > 0 {
f.out_imports.writeln('')
}
f.import_pos = f.out.len
}
pub fn (f &Fmt) imp_stmt_str(imp ast.Import) string {
@ -391,7 +341,7 @@ pub fn (f &Fmt) imp_stmt_str(imp ast.Import) string {
// E.g.: `import foo { Foo }` || `import foo as f { Foo }`
has_alias := imp.alias != imp.source_name.all_after_last('.')
mut suffix := if has_alias { ' as ${imp.alias}' } else { '' }
mut syms := imp.syms.map(it.name).filter(f.import_syms_used[it])
mut syms := imp.syms.map(it.name).filter(f.file.imported_symbols_used[it])
syms.sort()
if syms.len > 0 {
suffix += if imp.syms[0].pos.line_nr == imp.pos.line_nr {
@ -440,9 +390,9 @@ fn (f &Fmt) should_insert_newline_before_node(node ast.Node, prev_node ast.Node)
return true
}
}
// Imports are handled special hence they are ignored here
// Force a newline after imports
ast.Import {
return false
return node !is ast.Import
}
ast.ConstDecl {
if node !is ast.ConstDecl && !(node is ast.ExprStmt && node.expr is ast.Comment) {
@ -572,9 +522,7 @@ pub fn (mut f Fmt) stmt(node ast.Stmt) {
f.hash_stmt(node)
}
ast.Import {
// Imports are handled after the file is formatted, to automatically add necessary modules
// Just remember the position of the imports for now
f.import_pos = f.out.len
f.import_stmt(node)
}
ast.InterfaceDecl {
f.interface_decl(node)
@ -932,9 +880,6 @@ pub fn (mut f Fmt) comptime_for(node ast.ComptimeFor) {
(node.expr as ast.Ident).name
}
f.write('\$for ${node.val_var} in ${typ}.${node.kind.str()} {')
if node.typ != ast.void_type {
f.mark_types_import_as_used(node.typ)
}
if node.stmts.len > 0 || node.pos.line_nr < node.pos.last_line {
f.writeln('')
f.stmts(node.stmts)
@ -1202,11 +1147,6 @@ fn (mut f Fmt) fn_body(node ast.FnDecl) {
} else {
f.writeln('')
}
// Mark all function's used type so that they are not removed from imports
for arg in node.params {
f.mark_types_import_as_used(arg.typ)
}
f.mark_types_import_as_used(node.return_type)
}
pub fn (mut f Fmt) for_c_stmt(node ast.ForCStmt) {
@ -1355,7 +1295,6 @@ pub fn (mut f Fmt) global_decl(node ast.GlobalDecl) {
if node.is_block {
f.writeln('')
}
f.mark_types_import_as_used(field.typ)
}
f.comments_after_last_field(node.end_comments)
if node.is_block {
@ -1568,7 +1507,6 @@ pub fn (mut f Fmt) interface_field(field ast.StructField, mut type_align FieldAl
if next_line_cmts.len > 0 {
f.comments(next_line_cmts, level: .indent)
}
f.mark_types_import_as_used(field.typ)
}
pub fn (mut f Fmt) interface_method(method ast.FnDecl, mut comment_align FieldAlign) {
@ -1587,10 +1525,6 @@ pub fn (mut f Fmt) interface_method(method ast.FnDecl, mut comment_align FieldAl
f.writeln('')
}
f.comments(method.next_comments, level: .indent)
for param in method.params {
f.mark_types_import_as_used(param.typ)
}
f.mark_types_import_as_used(method.return_type)
}
pub fn (mut f Fmt) module_stmt(mod ast.Module) {
@ -1655,7 +1589,6 @@ pub fn (mut f Fmt) sql_stmt_line(node ast.SqlStmtLine) {
table_name = f.no_cur_mod(f.short_module(sym.name)) // TODO: f.type_to_str?
}
f.mark_types_import_as_used(node.table_expr.typ)
f.write('\t')
match node.kind {
.insert {
@ -1712,7 +1645,6 @@ pub fn (mut f Fmt) alias_type_decl(node ast.AliasTypeDecl) {
f.write('type ${node.name} = ')
f.struct_decl(ast.StructDecl{ fields: sym.info.fields }, true)
f.comments(node.comments, has_nl: false)
f.mark_types_import_as_used(node.parent_type)
return
}
}
@ -1720,7 +1652,6 @@ pub fn (mut f Fmt) alias_type_decl(node ast.AliasTypeDecl) {
f.write('type ${node.name} = ${ptype}')
f.comments(node.comments, has_nl: false)
f.mark_types_import_as_used(node.parent_type)
}
pub fn (mut f Fmt) fn_type_decl(node ast.FnTypeDecl) {
@ -1743,7 +1674,6 @@ pub fn (mut f Fmt) fn_type_decl(node ast.FnTypeDecl) {
f.write(arg.typ.share().str() + ' ')
}
f.write(arg.name)
f.mark_types_import_as_used(arg.typ)
mut s := f.no_cur_mod(f.table.type_to_str_using_aliases(arg.typ, f.mod2alias))
if arg.is_mut {
if s.starts_with('&') {
@ -1769,7 +1699,6 @@ pub fn (mut f Fmt) fn_type_decl(node ast.FnTypeDecl) {
}
f.write(')')
if fn_info.return_type.idx() != ast.void_type_idx {
f.mark_types_import_as_used(fn_info.return_type)
ret_str := f.no_cur_mod(f.table.type_to_str_using_aliases(fn_info.return_type,
f.mod2alias))
f.write(' ${ret_str}')
@ -1801,7 +1730,6 @@ pub fn (mut f Fmt) sum_type_decl(node ast.SumTypeDecl) {
mut variants := []Variant{cap: node.variants.len}
for i, variant in node.variants {
variants << Variant{f.table.type_to_str_using_aliases(variant.typ, f.mod2alias), i}
f.mark_types_import_as_used(variant.typ)
}
// The first variant is now used as the default variant when doing `a:= Sumtype{}`, i.e. a change in semantics.
// Sorting is disabled, because it is no longer a cosmetic change - it can change the default variant.
@ -1851,7 +1779,6 @@ pub fn (mut f Fmt) array_init(node ast.ArrayInit) {
}
if node.exprs.len == 0 && node.typ != 0 && node.typ != ast.void_type {
// `x := []string{}`
f.mark_types_import_as_used(node.typ)
if node.alias_type != ast.void_type {
f.write(f.table.type_to_str_using_aliases(node.alias_type, f.mod2alias))
} else {
@ -2054,7 +1981,6 @@ pub fn (mut f Fmt) array_init(node ast.ArrayInit) {
}
pub fn (mut f Fmt) as_cast(node ast.AsCast) {
f.mark_types_import_as_used(node.typ)
type_str := f.table.type_to_str_using_aliases(node.typ, f.mod2alias)
f.expr(node.expr)
f.write(' as ${type_str}')
@ -2078,7 +2004,6 @@ pub fn (mut f Fmt) at_expr(node ast.AtExpr) {
}
fn (mut f Fmt) write_static_method(name string, short_name string) {
f.mark_import_as_used(name.split('__static__')[0])
if short_name.contains('.') {
indx := short_name.index('.') or { -1 } + 1
f.write(short_name[0..indx] + short_name[indx..].replace('__static__', '.').capitalize())
@ -2094,20 +2019,6 @@ pub fn (mut f Fmt) call_expr(node ast.CallExpr) {
f.in_lambda_depth++
defer { f.in_lambda_depth-- }
}
if node.left is ast.Ident {
// `time.now()` without `time imported` is processed as a method call with `time` being
// a `node.left` expression. Import `time` automatically.
// TODO: fetch all available modules
if node.left.name in ['time', 'os', 'strings', 'math', 'json', 'base64']
&& !node.left.scope.known_var(node.left.name) {
f.file.imports << ast.Import{
source_name: node.left.name
mod: node.left.name
alias: node.left.name
}
f.used_imports[node.left.name] = true
}
}
f.expr(node.left)
is_method_newline = node.left.pos().last_line != node.name_pos.line_nr
if is_method_newline {
@ -2126,7 +2037,6 @@ pub fn (mut f Fmt) call_expr(node ast.CallExpr) {
if node.is_static_method {
f.write_static_method(node.name, name)
} else {
f.mark_import_as_used(name)
f.write(name)
}
}
@ -2164,7 +2074,6 @@ fn (mut f Fmt) write_generic_call_if_require(node ast.CallExpr) {
name = 'C.' + name
}
f.write(name)
f.mark_types_import_as_used(concrete_type)
if i != node.concrete_types.len - 1 {
f.write(', ')
}
@ -2227,7 +2136,6 @@ pub fn (mut f Fmt) cast_expr(node ast.CastExpr) {
}
}
f.write('${typ}(')
f.mark_types_import_as_used(node.typ)
f.expr(node.expr)
if node.has_arg {
f.write(', ')
@ -2370,7 +2278,6 @@ pub fn (mut f Fmt) dump_expr(node ast.DumpExpr) {
pub fn (mut f Fmt) enum_val(node ast.EnumVal) {
name := f.short_module(node.enum_name)
f.write(name + '.' + node.val)
f.mark_import_as_used(name)
}
pub fn (mut f Fmt) ident(node ast.Ident) {
@ -2414,7 +2321,6 @@ pub fn (mut f Fmt) ident(node ast.Ident) {
if node.name.contains('__static__') {
f.write_static_method(node.name, name)
} else {
f.mark_import_as_used(name)
f.write(name)
}
if node.concrete_types.len > 0 {
@ -2433,7 +2339,6 @@ pub fn (mut f Fmt) ident(node ast.Ident) {
} else if node.or_expr.kind == .block {
f.or_expr(node.or_expr)
}
f.mark_import_as_used(name)
}
}
@ -2791,10 +2696,6 @@ pub fn (mut f Fmt) lock_expr(node ast.LockExpr) {
pub fn (mut f Fmt) map_init(node ast.MapInit) {
if node.keys.len == 0 && !node.has_update_expr {
if node.typ > ast.void_type {
sym := f.table.sym(node.typ)
info := sym.info as ast.Map
f.mark_types_import_as_used(info.key_type)
f.mark_types_import_as_used(info.value_type)
f.write(f.table.type_to_str_using_aliases(node.typ, f.mod2alias))
}
if node.pos.line_nr == node.pos.last_line {
@ -2929,7 +2830,6 @@ pub fn (mut f Fmt) match_expr(node ast.MatchExpr) {
pub fn (mut f Fmt) offset_of(node ast.OffsetOf) {
f.write('__offsetof(${f.table.type_to_str_using_aliases(node.struct_type, f.mod2alias)}, ${node.field})')
f.mark_types_import_as_used(node.struct_type)
}
pub fn (mut f Fmt) or_expr(node ast.OrExpr) {
@ -3272,7 +3172,6 @@ pub fn (mut f Fmt) string_inter_literal(node ast.StringInterLiteral) {
pub fn (mut f Fmt) type_expr(node ast.TypeNode) {
if node.stmt == ast.empty_stmt {
f.mark_types_import_as_used(node.typ)
f.write(f.table.type_to_str_using_aliases(node.typ, f.mod2alias))
} else {
f.struct_decl(ast.StructDecl{ fields: (node.stmt as ast.StructDecl).fields },

View file

@ -41,7 +41,6 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl, is_anon bool) {
if i < node.implements_types.len - 1 {
f.write(', ')
}
f.mark_types_import_as_used(t.typ)
}
}
// Calculate the alignments first
@ -52,7 +51,6 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl, is_anon bool) {
f.comments_before_field(node.pre_comments)
}
for embed in node.embeds {
f.mark_types_import_as_used(embed.typ)
styp := f.table.type_to_str_using_aliases(embed.typ, f.mod2alias)
pre_comments := embed.comments.filter(it.pos.pos < embed.pos.pos)
@ -101,7 +99,6 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl, is_anon bool) {
if !f.write_anon_struct_field_decl(field.typ, field.anon_struct_decl) {
f.write(field_types[i])
}
f.mark_types_import_as_used(field.typ)
attrs_len := inline_attrs_len(field.attrs)
if field.has_default_expr {
f.write(' '.repeat(default_expr_align.max_len(field.pos.line_nr) - field_types[i].len))
@ -202,7 +199,6 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
defer {
f.is_struct_init = struct_init_save
}
f.mark_types_import_as_used(node.typ)
sym_name := f.table.sym(node.typ).name
// f.write('<old name: $type_sym.name>')
mut name := if !sym_name.starts_with('C.') && !sym_name.starts_with('JS.') {
@ -228,11 +224,9 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
f.comments(node.pre_comments, same_line: true, has_nl: true, level: .indent)
f.write('}')
}
f.mark_import_as_used(name)
} else if node.no_keys {
// `Foo{1,2,3}` (short syntax, no keys)
f.write('${name}{')
f.mark_import_as_used(name)
if node.has_update_expr {
f.write('...')
f.expr(node.update_expr)
@ -255,7 +249,6 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
}
if !use_short_args || node.is_anon {
f.write('${name}{')
f.mark_import_as_used(name)
if single_line_fields {
f.write(' ')
}

View file

@ -0,0 +1,8 @@
module main
import term { bright_cyan, colorize }
fn main() {
n := colorize(bright_cyan, 'hello')
println(n)
}

View file

@ -866,9 +866,19 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
g.write('*')
}
if node_.op == .assign && var_type.has_flag(.option_mut_param_t) {
if val is ast.CastExpr {
g.expr(left)
g.write('->state = ')
g.expr(val)
g.writeln('.state;')
}
g.write('memcpy(&')
g.expr(left)
if val is ast.CastExpr {
g.write('->data, ')
} else {
g.write('->data, *(${g.styp(val_type)}**)&')
}
} else if var_type.has_flag(.option_mut_param_t) {
g.expr(left)
g.write(' = ')

View file

@ -62,7 +62,7 @@ fn (mut g Gen) autofree_scope_vars2(scope &ast.Scope, start_pos int, end_pos int
match obj {
ast.Var {
g.trace_autofree('// var "${obj.name}" var.pos=${obj.pos.pos} var.line_nr=${obj.pos.line_nr}')
if obj.name == g.returned_var_name {
if obj.name in g.returned_var_names {
g.print_autofree_var(obj, 'returned from function')
g.trace_autofree('// skipping returned var')
continue
@ -247,3 +247,17 @@ fn (mut g Gen) autofree_var_call(free_fn_name string, v ast.Var) {
}
g.autofree_scope_stmts << af.str()
}
fn (mut g Gen) detect_used_var_on_return(expr ast.Expr) {
match expr {
ast.Ident {
g.returned_var_names[expr.name] = true
}
ast.StructInit {
for field_expr in expr.init_fields {
g.detect_used_var_on_return(field_expr.expr)
}
}
else {}
}
}
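Compared to the old single `returned_var_name`, the recursive walk also marks variables referenced inside a returned struct initializer, so `-autofree` will not free them right before the `return`. A hedged sketch of such a case (the struct and names are illustrative, not taken from the PR):

import os

struct Doc {
	title string
}

fn load(path string) Doc {
	text := os.read_file(path) or { panic(err) }
	// `text` is referenced inside the returned StructInit, so
	// detect_used_var_on_return now excludes it from autofree here
	return Doc{
		title: text
	}
}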

View file

@ -233,7 +233,7 @@ mut:
aggregate_type_idx int
arg_no_auto_deref bool // smartcast must not be dereferenced
branch_parent_pos int // used in BranchStmt (continue/break) for autofree stop position
returned_var_name string // to detect that a var doesn't need to be freed since it's being returned
returned_var_names map[string]bool // to detect that vars don't need to be freed, since they are being returned
infix_left_var_name string // a && if expr
curr_var_name []string // curr var name on assignment
called_fn_name string
@ -6241,6 +6241,9 @@ fn (mut g Gen) return_stmt(node ast.Return) {
g.writeln(' }, (${option_name}*)(&${tmpvar}), sizeof(${styp}));')
}
g.write_defer_stmts_when_needed()
if g.is_autofree {
g.detect_used_var_on_return(expr0)
}
g.autofree_scope_vars(node.pos.pos - 1, node.pos.line_nr, true)
g.writeln('return ${tmpvar};')
return
@ -6286,6 +6289,9 @@ fn (mut g Gen) return_stmt(node ast.Return) {
g.writeln(' }, (${result_name}*)(&${tmpvar}), sizeof(${styp}));')
}
g.write_defer_stmts_when_needed()
if g.is_autofree {
g.detect_used_var_on_return(expr0)
}
g.autofree_scope_vars(node.pos.pos - 1, node.pos.line_nr, true)
g.writeln('return ${tmpvar};')
return
@ -6294,9 +6300,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
// set free_parent_scopes to true, since all variables defined in parent
// scopes need to be freed before the return
if g.is_autofree {
if expr0 is ast.Ident {
g.returned_var_name = expr0.name
}
g.detect_used_var_on_return(expr0)
if !use_tmp_var && !g.is_builtin_mod {
use_tmp_var = expr0 is ast.CallExpr
}

View file

@ -196,7 +196,7 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
}
*/
g.returned_var_name = ''
g.returned_var_names.clear()
old_g_autofree := g.is_autofree
if node.is_manualfree {
g.is_autofree = false

View file

@ -0,0 +1,9 @@
_result_toml__scanner__Scanner_ptr toml__scanner__new_scanner(toml__scanner__Config config) {
_result_toml__scanner__Scanner_ptr _t3 = {0};
_result_ok(&(toml__scanner__Scanner*[]) { s }, (_result*)(&_t3), sizeof(toml__scanner__Scanner*));
return _t3;
}
toml__ast__Quoted toml__parser__Parser_quoted(toml__parser__Parser* p) {
return ((toml__ast__Quoted){.text = string_clone_static(lit),.pos = toml__token__Token_pos(&p->tok),.is_multiline = is_multiline,.quote = quote,});
}

View file

vlib/v/gen/c/testdata/autofree_toml.vv (new vendored file, 15 lines)
View file

@ -0,0 +1,15 @@
// vtest vflags: -autofree
import toml
import os
fn main() {
config_fname := 'config.toml'
tab_title := 'test tab title'
if !os.exists(config_fname) {
mut f := os.create(config_fname) or { panic(err) }
f.writeln('tab_title = "${tab_title}"') or { panic(err) }
f.close()
}
doc := toml.parse_file(config_fname) or { panic(err) }
assert doc.value('tab_title').string() == tab_title
}

View file

@ -1300,6 +1300,10 @@ fn (mut w Walker) mark_resource_dependencies() {
w.fn_by_name(builderptr_idx + '.write_string')
w.fn_by_name('strings.new_builder')
w.uses_free[ast.string_type] = true
if w.table.dumps.keys().any(ast.Type(u32(it)).has_flag(.option)) {
w.fn_by_name('str_intp')
}
}
if w.features.auto_str_ptr {
w.fn_by_name('isnil')
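The guard above covers `dump()` of option-typed values: their string form goes through `str_intp`, which the `-skip-unused` walker must therefore keep whenever any dumped type carries the `.option` flag. A small illustration (the exact printed text is an assumption based on the usual dump format):

x := ?int(42)
dump(x) // roughly: [main.v:2] x: Option(42)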

View file

@ -61,7 +61,7 @@ fn (mut p Parser) enum_decl() ast.EnumDecl {
end_pos)
return ast.EnumDecl{}
}
if enum_name in p.imported_symbols {
if p.is_imported_symbol(enum_name) {
p.error_with_pos('cannot register enum `${enum_name}`, this type was already imported',
end_pos)
return ast.EnumDecl{}

View file

@ -89,10 +89,13 @@ fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr {
}
or_kind = if is_not { .propagate_result } else { .propagate_option }
}
if fn_name in p.imported_symbols {
if p.is_imported_symbol(fn_name) {
check := !p.imported_symbols_used[fn_name]
fn_name = p.imported_symbols[fn_name]
if check {
p.register_used_import_for_symbol_name(fn_name)
}
}
comments := p.eat_comments(same_line: true)
pos.update_last_line(p.prev_tok.line_nr)
return ast.CallExpr{
@ -148,6 +151,13 @@ fn (mut p Parser) call_args() []ast.CallArg {
expr = p.struct_init('void_type', .short_syntax, false)
} else {
expr = p.expr(0)
if mut expr is ast.Ident {
if p.is_imported_symbol(expr.name) && !p.imported_symbols_used[expr.name] {
// func call arg is another function call
// import term { bright_cyan, colorize } ... colorize(bright_cyan, 'hello')
p.register_used_import_for_symbol_name(p.imported_symbols[expr.name])
}
}
}
if array_decompose {
expr = ast.ArrayDecompose{
@ -383,7 +393,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
}
}
if !p.pref.is_fmt {
if name in p.imported_symbols {
if p.is_imported_symbol(name) {
p.error_with_pos('cannot redefine imported function `${name}`', name_pos)
return ast.FnDecl{
scope: unsafe { nil }

View file

@ -35,6 +35,10 @@ fn (mut p Parser) register_used_import(alias string) {
fn (mut p Parser) register_used_import_for_symbol_name(sym_name string) {
short_import_name := sym_name.all_before_last('.').all_after_last('.')
short_symbol_name := sym_name.all_after_last('.')
if p.is_imported_symbol(short_symbol_name) {
p.imported_symbols_used[short_symbol_name] = true
}
for alias, mod in p.imports {
if mod == short_import_name {
p.register_used_import(alias)
@ -62,7 +66,14 @@ fn (mut p Parser) register_auto_import(alias string) {
if alias !in p.auto_imports {
p.auto_imports << alias
}
p.register_used_import(alias)
// do not call `register_used_import()` here, as it may not be used by the code.
// for example, when using `chan`, there may be no `sync.xx()` call in the code.
}
fn (mut p Parser) register_implied_import(alias string) {
if alias !in p.implied_imports {
p.implied_imports << alias
}
}
fn (mut p Parser) check_unused_imports() {
@ -75,7 +86,8 @@ fn (mut p Parser) check_unused_imports() {
for import_m in p.ast_imports {
alias := import_m.alias
mod := import_m.mod
if !(alias.len == 1 && alias[0] == `_`) && !p.is_used_import(alias) {
if !(alias.len == 1 && alias[0] == `_`) && !p.is_used_import(alias)
&& alias !in p.auto_imports {
mod_alias := if alias == mod { alias } else { '${alias} (${mod})' }
p.warn_with_pos("module '${mod_alias}' is imported but never used", import_m.mod_pos)
}
@ -310,12 +322,13 @@ fn (mut p Parser) import_syms(mut parent ast.Import) {
for p.tok.kind == .name {
pos := p.tok.pos()
alias := p.check_name()
if alias in p.imported_symbols {
if p.is_imported_symbol(alias) {
p.error_with_pos('cannot register symbol `${alias}`, it was already imported',
pos)
return
}
p.imported_symbols[alias] = parent.mod + '.' + alias
p.rebuild_imported_symbols_matcher(alias)
// so we can work with this in fmt+checker
parent.syms << ast.ImportSymbol{
pos: pos
@ -335,3 +348,12 @@ fn (mut p Parser) import_syms(mut parent ast.Import) {
}
p.next()
}
fn (mut p Parser) rebuild_imported_symbols_matcher(name string) {
p.imported_symbols_trie = token.new_keywords_matcher_from_array_trie(p.imported_symbols.keys())
}
@[inline]
fn (mut p Parser) is_imported_symbol(name string) bool {
return p.imported_symbols_trie.matches(name)
}

View file

@ -636,9 +636,12 @@ fn (mut p Parser) parse_any_type(language ast.Language, is_ptr bool, check_dot b
} else if p.expr_mod != '' && !p.inside_generic_params {
// p.expr_mod is from the struct and not from the generic parameter
name = p.expr_mod + '.' + name
} else if name in p.imported_symbols {
} else if p.is_imported_symbol(name) {
check := !p.imported_symbols_used[name]
name = p.imported_symbols[name]
if check {
p.register_used_import_for_symbol_name(name)
}
} else if !p.builtin_mod && name.len > 1 && name !in p.table.type_idxs {
// `Foo` in module `mod` means `mod.Foo`
name = p.mod + '.' + name

View file

@ -81,9 +81,12 @@ mut:
last_enum_mod string // saves the last enum mod name on an array initialization
imports map[string]string // alias => mod_name
ast_imports []ast.Import // mod_names
used_imports []string // alias
used_imports []string
auto_imports []string // imports, the user does not need to specify
implied_imports []string // imports that the user's code uses but omitted to import explicitly, used by `vfmt`
imported_symbols map[string]string
imported_symbols_used map[string]bool
imported_symbols_trie token.KeywordsMatcherTrie
is_amp bool // for generating the right code for `&Foo{}`
returns bool
is_stmt_ident bool // true while the beginning of a statement is an ident/selector
@ -341,7 +344,11 @@ pub fn (mut p Parser) parse() &ast.File {
mod: module_decl
imports: p.ast_imports
imported_symbols: p.imported_symbols
imported_symbols_trie: token.new_keywords_matcher_from_array_trie(p.imported_symbols.keys())
imported_symbols_used: p.imported_symbols_used
auto_imports: p.auto_imports
used_imports: p.used_imports
implied_imports: p.implied_imports
stmts: stmts
scope: p.scope
global_scope: p.table.global_scope
@ -587,7 +594,7 @@ fn (mut p Parser) check_name() string {
name := p.tok.lit
if p.tok.kind != .name && p.peek_tok.kind == .dot && name in p.imports {
p.register_used_import(name)
} else if p.tok.kind == .name && p.peek_tok.kind == .dot && name in p.imported_symbols {
} else if p.tok.kind == .name && p.is_imported_symbol(name) && !p.imported_symbols_used[name] {
// symbols like Enum.field_name
p.register_used_import_for_symbol_name(p.imported_symbols[name])
}
@ -2085,6 +2092,17 @@ fn (mut p Parser) dot_expr(left ast.Expr) ast.Expr {
if mut left_node is ast.CallExpr {
left_node.is_return_used = true
}
if p.pref.is_fmt {
if mut left_node is ast.Ident {
// `time.now()` without `time imported` is processed as a method call with `time` being
// a `left_node` expression. Import `time` automatically.
// TODO: fetch all available modules
if left_node.name in ['time', 'os', 'strings', 'math', 'json', 'base64']
&& !left_node.scope.known_var(left_node.name) {
p.register_implied_import(left_node.name)
}
}
}
mcall_expr := ast.CallExpr{
left: left
name: field_name
@ -2681,7 +2699,7 @@ fn (mut p Parser) type_decl() ast.TypeDecl {
return ast.FnTypeDecl{}
}
}
if name in p.imported_symbols {
if p.is_imported_symbol(name) {
p.error_with_pos('cannot register alias `${name}`, this type was already imported',
end_pos)
return ast.AliasTypeDecl{}
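The `p.pref.is_fmt` branch above records an implied import when code calls one of the well-known modules (`time`, `os`, `strings`, `math`, `json`, `base64`) without importing it; vfmt then emits it via `implied_import_str`. A hedged before/after sketch (the exact position of the inserted import is an assumption):

// before running `v fmt`:
fn main() {
	println(time.now())
}

// after: the missing import is added near the top of the file
import time

fn main() {
	println(time.now())
}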

View file

@ -73,7 +73,7 @@ fn (mut p Parser) struct_decl(is_anon bool) ast.StructDecl {
p.error_with_pos('struct names must have more than one character', name_pos)
return ast.StructDecl{}
}
if name in p.imported_symbols {
if p.is_imported_symbol(name) {
p.error_with_pos('cannot register struct `${name}`, this type was already imported',
name_pos)
return ast.StructDecl{}
@ -651,7 +651,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
mut pre_comments := p.eat_comments()
p.check(.lcbr)
pre_comments << p.eat_comments()
if modless_name in p.imported_symbols {
if p.is_imported_symbol(modless_name) {
p.error_with_pos('cannot register interface `${interface_name}`, this type was already imported',
name_pos)
return ast.InterfaceDecl{}

View file

@ -398,6 +398,7 @@ fn advanced_options() {
s2 := parse_header1('foo:bar') or { return }
_ := s.len + s2.len // avoid warning for unused variables
// TODO: fix -autofree, so that it adds this free automatically:
unsafe { s.free() }
unsafe { s2.free() }
}

View file

@ -0,0 +1,15 @@
module main
fn test_main() {
mut data := [3]?int{}
for mut d in data {
d = ?int(1)
assert '${d}' == 'Option(1)'
}
for i in 0 .. data.len {
data[i] = ?int(3)
}
assert '${data}' == '[Option(3), Option(3), Option(3)]'
}

View file

@ -11,6 +11,11 @@ pub mut:
max_len int
}
// str returns a short representation of matcher
pub fn (km &KeywordsMatcherTrie) str() string {
return 'KeywordsMatcherTrie{ /* nodes.len: ${km.nodes.len} */ min_len: ${km.min_len}, max_len: ${km.max_len} }'
}
// TrieNode is a single node from a trie, used by KeywordsMatcherTrie
pub struct TrieNode {
pub mut:
@ -18,6 +23,14 @@ pub mut:
value int = -1 // when != -1, it is a leaf node representing a match
}
// str returns a string representation of the node content
pub fn (node &TrieNode) str() string {
if isnil(node) {
return '&TrieNode(nil)'
}
return '&TrieNode{value: ${node.value}}'
}
// find tries to find the given `word` in the set of all previously added words
// to the KeywordsMatcherTrie instance. It returns -1 if the word was NOT found
there at all. If the word was found, find will return the `value` (value >= 0),