compiler: rename Tok/Token to Token/TokenKind

Anders Busch 2019-10-09 00:05:34 +02:00 committed by Alexander Medvednikov
parent 89ea8a0275
commit c620da9089
6 changed files with 209 additions and 211 deletions
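For orientation: the rename settles on `Token` for the scanned token value and `TokenKind` for the enum that classifies it. A minimal sketch of that scheme in present-day V syntax (variant names here are illustrative, not the compiler's actual definitions):

enum TokenKind {
	eof
	name
	number
}

struct Token {
	tok     TokenKind // the kind, for quick comparisons
	lit     string    // literal text of the token
	line_nr int       // line number in the source
}

fn main() {
	t := Token{ tok: .name, lit: 'foo', line_nr: 1 }
	if t.tok == .name {
		println('identifier `${t.lit}` on line ${t.line_nr}')
	}
}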


@@ -9,10 +9,8 @@ import (
strings
)
-// TODO rename to Token
-// TODO rename enum Token to TokenType
-struct Tok {
-tok Token // the token number/enum; for quick comparisons
+struct Token {
+tok TokenKind // the token number/enum; for quick comparisons
lit string // literal representation of the token
line_nr int // the line number in the source where the token occurred
name_idx int // name table index for O(1) lookup
@@ -32,11 +30,11 @@ struct Parser {
pref &Preferences // Preferences shared from V struct
mut:
scanner &Scanner
-tokens []Tok
+tokens []Token
token_idx int
-tok Token
-prev_tok Token
-prev_tok2 Token // TODO remove these once the tokens are cached
+tok TokenKind
+prev_tok TokenKind
+prev_tok2 TokenKind // TODO remove these once the tokens are cached
lit string
cgen &CGen
table &Table
@@ -165,7 +163,7 @@ fn (v mut V) new_parser(scanner &Scanner, id string) Parser {
fn (p mut Parser) scan_tokens() {
for {
res := p.scanner.scan()
-p.tokens << Tok{
+p.tokens << Token{
tok: res.tok
lit: res.lit
line_nr: p.scanner.line_nr
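The hunk above shows scan_tokens() draining the scanner into the p.tokens cache before parsing begins. A self-contained sketch of that eager-caching pattern, with a stub scanner and an explicit eof-based exit that the truncated hunk does not show (all names are illustrative, not the compiler's real API):

enum TokenKind {
	name
	eof
}

struct Token {
	tok TokenKind
	lit string
}

struct Scanner {
mut:
	pos   int
	input []Token
}

// Yields the next queued token, then eof forever.
fn (mut s Scanner) scan() Token {
	if s.pos >= s.input.len {
		return Token{ tok: .eof }
	}
	t := s.input[s.pos]
	s.pos++
	return t
}

fn main() {
	mut s := Scanner{ input: [Token{ tok: .name, lit: 'a' }, Token{ tok: .name, lit: 'b' }] }
	mut tokens := []Token{}
	// Drain the scanner into the cache up front, like scan_tokens().
	for {
		t := s.scan()
		tokens << t
		if t.tok == .eof {
			break
		}
	}
	println(tokens.len) // 3: two names plus the eof marker
}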
@@ -188,7 +186,7 @@ fn (p mut Parser) next() {
p.prev_tok = p.tok
p.scanner.prev_tok = p.tok
if p.token_idx >= p.tokens.len {
-p.tok = Token.eof
+p.tok = TokenKind.eof
p.lit = ''
return
}
@@ -199,25 +197,25 @@
p.scanner.line_nr = res.line_nr
}
-fn (p & Parser) peek() Token {
+fn (p & Parser) peek() TokenKind {
if p.token_idx >= p.tokens.len - 2 {
-return Token.eof
+return TokenKind.eof
}
tok := p.tokens[p.token_idx]
return tok.tok
}
// TODO remove dups
-[inline] fn (p &Parser) prev_token() Tok {
+[inline] fn (p &Parser) prev_token() Token {
return p.tokens[p.token_idx - 2]
}
-[inline] fn (p &Parser) cur_tok() Tok {
+[inline] fn (p &Parser) cur_tok() Token {
return p.tokens[p.token_idx - 1]
}
-[inline] fn (p &Parser) peek_token() Tok {
+[inline] fn (p &Parser) peek_token() Token {
if p.token_idx >= p.tokens.len - 2 {
-return Tok{ tok:Token.eof }
+return Token{ tok:TokenKind.eof }
}
return p.tokens[p.token_idx]
}
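These helpers all read from the cached token array and fall back to an eof sentinel rather than indexing out of bounds; peek() returns only the TokenKind, while peek_token() returns the full Token. A hedged sketch of the bounds-checked peek, using simplified stand-in types:

enum TokenKind {
	name
	eof
}

struct Token {
	tok TokenKind
	lit string
}

struct Parser {
	tokens    []Token
	token_idx int
}

// Like peek(): expose only the kind of the upcoming token,
// with eof as the out-of-bounds sentinel (same conservative
// `len - 2` bound as the original).
fn (p &Parser) peek() TokenKind {
	if p.token_idx >= p.tokens.len - 2 {
		return TokenKind.eof
	}
	return p.tokens[p.token_idx].tok
}

fn main() {
	p := Parser{ tokens: [Token{ tok: .name, lit: 'a' }, Token{ tok: .name, lit: 'b' }, Token{ tok: .eof }] }
	println(p.peek()) // name
}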
@@ -286,7 +284,7 @@ fn (p mut Parser) parse(pass Pass) {
p.fgenln('')
}
}
-case Token.key_enum:
+case TokenKind.key_enum:
p.next()
if p.tok == .name {
p.fgen('enum ')
@@ -303,7 +301,7 @@ fn (p mut Parser) parse(pass Pass) {
else {
p.check(.name)
}
-case Token.key_pub:
+case TokenKind.key_pub:
if p.peek() == .func {
p.fn_decl()
} else if p.peek() == .key_struct {
@@ -312,27 +310,27 @@ fn (p mut Parser) parse(pass Pass) {
} else {
p.error('wrong pub keyword usage')
}
-case Token.func:
+case TokenKind.func:
p.fn_decl()
-case Token.key_type:
+case TokenKind.key_type:
p.type_decl()
-case Token.lsbr:
+case TokenKind.lsbr:
// `[` can only mean an [attribute] before a function
// or a struct definition
p.attribute()
-case Token.key_struct, Token.key_interface, Token.key_union, Token.lsbr:
+case TokenKind.key_struct, TokenKind.key_interface, TokenKind.key_union, TokenKind.lsbr:
p.struct_decl()
-case Token.key_const:
+case TokenKind.key_const:
p.const_decl()
-case Token.hash:
+case TokenKind.hash:
// insert C code, TODO this is going to be removed ASAP
// some libraries (like UI) still have lots of C code
// # puts("hello");
p.chash()
-case Token.dollar:
+case TokenKind.dollar:
// $if, $else
p.comp_time()
-case Token.key_global:
+case TokenKind.key_global:
if !p.pref.translated && !p.pref.is_live &&
!p.builtin_mod && !p.pref.building_v && !os.getwd().contains('/volt') {
p.error('__global is only allowed in translated code')
@@ -355,7 +353,7 @@ fn (p mut Parser) parse(pass Pass) {
// p.genln('; // global')
g += '; // global'
p.cgen.consts << g
-case Token.eof:
+case TokenKind.eof:
//p.log('end of parse()')
// TODO: check why this was added? everything seems to work
// without it, and it's already happening in fn_decl
@@ -581,12 +579,12 @@ fn (p mut Parser) interface_method(field_name, receiver string) &Fn {
return method
}
-fn key_to_type_cat(tok Token) TypeCategory {
+fn key_to_type_cat(tok TokenKind) TypeCategory {
switch tok {
-case Token.key_interface: return TypeCategory.interface_
-case Token.key_struct: return TypeCategory.struct_
-case Token.key_union: return TypeCategory.union_
-//Token.key_ => return .interface_
+case TokenKind.key_interface: return TypeCategory.interface_
+case TokenKind.key_struct: return TypeCategory.struct_
+case TokenKind.key_union: return TypeCategory.union_
+//TokenKind.key_ => return .interface_
}
verror('Unknown token: $tok')
return TypeCategory.builtin
@@ -862,13 +860,13 @@ fn (p &Parser) strtok() string {
// same as check(), but adds a space to the formatter output
// TODO bad name
-fn (p mut Parser) check_space(expected Token) {
+fn (p mut Parser) check_space(expected TokenKind) {
p.fspace()
p.check(expected)
p.fspace()
}
-fn (p mut Parser) check(expected Token) {
+fn (p mut Parser) check(expected TokenKind) {
if p.tok != expected {
println('check()')
s := 'expected `${expected.str()}` but got `${p.strtok()}`'
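check() is the parser's single enforcement point: if the current token kind is not the expected one, it reports "expected X but got Y" and aborts. A toy sketch of that contract, with panic standing in for the compiler's real error reporting (names illustrative):

enum TokenKind {
	lpar
	name
	rpar
}

struct Parser {
mut:
	tokens    []TokenKind
	token_idx int
}

// Like check(): consume the current token only if it is the expected kind.
fn (mut p Parser) check(expected TokenKind) {
	got := p.tokens[p.token_idx]
	if got != expected {
		panic('expected `${expected}` but got `${got}`')
	}
	p.token_idx++
}

fn main() {
	mut p := Parser{ tokens: [TokenKind.lpar, TokenKind.name, TokenKind.rpar] }
	p.check(.lpar)
	p.check(.name)
	p.check(.rpar)
	println('all three tokens matched')
}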
@@ -1265,52 +1263,52 @@ fn (p mut Parser) statement(add_semi bool) string {
// `a + 3`, `a(7)`, or just `a`
q = p.bool_expression()
}
-case Token.key_goto:
+case TokenKind.key_goto:
p.check(.key_goto)
p.fgen(' ')
label := p.check_name()
p.genln('goto $label;')
return ''
-case Token.key_defer:
+case TokenKind.key_defer:
p.defer_st()
return ''
-case Token.hash:
+case TokenKind.hash:
p.chash()
return ''
-case Token.dollar:
+case TokenKind.dollar:
p.comp_time()
-case Token.key_if:
+case TokenKind.key_if:
p.if_st(false, 0)
-case Token.key_for:
+case TokenKind.key_for:
p.for_st()
-case Token.key_switch:
+case TokenKind.key_switch:
p.switch_statement()
-case Token.key_match:
+case TokenKind.key_match:
p.match_statement(false)
-case Token.key_mut, Token.key_static:
+case TokenKind.key_mut, TokenKind.key_static:
p.var_decl()
-case Token.key_return:
+case TokenKind.key_return:
p.return_st()
-case Token.lcbr:// {} block
+case TokenKind.lcbr:// {} block
p.check(.lcbr)
p.genln('{')
p.statements()
return ''
-case Token.key_continue:
+case TokenKind.key_continue:
if p.for_expr_cnt == 0 {
p.error('`continue` statement outside `for`')
}
p.genln('continue')
p.check(.key_continue)
-case Token.key_break:
+case TokenKind.key_break:
if p.for_expr_cnt == 0 {
p.error('`break` statement outside `for`')
}
p.genln('break')
p.check(.key_break)
-case Token.key_go:
+case TokenKind.key_go:
p.go_statement()
-case Token.key_assert:
+case TokenKind.key_assert:
p.assert_statement()
default:
// An expression as a statement
@@ -1357,11 +1355,11 @@ fn ($v.name mut $v.typ) $p.cur_fn.name (...) {
is_str := v.typ == 'string'
is_ustr := v.typ == 'ustring'
switch tok {
-case Token.assign:
+case TokenKind.assign:
if !is_map && !p.is_empty_c_struct_init {
p.gen(' = ')
}
-case Token.plus_assign:
+case TokenKind.plus_assign:
if is_str && !p.is_js {
p.gen('= string_add($v.name, ')// TODO can't do `foo.bar += '!'`
}
@@ -1628,42 +1626,42 @@ fn (p mut Parser) bterm() string {
if is_str && !p.is_js { //&& !p.is_sql {
p.gen(')')
switch tok {
-case Token.eq: p.cgen.set_placeholder(ph, 'string_eq(')
-case Token.ne: p.cgen.set_placeholder(ph, 'string_ne(')
-case Token.le: p.cgen.set_placeholder(ph, 'string_le(')
-case Token.ge: p.cgen.set_placeholder(ph, 'string_ge(')
-case Token.gt: p.cgen.set_placeholder(ph, 'string_gt(')
-case Token.lt: p.cgen.set_placeholder(ph, 'string_lt(')
+case TokenKind.eq: p.cgen.set_placeholder(ph, 'string_eq(')
+case TokenKind.ne: p.cgen.set_placeholder(ph, 'string_ne(')
+case TokenKind.le: p.cgen.set_placeholder(ph, 'string_le(')
+case TokenKind.ge: p.cgen.set_placeholder(ph, 'string_ge(')
+case TokenKind.gt: p.cgen.set_placeholder(ph, 'string_gt(')
+case TokenKind.lt: p.cgen.set_placeholder(ph, 'string_lt(')
}
/*
-Token.eq => p.cgen.set_placeholder(ph, 'string_eq(')
-Token.ne => p.cgen.set_placeholder(ph, 'string_ne(')
-Token.le => p.cgen.set_placeholder(ph, 'string_le(')
-Token.ge => p.cgen.set_placeholder(ph, 'string_ge(')
-Token.gt => p.cgen.set_placeholder(ph, 'string_gt(')
-Token.lt => p.cgen.set_placeholder(ph, 'string_lt(')
+TokenKind.eq => p.cgen.set_placeholder(ph, 'string_eq(')
+TokenKind.ne => p.cgen.set_placeholder(ph, 'string_ne(')
+TokenKind.le => p.cgen.set_placeholder(ph, 'string_le(')
+TokenKind.ge => p.cgen.set_placeholder(ph, 'string_ge(')
+TokenKind.gt => p.cgen.set_placeholder(ph, 'string_gt(')
+TokenKind.lt => p.cgen.set_placeholder(ph, 'string_lt(')
*/
}
if is_ustr {
p.gen(')')
switch tok {
-case Token.eq: p.cgen.set_placeholder(ph, 'ustring_eq(')
-case Token.ne: p.cgen.set_placeholder(ph, 'ustring_ne(')
-case Token.le: p.cgen.set_placeholder(ph, 'ustring_le(')
-case Token.ge: p.cgen.set_placeholder(ph, 'ustring_ge(')
-case Token.gt: p.cgen.set_placeholder(ph, 'ustring_gt(')
-case Token.lt: p.cgen.set_placeholder(ph, 'ustring_lt(')
+case TokenKind.eq: p.cgen.set_placeholder(ph, 'ustring_eq(')
+case TokenKind.ne: p.cgen.set_placeholder(ph, 'ustring_ne(')
+case TokenKind.le: p.cgen.set_placeholder(ph, 'ustring_le(')
+case TokenKind.ge: p.cgen.set_placeholder(ph, 'ustring_ge(')
+case TokenKind.gt: p.cgen.set_placeholder(ph, 'ustring_gt(')
+case TokenKind.lt: p.cgen.set_placeholder(ph, 'ustring_lt(')
}
}
if is_float {
p.gen(')')
switch tok {
-case Token.eq: p.cgen.set_placeholder(ph, '${expr_type}_eq(')
-case Token.ne: p.cgen.set_placeholder(ph, '${expr_type}_ne(')
-case Token.le: p.cgen.set_placeholder(ph, '${expr_type}_le(')
-case Token.ge: p.cgen.set_placeholder(ph, '${expr_type}_ge(')
-case Token.gt: p.cgen.set_placeholder(ph, '${expr_type}_gt(')
-case Token.lt: p.cgen.set_placeholder(ph, '${expr_type}_lt(')
+case TokenKind.eq: p.cgen.set_placeholder(ph, '${expr_type}_eq(')
+case TokenKind.ne: p.cgen.set_placeholder(ph, '${expr_type}_ne(')
+case TokenKind.le: p.cgen.set_placeholder(ph, '${expr_type}_le(')
+case TokenKind.ge: p.cgen.set_placeholder(ph, '${expr_type}_ge(')
+case TokenKind.gt: p.cgen.set_placeholder(ph, '${expr_type}_gt(')
+case TokenKind.lt: p.cgen.set_placeholder(ph, '${expr_type}_lt(')
}
}
}
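All three switches rely on the same code-generation trick: the left operand has already been emitted, so the generator cannot simply print string_eq( in front of it. Instead it recorded a placeholder position beforehand, emits the operands and the closing parenthesis, and then splices the call prefix in at the saved position. A simplified sketch of that mechanism, with CGen reduced to a plain string buffer (not the compiler's real generator):

struct CGen {
mut:
	out string
}

fn (mut cg CGen) gen(s string) {
	cg.out += s
}

// Remember the current end of the output as a placeholder position.
fn (cg &CGen) add_placeholder() int {
	return cg.out.len
}

// Splice text in at a previously recorded placeholder.
fn (mut cg CGen) set_placeholder(ph int, s string) {
	cg.out = cg.out[..ph] + s + cg.out[ph..]
}

fn main() {
	mut cg := CGen{}
	ph := cg.add_placeholder()
	cg.gen('a, ') // left operand; `, ` stands in for the `==` operator
	cg.gen('b')   // right operand
	cg.gen(')')   // close the not-yet-opened call
	cg.set_placeholder(ph, 'string_eq(')
	println(cg.out) // string_eq(a, b)
}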
@@ -2477,7 +2475,7 @@ fn (p mut Parser) expression() string {
return 'int'
}
// + - | ^
-for p.tok in [Token.plus, .minus, .pipe, .amp, .xor] {
+for p.tok in [TokenKind.plus, .minus, .pipe, .amp, .xor] {
tok_op := p.tok
if typ == 'bool' {
p.error('operator ${p.tok.str()} not defined on bool ')
@@ -2576,7 +2574,7 @@ fn (p mut Parser) unary() string {
mut typ := ''
tok := p.tok
switch tok {
-case Token.not:
+case TokenKind.not:
p.gen('!')
p.check(.not)
// typ should be bool type
@@ -2585,7 +2583,7 @@ fn (p mut Parser) unary() string {
p.error('operator ! requires bool type, not `$typ`')
}
-case Token.bit_not:
+case TokenKind.bit_not:
p.gen('~')
p.check(.bit_not)
typ = p.bool_expression()
@@ -2606,7 +2604,7 @@ fn (p mut Parser) factor() string {
p.gen('opt_none()')
p.check(.key_none)
return p.expected_type
-case Token.number:
+case TokenKind.number:
typ = 'int'
// Check if float (`1.0`, `1e+3`) but not if is hexa
if (p.lit.contains('.') || (p.lit.contains('e') || p.lit.contains('E'))) &&
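The number case above (the condition is cut off at the hunk boundary) classifies a literal as a float when it contains `.`, `e`, or `E`, but must exempt hex literals, where `e` and `E` are digits rather than exponent markers. A standalone sketch of that rule (the helper name is hypothetical):

// Classify a numeric literal: `.`/`e`/`E` mark a float, except in a
// hex literal, where `e` and `E` are ordinary digits.
fn is_float_lit(lit string) bool {
	if lit.starts_with('0x') || lit.starts_with('0X') {
		return false
	}
	return lit.contains('.') || lit.contains('e') || lit.contains('E')
}

fn main() {
	println(is_float_lit('1.0'))  // true
	println(is_float_lit('1e+3')) // true
	println(is_float_lit('0x1E')) // false: hex digit, not an exponent
	println(is_float_lit('42'))   // false
}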
@@ -2624,13 +2622,13 @@ fn (p mut Parser) factor() string {
}
p.gen(p.lit)
p.fgen(p.lit)
-case Token.minus:
+case TokenKind.minus:
p.gen('-')
p.fgen('-')
p.next()
return p.factor()
// Variable
-case Token.key_sizeof:
+case TokenKind.key_sizeof:
p.gen('sizeof(')
p.fgen('sizeof(')
p.next()
@@ -2640,10 +2638,10 @@ fn (p mut Parser) factor() string {
p.gen('$sizeof_typ)')
p.fgen('$sizeof_typ)')
return 'int'
-case Token.amp, Token.dot, Token.mul:
+case TokenKind.amp, TokenKind.dot, TokenKind.mul:
// (dot is for enum vals: `.green`)
return p.name_expr()
-case Token.name:
+case TokenKind.name:
// map[string]int
if p.lit == 'map' && p.peek() == .lsbr {
return p.map_init()
@ -2660,7 +2658,7 @@ fn (p mut Parser) factor() string {
//}
typ = p.name_expr()
return typ
-case Token.key_default:
+case TokenKind.key_default:
p.next()
p.next()
name := p.check_name()
@ -2670,7 +2668,7 @@ fn (p mut Parser) factor() string {
p.gen('default(T)')
p.next()
return 'T'
-case Token.lpar:
+case TokenKind.lpar:
//p.gen('(/*lpar*/')
p.gen('(')
p.check(.lpar)
@@ -2684,38 +2682,38 @@ fn (p mut Parser) factor() string {
p.ptr_cast = false
p.gen(')')
return typ
-case Token.chartoken:
+case TokenKind.chartoken:
p.char_expr()
typ = 'byte'
return typ
-case Token.str:
+case TokenKind.str:
p.string_expr()
typ = 'string'
return typ
-case Token.key_false:
+case TokenKind.key_false:
typ = 'bool'
p.gen('0')
p.fgen('false')
-case Token.key_true:
+case TokenKind.key_true:
typ = 'bool'
p.gen('1')
p.fgen('true')
-case Token.lsbr:
+case TokenKind.lsbr:
// `[1,2,3]` or `[]` or `[20]byte`
// TODO have to return because arrayInit does next()
// everything should do next()
return p.array_init()
-case Token.lcbr:
+case TokenKind.lcbr:
// `m := { 'one': 1 }`
if p.peek() == .str {
return p.map_init()
}
// { user | name :'new name' }
return p.assoc()
-case Token.key_if:
+case TokenKind.key_if:
typ = p.if_st(true, 0)
return typ
-case Token.key_match:
+case TokenKind.key_match:
typ = p.match_statement(true)
return typ
default: