import ../frontend/parsing/lexer


proc fillSymbolTable*(tokenizer: Lexer) =
  ## Initializes the Lexer's symbol
  ## table with builtin symbols and
  ## keywords

  # 1-byte symbols
  tokenizer.symbols.addSymbol("{", LeftBrace)
  tokenizer.symbols.addSymbol("}", RightBrace)
  tokenizer.symbols.addSymbol("(", LeftParen)
  tokenizer.symbols.addSymbol(")", RightParen)
  tokenizer.symbols.addSymbol("[", LeftBracket)
  tokenizer.symbols.addSymbol("]", RightBracket)
  tokenizer.symbols.addSymbol(".", Dot)
  tokenizer.symbols.addSymbol(",", Comma)
  tokenizer.symbols.addSymbol(";", Semicolon)

  # Keywords
  tokenizer.symbols.addKeyword("type", TokenType.Type)
  tokenizer.symbols.addKeyword("enum", Enum)
  tokenizer.symbols.addKeyword("case", Case)
  tokenizer.symbols.addKeyword("operator", Operator)
  tokenizer.symbols.addKeyword("generator", Generator)
  tokenizer.symbols.addKeyword("fn", TokenType.Function)
  tokenizer.symbols.addKeyword("coroutine", Coroutine)
  tokenizer.symbols.addKeyword("break", TokenType.Break)
  tokenizer.symbols.addKeyword("continue", Continue)
  tokenizer.symbols.addKeyword("while", While)
  tokenizer.symbols.addKeyword("for", For)
  tokenizer.symbols.addKeyword("foreach", Foreach)
  tokenizer.symbols.addKeyword("if", If)
  tokenizer.symbols.addKeyword("else", Else)
  tokenizer.symbols.addKeyword("await", TokenType.Await)
  tokenizer.symbols.addKeyword("defer", Defer)
  tokenizer.symbols.addKeyword("try", Try)
  tokenizer.symbols.addKeyword("except", Except)
  tokenizer.symbols.addKeyword("finally", Finally)
  tokenizer.symbols.addKeyword("raise", TokenType.Raise)
  tokenizer.symbols.addKeyword("assert", TokenType.Assert)
  tokenizer.symbols.addKeyword("const", Const)
  tokenizer.symbols.addKeyword("let", Let)
  tokenizer.symbols.addKeyword("var", TokenType.Var)
  tokenizer.symbols.addKeyword("import", Import)
  tokenizer.symbols.addKeyword("yield", TokenType.Yield)
  tokenizer.symbols.addKeyword("return", TokenType.Return)
  tokenizer.symbols.addKeyword("object", Object)
  tokenizer.symbols.addKeyword("export", Export)
  tokenizer.symbols.addKeyword("block", TokenType.Block)
  tokenizer.symbols.addKeyword("switch", TokenType.Switch)
  # These are more like expressions with a reserved
  # name that produce a value of a builtin type,
  # but we don't need to care about that until
  # we're in the parsing/compilation steps so
  # it's fine
  tokenizer.symbols.addKeyword("true", True)
  tokenizer.symbols.addKeyword("false", False)
  tokenizer.symbols.addKeyword("ref", TokenType.Ref)
  tokenizer.symbols.addKeyword("ptr", TokenType.Ptr)
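  # Multi-character operators and generic symbols: these are all
  # registered under the catch-all Symbol token type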
  for sym in [">", "<", "=", "~", "/", "+", "-", "_", "*", "?", "@", ":", "==", "!=",
              ">=", "<=", "+=", "-=", "/=", "*=", "**=", "!", "%", "&", "|", "^",
              ">>", "<<"]:
    tokenizer.symbols.addSymbol(sym, Symbol)
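# Minimal usage sketch, assuming ../frontend/parsing/lexer exports a
# newLexer() constructor (the name is an assumption following common
# Nim convention, not confirmed by this file)
when isMainModule:
  var tokenizer = newLexer()
  tokenizer.fillSymbolTable()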