import ../frontend/lexer


proc fillSymbolTable*(tokenizer: Lexer) =
    ## Initializes the Lexer's symbol
    ## table with the builtin symbols
    ## and keywords

    # 1-byte symbols
    tokenizer.symbols.addSymbol("{", LeftBrace)
    tokenizer.symbols.addSymbol("}", RightBrace)
    tokenizer.symbols.addSymbol("(", LeftParen)
    tokenizer.symbols.addSymbol(")", RightParen)
    tokenizer.symbols.addSymbol("[", LeftBracket)
    tokenizer.symbols.addSymbol("]", RightBracket)
    tokenizer.symbols.addSymbol(".", Dot)
    tokenizer.symbols.addSymbol(",", Comma)
    tokenizer.symbols.addSymbol(";", Semicolon)
    # tokenizer.symbols.addSymbol("\n", Semicolon)  # TODO: Broken

    # Keywords
    tokenizer.symbols.addKeyword("type", TokenType.Type)
    tokenizer.symbols.addKeyword("enum", Enum)
    tokenizer.symbols.addKeyword("case", Case)
    tokenizer.symbols.addKeyword("operator", Operator)
    tokenizer.symbols.addKeyword("generator", Generator)
    tokenizer.symbols.addKeyword("fn", TokenType.Function)
    tokenizer.symbols.addKeyword("coroutine", Coroutine)
    tokenizer.symbols.addKeyword("break", TokenType.Break)
    tokenizer.symbols.addKeyword("continue", Continue)
    tokenizer.symbols.addKeyword("while", While)
    tokenizer.symbols.addKeyword("for", For)
    tokenizer.symbols.addKeyword("foreach", Foreach)
    tokenizer.symbols.addKeyword("if", If)
    tokenizer.symbols.addKeyword("else", Else)
    tokenizer.symbols.addKeyword("await", TokenType.Await)
    tokenizer.symbols.addKeyword("defer", Defer)
    tokenizer.symbols.addKeyword("try", Try)
    tokenizer.symbols.addKeyword("except", Except)
    tokenizer.symbols.addKeyword("finally", Finally)
    tokenizer.symbols.addKeyword("raise", TokenType.Raise)
    tokenizer.symbols.addKeyword("assert", TokenType.Assert)
    tokenizer.symbols.addKeyword("const", Const)
    tokenizer.symbols.addKeyword("let", Let)
    tokenizer.symbols.addKeyword("var", TokenType.Var)
    tokenizer.symbols.addKeyword("import", Import)
    tokenizer.symbols.addKeyword("yield", TokenType.Yield)
    tokenizer.symbols.addKeyword("return", TokenType.Return)
    tokenizer.symbols.addKeyword("object", Object)

    # These are more like expressions with a reserved
    # name that produce a value of a builtin type,
    # but we don't need to care about that until
    # we're in the parsing/compilation steps, so
    # it's fine
    tokenizer.symbols.addKeyword("nan", NotANumber)
    tokenizer.symbols.addKeyword("inf", Infinity)
    tokenizer.symbols.addKeyword("nil", TokenType.Nil)
    tokenizer.symbols.addKeyword("true", True)
    tokenizer.symbols.addKeyword("false", False)
    tokenizer.symbols.addKeyword("ref", TokenType.Ref)
    tokenizer.symbols.addKeyword("ptr", TokenType.Ptr)

    for sym in [">", "<", "=", "~", "/", "+", "-", "_", "*",
                "?", "@", ":", "==", "!=", ">=", "<=", "+=",
                "-=", "/=", "*=", "**=", "!"]:
        tokenizer.symbols.addSymbol(sym, Symbol)
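

# A minimal usage sketch of fillSymbolTable. It assumes ../frontend/lexer
# exports a `newLexer` constructor and a `lex(source, file)` proc yielding
# tokens; both names and signatures are assumptions made for illustration,
# not confirmed by this module.
when isMainModule:
    var tokenizer = newLexer()     # hypothetical constructor from ../frontend/lexer
    tokenizer.fillSymbolTable()    # register builtin symbols and keywords
    # Tokenize a small snippet; "fn" and "return" should now come back
    # as keyword tokens rather than identifiers
    for token in tokenizer.lex("fn answer { return 42; }", "<stdin>"):
        echo token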