# Copyright 2021 Mattia Giambirtone
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# A simple lexer module

import strutils
import strformat
import tables

import ../objects/token
import ../objects/error


# Table of all single-character tokens
const tokens = to_table({
    '(': TokenType.LeftParen, ')': TokenType.RightParen,
    '-': TokenType.Minus, '+': TokenType.Plus,
    '*': TokenType.Mul, '/': TokenType.Div,
    '%': TokenType.Modulo, '^': TokenType.Exp,
    ',': TokenType.Comma})

# All the named constants (such as pi).
# Since they're constant, we don't even need to bother adding another
# AST node kind: we can just map each name to a float literal ;)
const constants = to_table({
    "pi": Token(kind: TokenType.Float, lexeme: "3.141592653589793"),
    "e": Token(kind: TokenType.Float, lexeme: "2.718281828459045"),
    "tau": Token(kind: TokenType.Float, lexeme: "6.283185307179586"),
    "inf": Token(kind: TokenType.Float, lexeme: "inf"),
    "nan": Token(kind: TokenType.Float, lexeme: "nan")
})

# Since the math functions are hardcoded as well, we can use an array
const functions = ["sin", "cos", "tan", "cosh",
                   "tanh", "sinh", "arccos", "arcsin",
                   "arctan", "log", "log10", "ln", "log2",
                   "hypot", "sqrt", "cbrt", "arctanh", "arcsinh",
                   "arccosh"]

type
    Lexer* = ref object
        ## A lexer object
        source*: string
        tokens*: seq[Token]
        start*: int
        current*: int


func initLexer*(): Lexer =
    ## Initializes the lexer in an empty state
    result = Lexer(source: "", tokens: @[], start: 0, current: 0)


func done(self: Lexer): bool =
    ## Returns true if we reached EOF
    result = self.current >= self.source.len


proc step(self: Lexer): char =
    ## Steps one character forward in the
    ## source. A null terminator is returned
    ## if the lexer is at EOF
    if self.done():
        return '\0'
    self.current = self.current + 1
    result = self.source[self.current - 1]


proc peek(self: Lexer): char =
    ## Returns the current character in the
    ## source without consuming it.
    ## A null terminator is returned
    ## if the lexer is at EOF
    if self.done():
        result = '\0'
    else:
        result = self.source[self.current]


func createToken(self: Lexer, tokenType: TokenType): Token =
    ## Creates a token object for later use in the parser.
    ## The lexeme is the slice of the source spanning the
    ## current token, from its start to the current position
    result = Token(kind: tokenType,
                   lexeme: self.source[self.start..self.current - 1])
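

# ----------------------------------------------------------------------------
# A minimal sketch of the main scanning loop, tying together step(), peek()
# and createToken(). This is an illustration of how the pieces above could be
# driven, not the module's actual tokenizer: it assumes the TokenType enum
# also exposes Ident and EndOfFile members (not confirmed by the code above),
# and it omits the error reporting that ../objects/error presumably provides.
# ----------------------------------------------------------------------------
proc lex*(self: Lexer, source: string): seq[Token] =
    ## Tokenizes the given source string (sketch)
    self.source = source
    self.tokens = @[]
    self.start = 0
    self.current = 0
    while not self.done():
        self.start = self.current
        let c = self.step()
        if c in tokens:
            # Single-character operators, parentheses and commas
            self.tokens.add(self.createToken(tokens[c]))
        elif c.isDigit():
            # Number literals: greedily consume digits and dots
            # (numeric validation is left out of this sketch)
            while self.peek().isDigit() or self.peek() == '.':
                discard self.step()
            self.tokens.add(self.createToken(TokenType.Float))
        elif c.isAlphaAscii():
            # Names: known constants map directly to their precomputed
            # float-literal tokens, known function names become identifiers
            while self.peek().isAlphaAscii():
                discard self.step()
            let name = self.source[self.start..self.current - 1]
            if name in constants:
                self.tokens.add(constants[name])
            elif name in functions:
                # TokenType.Ident is an assumed enum member
                self.tokens.add(self.createToken(TokenType.Ident))
            # An unknown name would be reported via ../objects/error
        elif c.isSpaceAscii():
            discard  # Whitespace is skipped
        # An unexpected character would likewise be an error
    # TokenType.EndOfFile is an assumed enum member marking the
    # end of the token stream for the parser
    self.tokens.add(Token(kind: TokenType.EndOfFile, lexeme: ""))
    result = self.tokens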
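

# Hypothetical usage of the sketch above; the input expression and the
# printed field are illustrative only
when isMainModule:
    var lexer = initLexer()
    for token in lexer.lex("2 + sin(pi) * 3.5"):
        echo token.lexeme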