# Builtins & external libs import strformat import strutils import terminal import parseopt import times import os # Thanks art <3 import jale/editor as ed import jale/templates import jale/plugin/defaults import jale/plugin/editor_history import jale/keycodes import jale/multiline # Our stuff import frontend/lexer as l import frontend/parser as p import frontend/compiler as c import backend/vm as v import util/serializer as s import util/debugger import config # Forward declarations proc fillSymbolTable(tokenizer: Lexer) proc getLineEditor: LineEditor # Handy dandy compile-time constants const debugLexer {.booldefine.} = false const debugParser {.booldefine.} = false const debugCompiler {.booldefine.} = false const debugSerializer {.booldefine.} = false const debugRuntime {.booldefine.} = false when debugSerializer: import nimSHA2 proc repl(vm: PeonVM = newPeonVM()) = styledEcho fgMagenta, "Welcome into the peon REPL!" var keep = true tokens: seq[Token] = @[] tree: seq[Declaration] = @[] compiled: Chunk serialized: Serialized tokenizer = newLexer() parser = newParser() compiler = newCompiler(replMode=true) debugger = newDebugger() serializer = newSerializer() editor = getLineEditor() input: string current: string tokenizer.fillSymbolTable() editor.bindEvent(jeQuit): stdout.styledWriteLine(fgGreen, "Goodbye!") editor.prompt = "" keep = false input = "" editor.bindKey("ctrl+a"): editor.content.home() editor.bindKey("ctrl+e"): editor.content.`end`() while keep: try: # We incrementally add content to the input # so that you can, for example, define a function # then press enter and use it at the next iteration # of the read loop current = editor.read() if current.len() == 0: continue elif current == "#clearInput": input = "" continue elif current == "#clear": stdout.write("\x1Bc") continue elif current == "#showInput": echo input continue input &= &"{current}\n" tokens = tokenizer.lex(input, "stdin") if tokens.len() == 0: continue when debugLexer: styledEcho fgCyan, "Tokenization step:" for i, token in tokens: if i == tokens.high(): # Who cares about EOF? break styledEcho fgGreen, "\t", $token echo "" tree = parser.parse(tokens, "stdin") if tree.len() == 0: continue when debugParser: styledEcho fgCyan, "Parsing step:" for node in tree: styledEcho fgGreen, "\t", $node echo "" compiled = compiler.compile(tree, "stdin") when debugCompiler: styledEcho fgCyan, "Compilation step:\n" debugger.disassembleChunk(compiled, "stdin") echo "" serializer.dumpFile(compiled, "stdin", "stdin.pbc") serialized = serializer.loadFile("stdin.pbc") when debugSerializer: styledEcho fgCyan, "Serialization step: " styledEcho fgBlue, "\t- Peon version: ", fgYellow, &"{serialized.version.major}.{serialized.version.minor}.{serialized.version.patch}", fgBlue, " (commit ", fgYellow, serialized.commit[0..8], fgBlue, ") on branch ", fgYellow, serialized.branch stdout.styledWriteLine(fgBlue, "\t- Compilation date & time: ", fgYellow, fromUnix(serialized.compileDate).format("d/M/yyyy HH:mm:ss")) stdout.styledWrite(fgBlue, &"\t- Constants segment: ") if serialized.chunk.consts == compiled.consts: styledEcho fgGreen, "OK" else: styledEcho fgRed, "Corrupted" stdout.styledWrite(fgBlue, &"\t- Code segment: ") if serialized.chunk.code == compiled.code: styledEcho fgGreen, "OK" else: styledEcho fgRed, "Corrupted" stdout.styledWrite(fgBlue, "\t- Line info segment: ") if serialized.chunk.lines == compiled.lines: styledEcho fgGreen, "OK" else: styledEcho fgRed, "Corrupted" stdout.styledWrite(fgBlue, "\t- CFI segment: ") if serialized.chunk.cfi == compiled.cfi: styledEcho fgGreen, "OK" else: styledEcho fgRed, "Corrupted" when debugRuntime: styledEcho fgCyan, "\n\nExecution step: " vm.run(serialized.chunk) except LexingError: input = "" let exc = LexingError(getCurrentException()) let relPos = tokenizer.getRelPos(exc.line) let line = tokenizer.getSource().splitLines()[exc.line - 1].strip() stderr.styledWriteLine(fgRed, "A fatal error occurred while parsing ", fgYellow, &"'{exc.file}'", fgRed, ", module ", fgYellow, &"'{exc.file.extractFilename()}'", fgRed, ", line ", fgYellow, $exc.line, fgRed, " at ", fgYellow, &"'{exc.lexeme}'", fgRed, ": ", fgGreen , getCurrentExceptionMsg()) styledEcho fgBlue, "Source line: " , fgDefault, line styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start) except ParseError: input = "" let exc = ParseError(getCurrentException()) let lexeme = exc.token.lexeme let lineNo = exc.token.line let relPos = tokenizer.getRelPos(lineNo) let fn = parser.getCurrentFunction() let line = tokenizer.getSource().splitLines()[lineNo - 1].strip() var fnMsg = "" if fn != nil and fn.kind == funDecl: fnMsg &= &"in function '{FunDecl(fn).name.token.lexeme}'" stderr.styledWriteLine(fgRed, "A fatal error occurred while parsing ", fgYellow, &"'{exc.file}'", fgRed, ", module ", fgYellow, &"'{exc.file}'", fgRed, ", line ", fgYellow, $lineNo, fgRed, " at ", fgYellow, &"'{lexeme}'", fgRed, ": ", fgGreen , getCurrentExceptionMsg()) styledEcho fgBlue, "Source line: " , fgDefault, line styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start) except CompileError: let exc = CompileError(getCurrentException()) let lexeme = exc.node.token.lexeme let lineNo = exc.node.token.line let relPos = tokenizer.getRelPos(lineNo) let line = tokenizer.getSource().splitLines()[lineNo - 1].strip() var fn = compiler.getCurrentFunction() var fnMsg = "" if fn != nil and fn.kind == funDecl: fnMsg &= &"in function '{FunDecl(fn).name.token.lexeme}'" stderr.styledWriteLine(fgRed, "A fatal error occurred while compiling ", fgYellow, &"'{exc.file}'", fgRed, ", module ", fgYellow, &"'{exc.module}'", fgRed, ", line ", fgYellow, $lineNo, fgRed, " at ", fgYellow, &"'{lexeme}'", fgRed, ": ", fgGreen , getCurrentExceptionMsg()) styledEcho fgBlue, "Source line: " , fgDefault, line styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start) except SerializationError: let exc = SerializationError(getCurrentException()) stderr.styledWriteLine(fgRed, "A fatal error occurred while (de-)serializing", fgYellow, &"'{exc.file}'", fgGreen, ": ", getCurrentExceptionMsg()) quit(0) proc runFile(f: string, interactive: bool = false, fromString: bool = false) = var tokens: seq[Token] = @[] tree: seq[Declaration] = @[] compiled: Chunk serialized: Serialized tokenizer = newLexer() parser = newParser() compiler = newCompiler() debugger = newDebugger() serializer = newSerializer() vm = newPeonVM() input: string tokenizer.fillSymbolTable() try: var f = f if not fromString: if not f.endsWith(".pn"): f &= ".pn" input = readFile(f) else: input = f tokens = tokenizer.lex(input, f) if tokens.len() == 0: return when debugLexer: styledEcho fgCyan, "Tokenization step:" for i, token in tokens: if i == tokens.high(): # Who cares about EOF? break styledEcho fgGreen, "\t", $token echo "" tree = parser.parse(tokens, f) if tree.len() == 0: return when debugParser: styledEcho fgCyan, "Parsing step:" for node in tree: styledEcho fgGreen, "\t", $node echo "" compiled = compiler.compile(tree, f) when debugCompiler: styledEcho fgCyan, "Compilation step:\n" debugger.disassembleChunk(compiled, f) echo "" var path = splitFile(f).dir if path.len() > 0: path &= "/" path &= splitFile(f).name & ".pbc" serializer.dumpFile(compiled, f, path) serialized = serializer.loadFile(path) when debugSerializer: styledEcho fgCyan, "Serialization step: " styledEcho fgBlue, "\t- Peon version: ", fgYellow, &"{serialized.version.major}.{serialized.version.minor}.{serialized.version.patch}", fgBlue, " (commit ", fgYellow, serialized.commit[0..8], fgBlue, ") on branch ", fgYellow, serialized.branch stdout.styledWriteLine(fgBlue, "\t- Compilation date & time: ", fgYellow, fromUnix(serialized.compileDate).format("d/M/yyyy HH:mm:ss")) stdout.styledWrite(fgBlue, &"\t- Constants segment: ") if serialized.chunk.consts == compiled.consts: styledEcho fgGreen, "OK" else: styledEcho fgRed, "Corrupted" stdout.styledWrite(fgBlue, &"\t- Code segment: ") if serialized.chunk.code == compiled.code: styledEcho fgGreen, "OK" else: styledEcho fgRed, "Corrupted" stdout.styledWrite(fgBlue, "\t- Line info segment: ") if serialized.chunk.lines == compiled.lines: styledEcho fgGreen, "OK" else: styledEcho fgRed, "Corrupted" stdout.styledWrite(fgBlue, "\t- CFI segment: ") if serialized.chunk.cfi == compiled.cfi: styledEcho fgGreen, "OK" else: styledEcho fgRed, "Corrupted" when debugRuntime: styledEcho fgCyan, "\n\nExecution step: " vm.run(serialized.chunk) except LexingError: let exc = LexingError(getCurrentException()) let relPos = tokenizer.getRelPos(exc.line) let line = tokenizer.getSource().splitLines()[exc.line - 1].strip() stderr.styledWriteLine(fgRed, "A fatal error occurred while parsing ", fgYellow, &"'{exc.file}'", fgRed, ", module ", fgYellow, &"'{exc.file}'", fgRed, ", line ", fgYellow, $exc.line, fgRed, " at ", fgYellow, &"'{exc.lexeme.escape()}'", fgRed, ": ", fgGreen , getCurrentExceptionMsg()) styledEcho fgBlue, "Source line: " , fgDefault, line styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start) except ParseError: let exc = ParseError(getCurrentException()) let lexeme = exc.token.lexeme.escape() let lineNo = exc.token.line let relPos = tokenizer.getRelPos(lineNo) let fn = parser.getCurrentFunction() let line = tokenizer.getSource().splitLines()[lineNo - 1].strip() var fnMsg = "" if fn != nil and fn.kind == funDecl: fnMsg &= &"in function '{FunDecl(fn).name.token.lexeme}'" stderr.styledWriteLine(fgRed, "A fatal error occurred while parsing ", fgYellow, &"'{exc.file}'", fgRed, ", module ", fgYellow, &"'{exc.file}'", fgRed, ", line ", fgYellow, $lineNo, fgRed, " at ", fgYellow, &"'{lexeme}'", fgRed, ": ", fgGreen , getCurrentExceptionMsg()) styledEcho fgBlue, "Source line: " , fgDefault, line styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start) except CompileError: let exc = CompileError(getCurrentException()) let lexeme = exc.node.token.lexeme.escape() let lineNo = exc.node.token.line let relPos = tokenizer.getRelPos(lineNo) let line = tokenizer.getSource().splitLines()[lineNo - 1].strip() var fn = compiler.getCurrentFunction() var fnMsg = "" if fn != nil and fn.kind == funDecl: fnMsg &= &"in function '{FunDecl(fn).name.token.lexeme}'" stderr.styledWriteLine(fgRed, "A fatal error occurred while compiling ", fgYellow, &"'{exc.file}'", fgRed, ", module ", fgYellow, &"'{exc.module}'", fgRed, ", line ", fgYellow, $lineNo, fgRed, " at ", fgYellow, &"'{lexeme}'", fgRed, ": ", fgGreen , getCurrentExceptionMsg()) styledEcho fgBlue, "Source line: " , fgDefault, line styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start) except SerializationError: let exc = SerializationError(getCurrentException()) stderr.styledWriteLine(fgRed, "A fatal error occurred while (de-)serializing", fgYellow, &"'{exc.file}'", fgGreen, ": ", getCurrentExceptionMsg()) except IOError: stderr.styledWriteLine(fgRed, "An error occurred while trying to read ", fgYellow, &"'{f}'", fgGreen, &": {getCurrentExceptionMsg()}") except OSError: stderr.styledWriteLine(fgRed, "An error occurred while trying to read ", fgYellow, &"'{f}'", fgGreen, &": {osErrorMsg(osLastError())} [errno {osLastError()}]") if interactive: repl(vm) when isMainModule: setControlCHook(proc () {.noconv.} = quit(0)) var optParser = initOptParser(commandLineParams()) var file: string = "" var fromString: bool = false var interactive: bool = false for kind, key, value in optParser.getopt(): case kind: of cmdArgument: file = key of cmdLongOption: case key: of "help": echo HELP_MESSAGE quit() of "version": echo PEON_VERSION_STRING quit() of "string": file = key fromString = true of "interactive": interactive = true else: echo &"error: unkown option '{key}'" quit() of cmdShortOption: case key: of "h": echo HELP_MESSAGE quit() of "v": echo PEON_VERSION_STRING quit() of "s": file = key fromString = true of "i": interactive = true else: echo &"error: unkown option '{key}'" quit() else: echo "usage: peon [options] [filename.pn]" quit() # TODO: Use interactive/fromString options if file == "": repl() else: runFile(file, interactive, fromString) proc fillSymbolTable(tokenizer: Lexer) = ## Initializes the Lexer's symbol ## table with the builtin symbols ## and keywords # 1-byte symbols tokenizer.symbols.addSymbol("{", LeftBrace) tokenizer.symbols.addSymbol("}", RightBrace) tokenizer.symbols.addSymbol("(", LeftParen) tokenizer.symbols.addSymbol(")", RightParen) tokenizer.symbols.addSymbol("[", LeftBracket) tokenizer.symbols.addSymbol("]", RightBracket) tokenizer.symbols.addSymbol(".", Dot) tokenizer.symbols.addSymbol(",", Comma) tokenizer.symbols.addSymbol(";", Semicolon) # tokenizer.symbols.addSymbol("\n", Semicolon) # TODO: Broken # Keywords tokenizer.symbols.addKeyword("type", TokenType.Type) tokenizer.symbols.addKeyword("enum", Enum) tokenizer.symbols.addKeyword("case", Case) tokenizer.symbols.addKeyword("operator", Operator) tokenizer.symbols.addKeyword("generator", Generator) tokenizer.symbols.addKeyword("fn", TokenType.Function) tokenizer.symbols.addKeyword("coroutine", Coroutine) tokenizer.symbols.addKeyword("break", TokenType.Break) tokenizer.symbols.addKeyword("continue", Continue) tokenizer.symbols.addKeyword("while", While) tokenizer.symbols.addKeyword("for", For) tokenizer.symbols.addKeyword("foreach", Foreach) tokenizer.symbols.addKeyword("if", If) tokenizer.symbols.addKeyword("else", Else) tokenizer.symbols.addKeyword("await", TokenType.Await) tokenizer.symbols.addKeyword("defer", Defer) tokenizer.symbols.addKeyword("try", Try) tokenizer.symbols.addKeyword("except", Except) tokenizer.symbols.addKeyword("finally", Finally) tokenizer.symbols.addKeyword("raise", TokenType.Raise) tokenizer.symbols.addKeyword("assert", TokenType.Assert) tokenizer.symbols.addKeyword("const", Const) tokenizer.symbols.addKeyword("let", Let) tokenizer.symbols.addKeyword("var", TokenType.Var) tokenizer.symbols.addKeyword("import", Import) tokenizer.symbols.addKeyword("yield", TokenType.Yield) tokenizer.symbols.addKeyword("return", TokenType.Return) tokenizer.symbols.addKeyword("object", Object) # These are more like expressions with a reserved # name that produce a value of a builtin type, # but we don't need to care about that until # we're in the parsing/ compilation steps so # it's fine tokenizer.symbols.addKeyword("nan", NotANumber) tokenizer.symbols.addKeyword("inf", Infinity) tokenizer.symbols.addKeyword("nil", TokenType.Nil) tokenizer.symbols.addKeyword("true", True) tokenizer.symbols.addKeyword("false", False) tokenizer.symbols.addKeyword("ref", TokenType.Ref) tokenizer.symbols.addKeyword("ptr", TokenType.Ptr) for sym in [">", "<", "=", "~", "/", "+", "-", "_", "*", "?", "@", ":"]: tokenizer.symbols.addSymbol(sym, Symbol) proc getLineEditor: LineEditor = result = newLineEditor() result.prompt = "=> " result.populateDefaults() let history = result.plugHistory() result.bindHistory(history)