diff --git a/src/frontend/lexer.nim b/src/frontend/lexer.nim
index ea3d580..6995980 100644
--- a/src/frontend/lexer.nim
+++ b/src/frontend/lexer.nim
@@ -565,6 +565,8 @@ proc next(self: Lexer) =
     elif self.match("\n"):
         # New line
         self.incLine()
+        if not self.getToken("\n").isNil():
+            self.createToken(Semicolon)
     elif self.match("`"):
         # Stropped token
         self.parseBackticks()
@@ -599,7 +601,7 @@ proc next(self: Lexer) =
     elif self.match("#"):
         if not self.match("pragma["):
             # Inline comments
-            while not (self.check("\n") or self.done()):
+            while not (self.match("\n") or self.done()):
                 discard self.step()
             self.createToken(Comment)
         else:
diff --git a/src/frontend/meta/token.nim b/src/frontend/meta/token.nim
index d326694..516b93a 100644
--- a/src/frontend/meta/token.nim
+++ b/src/frontend/meta/token.nim
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import strformat
+import strutils
 
 
 type
@@ -78,7 +79,7 @@ type
 proc `$`*(self: Token): string =
     ## Strinfifies
     if self != nil:
-        result = &"Token(kind={self.kind}, lexeme='{$(self.lexeme)}', line={self.line}, pos=({self.pos.start}, {self.pos.stop}), spaces={self.spaces})"
+        result = &"Token(kind={self.kind}, lexeme={($self.lexeme).escape()}, line={self.line}, pos=({self.pos.start}, {self.pos.stop}), spaces={self.spaces})"
     else:
         result = "nil"
 
diff --git a/src/main.nim b/src/main.nim
index 1d2b101..8611373 100644
--- a/src/main.nim
+++ b/src/main.nim
@@ -269,13 +269,13 @@ proc runFile(f: string, interactive: bool = false, fromString: bool = false) =
         let relPos = tokenizer.getRelPos(exc.line)
         let line = tokenizer.getSource().splitLines()[exc.line - 1].strip()
         stderr.styledWriteLine(fgRed, "A fatal error occurred while parsing ", fgYellow, &"'{exc.file}'", fgRed, ", module ",
-                fgYellow, &"'{exc.file}'", fgRed, ", line ", fgYellow, $exc.line, fgRed, " at ", fgYellow, &"'{exc.lexeme}'",
+                fgYellow, &"'{exc.file}'", fgRed, ", line ", fgYellow, $exc.line, fgRed, " at ", fgYellow, &"'{exc.lexeme.escape()}'",
                 fgRed, ": ", fgGreen , getCurrentExceptionMsg())
         styledEcho fgBlue, "Source line: " , fgDefault, line
         styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start)
     except ParseError:
         let exc = ParseError(getCurrentException())
-        let lexeme = exc.token.lexeme
+        let lexeme = exc.token.lexeme.escape()
         let lineNo = exc.token.line
         let relPos = tokenizer.getRelPos(lineNo)
         let fn = parser.getCurrentFunction()
@@ -290,7 +290,7 @@ proc runFile(f: string, interactive: bool = false, fromString: bool = false) =
         styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start)
     except CompileError:
         let exc = CompileError(getCurrentException())
-        let lexeme = exc.node.token.lexeme
+        let lexeme = exc.node.token.lexeme.escape()
         let lineNo = exc.node.token.line
         let relPos = tokenizer.getRelPos(lineNo)
         let line = tokenizer.getSource().splitLines()[lineNo - 1].strip()
@@ -380,7 +380,8 @@ proc fillSymbolTable(tokenizer: Lexer) =
     tokenizer.symbols.addSymbol("]", RightBracket)
     tokenizer.symbols.addSymbol(".", Dot)
     tokenizer.symbols.addSymbol(",", Comma)
-    tokenizer.symbols.addSymbol(";", Semicolon)
+    # tokenizer.symbols.addSymbol(";", Semicolon)
+    tokenizer.symbols.addSymbol("\n", Semicolon)
     # Keywords
     tokenizer.symbols.addKeyword("type", TokenType.Type)
     tokenizer.symbols.addKeyword("enum", Enum)
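
Note on the escape() calls above: a minimal sketch, assuming only the Nim standard
library, of why raw lexemes become a problem once "\n" is registered as the
Semicolon symbol. A token's lexeme can now be a literal newline, which would split
any single-line error message or token dump that interpolates it. The Token type
below is a hypothetical stand-in with just the fields the example needs, not the
real object from token.nim:

    import strutils
    import strformat

    type
        Token = object
            lexeme: string  # may now hold "\n" for newline-as-Semicolon tokens
            line: int

    let tok = Token(lexeme: "\n", line: 42)

    # Interpolating the raw lexeme splits the message across two lines:
    echo &"Token(lexeme='{tok.lexeme}', line={tok.line})"

    # escape() renders the newline as a quoted "\x0A" escape sequence instead,
    # keeping the whole message on one line:
    echo &"Token(lexeme={tok.lexeme.escape()}, line={tok.line})"

strutils.escape() emits non-printable characters as \xHH sequences and wraps the
result in double quotes, which matches how the updated `$` proc and the runFile()
error handlers now present lexemes.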