BYE BYE SEMICOLONS!

This commit is contained in:
Mattia Giambirtone 2022-07-15 19:48:12 +02:00
parent 60028ed664
commit 2072f34d4c
3 changed files with 10 additions and 6 deletions

View File

@ -565,6 +565,8 @@ proc next(self: Lexer) =
elif self.match("\n"):
# New line
self.incLine()
if not self.getToken("\n").isNil():
self.createToken(Semicolon)
elif self.match("`"):
# Stropped token
self.parseBackticks()
@ -599,7 +601,7 @@ proc next(self: Lexer) =
elif self.match("#"):
if not self.match("pragma["):
# Inline comments
while not (self.check("\n") or self.done()):
while not (self.match("\n") or self.done()):
discard self.step()
self.createToken(Comment)
else:

View File

@ -13,6 +13,7 @@
# limitations under the License.
import strformat
import strutils
type
@ -78,7 +79,7 @@ type
proc `$`*(self: Token): string =
  ## Stringifies a Token into a human-readable debug representation
  ## (kind, lexeme, line, start/stop position and preceding spaces);
  ## returns the literal string "nil" when the token reference is nil
  if self != nil:
    # NOTE(review): the next two assignments are the pre-/post-change
    # versions of the same line from a diff rendering (the second one,
    # which escapes the lexeme, wins) — only one exists in the real file
    result = &"Token(kind={self.kind}, lexeme='{$(self.lexeme)}', line={self.line}, pos=({self.pos.start}, {self.pos.stop}), spaces={self.spaces})"
    result = &"Token(kind={self.kind}, lexeme={($self.lexeme).escape()}, line={self.line}, pos=({self.pos.start}, {self.pos.stop}), spaces={self.spaces})"
  else:
    result = "nil"

View File

@ -269,13 +269,13 @@ proc runFile(f: string, interactive: bool = false, fromString: bool = false) =
let relPos = tokenizer.getRelPos(exc.line)
let line = tokenizer.getSource().splitLines()[exc.line - 1].strip()
stderr.styledWriteLine(fgRed, "A fatal error occurred while parsing ", fgYellow, &"'{exc.file}'", fgRed, ", module ",
fgYellow, &"'{exc.file}'", fgRed, ", line ", fgYellow, $exc.line, fgRed, " at ", fgYellow, &"'{exc.lexeme}'",
fgYellow, &"'{exc.file}'", fgRed, ", line ", fgYellow, $exc.line, fgRed, " at ", fgYellow, &"'{exc.lexeme.escape()}'",
fgRed, ": ", fgGreen , getCurrentExceptionMsg())
styledEcho fgBlue, "Source line: " , fgDefault, line
styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start)
except ParseError:
let exc = ParseError(getCurrentException())
let lexeme = exc.token.lexeme
let lexeme = exc.token.lexeme.escape()
let lineNo = exc.token.line
let relPos = tokenizer.getRelPos(lineNo)
let fn = parser.getCurrentFunction()
@ -290,7 +290,7 @@ proc runFile(f: string, interactive: bool = false, fromString: bool = false) =
styledEcho fgCyan, " ".repeat(len("Source line: ")) & "^".repeat(relPos.stop - relPos.start)
except CompileError:
let exc = CompileError(getCurrentException())
let lexeme = exc.node.token.lexeme
let lexeme = exc.node.token.lexeme.escape()
let lineNo = exc.node.token.line
let relPos = tokenizer.getRelPos(lineNo)
let line = tokenizer.getSource().splitLines()[lineNo - 1].strip()
@ -380,7 +380,8 @@ proc fillSymbolTable(tokenizer: Lexer) =
tokenizer.symbols.addSymbol("]", RightBracket)
tokenizer.symbols.addSymbol(".", Dot)
tokenizer.symbols.addSymbol(",", Comma)
tokenizer.symbols.addSymbol(";", Semicolon)
# tokenizer.symbols.addSymbol(";", Semicolon)
tokenizer.symbols.addSymbol("\n", Semicolon)
# Keywords
tokenizer.symbols.addKeyword("type", TokenType.Type)
tokenizer.symbols.addKeyword("enum", Enum)