Minor changes to the lexer
parent efde97ead2
commit 32f375c40c
@@ -80,6 +80,7 @@ const reserved = to_table({
     "lambda": TokenType.Lambda, "class": TokenType.Class,
     "async": TokenType.Async, "import": TokenType.Import,
     "isnot": TokenType.IsNot, "from": TokenType.From,
+    "let": TokenType.Let, "const": TokenType.Const
 })

 type
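
The table above drives the usual keyword-vs-identifier split: scan the whole lexeme first, then consult the table. A minimal standalone sketch of that lookup (the TokenType members here are a small subset of the real enum, and classify is a hypothetical helper, not part of this commit):

import std/tables

type TokenType = enum
  Identifier, Let, Const        # subset of the real enum, for illustration

let reserved = {"let": TokenType.Let, "const": TokenType.Const}.toTable()

func classify(lexeme: string): TokenType =
  ## Keywords come from the table; everything else is an identifier
  if lexeme in reserved: reserved[lexeme]
  else: TokenType.Identifier

echo classify("const")   # Const
echo classify("x")       # Identifier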
@@ -95,22 +96,20 @@ type
     errorMessage*: string


-func newLexer*(self: Lexer = nil): Lexer =
+func initLexer*(self: Lexer = nil): Lexer =
     ## Initializes the lexer or resets
     ## the state of an existing one
     if self == nil:
         new(result)
-        result = Lexer(source: "", tokens: @[], line: 1, start: 0, current: 0,
-                       errored: false, file: "", errorMessage: "")
-    else:
-        self.source = ""
-        self.tokens = @[]
-        self.line = 1
-        self.start = 0
-        self.current = 0
-        self.errored = false
-        self.file = ""
-        self.errorMessage = ""
-        result = self
+    if self != nil:
+        result = self
+    result.source = ""
+    result.tokens = @[]
+    result.line = 1
+    result.start = 0
+    result.current = 0
+    result.errored = false
+    result.file = ""
+    result.errorMessage = ""


 func done(self: Lexer): bool =
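
After the rewrite both paths converge: a nil argument gets a fresh allocation via new(result), an existing instance is adopted via result = self, and the field resets are written once against result instead of twice. Hypothetical caller code (not part of this diff) showing both uses:

var lexer = initLexer()       # nil default: allocates a fresh Lexer
discard lexer.initLexer()     # existing instance: state reset in place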
@@ -150,6 +149,8 @@ func error(self: Lexer, message: string) =
     ## for the lexer. The lex method will not
     ## continue tokenizing if it finds out
     ## an error occurred
+    if self.errored:
+        return
     self.errored = true
     self.errorMessage = &"A fatal error occurred while parsing '{self.file}', line {self.line} at '{self.peek()}' -> {message}"

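
The added guard makes error reporting first-error-wins: once errored is set, later calls return before overwriting errorMessage. The same pattern in a self-contained sketch (Reporter is an illustrative stand-in for the Lexer, not a type from this commit):

type Reporter = ref object
  errored: bool
  errorMessage: string

proc error(self: Reporter, message: string) =
  ## Keep only the first reported error
  if self.errored:
    return
  self.errored = true
  self.errorMessage = message

let r = Reporter()
r.error("first")
r.error("second")
echo r.errorMessage   # "first" -- the second report was ignored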
@@ -181,7 +182,6 @@ func check(self: Lexer, what: string): bool =
     return true


-
 func check(self: Lexer, what: openarray[char]): bool =
     ## Calls self.check() in a loop with
     ## each character from the given seq of
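
The two check routines are ordinary Nim overloads: one inspects a single candidate, the other takes an openarray[char] and delegates per element, as the doc comment says. A sketch of that delegation shape with hypothetical free-standing functions (the real methods consult the lexer's current position):

func check(current: char, what: char): bool =
  current == what

func check(current: char, what: openarray[char]): bool =
  ## Delegates to the single-character overload for each candidate
  for c in what:
    if check(current, c):
      return true
  return false

echo check('+', ['+', '-'])   # true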
@@ -306,7 +306,7 @@ func parseNumber(self: Lexer) =
     self.createToken(kind)


-proc parseIdentifier(self: Lexer) =
+func parseIdentifier(self: Lexer) =
     ## Parses identifiers. Note that
     ## multi-character tokens such as
     ## UTF runes are not supported
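
Several declarations in this commit move from proc to func. In Nim, func f() is exactly proc f() {.noSideEffect.}, so the compiler now verifies these routines touch no mutable global state (mutating fields through the self parameter is still allowed). A tiny illustration, not from this commit:

func double(x: int): int =
  x * 2                              # ok: depends only on its parameter

var counter = 0
proc bumpProc() = inc counter        # fine as a proc
# func bumpFunc() = inc counter      # rejected: 'bumpFunc' can have side effects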
@@ -321,7 +321,7 @@ proc parseIdentifier(self: Lexer) =
     self.createToken(TokenType.Identifier)


-proc next(self: Lexer) =
+func next(self: Lexer) =
     ## Scans a single token. This method is
     ## called iteratively until the source
     ## file reaches EOF
@@ -380,13 +380,13 @@ proc next(self: Lexer) =
     self.error(&"Unexpected token '{single}'")


-proc lex*(self: Lexer, source, file: string): seq[Token] =
+func lex*(self: Lexer, source, file: string): seq[Token] =
     ## Lexes a source file, converting a stream
     ## of characters into a series of tokens.
     ## If an error occurs, this procedure
     ## returns an empty sequence and the lexer's
     ## errored and errorMessage fields will be set
-    discard self.newLexer()
+    discard self.initLexer()
     self.source = source
     self.file = file
     while not self.done():
@@ -396,4 +396,4 @@ proc lex*(self: Lexer, source, file: string): seq[Token] =
         return @[]
     self.tokens.add(Token(kind: TokenType.EndOfFile, lexeme: "EOF",
                           line: self.line))
     return self.tokens
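
Putting the renames together, a hypothetical driver for the updated API (the source string and file name are made up, and this assumes the lexer module from this commit is imported; file paths are not shown in this view):

let lexer = initLexer()
let tokens = lexer.lex("let x = 1", "example.nim")
if lexer.errored:
  echo lexer.errorMessage
else:
  echo tokens.len    # the stream always ends with the EndOfFile token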
@@ -56,11 +56,11 @@ type
     LessOrEqual, GreaterOrEqual,         # >= <=
     ExclamationMark, DoubleEqual,        # ! ==
     NotEqual, RightShift, LeftShift,     # != >> <<
     LogicalAnd, LogicalOr, FloorDiv,     # && || //
     InplaceAdd, InplaceSub, InplaceDiv,  # += -= /=
     InplaceMod, InplaceMul, InplaceXor,  # %= *= ^=
     InplaceAnd, InplaceOr, InplaceNot,   # &= |= ~=
     InplaceFloorDiv, InplacePow          # //= **=

     # Miscellaneous

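
Several of these operators are prefixes of longer ones (// is a prefix of //=, > of >= and >>), so the scanner must try the longest spelling first. A self-contained sketch of that maximal-munch rule (illustrative only; the real logic lives in the lexer's next/check routines above):

func matchOperator(src: string, pos: int): string =
  ## Returns the longest operator starting at pos, or "" if none matches
  for op in ["//=", "**=", "//", ">>", ">=", "<=", "=="]:   # longest first
    if pos + op.len <= src.len and src[pos ..< pos + op.len] == op:
      return op
  return ""

echo matchOperator("a //= b", 2)   # "//=" -- not just "//"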