Removed capitalization from error messages. Added support for del and assert statements
This commit is contained in:
parent
3f1c6077bc
commit
d192d5e6b7
|
@ -87,6 +87,7 @@ ifStmt → "if" "(" expression ")" statement ( "else" statement )?; //
|
|||
returnStmt → "return" expression? ";"; // Returns from a function, illegal in top-level code
|
||||
breakStmt → "break" ";";
|
||||
assertStmt → "assert" expression ";";
|
||||
delStmt → "del" expression ";";
|
||||
continueStmt → "continue" ";";
|
||||
whileStmt → "while" "(" expression ")" statement; // While loops run until their condition is truthy
|
||||
blockStmt → "{" declaration* "}"; // Blocks create a new scope that lasts until they're closed
|
||||
|
|
|
@ -78,7 +78,7 @@ const reserved = to_table({
|
|||
"isnot": TokenType.IsNot, "from": TokenType.From,
|
||||
"let": TokenType.Let, "const": TokenType.Const,
|
||||
"assert": TokenType.Assert, "or": TokenType.LogicalOr,
|
||||
"and": TokenType.LogicalAnd
|
||||
"and": TokenType.LogicalAnd, "del": TokenType.Del
|
||||
})
|
||||
|
||||
type
|
||||
|
@ -197,10 +197,10 @@ proc match(self: Lexer, what: char): bool =
|
|||
## the given character, and consumes it.
|
||||
## Otherwise, false is returned
|
||||
if self.done():
|
||||
self.error("Unexpected EOF")
|
||||
self.error("unexpected EOF")
|
||||
return false
|
||||
elif not self.check(what):
|
||||
self.error(&"Expecting '{what}', got '{self.peek()}' instead")
|
||||
self.error(&"expecting '{what}', got '{self.peek()}' instead")
|
||||
return false
|
||||
self.current += 1
|
||||
return true
|
||||
|
@ -246,7 +246,7 @@ proc parseString(self: Lexer, delimiter: char, mode: string = "single") =
|
|||
if self.check('\n') and mode == "multi":
|
||||
self.line = self.line + 1
|
||||
else:
|
||||
self.error("Unexpected EOL while parsing string literal")
|
||||
self.error("unexpected EOL while parsing string literal")
|
||||
return
|
||||
if mode in ["raw", "multi"]:
|
||||
discard self.step()
|
||||
|
@ -290,14 +290,14 @@ proc parseString(self: Lexer, delimiter: char, mode: string = "single") =
|
|||
of '\\':
|
||||
self.source[self.current] = cast[char](0x5C)
|
||||
else:
|
||||
self.error(&"Invalid escape sequence '\\{self.peek()}'")
|
||||
self.error(&"invalid escape sequence '\\{self.peek()}'")
|
||||
return
|
||||
if self.done():
|
||||
self.error(&"Unexpected EOF while parsing string literal")
|
||||
self.error(&"unexpected EOF while parsing string literal")
|
||||
return
|
||||
if mode == "multi":
|
||||
if not self.match(delimiter.repeat(3)):
|
||||
self.error("Unexpected EOL while parsing multi-line string literal")
|
||||
self.error("unexpected EOL while parsing multi-line string literal")
|
||||
else:
|
||||
discard self.step()
|
||||
self.createToken(TokenType.String)
|
||||
|
@ -307,7 +307,7 @@ proc parseBinary(self: Lexer) =
|
|||
## Parses binary numbers
|
||||
while self.peek().isDigit():
|
||||
if not self.check(['0', '1']):
|
||||
self.error(&"Invalid digit '{self.peek()}' in binary literal")
|
||||
self.error(&"invalid digit '{self.peek()}' in binary literal")
|
||||
return
|
||||
discard self.step()
|
||||
self.createToken(TokenType.Binary)
|
||||
|
@ -321,7 +321,7 @@ proc parseOctal(self: Lexer) =
|
|||
## Parses octal numbers
|
||||
while self.peek().isDigit():
|
||||
if self.peek() notin '0'..'7':
|
||||
self.error(&"Invalid digit '{self.peek()}' in octal literal")
|
||||
self.error(&"invalid digit '{self.peek()}' in octal literal")
|
||||
return
|
||||
discard self.step()
|
||||
self.createToken(TokenType.Octal)
|
||||
|
@ -331,7 +331,7 @@ proc parseHex(self: Lexer) =
|
|||
## Parses hexadecimal numbers
|
||||
while self.peek().isAlphaNumeric():
|
||||
if not self.peek().isDigit() and self.peek().toLowerAscii() notin 'a'..'f':
|
||||
self.error(&"Invalid hexadecimal literal")
|
||||
self.error(&"invalid hexadecimal literal")
|
||||
return
|
||||
discard self.step()
|
||||
self.createToken(TokenType.Hex)
|
||||
|
@ -373,7 +373,7 @@ proc parseNumber(self: Lexer) =
|
|||
# TODO: Is there a better way?
|
||||
discard self.step()
|
||||
if not isDigit(self.peek()):
|
||||
self.error("Invalid float number literal")
|
||||
self.error("invalid float number literal")
|
||||
return
|
||||
kind = TokenType.Float
|
||||
while isDigit(self.peek()):
|
||||
|
@ -429,7 +429,7 @@ proc next(self: Lexer) =
|
|||
self.parseString(self.peek(-1), "bytes")
|
||||
else:
|
||||
# TODO: Format strings? (f"{hello}")
|
||||
self.error(&"Unknown string prefix '{single}'")
|
||||
self.error(&"unknown string prefix '{single}'")
|
||||
return
|
||||
elif single.isAlphaNumeric() or single == '_':
|
||||
self.parseIdentifier()
|
||||
|
@ -456,7 +456,7 @@ proc next(self: Lexer) =
|
|||
# Eventually we emit a single token
|
||||
self.createToken(tokens[single])
|
||||
else:
|
||||
self.error(&"Unexpected token '{single}'")
|
||||
self.error(&"unexpected token '{single}'")
|
||||
|
||||
|
||||
proc lex*(self: Lexer, source, file: string): seq[Token] =
|
||||
|
|
|
@ -40,7 +40,8 @@ type
|
|||
whileStmt,
|
||||
blockStmt,
|
||||
raiseStmt,
|
||||
assertStmt
|
||||
assertStmt,
|
||||
delStmt,
|
||||
fromStmt,
|
||||
importStmt,
|
||||
# An expression followed by a semicolon
|
||||
|
|
|
@ -35,7 +35,7 @@ type
|
|||
Function, Break, Lambda,
|
||||
Continue, Var, Let, Const, Is,
|
||||
Return, Async, Class, Import, From,
|
||||
IsNot, Raise, Assert
|
||||
IsNot, Raise, Assert, Del
|
||||
|
||||
# Basic types
|
||||
|
||||
|
|
|
@ -20,8 +20,6 @@ import meta/ast
|
|||
export token, ast
|
||||
|
||||
|
||||
|
||||
|
||||
type Parser* = ref object
|
||||
## A recursive-descent top-down
|
||||
## parser implementation
|
||||
|
@ -46,6 +44,7 @@ proc initParser*(self: Parser = nil): Parser =
|
|||
|
||||
|
||||
template endOfFile: Token = Token(kind: TokenType.EndOfFile, lexeme: "", line: -1)
|
||||
template endOfLine(msg: string) = discard self.expect(TokenType.Semicolon, msg)
|
||||
|
||||
|
||||
proc peek(self: Parser, distance: int = 0): Token =
|
||||
|
@ -144,7 +143,7 @@ proc expect(self: Parser, kind: TokenType, message: string = ""): bool =
|
|||
else:
|
||||
result = false
|
||||
if message.len() == 0:
|
||||
self.error(&"Expecting token of kind {kind}, found {self.peek().kind} instead")
|
||||
self.error(&"expecting token of kind {kind}, found {self.peek().kind} instead")
|
||||
else:
|
||||
self.error(message)
|
||||
|
||||
|
@ -175,10 +174,10 @@ proc primary(self: Parser): ASTNode =
|
|||
of TokenType.LeftParen:
|
||||
discard self.step()
|
||||
result = self.expression()
|
||||
if self.expect(TokenType.RightParen, "Unmatched '('"):
|
||||
if self.expect(TokenType.RightParen, "unmatched '('"):
|
||||
result = newASTNode(self.peek(-3), NodeKind.groupingExpr, @[result])
|
||||
of TokenType.RightParen:
|
||||
self.error("Unmatched ')'")
|
||||
self.error("unmatched ')'")
|
||||
of TokenType.Hex:
|
||||
result = newASTNode(self.step(), NodeKind.hexExpr)
|
||||
of TokenType.Octal:
|
||||
|
@ -186,7 +185,7 @@ proc primary(self: Parser): ASTNode =
|
|||
of TokenType.Binary:
|
||||
result = newASTNode(self.step(), NodeKind.binExpr)
|
||||
else:
|
||||
self.error("Invalid syntax")
|
||||
self.error("invalid syntax")
|
||||
|
||||
|
||||
proc make_call(self: Parser, callee: ASTNode): ASTNode =
|
||||
|
@ -196,7 +195,7 @@ proc make_call(self: Parser, callee: ASTNode): ASTNode =
|
|||
if not self.check(TokenType.RightParen):
|
||||
while true:
|
||||
if len(arguments) >= 255:
|
||||
self.error("Cannot have more than 255 arguments")
|
||||
self.error("cannot have more than 255 arguments")
|
||||
break
|
||||
arguments.add(self.expression())
|
||||
if not self.match(TokenType.Comma):
|
||||
|
@ -213,7 +212,7 @@ proc call(self: Parser): ASTNode =
|
|||
if self.match(TokenType.LeftParen):
|
||||
result = self.make_call(result)
|
||||
elif self.match(TokenType.Dot):
|
||||
if self.expect(TokenType.Identifier, "Expecting attribute name after '.'"):
|
||||
if self.expect(TokenType.Identifier, "expecting attribute name after '.'"):
|
||||
result = newASTNode(self.peek(-2), NodeKind.getExpr, @[result, newAstNode(self.peek(-1), NodeKind.identExpr)])
|
||||
else:
|
||||
break
|
||||
|
@ -324,17 +323,59 @@ proc assignment(self: Parser): ASTNode =
|
|||
|
||||
proc expression(self: Parser): ASTNode =
  ## Parses a single expression by delegating to the
  ## lowest-precedence rule in the grammar (assignment)
  return self.assignment()
|
||||
|
||||
|
||||
proc expressionStatement(self: Parser): ASTNode =
  ## Parses expression statements, which
  ## are expressions followed by a semicolon.
  ## Errors (without consuming) if the
  ## semicolon is missing
  var expression = self.expression()
  # Spelling fixed: this was written "endOfLIne", which only resolved to the
  # endOfLine template thanks to Nim's style-insensitive identifier matching
  endOfLine("missing semicolon after expression")
  # peek(-1) anchors the node to the token just consumed (the semicolon)
  result = newASTNode(self.peek(-1), NodeKind.exprStmt, @[expression])
|
||||
|
||||
|
||||
proc delStmt(self: Parser): ASTNode =
  ## Parses "del" statements,
  ## which unbind a name from its
  ## value in the current scope and
  ## call its destructor.
  ## The "del" keyword itself has already
  ## been consumed by the caller
  var expression = self.expression()
  # Spelling fixed from "endOfLIne" (resolved via Nim's style-insensitivity)
  endOfLine("missing semicolon after del statement")
  if expression.kind != NodeKind.identExpr:
    # Only bound names can be deleted; deleting a literal is meaningless
    self.error("cannot delete a literal")
  else:
    result = newASTNode(self.peek(-1), NodeKind.delStmt, @[expression])
|
||||
|
||||
|
||||
proc assertStmt(self: Parser): ASTNode =
  ## Parses "assert" statements, which
  ## raise an error if the expression
  ## fed into them is falsey.
  ## The "assert" keyword itself has
  ## already been consumed by the caller
  var expression = self.expression()
  # Fixed copy-paste from delStmt: the message previously said "del statement"
  endOfLine("missing semicolon after assert statement")
  # peek(-1) mirrors delStmt/expressionStatement, anchoring the node to the
  # token just consumed rather than the (unconsumed) token after it
  result = newASTNode(self.peek(-1), NodeKind.assertStmt, @[expression])
|
||||
|
||||
|
||||
proc statement(self: Parser): ASTNode =
  ## Parses a single statement, dispatching on
  ## the kind of the current token
  # TODO
  let lookahead = self.peek()
  if lookahead.kind == TokenType.Del:
    # Consume the "del" keyword before handing off
    discard self.step()
    result = self.delStmt()
  elif lookahead.kind == TokenType.Assert:
    # Consume the "assert" keyword before handing off
    discard self.step()
    result = self.assertStmt()
  else:
    # Anything else is an expression statement
    result = self.expressionStatement()
|
||||
|
||||
|
||||
proc declaration(self: Parser): ASTNode =
  ## Parses a declaration; for now every
  ## declaration is simply a statement
  # TODO
  self.statement()
|
||||
|
||||
|
||||
proc parse*(self: Parser, tokens: seq[Token], file: string): seq[ASTNode] =
|
||||
## Parses a series of tokens into an AST node
|
||||
discard self.initParser()
|
||||
|
@ -342,7 +383,7 @@ proc parse*(self: Parser, tokens: seq[Token], file: string): seq[ASTNode] =
|
|||
self.file = file
|
||||
var program: seq[ASTNode] = @[]
|
||||
while not self.done():
|
||||
program.add(self.expressionStatement())
|
||||
program.add(self.declaration())
|
||||
if self.errored:
|
||||
program = @[]
|
||||
break
|
||||
|
|
Loading…
Reference in New Issue