Add parser support for selective import statements

Author: Mattia Giambirtone
Date: 2023-11-20 10:13:20 +01:00
Parent: 3ad22dea12
Commit: 8b39cc3bc0
Signed by: nocturn9x (GPG key ID: 8270F9F467971E59)
3 changed files with 48 additions and 39 deletions
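
This change teaches the parser the selective form of the import statement. Judging from the parser code below, the two accepted shapes look roughly like this (module path and names are invented for illustration):

    import foo/bar;
    from foo/bar import a, b;

Aliasing (the "c as d" form mentioned in the TODO comment this commit removes) is not handled here: only a comma-separated list of plain identifiers is accepted after "import".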

File 1 of 3 (AST node definitions)

@@ -202,6 +202,8 @@ type
     ImportStmt* = ref object of Statement
         moduleName*: IdentExpr
+        fromStmt*: bool
+        names*: seq[IdentExpr]
     ExportStmt* = ref object of Statement
         name*: IdentExpr
@@ -532,10 +534,12 @@ proc newExprStmt*(expression: Expression, token: Token): ExprStmt =
     result.token = token


-proc newImportStmt*(moduleName: IdentExpr, token: Token): ImportStmt =
+proc newImportStmt*(moduleName: IdentExpr, fromStmt: bool, names: seq[IdentExpr], token: Token): ImportStmt =
     result = ImportStmt(kind: importStmt)
     result.moduleName = moduleName
     result.token = token
+    result.fromStmt = fromStmt
+    result.names = names


 proc newExportStmt*(name: IdentExpr, token: Token): ExportStmt =
@@ -721,7 +725,7 @@ proc `$`*(self: ASTNode): string =
             result = &"Break({self.label})"
         of importStmt:
             var self = ImportStmt(self)
-            result &= &"Import({self.moduleName})"
+            result &= &"Import({self.moduleName}, names={self.names}, fromStmt={self.fromStmt})"
         of assertStmt:
             var self = AssertStmt(self)
             result &= &"Assert({self.expression})"
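
For clarity, a minimal sketch of how the widened constructor distinguishes the two statement forms (the identifiers and token below are hypothetical placeholders, not values from this diff):

    # A plain import carries no selective names...
    let plain = newImportStmt(moduleIdent, false, @[], tok)
    # ...while a 'from' import records which names to bring into scope
    let selective = newImportStmt(moduleIdent, true, @[aIdent, bIdent], tok)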

File 2 of 3 (parser)

@@ -166,6 +166,7 @@ proc newParser*: Parser =
     # Nim initializes all the other fields
     # automatically
     result.operators = newOperatorTable()
+    result.modules = newTable[string, bool]()


 # Public getters for improved error formatting
@@ -753,12 +754,10 @@ proc importStmt(self: Parser, fromStmt: bool = false): Statement =
     ## module to import the operators from it
     if self.scopeDepth > 0:
         self.error("import statements are only allowed at the top level")
-    var tok: Token
-    if fromStmt:
-        tok = self.peek(-2)
-    else:
+    var
         tok = self.peek(-1)
-    var moduleName = ""
+        moduleName = ""
+        names: seq[IdentExpr]
     while not self.check(Semicolon) and not self.done():
         if self.match(".."):
             if not self.check("/"):
@@ -771,12 +770,19 @@ proc importStmt(self: Parser, fromStmt: bool = false): Statement =
             moduleName &= self.peek(-1).lexeme
         else:
             break
+    if fromStmt:
+        self.expect(Import)
+        while not self.check(Semicolon) and not self.done():
+            self.expect(Identifier, "expecting identifier after 'import'")
+            names.add(newIdentExpr(self.peek(-1)))
+            if not self.match(Comma):
+                break
     endOfLine("missing semicolon after import statement")
     result = newImportStmt(newIdentExpr(Token(kind: Identifier, lexeme: moduleName,
                                               line: self.peek(-1).line,
                                               pos: (tok.pos.stop + 1, (tok.pos.stop + 1) + len(moduleName)),
                                               relPos: (tok.relPos.stop + 1, (tok.relPos.stop + 1) + len(moduleName))),
-                           self.scopeDepth), tok)
+                           self.scopeDepth), fromStmt, names, tok)
     result.file = self.file
     moduleName &= ".pn"
     var lexer = newLexer()
@@ -789,7 +795,7 @@ proc importStmt(self: Parser, fromStmt: bool = false): Statement =
         path = joinPath(searchPath, moduleName)
         if fileExists(path):
             break
-        elif i == searchPath.high():
+        elif i == moduleLookupPaths.high():
             self.error(&"""could not import '{path}': module not found""")
     if not self.modules.getOrDefault(path, true):
         self.error(&"could not import '{path}' from '{self.file}' due to a cyclic dependency")
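
The modules table initialized in newParser backs the cyclic-dependency check above. The bookkeeping side is not part of this hunk, but the getOrDefault(path, true) call suggests a scheme along these lines (an assumption, sketched as self-contained Nim):

    import std/tables

    # Assumed convention (not shown in this hunk): a module's entry is false
    # while it is still being parsed and is flipped to true once finished,
    # so finding an entry that is still false means we re-entered a module
    # mid-parse, i.e. an import cycle.
    var modules = newTable[string, bool]()

    proc beginModule(path: string): bool =
        # Returns false when importing `path` again would close a cycle
        if not modules.getOrDefault(path, true):
            return false
        modules[path] = false   # mark as in progress
        true

    proc endModule(path: string) =
        modules[path] = true    # mark as fully parsed

    assert beginModule("a.pn")      # start parsing a.pn
    assert not beginModule("a.pn")  # a.pn importing itself: cycle detected
    endModule("a.pn")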
@@ -1197,8 +1203,6 @@ proc statement(self: Parser): Statement =
             discard self.step()
             result = self.exportStmt()
         of From:
-            # TODO
-            # from module import a [, b, c as d]
             discard self.step()
             result = self.importStmt(fromStmt=true)
         of While:

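Taken together, the parser changes in this file amount to the following shape for import statements (an informal grammar reconstructed from the code above):

    importStmt ::= "import" modulePath ";"
                 | "from" modulePath "import" IDENT ("," IDENT)* ";"

where modulePath is a sequence of identifiers joined by "/", optionally led by ".." segments, exactly as the path-scanning loop consumes them.
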
File 3 of 3 (lexer symbol table)

@@ -7,54 +7,55 @@ proc fillSymbolTable*(tokenizer: Lexer) =
     ## keywords

     # 1-byte symbols
-    tokenizer.symbols.addSymbol("{", LeftBrace)
-    tokenizer.symbols.addSymbol("}", RightBrace)
-    tokenizer.symbols.addSymbol("(", LeftParen)
-    tokenizer.symbols.addSymbol(")", RightParen)
-    tokenizer.symbols.addSymbol("[", LeftBracket)
-    tokenizer.symbols.addSymbol("]", RightBracket)
-    tokenizer.symbols.addSymbol(".", Dot)
-    tokenizer.symbols.addSymbol(",", Comma)
-    tokenizer.symbols.addSymbol(";", Semicolon)
+    tokenizer.symbols.addSymbol("{", TokenType.LeftBrace)
+    tokenizer.symbols.addSymbol("}", TokenType.RightBrace)
+    tokenizer.symbols.addSymbol("(", TokenType.LeftParen)
+    tokenizer.symbols.addSymbol(")", TokenType.RightParen)
+    tokenizer.symbols.addSymbol("[", TokenType.LeftBracket)
+    tokenizer.symbols.addSymbol("]", TokenType.RightBracket)
+    tokenizer.symbols.addSymbol(".", TokenType.Dot)
+    tokenizer.symbols.addSymbol(",", TokenType.Comma)
+    tokenizer.symbols.addSymbol(";", TokenType.Semicolon)
     # Keywords
     tokenizer.symbols.addKeyword("type", TokenType.Type)
-    tokenizer.symbols.addKeyword("enum", Enum)
-    tokenizer.symbols.addKeyword("case", Case)
-    tokenizer.symbols.addKeyword("operator", Operator)
-    tokenizer.symbols.addKeyword("generator", Generator)
+    tokenizer.symbols.addKeyword("enum", TokenType.Enum)
+    tokenizer.symbols.addKeyword("case", TokenType.Case)
+    tokenizer.symbols.addKeyword("operator", TokenType.Operator)
+    tokenizer.symbols.addKeyword("generator", TokenType.Generator)
     tokenizer.symbols.addKeyword("fn", TokenType.Function)
-    tokenizer.symbols.addKeyword("coroutine", Coroutine)
+    tokenizer.symbols.addKeyword("coroutine", TokenType.Coroutine)
     tokenizer.symbols.addKeyword("break", TokenType.Break)
-    tokenizer.symbols.addKeyword("continue", Continue)
-    tokenizer.symbols.addKeyword("while", While)
-    tokenizer.symbols.addKeyword("for", For)
-    tokenizer.symbols.addKeyword("foreach", Foreach)
-    tokenizer.symbols.addKeyword("if", If)
-    tokenizer.symbols.addKeyword("else", Else)
+    tokenizer.symbols.addKeyword("continue", TokenType.Continue)
+    tokenizer.symbols.addKeyword("while", TokenType.While)
+    tokenizer.symbols.addKeyword("for", TokenType.For)
+    tokenizer.symbols.addKeyword("foreach", TokenType.Foreach)
+    tokenizer.symbols.addKeyword("if", TokenType.If)
+    tokenizer.symbols.addKeyword("else", TokenType.Else)
     tokenizer.symbols.addKeyword("await", TokenType.Await)
     tokenizer.symbols.addKeyword("raise", TokenType.Raise)
     tokenizer.symbols.addKeyword("assert", TokenType.Assert)
-    tokenizer.symbols.addKeyword("const", Const)
-    tokenizer.symbols.addKeyword("let", Let)
+    tokenizer.symbols.addKeyword("const", TokenType.Const)
+    tokenizer.symbols.addKeyword("let", TokenType.Let)
     tokenizer.symbols.addKeyword("var", TokenType.Var)
-    tokenizer.symbols.addKeyword("import", Import)
+    tokenizer.symbols.addKeyword("import", TokenType.Import)
     tokenizer.symbols.addKeyword("yield", TokenType.Yield)
     tokenizer.symbols.addKeyword("return", TokenType.Return)
-    tokenizer.symbols.addKeyword("object", Object)
-    tokenizer.symbols.addKeyword("export", Export)
+    tokenizer.symbols.addKeyword("object", TokenType.Object)
+    tokenizer.symbols.addKeyword("export", TokenType.Export)
     tokenizer.symbols.addKeyword("block", TokenType.Block)
     tokenizer.symbols.addKeyword("switch", TokenType.Switch)
     tokenizer.symbols.addKeyword("lent", TokenType.Lent)
+    tokenizer.symbols.addKeyword("from", TokenType.From)
     # These are more like expressions with a reserved
     # name that produce a value of a builtin type,
     # but we don't need to care about that until
     # we're in the parsing/compilation steps so
     # it's fine
-    tokenizer.symbols.addKeyword("true", True)
-    tokenizer.symbols.addKeyword("false", False)
+    tokenizer.symbols.addKeyword("true", TokenType.True)
+    tokenizer.symbols.addKeyword("false", TokenType.False)
     tokenizer.symbols.addKeyword("ref", TokenType.Ref)
     tokenizer.symbols.addKeyword("ptr", TokenType.Ptr)
     for sym in [">", "<", "=", "~", "/", "+", "-", "_", "*", "?", "@", ":", "==", "!=",
                 ">=", "<=", "+=", "-=", "/=", "*=", "**=", "!", "%", "&", "|", "^",
                 ">>", "<<"]:
-        tokenizer.symbols.addSymbol(sym, Symbol)
+        tokenizer.symbols.addSymbol(sym, TokenType.Symbol)
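
Aside from registering the new "from" keyword, this file's changes are purely mechanical: every bare token-kind reference is qualified with TokenType. A small self-contained illustration of why qualification helps in Nim (the declarations here are invented, not the project's):

    type
        TokenType {.pure.} = enum
            Import, From, Symbol

    # Qualified access works regardless of pragmas and avoids clashes when
    # several enums, or other symbols in scope, share a member name
    echo TokenType.Import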