1773 lines
74 KiB
Nim
1773 lines
74 KiB
Nim
import errors
|
|
import frontend/parser/parser
|
|
import frontend/parser/lexer
|
|
import frontend/compiler/typesystem
|
|
|
|
|
|
import std/os
|
|
import std/sets
|
|
import std/tables
|
|
import std/terminal
|
|
import std/strutils
|
|
import std/sequtils
|
|
import std/algorithm
|
|
import std/strformat
|
|
import std/parseutils
|
|
|
|
|
|
export ast, errors, typesystem
|
|
|
|
|
|
|
|
type
    TypeCheckError* = ref object of PeonException
        ## A typechecking error with location information
        # The AST node the error was raised on
        node*: ASTNode
        # The function being checked when the error occurred
        function*: Declaration
        # The instance of the typechecker that
        # raised the error
        instance*: TypeChecker

    PragmaFunc* = object
        ## An internal function called
        ## by pragmas
        # Whether the handler runs immediately or is deferred
        kind: PragmaKind
        # The callback invoked to process the pragma
        handler: proc (self: TypeChecker, pragma: Pragma, name: Name)

    PragmaKind* = enum
        ## An enumeration of pragma types
        Immediate,
        Delayed

    TypeChecker* = ref object
        ## The Peon type checker
        current: int # The current node we're looking at
        tree: ParseTree # The AST for the current module
        scopeDepth*: int # The current scope depth (0 == global, > 0 == local)
        source: string # The module's raw source code
        file: string # The module's filename
        isMainModule: bool # Are we the main module?
        currentFunction: Name # The current function we're checking
        currentModule: Name # The current module we're checking
        disabledWarnings: seq[WarningKind] # List of disabled warnings
        names: seq[TableRef[string, seq[Name]]] # Maps scope depths to namespaces
        # Internal procedures called by pragmas
        pragmas: TableRef[string, PragmaFunc]
        # Show full info about type mismatches when dispatching
        # function calls fails (we hide this under a boolean flag
        # because the output is quite verbose)
        showMismatches: bool
|
|
|
|
|
|
## Forward declarations

# Core checking entry points
proc validate(self: TypeChecker, node: ASTNode): TypedNode
proc toIntrinsic(name: string): Type
proc done(self: TypeChecker): bool {.inline.}
# Type-wrapping helpers
proc toRef(self: Type): Type {.inline.}
proc toConst(self: Type): Type {.inline.}
proc toPtr(self: Type): Type {.inline.}
proc toLent(self: Type): Type {.inline.}
proc wrapType(self: Type): Type {.inline.}
proc unwrapType(self: Type): Type {.inline.}
# Pragma handlers (registered in newTypeChecker)
proc handleErrorPragma(self: TypeChecker, pragma: Pragma, name: Name)
proc handlePurePragma(self: TypeChecker, pragma: Pragma, name: Name)
proc handleMagicPragma(self: TypeChecker, pragma: Pragma, name: Name)
proc handleUnsafePragma(self: TypeChecker, pragma: Pragma, name: Name)
proc handleWarnPragma(self: TypeChecker, pragma: Pragma, name: Name)
# Diagnostics, stringification and inference
proc warning(self: TypeChecker, kind: WarningKind, message: string, name: Name = nil, node: ASTNode = nil)
proc stringify*(self: TypeChecker, typ: Type): string
proc stringify*(self: TypeChecker, typ: TypedNode): string
proc inferOrError*(self: TypeChecker, node: Expression): TypedExpr
proc compare(self: TypeChecker, a, b: Type): bool
proc expression(self: TypeChecker, node: Expression): TypedExpr
proc specialize(self: TypeChecker, name: Name, args: seq[TypedExpr], node: ASTNode = nil): Type
proc declareGenerics(self: TypeChecker, name: Name)
proc funDecl(self: TypeChecker, node: FunDecl, name: Name = nil): TypedFunDecl
proc getTypeDistance(self: TypeChecker, a, b: Type): int
|
|
|
|
## Public getters for nicer error formatting
proc getCurrentNode*(self: TypeChecker): ASTNode =
    ## Returns the node currently being processed (the
    ## last one once the tree has been exhausted)
    if self.done():
        result = self.tree[^1]
    else:
        result = self.tree[self.current - 1]
|
|
proc getCurrentFunction*(self: TypeChecker): Declaration {.inline.} = (if self.currentFunction.isNil(): nil else: self.currentFunction.node)
|
|
proc getSource*(self: TypeChecker): string {.inline.} = self.source
|
|
|
|
|
|
proc newTypeChecker*: TypeChecker =
    ## Initializes a new compiler instance
    result = TypeChecker(current: 0,
                         tree: @[],
                         scopeDepth: 0,
                         source: "",
                         file: "",
                         isMainModule: false,
                         currentFunction: nil,
                         disabledWarnings: @[],
                         names: @[],
                         pragmas: newTable[string, PragmaFunc](),
                         showMismatches: false)
    # Register the built-in pragma handlers. "Immediate" pragmas
    # run as soon as they are seen, "Delayed" ones are deferred
    result.pragmas["magic"] = PragmaFunc(kind: Immediate, handler: handleMagicPragma)
    result.pragmas["pure"] = PragmaFunc(kind: Immediate, handler: handlePurePragma)
    result.pragmas["unsafe"] = PragmaFunc(kind: Immediate, handler: handleUnsafePragma)
    result.pragmas["error"] = PragmaFunc(kind: Delayed, handler: handleErrorPragma)
    result.pragmas["warn"] = PragmaFunc(kind: Delayed, handler: handleWarnPragma)
|
|
|
|
|
|
proc done(self: TypeChecker): bool {.inline.} = self.current == self.tree.len()
|
|
# Debug stringifiers: dereference the ref objects so their
# fields get printed rather than an opaque address
proc `$`(self: Name): string = $(self[])
proc `$`(self: Type): string = $(self[])
|
|
|
|
|
|
proc peek(self: TypeChecker): ASTNode {.inline.} =
    ## Returns the next node to be processed without consuming
    ## it. Yields nil on an empty tree and the last node once
    ## the tree has been exhausted
    let count = self.tree.len()
    if count == 0:
        result = nil
    elif self.done():
        result = self.tree[^1]
    else:
        result = self.tree[self.current]
|
|
|
|
|
|
proc step(self: TypeChecker): ASTNode {.inline.} =
    ## Consumes and returns the next node. Yields nil on an
    ## empty tree; once the tree is exhausted it keeps
    ## returning the last node without advancing further
    if self.tree.len() == 0:
        return nil
    if self.done():
        return self.tree[^1]
    result = self.peek()
    self.current += 1
|
|
|
|
|
|
proc error(self: TypeChecker, message: string, node: ASTNode = nil) {.inline.} =
    ## Raises a TypeCheckError exception with location information.
    ## Falls back to the node currently being checked when no node
    ## is given
    var where = node
    if where.isNil():
        where = self.getCurrentNode()
    raise TypeCheckError(msg: message, node: where, line: where.token.line,
                         file: where.file, instance: self)
|
|
|
|
|
|
proc handleMagicPragma(self: TypeChecker, pragma: Pragma, name: Name) =
    ## Handles the "magic" pragma. Assumes the given name is already
    ## declared
    # error() raises, so each failed check stops processing here
    if pragma.args.len() != 1:
        self.error(&"'magic' pragma: wrong number of arguments (expected 1, got {len(pragma.args)} instead)")
    if pragma.args[0].kind != strExpr:
        self.error(&"'magic' pragma: wrong argument type (constant string expected, got {self.stringify(self.inferOrError(pragma.args[0]))} instead)")
    if name.node.kind != NodeKind.typeDecl:
        self.error("'magic' pragma is not valid in this context")
    # Strip the surrounding quotes off the string literal and map
    # it onto the corresponding intrinsic type
    let typ = pragma.args[0].token.lexeme[1..^2].toIntrinsic()
    if typ.isNil():
        self.error("'magic' pragma: wrong argument value", pragma.args[0])
    name.valueType = typ.wrapType()
    name.valueType.intrinsic = true
|
|
|
|
|
|
proc handleErrorPragma(self: TypeChecker, pragma: Pragma, name: Name) =
    ## Handles the "error" pragma
    # Each check below raises (error() never returns), so the
    # chain stops at the first violation
    if pragma.args.len() != 1:
        self.error("'error' pragma: wrong number of arguments")
    elif pragma.args[0].kind != strExpr:
        self.error("'error' pragma: wrong type of argument (constant string expected)")
    elif not name.isNil() and name.node.kind != NodeKind.funDecl:
        self.error("'error' pragma is not valid in this context")
    # If validation passed, the pragma itself raises: its argument
    # (with the surrounding quotes stripped) is the error message
    self.error(pragma.args[0].token.lexeme[1..^2])
|
|
|
|
|
|
proc handleWarnPragma(self: TypeChecker, pragma: Pragma, name: Name) =
    ## Handles the "warn" pragma
    # Each check below raises (error() never returns), so the
    # warning line only runs when validation passed
    if pragma.args.len() != 1:
        self.error("'warn' pragma: wrong number of arguments")
    elif pragma.args[0].kind != strExpr:
        self.error("'warn' pragma: wrong type of argument (constant string expected)")
    # Emit the user-defined warning, stripping the quotes off
    # the string literal
    self.warning(UserWarning, pragma.args[0].token.lexeme[1..^2])
|
|
|
|
|
|
|
|
proc handlePurePragma(self: TypeChecker, pragma: Pragma, name: Name) =
    ## Handles the "pure" pragma
    if name.node.kind notin [NodeKind.funDecl, NodeKind.lambdaExpr]:
        self.error("'pure' pragma is not valid in this context")
    # TODO: actually record the purity of the function
    discard
|
|
|
|
|
|
proc handleUnsafePragma(self: TypeChecker, pragma: Pragma, name: Name) =
    ## Handles the "unsafe" pragma
    case name.node.kind:
        of NodeKind.funDecl, NodeKind.lambdaExpr:
            # Mark the function's type as unsafe
            name.valueType.unsafe = true
        else:
            self.error("'unsafe' pragma is not valid in this context")
|
|
|
|
|
|
proc warning(self: TypeChecker, kind: WarningKind, message: string, name: Name = nil, node: ASTNode = nil) =
    ## Raises a warning on stderr, pointing at the offending
    ## source line when its location can be retrieved. Does
    ## nothing if the warning kind has been disabled
    if kind in self.disabledWarnings:
        return
    var node: ASTNode = node
    var fn: Declaration
    if name.isNil():
        if node.isNil():
            node = self.getCurrentNode()
        fn = self.getCurrentFunction()
    else:
        node = name.node
        if node.isNil():
            node = self.getCurrentNode()
        if not name.owner.isNil():
            fn = name.owner.node
        else:
            fn = self.getCurrentFunction()
    var file = self.file
    # Fix: guard against a nil owner here too — the branch above
    # already acknowledges that name.owner may be nil, but this
    # lookup dereferenced it unconditionally
    if not name.isNil() and not name.owner.isNil():
        file = name.owner.file
    var pos = node.getRelativeBoundaries()
    if file notin ["<string>", ""]:
        file = relativePath(file, getCurrentDir())
    stderr.styledWrite(fgYellow, styleBright, "Warning in ", fgRed, &"{file}:{node.token.line}:{pos.start}")
    if not fn.isNil() and fn.kind == funDecl:
        stderr.styledWrite(fgYellow, styleBright, " in function ", fgRed, FunDecl(fn).name.token.lexeme)
    stderr.styledWriteLine(styleBright, fgYellow, &" ({kind}): ", fgDefault, message)
    try:
        # We try to be as specific as possible with the warning message, pointing to the
        # line it belongs to, but since warnings are not always raised from the source
        # file they're generated in, we take into account the fact that retrieving the
        # exact warning location may fail and bail out silently if it does
        let line = self.source.splitLines()[node.token.line - 1].strip(chars={'\n'})
        stderr.styledWrite(fgYellow, styleBright, "Source line: ", resetStyle, fgDefault, line[0..<pos.start])
        stderr.styledWrite(fgYellow, styleUnderscore, line[pos.start..pos.stop])
        stderr.styledWriteLine(fgDefault, line[pos.stop + 1..^1])
    except IndexDefect:
        # Something probably went wrong (wrong line metadata): bad idea to crash!
        stderr.styledWriteLine(resetStyle, fgRed, "Failed to retrieve line information")
|
|
|
|
|
|
proc wrapType(self: Type): Type {.inline.} =
    ## Wraps a type in a typevar. Unions are wrapped
    ## member-wise so that their structure stays visible
    if self.kind == Union:
        result = Type(kind: Union, types: @[])
        for condition in self.types:
            result.types.add((match: condition.match,
                              kind: condition.kind.wrapType(),
                              value: condition.value))
    else:
        result = Type(kind: Typevar, wrapped: self)
|
|
|
|
|
|
proc unwrapType(self: Type): Type {.inline.} =
    ## Unwraps a typevar if it's not already unwrapped.
    ## Unions are unwrapped member-wise; every other kind
    ## of type is returned unchanged
    case self.kind:
        of Typevar:
            return self.wrapped
        of Union:
            var unwrapped = Type(kind: Union, types: @[])
            for condition in self.types:
                unwrapped.types.add((match: condition.match,
                                     kind: condition.kind.unwrapType(),
                                     value: condition.value))
            return unwrapped
        else:
            return self
|
|
|
|
|
|
proc toRef(self: Type): Type {.inline.} =
    ## Wraps a type in a ref
    result = Type(kind: Reference, value: self)
|
|
|
|
|
|
proc toPtr(self: Type): Type {.inline.} =
    ## Wraps a type in a ptr type
    result = Type(kind: Pointer, value: self)
|
|
|
|
|
|
proc toLent(self: Type): Type {.inline.} =
    ## Turns a type into a lent type borrowing the given one
    result = "lent".toIntrinsic()
    result.value = self
|
|
|
|
|
|
proc toConst(self: Type): Type {.inline.} =
    ## Wraps a type into a const view of the given one
    result = "const".toIntrinsic()
    result.value = self
|
|
|
|
|
|
proc toIntrinsic(name: string): Type =
    ## Converts a string to an intrinsic
    ## type if it is valid and returns nil
    ## otherwise
    case name:
        of "any":
            return Type(kind: Any)
        of "auto":
            return Type(kind: Auto)
        of "int64":
            return Type(kind: Integer, size: LongLong, signed: true)
        of "uint64":
            return Type(kind: Integer, size: LongLong, signed: false)
        of "int32":
            return Type(kind: Integer, size: Long, signed: true)
        of "uint32":
            return Type(kind: Integer, size: Long, signed: false)
        of "int16":
            return Type(kind: Integer, size: Short, signed: true)
        of "uint16":
            return Type(kind: Integer, size: Short, signed: false)
        of "int8":
            return Type(kind: Integer, size: Tiny, signed: true)
        of "uint8":
            return Type(kind: Integer, size: Tiny, signed: false)
        of "float64":
            return Type(kind: Float, width: Full)
        of "float":
            # Fix: alias for float64. Literal inference asks for
            # "float" when a float literal carries no size specifier,
            # which previously produced a nil type
            return Type(kind: Float, width: Full)
        of "float32":
            return Type(kind: Float, width: Half)
        of "byte":
            return Type(kind: Byte)
        of "char":
            return Type(kind: Char)
        of "NaN":
            return Type(kind: TypeKind.Nan)
        of "Inf":
            return Type(kind: Infinity, positive: true)
        of "NegInf":
            return Type(kind: Infinity)
        of "bool":
            return Type(kind: Boolean)
        of "string":
            return Type(kind: String)
        of "pointer":
            return Type(kind: Pointer, value: "any".toIntrinsic())
        of "lent":
            return Type(kind: TypeKind.Lent, value: "any".toIntrinsic())
        of "const":
            return Type(kind: TypeKind.Const, value: "any".toIntrinsic())
        of "ref":
            return Type(kind: Reference, value: "any".toIntrinsic())
        else:
            # Unknown names are not intrinsics (callers check for nil)
            return nil
|
|
|
|
|
|
|
|
proc infer(self: TypeChecker, node: LiteralExpr): TypedExpr =
    ## Infers the type of a literal expression. Integer and float
    ## literals may carry a size specifier after a "'" (e.g. 2'uint8);
    ## an invalid specifier raises an error
    case node.kind:
        of trueExpr, falseExpr:
            return newTypedExpr(node, "bool".toIntrinsic())
        of strExpr:
            return newTypedExpr(node, "string".toIntrinsic())
        of nanExpr:
            return newTypedExpr(node, "NaN".toIntrinsic())
        of infExpr:
            return newTypedExpr(node, "Inf".toIntrinsic())
        of intExpr, binExpr, octExpr, hexExpr:
            let size = node.token.lexeme.split("'")
            if size.len() == 1:
                # No specifier: integers default to int64
                return newTypedExpr(node, "int64".toIntrinsic())
            # Fix: validate the intrinsic type itself. The old code
            # checked the result of newTypedExpr() for nil, which
            # (being a freshly constructed wrapper) presumably never
            # fires, letting invalid specifiers through with nil types
            let typ = size[1].toIntrinsic()
            if typ.isNil():
                self.error(&"invalid type specifier '{size[1]}' for int", node)
            result = newTypedExpr(node, typ)
        of floatExpr:
            let size = node.token.lexeme.split("'")
            if size.len() == 1:
                # Fix: this used "float", which was not a valid intrinsic
                # name, so plain float literals got a nil type
                return newTypedExpr(node, "float64".toIntrinsic())
            let typ = size[1].toIntrinsic()
            if typ.isNil():
                self.error(&"invalid type specifier '{size[1]}' for float", node)
            result = newTypedExpr(node, typ)
        else:
            discard
|
|
|
|
|
|
proc infer(self: TypeChecker, node: Expression): TypedExpr =
    ## Infers the type of an arbitrary expression. Constant
    ## expressions take the literal fast path
    if node.isConst():
        result = self.infer(LiteralExpr(node))
    else:
        result = self.expression(node)
|
|
|
|
|
|
proc compareUnions*(self: TypeChecker, a, b: seq[tuple[match: bool, kind: Type, value: Expression]]): bool =
    ## Compares type unions between each other
    var
        long = a
        short = b
        i = 0
    # Always iterate the shorter union against the longer one
    if b.len() > a.len():
        long = b
        short = a
    # Count how many (type, polarity) pairs of the shorter union
    # also appear in the longer one
    for cond1 in short:
        for cond2 in long:
            if not self.compare(cond1.kind, cond2.kind) or cond1.match != cond2.match:
                continue
            inc(i)
    # NOTE(review): a single member of `short` that matches several
    # members of `long` bumps the counter more than once, so this can
    # return true even when some member of `short` has no match at
    # all — confirm this is the intended semantics
    return i >= short.len()
|
|
|
|
|
|
proc matchUnion(self: TypeChecker, a: Type, b: seq[tuple[match: bool, kind: Type, value: Expression]]): bool =
    ## Returns whether a non-union type a matches
    ## the given untagged union b
    assert a.kind != Union
    result = false
    for condition in b:
        # A positive constraint whose type is compatible is enough
        if condition.match and self.compare(condition.kind, a):
            return true
|
|
|
|
|
|
proc matchGeneric(self: TypeChecker, a: Type, b: seq[tuple[match: bool, kind: Type, value: Expression]]): bool =
    ## Returns whether a concrete type matches the
    ## given generic type b: every constraint must be a
    ## positive, compatible match
    result = true
    for constraint in b:
        if not (constraint.match and self.compare(constraint.kind, a)):
            return false
|
|
|
|
|
|
proc isAny(typ: Type): bool =
    ## Returns true if the given type is
    ## of (or contains) the any type. Not
    ## applicable to typevars
    if typ.kind == Any:
        return true
    if typ.kind == Union:
        # A union is "any" if any of its members is
        for condition in typ.types:
            if condition.kind.isAny():
                return true
    return false
|
|
|
|
|
|
proc compare(self: TypeChecker, a, b: Type): bool =
    ## Compares two types for compatibility. The "any" type
    ## matches everything; unions are matched member-wise
    ## against non-union types
    if a.isAny() or b.isAny():
        return true
    if a.kind == b.kind:
        case a.kind:
            of Typevar:
                return self.compare(a.wrapped, b.wrapped)
            # TODO: Take interfaces into account
            of Structure:
                # Compare type names, if they both have it
                # (some internally generated types may not
                # have names)
                if a.name.len() > 0 and b.name.len() > 0:
                    if a.name != b.name:
                        return false
                # Compare fields
                var hashSet = initHashSet[string]()
                for field in a.fields.keys():
                    hashSet.incl(field)
                for field in b.fields.keys():
                    hashSet.incl(field)
                # Ensure both types have the same field
                # names
                for field in hashSet:
                    if field notin a.fields:
                        return false
                    if field notin b.fields:
                        return false
                # Ensure fields have matching types
                for field in hashSet:
                    if not self.compare(a.fields[field], b.fields[field]):
                        return false
                hashSet.clear()
                # Compare generic arguments

                # Check generic types
                for generic in a.genericTypes.keys():
                    hashSet.incl(generic)
                for generic in b.genericTypes.keys():
                    hashSet.incl(generic)
                # Ensure both types have the same generic
                # argument names
                for generic in hashSet:
                    if generic notin a.genericTypes:
                        return false
                    if generic notin b.genericTypes:
                        return false
                for generic in hashSet:
                    if not self.compare(a.genericTypes[generic], b.genericTypes[generic]):
                        return false
                hashSet.clear()
                # Check generic values
                for generic in a.genericValues.keys():
                    hashSet.incl(generic)
                for generic in b.genericValues.keys():
                    hashSet.incl(generic)
                # Ensure both types have the same generic
                # argument names. Fix: these membership checks
                # previously looked into genericTypes (a copy-paste
                # slip), so missing generic *values* went undetected
                for generic in hashSet:
                    if generic notin a.genericValues:
                        return false
                    if generic notin b.genericValues:
                        return false
                for generic in hashSet:
                    if not self.compare(a.genericValues[generic], b.genericValues[generic]):
                        return false
                return true
            of Boolean, Infinity, Any,
               Auto, Char, Byte, String:
                return true
            of Integer:
                return a.size == b.size and a.signed == b.signed
            of Float:
                return a.width == b.width
            of TypeKind.Lent, Reference, Pointer, TypeKind.Const:
                return self.compare(a.value, b.value)
            of Union:
                return self.compareUnions(a.types, b.types)
            of Function:
                # TODO
                return false
            else:
                # TODO
                return false
    # Mixed-kind comparisons: a union matches a non-union
    # type if the type satisfies the union's constraints
    if a.kind == Union:
        return self.matchUnion(b, a.types)
    if b.kind == Union:
        return self.matchUnion(a, b.types)
    return false
|
|
|
|
|
|
proc compare(self: TypeChecker, a, b: Name): bool =
    ## Compares two names. Beyond checking that their types
    ## match, this makes sure both objects come from the same
    ## module (and so are literally the same exact object)
    result = self.compare(a.valueType, b.valueType) and a.owner == b.owner
|
|
|
|
|
|
proc literal(self: TypeChecker, node: LiteralExpr): TypedExpr =
    ## Typechecks a literal expression, validating that its value
    ## fits in the inferred type. Integer literals in alternate
    ## bases (hex/bin/oct) are rewritten into equivalent base-10
    ## nodes spanning the same source location
    case node.kind:
        of trueExpr, falseExpr, strExpr, infExpr, nanExpr:
            result = self.infer(node)
        of intExpr:
            result = self.infer(node)
            # NOTE(review): both signed and unsigned literals have
            # kind Integer, so the unsigned (uint64) parse path below
            # looks unreachable — confirm whether the condition should
            # also test result.kind.signed
            if result.kind.kind == Integer:
                var x: int
                try:
                    discard parseInt(node.literal.lexeme, x)
                except ValueError:
                    self.error("integer value out of range")
            else:
                var x: uint64
                try:
                    discard parseBiggestUInt(node.literal.lexeme, x)
                except ValueError:
                    self.error("integer value out of range")
        of hexExpr:
            var x: int
            result = self.infer(node)
            try:
                discard parseHex(node.literal.lexeme, x)
            except ValueError:
                self.error("integer value out of range")
            # Rebuild the literal as a base-10 node over the same
            # source span (intentionally shadows the parameter)
            let node = newIntExpr(Token(lexeme: $x, line: node.token.line,
                                        pos: (start: node.token.pos.start,
                                              stop: node.token.pos.start + len($x)),
                                        relPos: (start: node.token.relPos.start, stop: node.token.relPos.start + len($x))
                                        )
                                  )
            result.node = node
        of binExpr:
            var x: int
            result = self.infer(node)
            try:
                discard parseBin(node.literal.lexeme, x)
            except ValueError:
                self.error("integer value out of range")
            # Rebuild the literal as a base-10 node over the same
            # source span (intentionally shadows the parameter)
            let node = newIntExpr(Token(lexeme: $x, line: node.token.line,
                                        pos: (start: node.token.pos.start,
                                              stop: node.token.pos.start + len($x)),
                                        relPos: (start: node.token.relPos.start, stop: node.token.relPos.start + len($x))
                                        )
                                  )
            result.node = node
        of octExpr:
            var x: int
            result = self.infer(node)
            try:
                discard parseOct(node.literal.lexeme, x)
            except ValueError:
                self.error("integer value out of range")
            # Rebuild the literal as a base-10 node over the same
            # source span (intentionally shadows the parameter)
            let node = newIntExpr(Token(lexeme: $x, line: node.token.line,
                                        pos: (start: node.token.pos.start,
                                              stop: node.token.pos.start + len($x)),
                                        relPos: (start: node.token.relPos.start, stop: node.token.relPos.start + len($x))
                                        )
                                  )
            result.node = node
        of floatExpr:
            var x: float
            result = self.infer(node)
            try:
                discard parseFloat(node.literal.lexeme, x)
            except ValueError:
                self.error("floating point value out of range")
        else:
            self.error(&"invalid AST node of kind {node.kind} at literal(): {node} (This is an internal error and most likely a bug!)")
|
|
|
|
|
|
proc find(self: TypeChecker, name: string, kind: Type = "any".toIntrinsic(), ): Name =
    ## Looks up a name in all scopes starting from the current
    ## one. Optionally matches it to the given type

    ## A note about how namespaces are implemented. At each scope depth lies
    ## a dictionary which maps strings to a list of names. Why a list of names
    ## rather than a single name? Well, that's to allow cool things like overloading
    ## existing functions with different type signatures. We still disallow most cases
    ## of re-declaration, though (for example, shadowing a function with a variable in the
    ## same scope and module) because it would just be confusing
    var depth = self.scopeDepth
    while depth >= 0:
        if self.names[depth].hasKey(name):
            # Most recent declarations take precedence
            for obj in reversed(self.names[depth][name]):
                # NOTE(review): findAll() performs this same ownership
                # test via obj.owner.absPath rather than obj.module.absPath
                # — confirm the two fields are interchangeable here
                if obj.module.absPath != self.currentModule.absPath:
                    # We don't own this name, but we
                    # may still have access to it
                    if obj.isPrivate or self.currentModule notin obj.exportedTo:
                        # The name is either private in its owner
                        # module, so we definitely can't use it, or
                        # said module has not explicitly exported it
                        # to us. If the name is public but not exported
                        # in its owner module, then we act as if it's
                        # private. This is to avoid namespace pollution
                        # from imports (i.e. if module A imports modules
                        # C and D and module B imports module A, then B
                        # might not want to also have access to C's and D's
                        # names as they might clash with its own stuff)
                        continue
                # If we got here, we can access the name
                if self.compare(obj.valueType, kind):
                    result = obj
                    break
        if not result.isNil():
            break
        dec(depth)
|
|
|
|
|
|
proc findOrError(self: TypeChecker, name: string, kind: Type = "any".toIntrinsic(), node: ASTNode = nil): Name =
    ## Like find(), but raises an error if the name is not found
    let found = self.find(name, kind)
    if found.isNil():
        self.error(&"reference to undefined name '{name}'", node)
    return found
|
|
|
|
|
|
proc findAll(self: TypeChecker, name: string, kind: Type = "any".toIntrinsic()): seq[Name] =
    ## Like find(), but doesn't stop at the first match. Returns
    ## a list of matches
    # See find() for how the per-scope namespaces are organized
    var depth = self.scopeDepth
    while depth >= 0:
        if self.names[depth].hasKey(name):
            for obj in self.names[depth][name]:
                # NOTE(review): find() performs this same ownership
                # test via obj.module.absPath rather than obj.owner.absPath
                # — confirm the two fields are interchangeable here
                if obj.owner.absPath != self.currentModule.absPath:
                    # We don't own this name, but we
                    # may still have access to it
                    if obj.isPrivate or self.currentModule notin obj.exportedTo:
                        # The name is either private in its owner
                        # module, so we definitely can't use it, or
                        # said module has not explicitly exported it
                        # to us. If the name is public but not exported
                        # from its owner module, then we act as if it's
                        # private. This is to avoid namespace pollution
                        # from imports (i.e. if module A imports modules
                        # C and D and module B imports module A, then B
                        # might not want to also have access to C's and D's
                        # names as they might clash with its own stuff)
                        continue
                # If we got here, we can access the name
                if self.compare(obj.valueType, kind):
                    result.add(obj)
        dec(depth)
|
|
|
|
|
|
proc inferOrError(self: TypeChecker, node: Expression): TypedExpr =
    ## Attempts to infer the type of the given expression,
    ## raising an error if inference fails
    let inferred = self.infer(node)
    if inferred.isNil():
        self.error("expression has no type", node)
    return inferred
|
|
|
|
|
|
proc stringify*(self: TypeChecker, typ: Type): string =
    ## Returns the string representation of a
    ## type object. A nil type stringifies as "void"
    if typ.isNil():
        return "void"
    case typ.kind:
        of Char, Byte, String, TypeKind.Nan,
           Auto, Any:
            result &= ($typ.kind).toLowerAscii()
        of Structure:
            result &= typ.name
            # Generic type parameters are printed between angle
            # brackets, generic value parameters between square ones
            if typ.genericTypes.len() > 0:
                result &= "<"
                var i = 0
                for gen in typ.genericTypes.keys():
                    result &= &"{gen}: {self.stringify(typ.genericTypes[gen])}"
                    if i < typ.genericTypes.len() - 1:
                        result &= ", "
                    inc(i)
                result &= ">"
            if typ.genericValues.len() > 0:
                result &= "["
                var i = 0
                for gen in typ.genericValues.keys():
                    result &= &"{gen}: {self.stringify(typ.genericValues[gen])}"
                    if i < typ.genericValues.len() - 1:
                        result &= ", "
                    inc(i)
                result &= "]"
        of Boolean:
            result = "bool"
        of Infinity:
            result = "inf"
        of Integer:
            if not typ.signed:
                result &= "u"
            result &= &"int{int(typ.size)}"
        of Float:
            result &= "float"
            case typ.width:
                of Half:
                    result &= "32"
                of Full:
                    result &= "64"
        of Pointer:
            result &= &"ptr {self.stringify(typ.value)}"
        of Reference:
            result &= &"ref {self.stringify(typ.value)}"
        of TypeKind.Const:
            result &= &"const {self.stringify(typ.value)}"
        of Function:
            result &= "fn "
            if typ.genericTypes.len() > 0:
                result &= "<"
                var i = 0
                for gen in typ.genericTypes.keys():
                    result &= &"{gen}: {self.stringify(typ.genericTypes[gen])}"
                    if i < typ.genericTypes.len() - 1:
                        result &= ", "
                    inc(i)
                # Fix: this generic list was opened with "<" but
                # closed with "]", producing mismatched brackets
                result &= ">"
            result &= "("
            for i, (argName, argType, argDefault) in typ.parameters:
                result &= &"{argName}: {self.stringify(argType)}"
                if not argDefault.isNil():
                    result &= &" = {argDefault.kind}"
                if i < typ.parameters.len() - 1:
                    result &= ", "
            result &= ")"
            # Fix: this used to call compare(returnType, "nil".toIntrinsic()),
            # but toIntrinsic() has no "nil" case and returns nil, which
            # compare() would dereference. Only print the return type when
            # there actually is one
            if not typ.returnType.isNil():
                result &= &": {self.stringify(typ.returnType)}"
            if typ.pragmas.len() > 0:
                result &= " {"
                var i = 0
                for name, pragma in typ.pragmas:
                    result &= &"{name}"
                    if pragma.args.len() > 0:
                        result &= ": "
                        for j, arg in pragma.args:
                            result &= arg.token.lexeme
                            if j < pragma.args.high():
                                result &= ", "
                    # Separate pragmas with commas; the last one
                    # closes the brace
                    if i < typ.pragmas.len() - 1:
                        result &= ", "
                    else:
                        result &= "}"
                    inc(i)
        of TypeKind.Lent:
            result &= &"lent {self.stringify(typ.value)}"
        of Union:
            for i, condition in typ.types:
                if i > 0:
                    result &= " | "
                if not condition.match:
                    result &= "~"
                result &= self.stringify(condition.kind)
        of Typevar:
            result &= &"typevar[{self.stringify(typ.wrapped)}]"
        else:
            discard # TODO(?)
|
|
|
|
|
|
proc stringify*(self: TypeChecker, typ: TypedNode): string =
    ## Returns the string representation of a typed node's type
    if typ.node.isConst():
        return self.stringify(TypedExpr(typ).kind)
    case typ.node.kind:
        of NodeKind.funDecl, varDecl, typeDecl:
            return self.stringify(TypedDecl(typ).name.valueType)
        of binaryExpr, unaryExpr, identExpr, callExpr, lentExpr,
           constExpr, ptrExpr, refExpr:
            return self.stringify(TypedExpr(typ).kind)
        else:
            # TODO: handle the remaining node kinds
            return "void"
|
|
|
|
|
|
proc beginScope(self: TypeChecker) =
    ## Begins a new lexical scope, pushing a fresh namespace
    self.scopeDepth += 1
    self.names.add(newTable[string, seq[Name]]())
|
|
|
|
|
|
proc endScope(self: TypeChecker) =
    ## Closes the current lexical scope and reverts to the
    ## enclosing one, dropping the scope's namespace
    discard self.names.pop()
    self.scopeDepth -= 1
    assert self.scopeDepth == self.names.high()
|
|
|
|
|
|
func isType(self: Type): bool = self.kind in [Structure, ]
|
|
proc isTypevar(self: Type): bool =
    ## Returns whether the given type is a typevar.
    # NOTE(review): this relies on reference identity — unwrapType()
    # returns `self` unchanged only for non-typevar, non-union types,
    # so unions (which get a fresh object) also test true here.
    # Confirm that's intended
    return self.unwrapType() != self
|
|
|
|
|
|
proc check(self: TypeChecker, term, expected: Type, node: ASTNode = nil): Type {.inline, discardable.} =
    ## Like the other check(), but works with two type objects.
    ## The node is passed in to error() in case of a failure
    result = term
    if not self.compare(term, expected):
        self.error(&"expecting an expression of type {self.stringify(expected)}, got {self.stringify(term)} instead", node=node)
    # "any" is only allowed where "any" is explicitly expected
    if term.isAny() and not expected.isAny():
        self.error("any is not a valid type in this context", node)
|
|
|
|
|
|
proc check(self: TypeChecker, term: Expression, expected: Type): TypedExpr {.inline, discardable.} =
    ## Checks the type of the given expression against a known one.
    ## Raises an error if appropriate and returns the typed expression
    ## otherwise
    result = self.inferOrError(term)
    discard self.check(result.kind, expected)
|
|
|
|
|
|
proc getTypeDistance(self: TypeChecker, a, b: Type): int =
    ## Gets the type distance of two Peon types. Assumes
    ## a and b are already compatible (i.e. compare(a, b)
    ## returns true). For more info, check out self.match()
    if a.kind != Structure or b.kind != Structure:
        # TODO: Test
        return 0
    var parent = b.parent
    result = 0
    # We already know that the inheritance
    # chain is correct (i.e. that a is at the
    # root of it) because we assume a and b are
    # already compatible, so we really just need
    # to walk the tree backwards and keep track of
    # how many times we go up one node
    while not parent.isNil():
        # Juuust to be sure
        when defined(debug):
            # Fix: this assertion referenced self.parent.parent, but
            # TypeChecker has no parent field, so debug builds could
            # not compile. The intent is: once we reach the root of
            # the chain, it must be a itself
            if parent.parent.isNil():
                assert parent == a
        inc(result)
        parent = parent.parent
|
|
|
|
|
|
proc calcTypeDistance(self: TypeChecker, typ: Type, sig: TypeSignature): int =
    ## Computes the cumulative type distance between the given type
    ## and the given type signature by summing the distances of each
    ## paired element, yielding a single value for how precisely the
    ## type matches the signature. Assumes the types are already
    ## compatible
    result = 0
    case typ.kind:
        of TypeKind.Function:
            for (ours, theirs) in zip(typ.parameters, sig):
                result += self.getTypeDistance(ours.kind, theirs.kind)
        of TypeKind.Structure:
            # Structures carry no distance information (yet)
            discard
        else:
            self.error(&"cannot compute type distance for object of type {self.stringify(typ)}")
|
|
|
|
|
|
proc checkTypeSignature(self: TypeChecker, typ: Type, sig: TypeSignature): bool =
    ## Helper for to check type signatures.
    ## Returns true if the given type matches
    ## the given type signature
    case typ.kind:
        of TypeKind.Function:
            if typ.parameters.len() < sig.len():
                # If the function has less arguments than
                # we have in our signature, then we surely
                # can't match to it. This is different from
                # the code in self.compare() (which checks that
                # both signatures have the same length) because
                # of how we handle default arguments; This is to
                # address a problem that arises that when we have
                # a function that looks like (a, b, c = someDefault) -> z
                # and we're looking for a signature (a, b) -> y: if
                # we enforced that the length of the signatures must
                # be equal for them to match, we'd consider that function
                # to not be a good match, even though it is because the
                # third argument has a default value
                return false
            # We construct a new signature, without the function's default arguments
            var args: TypeSignature = @[]
            for argument in typ.parameters:
                if argument.default.isNil():
                    args.add(argument)
                else:
                    break
            if args.len() != sig.len():
                return false
            # Pair up the remaining parameters with the requested
            # signature: types must be compatible and, when both
            # sides carry a name, the names must agree
            for (argA, argB) in zip(args, sig):
                if not self.compare(argA.kind, argB.kind):
                    return false
                if argA.name == "" or argB.name == "":
                    continue
                elif argA.name != argB.name:
                    return false
            return true
        of TypeKind.Structure:
            if sig.len() != typ.fields.len():
                # For now, we require that all fields of an object
                # be explicitly initialized
                return false
            # NOTE(review): this branch is unfinished — it collects the
            # field types (and declares an unused `names` set) but never
            # compares them against sig, so it implicitly returns false
            # for every structure whose field count matches
            var fields: seq[Type] = @[]
            var names: HashSet[string]
            for fieldName in typ.fields.keys():
                fields.add(typ.fields[fieldName])
        else:
            self.error(&"cannot check type signature for object of type {self.stringify(typ)}")
|
|
|
|
|
|
proc match(self: TypeChecker, name: string, sig: TypeSignature, node: ASTNode = nil): Name =
    ## Tries to find a matching type for a given (typeName, typeSignature) pair
    ## and returns it. In this context, "type signature" means an ordered list of
    ## tuples (paramName, paramType, paramDefault) that represents the arguments we
    ## want to instantiate a given named object with, be it a function, a type or an
    ## enumeration. The optional node parameter is passed to error() for reporting
    ## purposes in case of failure
    var
        impl: seq[Name] = @[]
        matches: seq[Name] = @[]
        distances: seq[int] = @[]
        dst: int = 0
        # Fix: the running minimum must start out at int.high, not 0. With the
        # previous initialization (minDst = dst = 0), a candidate whose distance
        # was greater than zero could never become the minimum, so a lookup whose
        # only viable candidates were imperfect (yet valid) matches would be
        # erroneously reported as having no match at all
        minDst: int = int.high
    # Find all matching implementations and record their
    # type distance relative to our input type signature,
    # as well as the smallest one found so far
    for n in filterIt(self.findAll(name), it.valueType.kind in [TypeKind.Function, TypeKind.Structure]):
        if self.checkTypeSignature(n.valueType, sig):
            impl.add(n)
            dst = self.calcTypeDistance(n.valueType, sig)
            if dst < minDst:
                minDst = dst
            distances.add(dst)
    # Zero matches is an error and one match is trivially the answer; for
    # multiple candidates we need a tie-breaker. Erroring out on any ambiguity
    # backfires: given fn sum(a, b: int): int and an overload
    # fn sum(a, b: MyInt): MyInt (with MyInt a subtype of int), calling sum
    # with two MyInt arguments finds *both* implementations compatible, because
    # a subtype can always be treated as an instance of its supertype. The
    # comparison function alone cannot judge how "good" a match is, so each
    # candidate is gauged with a relative metric called "type distance": the
    # distance between the two types in their inheritance tree. A value of 0
    # indicates a perfect (structurally exact) match, larger values indicate
    # less precise ones. The metric is only meaningful between compatible
    # candidates of the same lookup (incompatible types are "infinitely
    # distant"). Should more than one candidate share the smallest distance,
    # that is still a genuine ambiguity error
    for i, n in impl:
        # Grab all the matches with the smallest type distance
        if distances[i] == minDst:
            matches.add(n)
    case matches.len():
        of 1:
            # There's just one best match. Fix: take it from matches (the
            # minimal-distance subset) rather than impl (every compatible
            # candidate): when several candidates were compatible, impl[0] was
            # not necessarily the closest one we just selected
            result = matches[0]
            if result.kind == NameKind.Var:
                # Variables bound to other names must always
                # have this field set
                assert not result.assignedName.isNil()
                # We found a name bound to a variable, so we
                # return the original name object rather than
                # the wrapper
                result = result.assignedName
            # Extra checks
            case result.valueType.kind:
                of Function:
                    for (a, b) in zip(result.valueType.parameters, sig):
                        if not a.kind.isAny() and b.kind.isAny():
                            self.error("any is not a valid type in this context", node)
                of Structure:
                    for (a, b) in zip(result.valueType.fields.values().toSeq(), sig):
                        if not a.isAny() and b.kind.isAny():
                            self.error("any is not a valid type in this context", node)
                else:
                    # TODO: Enums
                    discard
        else:
            # We either found no matches or too many, woopsie daisy! That's definitely an error
            var msg: string = ""
            case matches.len():
                of 0:
                    # No matches
                    let names = self.findAll(name)
                    msg &= &"failed to find a suitable type for '{name}'"
                    if names.len() > 0:
                        msg &= &", found {len(names)} potential candidate"
                        if names.len() > 1:
                            msg &= "s"
                        if self.showMismatches:
                            msg &= ":"
                            for name in names:
                                msg &= &"\n - in {relativePath(name.file, getCurrentDir())}:{name.ident.token.line}:{name.ident.token.relPos.start} -> {self.stringify(name.valueType)}"
                                if name.valueType.kind notin [Function, Structure]:
                                    msg &= ": not callable"
                                elif sig.len() != name.valueType.parameters.len():
                                    msg &= &": wrong number of arguments (expected {name.valueType.parameters.len()}, got {sig.len()} instead)"
                                else:
                                    # Same arity: report the first name/type mismatch
                                    for i, arg in sig:
                                        if arg.name != "" and name.valueType.parameters[i].name != "" and arg.name != name.valueType.parameters[i].name:
                                            msg &= &": unexpected argument '{arg.name}' at position {i + 1}"
                                        if not self.compare(arg.kind, name.valueType.parameters[i].kind):
                                            msg &= &": first mismatch at position {i + 1}: (expected {self.stringify(name.valueType.parameters[i].kind)}, got {self.stringify(arg.kind)} instead)"
                                            break
                        else:
                            msg &= " (compile with --showMismatches for more details)"
                    else:
                        msg &= &"reference to undefined name '{name}'"
                else:
                    # Ambiguity detected
                    msg &= &"multiple matches found for '{name}'"
                    if self.showMismatches:
                        msg &= ":"
                        for fn in reversed(impl):
                            msg &= &"\n- in {relativePath(fn.file, getCurrentDir())}, line {fn.line} of type {self.stringify(fn.valueType)}"
                    else:
                        msg &= " (compile with --showMismatches for more details)"
            self.error(msg, node)
|
|
|
|
|
|
proc specialize(self: TypeChecker, name: Name, args: seq[TypedExpr], node: ASTNode = nil): Type =
    ## Instantiates a generic type: takes the named generic object and the
    ## concrete arguments (one per generic type/value parameter, in order)
    ## and returns a fresh, concrete Type. Raises a typechecking error if
    ## the object is not generic or the argument count is wrong
    let
        typ = name.valueType.unwrapType()
        # One argument is expected per generic type *and* per generic value
        expectedCount = typ.genericTypes.len() + typ.genericValues.len()
    if expectedCount == 0:
        self.error(&"cannot create concrete instance of objects of type {self.stringify(typ)} (type is not a generic)")
    if len(args) < expectedCount:
        self.error(&"partial generic instantiation is not supported (expecting exactly {expectedCount} arguments, got {len(args)} instead)", node=node)
    elif len(args) != expectedCount:
        self.error(&"invalid number of arguments supplied for generic instantiation (expecting exactly {expectedCount}, got {len(args)} instead)", node=node)
    case typ.kind:
        of TypeKind.Structure:
            # Work on a deep copy so the original generic declaration is
            # left untouched for future instantiations
            result = typ.deepCopy()
            result.genericTypes.clear()
            result.genericValues.clear()
            # Maps each generic parameter name to the concrete type it
            # was instantiated with, so fields can be rewritten below
            var replaced = newTable[string, Type]()
            var i = 0
            # Generic *type* parameters come first in the argument list
            for key in typ.genericTypes.keys():
                var term = args[i].kind
                # Type may not be wrapped yet
                if args[i].kind.isType():
                    term = term.wrapType()
                # check() validates the argument against the parameter's
                # constraint and yields the resolved type
                result.genericTypes[key] = self.check(term, typ.genericTypes[key], args[i].node)
                replaced[key] = result.genericTypes[key]
                inc(i)
            # Note how we do not reset i! Generic *value* parameters consume
            # the remaining arguments, after all the type parameters
            for key in typ.genericValues.keys():
                var term = args[i].kind
                result.genericValues[key] = self.check(term, typ.genericValues[key], args[i].node)
                replaced[key] = result.genericValues[key]
                inc(i)
            # Rewrite any field whose declared type is one of the generic
            # parameter names, substituting the concrete type
            for field in TypeDecl(name.node).fields:
                if field.valueType.kind == identExpr:
                    let name = field.valueType.token.lexeme
                    if name in replaced:
                        result.fields[name] = replaced[name]
        else:
            # NOTE(review): only structures can currently be specialized;
            # generic functions presumably need their own branch here
            self.error(&"cannot create concrete instance of objects of type {self.stringify(typ)}")
|
|
|
|
|
|
proc unpackTypes(self: TypeChecker, condition: Expression, list: var seq[tuple[match: bool, kind: Type, value: Expression]], accept: bool = true) =
    ## Recursively unpacks a type constraint expression (e.g. "A | B | ~C")
    ## into a flat list of (match, kind, value) tuples. The accept flag
    ## records whether the type is allowed (true) or excluded (false, set
    ## by the "~" negation operator)
    case condition.kind:
        of identExpr, genericExpr:
            # Leaf: a plain (possibly generic) type name
            var typ = self.inferOrError(condition).kind
            if self.compare(typ, "auto".toIntrinsic()):
                self.error("automatic types cannot be used within type constraints", condition)
            list.add((accept, typ, condition))
        of binaryExpr:
            let condition = BinaryExpr(condition)
            case condition.operator.lexeme:
                of "|":
                    # Union: unpack both sides
                    self.unpackTypes(condition.a, list)
                    self.unpackTypes(condition.b, list)
                else:
                    self.error("invalid type constraint", condition)
        of unaryExpr:
            let condition = UnaryExpr(condition)
            case condition.operator.lexeme:
                of "~":
                    # Negation: the operand is marked as excluded
                    self.unpackTypes(condition.a, list, accept=false)
                else:
                    # Fix: the message previously read "invalid type constraint in",
                    # with a dangling "in" — made it consistent with the other branches
                    self.error("invalid type constraint", condition)
        else:
            self.error("invalid type constraint", condition)
|
|
|
|
|
|
proc dispatchPragmas(self: TypeChecker, name: Name) =
    ## Dispatches immediate pragmas bound to objects. Delayed pragmas are
    ## skipped here and run on first use (see dispatchDelayedPragmas)
    if name.node.isNil():
        return
    # Select the pragma list according to the kind of node the name wraps
    var bound: seq[Pragma] = @[]
    case name.node.kind:
        of NodeKind.funDecl, NodeKind.typeDecl, NodeKind.varDecl:
            bound = Declaration(name.node).pragmas
        of NodeKind.lambdaExpr:
            bound = LambdaExpr(name.node).pragmas
        else:
            discard # Unreachable
    for pragma in bound:
        let lexeme = pragma.name.token.lexeme
        if lexeme notin self.pragmas:
            self.error(&"unknown pragma '{lexeme}'")
        let entry = self.pragmas[lexeme]
        # Only immediate pragmas run at declaration time
        if entry.kind == Immediate:
            entry.handler(self, pragma, name)
|
|
|
|
|
|
proc dispatchDelayedPragmas(self: TypeChecker, name: Name) {.used.} =
    ## Dispatches delayed pragmas bound to objects once they
    ## are used
    if name.node.isNil():
        return
    var pragmas: seq[Pragma] = @[]
    # Fix/consistency: select the pragma list according to the node's kind,
    # exactly like dispatchPragmas() does. The previous code unconditionally
    # converted name.node to Declaration, which raises an
    # ObjectConversionDefect when the name wraps a lambda expression (or any
    # other non-declaration node)
    case name.node.kind:
        of NodeKind.funDecl, NodeKind.typeDecl, NodeKind.varDecl:
            pragmas = Declaration(name.node).pragmas
        of NodeKind.lambdaExpr:
            pragmas = LambdaExpr(name.node).pragmas
        else:
            discard # Unreachable
    var f: PragmaFunc
    for pragma in pragmas:
        if pragma.name.token.lexeme notin self.pragmas:
            self.error(&"unknown pragma '{pragma.name.token.lexeme}'")
        f = self.pragmas[pragma.name.token.lexeme]
        # Immediate pragmas already ran at declaration time
        if f.kind == Immediate:
            continue
        f.handler(self, pragma, name)
|
|
|
|
|
|
proc addName(self: TypeChecker, name: Name) =
    ## Adds a name to the current lexical scope, erroring out on illegal
    ## re-declarations of variables, modules, structures and enum entries
    let key = name.ident.token.lexeme
    var scope = self.names[self.scopeDepth]
    if key in scope:
        for existing in scope[key]:
            if name.valueType.kind == TypeKind.Function:
                # We don't check for name clashes for functions because self.match() does that
                continue
            let clashes = existing.kind in [NameKind.Var, NameKind.Module] or
                          existing.valueType.kind in [TypeKind.Structure, TypeKind.EnumEntry]
            if clashes and name.owner == existing.owner:
                self.error(&"re-declaration of '{existing.ident.token.lexeme}' is not allowed (previously declared in {existing.owner.ident.token.lexeme}:{existing.ident.token.line}:{existing.ident.token.relPos.start})", name.node)
    else:
        # First occurrence of this lexeme in the scope
        scope[key] = @[]
    scope[key].add(name)
|
|
|
|
|
|
proc declare(self: TypeChecker, node: ASTNode): Name {.discardable.} =
    ## Declares a name into the current scope: builds a Name object for
    ## the given declaration node, registers it via addName() and runs
    ## its immediate pragmas. Returns the new Name (nil for node kinds
    ## that are not handled here yet)
    # NOTE(review): `scope` is never used in this proc (addName() does the
    # actual insertion) — presumably left over from a refactor
    var scope = self.names[self.scopeDepth]
    var name: Name
    # NOTE(review): `declaredName` is assigned in the branches below but
    # never read afterwards
    var declaredName: string = ""
    case node.kind:
        of NodeKind.varDecl:
            var node = VarDecl(node)
            declaredName = node.name.token.lexeme
            # Creates a new Name entry so that self.identifier can find it later
            name = Name(depth: self.scopeDepth,
                        ident: node.name,
                        isPrivate: node.isPrivate,
                        module: self.currentModule,
                        file: self.file,
                        valueType: nil, # Done later in varDecl (for better semantics)
                        line: node.token.line,
                        owner: self.currentFunction,
                        kind: NameKind.Var,
                        node: node,
                        )
            self.addName(name)
        of NodeKind.funDecl:
            # NOTE(review): function declarations are not registered here,
            # so declare() returns nil for them — funDecl() callers must
            # account for that
            discard
        of NodeKind.importStmt:
            discard
        of NodeKind.typeDecl:
            var node = TypeDecl(node)
            declaredName = node.name.token.lexeme
            # Structure skeleton: fields and generics are filled in later
            # by typeDecl()/declareGenerics()
            var kind: Type = Type(kind: Structure,
                                  name: declaredName,
                                  genericTypes: newTable[string, Type](),
                                  genericValues: newTable[string, Type](),
                                  fields: newTable[string, Type](),
                                  interfaces: @[],
                                  isEnum: node.isEnum)
            if node.isRef:
                # "ref type" declarations get wrapped into a reference type
                kind = kind.toRef()
            name = Name(depth: self.scopeDepth,
                        module: self.currentModule,
                        node: node,
                        ident: node.name,
                        line: node.name.token.line,
                        isPrivate: node.isPrivate,
                        owner: self.currentFunction,
                        valueType: kind)
            self.addName(name)
        else:
            discard # TODO: enums
    if not name.isNil():
        # Run any {.pragma.} annotations attached to the declaration
        self.dispatchPragmas(name)
    return name
|
|
|
|
|
|
proc identifier(self: TypeChecker, node: IdentExpr): TypedExpr =
    ## Typechecks name resolution: looks the identifier up in the
    ## current scopes (erroring out if it is undefined) and wraps
    ## it into a typed identifier expression
    let resolved = self.findOrError(node.name.lexeme, node=node)
    result = newTypedIdentExpr(node, resolved)
|
|
|
|
|
|
proc unary(self: TypeChecker, node: UnaryExpr): TypedUnaryExpr =
    ## Typechecks unary expressions by resolving the operator as a
    ## one-argument function call
    var
        default: TypedExpr  # nil: operator parameters carry no default
        typeOfA = self.infer(node.a)
    # Build the function signature we're looking for: (typeof(a)) -> any
    let fn = Type(kind: Function, returnType: Type(kind: Any), parameters: @[("", typeOfA.kind, default)])
    let name = self.match(node.token.lexeme, fn.parameters, node)
    # NOTE(review): specialize() errors out when the matched implementation
    # is not generic — confirm every unary operator implementation is generic,
    # or guard this call
    let impl = self.specialize(name, @[self.expression(node.a)])
    result = newTypedUnaryExpr(node, impl.returnType, typeOfA)
|
|
|
|
|
|
proc binary(self: TypeChecker, node: BinaryExpr): TypedBinaryExpr =
    ## Typechecks binary expressions by resolving the operator as a
    ## two-argument function call
    var
        default: TypedExpr  # nil: operator parameters carry no default
        typeOfA = self.infer(node.a)
        typeOfB = self.infer(node.b)
    # Build the function signature we're looking for:
    # (typeof(a), typeof(b)) -> any
    let fn = Type(kind: Function,
                  returnType: Type(kind: Any),
                  parameters: @[("", typeOfA.kind, default), ("", typeOfB.kind, default)])
    let name = self.match(node.token.lexeme, fn.parameters, node)
    # Fix: pass *both* operands to specialize(). The previous code only
    # passed node.a (copy-pasted from unary()), so binary operators were
    # always specialized with a single argument and specialize() would
    # reject them as partial generic instantiations
    let impl = self.specialize(name, @[self.expression(node.a), self.expression(node.b)])
    result = newTypedBinaryExpr(node, impl.returnType, typeOfA, typeOfB)
|
|
|
|
|
|
proc genericExpr(self: TypeChecker, node: GenericExpr): TypedExpr =
    ## Typechecks generic instantiation expressions (e.g. Foo[int]):
    ## typechecks every generic argument, resolves the generic's name
    ## and specializes it with the given arguments
    var typedArgs = newSeqOfCap[TypedExpr](node.args.len())
    for argument in node.args:
        typedArgs.add(self.expression(argument))
    let generic = self.findOrError(node.ident.token.lexeme)
    result = newTypedExpr(node, self.specialize(generic, typedArgs, node))
|
|
|
|
|
|
proc call(self: TypeChecker, node: CallExpr): TypedExpr =
    ## Typechecks call expressions. This includes
    ## things like object and enum construction
    # Build the call's type signature (positional arguments first, then
    # keyword ones) alongside the typed argument expressions
    var args: TypeSignature = @[]
    var argExpr: seq[TypedExpr] = @[]
    var default: TypedExpr  # nil: call arguments carry no default value
    var kind: Type
    for i, argument in node.arguments.positionals:
        kind = self.inferOrError(argument).kind
        args.add(("", kind, default))
        argExpr.add(self.expression(argument))
    for i, argument in node.arguments.keyword:
        # NOTE(review): keyword arguments use infer() rather than
        # inferOrError() like the positional branch above — confirm a nil
        # type is acceptable here
        kind = self.infer(argument.value).kind
        args.add((argument.name.token.lexeme, kind, default))
        argExpr.add(self.expression(argument.value))
    # Dispatch on what is being called
    case node.callee.kind:
        of NodeKind.identExpr:
            # Calls like hi()
            var impl = self.match(IdentExpr(node.callee).name.lexeme, args, node)
            # Delayed pragmas fire on first use of the name
            self.dispatchDelayedPragmas(impl)
            var typ = self.specialize(impl, argExpr)
            case typ.kind:
                of Structure:
                    # Object construction
                    # TODO
                    result = newTypedExpr(node, typ)
                of Function:
                    # Regular function call: re-shape the signature into
                    # the tuple layout newTypedCallExpr expects
                    var typedArgs: seq[tuple[name: string, kind: Type, default: TypedExpr]] = @[]
                    for arg in args:
                        if not arg.default.isNil():
                            typedArgs.add((arg.name, arg.kind, arg.default))
                        else:
                            typedArgs.add((arg.name, arg.kind, nil))
                    result = newTypedCallExpr(node, impl, typedArgs)
                else:
                    # TODO?
                    discard
        of NodeKind.callExpr:
            # Calling a call expression, like hello()()
            # TODO
            #[
            var node: Expression = node
            var all: seq[CallExpr] = @[]
            # Since there can be as many consecutive calls as
            # the user wants, we need to "extract" all of them
            while CallExpr(node).callee.kind == callExpr:
                all.add(CallExpr(CallExpr(node).callee))
                node = CallExpr(node).callee
            # Now that we know how many call expressions we
            # need to compile, we start from the outermost
            # one and work our way to the innermost call
            for exp in all:
                result = self.call(exp)
            ]#
            discard
        of NodeKind.getItemExpr:
            # Calling a.b()
            # TODO
            let node = GetItemExpr(node.callee)
        of NodeKind.lambdaExpr:
            # Calling a lambda on the fly
            var node = LambdaExpr(node.callee)
            # TODO
        of NodeKind.genericExpr:
            # Calling a generic expression
            # TODO
            var node = GenericExpr(node.callee)
            # TODO
        else:
            # Anything else is not callable: report the most useful error
            # we can produce
            let typ = self.infer(node.callee)
            if typ.isNil():
                self.error(&"expression has no type", node)
            else:
                self.error(&"object of type '{self.stringify(typ)}' is not callable", node)
|
|
|
|
|
|
proc refExpr(self: TypeChecker, node: Ref): TypedExpr =
    ## Typechecks ref expressions: the operand must be a type,
    ## and the resulting type is its reference counterpart
    let checked = self.check(node.value, "typevar".toIntrinsic())
    checked.kind = checked.kind.toRef()
    result = checked
|
|
|
|
|
|
proc constExpr(self: TypeChecker, node: ast.Const): TypedExpr =
    ## Typechecks const expressions: any type is accepted, and the
    ## result is wrapped into its constant counterpart
    result = self.check(node.value, "any".toIntrinsic().toConst())
    result.kind = result.kind.toConst()
|
|
|
|
|
|
proc lentExpr(self: TypeChecker, node: ast.Lent): TypedExpr =
    ## Typechecks lent expressions
    # Only references can be lent out, so we match against "ref any"
    let expected = "any".toIntrinsic().toRef()
    result = self.check(node.value, expected)
    # Wrap the resulting type back into a lent one
    result.kind = result.kind.toLent()
|
|
|
|
#[
|
|
method assignment(self: BytecodeCompiler, node: ASTNode, compile: bool = true): Type {.discardable.} =
|
|
## Typechecks assignment expressions
|
|
case node.kind:
|
|
of assignExpr:
|
|
let node = AssignExpr(node)
|
|
let name = IdentExpr(node.name)
|
|
var r = self.resolveOrError(name)
|
|
if r.constant:
|
|
self.error(&"cannot assign to '{name.token.lexeme}' (value is a constant)", name)
|
|
elif r.mutable:
|
|
self.error(&"cannot reassign '{name.token.lexeme}' (value is immutable)", name)
|
|
self.check(node.value, r.valueType)
|
|
self.expression(node.value, compile)
|
|
var position = r.position
|
|
if r.depth < self.depth and r.belongsTo != self.currentFunction:
|
|
self.warning(WarningKind.MutateOuterScope, &"mutation of '{r.ident.token.lexeme}' declared in outer scope ({r.owner.file}.pn:{r.ident.token.line}:{r.ident.token.relPos.start})", nil, node)
|
|
result = r.valueType
|
|
if not compile:
|
|
return
|
|
self.emitByte(StoreVar, node.token.line)
|
|
self.emitBytes(position.toTriple(), node.token.line)
|
|
of setItemExpr:
|
|
let node = SetItemExpr(node)
|
|
let name = IdentExpr(node.name)
|
|
var r = self.resolveOrError(name)
|
|
if r.constant:
|
|
self.error(&"cannot assign to '{name.token.lexeme}' (value is a constant)", name)
|
|
elif r.mutable:
|
|
self.error(&"cannot reassign '{name.token.lexeme}' (value is immutable)", name)
|
|
if r.valueType.kind != Structure:
|
|
self.error("only types have fields", node)
|
|
else:
|
|
self.error(&"invalid AST node of kind {node.kind} at assignment(): {node} (This is an internal error and most likely a bug)")
|
|
]#
|
|
|
|
|
|
proc expression(self: TypeChecker, node: Expression): TypedExpr =
    ## Typechecks expressions, dispatching each node kind to its
    ## dedicated handler and returning the resulting typed expression
    if node.isConst():
        # Constant literals (ints, floats, bools, ...) short-circuit here
        return self.literal(LiteralExpr(node))
    case node.kind:
        of callExpr:
            result = self.call(CallExpr(node))
        of identExpr:
            result = self.identifier(IdentExpr(node))
        of groupingExpr:
            # Parentheses carry no type semantics: unwrap and recurse
            result = self.expression(GroupingExpr(node).expression)
        of unaryExpr:
            result = self.unary(UnaryExpr(node))
        of binaryExpr:
            result = self.binary(BinaryExpr(node))
        of NodeKind.genericExpr:
            result = self.genericExpr(GenericExpr(node))
        of NodeKind.refExpr:
            result = self.refExpr(Ref(node))
        of ptrExpr:
            # Handled inline (unlike ref/lent/const which have their own
            # procs): the operand must be a type, wrapped into a pointer
            result = self.check(Ref(node).value, "typevar".toIntrinsic())
            result.kind = result.kind.toPtr()
        of NodeKind.lentExpr:
            result = self.lentExpr(ast.Lent(node))
        of NodeKind.constExpr:
            result = self.constExpr(ast.Const(node))
        else:
            self.error(&"failed to compile expression of type {node.kind}")
|
|
|
|
|
|
proc blockStmt(self: TypeChecker, node: BlockStmt): TypedBlockStmt =
    ## Typechecks block statements: each declaration in the block is
    ## validated inside a fresh lexical scope
    self.beginScope()
    var checked: seq[TypedNode] = @[]
    for statement in node.code:
        checked.add(self.validate(statement))
    self.endScope()
    return newTypedBlockStmt(node, checked)
|
|
|
|
|
|
proc ifStmt(self: TypeChecker, node: IfStmt): TypedNode =
    ## Typechecks if/else statements
    # The condition must be a boolean
    let condition = self.check(node.condition, "bool".toIntrinsic())
    # Check the "then" part of "if-then-else"
    let then = TypedBlockStmt(self.validate(node.thenBranch))
    # Check the "else" part, but only if there is one. The previous code
    # validated node.elseBranch unconditionally, which dereferenced a nil
    # node for if statements without an else clause
    var otherwise: TypedBlockStmt
    if not node.elseBranch.isNil():
        otherwise = TypedBlockStmt(self.validate(node.elseBranch))
    # Note: Peon enforces the body of loops and conditionals to
    # always be a block statement (for a variety of very good reasons,
    # as it avoids mistakes like the infamous GOTO fail), so the
    # conversions here are safe
    return newTypedIfStmt(node, then, otherwise, condition)
|
|
|
|
|
|
proc whileStmt(self: TypeChecker, node: WhileStmt): TypedNode =
    ## Typechecks C-style while loops
    # The loop condition must evaluate to a boolean
    let cond = self.check(node.condition, "bool".toIntrinsic())
    # Loop bodies are guaranteed to be block statements by the parser,
    # so the conversion is safe
    let body = TypedBlockStmt(self.validate(node.body))
    result = newTypedWhileStmt(node, body, cond)
|
|
|
|
|
|
proc varDecl(self: TypeChecker, node: VarDecl): TypedVarDecl =
    ## Typechecks variable declarations: validates the initializer (if
    ## any), resolves the declared type (explicit annotation wins over
    ## inference) and binds it to the declared name
    var
        name = self.declare(node)
        init: TypedExpr  # Typed initializer, nil if the declaration has none
        typ: Type        # The variable's resolved type
    if node.value.isNil():
        # No initializer: only plain `var` declarations may omit it
        if not node.mutable:
            self.error("let declaration requires an initializer", node)
        if node.constant:
            self.error("const declaration requires an initializer", node)
    else:
        if node.constant and not node.value.isConst():
            self.error("constant initializer is not a constant", node.value)
        init = TypedExpr(self.validate(node.value))
        typ = init.kind
    if not node.valueType.isNil():
        # Explicit type declaration always takes over

        # Check that the inferred expression represents a type
        # and not a value. This is to guard against things
        # like "var x: 1 = 1;". We unwrap it immediately
        # because we don't want to assign a typevar to the
        # valueType field of the variable-- it would just
        # be redundant
        # NOTE(review): this passes the *initializer's* type as the first
        # argument rather than the declared type expression, and `typ` is
        # nil when there is no initializer — confirm this check() overload
        # resolves node.valueType as intended
        typ = self.check(typ, "typevar".toIntrinsic(), node.valueType)
    if typ.isNil():
        self.error("expecting either a type declaration or an initializer value, but neither was found", node)
    # Now check that the type of the initializer, if it exists,
    # matches the type of the variable
    if not init.isNil():
        self.check(init.kind, typ)
    name.valueType = typ
    result = newTypedVarDecl(node, name, init)
|
|
|
|
|
|
proc funDecl(self: TypeChecker, node: FunDecl, name: Name = nil): TypedFunDecl =
    ## Typechecks function declarations. An already-declared Name may be
    ## passed in; otherwise one is obtained via declare()
    # Some things are just not possible
    if node.token.kind == Operator and node.name.token.lexeme in [".", ]:
        self.error(&"Due to compiler limitations, the '{node.name.token.lexeme}' operator cannot be currently overridden", node.name)
    var name = name
    if name.isNil():
        # NOTE(review): declare() currently returns nil for funDecl nodes
        # (its funDecl branch is a no-op) — confirm callers always pass a
        # non-nil name, or the field accesses below will crash
        name = self.declare(node)
    var node = node
    # Start with an empty typed body; statements are appended below
    result = newTypedFunDecl(node, name, newTypedBlockStmt(BlockStmt(node.body), @[]))
    # Begin a new scope
    self.beginScope()
    # First we declare the function's generics, if it has any
    self.declareGenerics(name)
    # We now declare and typecheck the function's
    # arguments
    if not node.returnType.isNil():
        # The function needs a return type too!
        name.valueType.returnType = self.inferOrError(node.returnType).kind
        # TODO
        # name.valueType.returnType = self.check(node.returnType, "typevar".toIntrinsic()).kind
        # Mark functions whose return type is "auto" so it can be
        # inferred from the body later
        if not name.valueType.isAuto and self.compare(name.valueType.returnType, "auto".toIntrinsic()):
            name.valueType.isAuto = true
    if node.body.isNil():
        # Forward declaration
        # TODO
        self.endScope()
        return
    # We store the current function to restore
    # it later
    let function = self.currentFunction
    self.currentFunction = name
    if BlockStmt(node.body).code.len() == 0:
        self.error("cannot declare function with empty body")
    for decl in BlockStmt(node.body).code:
        result.body.body.add(self.validate(decl))
    self.endScope()
    # Restores the enclosing function (if any).
    # Makes nested calls work (including recursion)
    self.currentFunction = function
|
|
|
|
|
|
proc declareGenerics(self: TypeChecker, name: Name) =
    ## Helper to declare the generic arguments of the
    ## given name, if it has any. Each generic parameter becomes a Name
    ## of kind Default in the current scope whose type is the Union of
    ## its constraints, and is also recorded in the owner's
    ## genericTypes/genericValues tables
    if name.valueType.kind notin [TypeKind.Structure, TypeKind.Function]:
        return
    var
        constraints: seq[tuple[match: bool, kind: Type, value: Expression]] = @[]
        value: Expression  # nil placeholder for unconstrained generics
    # Generic *type* parameters (e.g. [T: int | float])
    for gen in name.node.genericTypes:
        if gen.cond.isNil():
            # No constraint: accept any type
            constraints = @[(match: true, kind: "any".toIntrinsic(), value: value)]
        else:
            self.unpackTypes(gen.cond, constraints)
        let generic = Name(kind: Default,
                           ident: gen.name,
                           module: self.currentModule,
                           owner: self.currentFunction,
                           file: self.currentModule.file,
                           depth: self.scopeDepth,
                           isPrivate: true,
                           valueType: Type(kind: Union, types: constraints),
                           line: gen.name.token.line,
                           )
        self.addName(generic)
        name.valueType.genericTypes[gen.name.token.lexeme] = generic.valueType
        # Reset for the next parameter (the Union above holds a copy)
        constraints.setLen(0)

    # Generic *value* parameters, handled the same way except the Union
    # type is unwrapped (values, not typevars)
    for gen in name.node.genericValues:
        if gen.cond.isNil():
            constraints = @[(match: true, kind: "any".toIntrinsic(), value: value)]
        else:
            self.unpackTypes(gen.cond, constraints)
        let generic = Name(kind: Default,
                           ident: gen.name,
                           module: self.currentModule,
                           owner: self.currentFunction,
                           file: self.currentModule.file,
                           depth: self.scopeDepth,
                           isPrivate: true,
                           valueType: Type(kind: Union, types: constraints).unwrapType(),
                           line: gen.name.token.line,
                           )
        self.addName(generic)
        name.valueType.genericValues[gen.name.token.lexeme] = generic.valueType
        constraints.setLen(0)
|
|
|
|
|
|
|
|
proc typeDecl(self: TypeChecker, node: TypeDecl, name: Name = nil): TypedTypeDecl =
    ## Typechecks type declarations: structures, enums, type aliases
    ## and untagged type unions. An already-declared Name may be passed
    ## in; otherwise one is obtained via declare()
    var name = name
    if name.isNil():
        name = self.declare(node)
    result = newTypedTypeDecl(node, name, newTable[string, TypedExpr](), nil)
    self.beginScope()
    # Declare the type's generics
    self.declareGenerics(name)
    if node.value.isNil():
        # Type is not a type union nor a type alias
        if not node.isEnum:
            # Type is a structure type: typecheck each field
            var fieldType: TypedExpr
            var n: Name
            for field in node.fields:
                fieldType = self.infer(field.valueType)
                if not node.isRef:
                    # Check for self-recursion of non-ref types (which would require
                    # infinite memory)
                    n = fieldType.getName()
                    if n.isNil():
                        # Expression has no associated name: cannot self-recurse
                        continue
                    if name == n:
                        self.error(&"illegal self-recursion in member '{field.name.token.lexeme}' for non-ref type '{name.ident.token.lexeme}'", fieldType.node)
                result.fields[field.name.token.lexeme] = fieldType
                name.valueType.fields[field.name.token.lexeme] = fieldType.kind
        else:
            # Type is a variant type (aka enum). We'll only declare a single
            # object (the enum type itself) so that we don't pollute the
            # global namespace with unqualified enum members
            # TODO
            discard
    else:
        case node.value.kind:
            of identExpr:
                # Type alias
                name.valueType = self.inferOrError(node.value).kind
            of binaryExpr, unaryExpr:
                # Untagged type union (e.g. "int | float")
                name.valueType = Type(kind: Union, types: @[])
                self.unpackTypes(node.value, name.valueType.types)
            else:
                # Unreachable: the parser only produces the node kinds above
                discard
    if not node.parent.isNil():
        # Ensure parent is actually a type
        var subtype = self.check(node.parent, "typevar".toIntrinsic())
        # Grab its name object
        var parentName = subtype.getName()
        # This should *never* be nil
        if parentName.isNil():
            self.error(&"could not obtain name information for the given object: is it a type?", node.parent)
        result.parent = parentName
        # Inherit the parent's fields, rejecting re-declarations
        for field in TypeDecl(result.parent.node).fields:
            if result.fields.hasKey(field.name.token.lexeme):
                for f in TypeDecl(result.node).fields:
                    if f.name.token.lexeme == field.name.token.lexeme:
                        # This always eventually runs.
                        # Fix: error message grammar ("cannot to re-declare"
                        # -> "cannot re-declare")
                        self.error(&"cannot re-declare type member '{field}'", f.name)
            result.fields[field.name.token.lexeme] = newTypedExpr(field.name, result.parent.valueType.fields[field.name.token.lexeme])
    # Turn the declared type into a typevar so that future references
    # to it will be distinct from its instances
    if not name.valueType.intrinsic:
        name.valueType = name.valueType.wrapType()
    # TODO: Check interfaces
    self.endScope()
|
|
|
|
|
|
proc pragmaExpr(self: TypeChecker, pragma: Pragma) =
    ## Validates pragma expressions (not bound to a name): looks the
    ## pragma up and invokes its handler with a nil name
    let lexeme = pragma.name.token.lexeme
    if lexeme notin self.pragmas:
        self.error(&"unknown pragma '{lexeme}'")
    self.pragmas[lexeme].handler(self, pragma, nil)
|
|
|
|
|
|
proc validate(self: TypeChecker, node: ASTNode): TypedNode =
    ## Dispatches typeless AST nodes to typecheck them and turn
    ## them into typed ones. Pragma expressions produce no typed
    ## node, so result is nil for them
    case node.kind:
        of binaryExpr, unaryExpr, NodeKind.genericExpr, identExpr,
           groupingExpr, callExpr, intExpr, floatExpr, octExpr,
           binExpr, hexExpr, trueExpr, falseExpr, nanExpr, infExpr:
            # All expression kinds funnel through expression()
            result = self.expression(Expression(node))
        of exprStmt:
            # An expression used as a statement: wrap its typed form
            let statement = ExprStmt(node)
            result = TypedExprStmt(node: statement, expression: TypedExpr(self.validate(statement.expression)))
        of NodeKind.whileStmt:
            result = self.whileStmt(WhileStmt(node))
        of NodeKind.blockStmt:
            result = self.blockStmt(BlockStmt(node))
        of NodeKind.ifStmt:
            result = self.ifStmt(IfStmt(node))
        of NodeKind.varDecl:
            result = self.varDecl(VarDecl(node))
        of NodeKind.funDecl:
            result = self.funDecl(FunDecl(node))
        of NodeKind.typeDecl:
            result = self.typeDecl(TypeDecl(node))
        of NodeKind.pragmaExpr:
            # Pragma "expressions" (they're more like compiler directives)
            # don't really return anything
            self.pragmaExpr(Pragma(node))
        else:
            self.error(&"failed to dispatch node of type {node.kind}", node)
|
|
|
|
|
|
proc validate*(self: TypeChecker, tree: ParseTree, file, source: string, showMismatches: bool = false,
               disabledWarnings: seq[WarningKind] = @[]): seq[TypedNode] =
    ## Transforms a sequence of typeless AST nodes
    ## into a sequence of typed AST nodes. Sets up the global scope,
    ## the implicit module and main-function names, then validates
    ## every top-level node of the parse tree
    self.file = file
    self.source = source
    self.tree = tree
    self.current = 0
    # Starts at -1 so the beginScope() below brings us to depth 0
    # (the global scope)
    self.scopeDepth = -1
    self.showMismatches = showMismatches
    self.disabledWarnings = disabledWarnings
    self.names = @[]
    self.beginScope()
    # The module itself gets a Name so qualified lookups can find it
    var mainModule = Name(kind: NameKind.Module,
                          depth: 0,
                          isPrivate: true,
                          owner: nil,
                          file: self.file,
                          path: self.file,
                          ident: newIdentExpr(Token(lexeme: self.file, kind: Identifier)),
                          line: 1)
    self.addName(mainModule)
    self.currentModule = mainModule
    # Every peon program has a hidden entry point in
    # which user code is wrapped. Think of it as if
    # peon is implicitly writing the main() function
    # of your program and putting all of your code in
    # there
    var main = Name(depth: 0,
                    isPrivate: true,
                    owner: self.currentModule,
                    file: self.file,
                    valueType: Type(kind: Function,
                                    returnType: "nil".toIntrinsic(),
                                    parameters: @[],
                                    ),
                    ident: newIdentExpr(Token(lexeme: "", kind: Identifier)),
                    line: 1)
    self.addName(main)
    while not self.done():
        result.add(self.validate(self.step()))
        if result[^1].isNil():
            # validate() returns nil for pragma expressions:
            # drop those from the output
            result.delete(result.high())
    assert self.scopeDepth == 0
    # Do not close the global scope if
    # we're being imported
    if self.isMainModule:
        self.endScope()
|