Initial work on function calls
This commit is contained in:
parent
50b7b56feb
commit
9f126845fc
|
@ -155,12 +155,6 @@ type
|
|||
closedOver: seq[Name]
|
||||
|
||||
|
||||
proc `$`(self: Name): string =
  ## Returns a human-readable string representation
  ## of a Name object, useful for debugging
  result = &"Name(name='{self.name}', depth={self.depth}, owner='{self.owner}', private={self.isPrivate}, let={self.isLet}, const={self.isConst}"
  result.add(&", pos={self.codePos}, closure={self.isClosedOver}, line={self.line})")
|
||||
|
||||
|
||||
proc newCompiler*(enableOptimizations: bool = true): Compiler =
|
||||
## Initializes a new Compiler object
|
||||
new(result)
|
||||
|
@ -437,6 +431,14 @@ proc compareTypes(self: Compiler, a, b: Type): bool =
|
|||
return b == nil
|
||||
elif b == nil:
|
||||
return a == nil
|
||||
elif a.kind == Any or b.kind == Any:
|
||||
# This is needed internally: user code
|
||||
# cannot generate code for matching
|
||||
# arbitrary types, but we need it for
|
||||
# function calls and stuff like that
|
||||
# since peon doesn't have return type
|
||||
# inference
|
||||
return true
|
||||
elif a.kind != b.kind:
|
||||
# Next, we see the type discriminant:
|
||||
# If they're different, then they can't
|
||||
|
@ -463,9 +465,6 @@ proc compareTypes(self: Compiler, a, b: Type): bool =
|
|||
if a.args.len() != b.args.len():
|
||||
return false
|
||||
elif not self.compareTypes(a.returnType, b.returnType):
|
||||
if a.returnType != nil and b.returnType != nil:
|
||||
if a.returnType.kind != Any and b.returnType.kind != Any:
|
||||
return false
|
||||
return false
|
||||
for (argA, argB) in zip(a.args, b.args):
|
||||
if not self.compareTypes(argA.kind, argB.kind):
|
||||
|
@ -517,23 +516,6 @@ proc toIntrinsic(name: string): Type =
|
|||
return nil
|
||||
|
||||
|
||||
proc toIntrinsic(self: Compiler, typ: Expression): Type =
  ## Attempts to resolve the intrinsic type of the
  ## given expression, returning nil when that is
  ## not possible
  if typ == nil:
    return nil
  case typ.kind:
    of trueExpr, falseExpr, intExpr, floatExpr:
      # Literal tokens map straight onto intrinsic types
      # via their lexeme
      result = typ.token.lexeme.toIntrinsic()
    of identExpr:
      # For identifiers we prefer the inferred type and
      # only fall back to treating the lexeme itself as
      # a type name when inference fails
      let inferred = self.inferType(typ)
      result =
        if inferred == nil:
          typ.token.lexeme.toIntrinsic()
        else:
          inferred
    else:
      discard
|
||||
|
||||
|
||||
proc inferType(self: Compiler, node: LiteralExpr): Type =
|
||||
## Infers the type of a given literal expression
|
||||
if node == nil:
|
||||
|
@ -609,6 +591,15 @@ proc inferType(self: Compiler, node: Expression): Type =
|
|||
result.returnType = self.inferType(node.returnType)
|
||||
for argument in node.arguments:
|
||||
result.args.add((argument.name.token.lexeme, self.inferType(argument.valueType)))
|
||||
of callExpr:
|
||||
var node = CallExpr(node)
|
||||
case node.callee.kind:
|
||||
of identExpr:
|
||||
result = self.resolve(IdentExpr(node.callee)).valueType.returnType
|
||||
of lambdaExpr:
|
||||
result = self.inferType(LambdaExpr(node.callee).returnType)
|
||||
else:
|
||||
discard # Unreachable
|
||||
else:
|
||||
discard # Unreachable
|
||||
|
||||
|
@ -747,6 +738,22 @@ proc literal(self: Compiler, node: ASTNode) =
|
|||
self.error(&"invalid AST node of kind {node.kind} at literal(): {node} (This is an internal error and most likely a bug!)")
|
||||
|
||||
|
||||
proc findByName(self: Compiler, name: string): seq[Name] =
  ## Returns every object already declared with the
  ## given name, most recent declarations first
  for i in countdown(self.names.high(), 0):
    let candidate = self.names[i]
    if candidate.name.token.lexeme == name:
      result.add(candidate)
|
||||
|
||||
|
||||
proc findByType(self: Compiler, name: string, kind: Type): seq[Name] =
  ## Returns every object already declared with the
  ## given name whose type is compatible with kind
  for candidate in self.findByName(name):
    if not self.compareTypes(candidate.valueType, kind):
      continue
    result.add(candidate)
|
||||
|
||||
|
||||
proc matchImpl(self: Compiler, name: string, kind: Type): Name =
|
||||
## Tries to find a matching function implementation
|
||||
## compatible with the given type and returns its
|
||||
|
@ -964,22 +971,6 @@ proc identifier(self: Compiler, node: IdentExpr) =
|
|||
self.emitBytes(self.closedOver.high().toTriple())
|
||||
|
||||
|
||||
proc findByName(self: Compiler, name: string): seq[Name] =
  ## Collects all previously declared objects matching
  ## the given name, walking the name list backwards so
  ## the newest declarations come first
  for entry in reversed(self.names):
    if entry.name.token.lexeme == name:
      result.add(entry)
|
||||
|
||||
|
||||
proc findByType(self: Compiler, name: string, kind: Type): seq[Name] =
  ## Collects all previously declared objects matching
  ## both the given name and the given type
  for entry in self.findByName(name):
    if self.compareTypes(entry.valueType, kind):
      result.add(entry)
|
||||
|
||||
|
||||
proc assignment(self: Compiler, node: ASTNode) =
|
||||
## Compiles assignment expressions
|
||||
case node.kind:
|
||||
|
@ -1018,32 +1009,28 @@ proc beginScope(self: Compiler) =
|
|||
## Begins a new local scope by incrementing the current
|
||||
## scope's depth
|
||||
inc(self.scopeDepth)
|
||||
|
||||
|
||||
|
||||
proc endScope(self: Compiler) =
|
||||
## Ends the current local scope
|
||||
if self.scopeDepth < 0:
|
||||
self.error("cannot call endScope with scopeDepth < 0 (This is an internal error and most likely a bug)")
|
||||
dec(self.scopeDepth)
|
||||
var popped: int = 0
|
||||
var name: Name
|
||||
var indeces: seq[int] = @[]
|
||||
for i, ident in reversed(self.names):
|
||||
if ident.depth > self.scopeDepth and ident.valueType.kind != TypeKind.Function:
|
||||
inc(popped)
|
||||
name = self.names[self.names.high() - i]
|
||||
var names: seq[Name] = @[]
|
||||
for name in self.names:
|
||||
if name.depth > self.scopeDepth:
|
||||
if name.valueType.kind != Function and OpCode(self.chunk.code[name.codePos]) == NoOp:
|
||||
for _ in countup(0, 3):
|
||||
# Since by deleting it the size of the
|
||||
# sequence decreases, we don't need to
|
||||
# increase the index
|
||||
self.chunk.code.delete(name.codePos)
|
||||
indeces.add(self.names.high() - i)
|
||||
names.add(name)
|
||||
if not self.enableOptimizations:
|
||||
# All variables with a scope depth larger than the current one
|
||||
# are now out of scope. Begone, you're now homeless!
|
||||
self.emitByte(Pop)
|
||||
if self.enableOptimizations and popped > 1:
|
||||
if self.enableOptimizations and len(names) > 1:
|
||||
# If we're popping less than 65535 variables, then
|
||||
# we can emit a PopN instruction. This is true for
|
||||
# 99.99999% of the use cases of the language (who the
|
||||
|
@ -1051,20 +1038,31 @@ proc endScope(self: Compiler) =
|
|||
# if you'll ever use more then Peon will emit a PopN instruction
|
||||
# for the first 65 thousand and change local variables and then
|
||||
# emit another batch of plain ol' Pop instructions for the rest
|
||||
if popped <= uint16.high().int():
|
||||
self.emitByte(PopN)
|
||||
self.emitBytes(popped.toDouble())
|
||||
else:
|
||||
self.emitByte(PopN)
|
||||
self.emitBytes(uint16.high().int.toDouble())
|
||||
for i in countdown(self.names.high(), popped - uint16.high().int()):
|
||||
self.emitByte(PopN)
|
||||
self.emitBytes(len(names).toDouble())
|
||||
if len(names) > uint16.high().int():
|
||||
for i in countdown(self.names.high(), len(names) - uint16.high().int()):
|
||||
if self.names[i].depth > self.scopeDepth:
|
||||
self.emitByte(Pop)
|
||||
elif popped == 1:
|
||||
elif len(names) == 1:
|
||||
# We only emit PopN if we're popping more than one value
|
||||
self.emitByte(Pop)
|
||||
for index in indeces:
|
||||
self.names.delete(index)
|
||||
# This seems *really* slow, but
|
||||
# what else should I do? Nim doesn't
|
||||
# allow the removal of items during
|
||||
# seq iteration so ¯\_(ツ)_/¯
|
||||
var idx = 0
|
||||
while idx < self.names.len():
|
||||
for name in names:
|
||||
if self.names[idx] == name:
|
||||
self.names.delete(idx)
|
||||
inc(idx)
|
||||
idx = 0
|
||||
while idx < self.closedOver.len():
|
||||
for name in names:
|
||||
if name.isClosedOver:
|
||||
self.closedOver.delete(idx)
|
||||
inc(idx)
|
||||
|
||||
|
||||
proc blockStmt(self: Compiler, node: BlockStmt) =
|
||||
|
@ -1132,6 +1130,40 @@ proc whileStmt(self: Compiler, node: WhileStmt) =
|
|||
self.emitLoop(start)
|
||||
|
||||
|
||||
proc callFunction(self: Compiler, node: CallExpr) =
  ## Compiles code to call a function. Infers the type of
  ## each positional argument, finds a matching overload for
  ## the callee, emits a (later-patched) return address
  ## constant, compiles the arguments and finally emits the
  ## Call instruction
  var args: seq[tuple[name: string, kind: Type]] = @[]
  var kind: Type
  # TODO: Keyword arguments
  for i, argument in node.arguments.positionals:
    kind = self.inferType(argument)
    if kind == nil:
      if argument.kind == identExpr:
        self.error(&"reference to undeclared identifier '{IdentExpr(argument).name.lexeme}'")
      self.error(&"cannot infer the type of argument {i + 1} in function call")
    # Positional arguments carry no name, hence the empty string
    args.add(("", kind))
  for argument in node.arguments.keyword:
    # TODO: Keyword arguments are not compiled yet
    discard
  if args.len() >= 16777216:
    # The argument count is serialized as a 3-byte integer,
    # so at most 2^24 - 1 arguments can be passed.
    # No interpolation here, so a plain string literal is
    # enough (no need for strformat's `&`)
    self.error("cannot pass more than 16777215 arguments")
  var funct: Name
  case node.callee.kind:
    of identExpr:
      # We match with an Any return type because calls cannot
      # dispatch on the return type alone: any implementation
      # with compatible arguments will do
      funct = self.matchImpl(IdentExpr(node.callee).name.lexeme, Type(kind: Function, returnType: Type(kind: Any), args: args))
    else:
      # NOTE(review): funct stays nil on this path, so the
      # emitBytes(funct.codePos...) below would crash for
      # lambda callees until this TODO is implemented
      discard # TODO: Lambdas
  self.emitByte(LoadUInt32)
  # The return address is not known yet: we reserve a
  # placeholder constant and patch it later!
  let idx = self.chunk.consts.len()
  self.emitBytes(self.chunk.writeConstant((0xffffffff'u32).toQuad()))
  for argument in node.arguments.positionals:
    self.expression(argument)
  self.emitByte(Call) # Creates a stack frame
  self.emitBytes(funct.codePos.toTriple())
  self.emitBytes(args.len().toTriple())
  self.patchReturnAddress(idx)
|
||||
|
||||
|
||||
proc expression(self: Compiler, node: Expression) =
|
||||
## Compiles all expressions
|
||||
if self.inferType(node) == nil:
|
||||
|
@ -1141,7 +1173,7 @@ proc expression(self: Compiler, node: Expression) =
|
|||
self.error("expression has no type")
|
||||
case node.kind:
|
||||
of callExpr:
|
||||
discard # TODO
|
||||
self.callFunction(CallExpr(node)) # TODO
|
||||
of getItemExpr:
|
||||
discard # TODO
|
||||
# Note that for setItem and assign we don't convert
|
||||
|
|
Loading…
Reference in New Issue