Updates to README. Cleanup & refactoring

Mattia Giambirtone 2023-10-01 13:15:39 +02:00
parent 5fa463d9b4
commit c1a2c9bc55
Signed by: nocturn9x
GPG Key ID: 8270F9F467971E59
13 changed files with 231 additions and 226 deletions

View File

@ -5,6 +5,11 @@ Peon is a modern, multi-paradigm, async-first programming language with a focus
[Go to the Manual](docs/manual.md) [Go to the Manual](docs/manual.md)
## DISCLAIMER
Peon is currently being rewritten, but the current design just doesn't allow for that to happen within this codebase. Hence, a [new repository](https://git.nocturn9x.space/nocturn9x/peon-rewrite)
has been created to start from a blank slate, taking what I learned from Peon 0.1.x and making it significantly better.
## What's peon? ## What's peon?
__Note__: For simplicity reasons, the verbs in this section refer to the present even though part of what's described here is not implemented yet. __Note__: For simplicity reasons, the verbs in this section refer to the present even though part of what's described here is not implemented yet.
@ -123,4 +128,4 @@ out for yourself. Fortunately, the process is quite straightforward:
peon. Hopefully I will automate this soon, but as of right now the work is all manual peon. Hopefully I will automate this soon, but as of right now the work is all manual
__Note__: On Linux, peon will also look into `~/.local/peon/stdlib` by default, so you can just create the `~/.local/peon` folder and copy `src/peon/stdlib` there __Note__: On Linux, peon will also look into `~/.local/peon/stdlib` by default, so you can just create the `~/.local/peon` folder and copy `src/peon/stdlib` there

View File

@ -26,8 +26,8 @@ def main() -> int:
try: try:
cmd = f"nim {NIM_FLAGS} r src/main.nim {test_file} {PEON_FLAGS}" cmd = f"nim {NIM_FLAGS} r src/main.nim {test_file} {PEON_FLAGS}"
out = subprocess.run(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE) out = subprocess.run(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out.check_returncode()
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
print(f"An error occurred while executing test -> {type(e).__name__}: {e}")
failed.add(test_file) failed.add(test_file)
continue continue
if not all(map(lambda s: s == b"true", out.stdout.splitlines())): if not all(map(lambda s: s == b"true", out.stdout.splitlines())):

View File

@ -32,7 +32,6 @@ import std/sets
import std/monotimes import std/monotimes
when debugVM or debugMem or debugGC or debugAlloc: when debugVM or debugMem or debugGC or debugAlloc:
import std/sequtils
import std/terminal import std/terminal
@ -227,17 +226,15 @@ proc markRoots(self: var PeonVM): HashSet[ptr HeapObject] =
# the object to be reachable (potentially leading to a nasty # the object to be reachable (potentially leading to a nasty
# memory leak). Hopefully, in a 64-bit address space, this # memory leak). Hopefully, in a 64-bit address space, this
# occurrence is rare enough for us to ignore # occurrence is rare enough for us to ignore
var result = initHashSet[uint64](self.gc.pointers.len()) result = initHashSet[ptr HeapObject](self.gc.pointers.len())
for obj in self.calls: for obj in self.calls:
if obj in self.gc.pointers: if obj in self.gc.pointers:
result.incl(obj) result.incl(cast[ptr HeapObject](obj))
for obj in self.operands: for obj in self.operands:
if obj in self.gc.pointers: if obj in self.gc.pointers:
result.incl(obj) result.incl(cast[ptr HeapObject](obj))
var obj: ptr HeapObject
for p in result: for p in result:
obj = cast[ptr HeapObject](p) if p.mark():
if obj.mark():
when debugMarkGC: when debugMarkGC:
echo &"DEBUG - GC: Marked object: {obj[]}" echo &"DEBUG - GC: Marked object: {obj[]}"
when debugGC: when debugGC:
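
The comment above describes the VM's conservative root-marking strategy: any stack slot whose bit pattern matches a tracked allocation is treated as a live pointer, accepting the occasional false positive. A minimal standalone sketch of that idea in Nim (not the VM's real code; the HeapObject layout and the stacks are simplified):

```nim
import std/sets

type HeapObject = object
  marked: bool

proc markRoots(stack: seq[uint64], tracked: HashSet[uint64]): seq[ptr HeapObject] =
  ## Conservative root scan: a slot that merely looks like a tracked pointer
  ## may keep a dead object alive, but a live object is never missed.
  for slot in stack:
    if slot in tracked:
      result.add(cast[ptr HeapObject](slot))

when isMainModule:
  var tracked = initHashSet[uint64]()
  let obj = create(HeapObject)               # manually managed allocation
  tracked.incl(cast[uint64](obj))
  let roots = markRoots(@[cast[uint64](obj), 42'u64], tracked)
  assert roots.len == 1 and roots[0] == obj
  dealloc(obj)
```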
@ -842,17 +839,9 @@ proc dispatch*(self: var PeonVM) {.inline.} =
# Pops a value off the operand stack # Pops a value off the operand stack
discard self.pop() discard self.pop()
of PushC: of PushC:
# Pushes a value from the operand stack # Pops a value off the operand stack
# onto the call stack # and pushes it onto the call stack
self.pushc(self.pop()) self.pushc(self.pop())
of PopRepl:
# Pops a peon object off the
# operand stack and prints it.
# Used in interactive REPL mode
if self.frames.len() !> 1:
discard self.pop()
continue
echo self.pop()
of PopN: of PopN:
# Pops N elements off the call stack # Pops N elements off the call stack
for _ in 0..<int(self.readShort()): for _ in 0..<int(self.readShort()):
@ -1003,7 +992,10 @@ proc dispatch*(self: var PeonVM) {.inline.} =
of Float32LessOrEqual: of Float32LessOrEqual:
self.push(self.getBool(cast[float32](self.pop()) <= cast[float32](self.pop()))) self.push(self.getBool(cast[float32](self.pop()) <= cast[float32](self.pop())))
of Identity: of Identity:
# Identity is implemented simply as pointer equality :)
self.push(cast[uint64](self.pop() == self.pop())) self.push(cast[uint64](self.pop() == self.pop()))
of LogicalNot:
self.push(uint64(not self.pop().bool))
# Print opcodes # Print opcodes
of PrintInt64: of PrintInt64:
echo cast[int64](self.pop()) echo cast[int64](self.pop())
@ -1033,7 +1025,7 @@ proc dispatch*(self: var PeonVM) {.inline.} =
else: else:
echo "false" echo "false"
of PrintInf: of PrintInf:
if self.pop() == 0x3: if self.pop() == self.getInf(positive=true):
echo "inf" echo "inf"
else: else:
echo "-inf" echo "-inf"
@ -1046,8 +1038,6 @@ proc dispatch*(self: var PeonVM) {.inline.} =
stdout.write("\n") stdout.write("\n")
of SysClock64: of SysClock64:
self.push(cast[uint64](getMonoTime().ticks.float() / 1_000_000_000)) self.push(cast[uint64](getMonoTime().ticks.float() / 1_000_000_000))
of LogicalNot:
self.push(uint64(not self.pop().bool))
else: else:
discard discard
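
Taken together, these stack opcodes follow a simple discipline: Pop discards the top operand, PushC moves the top operand onto the call stack, and Identity pushes the bitwise equality of the two popped values. A toy dispatch loop illustrating that discipline (a sketch with invented ToyOp names, not peon's actual VM):

```nim
type ToyOp = enum Pop, PushC, Identity

proc run(code: openArray[ToyOp]; operands, calls: var seq[uint64]) =
  for op in code:
    case op
    of Pop:
      discard operands.pop()                     # drop the top operand
    of PushC:
      calls.add(operands.pop())                  # operand stack -> call stack
    of Identity:
      # identity as plain bitwise/pointer equality of the two popped values
      operands.add(uint64(operands.pop() == operands.pop()))

when isMainModule:
  var operands = @[7'u64, 7'u64, 1'u64]
  var calls: seq[uint64] = @[]
  run([Pop, Identity], operands, calls)
  assert operands == @[1'u64]                    # 7 == 7 -> true (1)
```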

View File

@ -16,10 +16,8 @@
import std/tables import std/tables
import std/strformat import std/strformat
import std/algorithm import std/algorithm
import std/parseutils
import std/strutils import std/strutils
import std/sequtils import std/sequtils
import std/sets
import std/os import std/os
import std/terminal import std/terminal
import std/hashes import std/hashes
@ -54,7 +52,8 @@ type
UInt32, Int64, UInt64, Float32, Float64, UInt32, Int64, UInt64, Float32, Float64,
Char, Byte, String, Function, CustomType, Char, Byte, String, Function, CustomType,
Nil, Nan, Bool, Inf, Typevar, Generic, Nil, Nan, Bool, Inf, Typevar, Generic,
Reference, Pointer, Any, All, Union, Auto Reference, Pointer, Any, All, Union, Auto,
Array
Type* = ref object Type* = ref object
## A wrapper around ## A wrapper around
@ -79,17 +78,22 @@ type
compiled*: bool compiled*: bool
of CustomType: of CustomType:
fields*: TableRef[string, Type] fields*: TableRef[string, Type]
decl*: TypeDecl
of Array:
elemType*: Type
size*: int
elements*: seq[Expression]
of Reference, Pointer: of Reference, Pointer:
value*: Type value*: Type
of Generic: of Generic:
# cond represents a type constraint. For # cond represents a type constraint. For
# example, fn foo[T*: int & ~uint](...) {...} # example, fn foo[T*: int & ~uint](...) {...}
# would map to [(true, int), (false, uint)] # would map to [(true, int), (false, uint)]
cond*: seq[tuple[match: bool, kind: Type]] cond*: seq[tuple[match: bool, kind: Type, value: LiteralExpr]]
asUnion*: bool # If this is true, the constraint is treated like a type union asUnion*: bool # If this is true, the constraint is treated like a type union
name*: string name*: string
of Union: of Union:
types*: seq[tuple[match: bool, kind: Type]] types*: seq[tuple[match: bool, kind: Type, value: LiteralExpr]]
of Typevar: of Typevar:
# What type do we represent? # What type do we represent?
wrapped*: Type wrapped*: Type
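
The `cond` comment above is the key to how generic bounds are stored: `fn foo[T*: int & ~uint]` becomes a flat list of (match, kind) pairs, with `match = false` marking rejected types. The sketches further down (after the `compare` and `unpackTypes` hunks) show how such a list can be checked and how it is built.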
@ -100,7 +104,7 @@ type
## A name enumeration type ## A name enumeration type
None, Module, Argument, Var, Function, CustomType, Enum None, Module, Argument, Var, Function, CustomType, Enum
Name* = ref object Name* = ref object of RootObj
## A generic name object ## A generic name object
# Type of the identifier (NOT of the value!) # Type of the identifier (NOT of the value!)
@ -192,8 +196,6 @@ type
# The current scope depth. If > 0, we're # The current scope depth. If > 0, we're
# in a local scope, otherwise it's global # in a local scope, otherwise it's global
depth*: int depth*: int
# Are we in REPL mode?
replMode*: bool
# List of all compile-time names # List of all compile-time names
names*: seq[Name] names*: seq[Name]
# Stores line data for error reporting # Stores line data for error reporting
@ -222,12 +224,6 @@ type
# Currently imported modules # Currently imported modules
modules*: TableRef[string, Name] modules*: TableRef[string, Name]
TypedNode* = ref object
## A wapper for AST nodes
## with attached type information
kind*: Type
node*: ASTNode
## Public getters for nicer error formatting ## Public getters for nicer error formatting
@ -235,19 +231,16 @@ proc getCurrentNode*(self: Compiler): ASTNode = (if self.current >= self.ast.len
proc getCurrentFunction*(self: Compiler): Declaration {.inline.} = (if self.currentFunction.isNil(): nil else: self.currentFunction.valueType.fun) proc getCurrentFunction*(self: Compiler): Declaration {.inline.} = (if self.currentFunction.isNil(): nil else: self.currentFunction.valueType.fun)
proc getSource*(self: Compiler): string {.inline.} = self.source proc getSource*(self: Compiler): string {.inline.} = self.source
## Some forward declarations (some of them arere actually stubs because nim forces forward declarations to be ## Some forward declarations (some of them are actually stubs because nim forces forward declarations to be
## implemented in the same module). They are methods because we need to dispatch to their actual specific ## implemented in the same module). Some of them are methods because we need to dispatch to their actual specific
## implementations inside each target module, so we need the runtime type of the compiler object to be ## implementations inside each target module, so we need the runtime type of the compiler object to be
## taken into account ## taken into account
method makeConcrete(self: Compiler, node: GenericExpr, compile: bool = true): Type {.base.} = nil
method expression*(self: Compiler, node: Expression, compile: bool = true): Type {.discardable, base.} = nil method expression*(self: Compiler, node: Expression, compile: bool = true): Type {.discardable, base.} = nil
method identifier*(self: Compiler, node: IdentExpr, name: Name = nil, compile: bool = true, strict: bool = true): Type {.discardable, base.} = nil method prepareFunction*(self: Compiler, name: Name) {.base.} = discard
method call*(self: Compiler, node: CallExpr, compile: bool = true): Type {.discardable, base.} = nil method dispatchPragmas(self: Compiler, name: Name) {.base.} = discard
method getItemExpr*(self: Compiler, node: GetItemExpr, compile: bool = true, matching: Type = nil): Type {.discardable, base.} = nil method dispatchDelayedPragmas(self: Compiler, name: Name) {.base.} = discard
method unary*(self: Compiler, node: UnaryExpr, compile: bool = true): Type {.discardable, base.} = nil # These are not methods because their behavior is shared across backends and does
method binary*(self: Compiler, node: BinaryExpr, compile: bool = true): Type {.discardable, base.} = nil # not need to change
method lambdaExpr*(self: Compiler, node: LambdaExpr, compile: bool = true): Type {.discardable, base.} = nil
method literal*(self: Compiler, node: ASTNode, compile: bool = true): Type {.discardable, base.} = nil
proc infer*(self: Compiler, node: LiteralExpr): Type proc infer*(self: Compiler, node: LiteralExpr): Type
proc infer*(self: Compiler, node: Expression): Type proc infer*(self: Compiler, node: Expression): Type
proc inferOrError*(self: Compiler, node: Expression): Type proc inferOrError*(self: Compiler, node: Expression): Type
@ -256,9 +249,9 @@ proc findInModule*(self: Compiler, name: string, module: Name): seq[Name]
proc findByType*(self: Compiler, name: string, kind: Type): seq[Name] proc findByType*(self: Compiler, name: string, kind: Type): seq[Name]
proc compare*(self: Compiler, a, b: Type): bool proc compare*(self: Compiler, a, b: Type): bool
proc match*(self: Compiler, name: string, kind: Type, node: ASTNode = nil, allowFwd: bool = true): Name proc match*(self: Compiler, name: string, kind: Type, node: ASTNode = nil, allowFwd: bool = true): Name
method prepareFunction*(self: Compiler, name: Name) {.base.} = discard proc resolve*(self: Compiler, name: string): Name
method dispatchPragmas(self: Compiler, name: Name) {.base.} = discard proc resolve*(self: Compiler, name: IdentExpr): Name
method dispatchDelayedPragmas(self: Compiler, name: Name) {.base.} = discard proc resolveOrError*[T: IdentExpr | string](self: Compiler, name: T): Name
## End of forward declarations ## End of forward declarations
## Utility functions ## Utility functions
@ -294,7 +287,7 @@ proc error*(self: Compiler, message: string, node: ASTNode = nil) {.inline.} =
proc warning*(self: Compiler, kind: WarningKind, message: string, name: Name = nil, node: ASTNode = nil) = proc warning*(self: Compiler, kind: WarningKind, message: string, name: Name = nil, node: ASTNode = nil) =
## Raises a warning. Note that warnings are always disabled in REPL mode ## Raises a warning. Note that warnings are always disabled in REPL mode
if self.replMode or kind in self.disabledWarnings: if kind in self.disabledWarnings:
return return
var node: ASTNode = node var node: ASTNode = node
var fn: Declaration var fn: Declaration
@ -354,7 +347,7 @@ proc wrap*(self: Type): Type =
return self return self
proc unwrap*(self: Type): Type = proc unwrap*(self: Type): Type {.inline.} =
## Unwraps a typevar if it's not already ## Unwraps a typevar if it's not already
## unwrapped ## unwrapped
if self.kind == Typevar: if self.kind == Typevar:
@ -417,7 +410,7 @@ proc resolveOrError*[T: IdentExpr | string](self: Compiler, name: T): Name =
self.error(&"reference to undefined name '{name}'") self.error(&"reference to undefined name '{name}'")
proc compareUnions*(self: Compiler, a, b: seq[tuple[match: bool, kind: Type]]): bool = proc compareUnions(self: Compiler, a, b: seq[tuple[match: bool, kind: Type, value: LiteralExpr]]): bool =
## Compares type unions between each other ## Compares type unions between each other
var var
long = a long = a
@ -449,7 +442,7 @@ proc compare*(self: Compiler, a, b: Type): bool =
return b.isNil() or b.kind == All return b.isNil() or b.kind == All
if b.isNil(): if b.isNil():
return a.isNil() or a.kind == All return a.isNil() or a.kind == All
if a.kind == All or b.kind == All: if a.kind in [All, Auto] or b.kind in [All, Auto]:
return true return true
if a.kind == b.kind: if a.kind == b.kind:
# Here we compare types with the same kind discriminant # Here we compare types with the same kind discriminant
@ -459,7 +452,11 @@ proc compare*(self: Compiler, a, b: Type): bool =
Char, Byte, String, Nil, TypeKind.Nan, Bool, Char, Byte, String, Nil, TypeKind.Nan, Bool,
TypeKind.Inf, Any, Auto: TypeKind.Inf, Any, Auto:
return true return true
of Array:
return self.compare(a.elemType, b.elemType) and a.size == b.size
of Typevar: of Typevar:
if a.wrapped.isNil() or b.wrapped.isNil():
return true
return self.compare(a.wrapped, b.wrapped) return self.compare(a.wrapped, b.wrapped)
of Union: of Union:
return self.compareUnions(a.types, b.types) return self.compareUnions(a.types, b.types)
@ -502,10 +499,6 @@ proc compare*(self: Compiler, a, b: Type): bool =
return true return true
else: else:
discard # TODO: Custom types, enums discard # TODO: Custom types, enums
if a.kind == Typevar:
return self.compare(a.wrapped, b)
if b.kind == Typevar:
return self.compare(a, b.wrapped)
if a.kind == Union: if a.kind == Union:
for constraint in a.types: for constraint in a.types:
if self.compare(constraint.kind, b) and constraint.match: if self.compare(constraint.kind, b) and constraint.match:
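
Judging from the excerpt above, a union accepts a candidate as soon as one of its accepted (match = true) members compares equal to it. A small sketch of that rule with plain enums standing in for peon types; how rejected (match = false) members are handled is not shown in the hunk and is left out here:

```nim
type ToyKind = enum Int64, UInt64, Float64

proc matchesUnion(candidate: ToyKind,
                  types: seq[tuple[match: bool, kind: ToyKind]]): bool =
  ## True if any accepted (match = true) member equals the candidate.
  for constraint in types:
    if constraint.match and constraint.kind == candidate:
      return true

when isMainModule:
  # roughly: int | float
  let union: seq[tuple[match: bool, kind: ToyKind]] = @[(true, Int64), (true, Float64)]
  assert matchesUnion(Float64, union)
  assert not matchesUnion(UInt64, union)
```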
@ -589,9 +582,11 @@ proc toIntrinsic*(name: string): Type =
elif name == "bool": elif name == "bool":
return Type(kind: Bool, isBuiltin: true) return Type(kind: Bool, isBuiltin: true)
elif name == "typevar": elif name == "typevar":
return Type(kind: Typevar, isBuiltin: true) return Type(kind: Typevar, isBuiltin: true, wrapped: Type(kind: Any))
elif name == "string": elif name == "string":
return Type(kind: String, isBuiltin: true) return Type(kind: String, isBuiltin: true)
elif name == "array":
return Type(kind: Array, isBuiltin: true, elements: @[])
proc infer*(self: Compiler, node: LiteralExpr): Type = proc infer*(self: Compiler, node: LiteralExpr): Type =
@ -632,33 +627,19 @@ proc infer*(self: Compiler, node: Expression): Type =
## returns it ## returns it
if node.isNil(): if node.isNil():
return nil return nil
if node.isConst():
return self.infer(LiteralExpr(node))
case node.kind: case node.kind:
of NodeKind.genericExpr:
result = self.makeConcrete(GenericExpr(node), compile=false)
of NodeKind.identExpr:
result = self.identifier(IdentExpr(node), compile=false, strict=false)
of NodeKind.unaryExpr:
result = self.unary(UnaryExpr(node), compile=false)
of NodeKind.binaryExpr:
result = self.binary(BinaryExpr(node), compile=false)
of {NodeKind.intExpr, NodeKind.hexExpr, NodeKind.binExpr, NodeKind.octExpr,
NodeKind.strExpr, NodeKind.falseExpr, NodeKind.trueExpr, NodeKind.floatExpr
}:
result = self.infer(LiteralExpr(node))
of NodeKind.callExpr:
result = self.call(CallExpr(node), compile=false)
of NodeKind.refExpr: of NodeKind.refExpr:
result = Type(kind: Reference, value: self.infer(Ref(node).value)) result = Type(kind: Reference, value: self.infer(Ref(node).value))
of NodeKind.ptrExpr: of NodeKind.ptrExpr:
result = Type(kind: Pointer, value: self.infer(Ptr(node).value)) result = Type(kind: Pointer, value: self.infer(Ptr(node).value))
of NodeKind.groupingExpr: of NodeKind.groupingExpr:
result = self.infer(GroupingExpr(node).expression) result = self.infer(GroupingExpr(node).expression)
of NodeKind.getItemExpr:
result = self.getItemExpr(GetItemExpr(node), compile=false)
of NodeKind.lambdaExpr:
result = self.lambdaExpr(LambdaExpr(node), compile=false)
else: else:
discard # TODO # For most cases we can just dispatch to the target
# compiler which will tell us the type of the value
result = self.expression(node, compile=false)
proc inferOrError*(self: Compiler, node: Expression): Type = proc inferOrError*(self: Compiler, node: Expression): Type =
@ -682,11 +663,22 @@ proc stringify*(self: Compiler, typ: Type): string =
TypeKind.Inf, Auto, Any: TypeKind.Inf, Auto, Any:
result &= ($typ.kind).toLowerAscii() result &= ($typ.kind).toLowerAscii()
of Typevar: of Typevar:
result = self.stringify(typ.wrapped) result = &"typevar[{self.stringify(typ.wrapped)}]"
of Pointer: of Pointer:
result &= &"ptr {self.stringify(typ.value)}" result &= &"ptr {self.stringify(typ.value)}"
of Reference: of Reference:
result &= &"ref {self.stringify(typ.value)}" result &= &"ref {self.stringify(typ.value)}"
of CustomType:
result &= &"{typ.decl.name.token.lexeme}"
if typ.decl.generics.len() > 0:
result &= "["
for i, gen in typ.decl.generics:
result &= &"{gen.name.token.lexeme}: {self.stringify(self.inferOrError(gen.cond))}"
if i < typ.decl.generics.len() - 1:
result &= ", "
result &= "]"
of Array:
result &= &"array[{self.stringify(typ.elemType)}, {typ.size}]"
of Function: of Function:
result &= "fn " result &= "fn "
if typ.fun.generics.len() > 0: if typ.fun.generics.len() > 0:
@ -746,6 +738,8 @@ proc findByName*(self: Compiler, name: string): seq[Name] =
## Looks for objects that have been already declared ## Looks for objects that have been already declared
## with the given name. Returns all objects that apply. ## with the given name. Returns all objects that apply.
for obj in reversed(self.names): for obj in reversed(self.names):
if obj.depth > self.depth:
continue
if obj.ident.token.lexeme == name: if obj.ident.token.lexeme == name:
if obj.owner.absPath != self.currentModule.absPath: if obj.owner.absPath != self.currentModule.absPath:
if obj.isPrivate or self.currentModule notin obj.exportedTo: if obj.isPrivate or self.currentModule notin obj.exportedTo:
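
The new depth check makes lookup skip declarations that live in scopes deeper than the current one while still walking names from most recent to oldest. A reduced sketch of that lookup (ToyName is an invented stand-in for the compiler's Name object):

```nim
import std/algorithm

type ToyName = object
  ident: string
  depth: int

proc findByName(names: seq[ToyName], name: string, currentDepth: int): seq[ToyName] =
  ## Most recently declared names come first; deeper-scoped names are skipped.
  for obj in reversed(names):
    if obj.depth > currentDepth:
      continue
    if obj.ident == name:
      result.add(obj)

when isMainModule:
  let names = @[ToyName(ident: "x", depth: 0), ToyName(ident: "x", depth: 2)]
  assert findByName(names, "x", 1).len == 1    # the depth-2 "x" is not visible
  assert findByName(names, "x", 2).len == 2
```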
@ -782,15 +776,6 @@ proc findByType*(self: Compiler, name: string, kind: Type): seq[Name] =
result.add(obj) result.add(obj)
proc findAtDepth*(self: Compiler, name: string, depth: int): seq[Name] {.used.} =
## Looks for objects that have been already declared
## with the given name at the given scope depth.
## Returns all objects that apply
for obj in self.findByName(name):
if obj.depth == depth:
result.add(obj)
proc check*(self: Compiler, term: Expression, kind: Type) {.inline.} = proc check*(self: Compiler, term: Expression, kind: Type) {.inline.} =
## Checks the type of term against a known type. ## Checks the type of term against a known type.
## Raises an error if appropriate and returns ## Raises an error if appropriate and returns
@ -867,10 +852,11 @@ proc match*(self: Compiler, name: string, kind: Type, node: ASTNode = nil, allow
else: else:
msg &= " (compile with --showMismatches for more details)" msg &= " (compile with --showMismatches for more details)"
self.error(msg, node) self.error(msg, node)
# This is only true when we're called by self.patchForwardDeclarations()
if impl[0].valueType.forwarded and not allowFwd:
self.error(&"expecting an implementation for function '{impl[0].ident.token.lexeme}' declared in module '{impl[0].owner.ident.token.lexeme}' at line {impl[0].ident.token.line} of type '{self.stringify(impl[0].valueType)}'")
result = impl[0] result = impl[0]
result.valueType = result.valueType.unwrap()
# This is only true when we're called by self.patchForwardDeclarations()
if result.valueType.forwarded and not allowFwd:
self.error(&"expecting an implementation for '{result.ident.token.lexeme}' declared in '{result.owner.ident.token.lexeme}' at line {result.ident.token.line} of type '{self.stringify(result.valueType)}'")
result.resolved = true result.resolved = true
if result.kind == NameKind.Var and not result.valueType.nameObj.isNil(): if result.kind == NameKind.Var and not result.valueType.nameObj.isNil():
# We found a function bound to a variable, # We found a function bound to a variable,
@ -887,34 +873,37 @@ proc beginScope*(self: Compiler) =
inc(self.depth) inc(self.depth)
proc unpackTypes*(self: Compiler, condition: Expression, list: var seq[tuple[match: bool, kind: Type]], accept: bool = true) = proc unpackTypes*(self: Compiler, condition: Expression, list: var seq[tuple[match: bool, kind: Type, value: LiteralExpr]], accept: bool = true) =
## Recursively unpacks a type constraint ## Recursively unpacks a type constraint
case condition.kind: if condition.isConst():
of identExpr: list.add((accept, self.inferOrError(condition), LiteralExpr(condition)))
var typ = self.inferOrError(condition) else:
if typ.kind != Typevar: case condition.kind:
self.error(&"expecting a type name, got value of type {self.stringify(typ)} instead", condition) of identExpr:
typ = typ.wrapped var typ = self.inferOrError(condition)
if typ.kind == Auto: if typ.kind != Typevar:
self.error("automatic types cannot be used within generics", condition) self.error(&"expecting a type name, got value of type {self.stringify(typ)} instead", condition)
list.add((accept, typ)) typ = typ.unwrap()
of binaryExpr: if typ.kind == Auto:
let condition = BinaryExpr(condition) self.error("automatic types cannot be used within generics", condition)
case condition.operator.lexeme: list.add((accept, typ, nil))
of "|": of binaryExpr:
self.unpackTypes(condition.a, list) let condition = BinaryExpr(condition)
self.unpackTypes(condition.b, list) case condition.operator.lexeme:
else: of "|":
self.error("invalid type constraint in generic declaration", condition) self.unpackTypes(condition.a, list)
of unaryExpr: self.unpackTypes(condition.b, list)
let condition = UnaryExpr(condition) else:
case condition.operator.lexeme: self.error("invalid type constraint in generic declaration", condition)
of "~": of unaryExpr:
self.unpackTypes(condition.a, list, accept=false) let condition = UnaryExpr(condition)
else: case condition.operator.lexeme:
self.error("invalid type constraint in generic declaration", condition) of "~":
else: self.unpackTypes(condition.a, list, accept=false)
self.error("invalid type constraint in generic declaration", condition) else:
self.error("invalid type constraint in generic declaration", condition)
else:
self.error("invalid type constraint in generic declaration", condition)
proc declare*(self: Compiler, node: ASTNode): Name {.discardable.} = proc declare*(self: Compiler, node: ASTNode): Name {.discardable.} =
@ -1010,10 +999,13 @@ proc declare*(self: Compiler, node: ASTNode): Name {.discardable.} =
isPrivate: node.isPrivate, isPrivate: node.isPrivate,
isReal: true, isReal: true,
belongsTo: self.currentFunction, belongsTo: self.currentFunction,
valueType: Type(kind: CustomType) valueType: Type(kind: CustomType,
decl: node)
) )
) )
n = self.names[^1] n = self.names[^1]
if node.generics.len() > 0:
n.isGeneric = true
declaredName = node.name.token.lexeme declaredName = node.name.token.lexeme
if node.value.isNil(): if node.value.isNil():
discard # TODO: Fields discard # TODO: Fields

View File

@ -14,10 +14,6 @@
## Low level bytecode implementation details ## Low level bytecode implementation details
import std/strutils
import std/strformat
import util/multibyte import util/multibyte
@ -69,13 +65,10 @@ type
# to unary opcodes, while a and b # to unary opcodes, while a and b
# represent arguments to binary # represent arguments to binary
# opcodes. Other variable names (c, d, ...) # opcodes. Other variable names (c, d, ...)
# may be used for more complex opcodes. If # may be used for more complex opcodes.
# an opcode takes any arguments at runtime, # Some opcodes (e.g. jumps), take arguments in
# they come from either the stack or the VM's # the form of 16 or 24 bit numbers that are defined
# closure array. Some other opcodes (e.g. # statically at compilation time into the bytecode
# jumps), take arguments in the form of 16
# or 24 bit numbers that are defined statically
# at compilation time into the bytecode
# These push a constant at position x in the # These push a constant at position x in the
# constant table onto the stack # constant table onto the stack
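
These 16- and 24-bit operands are serialized directly into the bytecode stream; the codebase's util/multibyte module provides helpers such as toTriple for that. A hedged sketch of what a 24-bit round trip looks like (the byte order used by peon's actual helpers is an assumption here):

```nim
proc toTriple(x: uint): array[3, uint8] =
  ## Packs the low 24 bits of x into three bytes (big-endian by assumption).
  [uint8((x shr 16) and 0xff), uint8((x shr 8) and 0xff), uint8(x and 0xff)]

proc fromTriple(b: array[3, uint8]): uint =
  (uint(b[0]) shl 16) or (uint(b[1]) shl 8) or uint(b[2])

when isMainModule:
  assert fromTriple(toTriple(300)) == 300
  assert fromTriple(toTriple(16777215)) == 16777215   # largest 24-bit value
```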
@ -165,8 +158,7 @@ type
PrintInf, PrintInf,
PrintString, PrintString,
## Basic stack operations ## Basic stack operations
Pop, # Pops an element off the stack and discards it Pop, # Pops an element off the operand stack and discards it
PopRepl, # Same as Pop, but also prints the value of what's popped (used in REPL mode)
PopN, # Pops x elements off the call stack (optimization for exiting local scopes which usually pop many elements) PopN, # Pops x elements off the call stack (optimization for exiting local scopes which usually pop many elements)
## Name resolution/handling ## Name resolution/handling
LoadAttribute, # Pushes the attribute b of object a onto the stack LoadAttribute, # Pushes the attribute b of object a onto the stack
@ -196,8 +188,8 @@ type
## Misc ## Misc
Assert, # Raises an exception if x is false Assert, # Raises an exception if x is false
NoOp, # Just a no-op NoOp, # Just a no-op
PopC, # Pop off the call stack onto the operand stack PopC, # Pop a value off the call stack and discard it
PushC, # Pop off the operand stack onto the call stack PushC, # Pop a value off the operand stack and push it onto the call stack
SysClock64, # Pushes the output of a monotonic clock on the stack SysClock64, # Pushes the output of a monotonic clock on the stack
LoadTOS, # Pushes the top of the call stack onto the operand stack LoadTOS, # Pushes the top of the call stack onto the operand stack
DupTop, # Duplicates the top of the operand stack onto the operand stack DupTop, # Duplicates the top of the operand stack onto the operand stack

View File

@ -19,7 +19,6 @@ import std/algorithm
import std/parseutils import std/parseutils
import std/strutils import std/strutils
import std/sequtils import std/sequtils
import std/sets
import std/os import std/os
@ -80,12 +79,17 @@ type
compilerProcs: TableRef[string, CompilerFunc] compilerProcs: TableRef[string, CompilerFunc]
# Stores the position of all jumps # Stores the position of all jumps
jumps: seq[tuple[patched: bool, offset: int]] jumps: seq[tuple[patched: bool, offset: int]]
# Metadata about function locations # Metadata regarding function locations (used to construct
# the debugging fields in the resulting bytecode)
functions: seq[tuple[start, stop, pos: int, fn: Name]] functions: seq[tuple[start, stop, pos: int, fn: Name]]
# Metadata regarding forward declarations
forwarded: seq[tuple[name: Name, pos: int]] forwarded: seq[tuple[name: Name, pos: int]]
# The topmost occupied stack slot # The topmost occupied stack slot
# in the current frame (0-indexed) # in the current frame (0-indexed)
stackIndex: int stackIndex: int
# All the lambdas we encountered (to know
# if we should compile them once we call
# them)
lambdas: seq[LambdaExpr] lambdas: seq[LambdaExpr]
@ -106,10 +110,13 @@ method dispatchDelayedPragmas(self: BytecodeCompiler, name: Name)
proc funDecl(self: BytecodeCompiler, node: FunDecl, name: Name) proc funDecl(self: BytecodeCompiler, node: FunDecl, name: Name)
proc compileModule(self: BytecodeCompiler, module: Name) proc compileModule(self: BytecodeCompiler, module: Name)
proc generateCall(self: BytecodeCompiler, fn: Name, args: seq[Expression], line: int) proc generateCall(self: BytecodeCompiler, fn: Name, args: seq[Expression], line: int)
proc identifier(self: BytecodeCompiler, node: IdentExpr, name: Name = nil, compile: bool = true, strict: bool = true): Type {.discardable.}
proc lambdaExpr(self: BytecodeCompiler, node: LambdaExpr, compile: bool = true): Type {.discardable.}
proc getItemExpr(self: BytecodeCompiler, node: GetItemExpr, compile: bool = true, matching: Type = nil): Type {.discardable.}
# End of forward declarations # End of forward declarations
proc newBytecodeCompiler*(replMode: bool = false): BytecodeCompiler = proc newBytecodeCompiler*: BytecodeCompiler =
## Initializes a new BytecodeCompiler object ## Initializes a new BytecodeCompiler object
new(result) new(result)
result.ast = @[] result.ast = @[]
@ -122,7 +129,6 @@ proc newBytecodeCompiler*(replMode: bool = false): BytecodeCompiler =
result.modules = newTable[string, Name]() result.modules = newTable[string, Name]()
result.lambdas = @[] result.lambdas = @[]
result.currentFunction = nil result.currentFunction = nil
result.replMode = replMode
result.currentModule = nil result.currentModule = nil
result.compilerProcs = newTable[string, CompilerFunc]() result.compilerProcs = newTable[string, CompilerFunc]()
result.compilerProcs["magic"] = CompilerFunc(kind: Immediate, handler: handleMagicPragma) result.compilerProcs["magic"] = CompilerFunc(kind: Immediate, handler: handleMagicPragma)
@ -153,7 +159,7 @@ proc emitBytes(self: BytecodeCompiler, bytarr: openarray[OpCode | uint8], line:
for b in bytarr: for b in bytarr:
self.emitByte(b, line) self.emitByte(b, line)
#[
proc printRepl(self: BytecodeCompiler, typ: Type, node: Expression) = proc printRepl(self: BytecodeCompiler, typ: Type, node: Expression) =
## Emits instruction to print ## Emits instruction to print
## peon types in REPL mode ## peon types in REPL mode
@ -188,7 +194,7 @@ proc printRepl(self: BytecodeCompiler, typ: Type, node: Expression) =
self.emitByte(PrintString, node.token.line) self.emitByte(PrintString, node.token.line)
else: else:
self.emitByte(PrintHex, node.token.line) self.emitByte(PrintHex, node.token.line)
]#
proc makeConstant(self: BytecodeCompiler, val: Expression, typ: Type): array[3, uint8] = proc makeConstant(self: BytecodeCompiler, val: Expression, typ: Type): array[3, uint8] =
## Adds a constant to the current chunk's constant table ## Adds a constant to the current chunk's constant table
@ -287,7 +293,7 @@ proc emitJump(self: BytecodeCompiler, opcode: OpCode, line: int): int =
self.emitBytes(0.toTriple(), line) self.emitBytes(0.toTriple(), line)
result = self.jumps.high() result = self.jumps.high()
#[
proc fixFunctionOffsets(self: BytecodeCompiler, where, oldLen: int) = proc fixFunctionOffsets(self: BytecodeCompiler, where, oldLen: int) =
## Fixes function offsets after the size of our ## Fixes function offsets after the size of our
## bytecode has changed ## bytecode has changed
@ -384,7 +390,7 @@ proc insertAt(self: BytecodeCompiler, where: int, opcode: OpCode, data: openarra
self.fixLines(where, self.chunk.code.len() - oldLen, true) self.fixLines(where, self.chunk.code.len() - oldLen, true)
self.fixNames(where, oldLen) self.fixNames(where, oldLen)
self.fixFunctionOffsets(oldLen, where) self.fixFunctionOffsets(oldLen, where)
]#
proc patchJump(self: BytecodeCompiler, offset: int) = proc patchJump(self: BytecodeCompiler, offset: int) =
@ -463,7 +469,7 @@ proc handleBuiltinFunction(self: BytecodeCompiler, fn: Type, args: seq[Expressio
"LogicalNot": LogicalNot, "LogicalNot": LogicalNot,
"NegInf": LoadNInf, "NegInf": LoadNInf,
"Identity": Identity "Identity": Identity
}.to_table() }.toTable()
if fn.builtinOp == "print": if fn.builtinOp == "print":
var typ = self.inferOrError(args[0]).unwrap() var typ = self.inferOrError(args[0]).unwrap()
case typ.kind: case typ.kind:
@ -576,8 +582,6 @@ proc endScope(self: BytecodeCompiler) =
var names: seq[Name] = @[] var names: seq[Name] = @[]
var popCount = 0 var popCount = 0
for name in self.names: for name in self.names:
if self.replMode and name.depth == 0:
continue
# We only pop names in scopes deeper than ours # We only pop names in scopes deeper than ours
if name.depth > self.depth: if name.depth > self.depth:
if name.depth == 0 and not self.isMainModule: if name.depth == 0 and not self.isMainModule:
@ -688,8 +692,7 @@ proc handleMagicPragma(self: BytecodeCompiler, pragma: Pragma, name: Name) =
if name.valueType.kind == All: if name.valueType.kind == All:
self.error("don't even think about it (compiler-chan is angry at you :/)", pragma) self.error("don't even think about it (compiler-chan is angry at you :/)", pragma)
if name.valueType.isNil(): if name.valueType.isNil():
self.error("'magic' pragma: wrong argument value", pragma.args[0]) self.error("'magic' pragma: wrong argument value", pragma.args[0])
name.valueType.isBuiltin = true
else: else:
self.error("'magic' pragma is not valid in this context") self.error("'magic' pragma is not valid in this context")
@ -799,7 +802,7 @@ method prepareFunction(self: BytecodeCompiler, fn: Name) =
## its arguments and typechecking it ## its arguments and typechecking it
# First we declare the function's generics, if it has any # First we declare the function's generics, if it has any
var constraints: seq[tuple[match: bool, kind: Type]] = @[] var constraints: seq[tuple[match: bool, kind: Type, value: LiteralExpr]] = @[]
for gen in fn.node.generics: for gen in fn.node.generics:
self.unpackTypes(gen.cond, constraints) self.unpackTypes(gen.cond, constraints)
self.names.add(Name(depth: fn.depth + 1, self.names.add(Name(depth: fn.depth + 1,
@ -811,6 +814,7 @@ method prepareFunction(self: BytecodeCompiler, fn: Name) =
belongsTo: fn, belongsTo: fn,
ident: gen.name, ident: gen.name,
owner: self.currentModule, owner: self.currentModule,
kind: NameKind.CustomType,
file: self.file)) file: self.file))
constraints = @[] constraints = @[]
# We now declare and typecheck the function's # We now declare and typecheck the function's
@ -825,14 +829,14 @@ method prepareFunction(self: BytecodeCompiler, fn: Name) =
if self.names.high() > 16777215: if self.names.high() > 16777215:
self.error("cannot declare more than 16777215 variables at a time") self.error("cannot declare more than 16777215 variables at a time")
inc(self.stackIndex) inc(self.stackIndex)
typ = self.inferOrError(argument.valueType) typ = self.inferOrError(argument.valueType).unwrap()
# We can't use self.compare(), because it would # We can't use self.compare(), because it would
# always just return true # always just return true
if typ.kind == Auto: if typ.kind == Auto:
fn.valueType.isAuto = true fn.valueType.isAuto = true
# Magic trick! We turn auto into any, just # Magic trick! We turn auto into any, just
# to make our lives easier # to make our lives easier
typ = "any".toIntrinsic() #typ = "any".toIntrinsic()
self.names.add(Name(depth: fn.depth + 1, self.names.add(Name(depth: fn.depth + 1,
isPrivate: true, isPrivate: true,
owner: fn.owner, owner: fn.owner,
@ -858,7 +862,7 @@ method prepareFunction(self: BytecodeCompiler, fn: Name) =
fn.valueType.args.add((self.names[^1].ident.token.lexeme, typ, default)) fn.valueType.args.add((self.names[^1].ident.token.lexeme, typ, default))
# The function needs a return type too! # The function needs a return type too!
if not node.returnType.isNil(): if not node.returnType.isNil():
fn.valueType.returnType = self.inferOrError(node.returnType) fn.valueType.returnType = self.inferOrError(node.returnType).unwrap()
if fn.valueType.returnType.kind == Auto: if fn.valueType.returnType.kind == Auto:
fn.valueType.isAuto = true fn.valueType.isAuto = true
# Here we don't bother changing the return type # Here we don't bother changing the return type
@ -1042,7 +1046,7 @@ proc beginProgram(self: BytecodeCompiler): int =
self.emitBytes(0.toTriple(), 1) self.emitBytes(0.toTriple(), 1)
method literal(self: BytecodeCompiler, node: ASTNode, compile: bool = true): Type {.discardable.} = proc literal(self: BytecodeCompiler, node: ASTNode, compile: bool = true): Type {.discardable.} =
## Emits instructions for literals such ## Emits instructions for literals such
## as singletons, strings and numbers ## as singletons, strings and numbers
case node.kind: case node.kind:
@ -1140,7 +1144,7 @@ method literal(self: BytecodeCompiler, node: ASTNode, compile: bool = true): Typ
self.error(&"invalid AST node of kind {node.kind} at literal(): {node} (This is an internal error and most likely a bug!)") self.error(&"invalid AST node of kind {node.kind} at literal(): {node} (This is an internal error and most likely a bug!)")
method unary(self: BytecodeCompiler, node: UnaryExpr, compile: bool = true): Type {.discardable.} = proc unary(self: BytecodeCompiler, node: UnaryExpr, compile: bool = true): Type {.discardable.} =
## Compiles all unary expressions ## Compiles all unary expressions
var default: Expression var default: Expression
let fn = Type(kind: Function, let fn = Type(kind: Function,
@ -1158,7 +1162,7 @@ method unary(self: BytecodeCompiler, node: UnaryExpr, compile: bool = true): Typ
self.generateCall(impl, @[node.a], impl.line) self.generateCall(impl, @[node.a], impl.line)
method binary(self: BytecodeCompiler, node: BinaryExpr, compile: bool = true): Type {.discardable.} = proc binary(self: BytecodeCompiler, node: BinaryExpr, compile: bool = true): Type {.discardable.} =
## Compiles all binary expressions ## Compiles all binary expressions
var default: Expression var default: Expression
let fn = Type(kind: Function, returnType: "any".toIntrinsic(), args: @[("", self.inferOrError(node.a), default), ("", self.inferOrError(node.b), default)]) let fn = Type(kind: Function, returnType: "any".toIntrinsic(), args: @[("", self.inferOrError(node.a), default), ("", self.inferOrError(node.b), default)])
@ -1174,7 +1178,7 @@ method binary(self: BytecodeCompiler, node: BinaryExpr, compile: bool = true): T
self.generateCall(impl, @[node.a, node.b], impl.line) self.generateCall(impl, @[node.a, node.b], impl.line)
method identifier(self: BytecodeCompiler, node: IdentExpr, name: Name = nil, compile: bool = true, strict: bool = true): Type {.discardable.} = proc identifier(self: BytecodeCompiler, node: IdentExpr, name: Name = nil, compile: bool = true, strict: bool = true): Type {.discardable.} =
## Compiles access to identifiers ## Compiles access to identifiers
var s = name var s = name
if s.isNil(): if s.isNil():
@ -1223,7 +1227,7 @@ method identifier(self: BytecodeCompiler, node: IdentExpr, name: Name = nil, com
self.emitBytes(s.position.toTriple(), s.ident.token.line) self.emitBytes(s.position.toTriple(), s.ident.token.line)
method assignment(self: BytecodeCompiler, node: ASTNode, compile: bool = true): Type {.discardable.} = proc assignment(self: BytecodeCompiler, node: ASTNode, compile: bool = true): Type {.discardable.} =
## Compiles assignment expressions ## Compiles assignment expressions
case node.kind: case node.kind:
of assignExpr: of assignExpr:
@ -1253,50 +1257,52 @@ method assignment(self: BytecodeCompiler, node: ASTNode, compile: bool = true):
elif r.isLet: elif r.isLet:
self.error(&"cannot reassign '{name.token.lexeme}' (value is immutable)", name) self.error(&"cannot reassign '{name.token.lexeme}' (value is immutable)", name)
if r.valueType.kind != CustomType: if r.valueType.kind != CustomType:
self.error("only types have fields", node) self.error(&"cannot set attributes on object of type {self.stringify(r.valueType)}", node)
else: else:
self.error(&"invalid AST node of kind {node.kind} at assignment(): {node} (This is an internal error and most likely a bug)") self.error(&"invalid AST node of kind {node.kind} at assignment(): {node} (This is an internal error and most likely a bug)")
method makeConcrete(self: BytecodeCompiler, node: GenericExpr, compile: bool = true): Type = proc makeConcrete(self: BytecodeCompiler, node: GenericExpr, compile: bool = true): Type =
## Builds a concrete type from the given generic ## Builds a concrete type from the given generic
## instantiation ## instantiation
var name = self.resolveOrError(node.ident) var name = self.resolveOrError(node.ident)
if not name.isGeneric: if not name.isGeneric:
self.error(&"cannot instantiate concrete type from {self.stringify(name.valueType)}: a generic is required") self.error(&"cannot instantiate a concrete type from non-generic type {self.stringify(name.valueType)}")
var fun = FunDecl(name.node) var decl = name.node
if fun.generics.len() != node.args.len(): if decl.generics.len() != node.args.len():
self.error(&"wrong number of types supplied for generic instantiation (expected {fun.generics.len()}, got {node.args.len()} instead)") self.error(&"wrong number of types supplied for generic instantiation (expected {decl.generics.len()}, got {node.args.len()} instead)")
var concrete = deepCopy(name.valueType) var concrete = deepCopy(name.valueType)
var types: seq[Type] = @[] var types: seq[Type] = @[]
var map = newTable[string, Type]() var map = newTable[string, Type]()
for arg in node.args: for i, (gen, value) in zip(decl.generics, node.args):
types.add(self.inferOrError(arg)) self.check(value, self.inferOrError(gen.cond))
if types[^1].kind != Typevar: types.add(self.inferOrError(value))
self.error(&"expecting type name during generic instantiation, got {self.stringify(types[^1])} instead", arg) if types[^1].kind != Typevar and not value.isConst():
for (gen, value) in zip(fun.generics, node.args): self.error(&"expecting type name or constant as generic parameter, got {self.stringify(types[^1])} instead", value)
map[gen.name.token.lexeme] = self.inferOrError(value) map[decl.generics[i].name.token.lexeme] = types[^1]
for i, argument in concrete.args: case concrete.kind:
if argument.kind.kind != Generic: of Array:
continue concrete.size = parseInt(node.args[1].token.lexeme)
elif argument.name in map: concrete.elemType = map["T"]
concrete.args[i].kind = map[argument.name] of Function:
for i, argument in concrete.args:
if argument.kind.kind != Generic:
continue
elif argument.kind.name in map:
concrete.args[i].kind = map[argument.kind.name]
else:
self.error(&"unknown generic argument name '{argument.kind.name}'", FunDecl(concrete.fun).arguments[i].name)
if not concrete.returnType.isNil() and concrete.returnType.kind == Generic:
if concrete.returnType.name in map:
concrete.returnType = map[concrete.returnType.name]
else:
self.error(&"unknown generic argument name '{concrete.returnType.name}'", concrete.fun)
else: else:
self.error(&"unknown generic argument name '{argument.name}'", concrete.fun) discard
if not concrete.returnType.isNil() and concrete.returnType.kind == Generic: result = Type(kind: Typevar, wrapped: concrete)
if concrete.returnType.name in map:
concrete.returnType = map[concrete.returnType.name]
else:
self.error(&"unknown generic argument name '{concrete.returnType.name}'", concrete.fun)
if compile:
# Types don't exist at runtime, but if you want to
# assign them to variables then you need *something*
# to pop off the stack, so we just push a nil
self.emitByte(LoadNil, node.token.line)
result = concrete
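
At its core, the function branch of makeConcrete is a name-to-type substitution: generic placeholders in the argument list (and return type) are replaced with whatever the instantiation supplied. A stripped-down sketch of that substitution step (ToyType is invented for illustration and far simpler than peon's Type):

```nim
import std/tables

type
  ToyTypeKind = enum Concrete, Generic
  ToyType = ref object
    kind: ToyTypeKind
    name: string            # for Generic: the placeholder's name (e.g. "T")

proc substitute(args: var seq[ToyType], map: Table[string, ToyType]) =
  ## Replaces every generic placeholder that the instantiation mapped.
  for i in 0 ..< args.len:
    if args[i].kind == Generic and args[i].name in map:
      args[i] = map[args[i].name]

when isMainModule:
  var args = @[ToyType(kind: Generic, name: "T"),
               ToyType(kind: Concrete, name: "int")]
  let map = {"T": ToyType(kind: Concrete, name: "float")}.toTable()
  substitute(args, map)
  assert args[0].name == "float" and args[1].name == "int"
```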
method call(self: BytecodeCompiler, node: CallExpr, compile: bool = true): Type {.discardable.} = proc call(self: BytecodeCompiler, node: CallExpr, compile: bool = true): Type {.discardable.} =
## Compiles function calls ## Compiles function calls
var args: seq[tuple[name: string, kind: Type, default: Expression]] = @[] var args: seq[tuple[name: string, kind: Type, default: Expression]] = @[]
var argExpr: seq[Expression] = @[] var argExpr: seq[Expression] = @[]
@ -1354,6 +1360,7 @@ method call(self: BytecodeCompiler, node: CallExpr, compile: bool = true): Type
self.generateCall(result, argExpr, node.token.line) self.generateCall(result, argExpr, node.token.line)
result = result.returnType result = result.returnType
of NodeKind.getItemExpr: of NodeKind.getItemExpr:
# Calling stuff like a.b()
let node = GetItemExpr(node.callee) let node = GetItemExpr(node.callee)
result = self.getItemExpr(node, compile=false, matching=Type(kind: Function, args: args, returnType: Type(kind: All))) result = self.getItemExpr(node, compile=false, matching=Type(kind: Function, args: args, returnType: Type(kind: All)))
var fn: Name var fn: Name
@ -1383,7 +1390,7 @@ method call(self: BytecodeCompiler, node: CallExpr, compile: bool = true): Type
let node = GenericExpr(node.callee) let node = GenericExpr(node.callee)
let concrete = self.makeConcrete(node) let concrete = self.makeConcrete(node)
var impl = self.resolve(node.ident).deepCopy() var impl = self.resolve(node.ident).deepCopy()
impl.valueType = concrete impl.valueType = concrete.unwrap()
result = impl.valueType.returnType result = impl.valueType.returnType
if compile: if compile:
self.generateCall(impl, argExpr, node.token.line) self.generateCall(impl, argExpr, node.token.line)
@ -1395,7 +1402,7 @@ method call(self: BytecodeCompiler, node: CallExpr, compile: bool = true): Type
self.error(&"object of type '{self.stringify(typ)}' is not callable", node) self.error(&"object of type '{self.stringify(typ)}' is not callable", node)
method getItemExpr(self: BytecodeCompiler, node: GetItemExpr, compile: bool = true, matching: Type = nil): Type {.discardable.} = proc getItemExpr(self: BytecodeCompiler, node: GetItemExpr, compile: bool = true, matching: Type = nil): Type {.discardable.} =
## Compiles accessing to fields of a type or ## Compiles accessing to fields of a type or
## module namespace. If the compile flag is set ## module namespace. If the compile flag is set
## to false, no code is generated for resolving ## to false, no code is generated for resolving
@ -1445,7 +1452,7 @@ proc blockStmt(self: BytecodeCompiler, node: BlockStmt, compile: bool = true) =
self.endScope() self.endScope()
method lambdaExpr(self: BytecodeCompiler, node: LambdaExpr, compile: bool = true): Type {.discardable.} = proc lambdaExpr(self: BytecodeCompiler, node: LambdaExpr, compile: bool = true): Type {.discardable.} =
## Compiles lambda functions as expressions ## Compiles lambda functions as expressions
result = Type(kind: Function, isLambda: true, fun: node, location: 0, compiled: true) result = Type(kind: Function, isLambda: true, fun: node, location: 0, compiled: true)
let function = self.currentFunction let function = self.currentFunction
@ -1554,8 +1561,10 @@ method lambdaExpr(self: BytecodeCompiler, node: LambdaExpr, compile: bool = true
method expression(self: BytecodeCompiler, node: Expression, compile: bool = true): Type {.discardable.} = method expression(self: BytecodeCompiler, node: Expression, compile: bool = true): Type {.discardable.} =
## Compiles all expressions ## Compiles all expressions
case node.kind: case node.kind:
of NodeKind.arrayExpr:
discard # TODO
of NodeKind.genericExpr: of NodeKind.genericExpr:
return self.makeConcrete(GenericExpr(node)) return self.makeConcrete(GenericExpr(node), compile)
of NodeKind.callExpr: of NodeKind.callExpr:
return self.call(CallExpr(node), compile) return self.call(CallExpr(node), compile)
of NodeKind.getItemExpr: of NodeKind.getItemExpr:
@ -1573,12 +1582,12 @@ method expression(self: BytecodeCompiler, node: Expression, compile: bool = true
of NodeKind.unaryExpr: of NodeKind.unaryExpr:
# Unary expressions such as ~5 and -3 # Unary expressions such as ~5 and -3
return self.unary(UnaryExpr(node), compile) return self.unary(UnaryExpr(node), compile)
of NodeKind.binaryExpr:
# Binary expressions such as 2 ^ 5 and 0.66 * 3.14
return self.binary(BinaryExpr(node), compile)
of NodeKind.groupingExpr: of NodeKind.groupingExpr:
# Grouping expressions like (2 + 1) # Grouping expressions like (2 + 1)
return self.expression(GroupingExpr(node).expression, compile) return self.expression(GroupingExpr(node).expression, compile)
of NodeKind.binaryExpr:
# Binary expressions such as 2 ^ 5 and 0.66 * 3.14
return self.binary(BinaryExpr(node), compile)
of NodeKind.intExpr, NodeKind.hexExpr, NodeKind.binExpr, NodeKind.octExpr, of NodeKind.intExpr, NodeKind.hexExpr, NodeKind.binExpr, NodeKind.octExpr,
NodeKind.strExpr, NodeKind.falseExpr, NodeKind.trueExpr, NodeKind.floatExpr: NodeKind.strExpr, NodeKind.falseExpr, NodeKind.trueExpr, NodeKind.floatExpr:
# Since all of these AST nodes share the # Since all of these AST nodes share the
@ -1851,8 +1860,6 @@ proc statement(self: BytecodeCompiler, node: Statement) =
# The expression has no type and produces no value, # The expression has no type and produces no value,
# so we don't have to pop anything # so we don't have to pop anything
discard discard
elif self.replMode:
self.printRepl(kind, expression)
else: else:
self.emitByte(Pop, node.token.line) self.emitByte(Pop, node.token.line)
of NodeKind.switchStmt: of NodeKind.switchStmt:
@ -1906,7 +1913,6 @@ proc varDecl(self: BytecodeCompiler, node: VarDecl) =
if node.value.isNil(): if node.value.isNil():
# Variable has no value: the type declaration # Variable has no value: the type declaration
# takes over # takes over
# TODO: Implement T.default()!
if self.compare(typ, "auto".toIntrinsic()): if self.compare(typ, "auto".toIntrinsic()):
self.error("automatic types require initialization", node) self.error("automatic types require initialization", node)
typ = self.inferOrError(node.valueType) typ = self.inferOrError(node.valueType)
@ -1915,15 +1921,17 @@ proc varDecl(self: BytecodeCompiler, node: VarDecl) =
# the typevar and compare the wrapped type, which is not what we want # the typevar and compare the wrapped type, which is not what we want
if typ.kind != Typevar: if typ.kind != Typevar:
self.error(&"expecting type name, got value of type {self.stringify(typ)} instead", node.name) self.error(&"expecting type name, got value of type {self.stringify(typ)} instead", node.name)
# TODO: Implement T.default()!
self.error(&"cannot compute default value for {self.stringify(typ)}: please provide one explicitly")
elif node.valueType.isNil(): elif node.valueType.isNil():
# Variable has no type declaration: the type # Variable has no type declaration: the type
# of its value takes over # of its value takes over
typ = self.inferOrError(node.value) typ = self.inferOrError(node.value).unwrap()
else: else:
# Variable has both a type declaration and # Variable has both a type declaration and
# a value: the value's type must match the # a value: the value's type must match the
# type declaration # type declaration
let expected = self.inferOrError(node.valueType) let expected = self.inferOrError(node.valueType).unwrap()
if not self.compare(expected, "auto".toIntrinsic()): if not self.compare(expected, "auto".toIntrinsic()):
self.check(node.value, expected) self.check(node.value, expected)
# If this doesn't fail, then we're good # If this doesn't fail, then we're good
@ -1932,7 +1940,7 @@ proc varDecl(self: BytecodeCompiler, node: VarDecl) =
# Let the compiler infer the type (this # Let the compiler infer the type (this
# is the default behavior already, but # is the default behavior already, but
# some users may prefer to be explicit!) # some users may prefer to be explicit!)
typ = self.inferOrError(node.value) typ = self.inferOrError(node.value).unwrap()
self.expression(node.value) self.expression(node.value)
self.emitByte(AddVar, node.token.line) self.emitByte(AddVar, node.token.line)
inc(self.stackIndex) inc(self.stackIndex)
@ -2095,15 +2103,10 @@ proc compile*(self: BytecodeCompiler, ast: seq[Declaration], file: string, lines
self.file = file self.file = file
self.depth = 0 self.depth = 0
self.currentFunction = nil self.currentFunction = nil
if self.replMode: self.ast = ast
self.ast &= ast self.current = 0
self.source &= "\n" & source self.lines = lines
self.lines &= lines self.source = source
else:
self.ast = ast
self.current = 0
self.lines = lines
self.source = source
self.isMainModule = isMainModule self.isMainModule = isMainModule
self.disabledWarnings = disabledWarnings self.disabledWarnings = disabledWarnings
self.showMismatches = showMismatches self.showMismatches = showMismatches
@ -2154,8 +2157,6 @@ proc compileModule(self: BytecodeCompiler, module: Name) =
let currentModule = self.currentModule let currentModule = self.currentModule
let mainModule = self.isMainModule let mainModule = self.isMainModule
let parentModule = self.parentModule let parentModule = self.parentModule
let replMode = self.replMode
self.replMode = false
# Set the current module to the new module # Set the current module to the new module
# and the current module as the parent module: # and the current module as the parent module:
# this is needed for export statements # this is needed for export statements
@ -2189,7 +2190,6 @@ proc compileModule(self: BytecodeCompiler, module: Name) =
self.currentModule = currentModule self.currentModule = currentModule
self.isMainModule = mainModule self.isMainModule = mainModule
self.parentModule = parentModule self.parentModule = parentModule
self.replMode = replMode
self.lines = lines self.lines = lines
self.source = src self.source = src
self.modules[module.absPath] = module self.modules[module.absPath] = module

View File

@ -17,7 +17,6 @@ import multibyte
import std/strformat import std/strformat
import std/strutils
import std/terminal import std/terminal
@ -223,8 +222,6 @@ proc disassembleInstruction*(self: Debugger) =
self.current += 1 self.current += 1
proc parseFunctions(self: Debugger) = proc parseFunctions(self: Debugger) =
## Parses function information in the chunk ## Parses function information in the chunk
var var

View File

@ -79,6 +79,7 @@ type
pragmaExpr, pragmaExpr,
refExpr, refExpr,
ptrExpr, ptrExpr,
arrayExpr,
genericExpr, genericExpr,
switchStmt switchStmt
@ -160,7 +161,6 @@ type
ident*: IdentExpr ident*: IdentExpr
args*: seq[Expression] args*: seq[Expression]
UnaryExpr* = ref object of Expression UnaryExpr* = ref object of Expression
operator*: Token operator*: Token
a*: Expression a*: Expression
@ -171,6 +171,11 @@ type
# inherit from that and add a second operand # inherit from that and add a second operand
b*: Expression b*: Expression
ArrayExpr* = ref object of Expression
elements*: seq[Expression]
startToken*: Token
endToken*: Token
YieldExpr* = ref object of Expression YieldExpr* = ref object of Expression
expression*: Expression expression*: Expression
@ -335,6 +340,15 @@ proc newPragma*(name: IdentExpr, args: seq[LiteralExpr]): Pragma =
result.token = name.token result.token = name.token
proc newArrayExpr*(elements: seq[Expression], startToken, endToken: Token): ArrayExpr =
new(result)
result.kind = arrayExpr
result.elements = elements
result.startToken = startToken
result.endToken = endToken
result.token = startToken
proc newRefExpr*(expression: Expression, token: Token): Ref = proc newRefExpr*(expression: Expression, token: Token): Ref =
new(result) new(result)
result.kind = refExpr result.kind = refExpr
@ -801,6 +815,9 @@ proc `$`*(self: ASTNode): string =
of genericExpr: of genericExpr:
var self = GenericExpr(self) var self = GenericExpr(self)
result &= &"Generic(ident={self.ident}, args={self.args})" result &= &"Generic(ident={self.ident}, args={self.args})"
of arrayExpr:
var self = ArrayExpr(self)
result &= &"Array(elements={self.elements})"
else: else:
discard discard
@ -865,6 +882,9 @@ proc getRelativeBoundaries*(self: ASTNode): tuple[start, stop: int] =
if self.args.len() > 0: if self.args.len() > 0:
stop = getRelativeBoundaries(self.args[^1]).stop stop = getRelativeBoundaries(self.args[^1]).stop
result = (ident.start, stop) result = (ident.start, stop)
of arrayExpr:
var self = ArrayExpr(self)
result = (self.startToken.relPos.start, self.endToken.relPos.stop)
else: else:
result = (0, 0) result = (0, 0)

View File

@ -395,6 +395,16 @@ proc primary(self: Parser): Expression =
discard self.step() discard self.step()
result = newPtrExpr(self.expression(), self.peek(-1)) result = newPtrExpr(self.expression(), self.peek(-1))
result.file = self.file result.file = self.file
of TokenType.LeftBracket:
# Array
let start = self.step()
var elems: seq[Expression] = @[]
while not self.done() and not self.check(RightBracket):
elems.add(self.expression())
if not self.match(Comma):
break
self.expect(RightBracket, "missing ']' in array construction")
result = newArrayExpr(elems, start, self.peek(-1))
else: else:
self.error("invalid syntax") self.error("invalid syntax")
@ -439,8 +449,7 @@ proc parseGenericArgs(self: Parser): Expression =
var item = newIdentExpr(self.peek(-2), self.scopeDepth) var item = newIdentExpr(self.peek(-2), self.scopeDepth)
var types: seq[Expression] = @[] var types: seq[Expression] = @[]
while not self.check(RightBracket) and not self.done(): while not self.check(RightBracket) and not self.done():
self.expect(Identifier) types.add(self.expression())
types.add(newIdentExpr(self.peek(-1), self.scopeDepth))
if not self.match(Comma): if not self.match(Comma):
break break
self.expect(RightBracket) self.expect(RightBracket)
@ -962,15 +971,14 @@ proc varDecl(self: Parser, isLet: bool = false,
var name = newIdentExpr(self.peek(-1), self.scopeDepth) var name = newIdentExpr(self.peek(-1), self.scopeDepth)
let isPrivate = not self.match("*") let isPrivate = not self.match("*")
self.checkDecl(isPrivate) self.checkDecl(isPrivate)
var valueType: IdentExpr var valueType: Expression
var hasInit = false var hasInit = false
var pragmas: seq[Pragma] = @[] var pragmas: seq[Pragma] = @[]
if self.match(":"): if self.match(":"):
# We don't enforce it here because # We don't enforce it here because
# the compiler may be able to infer # the compiler may be able to infer
# the type later! # the type later!
self.expect(Identifier, "expecting type name after ':'") valueType = self.parseOr()
valueType = newIdentExpr(self.peek(-1), self.scopeDepth)
if self.match("="): if self.match("="):
hasInit = true hasInit = true
value = self.expression() value = self.expression()

View File

@ -146,6 +146,7 @@ proc runFile(f: string, fromString: bool = false, dump: bool = true, breakpoints
print(exc) print(exc)
except CompileError as exc: except CompileError as exc:
print(exc) print(exc)
#raise
except SerializationError as exc: except SerializationError as exc:
var file = exc.file var file = exc.file
if file notin ["<string>", ""]: if file notin ["<string>", ""]:

View File

@ -74,6 +74,10 @@ type typevar* = object {
} }
type array*[T: any, S: int] = object {
#pragma[magic: "array"]
}
# Some convenience aliases # Some convenience aliases
type int* = int64; type int* = int64;
type float* = float64; type float* = float64;

View File

@ -24,6 +24,6 @@ fn testGlobals*: bool {
} }
fn cast*[T: any](x: any): T { fn cast*[T: typevar, D: any](x: T): D {
#pragma[magic: "cast"] #pragma[magic: "cast"]
} }

View File

@ -5,9 +5,5 @@ import std;
# as the type I'm telling you, trust me bro". There is no data conversion # as the type I'm telling you, trust me bro". There is no data conversion
# occurring whatsoever! For that, use converters (once they're implemented LoL) # occurring whatsoever! For that, use converters (once they're implemented LoL)
print(cast[int](2.0) == 4611686018427387904); print(cast[int, float](2.0) == 4611686018427387904);
print(cast[float](4611686018427387904) == 2.0); print(cast[float, int](4611686018427387904) == 2.0);
# If that strikes your fancy, you can do this:
var x = int;
var caster = cast[x];
print(caster(2.0) == 4611686018427387904);
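
The expected constant in the test above is simply the IEEE-754 bit pattern of 2.0 read back as a signed 64-bit integer; the same reinterpretation (no data conversion, as the comment says) expressed in Nim:

```nim
# 2.0 is 0x4000000000000000 when its bits are reinterpreted as an integer
assert cast[int64](2.0'f64) == 4611686018427387904'i64
assert cast[float64](4611686018427387904'i64) == 2.0
```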