Added Makefile, prettified code, initial work on pragmas

Nocturn9x 2022-05-18 13:32:32 +02:00
parent 0e22351e35
commit 2f7a628d79
14 changed files with 448 additions and 317 deletions

Makefile (new file, 5 lines)

@@ -0,0 +1,5 @@
+run:
+	nim --hints:off --warnings:off r src/test.nim
+pretty:
+	nimpretty src/*.nim src/backend/*.nim src/frontend/*.nim src/frontend/meta/*.nim src/memory/*.nim src/util/*.nim
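With this in place, "make run" compiles and runs src/test.nim with hints and warnings silenced, and "make pretty" runs nimpretty over every listed source directory, which is presumably what produced the mechanical line re-wrapping seen throughout the rest of this commit.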

View File

@@ -52,4 +52,3 @@ type
                fields*: seq[PeonObject]
            else:
                discard # TODO

View File

@@ -126,7 +126,8 @@ proc readInt64(self: PeonVM, idx: int): PeonObject =
    ## chunk's constant table and
    ## returns a Peon object. Assumes
    ## the constant is an Int64
-    var arr = [self.chunk.byteConsts[idx], self.chunk.byteConsts[idx + 1], self.chunk.byteConsts[idx + 2], self.chunk.byteConsts[idx + 3]]
+    var arr = [self.chunk.byteConsts[idx], self.chunk.byteConsts[idx + 1],
+               self.chunk.byteConsts[idx + 2], self.chunk.byteConsts[idx + 3]]
    result = PeonObject(kind: Int64)
    copyMem(result.long.addr, arr.addr, sizeof(arr))

@@ -136,7 +137,8 @@ proc readUInt64(self: PeonVM, idx: int): PeonObject =
    ## chunk's constant table and
    ## returns a Peon object. Assumes
    ## the constant is an UInt64
-    var arr = [self.chunk.byteConsts[idx], self.chunk.byteConsts[idx + 1], self.chunk.byteConsts[idx + 2], self.chunk.byteConsts[idx + 3]]
+    var arr = [self.chunk.byteConsts[idx], self.chunk.byteConsts[idx + 1],
+               self.chunk.byteConsts[idx + 2], self.chunk.byteConsts[idx + 3]]
    result = PeonObject(kind: UInt64)
    copyMem(result.uLong.addr, arr.addr, sizeof(arr))
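As a standalone illustration of the copyMem pattern these readers rely on (the variable names below are made up for the example, not taken from the codebase), the bytes land in the low-order end of a zero-initialised integer, so the decoded value depends on host endianness:

    var bytes: array[4, uint8] = [42'u8, 0'u8, 0'u8, 0'u8]   # raw bytes, as pulled from a constant table
    var value: int64                                          # default-initialised to 0
    copyMem(value.addr, bytes.addr, sizeof(bytes))            # copy 4 bytes into the low half of the int64
    echo value                                                # prints 42 on a little-endian machine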

View File

@@ -96,7 +96,7 @@ type
        # The bytecode chunk where we write code to
        chunk: Chunk
        # The output of our parser (AST)
-        ast: seq[ASTNode]
+        ast: seq[Declaration]
        # The current AST node we're looking at
        current: int
        # The current file being compiled (used only for

@@ -185,7 +185,8 @@ proc done(self: Compiler): bool =
    result = self.current > self.ast.high()


-proc error(self: Compiler, message: string) {.raises: [CompileError, ValueError].} =
+proc error(self: Compiler, message: string) {.raises: [CompileError,
+        ValueError].} =
    ## Raises a formatted CompileError exception
    var tok = self.getCurrentNode().token
    raise newException(CompileError, &"A fatal error occurred while compiling '{self.file}', module '{self.currentModule}' line {tok.line} at '{tok.lexeme}' -> {message}")

@@ -325,7 +326,8 @@ proc resolve(self: Compiler, name: IdentExpr,
    return nil


-proc getStackPos(self: Compiler, name: IdentExpr, depth: int = self.scopeDepth): tuple[closedOver: bool, pos: int] =
+proc getStackPos(self: Compiler, name: IdentExpr,
+        depth: int = self.scopeDepth): tuple[closedOver: bool, pos: int] =
    ## Iterates the internal list of declared names backwards and
    ## returns a tuple (closedOver, pos) that tells the caller whether the
    ## the name is to be emitted as a closure as well as its predicted

@@ -348,7 +350,8 @@ proc getStackPos(self: Compiler, name: IdentExpr, depth: int = self.scopeDepth):
    return (false, -1)


-proc detectClosureVariable(self: Compiler, name: IdentExpr, depth: int = self.scopeDepth) =
+proc detectClosureVariable(self: Compiler, name: IdentExpr,
+        depth: int = self.scopeDepth) =
    ## Detects if the given name is used in a local scope deeper
    ## than the given one and modifies the code emitted for it
    ## to store it as a closure variable if it is. Does nothing if the name

@@ -399,7 +402,8 @@ proc compareTypes(self: Compiler, a, b: Type): bool =
                return false
            elif a.arguments.len() != b.arguments.len():
                return false
-            elif not self.compareTypes(self.inferType(a.returnType), self.inferType(b.returnType)):
+            elif not self.compareTypes(self.inferType(a.returnType),
+                    self.inferType(b.returnType)):
                return false
            for (argA, argB) in zip(a.arguments, b.arguments):
                if argA.mutable != argB.mutable:

@@ -408,7 +412,8 @@ proc compareTypes(self: Compiler, a, b: Type): bool =
                    return false
                elif argA.isPtr != argB.isPtr:
                    return false
-                elif not self.compareTypes(self.inferType(argA.valueType), self.inferType(argB.valueType)):
+                elif not self.compareTypes(self.inferType(argA.valueType),
+                        self.inferType(argB.valueType)):
                    return false
            return true
        else:

@@ -736,7 +741,8 @@ proc declareName(self: Compiler, node: Declaration) =
                isPrivate: node.isPrivate,
                owner: self.currentModule,
                isConst: node.isConst,
-                valueType: Type(kind: self.inferType(node.value).kind, node: node),
+                valueType: Type(kind: self.inferType(
+                        node.value).kind, node: node),
                codePos: self.chunk.code.len(),
                isLet: node.isLet))
            self.emitByte(StoreVar)

@@ -754,7 +760,9 @@ proc declareName(self: Compiler, node: Declaration) =
                isPrivate: node.isPrivate,
                isConst: false,
                owner: self.currentModule,
-                valueType: Type(kind: Function, node: node, returnType: self.inferType(node.returnType)),
+                valueType: Type(kind: Function, node: node,
+                        returnType: self.inferType(
+                        node.returnType)),
                codePos: self.chunk.code.len(),
                name: node.name,
                isLet: false))

@@ -1250,7 +1258,7 @@ proc declaration(self: Compiler, node: Declaration) =
        self.statement(Statement(node))


-proc compile*(self: Compiler, ast: seq[ASTNode], file: string): Chunk =
+proc compile*(self: Compiler, ast: seq[Declaration], file: string): Chunk =
    ## Compiles a sequence of AST nodes into a chunk
    ## object
    self.chunk = newChunk()

View File

@@ -99,7 +99,8 @@ proc getToken(self: Lexer, lexeme: string): Token =
    ## string according to the symbol table or
    ## returns nil if there's no match
    let table = self.symbols
-    var kind = table.symbols.getOrDefault(lexeme, table.keywords.getOrDefault(lexeme, NoMatch))
+    var kind = table.symbols.getOrDefault(lexeme, table.keywords.getOrDefault(
+            lexeme, NoMatch))
    if kind == NoMatch:
        return nil
    new(result)

@@ -147,7 +148,8 @@ proc getStart*(self: Lexer): int = self.start
proc getCurrent*(self: Lexer): int = self.current
proc getLine*(self: Lexer): int = self.line
proc getSource*(self: Lexer): string = self.source
-proc getRelPos*(self: Lexer, line: int): tuple[start, stop: int] = (if line > 1: self.lines[line - 2] else: (start: 0, stop: self.current))
+proc getRelPos*(self: Lexer, line: int): tuple[start, stop: int] = (if line >
+        1: self.lines[line - 2] else: (start: 0, stop: self.current))


proc newLexer*(self: Lexer = nil): Lexer =

@@ -206,7 +208,8 @@ proc peek(self: Lexer, distance: int = 0, length: int = 1): string =
    ## will be shorter than length bytes
    var i = distance
    while len(result) < length:
-        if self.done() or self.current + i > self.source.high() or self.current + i < 0:
+        if self.done() or self.current + i > self.source.high() or
+                self.current + i < 0:
            break
        else:
            result.add(self.source[self.current + i])

@@ -490,7 +493,8 @@ proc parseNumber(self: Lexer) =
            discard self.step()
        if self.match("'"):
            # Could be a size specifier, better catch it
-            while (self.peek().isAlphaNumeric() or self.check("_")) and not self.done():
+            while (self.peek().isAlphaNumeric() or self.check("_")) and
+                    not self.done():
                discard self.step()
        self.createToken(kind)
        if kind == Binary:

@@ -558,7 +562,8 @@ proc next(self: Lexer) =
    elif self.match(["\"", "'"]):
        # String or character literal
        var mode = "single"
-        if self.peek(-1) != "'" and self.check(self.peek(-1)) and self.check(self.peek(-1), 1):
+        if self.peek(-1) != "'" and self.check(self.peek(-1)) and self.check(
+                self.peek(-1), 1):
            # Multiline strings start with 3 quotes
            discard self.step(2)
            mode = "multi"

View File

@@ -76,6 +76,7 @@ type
        nanExpr,
        infExpr,
        identExpr, # Identifier
+        pragmaExpr

    # Here I would've rather used object variants, and in fact that's what was in
    # place before, but not being able to re-declare a field of the same type in

@@ -97,6 +98,8 @@ type
    Declaration* = ref object of ASTNode
        ## A declaration
        pragmas*: seq[Pragma]
+        generics*: seq[tuple[name: IdentExpr, cond: Expression]]

    Statement* = ref object of Declaration
        ## A statement

    Expression* = ref object of Statement

@@ -145,7 +148,8 @@ type
    CallExpr* = ref object of Expression
        callee*: Expression # The object being called
-        arguments*: tuple[positionals: seq[Expression], keyword: seq[tuple[name: IdentExpr, value: Expression]]]
+        arguments*: tuple[positionals: seq[Expression], keyword: seq[tuple[
+                name: IdentExpr, value: Expression]]]

    UnaryExpr* = ref object of Expression
        operator*: Token

@@ -165,7 +169,8 @@ type
    LambdaExpr* = ref object of Expression
        body*: Statement
-        arguments*: seq[tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]]
+        arguments*: seq[tuple[name: IdentExpr, valueType: Expression,
+                mutable: bool, isRef: bool, isPtr: bool]]
        defaults*: seq[Expression]
        isGenerator*: bool
        isAsync*: bool

@@ -245,7 +250,8 @@ type
    FunDecl* = ref object of Declaration
        name*: IdentExpr
        body*: Statement
-        arguments*: seq[tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]]
+        arguments*: seq[tuple[name: IdentExpr, valueType: Expression,
+                mutable: bool, isRef: bool, isPtr: bool]]
        defaults*: seq[Expression]
        isAsync*: bool
        isGenerator*: bool

@@ -264,7 +270,8 @@ proc isConst*(self: ASTNode): bool =
    ## strings and singletons count as
    ## constants
    case self.kind:
-        of intExpr, hexExpr, binExpr, octExpr, strExpr, falseExpr, trueExpr, infExpr, nanExpr, floatExpr, nilExpr:
+        of intExpr, hexExpr, binExpr, octExpr, strExpr, falseExpr, trueExpr,
+                infExpr, nanExpr, floatExpr, nilExpr:
            return true
        else:
            return false

@@ -285,6 +292,13 @@ proc newASTNode*(kind: NodeKind, token: Token): ASTNode =
    result.token = token


+proc newPragma*(name: IdentExpr, args: seq[LiteralExpr]): Pragma =
+    new(result)
+    result.kind = pragmaExpr
+    result.args = args
+    result.name = name
+
+
proc newIntExpr*(literal: Token): IntExpr =
    result = IntExpr(kind: intExpr)
    result.literal = literal
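A hedged sketch of how the new constructor might be exercised, assuming this AST module and its Token/newIdentExpr helpers are imported the same way they are used elsewhere in this diff; the pragma name "pure" is only an illustrative choice, not a pragma the compiler is known to handle yet:

    let name = newIdentExpr(Token(lexeme: "pure"))   # identifier node for the pragma's name
    let marker = newPragma(name, @[])                # no arguments attached to this pragma
    echo marker.kind                                 # pragmaExpr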
@@ -315,11 +329,16 @@ proc newFloatExpr*(literal: Token): FloatExpr =
    result.token = literal


-proc newTrueExpr*(token: Token): LiteralExpr = LiteralExpr(kind: trueExpr, token: token, literal: token)
-proc newFalseExpr*(token: Token): LiteralExpr = LiteralExpr(kind: falseExpr, token: token, literal: token)
-proc newNaNExpr*(token: Token): LiteralExpr = LiteralExpr(kind: nanExpr, token: token, literal: token)
-proc newNilExpr*(token: Token): LiteralExpr = LiteralExpr(kind: nilExpr, token: token, literal: token)
-proc newInfExpr*(token: Token): LiteralExpr = LiteralExpr(kind: infExpr, token: token, literal: token)
+proc newTrueExpr*(token: Token): LiteralExpr = LiteralExpr(kind: trueExpr,
+        token: token, literal: token)
+proc newFalseExpr*(token: Token): LiteralExpr = LiteralExpr(kind: falseExpr,
+        token: token, literal: token)
+proc newNaNExpr*(token: Token): LiteralExpr = LiteralExpr(kind: nanExpr,
+        token: token, literal: token)
+proc newNilExpr*(token: Token): LiteralExpr = LiteralExpr(kind: nilExpr,
+        token: token, literal: token)
+proc newInfExpr*(token: Token): LiteralExpr = LiteralExpr(kind: infExpr,
+        token: token, literal: token)


proc newStrExpr*(literal: Token): StrExpr =

@@ -346,8 +365,10 @@ proc newGroupingExpr*(expression: Expression, token: Token): GroupingExpr =
    result.token = token


-proc newLambdaExpr*(arguments: seq[tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]], defaults: seq[Expression], body: Statement,
-                    isGenerator: bool, isAsync: bool, token: Token, returnType: Expression): LambdaExpr =
+proc newLambdaExpr*(arguments: seq[tuple[name: IdentExpr, valueType: Expression,
+        mutable: bool, isRef: bool, isPtr: bool]], defaults: seq[Expression],
+        body: Statement, isGenerator: bool, isAsync: bool, token: Token,
+        returnType: Expression, pragmas: seq[Pragma]): LambdaExpr =
    result = LambdaExpr(kind: lambdaExpr)
    result.body = body
    result.arguments = arguments

@@ -357,16 +378,19 @@ proc newLambdaExpr*(arguments: seq[tuple[name: IdentExpr, valueType: Expression,
    result.token = token
    result.returnType = returnType
    result.isPure = false
+    result.pragmas = pragmas


-proc newGetItemExpr*(obj: Expression, name: IdentExpr, token: Token): GetItemExpr =
+proc newGetItemExpr*(obj: Expression, name: IdentExpr,
+        token: Token): GetItemExpr =
    result = GetItemExpr(kind: getItemExpr)
    result.obj = obj
    result.name = name
    result.token = token


-proc newSetItemExpr*(obj: Expression, name: IdentExpr, value: Expression, token: Token): SetItemExpr =
+proc newSetItemExpr*(obj: Expression, name: IdentExpr, value: Expression,
+        token: Token): SetItemExpr =
    result = SetItemExpr(kind: setItemExpr)
    result.obj = obj
    result.name = name

@@ -374,8 +398,8 @@ proc newSetItemExpr*(obj: Expression, name: IdentExpr, value: Expression, token:
    result.token = token


-proc newCallExpr*(callee: Expression, arguments: tuple[positionals: seq[Expression],
-                  keyword: seq[tuple[name: IdentExpr, value: Expression]]],
+proc newCallExpr*(callee: Expression, arguments: tuple[positionals: seq[
+        Expression], keyword: seq[tuple[name: IdentExpr, value: Expression]]],
                  token: Token): CallExpr =
    result = CallExpr(kind: callExpr)
    result.callee = callee

@@ -412,7 +436,8 @@ proc newYieldExpr*(expression: Expression, token: Token): YieldExpr =
    result.token = token


-proc newAssignExpr*(name: Expression, value: Expression, token: Token): AssignExpr =
+proc newAssignExpr*(name: Expression, value: Expression,
+        token: Token): AssignExpr =
    result = AssignExpr(kind: assignExpr)
    result.name = name
    result.value = value

@@ -484,15 +509,16 @@ proc newBlockStmt*(code: seq[Declaration], token: Token): BlockStmt =
    result.token = token


-proc newWhileStmt*(condition: Expression, body: Statement, token: Token): WhileStmt =
+proc newWhileStmt*(condition: Expression, body: Statement,
+        token: Token): WhileStmt =
    result = WhileStmt(kind: whileStmt)
    result.condition = condition
    result.body = body
    result.token = token


-proc newForEachStmt*(identifier: IdentExpr, expression: Expression, body: Statement,
-                     token: Token): ForEachStmt =
+proc newForEachStmt*(identifier: IdentExpr, expression: Expression,
+        body: Statement, token: Token): ForEachStmt =
    result = ForEachStmt(kind: forEachStmt)
    result.identifier = identifier
    result.expression = expression

View File

@@ -234,7 +234,8 @@ proc findOrAddConstant(self: Chunk, constant: Expression, kind: Type): int =
        if c.kind != constant.kind:
            continue
        if constant.isConst():
-            if LiteralExpr(c).literal.lexeme == LiteralExpr(constant).literal.lexeme:
+            if LiteralExpr(c).literal.lexeme == LiteralExpr(
+                    constant).literal.lexeme:
                # This wouldn't work for stuff like 2e3 and 2000.0, but those
                # forms are collapsed in the compiler before being written
                # to the constants table

View File

@@ -84,6 +84,8 @@ type
        # Stores the current scope depth (0 = global, > 0 local)
        scopeDepth: int
        operators: OperatorTable
+        # The AST node
+        tree: seq[Declaration]


proc newOperatorTable: OperatorTable =

@@ -140,11 +142,13 @@ proc newParser*: Parser =
    result.currentLoop = LoopContext.None
    result.scopeDepth = 0
    result.operators = newOperatorTable()
+    result.tree = @[]


# Public getters for improved error formatting
proc getCurrent*(self: Parser): int {.inline.} = self.current
proc getCurrentToken*(self: Parser): Token {.inline.} = (if self.getCurrent() >=
        self.tokens.high() or
        self.getCurrent() - 1 < 0: self.tokens[^1] else: self.tokens[self.current - 1])


# Handy templates to make our life easier, thanks nim!

@@ -158,7 +162,8 @@ proc peek(self: Parser, distance: int = 0): Token =
    ## token is returned. A negative distance may
    ## be used to retrieve previously consumed
    ## tokens
-    if self.tokens.high() == -1 or self.current + distance > self.tokens.high() or self.current + distance < 0:
+    if self.tokens.high() == -1 or self.current + distance > self.tokens.high(
+            ) or self.current + distance < 0:
        result = endOfFile
    else:
        result = self.tokens[self.current + distance]

@@ -197,7 +202,8 @@ proc error(self: Parser, message: string) {.raises: [ParseError, ValueError].} =
# tell at tokenization time which of the two contexts we're in, we just treat everything
# as a symbol and in the cases where we need a specific token we just match the string
# directly
-proc check[T: TokenType or string](self: Parser, kind: T, distance: int = 0): bool =
+proc check[T: TokenType or string](self: Parser, kind: T,
+        distance: int = 0): bool =
    ## Checks if the given token at the given distance
    ## matches the expected kind and returns a boolean.
    ## The distance parameter is passed directly to

@@ -243,7 +249,8 @@ proc match[T: TokenType or string](self: Parser, kind: openarray[T]): bool =
    result = false


-proc expect[T: TokenType or string](self: Parser, kind: T, message: string = "") =
+proc expect[T: TokenType or string](self: Parser, kind: T,
+        message: string = "") =
    ## Behaves like self.match(), except that
    ## when a token doesn't match, an error
    ## is raised. If no error message is

@@ -255,7 +262,8 @@ proc expect[T: TokenType or string](self: Parser, kind: T, message: string = "")
        self.error(message)


-proc expect[T: TokenType or string](self: Parser, kind: openarray[T], message: string = "") =
+proc expect[T: TokenType or string](self: Parser, kind: openarray[T],
+        message: string = "") =
    ## Behaves like self.expect(), except that
    ## an error is raised only if none of the
    ## given token kinds matches

@@ -270,8 +278,10 @@ proc expect[T: TokenType or string](self: Parser, kind: openarray[T], message: s
proc expression(self: Parser): Expression
proc expressionStatement(self: Parser): Statement
proc statement(self: Parser): Statement
-proc varDecl(self: Parser, isLet: bool = false, isConst: bool = false): Declaration
-proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false, isLambda: bool = false, isOperator: bool = false): Declaration
+proc varDecl(self: Parser, isLet: bool = false,
+        isConst: bool = false): Declaration
+proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false,
+        isLambda: bool = false, isOperator: bool = false): Declaration
proc declaration(self: Parser): Declaration
# End of forward declarations

@@ -343,7 +353,8 @@ proc primary(self: Parser): Expression =
            result = Expression(self.funDecl(isAsync = true, isLambda = true))
        of Generator:
            discard self.step()
-            result = Expression(self.funDecl(isGenerator=true, isLambda=true))
+            result = Expression(self.funDecl(isGenerator = true,
+                    isLambda = true))
        else:
            self.error("invalid syntax")

@@ -353,7 +364,9 @@ proc makeCall(self: Parser, callee: Expression): Expression =
    ## to parse a function call
    let tok = self.peek(-1)
    var argNames: seq[IdentExpr] = @[]
-    var arguments: tuple[positionals: seq[Expression], keyword: seq[tuple[name: IdentExpr, value: Expression]]] = (positionals: @[], keyword: @[])
+    var arguments: tuple[positionals: seq[Expression], keyword: seq[tuple[
+            name: IdentExpr, value: Expression]]] = (positionals: @[],
+            keyword: @[])
    var argument: Expression = nil
    var argCount = 0
    if not self.check(RightParen):

@@ -367,7 +380,8 @@ proc makeCall(self: Parser, callee: Expression): Expression =
                if IdentExpr(AssignExpr(argument).name) in argNames:
                    self.error("duplicate keyword argument in call")
                argNames.add(IdentExpr(AssignExpr(argument).name))
-                arguments.keyword.add((name: IdentExpr(AssignExpr(argument).name), value: AssignExpr(argument).value))
+                arguments.keyword.add((name: IdentExpr(AssignExpr(
+                        argument).name), value: AssignExpr(argument).value))
            elif arguments.keyword.len() == 0:
                arguments.positionals.add(argument)
            else:

@@ -388,7 +402,8 @@ proc call(self: Parser): Expression =
            result = self.makeCall(result)
        elif self.match(Dot):
            self.expect(Identifier, "expecting attribute name after '.'")
-            result = newGetItemExpr(result, newIdentExpr(self.peek(-1)), self.peek(-1))
+            result = newGetItemExpr(result, newIdentExpr(self.peek(-1)),
+                    self.peek(-1))
        elif self.match(LeftBracket):
            # Slicing such as a[1:2], which is then
            # translated to `[]`(a, 1, 2)

@@ -484,7 +499,8 @@ proc parseAssign(self: Parser): Expression =
            of identExpr, sliceExpr:
                result = newAssignExpr(result, value, tok)
            of getItemExpr:
-                result = newSetItemExpr(GetItemExpr(result).obj, GetItemExpr(result).name, value, tok)
+                result = newSetItemExpr(GetItemExpr(result).obj, GetItemExpr(
+                        result).name, value, tok)
            else:
                self.error("invalid assignment target")

@@ -531,6 +547,8 @@ proc blockStmt(self: Parser): Statement =
    var code: seq[Declaration] = @[]
    while not self.check(RightBrace) and not self.done():
        code.add(self.declaration())
+        if self.tree[^1] == nil:
+            self.tree.delete(self.tree.high())
    self.expect(RightBrace, "expecting '}'")
    result = newBlockStmt(code, tok)
    self.endScope()

@@ -715,7 +733,8 @@ proc forStmt(self: Parser): Statement =
    if increment != nil:
        # The increment runs after each iteration, so we
        # inject it into the block as the last statement
-        body = newBlockStmt(@[Declaration(body), newExprStmt(increment, increment.token)], tok)
+        body = newBlockStmt(@[Declaration(body), newExprStmt(increment,
+                increment.token)], tok)
    if condition == nil:
        ## An empty condition is functionally
        ## equivalent to "true"

@@ -766,7 +785,8 @@ template checkDecl(self: Parser, isPrivate: bool) =
        self.error("cannot bind public names inside local scopes")


-proc varDecl(self: Parser, isLet: bool = false, isConst: bool = false): Declaration =
+proc varDecl(self: Parser, isLet: bool = false,
+        isConst: bool = false): Declaration =
    ## Parses variable declarations
    var tok = self.peek(-1)
    var value: Expression

@@ -792,17 +812,22 @@ proc varDecl(self: Parser, isLet: bool = false, isConst: bool = false): Declarat
    self.expect(Semicolon, &"expecting semicolon after declaration")
    case tok.kind:
        of Var:
-            result = newVarDecl(name, value, isPrivate=isPrivate, token=tok, valueType=valueType, pragmas=(@[]))
+            result = newVarDecl(name, value, isPrivate = isPrivate, token = tok,
+                    valueType = valueType, pragmas = (@[]))
        of Const:
-            result = newVarDecl(name, value, isPrivate=isPrivate, token=tok, isConst=true, valueType=valueType, pragmas=(@[]))
+            result = newVarDecl(name, value, isPrivate = isPrivate, token = tok,
+                    isConst = true, valueType = valueType, pragmas = (@[]))
        of Let:
-            result = newVarDecl(name, value, isPrivate=isPrivate, token=tok, isLet=isLet, valueType=valueType, pragmas=(@[]))
+            result = newVarDecl(name, value, isPrivate = isPrivate, token = tok,
+                    isLet = isLet, valueType = valueType, pragmas = (@[]))
        else:
            discard # Unreachable


proc parseDeclArguments(self: Parser, arguments: var seq[tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]],
-                        parameter: var tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool],
+                        parameter: var tuple[name: IdentExpr,
+                                valueType: Expression, mutable: bool,
+                                isRef: bool, isPtr: bool],
                        defaults: var seq[Expression]) =
    ## Helper to parse declaration arguments and avoid code duplication
    while not self.check(RightParen):

@@ -843,11 +868,13 @@ proc parseDeclArguments(self: Parser, arguments: var seq[tuple[name: IdentExpr,
        self.error(&"missing type declaration for '{argument.name.token.lexeme}' in function declaration")


-proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false, isLambda: bool = false, isOperator: bool = false): Declaration =
+proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false,
+        isLambda: bool = false, isOperator: bool = false): Declaration =
    ## Parses functions, coroutines, generators, anonymous functions and operators
    let tok = self.peek(-1)
    var enclosingFunction = self.currentFunction
-    var arguments: seq[tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]] = @[]
+    var arguments: seq[tuple[name: IdentExpr, valueType: Expression,
+            mutable: bool, isRef: bool, isPtr: bool]] = @[]
    var defaults: seq[Expression] = @[]
    var returnType: Expression
    if not isLambda and self.check(Identifier):

@@ -860,8 +887,11 @@ proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false, isL
        self.expect(Identifier, &"expecting identifier after '{tok.lexeme}'")
        self.checkDecl(not self.check("*"))
        self.currentFunction = newFunDecl(nil, arguments, defaults, newBlockStmt(@[], Token()),
-                                          isAsync=isAsync, isGenerator=isGenerator, isPrivate=true,
-                                          token=tok, pragmas=(@[]), returnType=nil)
+                                          isAsync = isAsync,
+                                          isGenerator = isGenerator,
+                                          isPrivate = true,
+                                          token = tok, pragmas = (@[]),
+                                          returnType = nil)
        FunDecl(self.currentFunction).name = newIdentExpr(self.peek(-1))
        if self.match("*"):
            FunDecl(self.currentFunction).isPrivate = false

@@ -879,7 +909,7 @@ proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false, isL
        return result
    elif isLambda:
        self.currentFunction = newLambdaExpr(arguments, defaults, newBlockStmt(@[], Token()), isGenerator = isGenerator, isAsync = isAsync, token = tok,
-                                             returnType=nil)
+                                             returnType = nil, pragmas = (@[]))
    elif not isOperator:
        self.error("funDecl: invalid state")
    if self.match(":"):

@@ -890,12 +920,20 @@ proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false, isL
            # the type declaration for a function lacks
            # the braces that would qualify it as an
            # expression
-            var arguments: seq[tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]] = @[]
+            var arguments: seq[tuple[name: IdentExpr, valueType: Expression,
+                    mutable: bool, isRef: bool, isPtr: bool]] = @[]
            var defaults: seq[Expression] = @[]
            returnType = newLambdaExpr(arguments, defaults, nil, isGenerator = self.peek(-1).kind == Generator,
-                                       isAsync=self.peek(-1).kind == Coroutine,
-                                       token=self.peek(-1), returnType=nil)
-            var parameter: tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]
+                                       isAsync = self.peek(
+                                               -1).kind ==
+                                               Coroutine,
+                                       token = self.peek(
+                                               -1),
+                                       returnType = nil,
+                                       pragmas = (
+                                               @[]))
+            var parameter: tuple[name: IdentExpr, valueType: Expression,
+                    mutable: bool, isRef: bool, isPtr: bool]
            if self.match(LeftParen):
                self.parseDeclArguments(arguments, parameter, defaults)
            if self.match(":"):

@@ -904,17 +942,26 @@ proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false, isL
            returnType = self.expression()
    if not self.match(LeftBrace):
        self.expect(LeftParen)
-        var parameter: tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]
+        var parameter: tuple[name: IdentExpr, valueType: Expression,
+                mutable: bool, isRef: bool, isPtr: bool]
        self.parseDeclArguments(arguments, parameter, defaults)
        if self.match(":"):
            # Function's return type
            if self.match([Function, Coroutine, Generator]):
-                var arguments: seq[tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]] = @[]
+                var arguments: seq[tuple[name: IdentExpr, valueType: Expression,
+                        mutable: bool, isRef: bool, isPtr: bool]] = @[]
                var defaults: seq[Expression] = @[]
                returnType = newLambdaExpr(arguments, defaults, nil, isGenerator = self.peek(-1).kind == Generator,
-                                           isAsync=self.peek(-1).kind == Coroutine,
-                                           token=self.peek(-1), returnType=nil)
-                var parameter: tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]
+                                           isAsync = self.peek(
+                                                   -1).kind ==
+                                                   Coroutine,
+                                           token = self.peek(
+                                                   -1),
+                                           returnType = nil,
+                                           pragmas = (
+                                                   @[]))
+                var parameter: tuple[name: IdentExpr, valueType: Expression,
+                        mutable: bool, isRef: bool, isPtr: bool]
                if self.match(LeftParen):
                    self.parseDeclArguments(arguments, parameter, defaults)
                if self.match(":"):

@@ -1020,12 +1067,30 @@ proc statement(self: Parser): Statement =
            result = self.expressionStatement()


+proc parsePragma(self: Parser): Pragma =
+    ## Parses pragmas
+    if self.scopeDepth == 0:
+        ## Pragmas used at the
+        ## top level are either
+        ## used for compile-time
+        ## switches or for variable
+        ## declarations
+        var decl: VarDecl
+        for node in self.tree:
+            if node.token.line == self.peek(-1).line and node.kind == varDecl:
+                decl = VarDecl(node)
+                break
+    else:
+        var decl = self.currentFunction
+
+
proc declaration(self: Parser): Declaration =
    ## Parses declarations
    case self.peek().kind:
        of Var, Const, Let:
            let keyword = self.step()
-            result = self.varDecl(isLet=keyword.kind == Let, isConst=keyword.kind == Const)
+            result = self.varDecl(isLet = keyword.kind == Let,
+                    isConst = keyword.kind == Const)
        of Function:
            discard self.step()
            result = self.funDecl()

@@ -1038,15 +1103,17 @@ proc declaration(self: Parser): Declaration =
        of Operator:
            discard self.step()
            result = self.funDecl(isOperator = true)
-        of Type, TokenType.Whitespace, TokenType.Tab, Comment:
-            # TODO: Comments, pragmas, docstrings
-            discard self.step() # TODO
+        of TokenType.Comment:
+            let tok = self.step()
+            if tok.lexeme.startsWith("#pragma["):
+                result = self.parsePragma()
+        of Type, TokenType.Whitespace, TokenType.Tab:
+            discard self.step() # TODO
+            return newNilExpr(Token(lexeme: "nil"))
        else:
            result = Declaration(self.statement())


-proc parse*(self: Parser, tokens: seq[Token], file: string): seq[ASTNode] =
+proc parse*(self: Parser, tokens: seq[Token], file: string): seq[Declaration] =
    ## Parses a series of tokens into an AST node
    self.tokens = tokens
    self.file = file

@@ -1055,6 +1122,7 @@ proc parse*(self: Parser, tokens: seq[Token], file: string): seq[ASTNode] =
    self.currentFunction = nil
    self.scopeDepth = 0
    self.operators = newOperatorTable()
+    self.tree = @[]
    for i, token in self.tokens:
        # We do a first pass over the tokens
        # to find operators. Note that this

@@ -1069,4 +1137,7 @@ proc parse*(self: Parser, tokens: seq[Token], file: string): seq[ASTNode] =
        # well perform some extra checks
        self.error("invalid state: found malformed tokenizer input while looking for operators (missing EOF)")
    while not self.done():
-        result.add(self.declaration())
+        self.tree.add(self.declaration())
+        if self.tree[^1] == nil:
+            self.tree.delete(self.tree.high())
+    result = self.tree

View File

@@ -54,9 +54,11 @@ proc reallocate*(p: pointer, oldSize: int, newSize: int): pointer =
        quit(139) # For now, there's not much we can do if we can't get the memory we need, so we exit


-template resizeArray*(kind: untyped, pointr: pointer, oldCount, newCount: int): untyped =
+template resizeArray*(kind: untyped, pointr: pointer, oldCount,
+        newCount: int): untyped =
    ## Handy macro (in the C sense of macro, not nim's) to resize a dynamic array
-    cast[ptr UncheckedArray[kind]](reallocate(pointr, sizeof(kind) * oldCount, sizeof(kind) * newCount))
+    cast[ptr UncheckedArray[kind]](reallocate(pointr, sizeof(kind) * oldCount,
+            sizeof(kind) * newCount))


template freeArray*(kind: untyped, pointr: pointer, oldCount: int): untyped =
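For comparison only, the same grow-a-raw-array idea can be sketched against Nim's built-in alloc/realloc/dealloc rather than this module's reallocate, so the snippet compiles on its own; it is not how the VM allocator is wired up:

    var count = 4
    var arr = cast[ptr UncheckedArray[int]](alloc(sizeof(int) * count))   # initial backing storage
    for i in 0 ..< count:
        arr[i] = i
    count = 8
    arr = cast[ptr UncheckedArray[int]](realloc(arr, sizeof(int) * count)) # grow, preserving old contents
    arr[7] = 99                                                            # new slots are uninitialised until written
    echo arr[3], " ", arr[7]                                               # 3 99
    dealloc(arr)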

View File

@@ -42,7 +42,7 @@ when isMainModule:
    var
        keep = true
        tokens: seq[Token] = @[]
-        tree: seq[ASTNode] = @[]
+        tree: seq[Declaration] = @[]
        compiled: Chunk
        serialized: Serialized
        serializedRaw: seq[byte]

@@ -66,7 +66,8 @@ when isMainModule:
            if input.len() == 0:
                continue
            # Currently the parser doesn't handle these tokens well
-            tokens = filter(tokenizer.lex(input, "stdin"), proc (x: Token): bool = x.kind notin {TokenType.Whitespace, Tab})
+            tokens = filter(tokenizer.lex(input, "stdin"), proc (
+                    x: Token): bool = x.kind notin {TokenType.Whitespace, Tab})
            if tokens.len() == 0:
                continue
            when debugLexer:

View File

@@ -52,9 +52,11 @@ proc simpleInstruction(instruction: OpCode, offset: int): int =
    return offset + 1


-proc stackTripleInstruction(instruction: OpCode, chunk: Chunk, offset: int): int =
+proc stackTripleInstruction(instruction: OpCode, chunk: Chunk,
+        offset: int): int =
    ## Debugs instructions that operate on a single value on the stack using a 24-bit operand
-    var slot = [chunk.code[offset + 1], chunk.code[offset + 2], chunk.code[offset + 3]].fromTriple()
+    var slot = [chunk.code[offset + 1], chunk.code[offset + 2], chunk.code[
+            offset + 3]].fromTriple()
    printInstruction(instruction)
    stdout.write(&", points to index ")
    setForegroundColor(fgYellow)

@@ -63,7 +65,8 @@ proc stackTripleInstruction(instruction: OpCode, chunk: Chunk, offset: int): int
    return offset + 4


-proc stackDoubleInstruction(instruction: OpCode, chunk: Chunk, offset: int): int =
+proc stackDoubleInstruction(instruction: OpCode, chunk: Chunk,
+        offset: int): int =
    ## Debugs instructions that operate on a single value on the stack using a 16-bit operand
    var slot = [chunk.code[offset + 1], chunk.code[offset + 2]].fromDouble()
    printInstruction(instruction)

@@ -74,7 +77,8 @@ proc stackDoubleInstruction(instruction: OpCode, chunk: Chunk, offset: int): int
    return offset + 3


-proc argumentDoubleInstruction(instruction: OpCode, chunk: Chunk, offset: int): int =
+proc argumentDoubleInstruction(instruction: OpCode, chunk: Chunk,
+        offset: int): int =
    ## Debugs instructions that operate on a hardcoded value on the stack using a 16-bit operand
    var slot = [chunk.code[offset + 1], chunk.code[offset + 2]].fromDouble()
    printInstruction(instruction)

@@ -87,7 +91,8 @@ proc argumentDoubleInstruction(instruction: OpCode, chunk: Chunk, offset: int):
proc constantInstruction(instruction: OpCode, chunk: Chunk, offset: int): int =
    ## Debugs instructions that operate on the constant table
-    var constant = [chunk.code[offset + 1], chunk.code[offset + 2], chunk.code[offset + 3]].fromTriple()
+    var constant = [chunk.code[offset + 1], chunk.code[offset + 2], chunk.code[
+            offset + 3]].fromTriple()
    printInstruction(instruction)
    stdout.write(&", points to constant at position ")
    setForegroundColor(fgYellow)

@@ -111,8 +116,10 @@ proc jumpInstruction(instruction: OpCode, chunk: Chunk, offset: int): int =
    case instruction:
        of Jump, JumpIfFalse, JumpIfTrue, JumpIfFalsePop, JumpForwards, JumpBackwards:
            jump = [chunk.code[offset + 1], chunk.code[offset + 2]].fromDouble().int()
-        of LongJump, LongJumpIfFalse, LongJumpIfTrue, LongJumpIfFalsePop, LongJumpForwards, LongJumpBackwards:
-            jump = [chunk.code[offset + 1], chunk.code[offset + 2], chunk.code[offset + 3]].fromTriple().int()
+        of LongJump, LongJumpIfFalse, LongJumpIfTrue, LongJumpIfFalsePop,
+                LongJumpForwards, LongJumpBackwards:
+            jump = [chunk.code[offset + 1], chunk.code[offset + 2], chunk.code[
+                    offset + 3]].fromTriple().int()
        else:
            discard # Unreachable
    printInstruction(instruction, true)

View File

@@ -51,7 +51,8 @@ proc `$`*(self: Serialized): string =
proc error(self: Serializer, message: string) =
    ## Raises a formatted SerializationError exception
-    raise newException(SerializationError, &"A fatal error occurred while (de)serializing '{self.filename}' -> {message}")
+    raise newException(SerializationError,
+            &"A fatal error occurred while (de)serializing '{self.filename}' -> {message}")


proc newSerializer*(self: Serializer = nil): Serializer =

@@ -204,7 +205,8 @@ proc readConstants(self: Serializer, stream: seq[byte]): int =
                stream = stream[1..^1]
                let size = self.bytesToInt([stream[0], stream[1], stream[2]])
                stream = stream[3..^1]
-                self.chunk.consts.add(newIdentExpr(Token(lexeme: self.bytesToString(stream[0..<size]))))
+                self.chunk.consts.add(newIdentExpr(Token(
+                        lexeme: self.bytesToString(stream[0..<size]))))
                # TODO
                # discard self.chunk.addConstant(newIdentExpr(Token(lexeme: self.bytesToString(stream[0..<size]))))
                stream = stream[size..^1]

@@ -263,7 +265,8 @@ proc loadBytes*(self: Serializer, stream: seq[byte]): Serialized =
    if stream[0..<len(BYTECODE_MARKER)] != self.toBytes(BYTECODE_MARKER):
        self.error("malformed bytecode marker")
    stream = stream[len(BYTECODE_MARKER)..^1]
-    result.peonVer = (major: int(stream[0]), minor: int(stream[1]), patch: int(stream[2]))
+    result.peonVer = (major: int(stream[0]), minor: int(stream[1]),
+            patch: int(stream[2]))
    stream = stream[3..^1]
    let branchLength = stream[0]
    stream = stream[1..^1]

@@ -271,7 +274,8 @@ proc loadBytes*(self: Serializer, stream: seq[byte]): Serialized =
    stream = stream[branchLength..^1]
    result.commitHash = self.bytesToString(stream[0..<40]).toLowerAscii()
    stream = stream[40..^1]
-    result.compileDate = self.bytesToInt([stream[0], stream[1], stream[2], stream[3], stream[4], stream[5], stream[6], stream[7]])
+    result.compileDate = self.bytesToInt([stream[0], stream[1], stream[2],
+            stream[3], stream[4], stream[5], stream[6], stream[7]])
    stream = stream[8..^1]
    result.fileHash = self.bytesToString(stream[0..<32]).toHex().toLowerAscii()
    stream = stream[32..^1]