Initial work on pragma handling (plus some parser fixes). Changed the way the lexer handles indentation and made tabs illegal. Added instructions for some operations on primitive types and removed the file argument from the serializer.
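For context on the first point: the parser changes below let pragmas attach to variable, function and type declarations. A hedged sketch of the surface syntax (the #pragma[...] spelling is an assumption; this diff only shows a generic Pragma token being consumed after a declaration's semicolon or opening brace):

    var answer: int = 42;  #pragma[pure]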

This commit is contained in:
Mattia Giambirtone 2022-06-14 12:12:56 +02:00
parent d241333047
commit 5d572386a3
11 changed files with 184 additions and 110 deletions

View File

@ -20,7 +20,7 @@ type
Int8, UInt8, Int16, UInt16, Int32,
UInt32, Int64, UInt64, Float32, Float64,
Char, Byte, String, Function, CustomType,
Nil, Nan, Bool, Inf
Nil, Nan, Bool, Inf, Reference, Pointer
PeonObject* = object
## A generic Peon object
case kind*: ObjectKind:
@ -58,5 +58,7 @@ type
`float`*: float
of Function:
ip*: uint32
of Reference, Pointer:
value*: ptr PeonObject
else:
discard # TODO
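A minimal sketch of constructing the new variants, assuming only the layout shown in this hunk (Float64 is used because its payload field is visible above):

    # Both Reference and Pointer wrap a ptr to another live object
    # through the shared `value` field
    var target = PeonObject(kind: Float64, `float`: 1.0)
    var reference = PeonObject(kind: Reference, value: addr target)
    assert reference.value.kind == Float64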

View File

@ -426,7 +426,7 @@ proc dispatch*(self: PeonVM) =
# stack
self.push(self.closedOver[self.readLong()])
of PopClosure:
# Pops a closed-over variable off the closure
# Removes a closed-over variable from the closure
# array
discard self.closedOver.pop()
of LoadVar:
@ -505,7 +505,7 @@ proc dispatch*(self: PeonVM) =
proc run*(self: PeonVM, chunk: Chunk) =
## Executes a piece of Peon bytecode.
## Executes a piece of Peon bytecode
self.chunk = chunk
self.frames = @[]
self.calls = @[]

View File

@ -19,6 +19,7 @@ import ../util/multibyte
import lexer
import parser
import tables
import strformat
import algorithm
import parseutils
@ -137,9 +138,10 @@ type
# keep track of where to jump)
currentLoop: Loop
# Are we in REPL mode? If so, Pop instructions
# for expression statements emit a special
# PopRepl instruction that stores the value
# to be printed once the expression is evaluated
# for expression statements at the top level are
# swapped for a special PopRepl instruction that
# prints the result of the expression once it is
# evaluated
replMode: bool
# The current module being compiled
# (used to restrict access to statically
@ -159,25 +161,10 @@ type
deferred: seq[uint8]
# List of closed-over variables
closedOver: seq[Name]
# Keeps track of stack frames
frames: seq[int]
proc `$`(self: Name): string =
result &= &"Name(name='{self.name.name.lexeme}', depth={self.depth}, codePos={self.codePos})"
proc newCompiler*(enableOptimizations: bool = true, replMode: bool = false): Compiler =
## Initializes a new Compiler object
new(result)
result.ast = @[]
result.current = 0
result.file = ""
result.names = @[]
result.scopeDepth = 0
result.currentFunction = nil
result.enableOptimizations = enableOptimizations
result.replMode = replMode
result.currentModule = ""
# Compiler procedures called by pragmas
compilerProcs: TableRef[string, proc (self: Compiler, pragma: Pragma, node: ASTNode)]
## Forward declarations
@ -194,8 +181,29 @@ proc findByName(self: Compiler, name: string): seq[Name]
proc findByType(self: Compiler, name: string, kind: Type): seq[Name]
proc compareTypes(self: Compiler, a, b: Type): bool
proc patchReturnAddress(self: Compiler, pos: int)
proc handleMagicPragma(self: Compiler, pragma: Pragma, node: ASTnode)
proc handlePurePragma(self: Compiler, pragma: Pragma, node: ASTnode)
## End of forward declarations
proc newCompiler*(enableOptimizations: bool = true, replMode: bool = false): Compiler =
## Initializes a new Compiler object
new(result)
result.ast = @[]
result.current = 0
result.file = ""
result.names = @[]
result.scopeDepth = 0
result.currentFunction = nil
result.enableOptimizations = enableOptimizations
result.replMode = replMode
result.currentModule = ""
result.compilerProcs = newTable[string, proc (self: Compiler, pragma: Pragma, node: ASTNode)]()
result.compilerProcs["magic"] = handleMagicPragma
result.compilerProcs["pure"] = handlePurePragma
## Public getter for nicer error formatting
proc getCurrentNode*(self: Compiler): ASTNode = (if self.current >=
self.ast.len(): self.ast[^1] else: self.ast[self.current - 1])
@ -211,25 +219,24 @@ proc peek(self: Compiler, distance: int = 0): ASTNode =
## AST node in the tree is returned. A negative
## distance may be used to retrieve previously
## consumed AST nodes
if self.ast.high() == -1 or self.current + distance > self.ast.high() or
self.current + distance < 0:
if self.ast.high() == -1 or self.current + distance > self.ast.high() or self.current + distance < 0:
result = self.ast[^1]
else:
result = self.ast[self.current + distance]
proc done(self: Compiler): bool =
proc done(self: Compiler): bool {.inline.} =
## Returns true if the compiler is done
## compiling, false otherwise
result = self.current > self.ast.high()
proc error(self: Compiler, message: string) {.raises: [CompileError].} =
proc error(self: Compiler, message: string) {.raises: [CompileError], inline.} =
## Raises a CompileError exception
raise CompileError(msg: message, node: self.getCurrentNode(), file: self.file, module: self.currentModule)
proc step(self: Compiler): ASTNode =
proc step(self: Compiler): ASTNode {.inline.} =
## Steps to the next node and returns
## the consumed one
result = self.peek()
@ -237,7 +244,7 @@ proc step(self: Compiler): ASTNode =
self.current += 1
proc emitByte(self: Compiler, byt: OpCode | uint8) =
proc emitByte(self: Compiler, byt: OpCode | uint8) {.inline.} =
## Emits a single byte, writing it to
## the current chunk being compiled
when DEBUG_TRACE_COMPILER:
@ -245,7 +252,7 @@ proc emitByte(self: Compiler, byt: OpCode | uint8) =
self.chunk.write(uint8 byt, self.peek().token.line)
proc emitBytes(self: Compiler, bytarr: openarray[OpCode | uint8]) =
proc emitBytes(self: Compiler, bytarr: openarray[OpCode | uint8]) {.inline.} =
## Handy helper method to write arbitrary bytes into
## the current chunk, calling emitByte on each of its
## elements
@ -403,7 +410,7 @@ proc detectClosureVariable(self: Compiler, name: Name, depth: int = self.scopeDe
## unpredictably or crash
if name.isNil() or name.depth == 0:
return
elif name.depth < depth and not name.isClosedOver:
elif name.depth < depth and not name.isClosedOver:
# Ding! The given name is closed over: we need to
# change the dummy Jump instruction that self.declareName
# put in place for us into a StoreClosure. We also update
@ -1521,9 +1528,16 @@ proc typeDecl(self: Compiler, node: TypeDecl) =
# TODO
proc handleMagicPragma(self: Compiler, pragma: Pragma, node: ASTNode) =
## Handles the "magic" pragma
proc handlePurePragma(self: Compiler, pragma: Pragma, node: ASTNode) =
## Handles the "pure" pragma
proc funDecl(self: Compiler, node: FunDecl) =
## Compiles function declarations
var function = self.currentFunction
self.declareName(node)
self.frames.add(self.names.high())
@ -1553,7 +1567,10 @@ proc funDecl(self: Compiler, node: FunDecl) =
# TODO: Forward declarations
if not node.body.isNil():
if BlockStmt(node.body).code.len() == 0:
self.error("cannot declare function with empty body")
if node.pragmas.len() > 0:
discard
else:
self.error("cannot declare function with empty body")
let fnType = self.inferType(node)
let impl = self.findByType(node.name.token.lexeme, fnType)
if impl.len() > 1:
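The new branch above means a function body containing only pragmas is no longer rejected as empty. A hedged example of what that permits, reusing the assumed #pragma[...] spelling and the "magic" name registered earlier (how "magic" eventually binds a declaration to one of the new typed instructions is not implemented in this commit):

    fn add(a: int64, b: int64): int64 {
        #pragma[magic: "AddInt64", pure]
    }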

View File

@ -51,6 +51,7 @@ type
file: string
lines: seq[tuple[start, stop: int]]
lastLine: int
spaces: int
proc newSymbolTable: SymbolTable =
@ -173,6 +174,7 @@ proc newLexer*(self: Lexer = nil): Lexer =
result.lines = @[]
result.lastLine = 0
result.symbols = newSymbolTable()
result.spaces = 0
proc done(self: Lexer): bool =
@ -282,6 +284,8 @@ proc createToken(self: Lexer, tokenType: TokenType) =
tok.kind = tokenType
tok.lexeme = self.source[self.start..<self.current]
tok.line = self.line
tok.spaces = self.spaces
self.spaces = 0
tok.pos = (start: self.start, stop: self.current)
if len(tok.lexeme) != tok.pos.stop - tok.pos.start:
self.error("invalid state: len(tok.lexeme) != tok.pos.stop - tok.pos.start (this is most likely a compiler bug!)")
@ -555,10 +559,9 @@ proc next(self: Lexer) =
return
elif self.match(" "):
# Whitespaces
self.createToken(TokenType.Whitespace)
self.spaces += 1
elif self.match("\r"):
# Tabs
self.createToken(TokenType.Tab)
self.error("tabs are not allowed in peon code")
elif self.match("\n"):
# New line
self.incLine()
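Runs of spaces are no longer emitted as Whitespace tokens: they are counted in self.spaces and stamped onto the next token by createToken above. A minimal sketch of how downstream code might turn the new field into an indentation level (the helper and the four-spaces-per-level assumption are both hypothetical; nothing in this diff consumes the field yet):

    proc indentLevel(tok: Token, spacesPerLevel: int = 4): int =
        ## Derives an indentation level from the number
        ## of spaces preceding the given token
        result = tok.spaces div spacesPerLevel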

View File

@ -696,13 +696,13 @@ proc `$`*(self: ASTNode): string =
result &= &"Var(name={self.name}, value={self.value}, const={self.isConst}, private={self.isPrivate}, type={self.valueType}, pragmas={self.pragmas})"
of funDecl:
var self = FunDecl(self)
result &= &"""FunDecl(name={self.name}, body={self.body}, type={self.returnType}, arguments=[{self.arguments.join(", ")}], defaults=[{self.defaults.join(", ")}], generics=[{self.generics.join(", ")}], async={self.isAsync}, generator={self.isGenerator}, private={self.isPrivate})"""
result &= &"""FunDecl(name={self.name}, body={self.body}, type={self.returnType}, arguments=[{self.arguments.join(", ")}], defaults=[{self.defaults.join(", ")}], generics=[{self.generics.join(", ")}], async={self.isAsync}, generator={self.isGenerator}, private={self.isPrivate}, pragmas={self.pragmas})"""
of typeDecl:
var self = TypeDecl(self)
result &= &"""TypeDecl(name={self.name}, fields={self.fields}, defaults={self.defaults}, private={self.isPrivate}, pragmas={self.pragmas}, generics={self.generics}, ref={self.isRef})"""
result &= &"""TypeDecl(name={self.name}, fields={self.fields}, defaults={self.defaults}, private={self.isPrivate}, pragmas={self.pragmas}, generics={self.generics}, ref={self.isRef}, pragmas={self.pragmas})"""
of lambdaExpr:
var self = LambdaExpr(self)
result &= &"""Lambda(body={self.body}, type={self.returnType}, arguments=[{self.arguments.join(", ")}], defaults=[{self.defaults.join(", ")}], generator={self.isGenerator}, async={self.isAsync})"""
result &= &"""Lambda(body={self.body}, type={self.returnType}, arguments=[{self.arguments.join(", ")}], defaults=[{self.defaults.join(", ")}], generator={self.isGenerator}, async={self.isAsync}, pragmas={self.pragmas})"""
of deferStmt:
var self = DeferStmt(self)
result &= &"Defer({self.expression})"

View File

@ -23,7 +23,7 @@ import ../../util/multibyte
type
Chunk* = ref object
## A piece of bytecode.
## consts is used when serializing to/from a bytecode stream.
## consts is the code's constants table.
## code is the linear sequence of compiled bytecode instructions.
## lines maps bytecode instructions to line numbers using Run
## Length Encoding. Instructions are encoded in groups whose structure
@ -90,6 +90,39 @@ type
LoadFalse,
LoadNan,
LoadInf,
## Operations on primitive types
AddInt64,
AddUInt64,
AddInt32,
AddUInt32,
AddInt16,
AddUInt16,
AddInt8,
AddUInt8,
SubInt64,
SubUInt64,
SubInt32,
SubUInt32,
SubInt16,
SubUInt16,
SubInt8,
SubUInt8,
MulInt64,
MulUInt64,
MulInt32,
MulUInt32,
MulInt16,
MulUInt16,
MulInt8,
MulUInt8,
DivInt64,
DivUInt64,
DivInt32,
DivUInt32,
DivInt16,
DivUInt16,
DivInt8,
DivUInt8,
## Basic stack operations
Pop, # Pops an element off the stack and discards it
PopRepl, # Same as Pop, but also prints the value of what's popped (used in REPL mode)
@ -139,9 +172,20 @@ const simpleInstructions* = {Return, LoadNil,
LoadNan, LoadInf,
Pop, PopRepl, Raise,
BeginTry, FinishTry, Yield,
Await, NoOp, PopClosure,
SetResult, LoadArgument,
PopC, PushC}
Await, NoOp, SetResult,
LoadArgument, PopC, PushC,
AddInt64, AddUInt64, AddInt32,
AddUInt32, AddInt16, AddUInt16,
AddInt8, AddUInt8, SubInt64,
SubUInt64, SubInt32, SubUInt32,
SubInt16, SubUInt16, SubInt8,
SubUInt8, MulInt64, MulUInt64,
MulInt32, MulUInt32, MulInt16,
MulUInt16, MulInt8, MulUInt8,
DivInt64, DivUInt64, DivInt32,
DivUInt32, DivInt16, DivUInt16,
DivInt8, DivUInt8
}
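The typed arithmetic opcodes take no inline operands, which is why they join simpleInstructions. A hedged sketch of how the VM's dispatch loop might handle one of them (not part of this diff: pop is assumed to mirror the push helper used in dispatch, and the backtick-quoted `long` payload field of PeonObject is a guess modeled on the `float` field shown earlier):

    of AddInt64:
        # Pops the two operands and pushes their 64-bit signed sum
        let b = self.pop()
        let a = self.pop()
        self.push(PeonObject(kind: Int64, `long`: a.`long` + b.`long`))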
# Constant instructions are instructions that operate on the bytecode constant table
const constantInstructions* = {LoadInt64, LoadUInt64,
@ -153,7 +197,7 @@ const constantInstructions* = {LoadInt64, LoadUInt64,
# Stack triple instructions operate on the stack at arbitrary offsets and pop arguments off of it in the form
# of 24 bit integers
const stackTripleInstructions* = {StoreVar, LoadVar, LoadClosure, StoreClosure, }
const stackTripleInstructions* = {StoreVar, LoadVar, LoadClosure, }
# Stack double instructions operate on the stack at arbitrary offsets and pop arguments off of it in the form
# of 16 bit integers
@ -163,7 +207,7 @@ const stackDoubleInstructions* = {}
const argumentDoubleInstructions* = {PopN, }
# Argument double argument instructions take hardcoded arguments as 24 bit integers
const argumentTripleInstructions* = {LoadFunctionObj, }
const argumentTripleInstructions* = {LoadFunctionObj, StoreClosure, PopClosure}
# Instructions that call functions
const callInstructions* = {Call, }

View File

@ -60,11 +60,7 @@ type
Comment, # Useful for documentation comments, pragmas, etc.
Symbol, # A generic symbol
Pragma,
# These are not used at the moment but may be
# employed to enforce indentation or other neat
# stuff I haven't thought about yet
Whitespace,
Tab,
@ -75,13 +71,14 @@ type
line*: int # The line where the token appears
pos*: tuple[start, stop: int] # The absolute position in the source file
# (0-indexed and inclusive at the beginning)
spaces*: int # Number of spaces before this token
proc `$`*(self: Token): string =
## Stringifies
if self != nil:
result = &"Token(kind={self.kind}, lexeme='{$(self.lexeme)}', line={self.line}, pos=({self.pos.start}, {self.pos.stop}))"
result = &"Token(kind={self.kind}, lexeme='{$(self.lexeme)}', line={self.line}, pos=({self.pos.start}, {self.pos.stop}), spaces={self.spaces})"
else:
result = "nil"

View File

@ -788,6 +788,44 @@ template checkDecl(self: Parser, isPrivate: bool) =
self.error("cannot bind public names inside local scopes")
proc parsePragmas(self: Parser): seq[Pragma] =
## Parses pragmas
var
name: IdentExpr
args: seq[LiteralExpr]
exp: Expression
names: seq[string]
while not self.match("]") and not self.done():
args = @[]
self.expect(Identifier, "expecting pragma name")
if self.peek(-1).lexeme in names:
self.error("duplicate pragmas are not allowed")
names.add(self.peek(-1).lexeme)
name = newIdentExpr(self.peek(-1))
if not self.match(":"):
if self.match("]"):
result.add(newPragma(name, @[]))
break
elif self.match("("):
while not self.match(")") and not self.done():
exp = self.primary()
if not exp.isLiteral():
self.error("pragma arguments can only be literals")
args.add(LiteralExpr(exp))
if not self.match(","):
break
self.expect(")", "unterminated parenthesis in pragma arguments")
else:
exp = self.primary()
if not exp.isLiteral():
self.error("pragma arguments can only be literals")
args.add(LiteralExpr(exp))
result.add(newPragma(name, args))
if self.match(","):
continue
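In grammar terms, the loop above accepts roughly the following (an informal EBNF reading of the code, not an official grammar; note that as written the comma between pragmas is optional, and a bare name is only accepted immediately before the closing bracket):

    pragmas ::= pragma ([","] pragma)* "]"
    pragma  ::= IDENT [":" (literal | "(" literal ("," literal)* ")")]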
proc varDecl(self: Parser, isLet: bool = false,
isConst: bool = false): Declaration =
## Parses variable declarations
@ -799,6 +837,7 @@ proc varDecl(self: Parser, isLet: bool = false,
self.checkDecl(isPrivate)
var valueType: IdentExpr
var hasInit = false
var pragmas: seq[Pragma] = @[]
if self.match(":"):
# We don't enforce it here because
# the compiler may be able to infer
@ -815,6 +854,9 @@ proc varDecl(self: Parser, isLet: bool = false,
self.error(&"{tok.lexeme} declaration requires an initializer")
value = newNilExpr(Token(lexeme: "nil"))
self.expect(Semicolon, "expecting semicolon after declaration")
if self.match(TokenType.Pragma):
for pragma in self.parsePragmas():
pragmas.add(pragma)
case tok.kind:
of Var:
result = newVarDecl(name, value, isPrivate = isPrivate, token = tok,
@ -828,7 +870,8 @@ proc varDecl(self: Parser, isLet: bool = false,
else:
discard # Unreachable
if not hasInit and VarDecl(result).valueType == nil:
self.error("expecting initializer or type declaration, but neither was found")
self.error("expecting initializer or explicit type declaration, but neither was found")
result.pragmas = pragmas
proc parseDeclArguments(self: Parser, arguments: var seq[tuple[name: IdentExpr, valueType: Expression, mutable: bool, isRef: bool, isPtr: bool]],
@ -920,6 +963,7 @@ proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false,
mutable: bool, isRef: bool, isPtr: bool]] = @[]
var defaults: seq[Expression] = @[]
var returnType: Expression
var pragmas: seq[Pragma] = @[]
if not isLambda and self.match(Identifier):
# We do this extra check because we might
# be called from a context where it's
@ -981,16 +1025,25 @@ proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false,
# If we don't find a semicolon,
# it's not a forward declaration
self.expect(LeftBrace)
if self.match(TokenType.Pragma):
for pragma in self.parsePragmas():
pragmas.add(pragma)
FunDecl(self.currentFunction).body = self.blockStmt()
else:
# This is a forward declaration, so we explicitly
# nullify the function's body to tell the compiler
# to look for it elsewhere in the file later
FunDecl(self.currentFunction).body = nil
if self.match(TokenType.Pragma):
for pragma in self.parsePragmas():
pragmas.add(pragma)
FunDecl(self.currentFunction).arguments = arguments
FunDecl(self.currentFunction).returnType = returnType
else:
self.expect(LeftBrace)
if self.match(TokenType.Pragma):
for pragma in self.parsePragmas():
pragmas.add(pragma)
LambdaExpr(Expression(self.currentFunction)).body = self.blockStmt()
LambdaExpr(Expression(self.currentFunction)).arguments = arguments
LambdaExpr(Expression(self.currentFunction)).returnType = returnType
@ -1004,6 +1057,7 @@ proc funDecl(self: Parser, isAsync: bool = false, isGenerator: bool = false,
if argument.valueType == nil:
self.error(&"missing type declaration for '{argument.name.token.lexeme}' in function declaration")
self.currentFunction = enclosingFunction
result.pragmas = pragmas
proc expression(self: Parser): Expression =
@ -1076,43 +1130,6 @@ proc statement(self: Parser): Statement =
result = self.expressionStatement()
proc parsePragmas(self: Parser): seq[Pragma] =
## Parses pragmas
var
name: IdentExpr
args: seq[LiteralExpr]
exp: Expression
names: seq[string]
while not self.match("]") and not self.done():
args = @[]
self.expect(Identifier, "expecting pragma name")
if self.peek(-1).lexeme in names:
self.error("duplicate pragmas are not allowed")
names.add(self.peek(-1).lexeme)
name = newIdentExpr(self.peek(-1))
if not self.match(":"):
if self.match("]"):
result.add(newPragma(name, @[]))
break
elif self.match("("):
while not self.match(")") and not self.done():
exp = self.primary()
if not exp.isLiteral():
self.error("pragma arguments can only be literals")
args.add(LiteralExpr(exp))
if not self.match(","):
break
self.expect(")", "unterminated parenthesis in pragma arguments")
else:
exp = self.primary()
if not exp.isLiteral():
self.error("pragma arguments can only be literals")
args.add(LiteralExpr(exp))
if self.match(","):
continue
result.add(newPragma(name, args))
proc typeDecl(self: Parser): TypeDecl =
## Parses type declarations
let token = self.peek(-1)
@ -1140,6 +1157,9 @@ proc typeDecl(self: Parser): TypeDecl =
else:
self.error("invalid syntax")
self.expect(LeftBrace, "expecting '{' after type declaration")
if self.match(TokenType.Pragma):
for pragma in self.parsePragmas():
pragmas.add(pragma)
var
argName: IdentExpr
argMutable: bool
@ -1169,6 +1189,7 @@ proc typeDecl(self: Parser): TypeDecl =
if self.match("="):
result.defaults.add(self.expression())
self.expect(";", "expecting semicolon after field declaration")
result.pragmas = pragmas
proc declaration(self: Parser): Declaration =
@ -1197,8 +1218,8 @@ proc declaration(self: Parser): Declaration =
of Type:
discard self.step()
result = self.typeDecl()
of TokenType.Whitespace, TokenType.Tab:
discard self.step() # TODO
of Comment:
discard self.step() # TODO: Docstrings and stuff
else:
result = Declaration(self.statement())
@ -1206,11 +1227,6 @@ proc declaration(self: Parser): Declaration =
proc parse*(self: Parser, tokens: seq[Token], file: string): seq[Declaration] =
## Parses a sequence of tokens into a sequence of AST nodes
self.tokens = @[]
# The parser is not designed to handle these tokens.
# Maybe create a separate syntax checker module?
for token in tokens:
if token.kind notin {TokenType.Whitespace, Tab}:
self.tokens.add(token)
self.file = file
self.current = 0
self.currentLoop = LoopContext.None

View File

@ -105,10 +105,9 @@ proc repl(vm: PeonVM = newPeonVM()) =
debugger.disassembleChunk(compiled, "stdin")
echo ""
serializer.dumpFile(compiled, input, "stdin", "stdin.pbc")
serializer.dumpFile(compiled, "stdin", "stdin.pbc")
serialized = serializer.loadFile("stdin.pbc")
when debugSerializer:
var hashMatches = computeSHA256(input).toHex().toLowerAscii() == serialized.fileHash
styledEcho fgCyan, "Serialization step: "
styledEcho fgBlue, "\t- Peon version: ", fgYellow, &"{serialized.version.major}.{serialized.version.minor}.{serialized.version.patch}", fgBlue, " (commit ", fgYellow, serialized.commit[0..8], fgBlue, ") on branch ", fgYellow, serialized.branch
stdout.styledWriteLine(fgBlue, "\t- Compilation date & time: ", fgYellow, fromUnix(serialized.compileDate).format("d/M/yyyy HH:mm:ss"))
@ -229,7 +228,7 @@ proc runFile(f: string, interactive: bool = false, fromString: bool = false) =
debugger.disassembleChunk(compiled, f)
echo ""
serializer.dumpFile(compiled, input, f, splitFile(f).dir & "/" & splitFile(f).name & ".pbc")
serializer.dumpFile(compiled, f, splitFile(f).dir & "/" & splitFile(f).name & ".pbc")
serialized = serializer.loadFile(splitFile(f).dir & "/" & splitFile(f).name & ".pbc")
when debugSerializer:
var hashMatches = computeSHA256(input).toHex().toLowerAscii() == serialized.fileHash

View File

@ -60,7 +60,7 @@ proc newSerializer*(self: Serializer = nil): Serializer =
result.chunk = nil
proc writeHeaders(self: Serializer, stream: var seq[byte], file: string) =
proc writeHeaders(self: Serializer, stream: var seq[byte]) =
## Writes the Peon bytecode headers in-place into a byte stream
stream.extend(BYTECODE_MARKER.toBytes())
stream.add(byte(PEON_VERSION.major))
@ -174,25 +174,22 @@ proc readCode(self: Serializer, stream: seq[byte]): int =
return int(size)
proc dumpBytes*(self: Serializer, chunk: Chunk, file, filename: string): seq[byte] =
proc dumpBytes*(self: Serializer, chunk: Chunk, filename: string): seq[byte] =
## Dumps the given bytecode to a sequence of bytes and returns it.
## The file argument must be the actual file's content and is needed to
## compute its SHA256 hash.
self.file = file
self.filename = filename
self.chunk = chunk
self.writeHeaders(result, self.file)
self.writeHeaders(result)
self.writeLineData(result)
self.writeCFIData(result)
self.writeConstants(result)
self.writeCode(result)
proc dumpFile*(self: Serializer, chunk: Chunk, file, filename, dest: string) =
proc dumpFile*(self: Serializer, chunk: Chunk, filename, dest: string) =
## Dumps the result of dumpBytes to a file at dest
var fp = open(dest, fmWrite)
defer: fp.close()
let data = self.dumpBytes(chunk, file, filename)
let data = self.dumpBytes(chunk, filename)
discard fp.writeBytes(data, 0, len(data))

View File

@ -1,7 +1,6 @@
fn makeClosure(n: int): fn: int {
var x = n;
fn inner: int {
return x;
return n;
}
return inner;
}