also delete this #3

Closed
N00nehere wants to merge 49 commits from (deleted):n00nehere-patch-2 into master
3 changed files with 38 additions and 7 deletions
Showing only changes of commit 625d090350


@@ -479,7 +479,7 @@ proc declareName(self: Compiler, node: ASTNode, kind: IdentExpr) =
    if self.names.high() > 16777215:
        # If someone ever hits this limit in real-world scenarios, I swear I'll
        # slap myself 100 times with a sign saying "I'm dumb". Mark my words
-        self.error("cannot declare more than 16777215 variables at a time")
+        self.error("cannot declare more than 16777216 variables at a time")
    self.names.add(Name(depth: self.scopeDepth, name: IdentExpr(node.name),
                        isPrivate: node.isPrivate,
                        owner: self.currentModule,
@@ -499,7 +499,7 @@ proc declareName(self: Compiler, node: ASTNode, kind: IdentExpr) =
    # and decrements the scope depth)
    for argument in node.arguments:
        if self.names.high() > 16777215:
-            self.error("cannot declare more than 16777215 variables at a time")
+            self.error("cannot declare more than 16777216 variables at a time")
        self.names.add(Name(depth: self.scopeDepth + 1, isPrivate: true, owner: self.currentModule, isConst: false, name: IdentExpr(argument.name), valueType: kind))
        self.emitByte(LoadVar)
        self.emitBytes(self.names.high().toTriple())
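The 16777215 limit lines up with name indices being emitted as three-byte operands via toTriple(): 24 bits can address 16777216 variables (indices 0 through 16777215), which is likely why the message now says 16777216. Below is a minimal sketch of such a 24-bit encoding; the compiler's actual toTriple() may be implemented differently, so treat this purely as an illustration of where the limit comes from.

type Triple = array[3, uint8]

proc toTriple(n: int): Triple =
    ## Packs a non-negative index into 3 big-endian bytes (sketch only).
    doAssert n <= 0xFFFFFF, "index does not fit in 24 bits"
    result[0] = uint8((n shr 16) and 0xFF)
    result[1] = uint8((n shr 8) and 0xFF)
    result[2] = uint8(n and 0xFF)

when isMainModule:
    echo toTriple(16777215)   # [255, 255, 255] -- the highest encodable index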


@@ -34,6 +34,7 @@ const debugSerializer = true
when isMainModule:
+    setControlCHook(proc () {.noconv.} = quit(0))
    var
        keep = true
@@ -106,11 +107,17 @@ when isMainModule:
            serialized = serializer.loadBytes(serializedRaw)
            when debugSerializer:
                echo "Serialization step: "
+                echo "Dumping bytecode to 'stdin.pbc'\n"
+                serializer.dumpToFile(compiled, input, "<stdin>", "stdin.pbc")
+                echo "Loading 'stdin.pbc'\n"
+                serialized = serializer.loadFile("stdin.pbc")
+                echo "Deserialized 'stdin.pbc':"
                stdout.write("\t")
                echo &"""Raw hex output: {serializedRaw.mapIt(toHex(it)).join("").toLowerAscii()}"""
                echo ""
                echo "Deserialization step:"
                echo &"\t- File hash: {serialized.fileHash} (matches: {computeSHA256(input).toHex().toLowerAscii() == serialized.fileHash})"
                echo &"\t- Peon version: {serialized.peonVer.major}.{serialized.peonVer.minor}.{serialized.peonVer.patch} (commit {serialized.commitHash[0..8]} on branch {serialized.peonBranch})"
                stdout.write("\t")
@@ -152,7 +159,8 @@ when isMainModule:
            echo getCurrentExceptionMsg()
            echo &"Source line: {line}"
            echo " ".repeat(relPos.start + len("Source line: ")) & "^".repeat(relPos.stop - compiler.getCurrentNode().token.lexeme.len())
        except SerializationError:
            echo getCurrentExceptionMsg()
            quit(0)
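For reference, the "Raw hex output" line above builds a lowercase hex string from the serialized byte stream with sequtils.mapIt, strutils.toHex, join and toLowerAscii. A standalone sketch of the same expression (the sample bytes below are made up, not real peon bytecode):

import std/[sequtils, strutils]

# Stand-in for the serialized bytecode; the real serializedRaw comes from
# serializer.dumpBytes().
let serializedRaw: seq[byte] = @[0x50'u8, 0x45, 0x4F, 0x4E]
echo serializedRaw.mapIt(toHex(it)).join("").toLowerAscii()   # prints "50454f4e"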


@@ -226,7 +226,7 @@ proc readCode(self: Serializer, stream: seq[byte]): int =
    var stream = stream[3..^1]
    for i in countup(0, int(size) - 1):
        self.chunk.code.add(stream[i])
-    assert len(self.chunk.code) == int(size)
+    doAssert len(self.chunk.code) == int(size)
    return int(size)
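The switch from assert to doAssert matters because assert is compiled out when assertions are disabled (for example with --assertions:off or -d:danger), while doAssert always stays in. That keeps the length check active in optimized builds, so a short chunk still raises AssertionDefect, which loadBytes below presumably turns into a "corrupted bytecode" error. A tiny standalone illustration (not the serializer's code):

# doAssert, unlike assert, survives --assertions:off and -d:danger.
proc checkChunkSize(code: seq[byte], expected: int) =
    doAssert code.len == expected, "truncated chunk"

checkChunkSize(@[1'u8, 2, 3], 3)     # passes
# checkChunkSize(@[1'u8, 2], 3)      # raises AssertionDefect even with -d:danger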
@@ -241,6 +241,14 @@ proc dumpBytes*(self: Serializer, chunk: Chunk, file, filename: string): seq[byte] =
    self.writeCode(result)
+proc dumpToFile*(self: Serializer, chunk: Chunk, file, filename, dest: string) =
+    ## Dumps the result of dumpBytes to a file at dest
+    var fp = open(dest, fmWrite)
+    defer: fp.close()
+    let data = self.dumpBytes(chunk, file, filename)
+    discard fp.writeBytes(data, 0, len(data))
proc loadBytes*(self: Serializer, stream: seq[byte]): Serialized =
    ## Loads the result from dumpBytes to a Serializer object
    ## for use in the VM or for inspection
@@ -268,6 +276,21 @@ proc loadBytes*(self: Serializer, stream: seq[byte]): Serialized =
        stream = stream[self.readConstants(stream)..^1]
        stream = stream[self.readCode(stream)..^1]
    except IndexDefect:
-        self.error("truncated bytecode file")
+        self.error("truncated bytecode stream")
    except AssertionDefect:
-        self.error("corrupted bytecode file")
+        self.error(&"corrupted bytecode stream: {getCurrentExceptionMsg()}")
+proc loadFile*(self: Serializer, src: string): Serialized =
+    ## Loads a bytecode file
+    var fp = open(src, fmRead)
+    defer: fp.close()
+    let size = fp.getFileSize()
+    var pos = 0'i64
+    var data: seq[byte] = newSeqOfCap[byte](size)
+    for _ in 0..<size:
+        data.add(0)
+    while pos < size:
+        discard fp.readBytes(data, pos, size)
+        pos = fp.getFilePos()
+    return self.loadBytes(data)
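loadFile() reads the whole bytecode file into a seq[byte] before handing it to loadBytes(). Below is a standalone sketch of that read loop against Nim's stdlib File API; the file name is only an example and this is not the serializer's exact code.

proc readAllBytes(path: string): seq[byte] =
    ## Reads an entire file into memory as raw bytes.
    var fp = open(path, fmRead)
    defer: fp.close()
    let size = int(fp.getFileSize())
    result = newSeq[byte](size)
    var pos = 0
    while pos < size:
        let n = fp.readBytes(result, pos, size - pos)
        if n <= 0:
            break                    # short read: stop instead of spinning
        pos += n

when isMainModule:
    let data = readAllBytes("stdin.pbc")   # e.g. the dump produced by dumpToFile above
    echo data.len, " bytes read"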