Hooked up old JAPL components

Mattia Giambirtone 2022-04-07 15:06:15 +02:00
parent 78e169bd82
commit a388333278
5 changed files with 113 additions and 81 deletions

View File

@@ -14,7 +14,7 @@
import strformat
const BYTECODE_MARKER* = "JAPL_BYTECODE"
const BYTECODE_MARKER* = "PEON_BYTECODE"
const MAP_LOAD_FACTOR* = 0.75 # Load factor for builtin hashmaps
when MAP_LOAD_FACTOR >= 1.0:
{.fatal: "Hashmap load factor must be < 1".}
@@ -24,21 +24,21 @@ when HEAP_GROW_FACTOR <= 1:
const MAX_STACK_FRAMES* = 800 # The maximum number of stack frames at any one time. Acts as a recursion limiter (1 frame = 1 call)
when MAX_STACK_FRAMES <= 0:
{.fatal: "The frame limit must be > 0".}
const JAPL_VERSION* = (major: 0, minor: 4, patch: 0)
const JAPL_RELEASE* = "alpha"
const JAPL_COMMIT_HASH* = "ba9c8b4e5664c0670eb8925d65b307e397d6ed82"
when len(JAPL_COMMIT_HASH) != 40:
const PEON_VERSION* = (major: 0, minor: 4, patch: 0)
const PEON_RELEASE* = "alpha"
const PEON_COMMIT_HASH* = "ba9c8b4e5664c0670eb8925d65b307e397d6ed82"
when len(PEON_COMMIT_HASH) != 40:
{.fatal: "The git commit hash must be exactly 40 characters long".}
const JAPL_BRANCH* = "master"
when len(JAPL_BRANCH) >= 255:
const PEON_BRANCH* = "master"
when len(PEON_BRANCH) >= 255:
{.fatal: "The git branch name's length must be less than or equal to 255 characters".}
const DEBUG_TRACE_VM* = false # Traces VM execution
const SKIP_STDLIB_INIT* = false # Skips stdlib initialization (can be imported manually)
const DEBUG_TRACE_GC* = false # Traces the garbage collector (TODO)
const DEBUG_TRACE_ALLOCATION* = false # Traces memory allocation/deallocation
const DEBUG_TRACE_COMPILER* = false # Traces the compiler
const JAPL_VERSION_STRING* = &"JAPL {JAPL_VERSION.major}.{JAPL_VERSION.minor}.{JAPL_VERSION.patch} {JAPL_RELEASE} ({JAPL_BRANCH}, {CompileDate}, {CompileTime}, {JAPL_COMMIT_HASH[0..8]}) [Nim {NimVersion}] on {hostOS} ({hostCPU})"
const HELP_MESSAGE* = """The JAPL programming language, Copyright (C) 2022 Mattia Giambirtone & All Contributors
const PEON_VERSION_STRING* = &"Peon {PEON_VERSION.major}.{PEON_VERSION.minor}.{PEON_VERSION.patch} {PEON_RELEASE} ({PEON_BRANCH}, {CompileDate}, {CompileTime}, {PEON_COMMIT_HASH[0..8]}) [Nim {NimVersion}] on {hostOS} ({hostCPU})"
const HELP_MESSAGE* = """The peon programming language, Copyright (C) 2022 Mattia Giambirtone & All Contributors
This program is free software, see the license distributed with this program or check
http://www.apache.org/licenses/LICENSE-2.0 for more info.
@@ -47,13 +47,13 @@ Basic usage
-----------
$ jpl Opens an interactive session (REPL)
$ jpl file.jpl Runs the given JAPL source file
$ jpl file.jpl Runs the given PEON source file
Command-line options
--------------------
-h, --help Shows this help text and exits
-v, --version Prints the JAPL version number and exits
-v, --version Prints the peon version number and exits
-s, --string Executes the passed string as if it was a file
-i, --interactive Enables interactive mode, which opens a REPL session after execution of a file or source string
-c, --nocache Disables dumping the result of bytecode compilation to files for caching
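
The renamed constants above keep the same compile-time validation pattern: each `when` branch is evaluated by the compiler, and the `fatal` pragma aborts compilation with the given message if a constraint fails. A minimal stand-alone sketch of that pattern (the constant name is hypothetical, not part of this commit):

const EXAMPLE_LOAD_FACTOR = 0.75   # hypothetical constant, for illustration only

when EXAMPLE_LOAD_FACTOR >= 1.0:
    # Evaluated entirely at compile time; compilation stops here if the check fails
    {.fatal: "Hashmap load factor must be < 1".}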

View File

@@ -262,7 +262,7 @@ type
name*: ASTNode
body*: ASTNode
arguments*: seq[tuple[name: ASTNode, valueType: ASTNode]]
defaults: seq[ASTNode]
defaults*: seq[ASTNode]
isAsync*: bool
isGenerator*: bool
isPrivate*: bool
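
The one-character change above (defaults to defaults*) adds Nim's export marker, which is what lets other modules (such as the optimizer and compiler) access the field. A minimal illustration with a hypothetical object, not the real FunDecl definition:

type
    ExampleDecl = ref object
        name*: string        # exported: visible to modules that import this one
        defaults: seq[int]   # unexported: private to the defining module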

View File

@@ -201,14 +201,14 @@ proc optimizeBinary(self: Optimizer, node: BinaryExpr): ASTNode =
# Special case, yields a float
return FloatExpr(kind: floatExpr, literal: Token(kind: Float, lexeme: $(x / y), line: IntExpr(a).literal.line, pos: (start: -1, stop: -1)))
else:
result = BinaryExpr(kind: binaryExpr, a: a, b: b, operator: node.operator)
result = BinaryExpr(kind: binaryExpr, a: a, b: b, operator: node.operator, token: node.token)
except OverflowDefect:
self.newWarning(valueOverflow, node)
return BinaryExpr(kind: binaryExpr, a: a, b: b, operator: node.operator)
except RangeDefect:
# TODO: What warning do we raise here?
return BinaryExpr(kind: binaryExpr, a: a, b: b, operator: node.operator)
result = IntExpr(kind: intExpr, literal: Token(kind: Integer, lexeme: $z, line: IntExpr(a).literal.line, pos: (start: -1, stop: -1)))
result = newIntExpr(Token(kind: Integer, lexeme: $z, line: IntExpr(a).literal.line, pos: (start: -1, stop: -1)))
elif a.kind == floatExpr or b.kind == floatExpr:
var x, y, z: float
if a.kind == intExpr:
@@ -279,23 +279,6 @@ proc optimizeBinary(self: Optimizer, node: BinaryExpr): ASTNode =
result = node
proc detectClosures(self: Optimizer, node: FunDecl) =
## Goes through a function's code and detects
## references to variables in enclosing local
## scopes
var names: seq[Declaration] = @[]
for line in BlockStmt(node.body).code:
case line.kind:
of varDecl:
names.add(VarDecl(line))
of funDecl:
names.add(FunDecl(line))
else:
discard
for name in names:
discard
proc optimizeNode(self: Optimizer, node: ASTNode): ASTNode =
## Analyzes an AST node and attempts to perform
## optimizations on it. If no optimizations can be
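
The folding logic above evaluates constant integer operands but backs off, emitting a valueOverflow warning, when the arithmetic overflows at optimization time. A self-contained sketch of that guard using plain integers instead of AST nodes (not the optimizer's real API; it relies on Nim's default overflow checks and on defects being catchable, i.e. the default --panics:off):

proc tryFoldAdd(x, y: int): tuple[ok: bool, value: int] =
    ## Folds x + y eagerly, or reports failure on overflow so the
    ## caller can keep the original expression untouched.
    try:
        result = (ok: true, value: x + y)
    except OverflowDefect:
        result = (ok: false, value: 0)

echo tryFoldAdd(2, 3)            # (ok: true, value: 5)
echo tryFoldAdd(high(int), 1)    # (ok: false, value: 0)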

View File

@@ -1,16 +1,9 @@
# Builtins
# Builtins & external libs
import sequtils
import strutils
import strformat
# Our stuff
import frontend/lexer as l
import frontend/parser as p
import frontend/compiler as c
import util/debugger
# Thanks art <3
import strutils
import nimSHA2
import times
import jale/editor as ed
import jale/templates
import jale/plugin/defaults
@@ -19,14 +12,25 @@ import jale/keycodes
import jale/multiline
# Our stuff
import frontend/lexer as l
import frontend/parser as p
import frontend/compiler as c
import frontend/optimizer as o
import util/serializer as s
import util/debugger
# Forward declarations
proc fillSymbolTable(tokenizer: Lexer)
proc getLineEditor: LineEditor
const debugLexer = false
const debugLexer = true
const debugParser = true
const debugCompiler = true
const debugOptimizer = true
const debugSerializer = true
when isMainModule:
@@ -35,9 +39,14 @@ when isMainModule:
var tokens: seq[Token] = @[]
var tree: seq[ASTNode] = @[]
var compiled: Chunk
var optimized: tuple[tree: seq[ASTNode], warnings: seq[Warning]]
var serialized: Serialized
var serializedRaw: seq[byte]
var tokenizer = newLexer()
var parser = newParser()
var optimizer = initOptimizer()
var compiler = initCompiler()
var serializer = initSerializer()
let editor = getLineEditor()
var input: string
editor.bindEvent(jeQuit):
@@ -53,28 +62,68 @@ when isMainModule:
if input.len() > 0:
# Currently the parser doesn't handle these tokens well
tokens = filter(tokenizer.lex(input, "<stdin>"), proc (x: Token): bool = x.kind notin {TokenType.Whitespace, Tab})
when debugLexer:
echo "Tokenization step:"
for i, token in tokens:
if i == tokens.high():
# Who cares about EOF?
break
echo "\t", token
echo ""
tree = parser.parse(tokens, "<stdin>")
when debugParser:
echo "Parsing step:"
for node in tree:
if tokens.len() > 0:
when debugLexer:
echo "Tokenization step:"
for i, token in tokens:
if i == tokens.high():
# Who cares about EOF?
break
echo "\t", token
echo ""
tree = parser.parse(tokens, "<stdin>")
when debugParser:
echo "Parsing step:"
for node in tree:
echo "\t", node
echo ""
optimized = optimizer.optimize(tree)
when debugOptimizer:
echo &"Optimization step (constant folding enabled: {optimizer.foldConstants}):"
for node in optimized.tree:
echo "\t", node
echo ""
compiled = compiler.compile(tree, "<stdin>")
when debugCompiler:
echo "Compilation step:"
stdout.write("\t")
echo &"""Raw byte stream: [{compiled.code.join(", ")}]"""
echo "\nBytecode disassembler output below:\n"
disassembleChunk(compiled, "<stdin>")
echo ""
stdout.write(&"Produced warnings: ")
if optimized.warnings.len() > 0:
echo ""
for warning in optimized.warnings:
echo "\t", warning
else:
stdout.write("No warnings produced\n")
echo ""
compiled = compiler.compile(optimized.tree, "<stdin>")
when debugCompiler:
echo "Compilation step:"
stdout.write("\t")
echo &"""Raw byte stream: [{compiled.code.join(", ")}]"""
echo "\nBytecode disassembler output below:\n"
disassembleChunk(compiled, "<stdin>")
echo ""
serializedRaw = serializer.dumpBytes(compiled, input, "<stdin>")
serialized = serializer.loadBytes(serializedRaw)
when debugSerializer:
echo "Serialization step: "
stdout.write("\t")
echo &"""Raw hex output: {serializedRaw.mapIt(toHex(it)).join("").toLowerAscii()}"""
echo ""
echo "Deserialization step:"
echo &"\t- File hash: {serialized.fileHash} (matches: {computeSHA256(input).toHex().toLowerAscii() == serialized.fileHash})"
echo &"\t- Peon version: {serialized.peonVer.major}.{serialized.peonVer.minor}.{serialized.peonVer.patch} (commit {serialized.commitHash[0..8]} on branch {serialized.peonBranch})"
stdout.write("\t")
echo &"""- Compilation date & time: {fromUnix(serialized.compileDate).format("d/M/yyyy HH:mm:ss")}"""
stdout.write(&"\t- Reconstructed constants table: [")
for i, e in serialized.chunk.consts:
stdout.write(e)
if i < len(serialized.chunk.consts) - 1:
stdout.write(", ")
stdout.write("]\n")
stdout.write(&"\t- Reconstructed bytecode: [")
for i, e in serialized.chunk.code:
stdout.write($e)
if i < len(serialized.chunk.code) - 1:
stdout.write(", ")
stdout.write(&"] (matches: {serialized.chunk.code == compiled.code})\n")
except IOError:
break
# TODO: The code for error reporting completely
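
The reworked REPL loop above only runs the pipeline when the filtered token list is non-empty, and the filtering drops the whitespace tokens the parser does not yet handle. A stand-alone sketch of that filtering step with a hypothetical token type (the real Token and TokenType live in the frontend modules):

import std/sequtils

type
    ExampleTokenKind = enum
        Whitespace, Tab, Identifier, Integer
    ExampleToken = object
        kind: ExampleTokenKind
        lexeme: string

let raw = @[ExampleToken(kind: Identifier, lexeme: "x"),
            ExampleToken(kind: Whitespace, lexeme: " "),
            ExampleToken(kind: Integer, lexeme: "1")]
# Same shape as the call in the REPL: keep everything that is not whitespace
let tokens = raw.filter(proc (x: ExampleToken): bool = x.kind notin {Whitespace, Tab})
echo tokens.len   # 2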

View File

@@ -11,12 +11,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import meta/ast
import meta/errors
import meta/bytecode
import meta/token
import ../frontend/meta/ast
import ../frontend/meta/errors
import ../frontend/meta/bytecode
import ../frontend/meta/token
import ../config
import ../util/multibyte
import multibyte
import strformat
import strutils
@@ -37,15 +37,15 @@ type
## procedures to store
## metadata
fileHash*: string
japlVer*: tuple[major, minor, patch: int]
japlBranch*: string
peonVer*: tuple[major, minor, patch: int]
peonBranch*: string
commitHash*: string
compileDate*: int
chunk*: Chunk
proc `$`*(self: Serialized): string =
result = &"Serialized(fileHash={self.fileHash}, version={self.japlVer.major}.{self.japlVer.minor}.{self.japlVer.patch}, branch={self.japlBranch}), commitHash={self.commitHash}, date={self.compileDate}, chunk={self.chunk[]}"
result = &"Serialized(fileHash={self.fileHash}, version={self.peonVer.major}.{self.peonVer.minor}.{self.peonVer.patch}, branch={self.peonBranch}), commitHash={self.commitHash}, date={self.compileDate}, chunk={self.chunk[]}"
proc error(self: Serializer, message: string) =
@@ -98,16 +98,16 @@ proc extend[T](s: var seq[T], a: openarray[T]) =
proc writeHeaders(self: Serializer, stream: var seq[byte], file: string) =
## Writes the JAPL bytecode headers in-place into a byte stream
## Writes the Peon bytecode headers in-place into a byte stream
stream.extend(self.toBytes(BYTECODE_MARKER))
stream.add(byte(JAPL_VERSION.major))
stream.add(byte(JAPL_VERSION.minor))
stream.add(byte(JAPL_VERSION.patch))
stream.add(byte(len(JAPL_BRANCH)))
stream.extend(self.toBytes(JAPL_BRANCH))
if len(JAPL_COMMIT_HASH) != 40:
stream.add(byte(PEON_VERSION.major))
stream.add(byte(PEON_VERSION.minor))
stream.add(byte(PEON_VERSION.patch))
stream.add(byte(len(PEON_BRANCH)))
stream.extend(self.toBytes(PEON_BRANCH))
if len(PEON_COMMIT_HASH) != 40:
self.error("the commit hash must be exactly 40 characters long")
stream.extend(self.toBytes(JAPL_COMMIT_HASH))
stream.extend(self.toBytes(PEON_COMMIT_HASH))
stream.extend(self.toBytes(getTime().toUnixFloat().int()))
stream.extend(self.toBytes(computeSHA256(file)))
@@ -253,11 +253,11 @@ proc loadBytes*(self: Serializer, stream: seq[byte]): Serialized =
if stream[0..<len(BYTECODE_MARKER)] != self.toBytes(BYTECODE_MARKER):
self.error("malformed bytecode marker")
stream = stream[len(BYTECODE_MARKER)..^1]
result.japlVer = (major: int(stream[0]), minor: int(stream[1]), patch: int(stream[2]))
result.peonVer = (major: int(stream[0]), minor: int(stream[1]), patch: int(stream[2]))
stream = stream[3..^1]
let branchLength = stream[0]
stream = stream[1..^1]
result.japlBranch = self.bytesToString(stream[0..<branchLength])
result.peonBranch = self.bytesToString(stream[0..<branchLength])
stream = stream[branchLength..^1]
result.commitHash = self.bytesToString(stream[0..<40]).toLowerAscii()
stream = stream[40..^1]
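
For reference, the header that writeHeaders emits and loadBytes parses back is laid out as: the bytecode marker, three version bytes, a length-prefixed branch name, the 40-character commit hash, the compile timestamp, and the SHA256 of the source file. A simplified round-trip sketch of the length-prefixed portion (the helpers here are stand-ins, not the serializer's real toBytes/bytesToString):

proc strToBytes(s: string): seq[byte] =
    for c in s:
        result.add(byte(c))

proc bytesToStr(b: openArray[byte]): string =
    for x in b:
        result.add(char(x))

# Writing: marker, version triple, length-prefixed branch name
var stream: seq[byte]
stream.add(strToBytes("PEON_BYTECODE"))
stream.add([byte(0), byte(4), byte(0)])   # version 0.4.0
stream.add(byte(len("master")))
stream.add(strToBytes("master"))

# Reading mirrors loadBytes: strip the marker, then consume fixed-size fields
var rest = stream[len("PEON_BYTECODE") .. ^1]
let version = (major: int(rest[0]), minor: int(rest[1]), patch: int(rest[2]))
rest = rest[3 .. ^1]
let branchLen = int(rest[0])
let branch = bytesToStr(rest[1 .. branchLen])
echo version, " ", branch   # prints: (major: 0, minor: 4, patch: 0) master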