also delete this #3

Closed
N00nehere wants to merge 49 commits from (deleted):n00nehere-patch-2 into master
6 changed files with 28 additions and 31 deletions
Showing only changes of commit 5202bf5489 - Show all commits

View File

@ -101,7 +101,7 @@ type
proc initCompiler*(enableOptimizations: bool = true): Compiler =
proc newCompiler*(enableOptimizations: bool = true): Compiler =
## Initializes a new Compiler object
new(result)
result.ast = @[]

View File

@ -13,7 +13,6 @@
# limitations under the License.
import strformat
import strutils
type

View File

@ -39,7 +39,7 @@ type
foldConstants*: bool
proc initOptimizer*(foldConstants: bool = true): Optimizer =
proc newOptimizer*(foldConstants: bool = true): Optimizer =
## Initializes a new optimizer object
new(result)
result.foldConstants = foldConstants

View File

@ -739,17 +739,12 @@ proc forStmt(self: Parser): ASTNode =
var initializer: ASTNode = nil
var condition: ASTNode = nil
var increment: ASTNode = nil
# The code below is not really that illuminating, but
# it's there to disallow weird things like a public for loop
# increment variable which doesn't really make sense, but still
# allow people that like verbosity (for *some* reason) to use
# private static var declarations as well as just private var
# and static var as well as providing decently specific error
# messages
if self.match(Semicolon):
discard
elif self.match(Var):
initializer = self.varDecl()
if not VarDecl(initializer).isPrivate:
self.error("cannot declare public for loop initializer")
else:
initializer = self.expressionStatement()
if not self.check(Semicolon):
@ -1067,5 +1062,7 @@ proc parse*(self: Parser, tokens: seq[Token], file: string): seq[ASTNode] =
break
else:
self.operators.add(self.tokens[i + 1].lexeme)
if i == self.tokens.high() and token.kind != EndOfFile:
self.error("invalid state: found malformed tokenizer input while looking for operators (missing EOF)")
while not self.done():
result.add(self.declaration())

View File

@ -25,7 +25,7 @@ import util/debugger
proc fillSymbolTable(tokenizer: Lexer)
proc getLineEditor: LineEditor
# Handy dandy compile-time constants
const debugLexer = true
const debugParser = true
const debugCompiler = true
@ -35,27 +35,28 @@ const debugSerializer = true
when isMainModule:
setControlCHook(proc () {.noconv.} = quit(0))
var keep = true
var tokens: seq[Token] = @[]
var tree: seq[ASTNode] = @[]
var compiled: Chunk
var optimized: tuple[tree: seq[ASTNode], warnings: seq[Warning]]
var serialized: Serialized
var serializedRaw: seq[byte]
var tokenizer = newLexer()
var parser = newParser()
var optimizer = initOptimizer()
var compiler = initCompiler()
var serializer = initSerializer()
let editor = getLineEditor()
var input: string
var
keep = true
tokens: seq[Token] = @[]
tree: seq[ASTNode] = @[]
compiled: Chunk
optimized: tuple[tree: seq[ASTNode], warnings: seq[Warning]]
serialized: Serialized
serializedRaw: seq[byte]
tokenizer = newLexer()
parser = newParser()
optimizer = newOptimizer()
compiler = newCompiler()
serializer = newSerializer()
editor = getLineEditor()
input: string
tokenizer.fillSymbolTable()
editor.bindEvent(jeQuit):
keep = false
editor.bindKey("ctrl+a"):
editor.content.home()
editor.bindKey("ctrl+e"):
editor.content.`end`()
tokenizer.fillSymbolTable()
while keep:
try:
input = editor.read()
@ -256,6 +257,6 @@ proc fillSymbolTable(tokenizer: Lexer) =
proc getLineEditor: LineEditor =
result = newLineEditor()
result.prompt = "=> "
result.populateDefaults() # Setup default keybindings
let hist = result.plugHistory() # Create history object
result.bindHistory(hist) # Set default history keybindings
result.populateDefaults()
let history = result.plugHistory()
result.bindHistory(history)

View File

@ -53,7 +53,7 @@ proc error(self: Serializer, message: string) =
raise newException(SerializationError, &"A fatal error occurred while (de)serializing '{self.filename}' -> {message}")
proc initSerializer*(self: Serializer = nil): Serializer =
proc newSerializer*(self: Serializer = nil): Serializer =
new(result)
if self != nil:
result = self
@ -244,7 +244,7 @@ proc dumpBytes*(self: Serializer, chunk: Chunk, file, filename: string): seq[byt
proc loadBytes*(self: Serializer, stream: seq[byte]): Serialized =
## Loads the result from dumpBytes to a Serializer object
## for use in the VM or for inspection
discard self.initSerializer()
discard self.newSerializer()
new(result)
result.chunk = newChunk()
self.chunk = result.chunk