Added notes to README, minor changes to lexer and test file

This commit is contained in:
Mattia Giambirtone 2022-04-07 11:51:36 +02:00
parent e82ef02772
commit ae8d348e0d
3 changed files with 64 additions and 30 deletions

View File

@ -1,3 +1,8 @@
# peon
Peon is a simple, functional, async-first programming language with a focus on correctness and speed
Peon is a simple, functional, async-first programming language with a focus on correctness and speed
## Disclaimer
Currently the majority of this code comes from [japl](https://git.nocturn9x.space/japl/JustAnotherJAPL) and is
being adapted to the new paradigm and type system. Nothing really works yet.

View File

@ -64,9 +64,6 @@ proc addSymbol*(self: SymbolTable, lexeme: string, token: TokenType) =
self.symbols[lexeme] = token
proc existsSymbol*(self: SymbolTable, lexeme: string): bool {.inline.} = lexeme in self.symbols
proc removeSymbol*(self: SymbolTable, lexeme: string) =
## Removes a symbol from the symbol table
## (does nothing if it does not exist)
@ -85,10 +82,24 @@ proc removeKeyword*(self: SymbolTable, lexeme: string) =
self.keywords.del(lexeme)
proc existsSymbol*(self: SymbolTable, lexeme: string): bool {.inline.} =
  ## Checks whether the given symbol has already
  ## been registered in the symbol table
  self.symbols.hasKey(lexeme)
proc existsKeyword*(self: SymbolTable, lexeme: string): bool {.inline.} =
  ## Checks whether the given keyword has already
  ## been registered in the symbol table
  self.keywords.hasKey(lexeme)
proc getToken(self: Lexer, lexeme: string): Token =
## Gets the matching token object for a given string
## or returns nil if there's no match
var table = self.symbols
## Gets the matching token object for a given
## string according to the symbol table or
## returns nil if there's no match
let table = self.symbols
var kind = table.symbols.getOrDefault(lexeme, table.keywords.getOrDefault(lexeme, NoMatch))
if kind == NoMatch:
return nil
@ -131,7 +142,8 @@ proc isAlphaNumeric(s: string): bool =
return false
return true
# Simple public getters
# Simple public getters used for error
# formatting and whatnot
proc getStart*(self: Lexer): int = self.start      # index where the token currently being scanned begins
proc getCurrent*(self: Lexer): int = self.current  # current scan position within the source
proc getLine*(self: Lexer): int = self.line        # current line number (1-based — downstream code indexes splitLines()[lineNo - 1])

View File

@ -1,24 +1,24 @@
# Builtins
import sequtils
import strutils
import strformat
import frontend/lexer
# Our stuff
import frontend/lexer as l
import frontend/parser as p
import jale/editor
# Thanks art <3
import jale/editor as ed
import jale/templates
import jale/plugin/defaults
import jale/plugin/editor_history
import jale/keycodes
import jale/multiline
# Forward declarations
proc fillSymbolTable(tokenizer: Lexer)
proc getLineEditor: LineEditor =
result = newLineEditor()
result.prompt = "=> "
result.populateDefaults() # Setup default keybindings
let hist = result.plugHistory() # Create history object
result.bindHistory(hist) # Set default history keybindings
proc getLineEditor: LineEditor
when isMainModule:
@ -27,21 +27,21 @@ when isMainModule:
var tokens: seq[Token] = @[]
var tokenizer = newLexer()
var parser = newParser()
let lineEditor = getLineEditor()
let editor = getLineEditor()
var input: string
lineEditor.bindEvent(jeQuit):
editor.bindEvent(jeQuit):
keep = false
lineEditor.bindKey("ctrl+a"):
lineEditor.content.home()
lineEditor.bindKey("ctrl+e"):
lineEditor.content.`end`()
editor.bindKey("ctrl+a"):
editor.content.home()
editor.bindKey("ctrl+e"):
editor.content.`end`()
tokenizer.fillSymbolTable()
while keep:
try:
input = lineEditor.read()
input = editor.read()
if input.len() > 0:
# Currently the parser doesn't handle these tokens well
tokens = filter(tokenizer.lex(input, "<stdin>"), proc (x: Token): bool = x.kind notin {Whitespace, Tab})
tokens = filter(tokenizer.lex(input, "<stdin>"), proc (x: Token): bool = x.kind notin {TokenType.Whitespace, Tab})
echo "Tokenization step:"
for i, token in tokens:
if i == tokens.high():
@ -55,9 +55,19 @@ when isMainModule:
except IOError:
break
except LexingError:
let lineNo = tokenizer.getLine()
let relPos = tokenizer.getRelPos(lineNo)
let line = tokenizer.getSource().splitLines()[lineNo - 1].strip()
echo getCurrentExceptionMsg()
echo &"Source line: {line}"
echo " ".repeat(relPos.start + len("Source line: ")) & "^".repeat(relPos.stop - relPos.start)
except ParseError:
let lineNo = parser.getCurrentToken().line
let relPos = tokenizer.getRelPos(lineNo)
let line = tokenizer.getSource().splitLines()[lineNo - 1].strip()
echo getCurrentExceptionMsg()
echo &"Source line: {line}"
echo " ".repeat(relPos.start + len("Source line: ")) & "^".repeat(relPos.stop - parser.getCurrentToken().lexeme.len())
quit(0)
@ -164,7 +174,14 @@ proc fillSymbolTable(tokenizer: Lexer) =
tokenizer.symbols.addKeyword("not", LogicalNot)
# P.S.: There's no reason for the order of addition of
# symbols to be ascending (the symbol table uses a hashmap
# internally). You can add/remove symbols (and keywords
# for that matter) as you like!
# symbols to be ascending in length (the symbol table uses
# a hashmap internally). You can add/remove symbols (and
# keywords for that matter) as you like!
proc getLineEditor: LineEditor =
  ## Builds a line editor preconfigured with our
  ## prompt, the default keybindings and history support
  result = newLineEditor()
  result.prompt = "=> "
  # Install the stock keybindings first
  result.populateDefaults()
  # Then create the history object and hook up
  # its default keybindings
  let history = result.plugHistory()
  result.bindHistory(history)