Added a beta implementation for arrow functions

nocturn9x 2021-01-14 12:55:51 +01:00
parent 56938897d6
commit 2e60ab9fb6
5 changed files with 45 additions and 14 deletions


@@ -925,7 +925,7 @@ proc parseFunction(self: Compiler, funType: FunctionType) =
     ## keyword ones as well
     var self = initCompiler(funType, self, self.parser, self.file)
     self.beginScope()
-    self.parser.consume(LP, "Expecting '(' after function name")
+    self.parser.consume(LP, "Expecting '('")
     if self.parser.hadError:
         return
     var paramNames: seq[string] = @[]
@@ -970,13 +970,22 @@ proc parseFunction(self: Compiler, funType: FunctionType) =
     self.emitBytes(self.makeLongConstant(fun))


-proc funDeclaration(self: Compiler) =
+proc parseLambda(self: Compiler, canAssign: bool) =
+    ## Parses lambda expressions of the form => (params) {code}
+    self.parseFunction(FunctionType.LAMBDA)
+
+
+proc funDeclaration(self: Compiler, named: bool = true) =
     ## Parses function declarations and declares
     ## them in the current scope
-    var funName = self.parseVariable("expecting function name")
-    self.markInitialized()
-    self.parseFunction(FunctionType.FUNC)
-    self.defineVariable(funName)
+    if named:
+        var funName = self.parseVariable("expecting function name")
+        self.markInitialized()
+        self.parseFunction(FunctionType.FUNC)
+        self.defineVariable(funName)
+    else:
+        self.parseFunction(FunctionType.LAMBDA)


 proc argumentList(self: Compiler): uint8 =
@ -1153,7 +1162,8 @@ var rules: array[TokenType, ParseRule] = [
makeRule(nil, binary, Precedence.Term), # BOR makeRule(nil, binary, Precedence.Term), # BOR
makeRule(unary, nil, Precedence.None), # TILDE makeRule(unary, nil, Precedence.None), # TILDE
makeRule(nil, binary, Precedence.Is), # IS makeRule(nil, binary, Precedence.Is), # IS
makeRule(nil, binary, Precedence.As) # AS makeRule(nil, binary, Precedence.As), # AS
makeRule(parseLambda, nil, Precedence.None) # LAMBDA
] ]
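
Note (not part of the diff): the new entry registers parseLambda as the prefix rule for the LAMBDA token, so any expression that begins with => is handed to parseLambda and compiled as an anonymous function. A rough, self-contained Nim sketch of that Pratt-style prefix dispatch; TokKind, Rule and the parse procs below are toy stand-ins, not the compiler's actual ParseRule or Compiler types:

type
  TokKind = enum NUMBER, LAMBDA
  ParseFn = proc (tok: TokKind): string
  Rule = object
    prefix: ParseFn

proc parseNumber(tok: TokKind): string = "number literal"
proc parseLambdaSketch(tok: TokKind): string = "anonymous function via parseFunction(LAMBDA)"

# One rule per token kind, mirroring the rules array above.
let rules: array[TokKind, Rule] = [
  Rule(prefix: parseNumber),        # NUMBER
  Rule(prefix: parseLambdaSketch)   # LAMBDA
]

proc expression(tok: TokKind): string =
  ## The token that starts an expression selects its prefix parser.
  rules[tok].prefix(tok)

echo expression(LAMBDA)   # -> anonymous function via parseFunction(LAMBDA)
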
@@ -1229,9 +1239,15 @@ proc initCompiler*(context: FunctionType, enclosing: Compiler = nil, parser: Par
     result.parser.file = file
     result.locals.add(Local(depth: 0, name: Token(kind: EOF, lexeme: "")))
     inc(result.localCount)
-    result.function = result.markObject(newFunction(chunk=newChunk()))
-    if context != SCRIPT:   # If we're compiling a function, we give it its name
-        result.function.name = asStr(enclosing.parser.previous().lexeme)
+    case context:
+        of FunctionType.Func:
+            result.function = result.markObject(newFunction(enclosing.parser.previous().lexeme, newChunk()))
+        of FunctionType.Lambda:
+            result.function = result.markObject(newLambda(newChunk()))
+        else:   # Script
+            result.function = result.markObject(newFunction("", newChunk()))
+            result.function.name = nil
     # This way the compiler can be executed on its own
     # without the VM


@@ -218,6 +218,8 @@ proc scanToken(self: Lexer) =
         self.parseComment()
     elif single == '=' and self.match('='):
         self.tokens.add(self.createToken(TokenType.DEQ))
+    elif single == '=' and self.match('>'):
+        self.tokens.add(self.createToken(TokenType.LAMBDA))
     elif single == '>' and self.match('='):
         self.tokens.add(self.createToken(TokenType.GE))
     elif single == '>' and self.match('>'):
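
Note (not part of the diff): the lexer consumes '=' as a single character and then uses match() for one-character lookahead, so '==' still becomes DEQ while the new branch turns '=>' into LAMBDA. A minimal standalone sketch of that lookahead, using a toy enum rather than the repo's Lexer and TokenType:

type SketchKind = enum EQ, DEQ, LAMBDA

proc scanEquals(src: string, i: var int): SketchKind =
  ## src[i] is assumed to be '='; peek one character ahead, like match() above.
  inc i
  if i < src.len and src[i] == '=':
    inc i
    result = DEQ        # "==" still becomes DEQ
  elif i < src.len and src[i] == '>':
    inc i
    result = LAMBDA     # "=>" now yields the new LAMBDA token
  else:
    result = EQ         # a lone "="

var pos = 0
echo scanEquals("=> (x) { x }", pos)   # -> LAMBDA
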


@@ -29,7 +29,7 @@ type
         WHILE, DEL, BREAK, EOF,
         COLON, CONTINUE, CARET,
         SHL, SHR, NAN, INF, BAND,
-        BOR, TILDE, IS, AS
+        BOR, TILDE, IS, AS, LAMBDA
     Token* = ref object
         kind*: TokenType
         lexeme*: string


@@ -12,7 +12,7 @@ type
         ## the top-level code, this tiny
         ## enum is used to tell the two
         ## contexts apart when compiling
-        Func, Script
+        Func, Script, Lambda

     Function* = object of Obj
         ## A function object
@@ -40,13 +40,27 @@ proc newFunction*(name: string = "", chunk: Chunk, arity: int = 0): ptr Function
     result.isHashable = false


+proc newLambda*(chunk: Chunk, arity: int = 0): ptr Function =
+    ## Allocates a new lambda object (anonymous function) with the given
+    ## bytecode chunk and arity
+    # TODO: Add lambdas
+    # TODO: Add support for optional parameters
+    result = allocateObj(Function, ObjectType.Function)
+    result.name = "<lambda function>".asStr()
+    result.arity = arity
+    result.chunk = chunk
+    result.isHashable = false
+
+
 proc typeName*(self: ptr Function): string =
     result = "function"


 proc stringify*(self: ptr Function): string =
-    if self.name != nil:
+    if self.name != nil and self.name.toStr() != "<lambda function>":
         result = "<function '" & self.name.toStr() & "'>"
+    elif self.name.toStr() == "<lambda function>":
+        return self.name.toStr()
     else:
         result = "<code object>"


@@ -19,7 +19,6 @@ import baseObject
 import numbers
 import ../memory
 import strutils
-import strformat


 type