added missing files

Productive2 2020-10-22 10:19:00 +02:00
parent 1daae9c30a
commit ccc78148d8
28 changed files with 2388 additions and 1099 deletions

JAPL/__init__.py Normal file (+4)

@@ -0,0 +1,4 @@
from .lexer import Lexer
from .parser import Parser
from .interpreter import Interpreter
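
The three imports above describe the whole pipeline. As a quick orientation, the sketch below wires them together; the Parser's interface is not part of this commit, so Parser(tokens).parse() is an assumption, and the resolver pass that would populate Interpreter.locals is omitted (globals-only programs work without it).

# Minimal usage sketch, assuming Parser(tokens).parse() returns a list of statements
# (the parser is not shown in this diff).
from JAPL import Lexer, Parser, Interpreter

source = 'print("hello from JAPL");'
tokens = Lexer(source).lex()          # source text -> list of Token objects
statements = Parser(tokens).parse()   # tokens -> AST statements (assumed interface)
Interpreter().interpret(statements)   # walks the AST and executes it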

JAPL/interpreter.py

@@ -1,352 +1,352 @@
import operator
from typing import List
from .types.callable import Callable
from .types.japlclass import JAPLClass
from .types.instance import JAPLInstance
from .meta.environment import Environment
from .meta.tokentype import TokenType
from .meta.exceptions import JAPLError, BreakException, ReturnException
from .types.native import Clock, Type, JAPLFunction, Truthy, Stringify, PrintFunction, IsInstance, IsSubclass, IsSuperclass
from .meta.expression import Expression, Variable, Literal, Logical, Binary, Unary, Grouping, Assignment, Call, Get, Set
from .meta.statement import Statement, StatementExpr, If, While, Del, Break, Return, Var, Block, Function, Class


class Interpreter(Expression.Visitor, Statement.Visitor):
    """
    An interpreter for the JAPL
    programming language
    """

    OPS = {TokenType.MINUS: operator.sub, TokenType.PLUS: operator.add, TokenType.SLASH: operator.truediv,
           TokenType.STAR: operator.mul, TokenType.DEQ: operator.eq, TokenType.GT: operator.gt,
           TokenType.GE: operator.ge, TokenType.LT: operator.lt, TokenType.LE: operator.le, TokenType.EQ: operator.eq,
           TokenType.NE: operator.ne, TokenType.MOD: operator.mod, TokenType.POW: operator.pow}

    def __init__(self):
        """Object constructor"""
        self.environment = Environment()
        self.locals = {}
        self.globals = self.environment
        self.globals.define("clock", Clock())
        self.globals.define("type", Type())
        self.globals.define("truthy", Truthy())
        self.globals.define("stringify", Stringify())
        self.globals.define("print", PrintFunction())
        self.globals.define("isinstance", IsInstance())
        self.globals.define("issuperclass", IsSuperclass())
        self.globals.define("issubclass", IsSubclass())

    def number_operand(self, op, operand):
        """
        A helper method to check that the operand
        to a unary operator is a number
        """
        if isinstance(operand, (int, float)):
            return
        raise JAPLError(op,
                        f"Unsupported unary operator '{op.lexeme}' for object of type '{type(operand).__name__}'")

    def compatible_operands(self, op, left, right):
        """
        Helper method to check types when doing binary
        operations
        """
        if op.kind == TokenType.SLASH and right == 0:
            raise JAPLError(op, "Cannot divide by 0")
        elif isinstance(left, (bool, type(None))) or isinstance(right, (bool, type(None))):
            if op.kind not in (TokenType.DEQ, TokenType.NE):
                raise JAPLError(op, f"Unsupported binary operator '{op.lexeme}' for objects of type '{type(left).__name__}' and '{type(right).__name__}'")
            return
        elif isinstance(left, (int, float)) and isinstance(right, (int, float)):
            return
        elif op.kind in (TokenType.PLUS, TokenType.STAR, TokenType.DEQ, TokenType.NE):
            if isinstance(left, str) and isinstance(right, str):
                return
            elif isinstance(left, str) and isinstance(right, int):
                return
            elif isinstance(left, int) and isinstance(right, str):
                return
        raise JAPLError(op, f"Unsupported binary operator '{op.lexeme}' for objects of type '{type(left).__name__}' and '{type(right).__name__}'")

    def visit_literal(self, expr: Literal):
        """
        Visits a Literal node in the Abstract Syntax Tree,
        returning its value to the visitor
        """
        return expr.value

    def visit_logical(self, expr: Logical):
        """Visits a logical node, short-circuiting where possible"""
        left = self.eval(expr.left)
        if expr.operator.kind == TokenType.OR:
            if bool(left):
                return left
        elif not bool(left):  # 'and' short-circuits on a falsy left operand
            return left
        return self.eval(expr.right)

    def eval(self, expr: Expression):
        """
        Evaluates an expression by calling its accept()
        method and passing self to it. This mechanism is known
        as the 'Visitor Pattern': the expression object will
        later call the interpreter's appropriate method to
        evaluate itself
        """
        return expr.accept(self)

    def visit_grouping(self, grouping: Grouping):
        """
        Visits a Grouping node in the Abstract Syntax Tree,
        recursively evaluating its subexpressions
        """
        return self.eval(grouping.expr)

    def visit_unary(self, expr: Unary):
        """
        Visits a Unary node in the Abstract Syntax Tree,
        returning the negation of the given object, if
        the operation is supported
        """
        right = self.eval(expr.right)
        self.number_operand(expr.operator, right)
        if expr.operator.kind == TokenType.NEG:
            return not right
        return -right

    def visit_binary(self, expr: Binary):
        """
        Visits a Binary node in the Abstract Syntax Tree,
        recursively evaluating both operands first and then
        performing the operation specified by the operator
        """
        left = self.eval(expr.left)
        right = self.eval(expr.right)
        self.compatible_operands(expr.operator, left, right)
        return self.OPS[expr.operator.kind](left, right)

    def visit_statement_expr(self, stmt: StatementExpr):
        """
        Visits an expression statement and evaluates it
        """
        self.eval(stmt.expression)

    def visit_if(self, statement: If):
        """
        Visits an If node and evaluates it
        """
        if self.eval(statement.condition):
            self.exec(statement.then_branch)
        elif statement.else_branch:
            self.exec(statement.else_branch)

    def visit_class(self, stmt: Class):
        """Visits a class declaration"""
        superclass = None
        if stmt.superclass:
            superclass = self.eval(stmt.superclass)
            if not isinstance(superclass, JAPLClass):
                raise JAPLError(stmt.superclass.name, "Superclass must be a class")
        self.environment.define(stmt.name.lexeme, None)
        if superclass:
            environment = Environment(self.environment)
            environment.define("super", superclass)
        else:
            environment = self.environment
        methods = {}
        for method in stmt.methods:
            func = JAPLFunction(method, environment)
            methods[method.name.lexeme] = func
        klass = JAPLClass(stmt.name.lexeme, methods, superclass)
        if superclass:
            self.environment = environment.enclosing
        self.environment.assign(stmt.name, klass)

    def visit_while(self, statement: While):
        """
        Visits a while node and executes it
        """
        while self.eval(statement.condition):
            try:
                self.exec(statement.body)
            except BreakException:
                break

    def visit_var_stmt(self, stmt: Var):
        """
        Visits a var statement
        """
        val = None
        if stmt.init:
            val = self.eval(stmt.init)
        self.environment.define(stmt.name.lexeme, val)

    def lookup(self, name, expr: Expression):
        """
        Performs name lookups in the closest scope
        """
        distance = self.locals.get(expr)
        if distance is not None:
            return self.environment.get_at(distance, name.lexeme)
        else:
            return self.globals.get(name)

    def visit_var_expr(self, expr: Variable):
        """
        Visits a var expression
        """
        return self.lookup(expr.name, expr)

    def visit_del(self, stmt: Del):
        """
        Visits a del statement
        """
        return self.environment.delete(stmt.name)

    def visit_assign(self, stmt: Assignment):
        """
        Visits an assignment expression
        """
        right = self.eval(stmt.value)
        distance = self.locals.get(stmt)
        if distance is not None:
            self.environment.assign_at(distance, stmt.name, right)
        else:
            self.globals.assign(stmt.name, right)
        return right

    def visit_block(self, stmt: Block):
        """
        Visits a new scope block
        """
        return self.execute_block(stmt.statements, Environment(self.environment))

    def visit_break(self, stmt: Break):
        """
        Visits a break statement
        """
        raise BreakException()

    def visit_call_expr(self, expr: Call):
        """
        Visits a call expression
        """
        callee = self.eval(expr.callee)
        if not isinstance(callee, Callable):
            raise JAPLError(expr.paren, f"'{type(callee).__name__}' is not callable")
        arguments = []
        for argument in expr.arguments:
            arguments.append(self.eval(argument))
        function = callee
        if function.arity != len(arguments):
            raise JAPLError(expr.paren, f"Expecting {function.arity} arguments, got {len(arguments)}")
        return function.call(self, arguments)

    def execute_block(self, statements: List[Statement], scope: Environment):
        """
        Executes a block of statements
        """
        prev = self.environment
        try:
            self.environment = scope
            for statement in statements:
                self.exec(statement)
        finally:
            self.environment = prev

    def visit_return(self, statement: Return):
        """
        Visits a return statement
        """
        value = None
        if statement.value:
            value = self.eval(statement.value)
        raise ReturnException(value)

    def visit_function(self, statement: Function):
        """
        Visits a function declaration
        """
        function = JAPLFunction(statement, self.environment)
        self.environment.define(statement.name.lexeme, function)

    def visit_get(self, expr: Get):
        """Visits property get expressions and evaluates them"""
        obj = self.eval(expr.object)
        if isinstance(obj, JAPLInstance):
            return obj.get(expr.name)
        raise JAPLError(expr.name, "Only instances have properties")

    def visit_set(self, expr: Set):
        """Visits property set expressions and evaluates them"""
        obj = self.eval(expr.object)
        if not isinstance(obj, JAPLInstance):
            raise JAPLError(expr, "Only instances have fields")
        value = self.eval(expr.value)
        obj.set(expr.name, value)

    def visit_this(self, expr):
        """Evaluates 'this' expressions"""
        return self.lookup(expr.keyword, expr)

    def visit_super(self, expr):
        """Evaluates 'super' expressions"""
        distance = self.locals.get(expr)
        superclass = self.environment.get_at(distance, "super")
        instance = self.environment.get_at(distance - 1, "this")
        meth = superclass.get_method(expr.method.lexeme)
        if not meth:
            raise JAPLError(expr.method, f"Undefined property '{expr.method.lexeme}'")
        return meth.bind(instance)

    def exec(self, statement: Statement):
        """
        Executes a statement
        """
        statement.accept(self)

    def interpret(self, statements: List[Statement]):
        """
        Executes a JAPL program
        """
        for statement in statements:
            self.exec(statement)

    def resolve(self, expr: Expression, depth: int):
        """
        Stores the result of the name resolution: this
        info will be used later to know exactly in which
        environment to look up a given variable
        """
        self.locals[expr] = depth  # How many environments to skip!
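
The eval() and exec() methods above rely on double dispatch: every AST node implements accept() and calls back the matching visit_* method on whatever visitor it was given. A standalone toy (names are illustrative, not JAPL classes) makes the hand-off explicit:

# Illustration only: the accept()/visit_* round trip used by eval() and exec().
class Node:
    def accept(self, visitor):
        raise NotImplementedError

class Num(Node):
    def __init__(self, value):
        self.value = value

    def accept(self, visitor):
        return visitor.visit_num(self)   # second dispatch: the node picks the method

class Printer:
    def visit_num(self, node):
        return f"Num({node.value})"

print(Num(42).accept(Printer()))   # -> Num(42)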

JAPL/lexer.py

@@ -1,212 +1,212 @@
from .meta.tokenobject import Token
from .meta.tokentype import TokenType
from .meta.exceptions import ParseError
from typing import List


class Lexer(object):
    """
    A simple tokenizer for the JAPL programming
    language, which scans an input source file and
    produces a list of tokens. Some errors
    are caught here as well.
    """

    TOKENS = {"(": TokenType.LP, ")": TokenType.RP,
              "{": TokenType.LB, "}": TokenType.RB,
              ".": TokenType.DOT, ",": TokenType.COMMA,
              "-": TokenType.MINUS, "+": TokenType.PLUS,
              ";": TokenType.SEMICOLON, "*": TokenType.STAR,
              ">": TokenType.GT, "<": TokenType.LT,
              "=": TokenType.EQ, "!": TokenType.NEG,
              "/": TokenType.SLASH, "%": TokenType.MOD}

    RESERVED = {"or": TokenType.OR, "and": TokenType.AND,
                "class": TokenType.CLASS, "fun": TokenType.FUN,
                "if": TokenType.IF, "else": TokenType.ELSE,
                "for": TokenType.FOR, "while": TokenType.WHILE,
                "var": TokenType.VAR, "nil": TokenType.NIL,
                "true": TokenType.TRUE, "false": TokenType.FALSE,
                "return": TokenType.RETURN,
                "this": TokenType.THIS, "super": TokenType.SUPER,
                "del": TokenType.DEL, "break": TokenType.BREAK}

    def __init__(self, source: str):
        """Object constructor"""
        self.source = source
        self.tokens: List[Token] = []
        self.line: int = 1  # Points to the line being lexed
        self.start: int = 0  # The position of the first character of the current lexeme
        self.current: int = 0  # The position of the current character being lexed

    def step(self) -> str:
        """
        'Steps' one character forward in the source code and returns it
        """
        if self.done():
            return ""
        self.current += 1
        return self.source[self.current - 1]

    def peek(self) -> str:
        """
        Returns the current character without consuming it,
        or an empty string if all text has been consumed
        """
        if self.done():
            return ""
        return self.source[self.current]

    def peek_next(self) -> str:
        """
        Returns the next character after self.current,
        or an empty string if the input has been consumed
        """
        if self.current + 1 >= len(self.source):
            return ""
        return self.source[self.current + 1]

    def string(self, delimiter: str):
        """Parses a string literal"""
        while self.peek() != delimiter and not self.done():
            if self.peek() == "\n":
                self.line += 1
            self.step()
        if self.done():
            raise ParseError(f"unterminated string literal at line {self.line}")
        self.step()  # Consume the closing delimiter
        value = self.source[self.start + 1:self.current - 1]  # Get the actual string
        self.tokens.append(self.create_token(TokenType.STR, value))

    def number(self):
        """Parses a number literal"""
        convert = int
        while self.peek().isdigit():
            self.step()
        if self.peek() == ".":
            self.step()  # Consume the "."
            while self.peek().isdigit():
                self.step()
            convert = float
        self.tokens.append(self.create_token(TokenType.NUM,
                                             convert(self.source[self.start:self.current])))

    def identifier(self):
        """Parses identifiers and reserved keywords"""
        while self.peek().isalnum() or self.is_identifier(self.peek()):
            self.step()
        kind = TokenType.ID
        value = self.source[self.start:self.current]
        if self.RESERVED.get(value, None):
            kind = self.RESERVED[value]
        self.tokens.append(self.create_token(kind))

    def comment(self):
        """Handles multi-line comments"""
        closed = False
        while not self.done():
            end = self.peek() + self.peek_next()
            if end == "/*":  # Nested comments
                self.step()
                self.step()
                self.comment()
            elif end == "*/":
                closed = True
                self.step()  # Consume the two ends
                self.step()
                break
            else:
                self.step()
        if self.done() and not closed:
            raise ParseError(f"Unexpected EOF at line {self.line}")

    def match(self, char: str) -> bool:
        """
        Returns True if the current character in self.source matches
        the given character, consuming it if so
        """
        if self.done():
            return False
        elif self.source[self.current] != char:
            return False
        self.current += 1
        return True

    def done(self) -> bool:
        """
        Helper method that's used by the lexer
        to know if all the source has been consumed
        """
        return self.current >= len(self.source)

    def create_token(self, kind: TokenType, literal: object = None) -> Token:
        """
        Creates and returns a token object
        """
        return Token(kind, self.source[self.start:self.current], literal, self.line)

    def is_identifier(self, char: str):
        """Returns True if a character can be part of an identifier"""
        if char.isalpha() or char in ("_", ):  # More coming soon
            return True

    def scan_token(self):
        """
        Scans for a single token and adds it to
        self.tokens
        """
        char = self.step()
        if char in (" ", "\t", "\r"):  # Useless characters
            return
        elif char == "\n":  # New line
            self.line += 1
        elif char in ("'", '"'):  # A string literal
            self.string(char)
        elif char.isdigit():
            self.number()
        elif self.is_identifier(char):  # Identifier or reserved keyword
            self.identifier()
        elif char in self.TOKENS:
            if char == "/" and self.match("/"):
                while self.peek() != "\n" and not self.done():
                    self.step()  # Who cares about comments?
            elif char == "/" and self.match("*"):
                self.comment()
            elif char == "=" and self.match("="):
                self.tokens.append(self.create_token(TokenType.DEQ))
            elif char == ">" and self.match("="):
                self.tokens.append(self.create_token(TokenType.GE))
            elif char == "<" and self.match("="):
                self.tokens.append(self.create_token(TokenType.LE))
            elif char == "!" and self.match("="):
                self.tokens.append(self.create_token(TokenType.NE))
            elif char == "*" and self.match("*"):
                self.tokens.append(self.create_token(TokenType.POW))
            else:
                self.tokens.append(self.create_token(self.TOKENS[char]))
        else:
            raise ParseError(f"unexpected character '{char}' at line {self.line}")

    def lex(self) -> List[Token]:
        """
        Performs lexical analysis on self.source
        and returns a list of tokens
        """
        while not self.done():
            self.start = self.current
            self.scan_token()
        self.tokens.append(Token(TokenType.EOF, "", None, self.line))
        return self.tokens
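
A quick way to sanity-check the lexer is to tokenize a small snippet. The attribute names on Token are assumed to mirror the Token(kind, lexeme, literal, line) constructor used by create_token(); tokenobject.py is not shown in this hunk.

# Rough usage sketch for the Lexer; Token attribute names are an assumption.
from JAPL.lexer import Lexer

for token in Lexer("var answer = 2 ** 8; // a comment").lex():
    print(token.kind, repr(token.lexeme), token.literal)
# Expected kinds, roughly: VAR, ID, EQ, NUM, POW, NUM, SEMICOLON, EOF
# (the line comment is discarded by scan_token)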

JAPL/meta/__init__.py Normal file (+0)

JAPL/meta/classtype.py

@@ -1,7 +1,7 @@
from enum import Enum, auto


class ClassType(Enum):
    NONE = auto()
    CLASS = auto()

JAPL/meta/environment.py

@@ -1,71 +1,71 @@
from .exceptions import JAPLError
from .tokenobject import Token
from .expression import Variable


class Environment(object):
    """
    A wrapper around a hashmap representing
    a scope
    """

    def __init__(self, enclosing=None):
        """Object constructor"""
        self.map = {}
        self.enclosing = enclosing

    def define(self, name: str, attr: object):
        """Defines a new variable in the scope"""
        self.map[name] = attr

    def get(self, name: Token):
        """Gets a variable"""
        if name.lexeme in self.map:
            return self.map[name.lexeme]
        elif self.enclosing:
            return self.enclosing.get(name)
        raise JAPLError(name, f"Undefined name '{name.lexeme}'")

    def get_at(self, distance, name):
        """Gets a variable in a specific scope"""
        return self.ancestor(distance).map.get(name)

    def ancestor(self, distance):
        """Finds the scope specified by distance"""
        env = self
        for _ in range(distance):
            env = env.enclosing
        return env

    def assign_at(self, distance, name, value):
        """Same as get_at, but assigns instead of retrieving"""
        self.ancestor(distance).map[name.lexeme] = value

    def delete(self, var):
        """Deletes a variable"""
        if var.name.lexeme in self.map:
            del self.map[var.name.lexeme]
        elif self.enclosing:
            self.enclosing.delete(var)
        else:
            raise JAPLError(var.name, f"Undefined name '{var.name.lexeme}'")

    def assign(self, name: Token, value: object):
        """Assigns a variable"""
        if name.lexeme in self.map:
            if isinstance(value, Variable):
                self.map[name.lexeme] = self.get(value.name)
            else:
                self.map[name.lexeme] = value
        elif self.enclosing:
            self.enclosing.assign(name, value)
        else:
            raise JAPLError(name, f"Undefined name '{name.lexeme}'")
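
The distance integers stored by Interpreter.resolve() map directly onto ancestor(): distance 0 is the current scope, 1 is its enclosing scope, and so on. A minimal sketch using only the API above:

# Sketch of how scope distances work; variable names are illustrative.
from JAPL.meta.environment import Environment

outer = Environment()
outer.define("x", "global")
inner = Environment(enclosing=outer)
inner.define("x", "local")

print(inner.get_at(0, "x"))   # -> 'local'  (distance 0: this scope)
print(inner.get_at(1, "x"))   # -> 'global' (distance 1: one scope up)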

JAPL/meta/exceptions.py

@@ -1,32 +1,32 @@
from .tokentype import TokenType


class JAPLError(BaseException):
    """JAPL's exceptions base class"""

    def __repr__(self):
        return self.args[1]


class ParseError(JAPLError):
    """An error occurred while parsing"""

    def __repr__(self):
        if len(self.args) > 1:
            message, token = self.args
            if token.kind == TokenType.EOF:
                return f"Unexpected error while parsing at line {token.line}, at end: {message}"
            else:
                return f"Unexpected error while parsing at line {token.line} at '{token.lexeme}': {message}"
        return self.args[0]

    def __str__(self):
        return self.__repr__()


class BreakException(JAPLError):
    """Notifies a loop that it's time to break"""


class ReturnException(JAPLError):
    """Notifies a function that it's time to return"""

JAPL/meta/expression.py

@@ -1,167 +1,167 @@
from dataclasses import dataclass
from abc import ABC, abstractmethod
from .tokenobject import Token
from typing import List


class Expression(object):
    """
    An object representing a JAPL expression.
    This class is not meant to be instantiated directly,
    inherit from it instead!
    """

    def accept(self, visitor):
        raise NotImplementedError

    class Visitor(ABC):
        """
        Visitor abstract base class to implement
        the Visitor pattern
        """

        @abstractmethod
        def visit_literal(self, visitor):
            raise NotImplementedError

        @abstractmethod
        def visit_binary(self, visitor):
            raise NotImplementedError

        @abstractmethod
        def visit_grouping(self, visitor):
            raise NotImplementedError

        @abstractmethod
        def visit_unary(self, visitor):
            raise NotImplementedError

        @abstractmethod
        def visit_get(self, visitor):
            raise NotImplementedError

        @abstractmethod
        def visit_set(self, visitor):
            raise NotImplementedError


@dataclass
class Binary(Expression):
    left: Expression
    operator: Token
    right: Expression

    def accept(self, visitor):
        return visitor.visit_binary(self)


@dataclass
class Unary(Expression):
    operator: Token
    right: Expression

    def accept(self, visitor):
        return visitor.visit_unary(self)


@dataclass
class Literal(Expression):