parser+nodes+tokenizer: add Statements
This commit is contained in:
parent
2b9943cdbf
commit
0ef9960230
4 changed files with 32 additions and 3 deletions
|
|
@ -19,7 +19,7 @@ def main():
|
||||||
|
|
||||||
data = """
|
data = """
|
||||||
2 + 8 - 1 * (byte = 3 + 5)
|
2 + 8 - 1 * (byte = 3 + 5)
|
||||||
/ (byte = 255) + byte
|
/ (byte = 255) + byte;
|
||||||
"""
|
"""
|
||||||
if not args.mock:
|
if not args.mock:
|
||||||
data = sys.stdin.read().strip()
|
data = sys.stdin.read().strip()
|
||||||
|
|
|
||||||
|
|
@ -254,6 +254,22 @@ class Expression(Node):
|
||||||
return self.node.location()
|
return self.node.location()
|
||||||
|
|
||||||
|
|
||||||
|
class Statement(Node):
    """A statement node grouping one or more child nodes.

    Typical children are an expression plus its terminator token
    (wrapped in a PseudoNode), or a block with its surrounding braces.
    """

    def __init__(self, *nodes: Node):
        """Store the given child nodes, preserving their order."""
        super().__init__()
        self.nodes = list(nodes)

    def _values(self) -> list[Node | Any]:
        # A statement's constituent values are exactly its children.
        return self.nodes

    def intermediate_representation(self) -> list[ir.IRItem]:
        """Concatenate the IR emitted by every child node, in order."""
        items: list[ir.IRItem] = []
        for child in self.nodes:
            items.extend(child.intermediate_representation())
        return items
|
||||||
|
|
||||||
|
|
||||||
class Identifier(Literal):
|
class Identifier(Literal):
|
||||||
def __init__(self, location: SourceLocation, name: str):
|
def __init__(self, location: SourceLocation, name: str):
|
||||||
super().__init__(location, name)
|
super().__init__(location, name)
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,7 @@ from beartype.typing import List, Dict, Callable
|
||||||
from .errors import CompilationError, UnexpectedTokenError
|
from .errors import CompilationError, UnexpectedTokenError
|
||||||
from .logger import Logger, Tracer, LogLevel
|
from .logger import Logger, Tracer, LogLevel
|
||||||
from .nodes import Float, Sum, Value, Product, Node, Division, Sub, Integer, Expression, Identifier, Assignment, \
|
from .nodes import Float, Sum, Value, Product, Node, Division, Sub, Integer, Expression, Identifier, Assignment, \
|
||||||
Variable
|
Variable, Statement, PseudoNode
|
||||||
from .tokenizer import Tokens, Token
|
from .tokenizer import Tokens, Token
|
||||||
|
|
||||||
logger = Logger(__name__)
|
logger = Logger(__name__)
|
||||||
|
|
@ -149,9 +149,21 @@ class Parser:
|
||||||
elif mandatory:
|
elif mandatory:
|
||||||
raise UnexpectedTokenError(self.token, "expression")
|
raise UnexpectedTokenError(self.token, "expression")
|
||||||
|
|
||||||
|
@tracer.trace_method
def statement(self, mandatory: bool = False) -> Statement | None:
    """Parse a single statement.

    Grammar (as implemented):
        statement := "{" block "}" | expression ";"

    :param mandatory: when True, raise instead of returning None if no
        statement starts at the current token.
    :returns: the parsed Statement, or None when nothing matched and
        ``mandatory`` is False.
    :raises UnexpectedTokenError: when ``mandatory`` is True and no
        statement can be parsed.
    """
    if lbrace := self.accept(Tokens.Brace_Left):
        # Braced block: keep the delimiter tokens as PseudoNodes so the
        # statement spans the full source range including the braces.
        block = self.block(name="anon")
        rbrace = self.expect(Tokens.Brace_Right)
        return Statement(block, PseudoNode(lbrace), PseudoNode(rbrace))
    elif expr := self.expression(mandatory):
        # Expression statement: must be terminated by a semicolon.
        semicolon = PseudoNode(self.expect(Tokens.Semicolon))
        return Statement(expr, semicolon)
    elif mandatory:
        # BUG FIX: previously raised with `expr`, which is falsy/None in
        # this branch (the walrus above just failed). Report the current
        # token instead, matching how expression() reports its errors.
        raise UnexpectedTokenError(self.token, "expression")
    # No statement here and none required.
    return None
|
||||||
|
|
||||||
@tracer.trace_method
|
@tracer.trace_method
|
||||||
def root(self) -> Node:
|
def root(self) -> Node:
|
||||||
return self.expression(mandatory=True)
|
return self.statement(mandatory=True)
|
||||||
|
|
||||||
def parse(self) -> Node:
|
def parse(self) -> Node:
|
||||||
try:
|
try:
|
||||||
|
|
|
||||||
|
|
@ -35,6 +35,7 @@ class Tokens(enum.Enum):
|
||||||
Parens_Right = re.compile(r"\)")
|
Parens_Right = re.compile(r"\)")
|
||||||
Identifier = re.compile(r"[a-zA-Z_][a-zA-Z_0-9]*")
|
Identifier = re.compile(r"[a-zA-Z_][a-zA-Z_0-9]*")
|
||||||
Equal = re.compile(r"=")
|
Equal = re.compile(r"=")
|
||||||
|
Semicolon = re.compile(r";")
|
||||||
Newline = re.compile(r"\n", flags=re.MULTILINE)
|
Newline = re.compile(r"\n", flags=re.MULTILINE)
|
||||||
EOF = re.compile(r"\Z")
|
EOF = re.compile(r"\Z")
|
||||||
Blank = re.compile(r"[ \t]+")
|
Blank = re.compile(r"[ \t]+")
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue