From 0ef9960230023db722f9a447e95a8b7a1f585608 Mon Sep 17 00:00:00 2001 From: Antoine Viallon Date: Fri, 12 May 2023 01:35:38 +0200 Subject: [PATCH] parser+nodes+tokenizer: add Statements --- compiler/__main__.py | 2 +- compiler/nodes.py | 16 ++++++++++++++++ compiler/parser.py | 16 ++++++++++++++-- compiler/tokenizer.py | 1 + 4 files changed, 32 insertions(+), 3 deletions(-) diff --git a/compiler/__main__.py b/compiler/__main__.py index 707b40d..646ad90 100644 --- a/compiler/__main__.py +++ b/compiler/__main__.py @@ -19,7 +19,7 @@ def main(): data = """ 2 + 8 - 1 * (byte = 3 + 5) - / (byte = 255) + byte + / (byte = 255) + byte; """ if not args.mock: data = sys.stdin.read().strip() diff --git a/compiler/nodes.py b/compiler/nodes.py index ac79886..37cec2c 100644 --- a/compiler/nodes.py +++ b/compiler/nodes.py @@ -254,6 +254,22 @@ class Expression(Node): return self.node.location() +class Statement(Node): + + def __init__(self, *nodes: Node): + super().__init__() + self.nodes = list(nodes) + + def _values(self) -> list[Node | Any]: + return self.nodes + + def intermediate_representation(self) -> list[ir.IRItem]: + result: list[ir.IRItem] = [] + for node in self.nodes: + result += node.intermediate_representation() + return result + + class Identifier(Literal): def __init__(self, location: SourceLocation, name: str): super().__init__(location, name) diff --git a/compiler/parser.py b/compiler/parser.py index 43c9bdd..9f027b0 100644 --- a/compiler/parser.py +++ b/compiler/parser.py @@ -5,7 +5,7 @@ from beartype.typing import List, Dict, Callable from .errors import CompilationError, UnexpectedTokenError from .logger import Logger, Tracer, LogLevel from .nodes import Float, Sum, Value, Product, Node, Division, Sub, Integer, Expression, Identifier, Assignment, \ - Variable + Variable, Statement, PseudoNode from .tokenizer import Tokens, Token logger = Logger(__name__) @@ -149,9 +149,21 @@ class Parser: elif mandatory: raise UnexpectedTokenError(self.token, "expression") + 
@tracer.trace_method + def statement(self, mandatory: bool = False) -> Statement: + if lbrace := self.accept(Tokens.Brace_Left): + block = self.block(name="anon") + rbrace = self.expect(Tokens.Brace_Right) + return Statement(block, PseudoNode(lbrace), PseudoNode(rbrace)) + elif expr := self.expression(mandatory): + semicolon = PseudoNode(self.expect(Tokens.Semicolon)) + return Statement(expr, semicolon) + elif mandatory: + raise UnexpectedTokenError(self.token, wanted="expression") + @tracer.trace_method def root(self) -> Node: - return self.expression(mandatory=True) + return self.statement(mandatory=True) def parse(self) -> Node: try: diff --git a/compiler/tokenizer.py b/compiler/tokenizer.py index 07c0c86..14d5fee 100644 --- a/compiler/tokenizer.py +++ b/compiler/tokenizer.py @@ -35,6 +35,7 @@ class Tokens(enum.Enum): Parens_Right = re.compile(r"\)") Identifier = re.compile(r"[a-zA-Z_][a-zA-Z_0-9]*") Equal = re.compile(r"=") + Semicolon = re.compile(r";") Newline = re.compile(r"\n", flags=re.MULTILINE) EOF = re.compile(r"\Z") Blank = re.compile(r"[ \t]+")