parser+nodes+tokenizer: add Blocks

This commit is contained in:
Antoine Viallon 2023-05-12 01:37:54 +02:00
parent 0ef9960230
commit 36b1fad7fe
Signed by: aviallon
GPG key ID: D126B13AB555E16F
4 changed files with 32 additions and 3 deletions

View file

@ -18,8 +18,15 @@ def main():
args = parser.parse_args() args = parser.parse_args()
data = """ data = """
{
byte = 42;
}
2 + 8 - 1 * (byte = 3 + 5) 2 + 8 - 1 * (byte = 3 + 5)
/ (byte = 255) + byte; / (byte = 255) + byte;
byte = byte + byte;
{
a = byte;
}
""" """
if not args.mock: if not args.mock:
data = sys.stdin.read().strip() data = sys.stdin.read().strip()
@ -39,7 +46,6 @@ def main():
context = semantic.Context("root") context = semantic.Context("root")
ast.semantic_analysis(context) ast.semantic_analysis(context)
print(context)
intermediate_representation = ast.intermediate_representation() intermediate_representation = ast.intermediate_representation()
@ -55,6 +61,8 @@ def main():
messages += [f"{repr(ir_item)}\n"] messages += [f"{repr(ir_item)}\n"]
print("\n".join(messages)) print("\n".join(messages))
print("\n---\n", repr(context))
except CompilationError as e: except CompilationError as e:
e.location.source = data e.location.source = data
print(f"{e}\n{e.location.show_in_source()}", file=sys.stderr) print(f"{e}\n{e.location.show_in_source()}", file=sys.stderr)

View file

@ -270,6 +270,19 @@ class Statement(Node):
return result return result
class Block(Statement):
    """A compound statement that groups child nodes under a new semantic scope.

    During semantic analysis the block creates a fresh child ``Context`` named
    after itself, registers it on the parent context (when one exists), and
    analyses its children inside that nested scope.
    """

    def __init__(self, name: str, *nodes: Node):
        super().__init__(*nodes)
        # Name given to the semantic context this block opens.
        self.name = name

    def semantic_analysis(self, context: semantic.Context | None):
        """Run semantic analysis on children inside a nested child context."""
        child_context = semantic.Context(name=self.name, parent=context)
        # Attach the new scope to its parent so lookups can walk outward;
        # the root block has no parent to attach to.
        if context is not None:
            context.add_context(child_context)
        logger.debug(f"Created new context: {child_context}")
        super().semantic_analysis(child_context)
class Identifier(Literal): class Identifier(Literal):
def __init__(self, location: SourceLocation, name: str): def __init__(self, location: SourceLocation, name: str):
super().__init__(location, name) super().__init__(location, name)

View file

@ -5,7 +5,7 @@ from beartype.typing import List, Dict, Callable
from .errors import CompilationError, UnexpectedTokenError from .errors import CompilationError, UnexpectedTokenError
from .logger import Logger, Tracer, LogLevel from .logger import Logger, Tracer, LogLevel
from .nodes import Float, Sum, Value, Product, Node, Division, Sub, Integer, Expression, Identifier, Assignment, \ from .nodes import Float, Sum, Value, Product, Node, Division, Sub, Integer, Expression, Identifier, Assignment, \
Variable, Statement, PseudoNode Variable, Statement, PseudoNode, Block
from .tokenizer import Tokens, Token from .tokenizer import Tokens, Token
logger = Logger(__name__) logger = Logger(__name__)
@ -161,9 +161,15 @@ class Parser:
elif mandatory: elif mandatory:
raise UnexpectedTokenError(expr, wanted="expression") raise UnexpectedTokenError(expr, wanted="expression")
def block(self, name: str) -> Block:
    """Parse consecutive statements and wrap them in a named Block node.

    Statements are consumed greedily until ``self.statement`` yields a
    falsy result; the collected statements become the Block's children.
    """
    collected: list[Statement] = []
    stmt = self.statement(mandatory=False)
    while stmt:
        collected.append(stmt)
        stmt = self.statement(mandatory=False)
    return Block(name, *collected)
@tracer.trace_method @tracer.trace_method
def root(self) -> Node: def root(self) -> Node:
return self.statement(mandatory=True) return self.block(name="root")
def parse(self) -> Node: def parse(self) -> Node:
try: try:

View file

@ -33,6 +33,8 @@ class Tokens(enum.Enum):
Op_Divide = re.compile(r"/") Op_Divide = re.compile(r"/")
Parens_Left = re.compile(r"\(") Parens_Left = re.compile(r"\(")
Parens_Right = re.compile(r"\)") Parens_Right = re.compile(r"\)")
Brace_Left = re.compile(r"\{")
Brace_Right = re.compile(r"}")
Identifier = re.compile(r"[a-zA-Z_][a-zA-Z_0-9]*") Identifier = re.compile(r"[a-zA-Z_][a-zA-Z_0-9]*")
Equal = re.compile(r"=") Equal = re.compile(r"=")
Semicolon = re.compile(r";") Semicolon = re.compile(r";")