# compiler/compiler/__main__.py
from __future__ import annotations
import argparse
import sys
from pprint import pprint
from . import semantic
from .errors import CompilationError
from .logger import rootLogger, LogLevel
from .parser import Parser
from .lexer import Lexer, Tokens
def main():
    """CLI entry point: read a program, then lex, parse, analyze, and emit IR.

    Reads the source text from stdin, or uses the built-in sample program
    when ``--mock`` is given.  Runs the full pipeline
    (``Lexer`` -> ``Parser`` -> semantic analysis -> intermediate
    representation), printing the annotated IR on success.  On a
    ``CompilationError`` the error and its source location are written to
    stderr and the process exits with a non-zero status.
    """
    # NOTE: named arg_parser to avoid shadowing the compiler's own Parser below.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--mock", action="store_true", help="Use default test input")
    args = arg_parser.parse_args()

    # Built-in sample program, used only with --mock.
    data = """
{
byte = 42;
}
2 + 8 - 1 * (byte = 3 + 5)
/ (byte = 255) + byte;
byte = byte + byte;
{
a = byte;
}
"""
    if not args.mock:
        data = sys.stdin.read().strip()
    print("Source:\n", data)

    # Lex, then drop layout-only tokens before parsing.
    tokens = Lexer(data)
    tokens = [token for token in tokens if token.kind not in [Tokens.Blank, Tokens.Newline]]
    if rootLogger.level <= LogLevel.Debug:
        pprint(tokens)

    parser = Parser(tokens)
    try:
        ast = parser.parse()
        ast.pprint(depth=10)

        context = semantic.BuiltinContext()
        ast.semantic_analysis(context)

        intermediate_representation = ast.intermediate_representation()
        messages = []
        for ir_item in intermediate_representation:
            # Attach the source text so the location can render its substring.
            ir_item.location.source = data
            prefix = f"{str(ir_item.location) + ':':<30}"
            source_info = ir_item.location.source_substring.splitlines(keepends=False)
            # First source line shares the location prefix; continuation lines
            # are padded with spaces so they align underneath it.
            messages += [f"# {prefix} {source_info.pop(0)}"]
            while len(source_info) > 0:
                messages += [f"# {' ' * len(prefix)} {source_info.pop(0)}"]
            messages += [f"{repr(ir_item)}\n"]
        print("\n".join(messages))
        print("\n---\n", repr(context))
    except CompilationError as e:
        e.location.source = data
        print(f"{e}\n{e.location.show_in_source()}", file=sys.stderr)
        if e.__cause__ is not None:
            # In debug mode, re-raise the underlying exception for a full traceback.
            if rootLogger.level <= LogLevel.Debug:
                raise e.__cause__
            print(f"Caused by:\n{e.__cause__.__class__.__name__}: {e.__cause__}", file=sys.stderr)
        # Fix: previously the process exited with status 0 even on a
        # compilation error, so shell callers could not detect failure.
        sys.exit(1)
# Invoke the CLI driver when run as a script or via `python -m`.
if __name__ == "__main__":
    main()