lexer: fix non-stopping list() issue

Also convert the SystemExit raised on unknown tokens into a CompilationError
Antoine Viallon 2024-07-26 01:33:00 +02:00
parent 7655b2a6a8
commit d7613c8ed2
Signed by: aviallon
GPG key ID: 186FC35EDEB25716
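For context, the non-stopping list() issue comes from __next__ never raising StopIteration once the EOF token has been produced, so list(lexer) keeps receiving the last token forever. A minimal, self-contained sketch of the bug and the fix (hypothetical reduced classes, not the project's actual Lexer/Token):

# Minimal reproduction of the bug and the fix (hypothetical, reduced
# example; not the project's actual Lexer/Token classes).

class BuggyLexer:
    """Re-emits the EOF marker forever, so list() never terminates."""

    def __init__(self, tokens: list[str]):
        self.tokens = tokens          # e.g. ["a", "b", "EOF"]
        self.emitted: list[str] = []

    def __iter__(self):
        return self

    def __next__(self) -> str:
        # Once the input is exhausted this keeps returning "EOF"
        # instead of signalling the end of iteration.
        if len(self.emitted) < len(self.tokens):
            tok = self.tokens[len(self.emitted)]
        else:
            tok = "EOF"
        self.emitted.append(tok)
        return tok


class FixedLexer(BuggyLexer):
    def __next__(self) -> str:
        # The fix from this commit: stop as soon as EOF was already emitted.
        if self.emitted and self.emitted[-1] == "EOF":
            raise StopIteration("EOF already reached")
        return super().__next__()


print(list(FixedLexer(["a", "b", "EOF"])))  # ['a', 'b', 'EOF'] -- terminates
# list(BuggyLexer(["a", "b", "EOF"])) would loop forever.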

@@ -7,6 +7,7 @@ import typing
 from dataclasses import dataclass, field
 from typing import cast
+from .errors import CompilationError
 from .logger import Logger
 from .source import SourceLocation, Location, TextIOWithMemory
 from .typechecking import typecheck
 
@@ -68,7 +69,7 @@ class Tokens(enum.Enum):
 class Lexer(collections.abc.Iterator):
     def __init__(self, input_stream: TextIOWithMemory, token_filter: typing.Callable[[Token], bool] | None = None):
         self.input = input_stream
-        self.tokens = []
+        self.tokens: list[Token] = []
         self.length: int | None = None
         self.begin: int = 0
         self.end: int = 0
@@ -77,6 +78,8 @@ class Lexer(collections.abc.Iterator):
         self.token_filter = token_filter
 
     def __next__(self):
+        if len(self.tokens) and self.tokens[-1].kind == Tokens.EOF:
+            raise StopIteration("EOF already reached")
         return self._filtered_next_token()
 
     def _filtered_next_token(self) -> Token:
@@ -103,6 +106,9 @@
         if self.tokens[-1].kind in [Tokens.BEGIN, Tokens.Newline]:
             self.input.readline()
 
+        if self.tokens[-1].kind == Tokens.EOF:
+            raise StopIteration("EOF already reached")
+
         if self.begin == len(self.input.stream_cache):
             eof_token = Token(Tokens.EOF, value=None, loc=SourceLocation(
                 Location(line=self.line, character=0),
@@ -144,8 +150,8 @@
             self.character += len(best_result.value)
             if best_result.kind == Tokens.Unknown:
                 source_hint = best_result.loc.show_in_source()
-                logger.error(f"{best_result.loc}: Unknown token '{best_result.loc.source_substring}'\n{source_hint}")
-                exit(1)
+                raise CompilationError(best_result.loc,
+                                       message=f"Unknown token '{best_result.loc.source_substring}'\n{source_hint}")
             elif best_result.kind == Tokens.Newline:
                 self.line += 1
                 self.character = 0
@@ -155,5 +161,3 @@
             self.tokens += [best_result]
             return best_result
-        else:
-            raise IndexError("EOF already reached")
 
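With the error now raised instead of exit(1) being called, callers can recover from lexing failures instead of having the whole process die. A sketch of the caller-side effect (the "mylang" package name and tokenize() helper are assumptions; only Lexer, CompilationError and TextIOWithMemory appear in the diff):

# Sketch of how a driver might consume the lexer after this change.
# The "mylang" package name and tokenize() helper are hypothetical;
# only Lexer, CompilationError and TextIOWithMemory come from the diff.
from mylang.errors import CompilationError
from mylang.lexer import Lexer
from mylang.source import TextIOWithMemory


def tokenize(stream: TextIOWithMemory) -> list | None:
    try:
        # list() now terminates cleanly: __next__ raises StopIteration
        # once the EOF token has been emitted.
        return list(Lexer(stream))
    except CompilationError as err:
        # Previously exit(1) killed the whole process here; now the
        # caller can report the error and keep going.
        print(err)
        return None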