tests: lexer: remaining test cases for 100% lexer coverage
This commit is contained in:
parent
d7613c8ed2
commit
7bb0d7b0a4
1 changed file with 44 additions and 1 deletion
|
|
@@ -1,6 +1,9 @@
|
|||
import io
|
||||
|
||||
import pytest
|
||||
|
||||
from compiler.lexer import Tokens, TextIOWithMemory, Lexer, Token
|
||||
from compiler import errors
|
||||
|
||||
|
||||
def _check_token_type(lexer: Lexer, expected_tokens: list[Tokens]):
|
||||
|
|
@@ -14,6 +17,11 @@ def _check_token_type(lexer: Lexer, expected_tokens: list[Tokens]):
|
|||
assert tuple(tokens) == tuple(expected_tokens)
|
||||
|
||||
|
||||
def test_lexer_misc():
    """Miscellaneous lexer sanity checks that fit no other test."""
    # Every Tokens member is supposed to evaluate as truthy, regardless of
    # what its underlying enum value happens to be.
    assert bool(Tokens.Blank) is True
|
||||
|
||||
|
||||
def test_lexer_empty():
|
||||
data_raw = io.StringIO("")
|
||||
data = TextIOWithMemory(data_raw)
|
||||
|
|
@@ -21,13 +29,14 @@ def test_lexer_empty():
|
|||
my_lexer = Lexer(input_stream=data)
|
||||
_check_token_type(my_lexer, [Tokens.BEGIN, Tokens.EOF])
|
||||
|
||||
|
||||
def test_lexer_arithmetic():
    """Lex a flat arithmetic expression and verify the exact token stream.

    The input "1+5-41*2/1" contains one of each binary operator, so the
    expected stream is one Integer/operator pair per symbol, bracketed by
    the implicit BEGIN and EOF markers.
    """
    data_raw = io.StringIO("1+5-41*2/1")
    data = TextIOWithMemory(data_raw)

    my_lexer = Lexer(input_stream=data)
    # Fix: the expected list previously contained the run
    # "Op_Minus, Integer, Op_Multiply, Integer" twice, which cannot match
    # the input above (it has exactly one '-' and one '*').
    _check_token_type(my_lexer, [Tokens.BEGIN, Tokens.Integer, Tokens.Op_Plus, Tokens.Integer,
                                 Tokens.Op_Minus, Tokens.Integer, Tokens.Op_Multiply, Tokens.Integer,
                                 Tokens.Op_Divide, Tokens.Integer, Tokens.EOF])
|
||||
|
||||
|
||||
|
|
@@ -47,3 +56,37 @@ def test_lexer_blocks():
|
|||
Tokens.Newline,
|
||||
Tokens.Blank, Tokens.Brace_Right, Tokens.Newline,
|
||||
Tokens.Blank, Tokens.EOF])
|
||||
|
||||
|
||||
def test_lexer_invalid_token():
    """An unknown character must abort lexing with a CompilationError."""
    with pytest.raises(errors.CompilationError) as excinfo:
        bad_lexer = Lexer(input_stream=TextIOWithMemory(io.StringIO("4a")))

        # Lexing is lazy, so exhaust the iterator to reach the bad token.
        list(bad_lexer)
    assert excinfo.type == errors.CompilationError
    # NOTE(review): the caret line below may rely on alignment whitespace —
    # confirm the expected string against the lexer's actual error output.
    assert excinfo.value.args == ("<none>:0:1: error: Unknown token 'a'\nLine 0: 4a\n ^",)
|
||||
|
||||
|
||||
def test_lexer_eof():
    """Once EOF is reached, iteration and _next_token both raise StopIteration."""
    lexer = Lexer(input_stream=TextIOWithMemory(io.StringIO("{ 1; }")))
    # Drain the whole stream so the lexer reaches its end-of-file state.
    list(lexer)

    # Public iteration protocol: a further next() must fail with the
    # descriptive StopIteration message.
    with pytest.raises(StopIteration) as excinfo:
        next(lexer)
    assert excinfo.type == StopIteration
    assert excinfo.value.args == ("EOF already reached",)

    # The internal advance helper must behave identically.
    with pytest.raises(StopIteration) as excinfo:
        lexer._next_token()
    assert excinfo.type == StopIteration
    assert excinfo.value.args == ("EOF already reached",)
|
||||
|
||||
|
||||
def test_lexer_keyword_match():
    """Coverage test for the keyword-vs-identifier longest-match tie-break.

    Lexing "let" (a keyword) followed by "leta" (a plain identifier) drives
    the lexer through its "Best match is a keyword and current match is not,
    skipping" branch.
    """
    lexer = Lexer(input_stream=TextIOWithMemory(io.StringIO("{ let leta = 3; }")))

    # Consume every token; reaching the branch is the point, no asserts needed.
    list(lexer)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue