tests: add basic lexer tests
This commit is contained in:
parent
4d13cdfb46
commit
0a042d0696
1 changed files with 32 additions and 0 deletions
32
compiler/tests/test_lexer.py
Normal file
32
compiler/tests/test_lexer.py
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
import io
|
||||
|
||||
from compiler.lexer import Tokens, TextIOWithMemory, Lexer, Token
|
||||
|
||||
|
||||
def test_lexer_empty():
    """An empty input stream must produce exactly a BEGIN token then EOF."""
    source = TextIOWithMemory(io.StringIO(""))

    lexer = Lexer(input_stream=source)

    first = next(lexer)
    second = next(lexer)

    assert first.kind == Tokens.BEGIN
    assert second.kind == Tokens.EOF
|
||||
|
||||
|
||||
def test_lexer_arithmetic():
    """Lexing "1+5-4*2/1" yields BEGIN, alternating Integer/operator tokens, then EOF.

    Drains the lexer until it emits EOF and compares the full kind sequence
    in one assertion, which produces a clearer diff on failure than a length
    check followed by element-wise asserts.
    """
    data = TextIOWithMemory(io.StringIO("1+5-4*2/1"))

    my_lexer = Lexer(input_stream=data)

    expected_kinds = [
        Tokens.BEGIN,
        Tokens.Integer, Tokens.Op_Plus,
        Tokens.Integer, Tokens.Op_Minus,
        Tokens.Integer, Tokens.Op_Multiply,
        Tokens.Integer, Tokens.Op_Divide,
        Tokens.Integer,
        Tokens.EOF,
    ]

    tokens: list[Token] = []
    # Sentinel token so the drain loop runs at least once.
    tok: Token = Token(Tokens.Unknown)
    while tok.kind != Tokens.EOF:
        tok = next(my_lexer)
        tokens.append(tok)  # idiomatic append instead of `tokens += [tok]`

    # Single list comparison replaces len() check + zip loop; it also
    # avoids reusing/shadowing `tok` as a second loop variable.
    assert [t.kind for t in tokens] == expected_kinds
|
||||
Loading…
Add table
Add a link
Reference in a new issue