# Tests for ParserInput: tokenization, error detection, and as_text rendering.
import pytest

from parsers.ParserInput import ParserInput
from parsers.tokenizer import LexerError, TokenKind


def test_i_can_parser_input():
    """A well-formed snippet initializes cleanly with no stored exception."""
    source = ParserInput("def concept a")
    assert source.init() is True
    assert source.exception is None


def test_i_can_detect_errors():
    """An unterminated string literal makes init() fail and record a LexerError."""
    source = ParserInput('def concept "a')
    assert source.init() is False
    assert isinstance(source.exception, LexerError)


def test_can_as_text_and_track_tokens():
    """as_text() substitutes concept/rule tokens via the switcher and records
    each substitution in the tracker, keyed by the rendered text."""
    source = ParserInput("execute(c:name1: if r:#id: else c:name2:)")
    source.init()

    # One renderer per token kind; each produces the placeholder text.
    renderers = {
        TokenKind.CONCEPT: lambda t: f"__CONCEPT__{t.value[0]}",
        TokenKind.RULE: lambda t: f"__RULE__{t.value[1]}",
    }
    seen = {}
    rendered = source.as_text(renderers, seen)

    assert rendered == "execute(__CONCEPT__name1 if __RULE__id else __CONCEPT__name2)"
    # Exactly the three substituted tokens are tracked, mapped back to
    # their positions in the token stream.
    assert len(seen) == 3
    assert seen["__CONCEPT__name1"] == source.all_tokens[2]
    assert seen["__RULE__id"] == source.all_tokens[6]
    assert seen["__CONCEPT__name2"] == source.all_tokens[10]


def test_i_must_call_init_before_call_as_text():
    """as_text() before init() must raise with an explanatory message."""
    source = ParserInput("execute(c:name1: if r:#id: else c:name2:)")

    with pytest.raises(Exception) as excinfo:
        source.as_text()

    assert excinfo.value.args[0] == "You must call init() first !"