"""Tests for ``BaseParser.get_text_from_tokens`` round-tripping tokenized text."""
import pytest

from core.tokenizer import Tokenizer, TokenKind
from parsers.BaseParser import BaseParser


@pytest.mark.parametrize(
    "text, expected_text",
    [
        ("hello world", "hello world"),
        ("'hello' 'world'", "'hello' 'world'"),
        ("def concept a from", "def concept a from"),
        ("()[]{}1=1.5+-/*><&é", "()[]{}1=1.5+-/*><&é"),
        ("execute(c:concept_name:)", "execute(c:concept_name:)"),
    ],
)
def test_i_can_get_text_from_tokens(text, expected_text):
    """Tokenizing *text* and reassembling the tokens must round-trip exactly."""
    token_stream = list(Tokenizer(text))
    reassembled = BaseParser.get_text_from_tokens(token_stream)
    assert reassembled == expected_text


@pytest.mark.parametrize(
    "text, custom, expected_text",
    [
        (
            "execute(c:concept_name:)",
            {TokenKind.CONCEPT: lambda t: f"__C__{t.value}"},
            "execute(__C__concept_name)",
        ),
    ],
)
def test_i_can_get_text_from_tokens_with_custom_switcher(text, custom, expected_text):
    """A custom per-kind switcher overrides the default rendering for that token kind."""
    token_stream = list(Tokenizer(text))
    rendered = BaseParser.get_text_from_tokens(token_stream, custom)
    assert rendered == expected_text