"""Tests that BaseParser.get_text_from_tokens reconstructs source text from a token stream."""
import pytest
from core.tokenizer import Tokenizer, Token, TokenKind
from parsers.BaseParser import BaseParser


@pytest.mark.parametrize(
    "text, expected_text",
    [
        # Plain text and quoted strings round-trip unchanged.
        ("hello world", "hello world"),
        ("'hello' 'world'", "'hello' 'world'"),
        # Keywords and symbol soup (incl. non-ASCII) round-trip unchanged.
        ("def concept a from", "def concept a from"),
        ("()[]{}1=1.5+-/*><&é", "()[]{}1=1.5+-/*><&é"),
        # Concept markers are rewritten to the internal __C__ form.
        ("execute(c:concept_name:)", "execute(__C__concept_name__C__)"),
    ],
)
def test_i_can_get_text_from_tokens(text, expected_text):
    """Tokenize *text*, then check that rendering the token stream back
    through BaseParser.get_text_from_tokens yields *expected_text*."""
    token_stream = list(Tokenizer(text))
    rendered = BaseParser.get_text_from_tokens(token_stream)
    assert rendered == expected_text