First version of explain. Creating a new parser was the wrong approach; need to reimplement.
@@ -1,7 +1,7 @@
 import pytest
 
-from core.tokenizer import Tokenizer, TokenKind
-from parsers.BaseParser import BaseParser
+from core.tokenizer import Tokenizer, TokenKind, Token
+from parsers.BaseParser import BaseParser, BaseSplitIterParser
 
 
 @pytest.mark.parametrize("text, expected_text", [
@@ -23,3 +23,45 @@ def test_i_can_get_text_from_tokens(text, expected_text):
 def test_i_can_get_text_from_tokens_with_custom_switcher(text, custom, expected_text):
     tokens = list(Tokenizer(text))
     assert BaseParser.get_text_from_tokens(tokens, custom) == expected_text
+
+
+@pytest.mark.parametrize("text, expected", [
+    ("", ["<eof>"]),
+    ("one two -f --file", ["one", "two", "-f", "--file", "<eof>"]),
+    ("one 'two three'", ["one", "two three", "<eof>"]),
+    ('one "two three"', ["one", "two three", "<eof>"]),
+    ('one\\ two three"', ["one two", "three", "<eof>"]),
+    ("one 'two\\' three'", ["one", "two' three", "<eof>"]),
+    ("one\\\\two three", ["one\\two", "three", "<eof>"]),
+    ("one\ntwo three", ["one", "two", "three", "<eof>"]),
+    ("one \n two three", ["one", "two", "three", "<eof>"]),
+    ("'one \n two' three", ["one \n two", "three", "<eof>"]),
+    ("a=b", ["a", "=", "b", "<eof>"]),
+    ("a = b", ["a", "=", "b", "<eof>"]),
+    ("a==b", ["a", "==", "b", "<eof>"]),
+    ("a == b", ["a", "==", "b", "<eof>"]),
+])
+def test_i_can_split_using_base_split_iterparser_class(text, expected):
+    parser = BaseSplitIterParser("BaseSplitIterParser", 0)
+    parser.reset_parser(None, text)
+    res = [t.value for t in parser.split()]
+
+    assert res == expected
+
+
+def test_i_can_test_split_iter_parser_indexes():
+    parser = BaseSplitIterParser("BaseSplitIterParser", 0)
+    text = "one two \n three = ==(),"
+    parser.reset_parser(None, text)
+    res = []
+    while parser.next_token():
+        res.append(parser.get_token())
+
+    assert res[0] == Token(TokenKind.WORD, "one", 0, 1, 1)
+    assert res[1] == Token(TokenKind.WORD, "two", 4, 1, 5)
+    assert res[2] == Token(TokenKind.WORD, "three", 10, 2, 2)
+    assert res[3] == Token(TokenKind.EQUALS, "=", 16, 2, 8)
+    assert res[4] == Token(TokenKind.EQUALSEQUALS, "==", 18, 2, 10)
+    assert res[5] == Token(TokenKind.LPAR, "(", 20, 2, 12)
+    assert res[6] == Token(TokenKind.RPAR, ")", 21, 2, 13)
+    assert res[7] == Token(TokenKind.COMMA, ",", 22, 2, 14)