import pytest

from core.tokenizer import Tokenizer
from parsers.BaseParser import BaseParser
@pytest.mark.parametrize(
    "tokens, expected",
    [
        # No token stream at all.
        (None, None),
        # Empty inputs: a bare list and empty sources, with and without EOF.
        ([], (0, 0)),
        (list(Tokenizer("")), (0, 0)),
        (list(Tokenizer("", yield_eof=False)), (0, 0)),
        # Source with leading whitespace before the token.
        (list(Tokenizer(" a")), (1, 1)),
        (list(Tokenizer(" a", yield_eof=False)), (1, 1)),
        # Source with trailing whitespace after the token.
        (list(Tokenizer("a ")), (0, 0)),
        (list(Tokenizer("a ", yield_eof=False)), (0, 0)),
        # Whitespace on both sides of the token.
        (list(Tokenizer(" a ")), (1, 1)),
        (list(Tokenizer(" a ", yield_eof=False)), (1, 1)),
    ],
)
def test_i_can_get_tokens_boundaries(tokens, expected):
    """BaseParser.get_tokens_boundaries returns the expected boundary pair
    (or None) for each tokenized source."""
    boundaries = BaseParser.get_tokens_boundaries(tokens)
    assert boundaries == expected