Introduced ParserInput
This commit is contained in:
@@ -0,0 +1,79 @@
|
||||
import pytest
|
||||
from core.sheerka.services.SheerkaExecute import ParserInput
|
||||
from core.tokenizer import Tokenizer
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, start, end, expected", [
    ("def concept a", None, None, "def concept a"),
    ("&é#(-è_çà)='string'", None, None, "&é#(-è_çà)='string'"),
    ("def concept a", 2, None, "concept a"),
    ("def concept a", 0, 2, "def concept"),
])
def test_i_can_use_parser_input(text, start, end, expected):
    """A ParserInput built over a (sub)range of *text* renders that range via as_text()."""
    prepared = ParserInput(text, start=start, end=end).reset()
    assert prepared.as_text() == expected
|
||||
|
||||
|
||||
def test_i_can_get_the_next_token():
    """next_token() walks the token stream; start/end restrict the scanned window."""
    # Full scan with the default whitespace skipping; the trailing '' is the
    # final (empty) token the stream yields before stopping.
    source = ParserInput("def concept a from bnf 'xyz' as 'xyz'").reset()
    seen = []
    while source.next_token():
        seen.append(str(source.token.str_value))

    assert seen == ['def', 'concept', 'a', 'from', 'bnf', "'xyz'", 'as', "'xyz'", '']

    # Windowed scan (start=4, end=9) that also keeps whitespace tokens.
    source = ParserInput("def concept a concept name from bnf 'xyz' as 'xyz'", start=4, end=9).reset()
    seen = []
    while source.next_token(skip_whitespace=False):
        seen.append(str(source.token.str_value))

    assert seen == ['a', ' ', 'concept', ' ', 'name', ' ']
|
||||
|
||||
|
||||
def test_i_can_get_the_next_token_when_initialised_with_tokens():
    """A ParserInput seeded with a pre-built token list iterates exactly those tokens."""
    raw = " def concept a as 'xyz' "

    # Tokenizer with its default settings emits a trailing EOF token ('').
    pre_tokenized = list(Tokenizer(raw))
    source = ParserInput(raw, pre_tokenized).reset()
    seen = []
    while source.next_token():
        seen.append(str(source.token.str_value))

    assert seen == ['def', 'concept', 'a', 'as', "'xyz'", '']

    # With yield_eof=False the stream ends right after the last real token.
    pre_tokenized = list(Tokenizer(raw, yield_eof=False))
    source = ParserInput(raw, pre_tokenized).reset()
    seen = []
    while source.next_token():
        seen.append(str(source.token.str_value))

    assert seen == ['def', 'concept', 'a', 'as', "'xyz'"]
|
||||
|
||||
|
||||
def test_i_can_parse_twice():
    """After reset(), a ParserInput replays the identical token stream."""
    # NOTE(review): the scrape flattened this literal's inner indentation —
    # the token-for-token comparison below is indentation-agnostic anyway.
    text = """
def concept a + b
where isinstance(a, int) and isinstance(b, int)
pre isinstance(a, int) and isinstance(b, int)
post isinstance(res, int)
as:
def func(x,y):
return x+y
func(a,b)
"""

    first = ParserInput(text).reset()
    # Exhaust the stream once before resetting it.
    while first.next_token():
        pass

    # Replay must match a freshly built ParserInput, with either side
    # driving the iteration.
    first.reset()
    second = ParserInput(text).reset()
    while first.next_token():
        second.next_token()
        assert first.token == second.token

    first.reset()
    second = ParserInput(text).reset()
    while second.next_token():
        first.next_token()
        assert first.token == second.token
|
||||
Reference in New Issue
Block a user