# Tests for parsers.PythonParser (plain-text input, token input, and error detection).
import ast
|
|
import pytest
|
|
from core.builtin_concepts import ParserResultConcept, NotForMeConcept
|
|
from core.tokenizer import Tokenizer, LexerError
|
|
from parsers.PythonParser import PythonNode, PythonParser, PythonErrorNode
|
|
import core.utils
|
|
|
|
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
|
|
|
|
|
|
class TestPythonParser(TestUsingMemoryBasedSheerka):
    """Exercise PythonParser over raw text, token streams, bad syntax, and concepts."""

    # Happy-path cases shared by the text-based and token-based tests:
    # (source text, PythonNode the parser is expected to produce).
    SIMPLE_CASES = [
        ("1+1", PythonNode("1+1", ast.parse("1+1", mode="eval"))),
        ("a=10", PythonNode("a=10", ast.parse("a=10", mode="exec"))),
    ]

    @pytest.mark.parametrize("text, expected", SIMPLE_CASES)
    def test_i_can_parse_a_simple_expression(self, text, expected):
        """A plain source string parses successfully into the expected node."""
        parser = PythonParser()

        result = parser.parse(self.get_context(), text)

        assert result.status
        assert result.who == parser.name
        assert isinstance(result.value, ParserResultConcept)
        assert result.value.value == expected

    @pytest.mark.parametrize("text, expected", SIMPLE_CASES)
    def test_i_can_parse_from_tokens(self, text, expected):
        """Feeding a pre-tokenized stream yields the same result as raw text."""
        parser = PythonParser()
        token_stream = [token for token in Tokenizer(text)]

        result = parser.parse(self.get_context(), token_stream)

        assert result.status
        assert result.who == parser.name
        assert isinstance(result.value, ParserResultConcept)
        assert result.value.value == expected

    @pytest.mark.parametrize("text", [
        "1+",
        "'name",
        "foo = 'name"
    ])
    def test_i_can_detect_error(self, text):
        """Syntactically invalid Python is rejected with a PythonErrorNode reason."""
        parser = PythonParser()

        result = parser.parse(self.get_context(), text)

        assert not result.status
        assert result.who == parser.name
        assert isinstance(result.value, NotForMeConcept)
        assert result.value.body == text

        reasons = result.value.get_value("reason")
        assert len(reasons) == 1
        assert isinstance(reasons[0], PythonErrorNode)
        assert isinstance(reasons[0].exception, SyntaxError)

    @pytest.mark.parametrize("text, error_msg, error_text", [
        ("c::", "Concept identifiers not found", ""),
        ("c:: + 1", "Concept identifiers not found", ""),
    ])
    def test_i_can_detect_lexer_errors(self, text, error_msg, error_text):
        """Malformed concept markers surface as a single LexerError reason."""
        parser = PythonParser()

        result = parser.parse(self.get_context(), text)

        assert not result.status
        assert isinstance(result.value, NotForMeConcept)
        assert result.value.body == text

        reasons = result.value.get_value("reason")
        assert len(reasons) == 1
        assert isinstance(reasons[0], LexerError)
        assert reasons[0].message == error_msg
        assert reasons[0].text == error_text

    def test_i_can_parse_a_concept(self):
        """A concept reference inside an expression is encoded before parsing."""
        text = "c:name|key: + 1"
        parser = PythonParser()

        result = parser.parse(self.get_context(), text)

        assert result
        # The parser should rewrite the concept marker into its encoded form
        # and parse the remaining expression in eval mode.
        encoded = core.utils.encode_concept(("name", "key"), True)
        assert result.value.value == PythonNode(
            "c:name|key: + 1",
            ast.parse(encoded + "+1", mode="eval"))