First version of explain. Creating a new parser was the wrong approach; it needs to be reimplemented.
This commit is contained in:
@@ -1,7 +1,7 @@
|
||||
import pytest
|
||||
|
||||
from core.tokenizer import Tokenizer, TokenKind
|
||||
from parsers.BaseParser import BaseParser
|
||||
from core.tokenizer import Tokenizer, TokenKind, Token
|
||||
from parsers.BaseParser import BaseParser, BaseSplitIterParser
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected_text", [
|
||||
@@ -23,3 +23,45 @@ def test_i_can_get_text_from_tokens(text, expected_text):
|
||||
def test_i_can_get_text_from_tokens_with_custom_switcher(text, custom, expected_text):
    """Reconstructing text from a token stream honours a custom switcher."""
    token_list = list(Tokenizer(text))
    rebuilt = BaseParser.get_text_from_tokens(token_list, custom)
    assert rebuilt == expected_text
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected", [
    ("", ["<eof>"]),
    ("one two -f --file", ["one", "two", "-f", "--file", "<eof>"]),
    ("one 'two three'", ["one", "two three", "<eof>"]),
    ('one "two three"', ["one", "two three", "<eof>"]),
    ('one\\ two three"', ["one two", "three", "<eof>"]),
    ("one 'two\\' three'", ["one", "two' three", "<eof>"]),
    ("one\\\\two three", ["one\\two", "three", "<eof>"]),
    ("one\ntwo three", ["one", "two", "three", "<eof>"]),
    ("one \n two three", ["one", "two", "three", "<eof>"]),
    ("'one \n two' three", ["one \n two", "three", "<eof>"]),
    ("a=b", ["a", "=", "b", "<eof>"]),
    ("a = b", ["a", "=", "b", "<eof>"]),
    ("a==b", ["a", "==", "b", "<eof>"]),
    ("a == b", ["a", "==", "b", "<eof>"]),
])
def test_i_can_split_using_base_split_iterparser_class(text, expected):
    """Splitting handles quoting, escapes, newlines and the = / == operators."""
    split_parser = BaseSplitIterParser("BaseSplitIterParser", 0)
    split_parser.reset_parser(None, text)

    values = [token.value for token in split_parser.split()]

    assert values == expected
|
||||
|
||||
|
||||
def test_i_can_test_split_iter_parser_indexes():
    """Each emitted token carries the correct offset, line and column."""
    split_parser = BaseSplitIterParser("BaseSplitIterParser", 0)
    source = "one two \n three = ==(),"
    split_parser.reset_parser(None, source)

    collected = []
    while split_parser.next_token():
        collected.append(split_parser.get_token())

    # Expected (kind, value, offset, line, column) for the first eight tokens.
    expected_tokens = [
        Token(TokenKind.WORD, "one", 0, 1, 1),
        Token(TokenKind.WORD, "two", 4, 1, 5),
        Token(TokenKind.WORD, "three", 10, 2, 2),
        Token(TokenKind.EQUALS, "=", 16, 2, 8),
        Token(TokenKind.EQUALSEQUALS, "==", 18, 2, 10),
        Token(TokenKind.LPAR, "(", 20, 2, 12),
        Token(TokenKind.RPAR, ")", 21, 2, 13),
        Token(TokenKind.COMMA, ",", 22, 2, 14),
    ]
    for index, wanted in enumerate(expected_tokens):
        assert collected[index] == wanted
|
||||
|
||||
@@ -73,7 +73,7 @@ def cprop(concept, prop_name):
|
||||
return concept.compiled[prop_name]
|
||||
|
||||
|
||||
class TestBnfConceptLexerParser(TestUsingMemoryBasedSheerka):
|
||||
class TestBnfNodeParser(TestUsingMemoryBasedSheerka):
|
||||
|
||||
def init(self, concepts, grammar):
|
||||
sheerka = self.get_sheerka(singleton=True)
|
||||
@@ -0,0 +1,205 @@
|
||||
import pytest
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from parsers.BaseParser import UnexpectedTokenErrorNode, UnexpectedEof
|
||||
from parsers.ExplainParser import ExplainParser, ExplanationNode, MultipleDigestError, ValueErrorNode, \
|
||||
RecurseDefNode, FormatLNode, UnionNode, FilterNode, FormatDNode
|
||||
from parsers.ExpressionParser import PropertyContainsNode, PropertyEqualsNode, TrueNode, AndNode, OrNode
|
||||
|
||||
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
|
||||
|
||||
|
||||
class TestExplainParser(TestUsingMemoryBasedSheerka):
    """Tests for ExplainParser: digest handling, -f filter expressions and directives."""

    def init_parser(self, **kwargs):
        # Build the trio every test needs: a sheerka, a context and a fresh parser.
        sheerka = self.get_sheerka(singleton=True, **kwargs)
        context = self.get_context(sheerka)
        return sheerka, context, ExplainParser()

    def test_i_cannot_parse_empty_string(self):
        """An empty input is rejected with a NOT_FOR_ME result."""
        sheerka, context, parser = self.init_parser()

        result = parser.parse(context, "")

        assert not result.status
        assert sheerka.isinstance(result.body, BuiltinConcepts.NOT_FOR_ME)

    def test_i_cannot_parse_if_not_for_me(self):
        """Input not addressed to this parser yields NOT_FOR_ME with a token-error reason."""
        sheerka, context, parser = self.init_parser()

        text = "foo"
        result = parser.parse(context, text)
        not_for_me = result.body

        assert not result.status
        assert sheerka.isinstance(not_for_me, BuiltinConcepts.NOT_FOR_ME)
        assert not_for_me.body == text
        assert isinstance(not_for_me.reason[0], UnexpectedTokenErrorNode)

    @pytest.mark.parametrize("text, digest, command, directives", [
        # ("explain", "", "explain", []),
        ("explain digest", "digest", "explain digest", []),
        ("explain -r 3", "", "explain -r 3", [RecurseDefNode(3)]),
        ("explain digest -r 3", "digest", "explain digest -r 3", [RecurseDefNode(3)]),
    ])
    def test_i_can_parse_explain_without_filter(self, text, digest, command, directives):
        """Without -f, all directives attach to a single implicit TrueNode filter."""
        sheerka, context, parser = self.init_parser()

        result = parser.parse(context, text)
        wrapper = result.body
        node = result.body.body

        assert result.status
        assert sheerka.isinstance(wrapper, BuiltinConcepts.PARSER_RESULT)
        assert wrapper.parser.name == "parsers.Explain"
        assert wrapper.source == text

        assert node.digest == digest
        assert node.command == command
        assert node.expr == UnionNode([FilterNode(TrueNode(), directives)])

    def test_i_can_parse_using_filter(self):
        """A -f expression adds a FilterNode after the implicit TrueNode one."""
        sheerka, context, parser = self.init_parser()

        text = "explain -f a=b"
        result = parser.parse(context, text)
        wrapper = result.body
        node = result.body.body

        assert result.status
        assert sheerka.isinstance(wrapper, BuiltinConcepts.PARSER_RESULT)
        assert wrapper.parser.name == "parsers.Explain"
        assert wrapper.source == text

        assert node.expr == UnionNode([
            FilterNode(TrueNode()),
            FilterNode(PropertyContainsNode("a", "b"))])

    @pytest.mark.parametrize("text, expected", [
        ("-f a==b", PropertyEqualsNode("a", "b")),
        ("--filter a==b", PropertyEqualsNode("a", "b")),
        ("-f a==b and c=d", AndNode(PropertyEqualsNode("a", "b"), PropertyContainsNode("c", "d"))),
        ("-f a==b or c=d", OrNode(PropertyEqualsNode("a", "b"), PropertyContainsNode("c", "d"))),
        ("-f a==b or c==d and e==f", OrNode(
            PropertyEqualsNode("a", "b"),
            AndNode(PropertyEqualsNode("c", "d"), PropertyEqualsNode("e", "f")))),
        ("-f a==b and c==d or e==f", OrNode(
            AndNode(PropertyEqualsNode("a", "b"), PropertyEqualsNode("c", "d")),
            PropertyEqualsNode("e", "f"))),
        ("-f (a==b or c==d) and e==f", AndNode(
            OrNode(PropertyEqualsNode("a", "b"), PropertyEqualsNode("c", "d")),
            PropertyEqualsNode("e", "f"))),
    ])
    def test_i_can_parse_filter_expressions(self, text, expected):
        """Filter expressions honour and/or precedence and parentheses."""
        sheerka, context, parser = self.init_parser()

        result = parser.parse(context, "explain " + text)
        wrapper = result.body
        node = result.body.body
        expr = node.expr.filters[-1].expr

        assert result.status
        assert sheerka.isinstance(wrapper, BuiltinConcepts.PARSER_RESULT)
        assert isinstance(node, ExplanationNode)

        assert expr == expected

    @pytest.mark.parametrize("text, expected", [
        ("-r 2", [
            FilterNode(TrueNode(), [RecurseDefNode(2)])
        ]),
        ("--format_l 'abc'", [
            FilterNode(TrueNode(), [FormatLNode('abc')])
        ]),
        ("--format_d 'abc'", [
            FilterNode(TrueNode(), [FormatDNode({"abc": "{abc}"})])
        ]),
        ("--format_d a,b,c", [
            FilterNode(TrueNode(), [FormatDNode({"a": "{a}", "b": "{b}", "c": "{c}"})])
        ]),
        ("--format_d a , b , c", [
            FilterNode(TrueNode(), [FormatDNode({"a": "{a}", "b": "{b}", "c": "{c}"})])
        ]),
        ("-r 2 --format_l 'abc'", [
            FilterNode(TrueNode(), [RecurseDefNode(2), FormatLNode('abc')])
        ]),
        ("--format_d a, b -r 2", [
            FilterNode(TrueNode(), [FormatDNode({"a": "{a}", "b": "{b}"}), RecurseDefNode(2)])
        ]),
        ("-f a==b -r 3", [
            FilterNode(TrueNode()),
            FilterNode(PropertyEqualsNode("a", "b"), [RecurseDefNode(3)]),
        ]),
        ("-f a==b --format_l 'abc'", [
            FilterNode(TrueNode()),
            FilterNode(PropertyEqualsNode("a", "b"), [FormatLNode("abc")]),
        ]),
        ("-r 3 -f a==b", [
            FilterNode(TrueNode(), [RecurseDefNode(3)]),
            FilterNode(PropertyEqualsNode("a", "b"), []),
        ]),
        ("--format_l 'abc' -f a==b", [
            FilterNode(TrueNode(), [FormatLNode("abc")]),
            FilterNode(PropertyEqualsNode("a", "b"), []),
        ]),
        ("-f a==b -f c==d", [
            FilterNode(TrueNode()),
            FilterNode(PropertyEqualsNode("a", "b")),
            FilterNode(PropertyEqualsNode("c", "d"))
        ]),
        ("-r 1 -f a==b -r 2 -f c==d -r 3", [
            FilterNode(TrueNode(), [RecurseDefNode(1)]),
            FilterNode(PropertyEqualsNode("a", "b"), [RecurseDefNode(2)]),
            FilterNode(PropertyEqualsNode("c", "d"), [RecurseDefNode(3)])
        ]),
    ])
    def test_i_can_parse_other_directives(self, text, expected):
        """Directives (-r, --format_l, --format_d) bind to the nearest preceding filter."""
        sheerka, context, parser = self.init_parser()

        result = parser.parse(context, "explain " + text)
        wrapper = result.body
        node = result.body.body
        expr = node.expr

        assert result.status
        assert sheerka.isinstance(wrapper, BuiltinConcepts.PARSER_RESULT)
        assert isinstance(node, ExplanationNode)

        assert expr.filters == expected

    @pytest.mark.parametrize("text, expected", [
        ("explain -d digest", "digest"),
        ("explain -d", ""),
        ("explain -d -f a=b", "")
    ])
    def test_i_can_parse_record_digest(self, text, expected):
        """-d sets record_digest, optionally naming the digest to record."""
        sheerka, context, parser = self.init_parser()

        result = parser.parse(context, text)
        node = result.body.body

        assert node.digest == expected
        assert node.record_digest

    @pytest.mark.parametrize("text, expected_error_type", [
        ("explain digest1 digest2", MultipleDigestError),
        ("explain -r", UnexpectedEof),
        ("explain -r foo", ValueErrorNode),
        ("explain -r 1.2", ValueErrorNode),
        ("explain -f -r 1.2", UnexpectedTokenErrorNode),
        ("explain -f", UnexpectedEof),
        ("explain --format_d", UnexpectedEof),
        ("explain --format_l", UnexpectedEof),
        ("explain --format_l -r foo", UnexpectedTokenErrorNode),
        ("explain --format_d -r foo", UnexpectedTokenErrorNode),
    ])
    def test_i_cannot_parse(self, text, expected_error_type):
        """Malformed input yields an ERROR result carrying exactly one typed error node."""
        sheerka, context, parser = self.init_parser()

        result = parser.parse(context, text)
        error = result.body
        errors = result.body.body

        assert not result.status
        assert sheerka.isinstance(error, BuiltinConcepts.ERROR)
        assert len(errors) == 1
        assert isinstance(errors[0], expected_error_type)
|
||||
@@ -0,0 +1,103 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
|
||||
from core.concept import Concept
|
||||
from parsers.ExpressionParser import PropertyEqualsNode, PropertyEqualsSequenceNode, PropertyContainsNode, AndNode, \
|
||||
OrNode, NotNode, LambdaNode, IsaNode
|
||||
|
||||
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
|
||||
|
||||
|
||||
@dataclass
class Obj:
    """Minimal stand-in object used as an eval() target in the expression tests."""

    prop_a: object
    prop_b: object = None
    prop_c: object = None
    parent: object = None  # used by PropertyEqualsSequenceNode to walk up a chain
|
||||
|
||||
|
||||
class TestExpressionParser(TestUsingMemoryBasedSheerka):
    """Unit tests for the eval() behaviour of the expression-tree nodes."""

    def test_i_can_test_property_equals(self):
        matcher = PropertyEqualsNode("prop_a", "good value")

        assert matcher.eval(Obj(prop_a="good value"))
        assert not matcher.eval(Obj(prop_a="other value"))

    def test_i_can_test_property_equals_for_int(self):
        # The int 1 and the string "1" both match the pattern "1".
        matcher = PropertyEqualsNode("prop_a", "1")

        assert matcher.eval(Obj(prop_a=1))
        assert matcher.eval(Obj(prop_a="1"))

    def test_i_can_test_property_equals_sequence(self):
        # The property list is checked against the object chain via `parent`.
        matcher = PropertyEqualsSequenceNode(["prop_b", "prop_a"], ["good parent", "good child"])

        assert matcher.eval(Obj(prop_a="good child", parent=Obj(prop_a="Don't care", prop_b="good parent")))
        assert not matcher.eval(Obj(prop_a="good child", parent=Obj(prop_a="Don't care", prop_b="wrong parent")))
        assert not matcher.eval(Obj(prop_a="good child"))
        assert not matcher.eval(Obj(prop_a="wrong child", parent=Obj(prop_a="Don't care", prop_b="good parent")))

    def test_i_can_test_property_contains(self):
        matcher = PropertyContainsNode("prop_a", "substring")

        assert matcher.eval(Obj(prop_a="it contains substring in it"))
        assert not matcher.eval(Obj(prop_a="it does not"))

    def test_i_can_test_property_contains_for_int(self):
        # An int property matches when its digits contain the pattern.
        matcher = PropertyContainsNode("prop_a", "44")

        assert matcher.eval(Obj(prop_a=123445))
        assert not matcher.eval(Obj(prop_a=12435))

    def test_i_can_test_and(self):
        a_matches = PropertyEqualsNode("prop_a", "good a")
        b_matches = PropertyEqualsNode("prop_b", "good b")
        c_matches = PropertyEqualsNode("prop_c", "good c")
        conjunction = AndNode(a_matches, b_matches, c_matches)

        assert conjunction.eval(Obj("good a", "good b", "good c"))
        assert not conjunction.eval(Obj("wrong a", "good b", "good c"))
        assert not conjunction.eval(Obj("good a", "wrong b", "good c"))
        assert not conjunction.eval(Obj("good a", "good b", "wrong c"))

    def test_i_can_test_or(self):
        a_matches = PropertyEqualsNode("prop_a", "good a")
        b_matches = PropertyEqualsNode("prop_b", "good b")
        c_matches = PropertyEqualsNode("prop_c", "good c")
        disjunction = OrNode(a_matches, b_matches, c_matches)

        assert disjunction.eval(Obj("wrong a", "good b", "good c"))
        assert disjunction.eval(Obj("good a", "wrong b", "good c"))
        assert disjunction.eval(Obj("good a", "good b", "wrong c"))
        assert not disjunction.eval(Obj("wrong a", "wrong b", "wrong c"))

    def test_i_can_test_not(self):
        negated = NotNode(PropertyEqualsNode("prop_a", "good value"))

        assert not negated.eval(Obj(prop_a="good value"))
        assert negated.eval(Obj(prop_a="wrong value"))

    def test_i_can_test_lambda_node(self):
        matcher = LambdaNode(lambda o: o.prop_a + o.prop_b == "ab")

        assert matcher.eval(Obj(prop_a="a", prop_b="b"))
        assert not matcher.eval(Obj(prop_a="wrong value", prop_b="wrong value"))
        assert not matcher.eval(Obj(prop_a="wrong value"))  # exception is caught

    def test_i_can_test_isa_node(self):
        # IsaNode accepts a plain class...
        class_node = IsaNode(Obj)
        assert class_node.eval(Obj(prop_a="value"))
        assert not class_node.eval(TestExpressionParser())

        # ...or a concept key...
        concept_node = IsaNode(BuiltinConcepts.RETURN_VALUE)
        assert concept_node.eval(ReturnValueConcept())
        assert concept_node.eval(Concept(name="foo", key=BuiltinConcepts.RETURN_VALUE))
        assert not concept_node.eval(Obj)
        assert not concept_node.eval(Concept())

        # ...or a concept name given as a string.
        concept_node2 = IsaNode("foo")
        assert concept_node2.eval(Concept("foo").init_key())
        assert not concept_node2.eval(Obj)
        assert not concept_node2.eval(Concept())
|
||||
@@ -191,7 +191,7 @@ class TestMultipleConceptsParser(TestUsingMemoryBasedSheerka):
|
||||
|
||||
@pytest.mark.parametrize("text, expected_source, expected_end", [
|
||||
("True", "True", 0),
|
||||
("1 == 1", "1 == 1", 5),
|
||||
("1 == 1", "1 == 1", 4),
|
||||
("1!xdf", "1", 0),
|
||||
("1", "1", 0),
|
||||
])
|
||||
|
||||
Reference in New Issue
Block a user