Added BNF support when adding a new concept; started logging filtering

This commit is contained in:
2019-12-13 20:26:11 +01:00
parent 75c8793d53
commit c668cc46d2
29 changed files with 1487 additions and 190 deletions
+175 -34
View File
@@ -2,8 +2,18 @@ import pytest
from core.builtin_concepts import BuiltinConcepts
from core.concept import Concept
from core.sheerka import Sheerka, ExecutionContext
from core.tokenizer import Tokenizer, TokenKind
from parsers.ConceptLexerParser import ConceptLexerParser, ConceptNode, Sequence, StrMatch, OrderedChoice, Optional, \
CrossRef
CrossRef, RegexParser, ZeroOrMore, OneOrMore, UnexpectedEndOfFileError, UnexpectedTokenErrorNode, ConceptMatch, \
ParsingExpressionVisitor
class ConceptVisitor(ParsingExpressionVisitor):
    """Test helper: collects the name of every ConceptMatch node in a parsing-expression tree."""
    def __init__(self):
        # Deduplicated set of concept names encountered during the visit.
        self.concepts = set()
    def visit_ConceptMatch(self, node):
        # Dispatch hook — presumably invoked by the base visitor once per
        # ConceptMatch node (TODO confirm dispatch convention); record its name.
        self.concepts.add(node.concept_name)
@pytest.mark.parametrize("match, text", [
@@ -23,7 +33,7 @@ def test_i_can_match_simple_tokens(match, text):
foo = Concept(name="foo")
concepts = {foo: text}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, text)
@@ -38,7 +48,7 @@ def test_i_can_match_multiple_concepts_in_one_input():
two = Concept(name="two")
concepts = {one: "one", two: "two"}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two one")
@@ -69,7 +79,7 @@ def test_i_cannot_match_when_part_of_the_input_is_unknown():
two = Concept(name="two")
concepts = {one: "one", two: "two"}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two three")
assert not res.status
@@ -86,7 +96,7 @@ def test_i_can_match_sequence():
foo = Concept(name="foo")
concepts = {foo: Sequence("one", "two", "three")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two three")
@@ -100,7 +110,7 @@ def test_wrong_sequence_is_not_matched():
foo = Concept(name="foo")
concepts = {foo: Sequence("one", "two", "three")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two three one")
@@ -116,7 +126,7 @@ def test_i_cannot_match_sequence_if_end_of_file():
foo = Concept(name="foo")
concepts = {foo: Sequence("one", "two", "three")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two")
assert not res.status
@@ -133,7 +143,7 @@ def test_i_always_choose_the_longest_match():
concepts = {bar: Sequence("one", "two"), foo: Sequence("one", "two", "three")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two three")
@@ -149,7 +159,7 @@ def test_i_can_match_several_sequences():
concepts = {bar: Sequence("one", "two"), foo: Sequence("one", "two", "three")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two three one two")
@@ -166,7 +176,7 @@ def test_i_can_match_ordered_choice():
foo = Concept(name="foo")
concepts = {foo: OrderedChoice("one", "two")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res1 = parser.parse(context, "one")
assert res1.status
@@ -189,7 +199,7 @@ def test_i_cannot_match_ordered_choice_with_empty_alternative():
foo = Concept(name="foo")
concepts = {foo: Sequence(OrderedChoice("one", ""), "two")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "ok") # because token[0] is not "one" and not "" (it is 'two')
assert not res.status
@@ -201,7 +211,7 @@ def test_i_can_mix_sequences_and_ordered_choices():
concepts = {foo: Sequence(OrderedChoice("twenty", "thirty"), "one", "ok")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res1 = parser.parse(context, "twenty one ok")
assert res1.status
@@ -225,7 +235,7 @@ def test_i_can_mix_ordered_choices_and_sequences():
concepts = {foo: OrderedChoice(Sequence("twenty", "thirty"), "one")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "twenty thirty")
assert res.status
@@ -240,7 +250,7 @@ def test_i_cannot_parse_empty_optional():
concepts = {foo: Optional("one")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "")
assert not res.status
@@ -253,7 +263,7 @@ def test_i_can_parse_optional():
concepts = {foo: Optional("one")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one")
assert res.status
@@ -266,7 +276,7 @@ def test_i_can_parse_sequence_starting_with_optional():
concepts = {foo: Sequence(Optional("twenty"), "one")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "twenty one")
assert res.status
@@ -283,7 +293,7 @@ def test_i_can_parse_sequence_ending_with_optional():
concepts = {foo: Sequence("one", "two", Optional("three"))}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two three")
assert res.status
@@ -300,7 +310,7 @@ def test_i_can_parse_sequence_with_optional_in_between():
concepts = {foo: Sequence("one", Optional("two"), "three")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two three")
assert res.status
@@ -312,19 +322,16 @@ def test_i_can_parse_sequence_with_optional_in_between():
def test_i_can_use_reference():
# The problem here is when there are multiple match for the same input
# The parsing result is a list of all concepts found
# So it's already a list that represents a sequence, not a choice
# So I need to create a choice concept
# create the return value for every possible graph
# --> The latter seems to be the best as we don't defer the resolution of the problem to someone else
# when there are multiple matches for the same input
# Do I need to create a choice concept ?
# No, create a return value for every possible graph
context = get_context()
foo = Concept(name="foo")
bar = Concept(name="bar")
concepts = {foo: Sequence("one", "two"), bar: foo}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two")
assert len(res) == 2
@@ -350,7 +357,7 @@ def test_i_can_use_context_reference_with_multiple_levels():
concepts = {foo: Sequence("one", "two"), bar: foo, baz: bar}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two")
assert len(res) == 3
@@ -375,7 +382,7 @@ def test_order_is_not_important_when_using_references():
concepts = {bar: foo, foo: Sequence("one", "two")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "one two")
assert len(res) == 2
@@ -390,7 +397,7 @@ def test_i_can_parse_when_reference():
concepts = {bar: Sequence(foo, OrderedChoice("one", "two")), foo: OrderedChoice("twenty", "thirty")}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "twenty two")
assert res.status
@@ -415,7 +422,7 @@ def test_i_can_detect_duplicates_when_reference():
foo: OrderedChoice("twenty", "thirty")
}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
res = parser.parse(context, "twenty")
assert len(res) == 2
@@ -437,7 +444,7 @@ def test_i_can_detect_infinite_recursion():
foo: bar
}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(get_context(), concepts)
assert bar not in parser.concepts_dict
assert foo not in parser.concepts_dict
@@ -452,7 +459,7 @@ def test_i_can_detect_indirect_infinite_recursion_with_ordered_choice():
foo: OrderedChoice(bar, "foo")
}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(get_context(), concepts)
assert foo not in parser.concepts_dict # removed because of the infinite recursion
assert bar not in parser.concepts_dict # removed because of the infinite recursion
@@ -464,7 +471,7 @@ def test_i_can_detect_indirect_infinite_recursion_with_ordered_choice():
foo: OrderedChoice("foo", bar)
}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(context, concepts)
assert foo in parser.concepts_dict
assert bar in parser.concepts_dict
@@ -485,7 +492,7 @@ def test_i_can_detect_indirect_infinite_recursion_with_sequence():
foo: Sequence("one", bar, "two")
}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(get_context(), concepts)
assert foo not in parser.concepts_dict # removed because of the infinite recursion
assert bar not in parser.concepts_dict # removed because of the infinite recursion
@@ -500,7 +507,7 @@ def test_i_can_detect_indirect_infinite_recursion_with_sequence_or_ordered_choic
foo: Sequence("one", OrderedChoice(bar, "other"), "two")
}
parser = ConceptLexerParser()
parser.initialize(concepts)
parser.initialize(get_context(), concepts)
assert foo not in parser.concepts_dict # removed because of the infinite recursion
assert bar not in parser.concepts_dict # removed because of the infinite recursion
@@ -510,6 +517,140 @@ def test_i_can_detect_indirect_infinite_recursion_with_optional():
# TODO infinite recursion with optional
pass
@pytest.mark.parametrize("expression, expected", [
    ("'str'", StrMatch("str")),
    ("1", StrMatch("1")),
    (" 1", StrMatch("1")),
    (",", StrMatch(",")),
    ("'foo'?", Optional(StrMatch("foo"))),
    ("'foo'*", ZeroOrMore(StrMatch("foo"))),
    ("'foo'+", OneOrMore(StrMatch("foo"))),
    ("1 | 2 | 3", OrderedChoice(StrMatch("1"), StrMatch("2"), StrMatch("3"))),
    ("1|2|3", OrderedChoice(StrMatch("1"), StrMatch("2"), StrMatch("3"))),
    ("1 2 'foo'", Sequence(StrMatch("1"), StrMatch("2"), StrMatch("foo"))),
    ("1 2 | 3 4+", OrderedChoice(
        Sequence(StrMatch("1"), StrMatch("2")),
        Sequence(StrMatch("3"), OneOrMore(StrMatch("4"))))),
    ("1 (2 | 3) 4+", Sequence(StrMatch("1"), OrderedChoice(StrMatch("2"), StrMatch("3")), OneOrMore(StrMatch("4")))),
    ("(1|2)+", OneOrMore(OrderedChoice(StrMatch("1"), StrMatch("2")))),
    ("(1 2)+", OneOrMore(Sequence(StrMatch("1"), StrMatch("2")))),
    ("1 *", Sequence(StrMatch("1"), StrMatch("*"))),
    ("1 ?", Sequence(StrMatch("1"), StrMatch("?"))),
    ("1 +", Sequence(StrMatch("1"), StrMatch("+"))),
    ("(1|*) +", Sequence(OrderedChoice(StrMatch("1"), StrMatch("*")), StrMatch("+"))),
    ("1, :&", Sequence(StrMatch("1"), StrMatch(","), StrMatch(":"), StrMatch("&"))),
    ("(1 )", StrMatch("1")),
])
def test_i_can_parse_regex(expression, expected):
    """Each mini-grammar expression parses cleanly into the expected expression tree."""
    regex_parser = RegexParser()
    result = regex_parser.parse(get_context(), Tokenizer(expression))
    assert not regex_parser.has_error
    assert result.status
    # The result carries both the built expression tree and the original source text.
    assert result.value.value == expected
    assert result.value.source == expression
@pytest.mark.parametrize("expression, error", [
    ("1 ", UnexpectedEndOfFileError()),
    ("1|", UnexpectedEndOfFileError()),
    ("(1|)", UnexpectedTokenErrorNode("Unexpected token 'TokenKind.EOF'", [TokenKind.RPAR])),
])
def test_i_can_detect_errors(expression, error):
    """Malformed expressions mark the parser as errored and surface the expected error node."""
    regex_parser = RegexParser()
    result = regex_parser.parse(get_context(), Tokenizer(expression))
    assert regex_parser.has_error
    assert not result.status
    # The first element of the returned value list is the error node itself.
    assert result.value.value[0] == error
def test_i_can_parse_regex_with_reference():
    """A bare identifier parses as a cross-reference to another concept."""
    source = "foo"
    result = RegexParser().parse(get_context(), Tokenizer(source))
    assert result.status
    assert result.value.value == ConceptMatch("foo")
    assert result.value.source == source
def test_i_can_parse_cross_ref_with_modifier():
    """A concept reference accepts repetition modifiers (here: zero-or-more)."""
    source = "foo*"
    result = RegexParser().parse(get_context(), Tokenizer(source))
    assert result.status
    assert result.value.value == ZeroOrMore(ConceptMatch("foo"))
    assert result.value.source == source
def test_i_can_parse_sequence_with_cross_ref():
    """Cross-references, string matches, and modifiers can be mixed in one sequence."""
    source = "foo 'and' bar+"
    result = RegexParser().parse(get_context(), Tokenizer(source))
    assert result.status
    assert result.value.value == Sequence(ConceptMatch("foo"), StrMatch("and"), OneOrMore(ConceptMatch("bar")))
    assert result.value.source == source
def test_i_can_parse_choice_with_cross_ref():
    """Concept references work as alternatives of an ordered choice."""
    foo = Concept("foo")
    bar = Concept("bar")
    context = get_context()
    # Register both concepts so the references resolve against the cache.
    for concept in (foo, bar):
        context.sheerka.add_in_cache(concept)
    source = "foo | bar?"
    result = RegexParser().parse(context, Tokenizer(source))
    assert result.status
    assert result.value.value == OrderedChoice(ConceptMatch("foo"), Optional(ConceptMatch("bar")))
    assert result.value.source == source
def test_i_can_use_the_result_of_regex_parsing_to_parse_a_text():
    """End-to-end: grammar trees produced by RegexParser drive the concept parser."""
    foo = Concept(name="foo")
    bar = Concept(name="bar")
    context = get_context()
    context.sheerka.add_in_cache(foo)
    context.sheerka.add_in_cache(bar)
    regex_parser = RegexParser()
    # foo matches a tens word; bar is a compound definition built on top of foo.
    foo_definition = regex_parser.parse(context, "'twenty' | 'thirty'").value.value
    bar_definition = regex_parser.parse(context, "foo ('one' | 'two')").value.value
    concept_parser = ConceptLexerParser()
    concept_parser.initialize(context, {bar: bar_definition, foo: foo_definition})
    cases = [
        ("twenty two", [ConceptNode(bar, 0, 2, source="twenty two")]),
        ("thirty one", [ConceptNode(bar, 0, 2, source="thirty one")]),
        ("twenty", [ConceptNode(foo, 0, 0, source="twenty")]),
    ]
    for text, expected_body in cases:
        result = concept_parser.parse(context, text)
        assert result.status
        assert result.value.body == expected_body
def test_i_can_visit_parsing_expression():
    """ConceptVisitor gathers every concept reachable inside a parsing expression."""
    mult = Concept(name="mult")
    add = Concept(name="add")
    collector = ConceptVisitor()
    collector.visit(Sequence(mult, Optional(Sequence("+", add))))
    assert sorted(collector.concepts) == ["add", "mult"]
#
# def test_i_can_parse_basic_arithmetic_operations_and_resolve_properties():
# context = get_context()