Implemented a first, basic version of a Rete rule engine

This commit is contained in:
2021-02-09 16:06:32 +01:00
parent 821dbed189
commit a2a8d5c5e5
110 changed files with 7301 additions and 1654 deletions
+31 -2
View File
@@ -1,7 +1,8 @@
from dataclasses import dataclass
import core.utils
import pytest
import core.utils
from core.builtin_concepts import BuiltinConcepts
from core.builtin_helpers import evaluate_expression
from core.concept import Concept
@@ -111,7 +112,7 @@ def test_i_can_get_sub_classes():
([['a'], ['b']], ['c', 'd', 'e'], [['a', 'c'], ['b', 'c'], ['a', 'd'], ['b', 'd'], ['a', 'e'], ['b', 'e']]),
])
def test_i_can_product(a, b, expected):
    """The cartesian-product helper yields every pairing of *a* and *b*.

    Fix: the body carried a leftover pre-rename call to ``core.utils.product``
    (diff residue) whose result was immediately overwritten — only the renamed
    ``sheerka_product`` call is kept.
    """
    res = core.utils.sheerka_product(a, b)
    assert res == expected
@@ -425,3 +426,31 @@ def test_i_can_deep_copy_a_custom_type():
assert core.utils.sheerka_deepcopy(NotInit) is NotInit
assert core.utils.sheerka_deepcopy(NotFound) is NotFound
assert core.utils.sheerka_deepcopy(Removed) is Removed
@pytest.mark.parametrize("expression1, expression2, expected", [
    ("foo bar baz", "foo bar baz", True),
    ("foo()", " foo ( ) ", True),
    ("is_instance()", "is _ instance()", False),
    ("foo bar baz", "foo bar", False)
])
def test_tokens_are_matching(expression1, expression2, expected):
    """Token-stream comparison is whitespace-insensitive but boundary-sensitive."""
    tokens1 = Tokenizer(expression1)
    tokens2 = Tokenizer(expression2)
    assert core.utils.tokens_are_matching(tokens1, tokens2) == expected
def test_tokens_are_matching_when_no_eof():
    """Streams of different lengths do not match, even with EOF tokens disabled."""
    longer = Tokenizer("foo bar baz", yield_eof=False)
    shorter = Tokenizer("foo bar", yield_eof=False)
    result = core.utils.tokens_are_matching(longer, shorter)
    assert not result
def test_tokens_are_matching_when_eof_differs():
    """Identical token content still matches when only one stream yields EOF."""
    with_eof = Tokenizer("foo bar baz", yield_eof=True)
    without_eof = Tokenizer("foo bar baz", yield_eof=False)
    assert core.utils.tokens_are_matching(with_eof, without_eof)