Implemented a first, basic version of a Rete rule engine

2021-02-09 16:06:32 +01:00
parent 821dbed189
commit a2a8d5c5e5
110 changed files with 7301 additions and 1654 deletions
+33 -4
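The Rete engine itself is not part of the excerpt below; as a rough orientation, here is a minimal sketch of the Rete idea the commit message refers to (all names are hypothetical, not this repository's API): facts pass once through per-condition alpha memories, and a rule activates only when every one of its memories holds at least one match, so nothing is re-evaluated from scratch when a new fact arrives.

class AlphaMemory:
    """Remembers every fact that satisfies one condition of a rule."""
    def __init__(self, condition):
        self.condition = condition
        self.matches = []

    def add(self, fact):
        if self.condition(fact):
            self.matches.append(fact)
            return True
        return False


class Rule:
    """Activates when each of its alpha memories holds at least one matching fact."""
    def __init__(self, name, conditions, action):
        self.name = name
        self.alphas = [AlphaMemory(cond) for cond in conditions]
        self.action = action

    def insert(self, fact):
        # Feed the fact to every alpha memory; a list comprehension (not any())
        # is used so no memory is skipped by short-circuiting.
        added = [alpha.add(fact) for alpha in self.alphas]
        if any(added) and all(alpha.matches for alpha in self.alphas):
            self.action([alpha.matches for alpha in self.alphas])


# Example: a rule that fires once both a hot-temperature fact and a high-pressure fact exist.
alert = Rule(
    "alert",
    [lambda f: f.get("temperature", 0) > 100, lambda f: f.get("pressure", 0) > 5],
    lambda matches: print("alert fired:", matches),
)
for fact in ({"temperature": 120}, {"pressure": 7}):
    alert.insert(fact)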
@@ -3,12 +3,14 @@ import importlib
import inspect
import os
import pkgutil
import re
from copy import deepcopy
from pyparsing import *
# from pyparsing import *
from pyparsing import Literal, Word, nums, Combine, Optional, delimitedList, oneOf, alphas, Suppress
from core.global_symbols import CustomType
from core.tokenizer import TokenKind, Tokenizer
from core.tokenizer import TokenKind, Tokenizer, Token
COLORS = {
"black",
@@ -249,7 +251,7 @@ def make_unique(lst, get_id=None):
    return list(_make_unique(lst, get_id))
def product(a, b):
def sheerka_product(a, b):
"""
Kind of cartesian product between lists a and b
knowing that a is also a list : a is a list of list !!!
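# Hypothetical illustration (not part of the commit) of the behaviour the docstring
# describes, assuming each sub-list of a is extended by each element of b:
def sheerka_product_sketch(a, b):
    return [sub + [item] for sub in a for item in b]
# e.g. sheerka_product_sketch([[1], [2]], [3, 4]) == [[1, 3], [1, 4], [2, 3], [2, 4]]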
@@ -569,7 +571,7 @@ def as_bag(obj, forced_properties=None):
"""
Get the properties of an object (static and dynamic)
:param obj:
:param forced_properties:
:param forced_properties: special mode where properties are given in parameter
:return:
"""
@@ -638,6 +640,33 @@ def get_text_from_tokens(tokens, custom_switcher=None, tracker=None):
    return res
def tokens_are_matching(tokens1, tokens2, skip_tokens=True):
    """Compare two token streams; when skip_tokens is True, whitespace tokens are ignored."""
    def get_next(it):
        try:
            return next(it)
        except StopIteration:
            return Token(TokenKind.EOF, "", -1, -1, -1)
    iter1 = iter(tokens1)
    iter2 = iter(tokens2)
    while True:
        t1 = get_next(iter1)
        t2 = get_next(iter2)
        if skip_tokens:
            # Use get_next (not next) so an exhausted stream yields EOF instead of raising
            # StopIteration, and loop to skip runs of consecutive whitespace tokens.
            while t1.type == TokenKind.WHITESPACE:
                t1 = get_next(iter1)
            while t2.type == TokenKind.WHITESPACE:
                t2 = get_next(iter2)
        if t1.type == TokenKind.EOF and t2.type == TokenKind.EOF:
            return True
        if t1.type != t2.type or t1.value != t2.value:
            return False
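# Usage sketch (illustration only, not part of the commit). TokenKind.IDENTIFIER is an
# assumed enum member; the real kinds come from core.tokenizer, and Token's positional
# arguments follow the Token(kind, value, -1, -1, -1) pattern used above.
stream_a = [Token(TokenKind.WHITESPACE, " ", 0, 0, 0),
            Token(TokenKind.IDENTIFIER, "x", 1, 0, 1)]
stream_b = [Token(TokenKind.IDENTIFIER, "x", 0, 0, 0)]
assert tokens_are_matching(stream_a, stream_b)                        # whitespace skipped
assert not tokens_are_matching(stream_a, stream_b, skip_tokens=False)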
def dump_ast(node):
    dump = ast.dump(node)
    for to_remove in [", ctx=Load()", ", kind=None", ", type_ignores=[]"]:
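# Illustration of the same idea (the original dump_ast body continues past this excerpt):
# strip noisy default fields from ast.dump output so two ASTs compare cleanly as strings.
import ast

def dump_ast_sketch(node):
    dump = ast.dump(node)
    for to_remove in [", ctx=Load()", ", kind=None", ", type_ignores=[]"]:
        dump = dump.replace(to_remove, "")
    return dump
# e.g. dump_ast_sketch(ast.parse("x + 1")) contains no ", ctx=Load()" noise.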