Implemented a first, basic version of a Rete rule engine

2021-02-09 16:06:32 +01:00
parent 821dbed189
commit a2a8d5c5e5
110 changed files with 7301 additions and 1654 deletions
+2 -33
@@ -139,6 +139,7 @@ class LexerError(Exception):
class Keywords(Enum):
    DEF = "def"
    CONCEPT = "concept"
    RULE = "rule"
    FROM = "from"
    BNF = "bnf"
    AS = "as"
@@ -149,6 +150,7 @@ class Keywords(Enum):
    RET = "ret"
    WHEN = "when"
    PRINT = "print"
    THEN = "then"


class Tokenizer:
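The two hunks above add RULE and THEN to the Keywords enum, next to the existing WHEN, which suggests the lexer now recognizes a rule / when / then construct for the new engine. As an illustration only, a minimal sketch of how a keyword Enum of this shape is typically used to classify identifiers during tokenization; the KEYWORD_VALUES table and the classify helper are assumptions and not taken from this repository:

from enum import Enum


class Keywords(Enum):
    RULE = "rule"
    WHEN = "when"
    THEN = "then"


# Reverse lookup table: keyword text -> enum member.
KEYWORD_VALUES = {kw.value: kw for kw in Keywords}


def classify(word: str):
    """Return the Keywords member for a reserved word, or None for a plain identifier."""
    return KEYWORD_VALUES.get(word)


assert classify("rule") is Keywords.RULE
assert classify("foo") is None

The benefit of the reverse map is that adding a keyword to the Enum (as this commit does with RULE and THEN) is the only change needed for the tokenizer to pick it up.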
@@ -557,36 +559,3 @@ class IterParser:
            return token_after
        except StopIteration:
            return Token(TokenKind.EOF, -1, -1, -1, -1)
# @dataclass
# class PropDef:
#     prop: str
#     index: int
#
#
# class SimpleExpressionParser(IterParser):
#     def __init__(self, source):
#         super().__init__(source)
#         self.properties = []
#
#     def parse(self):
#
#         prop, index, key = None, None, None
#         while self.next_token():
#             if self.token.type == TokenKind.DOT:
#                 self.properties.append(PropDef(prop, index, key))
#                 prop, index, key = None, None, None
#                 continue
#
#             if self.token.type == TokenKind.LBRACKET:
#                 index = self.parse_index()
#             elif self.token.type == TokenKind.LBRACE:
#                 key = self.parse_key()
#             else:
#                 prop = self.token.value
#
#         if prop is not None:
#             self.properties.append(PropDef(prop, index, key))
#
#     def parse_i
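The commit message describes a first, basic version of a Rete rule engine, but the engine itself is not visible in this hunk. Purely as an illustration of the core Rete idea, here is a minimal Python sketch: one alpha memory per condition is updated incrementally as facts are asserted, and a production fires once every one of its conditions has at least one match. All names here (Fact, Condition, Production, ReteEngine) are hypothetical and not taken from this codebase; a full Rete network would additionally share alpha memories across rules and join partial matches with variable bindings in beta nodes, which this sketch omits.

from dataclasses import dataclass
from typing import Callable, List


@dataclass(frozen=True)
class Fact:
    # A working-memory element as a (kind, attribute, value) triple.
    kind: str
    attr: str
    value: object


@dataclass(frozen=True)
class Condition:
    # An alpha-network test; value=None acts as a wildcard.
    kind: str
    attr: str
    value: object = None

    def matches(self, fact: Fact) -> bool:
        return (self.kind == fact.kind
                and self.attr == fact.attr
                and (self.value is None or self.value == fact.value))


class Production:
    # A rule: "when" every condition has a match, "then" run the action once.
    def __init__(self, name: str, conditions: List[Condition], action: Callable[[], None]):
        self.name = name
        self.conditions = conditions
        self.action = action
        self.memories = [set() for _ in conditions]  # one alpha memory per condition
        self.fired = False


class ReteEngine:
    # Facts are pushed through the conditions as they are asserted, so each
    # production's partial matches are kept incrementally instead of being
    # recomputed from the whole working memory on every change.
    def __init__(self):
        self.productions: List[Production] = []

    def add_production(self, production: Production) -> None:
        self.productions.append(production)

    def assert_fact(self, fact: Fact) -> None:
        for production in self.productions:
            for condition, memory in zip(production.conditions, production.memories):
                if condition.matches(fact):
                    memory.add(fact)
            if not production.fired and all(production.memories):
                production.fired = True
                production.action()


# Usage: a single rule that fires once both of its conditions are satisfied.
engine = ReteEngine()
engine.add_production(Production(
    name="greet-adults",
    conditions=[Condition("person", "age", 42), Condition("person", "name")],
    action=lambda: print("rule fired"),
))
engine.assert_fact(Fact("person", "age", 42))
engine.assert_fact(Fact("person", "name", "Ada"))  # second condition matched -> rule fires here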