Implemented a first, basic version of a Rete rule engine
@@ -1,33 +1,12 @@
import logging
from dataclasses import dataclass
from typing import Union

from core.builtin_concepts import BuiltinConcepts, ParserResultConcept
from core.concept import Concept
from core.error import ErrorObj
from core.global_symbols import ErrorObj
from core.sheerka.ExecutionContext import ExecutionContext
from core.sheerka.services.SheerkaExecute import ParserInput
from core.sheerka_logger import get_logger
from core.tokenizer import TokenKind, Token, Tokenizer, LexerError


# # keep a cache for the parser input
# pi_cache = Cache(default=lambda key: ParserInput(key), max_size=20)
#
#
# def get_parser_input(text, tokens=None, length=None):
#     """
#     Returns new or existing parser input
#     :param text:
#     :param tokens:
#     :param length:
#     :return:
#     """
#     if tokens is None or pi_cache.has(text):
#         return pi_cache.get(text)
#     pi = ParserInput(text, tokens, length)
#     pi_cache.put(text, pi)
#     return pi
from core.tokenizer import TokenKind, Token, LexerError


@dataclass()
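
The commented-out caching helper removed in this hunk memoizes ParserInput objects by their source text, relying on the Cache default factory to build a bare ParserInput(text) whenever get() misses. A minimal, runnable sketch of that pattern is below; this Cache class is a hypothetical stand-in inferred from the Cache(default=..., max_size=20) call above, not the project's actual implementation.

from collections import OrderedDict


class Cache:
    """Hypothetical LRU-style cache matching the Cache(default=..., max_size=...)
    usage in the removed code; not the project's real implementation."""

    def __init__(self, default=None, max_size=20):
        self._store = OrderedDict()
        self._default = default      # factory called on a miss in get()
        self._max_size = max_size

    def has(self, key):
        return key in self._store

    def get(self, key):
        if key not in self._store:
            if self._default is None:
                raise KeyError(key)
            self.put(key, self._default(key))  # build and cache via the factory
        self._store.move_to_end(key)           # mark as most recently used
        return self._store[key]

    def put(self, key, value):
        self._store[key] = value
        self._store.move_to_end(key)
        if len(self._store) > self._max_size:  # evict the least recently used entry
            self._store.popitem(last=False)


# usage mirroring the removed helper:
# pi_cache = Cache(default=lambda key: ParserInput(key), max_size=20)
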
@@ -35,13 +14,6 @@ class Node:
    pass


class NotInitializedNode(Node):
    pass

    def __repr__(self):
        return "**N/A**"


@dataclass()
class ParsingError(Node, ErrorObj):
    pass
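
As a usage illustration (not part of the commit itself), the custom __repr__ makes an uninitialized parse slot print as **N/A** rather than a default dataclass repr:

# hypothetical usage of the sentinel node above
slot = NotInitializedNode()
print(f"parsed value: {slot!r}")  # -> parsed value: **N/A**
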
@@ -206,17 +178,6 @@ class BaseParser:

        return parser_input.value

    # @staticmethod
    # def manage_eof(lst, strip_eof):
    #     if strip_eof:
    #         if len(lst) and lst[-1].type == TokenKind.EOF:
    #             lst.pop()
    #         return lst
    #
    #     if len(lst) == 0 or not lst[-1].type == TokenKind.EOF:
    #         lst.append(Token(TokenKind.EOF, "", -1, -1, -1))
    #     return lst

    @staticmethod
    def get_tokens_boundaries(tokens):
        """