Added first version of DebugManager. Implemented a draft of the rule engine.
This commit is contained in:
@@ -0,0 +1,88 @@
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.rule import Rule, ACTION_TYPE_DEFERRED
|
||||
from core.sheerka.services.SheerkaExecute import ParserInput
|
||||
from core.tokenizer import LexerError, TokenKind
|
||||
from parsers.BaseParser import BaseParser, ErrorNode, UnexpectedTokenErrorNode
|
||||
|
||||
|
||||
class RuleNotFound(ErrorNode):
    """Error node signalling that a referenced rule could not be resolved.

    Wraps the ``(key, id)`` pair carried by a RULE token whose id did not
    match any known rule.
    """

    def __init__(self, id_as_tuple):
        """
        :param id_as_tuple: two-element sequence ``(key, id)`` taken from the
            token's ``value``.
        """
        # NOTE(review): ErrorNode.__init__ is not invoked here — confirm the
        # base class requires no initialisation of its own.
        self.key = id_as_tuple[0]
        self.id = id_as_tuple[1]

    def __repr__(self):
        # Fixed: the original f-string lacked the closing parenthesis,
        # producing an unbalanced "RuleNotFound(...)" representation.
        return f"RuleNotFound(id={self.id}, key={self.key})"
|
||||
|
||||
|
||||
class RuleParser(BaseParser):
    """Parser that recognizes a single rule reference in its input."""

    NAME = "Rule"

    def __init__(self, **kwargs):
        # Registers under the fixed name "Rule" with priority 80; extra
        # keyword arguments are accepted but unused.
        super().__init__(RuleParser.NAME, 80)

    def parse(self, context, parser_input: ParserInput):
        """Attempt to parse *parser_input* as exactly one rule reference.

        The input may wrap either a raw string or a list of tokens.

        :param context: execution context providing ``log`` and the
            ``sheerka`` engine.
        :param parser_input: input to parse; must contain a single RULE token.
        :return: the value of ``sheerka.ret`` — parser name, success flag and
            a concept body describing the outcome.
        """
        context.log(f"Parsing '{parser_input}'", self.name)
        engine = context.sheerka

        # Nothing to parse — report emptiness immediately.
        if parser_input.is_empty():
            return engine.ret(self.name, False,
                              engine.new(BuiltinConcepts.IS_EMPTY))

        try:
            parser_input.reset()
            parser_input.next_token()

            # This parser only handles RULE tokens; decline anything else.
            if parser_input.token.type != TokenKind.RULE:
                declined = engine.new(BuiltinConcepts.NOT_FOR_ME,
                                      body=parser_input.as_text())
                return engine.ret(self.name, False, declined)

            rule_token = parser_input.token

            # Any trailing token means more than one rule was supplied.
            if parser_input.next_token():
                reason = UnexpectedTokenErrorNode("Only one rule supported",
                                                  parser_input.token,
                                                  [TokenKind.EOF])
                declined = engine.new(BuiltinConcepts.NOT_FOR_ME,
                                      body=parser_input.as_text(),
                                      reason=reason)
                return engine.ret(self.name, False, declined)

            # The token value is a (key, id) pair; a missing id is not
            # supported yet.
            rule_id = rule_token.value[1]
            if rule_id is None:
                return engine.ret(self.name, False,
                                  engine.new(BuiltinConcepts.NOT_IMPLEMENTED))

            if rule_id.isdigit():
                # Numeric id: resolve against the engine's rule registry.
                rule = engine.get_rule_by_id(rule_id)
            else:
                # Symbolic id: build a placeholder rule resolved later.
                rule = Rule().set_id(rule_id)
                rule.metadata.action_type = ACTION_TYPE_DEFERRED

            # Lookup may yield an UNKNOWN_RULE concept — surface it as an error.
            if engine.isinstance(rule, BuiltinConcepts.UNKNOWN_RULE):
                error = engine.new(BuiltinConcepts.ERROR,
                                   body=[RuleNotFound(rule_token.value)])
                return engine.ret(self.name, False, error)

            result = engine.new(BuiltinConcepts.PARSER_RESULT,
                                parser=self,
                                source=parser_input.as_text(),
                                body=[rule],
                                try_parsed=[rule])
            return engine.ret(self.name, True, result)

        except LexerError as e:
            context.log(f"Error found in tokenizer {e}", self.name)
            return engine.ret(self.name, False,
                              engine.new(BuiltinConcepts.ERROR, body=e))
|
||||
Reference in New Issue
Block a user