Handle infinite recursion when building a concept

This commit is contained in:
2020-12-10 21:08:10 +01:00
parent 4b6e1dd55b
commit 657c7536f7
28 changed files with 816 additions and 446 deletions
+38 -40
View File
@@ -1,7 +1,7 @@
from core.builtin_concepts import BuiltinConcepts
from core.rule import Rule, ACTION_TYPE_DEFERRED
from core.sheerka.services.SheerkaExecute import ParserInput
from core.tokenizer import LexerError, TokenKind
from core.tokenizer import TokenKind
from parsers.BaseParser import BaseParser, ErrorNode, UnexpectedTokenErrorNode
@@ -40,49 +40,47 @@ class RuleParser(BaseParser):
False,
sheerka.new(BuiltinConcepts.IS_EMPTY))
try:
parser_input.reset()
if not self.reset_parser(context, parser_input):
error = self.error_sink[0]
context.log(f"Error found in tokenizer {error}", self.name)
return sheerka.ret(self.name, False, sheerka.new(BuiltinConcepts.ERROR, body=error))
parser_input.next_token()
if parser_input.token.type != TokenKind.RULE:
return sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.NOT_FOR_ME, body=parser_input.as_text()))
parser_input.next_token()
if parser_input.token.type != TokenKind.RULE:
return sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.NOT_FOR_ME, body=parser_input.as_text()))
token = parser_input.token
token = parser_input.token
if parser_input.next_token():
reason = UnexpectedTokenErrorNode("Only one rule supported",
parser_input.token,
[TokenKind.EOF])
return sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.NOT_FOR_ME, body=parser_input.as_text(), reason=reason))
if parser_input.next_token():
reason = UnexpectedTokenErrorNode("Only one rule supported",
parser_input.token,
[TokenKind.EOF])
return sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.NOT_FOR_ME, body=parser_input.as_text(), reason=reason))
if token.value[1] is None:
return sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.NOT_IMPLEMENTED))
if token.value[1] is None:
return sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.NOT_IMPLEMENTED))
if token.value[1].isdigit():
rule = sheerka.get_rule_by_id(token.value[1])
else:
rule = Rule().set_id(token.value[1])
rule.metadata.action_type = ACTION_TYPE_DEFERRED
if token.value[1].isdigit():
rule = sheerka.get_rule_by_id(token.value[1])
else:
rule = Rule().set_id(token.value[1])
rule.metadata.action_type = ACTION_TYPE_DEFERRED
if sheerka.isinstance(rule, BuiltinConcepts.UNKNOWN_RULE):
return sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.ERROR,
body=[RuleNotFound(token.value)]))
body = sheerka.new(BuiltinConcepts.PARSER_RESULT,
parser=self,
source=parser_input.as_text(),
body=[rule],
try_parsed=[rule])
if sheerka.isinstance(rule, BuiltinConcepts.UNKNOWN_RULE):
return sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.ERROR,
body=[RuleNotFound(token.value)]))
body = sheerka.new(BuiltinConcepts.PARSER_RESULT,
parser=self,
source=parser_input.as_text(),
body=[rule],
try_parsed=[rule])
return sheerka.ret(self.name, True, body)
except LexerError as e:
context.log(f"Error found in tokenizer {e}", self.name)
return sheerka.ret(self.name, False, sheerka.new(BuiltinConcepts.ERROR, body=e))
return sheerka.ret(self.name, True, body)