Fixed a bug when evaluating numbers several times

2020-11-27 11:01:02 +01:00
parent cac732bd93
commit 4f899280c4
16 changed files with 887 additions and 491 deletions
-19
@@ -294,27 +294,8 @@ class BnfDefinitionParser(BaseParser):
expression.rule_name = token.value
self.next_token()
if BnfDefinitionParser.is_expression_a_set(self.context, expression):
root_concept = self.context.search(start_with_self=True,
predicate=lambda ec: ec.action == BuiltinConcepts.INIT_BNF,
get_obj=lambda ec: ec.action_context,
stop=lambda ec: ec.action == BuiltinConcepts.INIT_BNF)
root_concept = list(root_concept)
if root_concept and hasattr(root_concept[0], "id"):
expression.recurse_id = expression.get_recurse_id(root_concept[0].id,
expression.concept.id,
expression.rule_name)
return expression
@staticmethod
def is_expression_a_set(context, expression):
return isinstance(expression, ConceptExpression) and context.sheerka.isaset(context, expression.concept)
@staticmethod
def update_recurse_id(context, concept_id, expression):
if BnfDefinitionParser.is_expression_a_set(context, expression):
expression.recurse_id = expression.get_recurse_id(concept_id, expression.concept.id, expression.rule_name)
for element in expression.elements:
BnfDefinitionParser.update_recurse_id(context, concept_id, element)
+197 -170
@@ -17,8 +17,8 @@ from core.builtin_concepts import BuiltinConcepts
from core.concept import DEFINITION_TYPE_BNF, DoNotResolve, ConceptParts, Concept
from core.sheerka.services.SheerkaExecute import ParserInput
from core.tokenizer import Tokenizer, TokenKind, Token
from core.utils import CONSOLE_COLORS_MAP as CCM
from parsers.BaseNodeParser import BaseNodeParser, GrammarErrorNode, UnrecognizedTokensNode, ConceptNode, LexerNode
from parsers.BaseParser import BaseParser
PARSERS = ["Sequence", "Sya", "Python"]
@@ -50,6 +50,10 @@ class ParsingContext:
res.append(self.clone())
return res
def __repr__(self):
res = f"ParsingContext('{self.node.get_debug()}', pos={self.pos})"
return res
class NonTerminalNode(LexerNode):
"""
@@ -86,6 +90,10 @@ class NonTerminalNode(LexerNode):
clone = NonTerminalNode(self.parsing_expression, self.start, self.end, self.tokens, self.children.copy())
return clone
def get_debug(self):
res = f"{self.parsing_expression.concept}=>" if isinstance(self.parsing_expression, ConceptExpression) else ""
return res + ".".join([c.get_debug() for c in self.children])
class TerminalNode(LexerNode):
"""
@@ -118,6 +126,9 @@ class TerminalNode(LexerNode):
clone = TerminalNode(self.parsing_expression, self.start, self.end, self.value)
return clone
def get_debug(self):
return self.value
class MultiNode:
"""
@@ -155,8 +166,6 @@ class ParsingExpression:
def __init__(self, *args, **kwargs):
self.elements = args
self.debug_enabled = False
self._has_unordered_choice = None
nodes = kwargs.get('nodes', []) or []
if not hasattr(nodes, '__iter__'):
@@ -184,45 +193,19 @@ class ParsingExpression:
def __hash__(self):
return hash((self.rule_name, self.elements))
def parse(self, parser):
def parse(self, parser_helper):
# TODO : add memoization
if self.debug_enabled:
self.debug(f">> {parser.pos:3d} : {self}")
# parser_helper.debugger.debug_log(f">> {parser_helper.pos:3d} : {self}")
# if self.debug_enabled:
# self.debug(f">> {parser_helper.pos:3d} : {self}")
res = self._parse(parser)
res = self._parse(parser_helper)
return res
def add_rule_name_if_needed(self, text):
return text + "=" + self.rule_name if self.rule_name else text
def has_unordered_choice(self):
if self._has_unordered_choice is None:
visitor = HasUnorderedChoiceVisitor()
visitor.visit(self)
self._has_unordered_choice = visitor.value
return self._has_unordered_choice
def debug(self, msg):
self.log_sink.append((id(self), msg))
def get_debug(self):
if not self.debug_enabled:
return None
# search for the first debug line for the current pexpression
id_self = id(self)
for i, line in enumerate(self.log_sink):
if line[0] == id_self:
break
else:
return ""
n, debug = self.inner_get_debug(i, "")
self.log_sink.clear()
return debug
def inner_get_debug(self, n, tab=""):
"""
@@ -275,6 +258,13 @@ class ParsingExpression:
return n, debug
@staticmethod
def debug_prefix(self_name, parser_helper):
current_rule_name = parser_helper.get_current_rule_name()
current_concept = parser_helper.concepts[-1]
str_rule_name = f":{current_rule_name}" if current_rule_name not in (None, current_concept.name) else ""
return f"{self_name}({current_concept}{str_rule_name})"
class ConceptExpression(ParsingExpression):
"""
@@ -284,10 +274,9 @@ class ConceptExpression(ParsingExpression):
When the grammar is created, it is replaced by the actual concept
"""
def __init__(self, concept, rule_name="", recurse_id=None, nodes=None):
def __init__(self, concept, rule_name="", nodes=None):
super().__init__(rule_name=rule_name, nodes=nodes)
self.concept = concept
self.recurse_id = recurse_id
def __repr__(self):
return self.add_rule_name_if_needed(f"{self.concept}")
@@ -299,10 +288,6 @@ class ConceptExpression(ParsingExpression):
if not isinstance(other, ConceptExpression):
return False
# TODO: enable self.recurse_id once it is correctly implemented
# if self.recurse_id != other.recurse_id:
# return False
if isinstance(self.concept, Concept):
return self.concept.id == other.concept.id
@@ -313,7 +298,17 @@ class ConceptExpression(ParsingExpression):
return hash((self.concept, self.rule_name))
def _parse(self, parser_helper):
parser_helper.rules_names.append(self.rule_name)
parser_helper.push_concept(self.concept)
# parser_helper.debug_concept(self.debug_prefix("ConceptExpression", parser_helper) + "=start")
node = self.nodes[0].parse(parser_helper)
# parser_helper.debug_concept(self.debug_prefix("ConceptExpression", parser_helper) + "=end")
parser_helper.pop_concept()
parser_helper.rules_names.pop()
if node is None:
return None
@@ -327,7 +322,7 @@ class ConceptExpression(ParsingExpression):
[node])
@staticmethod
def get_recurse_id(parent_id, concept_id, rule_name):
def get_recursion_id(parent_id, concept_id, rule_name):
return f"{parent_id}#{concept_id}({rule_name})"
@@ -340,6 +335,9 @@ class Sequence(ParsingExpression):
init_pos = parser_helper.pos
end_pos = parser_helper.pos
if parser_helper.debugger.is_enabled():
debug_prefix = self.debug_prefix("Sequence", parser_helper)
parser_helper.debug_concept(debug_prefix, nodes=self.nodes)
ntn = NonTerminalNode(self,
init_pos,
end_pos,
@@ -351,10 +349,14 @@ class Sequence(ParsingExpression):
for e in self.nodes:
for pcontext in parsing_contexts:
if parser_helper.debugger.is_enabled():
parser_helper.debug_concept(debug_prefix, node=e, pcontext=pcontext)
parser_helper.seek(pcontext.pos)
node = e.parse(parser_helper)
if node is None:
to_remove.append(pcontext)
elif isinstance(node, MultiNode):
clones = pcontext * len(node.results) # clones pcontext (but first item is pcontext)
to_append.extend(clones[1:])
@@ -373,8 +375,8 @@ class Sequence(ParsingExpression):
parsing_contexts.extend(to_append)
if len(parsing_contexts) == 0:
if self.debug_enabled:
self.debug(f"<< Failed matching {e}")
if parser_helper.debugger.is_enabled():
parser_helper.debug_concept(debug_prefix + " All pcontexts failed. Sequence failed")
return None
to_append.clear()
@@ -388,12 +390,10 @@ class Sequence(ParsingExpression):
pcontext.fix_tokens(parser_helper)
if len(parsing_contexts) == 1:
if self.debug_enabled:
self.debug(f"<< Found match '{parsing_contexts[0].node.source}'")
# parser_helper.debugger.debug_log(f"<< Found match '{parsing_contexts[0].node.source}'")
return parsing_contexts[0].node
if self.debug_enabled:
self.debug(f"<< Found matches {[r.node.source for r in parsing_contexts]}")
# parser_helper.debugger.debug_log(f"<< Found matches {[r.node.source for r in parsing_contexts]}")
return MultiNode(parsing_contexts)
def __repr__(self):
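The MultiNode branch above relies on ParsingContext.__mul__ returning a list whose first element is the original context, followed by clones (see the clone() loop in the ParsingContext hunk). A self-contained sketch of that contract, with a stub standing in for the real class:

# Stub illustrating the multiply-to-fork contract that
# Sequence._parse assumes; not the real ParsingContext.
class StubContext:
    def __init__(self, pos):
        self.pos = pos

    def clone(self):
        return StubContext(self.pos)

    def __mul__(self, n):
        # first item is self, the rest are clones
        return [self] + [self.clone() for _ in range(n - 1)]

ctx = StubContext(pos=3)
forks = ctx * 3
assert forks[0] is ctx and len(forks) == 3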
@@ -440,9 +440,18 @@ class UnOrderedChoice(ParsingExpression):
init_pos = parser_helper.pos
parsing_contexts = []
if parser_helper.debugger.is_enabled():
debug_prefix = self.debug_prefix("UnOrderedChoice", parser_helper)
parser_helper.debug_concept(debug_prefix)
debug_text = ""
for e in self.nodes:
if isinstance(e, ConceptExpression) and e.concept.id in parser_helper.get_concepts_ids():
# avoid circular reference
continue
node = e.parse(parser_helper)
if node:
debug_text += CCM["green"] + str(e) + CCM["reset"] + ", "
if isinstance(node, MultiNode):
node.combine(self)
parsing_contexts.extend(node.results)
@@ -453,8 +462,13 @@ class UnOrderedChoice(ParsingExpression):
parser_helper.parser.parser_input.tokens[init_pos: node.end + 1],
[node])
parsing_contexts.append(ParsingContext(tn, parser_helper.pos))
else:
debug_text += f"{e}, "
parser_helper.seek(init_pos) # backtrack
if parser_helper.debugger.is_enabled():
parser_helper.debug_concept(debug_prefix, raw=f"[{debug_text}]")
if len(parsing_contexts) == 0:
return None
@@ -675,18 +689,22 @@ class StrMatch(Match):
def _parse(self, parser_helper):
token = parser_helper.get_token()
if parser_helper.debugger.is_enabled():
debug_prefix = self.debug_prefix("StrMatch", parser_helper)
debug_text = f"pos={parser_helper.pos}, to_match={self.to_match}, token={token.str_value}"
m = token.str_value.lower() == self.to_match.lower() if self.ignore_case \
else token.strip_quote == self.to_match
if m:
if self.debug_enabled:
self.debug(f"pos={parser_helper.pos}, token={token.str_value}, to_match={self.to_match} => Matched")
if parser_helper.debugger.is_enabled():
parser_helper.debug_concept(debug_prefix, raw=f"{CCM['green']}{debug_text}{CCM['reset']}")
node = TerminalNode(self, parser_helper.pos, parser_helper.pos, token.str_value)
parser_helper.next_token(self.skip_white_space)
return node
if self.debug_enabled:
self.debug(f"pos={parser_helper.pos}, token={token.str_value}, to_match={self.to_match} => No Match")
if parser_helper.debugger.is_enabled():
parser_helper.debug_concept(debug_prefix, raw=f"{CCM['red']}{debug_text}{CCM['reset']}")
return None
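The predicate itself is untouched by this commit: case-insensitive rules compare lowercased token text against to_match, case-sensitive rules compare the unquoted token value. A standalone sketch, with the Token fields passed in as plain strings:

# Illustration of the StrMatch predicate; Token fields are
# passed as plain values instead of a Token instance.
def str_match(str_value, strip_quote, to_match, ignore_case):
    if ignore_case:
        return str_value.lower() == to_match.lower()
    return strip_quote == to_match

assert str_match("Hello", "Hello", "hello", ignore_case=True)
assert not str_match("Hello", "Hello", "hello", ignore_case=False)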
@@ -839,28 +857,17 @@ class BnfNodeConceptExpressionVisitor(ParsingExpressionVisitor):
self.references.append(pe.concept)
class HasUnorderedChoiceVisitor(ParsingExpressionVisitor):
def __init__(self):
super().__init__(lambda pe: pe.nodes, circular_ref_strategy="skip")
self.value = False
def __repr__(self):
return f"HasUnorderedChoiceVisitor(={self.value})"
def reset(self):
self.value = False
def visit_UnOrderedChoice(self, parsing_expression):
self.value = True
return ParsingExpressionVisitor.STOP
class BnfConceptParserHelper:
def __init__(self, parser):
def __init__(self, parser, debugger):
self.parser = parser
self.debug = []
self.errors = []
self.sequence = []
self.debugger = debugger
self.debug = [] # keep track of the tokens
self.errors = [] # sink of errors
self.sequence = [] # output. List of lexer nodes correctly parsed
self.concepts = [] # stack of concepts being processed (fed by ConceptExpression)
self.concepts_ids = [] # ids of the concepts, to speed up lookups
self.rules_names = [] # stack of concepts rules names
self.concept_depth = 0 # depth of concept (+1 for each ConceptExpression which is not an OrderedChoice)
self.unrecognized_tokens = UnrecognizedTokensNode(-1, -1, [])
self.has_unrecognized = False
@@ -872,7 +879,8 @@ class BnfConceptParserHelper:
self.pos = -1
def __repr__(self):
return f"BnfConceptParserHelper({self.sequence})"
concepts = [item.concept if isinstance(item, ConceptNode) else "***" for item in self.sequence]
return f"BnfConceptParserHelper({concepts})"
def __eq__(self, other):
if id(self) == id(other):
@@ -886,6 +894,26 @@ class BnfConceptParserHelper:
def __hash__(self):
return len(self.sequence) + len(self.errors)
def debug_concept(self, text, **kwargs):
if 0 < len(self.concepts) <= 2:  # guard against an empty concept stack
self.debugger.debug_concept(self.concepts[0], text, **kwargs)
def get_current_rule_name(self):
for rule_name in reversed(self.rules_names):
if rule_name:
return rule_name
def push_concept(self, concept):
self.concepts.append(concept)
self.concepts_ids.append(concept.id)
def pop_concept(self):
self.concepts.pop()
self.concepts_ids.pop()
def get_concepts_ids(self):
return self.concepts_ids
def get_token(self) -> Token:
return self.token
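get_current_rule_name walks the rules_names stack from the top and returns the first non-empty name, implicitly returning None when every entry is empty. The same logic in isolation:

# Standalone version of the rule-name lookup above.
def current_rule_name(rules_names):
    for rule_name in reversed(rules_names):
        if rule_name:
            return rule_name
    return None

assert current_rule_name(["numbers", "", "twenties"]) == "twenties"
assert current_rule_name(["", ""]) is None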
@@ -917,39 +945,45 @@ class BnfConceptParserHelper:
if self.is_locked():
return
self.debug.append(concept)
self.manage_unrecognized()
for forked in self.forked:
# handle the fact that some clones may have been forked
forked.eat_concept(concept, token)
# init
parsing_expression = self.parser.get_parsing_expression(self.parser.context, concept)
if not isinstance(parsing_expression, ParsingExpression):
try:
self.push_concept(concept)
self.debug.append(concept)
error_msg = f"Failed to parse concept '{concept}'"
if parsing_expression is not None:
error_msg += f". Reason: '{parsing_expression}'"
self.errors.append(GrammarErrorNode(error_msg))
return
self.pos = self.parser.parser_input.pos
self.token = self.parser.parser_input.tokens[self.pos]
self.manage_unrecognized()
for forked in self.forked:
# handle the fact that some clones may have been forked
forked.eat_concept(concept, token)
# parse
node = parsing_expression.parse(self)
# init
parsing_expression = self.parser.get_parsing_expression(self.parser.context, concept)
if not isinstance(parsing_expression, ParsingExpression):
self.debug.append(concept)
error_msg = f"Failed to parse concept '{concept}'"
if parsing_expression is not None:
error_msg += f". Reason: '{parsing_expression}'"
self.errors.append(GrammarErrorNode(error_msg))
return
if isinstance(node, MultiNode):
# when multiple choices are found, use the longest result
node = node.results[0].node
if node is not None and node.end != -1:
self.sequence.append(self.create_concept_node(concept, node))
self.pos = node.end
self.bnf_parsed = True
else:
self.debug.append(("Rewind", token))
self.unrecognized_tokens.add_token(token, self.parser.parser_input.pos)
self.pos = self.parser.parser_input.pos # reset position
self.pos = self.parser.parser_input.pos
self.token = self.parser.parser_input.tokens[self.pos]
# parse
self.debugger.debug_concept(concept, parsing_expression=parsing_expression)
node = parsing_expression.parse(self)
if isinstance(node, MultiNode):
# when multiple choices are found, use the longest result
node = node.results[0].node
if node is not None and node.end != -1:
self.sequence.append(self.create_concept_node(concept, node))
self.pos = node.end
self.bnf_parsed = True
else:
self.debug.append(("Rewind", token))
self.unrecognized_tokens.add_token(token, self.parser.parser_input.pos)
self.pos = self.parser.parser_input.pos # reset position
finally:
self.concepts.pop()
def eat_unrecognized(self, token):
if self.is_locked():
@@ -998,7 +1032,7 @@ class BnfConceptParserHelper:
self.unrecognized_tokens = UnrecognizedTokensNode(-1, -1, [])
def clone(self):
clone = BnfConceptParserHelper(self.parser)
clone = BnfConceptParserHelper(self.parser, self.debugger)
clone.debug = self.debug[:]
clone.errors = self.errors[:]
clone.sequence = self.sequence[:]
@@ -1148,7 +1182,6 @@ class ToUpdate:
class BnfNodeParser(BaseNodeParser):
NAME = "Bnf"
def __init__(self, **kwargs):
@@ -1215,7 +1248,7 @@ class BnfNodeParser(BaseNodeParser):
return res[0] if len(res) == 1 else Sequence(*res)
def get_concepts_sequences(self):
def get_concepts_sequences(self, context):
"""
Main method that parses the tokens and extract the concepts
:return:
@@ -1241,23 +1274,36 @@ class BnfNodeParser(BaseNodeParser):
return by_end_pos[max(by_end_pos)]
forked = []
concept_parser_helpers = [BnfConceptParserHelper(self)]
debugger = context.get_debugger(self.NAME, "parse")
debugger.debug_entering(source=self.parser_input.as_text())
concept_parser_helpers = [BnfConceptParserHelper(self, debugger)]
while self.parser_input.next_token(False):
token = self.parser_input.token
if debugger.is_enabled():
debug_prefix = f"pos={self.parser_input.pos}, {token=}, {len(concept_parser_helpers)} parser(s)"
try:
not_locked = [p for p in concept_parser_helpers if not p.is_locked()]
if len(not_locked) == 0:
if debugger.is_enabled():
debugger.debug_log(debug_prefix + ", all parsers are locked. Nothing to do.")
continue
concepts = self.get_concepts(token, self._is_eligible, strip_quotes=False)
if not concepts:
for concept_parser in concept_parser_helpers:
if debugger.is_enabled():
debugger.debug_log(debug_prefix + ", no concept found.")
for concept_parser in not_locked:
concept_parser.eat_unrecognized(token)
continue
if debugger.is_enabled():
debugger.debug_log(debug_prefix + f", concept(s) found={concepts}")
if len(concepts) == 1:
for concept_parser in concept_parser_helpers:
for concept_parser in not_locked:
concept_parser.eat_concept(concepts[0], token)
continue
@@ -1274,9 +1320,13 @@ class BnfNodeParser(BaseNodeParser):
clone = concept_parser.clone()
temp_res.append(clone)
clone.eat_concept(concept, token)
if debugger.is_enabled():
debugger.debug_log(f"..{concept}, parsed={clone.bnf_parsed}, length={clone.pos}")
# only keep the longest
concept_parser_helpers = _get_longest(temp_res)
if debugger.is_enabled() and len(temp_res) > 1:
debugger.debug_log(f"Only keep longest -> {len(concept_parser_helpers)} parser(s) left")
finally:
_add_forked_to_concept_parser_helpers()
@@ -1286,6 +1336,7 @@ class BnfNodeParser(BaseNodeParser):
concept_parser.finalize()
_add_forked_to_concept_parser_helpers()
debugger.debug_var("result", concept_parser_helpers)
return concept_parser_helpers
def fix_infinite_recursions(self, context, grammar, concept_id, parsing_expression):
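_get_longest is not shown in this diff; from the surrounding comments it keeps only the helpers that consumed the most input. A sketch under that assumption (taking pos as how far a helper got):

# Assumed behaviour of _get_longest, not its actual source:
# keep only the parser helpers that reached the furthest position.
def get_longest(helpers):
    best = max(h.pos for h in helpers)
    return [h for h in helpers if h.pos == best]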
@@ -1306,7 +1357,7 @@ class BnfNodeParser(BaseNodeParser):
for node_id in path_:
expression_ = expression_.nodes[0] if isinstance(expression_, ConceptExpression) else expression_
for i, node in [(i, n) for i, n in enumerate(expression_.nodes) if isinstance(n, ConceptExpression)]:
if node_id in (node.recurse_id, node.concept.id):
if node_id == node.concept.id:
index_ = i
parent_ = expression_
expression_ = node # take the child of the ConceptExpression found
@@ -1336,7 +1387,6 @@ class BnfNodeParser(BaseNodeParser):
expression_update.rule_name, new_grammar, set())
new = ConceptExpression(expression_update.concept,
rule_name=expression_update.rule_name,
recurse_id=expression_update.recurse_id,
nodes=new_nodes)
parent.nodes[index] = new
@@ -1358,12 +1408,12 @@ class BnfNodeParser(BaseNodeParser):
def check_for_infinite_recursion(self, parsing_expression, already_found, in_recursion, only_first=False):
if isinstance(parsing_expression, ConceptExpression):
id_to_use = parsing_expression.recurse_id or parsing_expression.concept.id
if id_to_use in already_found:
already_found.append(id_to_use) # add the id again, to know where the cycle starts
if parsing_expression.concept.id in already_found:
already_found.append(parsing_expression.concept.id) # add the id again, to know where the cycle starts
in_recursion.extend(already_found)
return True
already_found.append(id_to_use)
already_found.append(parsing_expression.concept.id)
return self.check_for_infinite_recursion(parsing_expression.nodes[0],
already_found,
in_recursion,
@@ -1396,13 +1446,13 @@ class BnfNodeParser(BaseNodeParser):
return False
return False
if isinstance(parsing_expression, UnOrderedChoice):
for node in parsing_expression.nodes:
already_found_for_current_node.clear()
already_found_for_current_node.extend(already_found.copy())
if self.check_for_infinite_recursion(node, already_found_for_current_node, in_recursion, True):
return True
return False
# if isinstance(parsing_expression, UnOrderedChoice):
# for node in parsing_expression.nodes:
# already_found_for_current_node.clear()
# already_found_for_current_node.extend(already_found.copy())
# if self.check_for_infinite_recursion(node, already_found_for_current_node, in_recursion, True):
# return True
# return False
return False
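check_for_infinite_recursion marks a cycle by appending the repeated id a second time, so the slice between the two occurrences is the cycle itself. A minimal illustration of that convention:

# Illustration of the cycle-marking convention used above.
def mark(already_found, concept_id):
    if concept_id in already_found:
        already_found.append(concept_id)  # repeated id marks the cycle start
        return True
    already_found.append(concept_id)
    return False

seen = []
for cid in [1, 2, 3, 2]:
    if mark(seen, cid):
        start = seen.index(cid)
        assert seen[start:] == [2, 3, 2]  # the detected cycle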
@@ -1429,50 +1479,34 @@ class BnfNodeParser(BaseNodeParser):
desc=desc) as sub_context:
# get the parsing expression
to_skip = {concept.id}
ret = self.resolve_concept_parsing_expression(sub_context, concept, None, grammar, to_skip, to_update)
presult = self.resolve_concept_parsing_expression(sub_context, concept, None, grammar, to_skip, to_update)
# check and update parsing expression that are still under construction
# Note that we only update the concept that will update concepts_grammars
# because pe.node may be large
for item in to_update:
pe = item.parsing_expression
for i, node in enumerate(pe.nodes):
if isinstance(node, UnderConstruction):
pe.nodes[i] = grammar.get(node.concept_id)
# KSI 20200826
# To be rewritten into get_infinite_recursions
# I have changed resolve_concept_parsing_expression() to directly avoid obvious circular references
# So there is no longer a need to search for and fix them
concepts_in_recursion = self.fix_infinite_recursions(context, grammar, concept.id, ret)
# check for infinite recursion definitions
already_seen = [concept.id]
in_recursion = [] # in_recursion may be shorter than already_seen
concepts_in_recursion = self.check_for_infinite_recursion(presult, already_seen, in_recursion)
if concepts_in_recursion:
chicken_and_egg = context.sheerka.new(BuiltinConcepts.CHICKEN_AND_EGG, body=concepts_in_recursion)
for concept_id in concepts_in_recursion:
chicken_and_egg = context.sheerka.new(BuiltinConcepts.CHICKEN_AND_EGG, body=in_recursion)
for concept_id in in_recursion:
grammar[concept_id] = chicken_and_egg
# update, in case of infinite circular recursion
ret = grammar[concept.id]
# update, in case of infinite recursion
presult = grammar[concept.id]
# finally, update the list of the known pexpression (self.concepts_grammars)
# We do not add pexpressions that contain UnOrderedChoice because the choices always depend on the current
# concept.
# For example, the pexpression for 'twenties' found under the concept 'hundreds' won't be the same as
# the pexpression 'twenties' under the concept 'thousand' or even the pexpression 'twenties' without any
# context.
# finally, update the list of the known pexpressions (self.concepts_grammars) for later use
for k, v in grammar.items():
if k == concept.id:
self.concepts_grammars.put(k, v)
elif context.sheerka.isinstance(v, BuiltinConcepts.CHICKEN_AND_EGG):
# not quite sure that this is a good idea.
# Why would we want to corrupt previously valid entries?
self.concepts_grammars.put(k, v)
else:
if not v.has_unordered_choice():
self.concepts_grammars.put(k, v)
self.concepts_grammars.put(k, v)
sub_context.add_values(return_values=ret)
sub_context.add_values(return_values=presult)
return ret
return presult
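The to_update pass above patches placeholder nodes once the real grammar entry exists. A small sketch of that deferred-resolution pattern, with UnderConstruction stubbed (the real class is only referenced, not shown, in this diff):

# Deferred patch-up of under-construction grammar entries;
# UnderConstruction is stubbed for the illustration.
class StubUnderConstruction:
    def __init__(self, concept_id):
        self.concept_id = concept_id

def patch_nodes(parsing_expression, grammar):
    for i, node in enumerate(parsing_expression.nodes):
        if isinstance(node, StubUnderConstruction):
            parsing_expression.nodes[i] = grammar.get(node.concept_id)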
def resolve_concept_parsing_expression(self, context, concept, name, grammar, to_skip, to_update):
"""
@@ -1487,16 +1521,17 @@ class BnfNodeParser(BaseNodeParser):
"""
sheerka = context.sheerka
if sheerka.isaset(context, concept) and hasattr(context, "obj"):
key_to_use = ConceptExpression.get_recurse_id(context.obj.id, concept.id, name)
else:
key_to_use = concept.id
# if sheerka.isaset(context, concept) and hasattr(context, "obj"):
# key_to_use = ConceptExpression.get_recursion_id(context.obj.id, concept.id, name)
# else:
# key_to_use = concept.id
key_to_use = concept.id
if key_to_use in self.concepts_grammars:
# Use the global pexpression only if it does not contain UnOrderedChoice
pe = self.concepts_grammars.get(key_to_use)
if not pe.has_unordered_choice():
return self.concepts_grammars.get(key_to_use)
return self.concepts_grammars.get(key_to_use)
# # Use the global pexpression only if it does not contain UnOrderedChoice
# pe = self.concepts_grammars.get(key_to_use)
# if not pe.has_unordered_choice():
if key_to_use in grammar: # under construction entry
return grammar.get(key_to_use)
@@ -1522,20 +1557,12 @@ class BnfNodeParser(BaseNodeParser):
ssc.add_inputs(concept=concept)
concepts_in_group = self.sheerka.get_set_elements(ssc, concept)
valid_concepts = [c for c in concepts_in_group if c.id not in to_skip]
# for c in concepts_in_group:
# if c.id == context.obj.id:
# continue
#
# if hasattr(context, "concepts_to_skip") and c.id in context.concepts_to_skip:
# continue
#
# valid_concepts.append(c)
# valid_concepts = [c for c in concepts_in_group if c.id not in to_skip]
valid_concepts = concepts_in_group
nodes = []
for c in valid_concepts:
c_recurse_id = f"{c.id}#{c.name}#{concept.id}" if self.sheerka.isaset(context, c) else None
nodes.append(ConceptExpression(c, rule_name=c.name, recurse_id=c_recurse_id))
nodes.append(ConceptExpression(c, rule_name=c.name))
resolved = self.resolve_parsing_expression(ssc,
UnOrderedChoice(*nodes),
@@ -1664,7 +1691,7 @@ class BnfNodeParser(BaseNodeParser):
False,
context.sheerka.new(BuiltinConcepts.ERROR, body=self.error_sink))
sequences = self.get_concepts_sequences()
sequences = self.get_concepts_sequences(context)
valid_parser_helpers = self.get_valid(sequences)
if valid_parser_helpers is None:
# token error
+4
@@ -40,10 +40,12 @@ class PythonNode(Node):
self.ast_ = ast_ # if ast_ else ast.parse(source, mode="eval") if source else None
self.objects = objects or {} # when objects (mainly concepts or rules) are recognized in the expression
self.compiled = None
self.ast_str = self.get_dump(self.ast_)
def init_ast(self):
if self.ast_ is None and self.source:
self.ast_ = ast.parse(self.source, mode="eval")
self.ast_str = self.get_dump(self.ast_)
return self
def get_compiled(self):
@@ -77,6 +79,8 @@ class PythonNode(Node):
@staticmethod
def get_dump(ast_):
if not ast_:
return None
dump = ast.dump(ast_)
for to_remove in [", ctx=Load()", ", kind=None", ", type_ignores=[]"]:
dump = dump.replace(to_remove, "")
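get_dump only strips noise that ast.dump emits on some Python versions (kind=None appears on CPython 3.8, type_ignores on module dumps). For example:

import ast

dump = ast.dump(ast.parse("x + 1", mode="eval"))
for to_remove in [", ctx=Load()", ", kind=None", ", type_ignores=[]"]:
    dump = dump.replace(to_remove, "")
# dump == "Expression(body=BinOp(left=Name(id='x'), op=Add(), right=Constant(value=1)))"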