Fixed #30 : Add variable support in BNF concept definition

Fixed #31 : Add regex support in BNF Concept
Fixed #33 : Do not memorize object during restore
This commit is contained in:
2021-02-24 17:23:03 +01:00
parent cac2dad17f
commit 646c428edb
32 changed files with 2107 additions and 360 deletions
+40 -20
View File
@@ -342,8 +342,9 @@ def evaluate(context,
def get_lexer_nodes(return_values, start, tokens):
"""
From a parser result, return the corresponding LexerNode
either ConceptNode, UnrecognizedTokensNode or SourceCodeNode
Transform all elements from return_values into lexer nodes (ConceptNode, UnrecognizedTokensNode, SourceCodeNode...)
    Unlike the other method (get_lexer_nodes_using_positions),
    all created lexer nodes will use the same offset (start)
:param return_values:
:param start:
:param tokens:
@@ -360,13 +361,12 @@ def get_lexer_nodes(return_values, start, tokens):
continue
end = start + len(tokens) - 1
lexer_nodes.append(
[SourceCodeNode(start,
end,
tokens,
ret_val.body.source,
python_node=ret_val.body.body,
return_value=ret_val)])
lexer_nodes.append([SourceCodeNode(start,
end,
tokens,
ret_val.body.source,
python_node=ret_val.body.body,
return_value=ret_val)])
elif ret_val.who == "parsers.ExactConcept":
concepts = ret_val.body.body if hasattr(ret_val.body.body, "__iter__") else [ret_val.body.body]
@@ -379,6 +379,11 @@ def get_lexer_nodes(return_values, start, tokens):
for node in nodes:
node.start += start
node.end += start
if isinstance(node, ConceptNode):
for k, v in node.concept.get_compiled().items():
if hasattr(v, "start"):
v.start += start
v.end += start
            # but append the whole sequence when it's a sequence
lexer_nodes.append(nodes)
@@ -397,9 +402,15 @@ def get_lexer_nodes(return_values, start, tokens):
def get_lexer_nodes_using_positions(return_values, positions):
"""
Transform all elements from return_values into lexer nodes
use positions to remap the exact positions
Transform all elements from return_values into lexer nodes (ConceptNode, UnrecognizedTokensNode, SourceCodeNode...)
Use positions to compute the exact new positions
    Unlike the other method (get_lexer_nodes),
    each return value is mapped to one position. It's not an offset, but an absolute position
:param return_values:
:param positions: is a list of triplets (start, end, tokens)
:return:
"""
lexer_nodes = []
for ret_val, position in zip(return_values, positions):
if ret_val.who in ("parsers.Python", 'parsers.PythonWithConcepts'):
@@ -425,6 +436,11 @@ def get_lexer_nodes_using_positions(return_values, positions):
for node in nodes:
node.start = position.start
node.end = position.end
if isinstance(node, ConceptNode):
for k, v in node.concept.get_compiled().items():
if hasattr(v, "start"):
v.start += position.start
v.end += position.start
            # but append the whole sequence when it's a sequence
lexer_nodes.extend(nodes)
@@ -493,9 +509,10 @@ def get_lexer_nodes_from_unrecognized(context, unrecognized_tokens_node, parsers
def update_compiled(context, concept, errors, parsers=None):
"""
recursively iterate over concept.get_compiled() to replace LexerNode into concepts or list of ReturnValueConcept
When parsing using a LexerNodeParser (SyaNodeParser, BnfNodeParser...)
the result will be a LexerNode.
TL;DR;
Recursively iterate over concept.get_compiled() to replace LexerNode into concepts or list of ReturnValueConcept
Long version:
When parsing using a LexerNodeParser (SyaNodeParser, BnfNodeParser...) the result will be a LexerNode.
In the specific case of a ConceptNode, the compiled variables will also be LexerNode (UnrecognizedTokensNode...)
    This function iterates over the compiled variables to transform these nodes into concepts of the compiled AST
:param context:
@@ -518,9 +535,12 @@ def update_compiled(context, concept, errors, parsers=None):
_validate_concept(v)
elif isinstance(v, SourceCodeWithConceptNode):
from parsers.PythonWithConceptsParser import PythonWithConceptsParser
parser_helper = PythonWithConceptsParser()
res = parser_helper.parse_nodes(context, v.get_all_nodes())
if v.return_value:
res = v.return_value
else:
from parsers.PythonWithConceptsParser import PythonWithConceptsParser
parser_helper = PythonWithConceptsParser()
res = parser_helper.parse_nodes(context, v.get_all_nodes())
if res.status:
c.get_compiled()[k] = [res]
else:
@@ -556,7 +576,7 @@ def update_compiled(context, concept, errors, parsers=None):
# example : Concept("a plus b").def_var("a").def_var("b")
# and the user has entered 'a plus b'
# Chances are that we are talking about the concept itself, and not an instantiation (like '10 plus 2')
# This means that 'a' and 'b' don't have any real value
# This means that 'a' and 'b' don't have any real values
if len(concept.get_metadata().variables) > 0:
for name, value in concept.get_metadata().variables:
if _get_source(concept.get_compiled(), name) != name:
@@ -633,7 +653,7 @@ def ensure_concept_or_rule(*items):
raise TypeError(f"'{items}' must be a concept or rule")
def ensure_bnf(context, concept, parser_name="BaseNodeParser"):
def ensure_bnf(context, concept, parser_name="BaseNodeParser", update_bnf_for_cached_concept=True):
if concept.get_metadata().definition_type == DEFINITION_TYPE_BNF and not concept.get_bnf():
from parsers.BnfDefinitionParser import BnfDefinitionParser
regex_parser = BnfDefinitionParser()
@@ -651,7 +671,7 @@ def ensure_bnf(context, concept, parser_name="BaseNodeParser"):
raise Exception(bnf_parsing_ret_val.value)
concept.set_bnf(bnf_parsing_ret_val.body.body)
if concept.id:
if concept.id and update_bnf_for_cached_concept:
context.sheerka.get_by_id(concept.id).set_bnf(concept.get_bnf()) # update bnf in cache