Fixed #109 : Mix python and concept. List comprehension
Fixed #110 : SheerkaDebugManager: add list_debug_settings Fixed #111 : SheerkaDebugManager: Implement ListDebugLogger Fixed #112 : SyaNodeParser: rewrite this parser Fixed #113 : Sheerka: Add enable_parser_caching to disable parsers caching Fixed #114 : SyaNodeParser : Implement fast cache to resolve unrecognized tokens requests Fixed #115 : BnfNodeParser : Implement fast cache to resolve unrecognized tokens requests Fixed #116 : SequenceNodeParser : Implement fast cache to resolve unrecognized tokens requests Fixed #117 : ResolveMultiplePluralAmbiguityEvaluator: Resolve multiple plural ambiguity
This commit is contained in:
+65
-15
@@ -5,15 +5,11 @@ from cache.Cache import Cache
|
||||
from core.ast_helpers import ast_to_props
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, ConceptParts, DEFINITION_TYPE_BNF, concept_part_value
|
||||
from core.global_symbols import NotInit, NotFound, INIT_AST_PARSERS, DEFAULT_EVALUATORS
|
||||
from core.global_symbols import DEFAULT_EVALUATORS, INIT_AST_PARSERS, NotFound, NotInit
|
||||
from core.rule import Rule
|
||||
from core.sheerka.services.SheerkaExecute import ParserInput
|
||||
from core.tokenizer import Tokenizer, TokenKind
|
||||
from core.tokenizer import TokenKind, Tokenizer
|
||||
from core.utils import as_bag
|
||||
from parsers.BaseExpressionParser import compile_disjunctions, AndNode
|
||||
from parsers.BaseNodeParser import SourceCodeNode, ConceptNode, UnrecognizedTokensNode, SourceCodeWithConceptNode, \
|
||||
RuleNode, LexerNode
|
||||
from parsers.BaseParser import ParsingError
|
||||
|
||||
PARSE_STEPS = [BuiltinConcepts.BEFORE_PARSING, BuiltinConcepts.PARSING, BuiltinConcepts.AFTER_PARSING]
|
||||
EVAL_ONLY_STEPS = [BuiltinConcepts.BEFORE_EVALUATION, BuiltinConcepts.EVALUATION, BuiltinConcepts.AFTER_EVALUATION]
|
||||
@@ -226,9 +222,9 @@ def resolve_ambiguity(context, concepts):
|
||||
remaining_concepts.extend(by_complexity[complexity])
|
||||
else:
|
||||
for c in by_complexity[complexity]:
|
||||
from core.sheerka.services.SheerkaEvaluateConcept import EvaluationHints
|
||||
evaluated = context.sheerka.evaluate_concept(context, c,
|
||||
eval_body=False,
|
||||
validation_only=True,
|
||||
hints=EvaluationHints(eval_body=False, expression_only=True),
|
||||
metadata=[ConceptParts.PRE, ConceptParts.WHERE])
|
||||
if context.sheerka.is_success(evaluated) or evaluated.key == c.key:
|
||||
remaining_concepts.append(c)
|
||||
@@ -255,6 +251,9 @@ def get_condition_complexity(context, condition):
|
||||
|
||||
# # count the number of conjunctions
|
||||
from parsers.LogicalOperatorParser import LogicalOperatorParser
|
||||
from parsers.BaseExpressionParser import compile_disjunctions
|
||||
from parsers.BaseExpressionParser import AndNode
|
||||
|
||||
parser = LogicalOperatorParser()
|
||||
res = parser.parse(context, ParserInput(condition))
|
||||
if not res.status:
|
||||
@@ -314,6 +313,9 @@ def only_parsers_results(context, return_values):
|
||||
:return:
|
||||
"""
|
||||
|
||||
from parsers.BaseNodeParser import UnrecognizedTokensNode
|
||||
from parsers.BaseParser import ParsingError
|
||||
|
||||
if not isinstance(return_values, list):
|
||||
return return_values
|
||||
|
||||
@@ -335,8 +337,8 @@ def only_parsers_results(context, return_values):
|
||||
if isinstance(ret_val.body.body, ParsingError):
|
||||
continue
|
||||
if isinstance(ret_val.body.body, list) and \
|
||||
len(ret_val.body.body) == 1 and \
|
||||
isinstance(ret_val.body.body[0], UnrecognizedTokensNode):
|
||||
len(ret_val.body.body) == 1 and \
|
||||
isinstance(ret_val.body.body[0], UnrecognizedTokensNode):
|
||||
continue
|
||||
temp_ret_val.append(ret_val)
|
||||
return_values_ok = temp_ret_val
|
||||
@@ -479,6 +481,7 @@ def get_lexer_nodes(return_values, start, tokens):
|
||||
:return: list of list (list of concept node sequence)
|
||||
"""
|
||||
from evaluators.BaseEvaluator import BaseEvaluator
|
||||
from parsers.BaseNodeParser import ConceptNode, LexerNode, RuleNode, SourceCodeNode
|
||||
|
||||
lexer_nodes = []
|
||||
for ret_val in return_values:
|
||||
@@ -546,6 +549,7 @@ def get_lexer_nodes_using_positions(return_values, positions):
|
||||
"""
|
||||
|
||||
from evaluators.BaseEvaluator import BaseEvaluator
|
||||
from parsers.BaseNodeParser import ConceptNode, LexerNode, RuleNode, SourceCodeNode
|
||||
|
||||
lexer_nodes = []
|
||||
for ret_val, position in zip(return_values, positions):
|
||||
@@ -615,8 +619,8 @@ def ensure_evaluated(context, concept, eval_body=True, metadata=None):
|
||||
:param metadata:
|
||||
:return:
|
||||
"""
|
||||
from core.sheerka.services.SheerkaEvaluateConcept import SheerkaEvaluateConcept, EvaluationHints
|
||||
if concept.get_hints().is_evaluated:
|
||||
from core.sheerka.services.SheerkaEvaluateConcept import SheerkaEvaluateConcept
|
||||
return SheerkaEvaluateConcept.apply_ret(concept,
|
||||
eval_body or context.in_context(BuiltinConcepts.EVAL_BODY_REQUESTED))
|
||||
|
||||
@@ -624,11 +628,14 @@ def ensure_evaluated(context, concept, eval_body=True, metadata=None):
|
||||
if concept.get_metadata().definition_type != DEFINITION_TYPE_BNF:
|
||||
for var_name, var_default_value in concept.get_metadata().variables:
|
||||
if var_default_value is None and \
|
||||
var_name not in concept.get_compiled() and \
|
||||
(var_name not in concept.values() or concept.get_value(var_name) == NotInit):
|
||||
var_name not in concept.get_compiled() and \
|
||||
(var_name not in concept.values() or concept.get_value(var_name) == NotInit):
|
||||
return concept
|
||||
|
||||
evaluated = context.sheerka.evaluate_concept(context, concept, eval_body=eval_body, metadata=metadata)
|
||||
evaluated = context.sheerka.evaluate_concept(context,
|
||||
concept,
|
||||
hints=EvaluationHints(eval_body=eval_body),
|
||||
metadata=metadata)
|
||||
return evaluated
|
||||
|
||||
|
||||
@@ -663,6 +670,9 @@ def update_compiled(context, concept, errors, parsers=None):
|
||||
:param parsers: to customize the parsers to use
|
||||
:return:
|
||||
"""
|
||||
|
||||
from parsers.BaseNodeParser import ConceptNode, SourceCodeNode, SourceCodeWithConceptNode, UnrecognizedTokensNode
|
||||
|
||||
sheerka = context.sheerka
|
||||
parsers = parsers or PARSERS
|
||||
|
||||
@@ -676,6 +686,15 @@ def update_compiled(context, concept, errors, parsers=None):
|
||||
if isinstance(v, Concept):
|
||||
_validate_concept(v)
|
||||
|
||||
elif isinstance(v, ConceptNode):
|
||||
_validate_concept(v.concept)
|
||||
c.get_compiled()[k] = v.concept
|
||||
|
||||
elif isinstance(v, SourceCodeNode):
|
||||
if not v.return_value:
|
||||
raise NotImplementedError("SourceCodeNode")
|
||||
c.get_compiled()[k] = [v.return_value]
|
||||
|
||||
elif isinstance(v, SourceCodeWithConceptNode):
|
||||
if v.return_value:
|
||||
res = v.return_value
|
||||
@@ -939,7 +958,8 @@ def get_possible_variables_from_concept(context, concept):
|
||||
return set()
|
||||
|
||||
concept_name = [t.str_value for t in Tokenizer(concept.name, yield_eof=False)]
|
||||
names = [v_value or v_name for v_name, v_value in concept.get_metadata().variables if v_name in concept_name]
|
||||
names = [v_value.strip() or v_name for v_name, v_value in concept.get_metadata().variables if
|
||||
v_name in concept_name]
|
||||
possible_vars = filter(lambda x: context.sheerka.is_not_a_concept_name(x), names)
|
||||
to_keep = set()
|
||||
for var in possible_vars:
|
||||
@@ -961,6 +981,36 @@ def is_only_successful(sheerka, return_value):
|
||||
sheerka.isinstance(return_value.body, BuiltinConcepts.ONLY_SUCCESSFUL)
|
||||
|
||||
|
||||
def debug_nodes(nodes):
    """
    Map a list of parser nodes to a debug-friendly representation.

    UnrecognizedTokensNode entries are replaced by their raw source,
    nodes exposing get_concept() by their concept, and anything else
    is kept as-is.
    """
    from parsers.BaseNodeParser import UnrecognizedTokensNode

    def _describe(node):
        # One node -> its debug representation (source, concept, or itself).
        if isinstance(node, UnrecognizedTokensNode):
            return node.source
        if hasattr(node, "get_concept"):
            return node.get_concept()
        return node

    return [_describe(node) for node in nodes]
||||
|
||||
|
||||
def get_new_variables_definitions(concept):
    """
    Return a new set of variable definition, where the default value are initialized with what was compiled
    """
    # Hoist the accessors: the same metadata/compiled views are read on
    # every iteration below.
    metadata = concept.get_metadata()
    compiled = concept.get_compiled()

    refreshed = []
    for name, default in metadata.variables:
        # A parameter whose compiled value carries a "source" gets that
        # source as its new default; everything else keeps its old default.
        if name in metadata.parameters and hasattr(compiled[name], "source"):
            refreshed.append((name, compiled[name].source))
        else:
            refreshed.append((name, default))

    return refreshed
|
||||
|
||||
|
||||
class CreateObjectIdentifiers:
|
||||
"""
|
||||
Class that creates unique identifiers for Concept or Rule objects
|
||||
|
||||
Reference in New Issue
Block a user