Added first version of DebugManager. Implemented draft of the rule engine
This commit is contained in:
@@ -1,152 +0,0 @@
|
||||
from core.builtin_concepts import BuiltinConcepts, ListConcept
|
||||
from core.concept import Concept, ConceptParts
|
||||
import ast
|
||||
import core.utils
|
||||
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NodeParent:
|
||||
"""
|
||||
Class that represent the ancestor of a Node
|
||||
For example, the 'For' nodes has three fields (target, iter and body)
|
||||
So, for a node under For.iter
|
||||
node -> For
|
||||
field -> iter
|
||||
"""
|
||||
|
||||
def __init__(self, node, field):
|
||||
self.node = node
|
||||
self.field = field
|
||||
|
||||
def __repr__(self):
|
||||
if self.node is None:
|
||||
return None
|
||||
|
||||
if self.field is None:
|
||||
return self.node.get_node_type()
|
||||
|
||||
return self.node.get_node_type() + "." + self.field
|
||||
|
||||
def __eq__(self, other):
|
||||
# I can compare with type for simplification
|
||||
if isinstance(other, tuple):
|
||||
return self.node.get_node_type() == other[0] and self.field == other[1]
|
||||
|
||||
# normal equals implementation
|
||||
if not isinstance(other, NodeParent):
|
||||
return False
|
||||
|
||||
return self.node.get_node_type() == other.node.get_node_type() and self.field == other.field
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.node.get_node_type(), self.field))
|
||||
|
||||
|
||||
class NodeConcept(Concept):
|
||||
def __init__(self, key, node_type, parent: NodeParent):
|
||||
super().__init__(key, True, False, key)
|
||||
self.parent = parent
|
||||
self.node_type = node_type
|
||||
|
||||
def get_node_type(self):
|
||||
return self.node_type
|
||||
|
||||
|
||||
class GenericNodeConcept(NodeConcept):
|
||||
def __init__(self, node_type, parent):
|
||||
super().__init__(BuiltinConcepts.GENERIC_NODE, node_type, parent)
|
||||
|
||||
def __repr__(self):
|
||||
return "Generic:" + self.node_type
|
||||
|
||||
def get_node_type(self):
|
||||
return self.node_type
|
||||
|
||||
def get_obj_value(self):
|
||||
if self.node_type == "Name":
|
||||
return self.get_value("id")
|
||||
|
||||
if self.node_type == "arg":
|
||||
return self.get_value("arg")
|
||||
|
||||
return self.body
|
||||
|
||||
|
||||
class IdentifierNodeConcept(NodeConcept):
|
||||
def __init__(self, parent, name):
|
||||
super().__init__(BuiltinConcepts.IDENTIFIER_NODE, "Name", parent)
|
||||
self.set_value(ConceptParts.BODY, name)
|
||||
|
||||
|
||||
class CallNodeConcept(NodeConcept):
|
||||
def __init__(self, parent=None):
|
||||
super().__init__(BuiltinConcepts.IDENTIFIER_NODE, "Call", parent)
|
||||
|
||||
def get_args_names(self, sheerka):
|
||||
return sheerka.objvalues(self.get_value("args"))
|
||||
|
||||
|
||||
def python_to_concept(python_node):
|
||||
"""
|
||||
Transform Python AST node into concept nodes
|
||||
for better usage
|
||||
:param python_node:
|
||||
:return:
|
||||
"""
|
||||
|
||||
def _transform(node, parent):
|
||||
node_type = node.__class__.__name__
|
||||
concept = GenericNodeConcept(node_type, parent).init_key()
|
||||
for field in node._fields:
|
||||
if not hasattr(node, field):
|
||||
continue
|
||||
|
||||
value = getattr(node, field)
|
||||
concept.def_var(field)
|
||||
if isinstance(value, list):
|
||||
lst = ListConcept().init_key()
|
||||
for i in value:
|
||||
lst.append(_transform(i, NodeParent(concept, field)))
|
||||
concept.set_value(field, lst)
|
||||
elif isinstance(value, ast.AST):
|
||||
concept.set_value(field, _transform(value, NodeParent(concept, field)))
|
||||
else:
|
||||
concept.set_value(field, value)
|
||||
|
||||
concept.metadata.is_evaluated = True
|
||||
return concept
|
||||
|
||||
return _transform(python_node, None)
|
||||
|
||||
|
||||
def concept_to_python(concept_node):
|
||||
"""
|
||||
Transform back concept_node to Python AST node
|
||||
:param concept_node:
|
||||
:return:
|
||||
"""
|
||||
|
||||
def _transform(node):
|
||||
node_type = node.get_node_type()
|
||||
ast_object = core.utils.new_object("_ast." + node_type)
|
||||
for field in node.values:
|
||||
if field not in ast_object._fields:
|
||||
continue
|
||||
|
||||
value = node.get_value(field)
|
||||
if isinstance(value, list) or isinstance(value, Concept) and value.key == str(BuiltinConcepts.LIST):
|
||||
lst = []
|
||||
for i in value.body:
|
||||
lst.append(_transform(i))
|
||||
setattr(ast_object, field, lst)
|
||||
elif isinstance(value, NodeConcept):
|
||||
setattr(ast_object, field, _transform(value))
|
||||
else:
|
||||
setattr(ast_object, field, value)
|
||||
return ast_object
|
||||
|
||||
res = _transform(concept_node)
|
||||
return res
|
||||
@@ -1,128 +0,0 @@
|
||||
from core.ast.nodes import GenericNodeConcept, NodeConcept
|
||||
from core.builtin_concepts import ListConcept
|
||||
|
||||
|
||||
class ConceptNodeVisitor:
|
||||
"""
|
||||
Base class to visit NodeConcept
|
||||
It is insolently inspired by python AST.Visitor class
|
||||
"""
|
||||
|
||||
def visit(self, node):
|
||||
|
||||
"""Visit a node."""
|
||||
name = node.node_type if isinstance(node, GenericNodeConcept) else node.name
|
||||
name = str(name).capitalize()
|
||||
|
||||
method = 'visit_' + name
|
||||
visitor = getattr(self, method, self.generic_visit)
|
||||
return visitor(node)
|
||||
|
||||
def generic_visit(self, node):
|
||||
"""Called if no explicit visitor function exists for a node."""
|
||||
for field, value in iter_props(node):
|
||||
if isinstance(value, ListConcept):
|
||||
for item in value.body:
|
||||
if isinstance(item, NodeConcept):
|
||||
self.visit(item)
|
||||
elif isinstance(value, NodeConcept):
|
||||
self.visit(value)
|
||||
|
||||
def visit_Constant(self, node):
|
||||
value = node.get_value("value")
|
||||
type_name = _const_node_type_names.get(type(value))
|
||||
if type_name is None:
|
||||
for cls, name in _const_node_type_names.items():
|
||||
if isinstance(value, cls):
|
||||
type_name = name
|
||||
break
|
||||
if type_name is not None:
|
||||
method = 'visit_' + type_name
|
||||
try:
|
||||
visitor = getattr(self, method)
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
import warnings
|
||||
warnings.warn(f"{method} is deprecated; add visit_Constant",
|
||||
PendingDeprecationWarning, 2)
|
||||
return visitor(node)
|
||||
return self.generic_visit(node)
|
||||
|
||||
|
||||
class UnreferencedNamesVisitor(ConceptNodeVisitor):
|
||||
def __init__(self, sheerka):
|
||||
self.names = set()
|
||||
self.sheerka = sheerka
|
||||
|
||||
def visit_Name(self, node):
|
||||
parents = get_parents(node)
|
||||
if ("For", "target") in parents: # variable used by the 'for' iteration
|
||||
return
|
||||
|
||||
if ("Call", "func") in parents: # name of the function
|
||||
return
|
||||
|
||||
# if ("Assign", "targets") in parents: # variable which is assigned
|
||||
# return
|
||||
|
||||
if self.can_be_discarded(self.sheerka.objvalue(node), parents):
|
||||
return
|
||||
|
||||
self.names.add(self.sheerka.objvalue(node))
|
||||
|
||||
def can_be_discarded(self, variable_name, parents):
|
||||
|
||||
for node in (parent.node for parent in parents):
|
||||
if node is None:
|
||||
return False
|
||||
|
||||
if node.get_node_type() == "For" and self.sheerka.objvalue(node.get_value("target")) == variable_name:
|
||||
# variable used by the loop
|
||||
return True
|
||||
|
||||
if node.get_node_type() == "FunctionDef":
|
||||
# variable defined as a function parameter
|
||||
args = node.get_value("args")
|
||||
args_values = list(self.sheerka.objvalues(args.get_value("args")))
|
||||
if variable_name in args_values:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class ExtractPredicateVisitor(ConceptNodeVisitor):
|
||||
def __init__(self, variable_name):
|
||||
self.predicates = []
|
||||
self.variable_name = variable_name
|
||||
|
||||
|
||||
def get_parents(node):
|
||||
if node.parent is None:
|
||||
return []
|
||||
|
||||
res = []
|
||||
while True:
|
||||
if node.parent is None:
|
||||
break
|
||||
res.append(node.parent)
|
||||
node = node.parent.node
|
||||
|
||||
return res
|
||||
|
||||
|
||||
def iter_props(node):
|
||||
for p in [p for p in node.values if isinstance(p, str)]:
|
||||
yield p, node.get_value(p)
|
||||
|
||||
|
||||
_const_node_type_names = {
|
||||
bool: 'NameConstant', # should be before int
|
||||
type(None): 'NameConstant',
|
||||
int: 'Num',
|
||||
float: 'Num',
|
||||
complex: 'Num',
|
||||
str: 'Str',
|
||||
bytes: 'Bytes',
|
||||
type(...): 'Ellipsis',
|
||||
}
|
||||
@@ -0,0 +1,129 @@
|
||||
import ast
|
||||
from dataclasses import dataclass
|
||||
|
||||
from cache.FastCache import FastCache
|
||||
|
||||
|
||||
@dataclass
|
||||
class PropDef:
|
||||
"""
|
||||
Helper class when parsing simple expression
|
||||
"""
|
||||
prop: str # name of the property
|
||||
index: object # indexing if any
|
||||
|
||||
|
||||
def ast_to_props(res, _ast, _index):
|
||||
"""
|
||||
|
||||
:param res: list where to put the results
|
||||
:param _ast: ast to parse (must start by an ast.Attribute, ast.Name or ast.subScript)
|
||||
:param _index: index in the array/dictionary or None
|
||||
:return:
|
||||
"""
|
||||
|
||||
def get_index(_slice):
|
||||
if not isinstance(_slice, ast.Index):
|
||||
raise NotImplementedError(f"ast_to_prop: {_slice}")
|
||||
|
||||
if isinstance(_slice.value, ast.Name):
|
||||
return _slice.value.id
|
||||
elif isinstance(_slice.value, ast.Constant):
|
||||
return _slice.value.value
|
||||
else:
|
||||
raise NotImplementedError(f"ast_to_prop: {_slice.value}")
|
||||
|
||||
if isinstance(_ast, ast.Attribute):
|
||||
res.append(PropDef(_ast.attr, _index))
|
||||
ast_to_props(res, _ast.value, None)
|
||||
elif isinstance(_ast, ast.Name):
|
||||
res.append(PropDef(_ast.id, _index))
|
||||
elif isinstance(_ast, ast.Subscript):
|
||||
index = get_index(_ast.slice)
|
||||
ast_to_props(res, _ast.value, index)
|
||||
|
||||
|
||||
class UnreferencedNamesVisitor(ast.NodeVisitor):
|
||||
"""
|
||||
Try to find variables (names) that will be requested by the ast
|
||||
"""
|
||||
|
||||
cache = FastCache()
|
||||
|
||||
def __init__(self, context):
|
||||
self.context = context
|
||||
self.names = set()
|
||||
|
||||
def get_names(self, node):
|
||||
names = UnreferencedNamesVisitor.cache.get(node)
|
||||
if names is not None:
|
||||
return names
|
||||
|
||||
self.visit(node)
|
||||
UnreferencedNamesVisitor.cache.put(node, self.names)
|
||||
return self.names
|
||||
|
||||
def visit_Name(self, node):
|
||||
self.names.add(node.id)
|
||||
|
||||
def visit_For(self, node: ast.For):
|
||||
self.visit_selected(node, ["body", "orelse"])
|
||||
|
||||
def visit_selected(self, node, to_visit):
|
||||
"""Called if no explicit visitor function exists for a node."""
|
||||
for field in to_visit:
|
||||
value = getattr(node, field)
|
||||
if isinstance(value, list):
|
||||
for item in value:
|
||||
if isinstance(item, ast.AST):
|
||||
self.visit(item)
|
||||
elif isinstance(value, ast.AST):
|
||||
self.visit(value)
|
||||
|
||||
|
||||
class UnreferencedVariablesVisitor(UnreferencedNamesVisitor):
|
||||
"""
|
||||
Try to find variables (names) that will be requested by the ast
|
||||
"""
|
||||
|
||||
def visit_Call(self, node: ast.Call):
|
||||
self.visit_selected(node, ["args"])
|
||||
|
||||
|
||||
class NamesWithAttributesVisitor(ast.NodeVisitor):
|
||||
|
||||
def __init__(self):
|
||||
self.sequences = []
|
||||
self.temp = []
|
||||
self.to_lookup = None
|
||||
|
||||
def get_sequences(self, ast_, to_lookup):
|
||||
self.to_lookup = to_lookup
|
||||
self.visit(ast_)
|
||||
return self.sequences
|
||||
|
||||
def visit_Attribute(self, node: ast.Attribute):
|
||||
self.temp.append(node.attr)
|
||||
if isinstance(node.value, ast.Attribute):
|
||||
self.visit_Attribute(node.value)
|
||||
if isinstance(node.value, ast.Subscript):
|
||||
self.visit_Subscript(node.value)
|
||||
elif isinstance(node.value, ast.Name):
|
||||
self.visit_Name(node.value)
|
||||
|
||||
def visit_Subscript(self, node: ast.Subscript):
|
||||
# TODO manage the index when it will be needed
|
||||
# using node.slice
|
||||
if isinstance(node.value, ast.Attribute):
|
||||
self.visit_Attribute(node.value)
|
||||
if isinstance(node.value, ast.Subscript):
|
||||
self.visit_Subscript(node.value)
|
||||
elif isinstance(node.value, ast.Name):
|
||||
self.visit_Name(node.value)
|
||||
|
||||
def visit_Name(self, node: ast.Name):
|
||||
if node.id == self.to_lookup:
|
||||
self.temp.append(node.id)
|
||||
self.temp.reverse()
|
||||
self.sequences.append(self.temp.copy())
|
||||
self.temp.clear()
|
||||
+289
-303
@@ -1,9 +1,8 @@
|
||||
from enum import Enum
|
||||
|
||||
from core.concept import Concept, ConceptParts
|
||||
from core.error import ErrorObj
|
||||
|
||||
|
||||
class BuiltinConcepts(Enum):
|
||||
class BuiltinConcepts:
|
||||
"""
|
||||
List of builtin concepts that do no need any specific implementation
|
||||
Please note that the value of the enum is informal. It is not used in the system
|
||||
@@ -13,116 +12,102 @@ class BuiltinConcepts(Enum):
|
||||
|
||||
The values of the enum is not used the code
|
||||
"""
|
||||
SHEERKA = "sheerka"
|
||||
SHEERKA = "__SHEERKA"
|
||||
|
||||
# processing instructions during sheerka.execute() or sheerka.evaluate_concept()
|
||||
# The instruction may alter how the actions work
|
||||
DEBUG = "debug" # activate all debug information
|
||||
EVAL_BODY_REQUESTED = "eval body" # to evaluate the body
|
||||
EVAL_WHERE_REQUESTED = "eval where" # to evaluate the where clause
|
||||
RETURN_BODY_REQUESTED = "return body" # returns the body of the concept instead of the concept itself
|
||||
REDUCE_REQUESTED = "reduce" # remove meaningless error when possible
|
||||
EVAL_UNTIL_SUCCESS_REQUESTED = "eval until success" # PythonEvaluator tries combination until True is found
|
||||
EVAL_QUESTION_REQUESTED = "question" # the user input must be treated as question
|
||||
# The instructions may alter how the actions work
|
||||
DEBUG = "__DEBUG" # activate all debug information
|
||||
EVAL_BODY_REQUESTED = "__EVAL_BODY_REQUESTED" # to evaluate the body
|
||||
EVAL_WHERE_REQUESTED = "__EVAL_WHERE_REQUESTED" # to evaluate the where clause
|
||||
RETURN_BODY_REQUESTED = "__RETURN_BODY_REQUESTED" # returns the body of the concept instead of the concept itself
|
||||
REDUCE_REQUESTED = "__REDUCE_REQUESTED" # remove meaningless error when possible
|
||||
EVAL_UNTIL_SUCCESS_REQUESTED = "__EVAL_UNTIL_SUCCESS_REQUESTED" # PythonEvaluator tries combination until True is found
|
||||
EVAL_QUESTION_REQUESTED = "__EVAL_QUESTION_REQUESTED" # the user input must be treated as question
|
||||
|
||||
# possible actions during sheerka.execute()
|
||||
INIT_SHEERKA = "init sheerka" #
|
||||
PROCESS_INPUT = "process input" # Processing user input or other input
|
||||
PROCESSING = "processing input" # Processing user input or other input
|
||||
BEFORE_PARSING = "before parsing" # activated before evaluation by the parsers
|
||||
PARSING = "parsing" # activated during the parsing. It contains the text to parse
|
||||
AFTER_PARSING = "after parsing" # after parsing
|
||||
BEFORE_EVALUATION = "before evaluation" # before evaluation
|
||||
EVALUATION = "evaluation" # activated when the parsing process seems to be finished
|
||||
AFTER_EVALUATION = "after evaluation" # activated when the parsing process seems to be finished
|
||||
BEFORE_RENDERING = "before rendering" # activate before the output is rendered
|
||||
RENDERING = "rendering" # rendering the response from sheerka
|
||||
AFTER_RENDERING = "after rendering" # rendering the response from sheerka
|
||||
EVALUATE_SOURCE = "evaluate source" #
|
||||
EVALUATE_CONCEPT = "evaluate concept" # a concept will be evaluated
|
||||
EVALUATING_CONCEPT = "evaluating concept" # a concept will be evaluated
|
||||
EVALUATING_ATTRIBUTE = "evaluating concept attribute" #
|
||||
VALIDATE_CONCEPT = "validate concept"
|
||||
VALIDATING_CONCEPT = "validating concept"
|
||||
INIT_COMPILED = "initializing concept compiled"
|
||||
INIT_BNF = "initialize bnf"
|
||||
MANAGE_INFINITE_RECURSION = "manage infinite recursion"
|
||||
PARSE_CODE = "execute source code"
|
||||
EXEC_CODE = "execute source code" # to use when executing Python or other language compiled code
|
||||
TESTING = "testing"
|
||||
# possible actions during sheerka.execute() or sheerka.evaluate_rules()
|
||||
INIT_SHEERKA = "__INIT_SHEERKA" #
|
||||
PROCESS_INPUT = "__PROCESS_INPUT" # Processing user input or other input
|
||||
PROCESSING = "__PROCESSING" # Processing user input or other input
|
||||
BEFORE_PARSING = "__BEFORE_PARSING" # activated before evaluation by the parsers
|
||||
PARSING = "__PARSING" # activated during the parsing. It contains the text to parse
|
||||
AFTER_PARSING = "__AFTER_PARSING" # after parsing
|
||||
BEFORE_EVALUATION = "__BEFORE_EVALUATION" # before evaluation
|
||||
EVALUATION = "__EVALUATION" # activated when the parsing process seems to be finished
|
||||
AFTER_EVALUATION = "__AFTER_EVALUATION" # activated when the parsing process seems to be finished
|
||||
BEFORE_RENDERING = "__BEFORE_RENDERING" # activate before the output is rendered
|
||||
RENDERING = "__RENDERING" # rendering the response from sheerka
|
||||
AFTER_RENDERING = "__AFTER_RENDERING" # rendering the response from sheerka
|
||||
EVALUATE_SOURCE = "__EVALUATE_SOURCE" #
|
||||
EVALUATE_CONCEPT = "__EVALUATE_CONCEPT" # a concept will be evaluated
|
||||
EVALUATING_CONCEPT = "__EVALUATING_CONCEPT" # a concept will be evaluated
|
||||
EVALUATING_ATTRIBUTE = "__EVALUATING_ATTRIBUTE" #
|
||||
VALIDATE_CONCEPT = "__VALIDATE_CONCEPT"
|
||||
VALIDATING_CONCEPT = "__VALIDATING_CONCEPT"
|
||||
INIT_COMPILED = "__INIT_COMPILED"
|
||||
INIT_BNF = "__INIT_BNF"
|
||||
MANAGE_INFINITE_RECURSION = "__MANAGE_INFINITE_RECURSION"
|
||||
PARSE_CODE = "__PARSE_CODE"
|
||||
EXEC_CODE = "__EXEC_CODE" # to use when executing Python or other language compiled code
|
||||
TESTING = "__TESTING"
|
||||
EVALUATOR_PRE_PROCESS = "__EVALUATOR_PRE_PROCESS" # used modify / tweak behaviour of evaluators
|
||||
EVALUATING_RULES = "__EVALUATING_RULES"
|
||||
|
||||
# builtin attributes
|
||||
ISA = "is a" # when a concept is an instance of another one
|
||||
HASA = "has a" # when a concept has/owns another concept
|
||||
AUTO_EVAL = "auto eval" # when the concept must be auto evaluated
|
||||
ISA = "__ISA" # when a concept is an instance of another one
|
||||
HASA = "__HASA" # when a concept has/owns another concept
|
||||
AUTO_EVAL = "__AUTO_EVAL" # when the concept must be auto evaluated
|
||||
|
||||
# object
|
||||
USER_INPUT = "user input concept" # represent an input from an user
|
||||
SUCCESS = "success concept"
|
||||
ERROR = "error concept"
|
||||
UNKNOWN_CONCEPT = "unknown concept" # the request concept is not recognized
|
||||
CANNOT_RESOLVE_CONCEPT = "cannot resolve concept" # when too many concepts with the same name
|
||||
RETURN_VALUE = "return value concept" # a value is returned
|
||||
CONCEPT_TOO_LONG = "concept too long concept" # concept cannot be processed by exactConcept parser
|
||||
NEW_CONCEPT = "new concept" # when a new concept is added
|
||||
UNKNOWN_PROPERTY = "unknown property" # when requesting for a unknown property
|
||||
PARSER_RESULT = "parser result"
|
||||
TOO_MANY_SUCCESS = "too many success" # when expecting a limited number of successful return value
|
||||
TOO_MANY_ERRORS = "too many errors" # when expecting a limited number of successful return value
|
||||
ONLY_SUCCESSFUL = "only successful" # filter the result, only keep successful ones
|
||||
MULTIPLE_ERRORS = "multiple errors" # filter the result, only keep evaluator in error
|
||||
NOT_FOR_ME = "not for me" # a parser recognize that the entry is not meant for it
|
||||
IS_EMPTY = "is empty" # when a set is empty
|
||||
NO_RESULT = "no result" # no return value returned
|
||||
INVALID_RETURN_VALUE = "invalid return value" # the return value of an evaluator is not correct
|
||||
CONCEPT_ALREADY_DEFINED = "concept already defined" # when you try to add the same object twice (a concept or whatever)
|
||||
PROPERTY_ALREADY_DEFINED = "property already defined" # When you try to add the same element in a property
|
||||
NOP = "no operation" # no operation concept. Does nothing
|
||||
CONCEPT_EVAL_ERROR = "concept evaluation error" # cannot evaluate a property or metadata of a concept
|
||||
ENUMERATION = "enum" # represents a list or a set
|
||||
LIST = "list" # represents a list
|
||||
FILTERED = "filtered" # represents the result of a filtering
|
||||
CONCEPT_ALREADY_IN_SET = "concept already in set"
|
||||
EVALUATOR_PRE_PROCESS = "evaluator pre process" # used modify / tweak behaviour of evaluators
|
||||
NOT_A_SET = "not a set" # the concept has no entry in sets
|
||||
CONDITION_FAILED = "where clause failed" # failed to validate where clause during evaluation
|
||||
CHICKEN_AND_EGG = "chicken and egg" # infinite recursion when declaring concept
|
||||
EXPLANATION = "explanation"
|
||||
PRECEDENCE = "precedence" # use to set priority among concepts when parsing
|
||||
ASSOCIATIVITY = "associativity" # use to set priority among concepts when parsing
|
||||
NOT_INITIALIZED = "not initialized"
|
||||
NOT_FOUND = "not found" # when the wanted resource is not found
|
||||
FORMAT_INSTRUCTIONS = "format instructions" # to express how to print the concept
|
||||
NOT_IMPLEMENTED = "not implemented" # instead of raise an error
|
||||
PYTHON_SECURITY_ERROR = "security error" # when trying to execute statement when only expression is allowed
|
||||
INVALID_LESSER_OPERATION = "Invalid lesser operation"
|
||||
INVALID_GREATEST_OPERATION = "Invalid greatest operation"
|
||||
USER_INPUT = "__USER_INPUT" # represent an input from an user
|
||||
SUCCESS = "__SUCCESS"
|
||||
ERROR = "__ERROR"
|
||||
UNKNOWN_CONCEPT = "__UNKNOWN_CONCEPT" # the request concept is not recognized
|
||||
CANNOT_RESOLVE_CONCEPT = "__CANNOT_RESOLVE_CONCEPT" # when too many concepts with the same name
|
||||
RETURN_VALUE = "__RETURN_VALUE" # a value is returned
|
||||
CONCEPT_TOO_LONG = "__CONCEPT_TOO_LONG" # concept cannot be processed by exactConcept parser
|
||||
NEW_CONCEPT = "__NEW_CONCEPT" # when a new concept is added
|
||||
UNKNOWN_PROPERTY = "__UNKNOWN_PROPERTY" # when requesting for a unknown property
|
||||
PARSER_RESULT = "__PARSER_RESULT"
|
||||
TOO_MANY_SUCCESS = "__TOO_MANY_SUCCESS" # when expecting a limited number of successful return value
|
||||
TOO_MANY_ERRORS = "__TOO_MANY_ERRORS" # when expecting a limited number of successful return value
|
||||
ONLY_SUCCESSFUL = "__ONLY_SUCCESSFUL" # filter the result, only keep successful ones
|
||||
MULTIPLE_ERRORS = "__MULTIPLE_ERRORS" # filter the result, only keep evaluator in error
|
||||
NOT_FOR_ME = "__NOT_FOR_ME" # a parser recognize that the entry is not meant for it
|
||||
IS_EMPTY = "__IS_EMPTY" # when a set is empty
|
||||
NO_RESULT = "__NO_RESULT" # no return value returned
|
||||
INVALID_RETURN_VALUE = "__INVALID_RETURN_VALUE" # the return value of an evaluator is not correct
|
||||
CONCEPT_ALREADY_DEFINED = "__CONCEPT_ALREADY_DEFINED" # when you try to add the same object twice (a concept or whatever)
|
||||
PROPERTY_ALREADY_DEFINED = "__PROPERTY_ALREADY_DEFINED" # When you try to add the same element in a property
|
||||
NOP = "__NOP" # no operation concept. Does nothing
|
||||
CONCEPT_EVAL_ERROR = "__CONCEPT_EVAL_ERROR" # cannot evaluate a property or metadata of a concept
|
||||
ENUMERATION = "__ENUMERATION" # represents a list or a set
|
||||
LIST = "__LIST" # represents a list
|
||||
FILTERED = "__FILTERED" # represents the result of a filtering
|
||||
CONCEPT_ALREADY_IN_SET = "__CONCEPT_ALREADY_IN_SET"
|
||||
NOT_A_SET = "__NOT_A_SET" # the concept has no entry in sets
|
||||
CONDITION_FAILED = "__CONDITION_FAILED" # failed to validate where clause during evaluation
|
||||
CHICKEN_AND_EGG = "__CHICKEN_AND_EGG" # infinite recursion when declaring concept
|
||||
EXPLANATION = "__EXPLANATION"
|
||||
PRECEDENCE = "__PRECEDENCE" # use to set priority among concepts when parsing
|
||||
ASSOCIATIVITY = "__ASSOCIATIVITY" # use to set priority among concepts when parsing
|
||||
NOT_FOUND = "__NOT_FOUND" # when the wanted resource is not found
|
||||
FORMAT_INSTRUCTIONS = "__FORMAT_INSTRUCTIONS" # to express how to print the concept
|
||||
NOT_IMPLEMENTED = "__NOT_IMPLEMENTED" # instead of raise an error
|
||||
PYTHON_SECURITY_ERROR = "__PYTHON_SECURITY_ERROR" # when trying to execute statement when only expression is allowed
|
||||
INVALID_LESSER_OPERATION = "__INVALID_LESSER_OPERATION"
|
||||
INVALID_GREATEST_OPERATION = "__INVALID_GREATEST_OPERATION"
|
||||
NEW_RULE = "__NEW_RULE"
|
||||
UNKNOWN_RULE = "__UNKNOWN_RULE"
|
||||
|
||||
NODE = "node"
|
||||
GENERIC_NODE = "generic node"
|
||||
IDENTIFIER_NODE = "identifier node"
|
||||
NODE = "__NODE"
|
||||
GENERIC_NODE = "__GENERIC_NODE"
|
||||
IDENTIFIER_NODE = "__IDENTIFIER_NODE"
|
||||
|
||||
def __repr__(self):
|
||||
return "__" + self.name
|
||||
# formatting
|
||||
TO_LIST = "__TO_LIST"
|
||||
|
||||
def __str__(self):
|
||||
return "__" + self.name
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
|
||||
if isinstance(other, str):
|
||||
return str(self) == other
|
||||
|
||||
if not isinstance(other, BuiltinConcepts):
|
||||
return False
|
||||
|
||||
return self.value == other.value
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.value)
|
||||
|
||||
AllBuiltinConcepts = [v for n, v in BuiltinConcepts.__dict__.items() if not n.startswith("__")]
|
||||
|
||||
BuiltinUnique = [
|
||||
BuiltinConcepts.EVAL_BODY_REQUESTED,
|
||||
@@ -163,7 +148,7 @@ BuiltinUnique = [
|
||||
BuiltinConcepts.INVALID_GREATEST_OPERATION,
|
||||
]
|
||||
|
||||
BuiltinErrors = [str(e) for e in {
|
||||
BuiltinErrors = [
|
||||
BuiltinConcepts.ERROR,
|
||||
BuiltinConcepts.UNKNOWN_CONCEPT,
|
||||
BuiltinConcepts.CANNOT_RESOLVE_CONCEPT,
|
||||
@@ -180,12 +165,18 @@ BuiltinErrors = [str(e) for e in {
|
||||
BuiltinConcepts.NOT_A_SET,
|
||||
BuiltinConcepts.CONDITION_FAILED,
|
||||
BuiltinConcepts.CHICKEN_AND_EGG,
|
||||
BuiltinConcepts.NOT_INITIALIZED,
|
||||
BuiltinConcepts.NOT_FOUND,
|
||||
BuiltinConcepts.INVALID_LESSER_OPERATION,
|
||||
BuiltinConcepts.INVALID_GREATEST_OPERATION,
|
||||
# DO NOT PUT NOT_INITIALIZED. It's not an error
|
||||
}]
|
||||
]
|
||||
|
||||
BuiltinContainers = [
|
||||
BuiltinConcepts.PARSER_RESULT,
|
||||
BuiltinConcepts.ONLY_SUCCESSFUL,
|
||||
BuiltinConcepts.FILTERED,
|
||||
BuiltinConcepts.EXPLANATION,
|
||||
BuiltinConcepts.TO_LIST,
|
||||
]
|
||||
|
||||
"""
|
||||
Some concepts have a specific implementation
|
||||
@@ -194,39 +185,51 @@ It's mainly to ease the usage
|
||||
|
||||
|
||||
class UserInputConcept(Concept):
|
||||
ALL_ATTRIBUTES = ["text", "user_name"]
|
||||
|
||||
def __init__(self, text=None, user_name=None):
|
||||
super().__init__(BuiltinConcepts.USER_INPUT, True, False, BuiltinConcepts.USER_INPUT)
|
||||
self.set_value(ConceptParts.BODY, text)
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.USER_INPUT,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.USER_INPUT, bound_body="text")
|
||||
self.set_value("text", text)
|
||||
self.set_value("user_name", user_name)
|
||||
self.metadata.is_evaluated = True
|
||||
|
||||
@property
|
||||
def text(self):
|
||||
return self.body
|
||||
|
||||
@property
|
||||
def user_name(self):
|
||||
return self.get_value("user_name")
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"({self.id}){self.name}: '{self.body}'"
|
||||
|
||||
|
||||
class ErrorConcept(Concept):
|
||||
def __init__(self, error=None):
|
||||
super().__init__(BuiltinConcepts.ERROR, True, False, BuiltinConcepts.ERROR)
|
||||
self.set_value(ConceptParts.BODY, error)
|
||||
self.metadata.is_evaluated = True
|
||||
class ErrorConcept(Concept, ErrorObj):
|
||||
ALL_ATTRIBUTES = ["error"]
|
||||
|
||||
def __init__(self, error=None, concept_id=None):
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.ERROR,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.ERROR,
|
||||
id=concept_id,
|
||||
bound_body="error")
|
||||
self.set_value("error", error)
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"({self.id}){self.name}: {self.body}"
|
||||
|
||||
|
||||
class UnknownConcept(Concept):
|
||||
def __init__(self, metadata=None):
|
||||
super().__init__(BuiltinConcepts.UNKNOWN_CONCEPT, True, False, BuiltinConcepts.UNKNOWN_CONCEPT)
|
||||
self.set_value(ConceptParts.BODY, metadata)
|
||||
self.metadata.is_evaluated = True
|
||||
class UnknownConcept(Concept, ErrorObj):
|
||||
ALL_ATTRIBUTES = ["concept_ref"]
|
||||
|
||||
def __init__(self, concept_ref=None):
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.UNKNOWN_CONCEPT,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.UNKNOWN_CONCEPT, bound_body="concept_ref")
|
||||
self.set_value("concept_ref", concept_ref)
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"({self.id}){self.name}: {self.body}"
|
||||
@@ -238,67 +241,36 @@ class ReturnValueConcept(Concept):
|
||||
It's the main input for the evaluators
|
||||
"""
|
||||
|
||||
def __init__(self, who=None, status=None, value=None, message=None, parents=None, concept_id=None):
|
||||
super().__init__(BuiltinConcepts.RETURN_VALUE, True, False, BuiltinConcepts.RETURN_VALUE)
|
||||
self.set_value(ConceptParts.BODY, value)
|
||||
ALL_ATTRIBUTES = ["who", "status", "value", "parents", "message"]
|
||||
|
||||
def __init__(self, who=None, status=None, value=None, parents=None, message=None, concept_id=None):
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.RETURN_VALUE,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.RETURN_VALUE,
|
||||
id=concept_id,
|
||||
bound_body="value")
|
||||
self.set_value("who", who)
|
||||
self.set_value("status", status)
|
||||
self.set_value("message", message)
|
||||
self.set_value("value", value)
|
||||
self.set_value("parents", parents)
|
||||
self.metadata.is_evaluated = True
|
||||
self.metadata.id = concept_id
|
||||
|
||||
@property
|
||||
def who(self):
|
||||
return self.get_value("who")
|
||||
|
||||
@who.setter
|
||||
def who(self, value):
|
||||
self.set_value("who", value)
|
||||
|
||||
@property
|
||||
def status(self):
|
||||
return self.get_value("status")
|
||||
|
||||
@status.setter
|
||||
def status(self, value):
|
||||
self.set_value("status", value)
|
||||
|
||||
@property
|
||||
def value(self):
|
||||
return self.body
|
||||
|
||||
@value.setter
|
||||
def value(self, value):
|
||||
self.set_value(ConceptParts.BODY, value)
|
||||
|
||||
@property
|
||||
def message(self):
|
||||
return self.get_value("message")
|
||||
|
||||
@message.setter
|
||||
def message(self, value):
|
||||
self.set_value("message", value)
|
||||
|
||||
@property
|
||||
def parents(self):
|
||||
return self.get_value("parents")
|
||||
|
||||
@parents.setter
|
||||
def parents(self, value):
|
||||
self.set_value("parents", value)
|
||||
self.set_value("message", message)
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"ReturnValue(who={self.who}, status={self.status}, value={self.value}, message={self.message})"
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
|
||||
if not isinstance(other, ReturnValueConcept):
|
||||
return False
|
||||
|
||||
return self.who == other.who and \
|
||||
self.status == other.status and \
|
||||
self.value == other.value and \
|
||||
self.message == other.message
|
||||
self.value == other.value
|
||||
|
||||
def __hash__(self):
|
||||
if hasattr(self.value, "__iter__") and not isinstance(self.value, str):
|
||||
@@ -309,47 +281,52 @@ class ReturnValueConcept(Concept):
|
||||
return hash((self.who, self.status, value_hash))
|
||||
|
||||
|
||||
class UnknownPropertyConcept(Concept):
|
||||
class UnknownPropertyConcept(Concept, ErrorObj):
|
||||
"""
|
||||
This error is raised when, during sheerka.new(), an unknown property is asked
|
||||
"""
|
||||
ALL_ATTRIBUTES = ["property_name", "concept"]
|
||||
|
||||
def __init__(self, property_name=None, concept=None):
|
||||
super().__init__(BuiltinConcepts.UNKNOWN_PROPERTY, True, False, BuiltinConcepts.UNKNOWN_PROPERTY)
|
||||
self.set_value(ConceptParts.BODY, property_name)
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.UNKNOWN_PROPERTY,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.UNKNOWN_PROPERTY,
|
||||
bound_body="property_name")
|
||||
self.set_value("property_name", property_name)
|
||||
self.set_value("concept", concept)
|
||||
self.metadata.is_evaluated = True
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"UnknownProperty(property={self.property_name}, concept={self.concept})"
|
||||
|
||||
@property
|
||||
def concept(self):
|
||||
return self.get_value("concept")
|
||||
|
||||
@property
|
||||
def property_name(self):
|
||||
return self.body
|
||||
|
||||
|
||||
class ParserResultConcept(Concept):
|
||||
"""
|
||||
Result of a parsing
|
||||
"""
|
||||
|
||||
def __init__(self, parser=None, source=None, tokens=None, value=None, try_parsed=None):
|
||||
super().__init__(BuiltinConcepts.PARSER_RESULT, True, False, BuiltinConcepts.PARSER_RESULT)
|
||||
self.set_value(ConceptParts.BODY, value)
|
||||
ALL_ATTRIBUTES = ["parser", "source", "tokens", "value", "try_parsed"]
|
||||
|
||||
def __init__(self, parser=None, source=None, tokens=None, value=None, try_parsed=None, concept_id=None):
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.PARSER_RESULT,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.PARSER_RESULT,
|
||||
id=concept_id,
|
||||
bound_body="value")
|
||||
self.set_value("parser", parser)
|
||||
self.set_value("source", source)
|
||||
self.set_value("tokens", tokens)
|
||||
self.set_value("try_parsed", try_parsed) # in case of error, what was found before the error
|
||||
self.metadata.is_evaluated = True
|
||||
self.set_value("value", value)
|
||||
self.set_value("try_parsed", try_parsed)
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
text = f"ParserResult(parser={self.get_value('parser')}"
|
||||
source = self.get_value('source')
|
||||
text += f", source='{source}')" if source else f", body='{self.body}')"
|
||||
text = f"ParserResult(parser={self.parser}"
|
||||
text += f", source='{self.source}')" if self.source else f", body='{self.value}')"
|
||||
return text
|
||||
|
||||
def __eq__(self, other):
|
||||
@@ -361,27 +338,10 @@ class ParserResultConcept(Concept):
|
||||
|
||||
return self.source == other.source and \
|
||||
self_parser_name == other_parser_name and \
|
||||
self.body == other.body and \
|
||||
self.try_parsed == other.try_parsed
|
||||
self.value == other.value
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.metadata.name)
|
||||
|
||||
@property
|
||||
def value(self):
|
||||
return self.body
|
||||
|
||||
@property
|
||||
def try_parsed(self):
|
||||
return self.get_value("try_parsed")
|
||||
|
||||
@property
|
||||
def source(self):
|
||||
return self.get_value("source")
|
||||
|
||||
@property
|
||||
def parser(self):
|
||||
return self.get_value("parser")
|
||||
return hash(self._metadata.name)
|
||||
|
||||
@staticmethod
|
||||
def get_parser_name(parser):
|
||||
@@ -389,191 +349,217 @@ class ParserResultConcept(Concept):
|
||||
return parser.name if isinstance(parser, BaseParser) else str(parser)
|
||||
|
||||
|
||||
class InvalidReturnValueConcept(Concept):
|
||||
class InvalidReturnValueConcept(Concept, ErrorObj):
|
||||
"""
|
||||
Error returned when an evaluator is not correctly coded
|
||||
The accepted return value are
|
||||
ReturnValueConcept, list of ReturnValueConcept or None
|
||||
"""
|
||||
|
||||
ALL_ATTRIBUTES = ["return_value", "evaluator"]
|
||||
|
||||
def __init__(self, return_value=None, evaluator=None):
|
||||
super().__init__(
|
||||
BuiltinConcepts.INVALID_RETURN_VALUE,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.INVALID_RETURN_VALUE)
|
||||
self.set_value(ConceptParts.BODY, return_value)
|
||||
BuiltinConcepts.INVALID_RETURN_VALUE,
|
||||
bound_body="return_value")
|
||||
self.set_value("return_value", return_value)
|
||||
self.set_value("evaluator", evaluator)
|
||||
self.metadata.is_evaluated = True
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
|
||||
class ConceptEvalError(Concept):
|
||||
class ConceptEvalError(Concept, ErrorObj):
|
||||
ALL_ATTRIBUTES = ["error", "concept", "property_name"]
|
||||
|
||||
def __init__(self, error=None, concept=None, property_name=None):
|
||||
super().__init__(BuiltinConcepts.CONCEPT_EVAL_ERROR,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.CONCEPT_EVAL_ERROR)
|
||||
self.set_value(ConceptParts.BODY, error)
|
||||
BuiltinConcepts.CONCEPT_EVAL_ERROR,
|
||||
bound_body="error")
|
||||
self.set_value("error", error)
|
||||
self.set_value("concept", concept)
|
||||
self.set_value("property_name", property_name)
|
||||
self.metadata.is_evaluated = True
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"ConceptEvalError(error={self.error}, concept={self.concept}, property={self.property_name})"
|
||||
|
||||
@property
|
||||
def error(self):
|
||||
return self.body
|
||||
|
||||
@property
|
||||
def concept(self):
|
||||
return self.get_value("concept")
|
||||
|
||||
@property
|
||||
def property_name(self):
|
||||
return self.get_value("property_name")
|
||||
|
||||
|
||||
class EnumerationConcept(Concept):
|
||||
def __init__(self, iteration=None):
|
||||
super().__init__(BuiltinConcepts.ENUMERATION, True, False, BuiltinConcepts.ENUMERATION)
|
||||
self.set_value(ConceptParts.BODY, iteration)
|
||||
self.metadata.is_evaluated = True
|
||||
|
||||
|
||||
class ListConcept(Concept):
|
||||
ALL_ATTRIBUTES = ["items"]
|
||||
|
||||
def __init__(self, items=None):
|
||||
super().__init__(BuiltinConcepts.LIST, True, False, BuiltinConcepts.LIST)
|
||||
self.set_value(ConceptParts.BODY, items or [])
|
||||
self.metadata.is_evaluated = True
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.LIST,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.LIST,
|
||||
bound_body="items")
|
||||
self.set_value("items", items or [])
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def append(self, obj):
|
||||
self.body.append(obj)
|
||||
|
||||
|
||||
class FilteredConcept(Concept):
|
||||
ALL_ATTRIBUTES = ["filtered", "iterable", "predicate"]
|
||||
|
||||
def __init__(self, filtered=None, iterable=None, predicate=None):
|
||||
super().__init__(BuiltinConcepts.FILTERED, True, False, BuiltinConcepts.FILTERED)
|
||||
self.set_value(ConceptParts.BODY, filtered)
|
||||
self.set_value("iterable", iterable)
|
||||
self.set_value("predicate", predicate)
|
||||
self.metadata.is_evaluated = True
|
||||
|
||||
|
||||
class ConceptAlreadyInSet(Concept):
|
||||
def __init__(self, concept=None, concept_set=None):
|
||||
super().__init__(BuiltinConcepts.CONCEPT_ALREADY_IN_SET,
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.FILTERED,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.CONCEPT_ALREADY_IN_SET)
|
||||
self.set_value(ConceptParts.BODY, concept)
|
||||
BuiltinConcepts.FILTERED,
|
||||
bound_body="filtered")
|
||||
self.set_value("filtered", filtered)
|
||||
self.set_value("iterable", iterable)
|
||||
self.set_value("predicate", predicate)
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
|
||||
class ConceptAlreadyInSet(Concept, ErrorObj):
|
||||
ALL_ATTRIBUTES = ["concept", "concept_set"]
|
||||
|
||||
def __init__(self, concept=None, concept_set=None):
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.CONCEPT_ALREADY_IN_SET,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.CONCEPT_ALREADY_IN_SET,
|
||||
bound_body="concept")
|
||||
self.set_value("concept", concept)
|
||||
self.set_value("concept_set", concept_set)
|
||||
self.metadata.is_evaluated = True
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"ConceptAlreadyInSet(concept={self.concept}, concept_set={self.concept_set})"
|
||||
|
||||
@property
|
||||
def concept(self):
|
||||
return self.body
|
||||
|
||||
@property
|
||||
def concept_set(self):
|
||||
return self.get_value("concept_set")
|
||||
class PropertyAlreadyDefined(Concept, ErrorObj):
|
||||
ALL_ATTRIBUTES = ["property_name", "property_value", "concept"]
|
||||
|
||||
|
||||
class PropertyAlreadyDefined(Concept):
|
||||
def __init__(self, property_name=None, property_value=None, concept=None):
|
||||
super().__init__(BuiltinConcepts.PROPERTY_ALREADY_DEFINED,
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.PROPERTY_ALREADY_DEFINED,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.PROPERTY_ALREADY_DEFINED)
|
||||
self.set_value(ConceptParts.BODY, property_name)
|
||||
BuiltinConcepts.PROPERTY_ALREADY_DEFINED,
|
||||
bound_body="property_name")
|
||||
self.set_value("property_name", property_name)
|
||||
self.set_value("property_value", property_value)
|
||||
self.set_value("concept", concept)
|
||||
self.metadata.is_evaluated = True
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"PropertyAlreadyDefined(property={self.property_name}, value={self.property_value}, concept={self.concept})"
|
||||
|
||||
@property
|
||||
def property_name(self):
|
||||
return self.body
|
||||
|
||||
@property
|
||||
def property_value(self):
|
||||
return self.get_value("property_value")
|
||||
class ConditionFailed(Concept, ErrorObj):
|
||||
ALL_ATTRIBUTES = ["condition", "concept", "prop", "reason"]
|
||||
|
||||
@property
|
||||
def concept(self):
|
||||
return self.get_value("concept")
|
||||
|
||||
|
||||
class ConditionFailed(Concept):
|
||||
def __init__(self, condition=None, concept=None, prop=None):
|
||||
super().__init__(BuiltinConcepts.CONDITION_FAILED,
|
||||
def __init__(self, condition=None, concept=None, prop=None, reason=None):
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.CONDITION_FAILED,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.CONDITION_FAILED)
|
||||
self.set_value(ConceptParts.BODY, condition)
|
||||
BuiltinConcepts.CONDITION_FAILED,
|
||||
bound_body="condition")
|
||||
self.set_value("condition", condition)
|
||||
self.set_value("concept", concept)
|
||||
self.set_value("prop", prop)
|
||||
self.metadata.is_evaluated = True
|
||||
self.set_value("reason", reason)
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"ConditionFailed(condition='{self.body}', concept='{self.concept}', prop='{self.prop}')"
|
||||
|
||||
|
||||
class NotForMeConcept(Concept):
|
||||
class NotForMeConcept(Concept): # Not considered as an error ?
|
||||
ALL_ATTRIBUTES = ["source", "reason"]
|
||||
|
||||
def __init__(self, source=None, reason=None):
|
||||
super().__init__(BuiltinConcepts.NOT_FOR_ME,
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.NOT_FOR_ME,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.NOT_FOR_ME)
|
||||
self.set_value(ConceptParts.BODY, source)
|
||||
BuiltinConcepts.NOT_FOR_ME,
|
||||
bound_body="source")
|
||||
self.set_value("source", source)
|
||||
self.set_value("reason", reason)
|
||||
self.metadata.is_evaluated = True
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
def __repr__(self):
|
||||
return f"NotForMeConcept(source={self.body}, reason={self.get_value('reason')})"
|
||||
|
||||
|
||||
class ExplanationConcept(Concept):
|
||||
ALL_ATTRIBUTES = ["digest", "command", "title", "instructions", "execution_result"]
|
||||
|
||||
def __init__(self, digest=None, command=None, title=None, instructions=None, execution_result=None):
|
||||
super().__init__(BuiltinConcepts.EXPLANATION,
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.EXPLANATION,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.EXPLANATION)
|
||||
BuiltinConcepts.EXPLANATION,
|
||||
bound_body="execution_result")
|
||||
self.set_value("digest", digest) # event digest
|
||||
self.set_value("command", command) # explain command parameters
|
||||
self.set_value("title", title) # a title to the explanation
|
||||
self.set_value("instructions", instructions) # instructions for SheerkaPrint
|
||||
self.set_value(ConceptParts.BODY, execution_result) # list of results
|
||||
self.metadata.is_evaluated = True
|
||||
self.set_value("execution_result", execution_result) # list of results
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
|
||||
class PythonSecurityError(Concept):
|
||||
class PythonSecurityError(Concept, ErrorObj):
|
||||
ALL_ATTRIBUTES = ["prop", "source_code", "source", "line", "column"]
|
||||
|
||||
def __init__(self, prop=None, source_code=None, source=None, line=None, column=None):
|
||||
super().__init__(BuiltinConcepts.PYTHON_SECURITY_ERROR,
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.PYTHON_SECURITY_ERROR,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.PYTHON_SECURITY_ERROR)
|
||||
BuiltinConcepts.PYTHON_SECURITY_ERROR,
|
||||
bound_body="source_code")
|
||||
|
||||
self.set_value("prop", prop) # property or variable that was evaluated
|
||||
self.set_value("source", source) # origin of the source code (eg. file name)
|
||||
self.set_value("line", line) # line number
|
||||
self.set_value("column", column) # column number
|
||||
self.set_value(ConceptParts.BODY, source_code) # code being executed
|
||||
self.metadata.is_evaluated = True
|
||||
self.set_value("source_code", source_code) # code being executed
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
|
||||
class NotFound(Concept):
|
||||
class NotFound(Concept, ErrorObj):
|
||||
ALL_ATTRIBUTES = []
|
||||
|
||||
def __init__(self, body=None):
|
||||
super().__init__(BuiltinConcepts.NOT_FOUND,
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.NOT_FOUND,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.NOT_FOUND)
|
||||
self.set_value(ConceptParts.BODY, body)
|
||||
|
||||
def __repr__(self):
|
||||
return f"({self.metadata.id}){self.metadata.name}, body={self.get_value(ConceptParts.BODY)}"
|
||||
return f"({self._metadata.id}){self._metadata.name}, body={self.get_value(ConceptParts.BODY)}"
|
||||
|
||||
|
||||
class ToListConcept(Concept):
|
||||
ALL_ATTRIBUTES = ["items", "recursion_depth", "recurse_on", "tab"]
|
||||
|
||||
def __init__(self, items=None, recursion_depth=None, recurse_on=None, tab=None):
|
||||
Concept.__init__(self,
|
||||
BuiltinConcepts.TO_LIST,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.TO_LIST,
|
||||
bound_body="items")
|
||||
self.set_value("items", items) # items to display
|
||||
self.set_value("recursion_depth", recursion_depth) # recursion depth when showing children
|
||||
self.set_value("recurse_on", recurse_on) # which sub items should we display
|
||||
self.set_value("tab", tab) # customise tab (content and length)
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
+78
-167
@@ -1,21 +1,18 @@
|
||||
import ast
|
||||
import logging
|
||||
|
||||
import core.ast.nodes
|
||||
from core.ast.nodes import CallNodeConcept
|
||||
from core.ast.visitors import UnreferencedNamesVisitor
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, NotInit, ConceptParts
|
||||
from core.concept import Concept, NotInit, ConceptParts, DEFINITION_TYPE_BNF, concept_part_value
|
||||
from core.rule import Rule
|
||||
from core.sheerka.services.SheerkaExecute import SheerkaExecute
|
||||
from core.tokenizer import Keywords
|
||||
# from evaluators.BaseEvaluator import BaseEvaluator
|
||||
from parsers.BaseNodeParser import SourceCodeNode, ConceptNode, UnrecognizedTokensNode, SourceCodeWithConceptNode
|
||||
from parsers.BaseNodeParser import SourceCodeNode, ConceptNode, UnrecognizedTokensNode, SourceCodeWithConceptNode, \
|
||||
RuleNode
|
||||
from parsers.BaseParser import BaseParser, ErrorNode
|
||||
|
||||
PARSE_STEPS = [BuiltinConcepts.BEFORE_PARSING, BuiltinConcepts.PARSING, BuiltinConcepts.AFTER_PARSING]
|
||||
EVAL_STEPS = PARSE_STEPS + [BuiltinConcepts.BEFORE_EVALUATION, BuiltinConcepts.EVALUATION,
|
||||
BuiltinConcepts.AFTER_EVALUATION]
|
||||
PARSERS = ["EmptyString", "ShortTermMemory", "AtomNode", "BnfNode", "SyaNode", "Python"]
|
||||
PARSERS = ["EmptyString", "ShortTermMemory", "Sequence", "Bnf", "Sya", "Python"]
|
||||
|
||||
|
||||
def is_same_success(context, return_values):
|
||||
@@ -31,7 +28,7 @@ def is_same_success(context, return_values):
|
||||
if not ret_val.status:
|
||||
raise Exception("Status is false")
|
||||
|
||||
if isinstance(ret_val.body, Concept) and not ret_val.body.metadata.is_evaluated:
|
||||
if isinstance(ret_val.body, Concept) and not ret_val.body.get_metadata().is_evaluated:
|
||||
raise Exception("Concept is not evaluated")
|
||||
|
||||
return context.sheerka.objvalue(ret_val)
|
||||
@@ -185,7 +182,7 @@ def resolve_ambiguity(context, concepts):
|
||||
# the concept matches the context
|
||||
by_complexity = {}
|
||||
for c in concepts:
|
||||
by_complexity.setdefault(get_condition_complexity(c, "pre"), []).append(c)
|
||||
by_complexity.setdefault(get_condition_complexity(c, concept_part_value(ConceptParts.PRE)), []).append(c)
|
||||
|
||||
remaining_concepts = []
|
||||
for complexity in sorted(by_complexity.keys(), reverse=True):
|
||||
@@ -193,7 +190,7 @@ def resolve_ambiguity(context, concepts):
|
||||
remaining_concepts.extend(by_complexity[complexity])
|
||||
else:
|
||||
for c in by_complexity[complexity]:
|
||||
evaluated = context.sheerka.evaluate_concept(context, c, metadata=["pre"])
|
||||
evaluated = context.sheerka.evaluate_concept(context, c, metadata=[ConceptParts.PRE])
|
||||
if context.sheerka.is_success(evaluated) or evaluated.key == c.key:
|
||||
remaining_concepts.append(c)
|
||||
|
||||
@@ -208,21 +205,21 @@ def resolve_ambiguity(context, concepts):
|
||||
# when the input is "hello world"
|
||||
by_number_of_vars = {}
|
||||
for c in remaining_concepts:
|
||||
by_number_of_vars.setdefault(len(c.metadata.variables), []).append(c)
|
||||
by_number_of_vars.setdefault(len(c.get_metadata().variables), []).append(c)
|
||||
|
||||
return by_number_of_vars[min(by_number_of_vars.keys())]
|
||||
|
||||
|
||||
def get_condition_complexity(concept, concept_part_str):
|
||||
"""
|
||||
Need to find a proper algorithm to compute the complexity of a concept
|
||||
So far, the concept is considered as complex if it has pre
|
||||
Need to find a proper algorithm to compute the complexity of a concept metadata
|
||||
So far, the concept is considered as complex if it has concept_part_str (so far with concept_part_str='pre')
|
||||
:param concept:
|
||||
:param concept_part_str:
|
||||
:return:
|
||||
"""
|
||||
concept_part_value = getattr(concept.metadata, concept_part_str)
|
||||
if concept_part_value is None or concept_part_value.strip() == 0:
|
||||
value = getattr(concept.get_metadata(), concept_part_str)
|
||||
if value is None or value.strip() == 0:
|
||||
return 0
|
||||
|
||||
return 1 # no real computing as of now
|
||||
@@ -270,7 +267,8 @@ def only_parsers_results(context, return_values):
|
||||
return sheerka.ret(
|
||||
context.who,
|
||||
False,
|
||||
sheerka.new(BuiltinConcepts.TOO_MANY_ERRORS, body=return_values),
|
||||
sheerka.new(BuiltinConcepts.TOO_MANY_ERRORS if len(return_values) > 1 else BuiltinConcepts.ERROR,
|
||||
body=return_values),
|
||||
parents=return_values)
|
||||
|
||||
return sheerka.ret(
|
||||
@@ -291,7 +289,7 @@ def parse_unrecognized(context, source, parsers, who=None, prop=None, filter_fun
|
||||
:param parsers:
|
||||
:param who: who is asking the parsing ?
|
||||
:param prop: Extra info, when parsing a property
|
||||
:param filter_func: filter function to call is provided
|
||||
:param filter_func: Once the result are found, call this function to filter them
|
||||
:return:
|
||||
"""
|
||||
sheerka = context.sheerka
|
||||
@@ -306,11 +304,12 @@ def parse_unrecognized(context, source, parsers, who=None, prop=None, filter_fun
|
||||
with context.push(BuiltinConcepts.PARSING, action_context, who=who, desc=desc) as sub_context:
|
||||
# disable all parsers but the requested ones
|
||||
if parsers != "all":
|
||||
sub_context.add_preprocess(BaseParser.PREFIX + "*", enabled=False)
|
||||
for parser in parsers:
|
||||
sub_context.add_preprocess(BaseParser.PREFIX + parser, enabled=True)
|
||||
sub_context.preprocess_parsers = [BaseParser.PREFIX + parser for parser in parsers]
|
||||
# sub_context.add_preprocess(BaseParser.PREFIX + "*", enabled=False)
|
||||
# for parser in parsers:
|
||||
# sub_context.add_preprocess(BaseParser.PREFIX + parser, enabled=True)
|
||||
|
||||
if prop in (Keywords.WHERE, Keywords.PRE, ConceptParts.WHERE, ConceptParts.PRE):
|
||||
if prop in (Keywords.WHERE, Keywords.PRE, ConceptParts.WHERE, ConceptParts.PRE, Keywords.WHEN):
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_QUESTION_REQUESTED)
|
||||
|
||||
sub_context.add_inputs(source=source)
|
||||
@@ -323,25 +322,7 @@ def parse_unrecognized(context, source, parsers, who=None, prop=None, filter_fun
|
||||
res = filter_func(sub_context, res)
|
||||
|
||||
sub_context.add_values(return_values=res)
|
||||
if not hasattr(res, "__iter__"):
|
||||
return res
|
||||
|
||||
# discard Python response if accepted by AtomNode
|
||||
is_concept = False
|
||||
for r in res:
|
||||
if r.status and r.who == "parsers.AtomNode":
|
||||
is_concept = True
|
||||
|
||||
if not is_concept:
|
||||
return res
|
||||
|
||||
no_python = []
|
||||
for r in res:
|
||||
if r.who == "parsers.Python":
|
||||
continue
|
||||
no_python.append(r)
|
||||
|
||||
return no_python
|
||||
return res
|
||||
|
||||
|
||||
def parse_function(context, source, tokens=None, start=0):
|
||||
@@ -465,7 +446,7 @@ def get_lexer_nodes(return_values, start, tokens):
|
||||
for concept in concepts:
|
||||
lexer_nodes.append([ConceptNode(concept, start, end, tokens, ret_val.body.source)])
|
||||
|
||||
elif ret_val.who in ("parsers.BnfNode", "parsers.SyaNode", "parsers.AtomNode"):
|
||||
elif ret_val.who in ("parsers.Bnf", "parsers.Sya", "parsers.Sequence"):
|
||||
nodes = [node for node in ret_val.body.body]
|
||||
for node in nodes:
|
||||
node.start += start
|
||||
@@ -474,6 +455,12 @@ def get_lexer_nodes(return_values, start, tokens):
|
||||
# but append the whole sequence if when it's a sequence
|
||||
lexer_nodes.append(nodes)
|
||||
|
||||
elif ret_val.who == "parsers.Rule":
|
||||
rules = ret_val.body.body if hasattr(ret_val.body.body, "__iter__") else [ret_val.body.body]
|
||||
end = start + len(tokens) - 1
|
||||
for rule in rules:
|
||||
lexer_nodes.append([RuleNode(rule, start, end, tokens, ret_val.body.source)])
|
||||
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
@@ -488,16 +475,16 @@ def ensure_evaluated(context, concept, eval_body=True):
|
||||
:param eval_body:
|
||||
:return:
|
||||
"""
|
||||
if concept.metadata.is_evaluated:
|
||||
if concept.get_metadata().is_evaluated:
|
||||
return concept
|
||||
|
||||
# do not try to evaluate concept that are not fully initialized
|
||||
for var in concept.metadata.variables:
|
||||
# to code
|
||||
if var[1] is None and \
|
||||
var[0] not in concept.compiled and \
|
||||
(var[0] not in concept.values or concept.get_value(var[0]) == NotInit):
|
||||
return concept
|
||||
if concept.get_metadata().definition_type != DEFINITION_TYPE_BNF:
|
||||
for var in concept.get_metadata().variables:
|
||||
if var[1] is None and \
|
||||
var[0] not in concept.get_compiled() and \
|
||||
(var[0] not in concept.values() or concept.get_value(var[0]) == NotInit):
|
||||
return concept
|
||||
|
||||
evaluated = context.sheerka.evaluate_concept(context, concept, eval_body=eval_body)
|
||||
return evaluated
|
||||
@@ -523,7 +510,7 @@ def get_lexer_nodes_from_unrecognized(context, unrecognized_tokens_node, parsers
|
||||
|
||||
def update_compiled(context, concept, errors, parsers=None):
|
||||
"""
|
||||
recursively iterate thru concept.compiled to replace LexerNode into concepts or list of ReturnValueConcept
|
||||
recursively iterate thru concept.get_compiled() to replace LexerNode into concepts or list of ReturnValueConcept
|
||||
When parsing using a LexerNodeParser (SyaNodeParser, BnfNodeParser...)
|
||||
the result will be a LexerNode.
|
||||
In the specific case of a ConceptNode, the compiled variables will also be LexerNode (UnrecognizedTokensNode...)
|
||||
@@ -534,7 +521,6 @@ def update_compiled(context, concept, errors, parsers=None):
|
||||
:param parsers: to customize the parsers to use
|
||||
:return:
|
||||
"""
|
||||
|
||||
sheerka = context.sheerka
|
||||
parsers = parsers or PARSERS
|
||||
|
||||
@@ -544,7 +530,7 @@ def update_compiled(context, concept, errors, parsers=None):
|
||||
:param c:
|
||||
:return:
|
||||
"""
|
||||
for k, v in c.compiled.items():
|
||||
for k, v in c.get_compiled().items():
|
||||
if isinstance(v, Concept):
|
||||
_validate_concept(v)
|
||||
|
||||
@@ -553,7 +539,7 @@ def update_compiled(context, concept, errors, parsers=None):
|
||||
parser_helper = PythonWithConceptsParser()
|
||||
res = parser_helper.parse_nodes(context, v.get_all_nodes())
|
||||
if res.status:
|
||||
c.compiled[k] = [res]
|
||||
c.get_compiled()[k] = [res]
|
||||
else:
|
||||
errors.append(sheerka.new(BuiltinConcepts.ERROR, body=f"Cannot parse '{v.source}'"))
|
||||
|
||||
@@ -561,7 +547,7 @@ def update_compiled(context, concept, errors, parsers=None):
|
||||
res = parse_unrecognized(context, v.source, parsers)
|
||||
res = only_successful(context, res) # only key successful parsers
|
||||
if res.status:
|
||||
c.compiled[k] = res.body.body
|
||||
c.get_compiled()[k] = res.body.body
|
||||
else:
|
||||
errors.append(sheerka.new(BuiltinConcepts.ERROR, body=f"Cannot parse '{v.source}'"))
|
||||
|
||||
@@ -588,117 +574,12 @@ def update_compiled(context, concept, errors, parsers=None):
|
||||
# and the user has entered 'a plus b'
|
||||
# Chances are that we are talking about the concept itself, and not an instantiation (like '10 plus 2')
|
||||
# This means that 'a' and 'b' don't have any real value
|
||||
if len(concept.metadata.variables) > 0:
|
||||
for name, value in concept.metadata.variables:
|
||||
if _get_source(concept.compiled, name) != name:
|
||||
if len(concept.get_metadata().variables) > 0:
|
||||
for name, value in concept.get_metadata().variables:
|
||||
if _get_source(concept.get_compiled(), name) != name:
|
||||
break
|
||||
else:
|
||||
concept.metadata.is_evaluated = True
|
||||
|
||||
|
||||
def get_names(sheerka, concept_node):
|
||||
"""
|
||||
Finds all the names referenced by the concept_node
|
||||
:param sheerka:
|
||||
:param concept_node:
|
||||
:return:
|
||||
"""
|
||||
unreferenced_names_visitor = UnreferencedNamesVisitor(sheerka)
|
||||
unreferenced_names_visitor.visit(concept_node)
|
||||
return list(unreferenced_names_visitor.names)
|
||||
|
||||
|
||||
def extract_predicates(sheerka, expression, variables_to_include, variables_to_exclude):
|
||||
"""
|
||||
from a given expression and a variable (or list of variables)
|
||||
tries to find out all the predicates referencing the(se) variable(s), and the(se) variable(s) solely
|
||||
for example
|
||||
exp : isinstance(a, int) and isinstance(b, str)
|
||||
will return 'isinstance(a, int)' if variable_name == 'a'
|
||||
:param sheerka:
|
||||
:param expression:
|
||||
:param variables_to_include:
|
||||
:param variables_to_exclude:
|
||||
:return: list of predicates
|
||||
"""
|
||||
|
||||
if len(variables_to_include) == 0:
|
||||
return []
|
||||
|
||||
def _get_predicates(_nodes):
|
||||
_predicates = []
|
||||
for _node in _nodes:
|
||||
python_node = ast.Expression(body=core.ast.nodes.concept_to_python(_node))
|
||||
python_node = ast.fix_missing_locations(python_node)
|
||||
_predicates.append(python_node)
|
||||
return _predicates
|
||||
|
||||
if isinstance(expression, str):
|
||||
node = ast.parse(expression, mode="eval")
|
||||
else:
|
||||
return NotImplementedError()
|
||||
|
||||
concept_node = core.ast.nodes.python_to_concept(node)
|
||||
main_op = concept_node.get_value("body")
|
||||
|
||||
return _get_predicates(_extract_predicates(sheerka, main_op, variables_to_include, variables_to_exclude))
|
||||
|
||||
|
||||
def _extract_predicates(sheerka, node, variables_to_include, variables_to_exclude):
|
||||
predicates = []
|
||||
|
||||
def _matches(_names, to_include, to_exclude):
|
||||
_res = None
|
||||
for n in _names:
|
||||
if n in to_include and _res is None:
|
||||
_res = True
|
||||
if n in to_exclude:
|
||||
_res = False
|
||||
return _res
|
||||
|
||||
if node.node_type == "Compare":
|
||||
if node.get_value("left").node_type == "Name":
|
||||
"""Simple case of one comparison"""
|
||||
comparison_name = sheerka.objvalue(node.get_value("left"))
|
||||
if comparison_name in variables_to_include and comparison_name not in variables_to_exclude:
|
||||
predicates.append(node)
|
||||
else:
|
||||
"""The left part is an expression"""
|
||||
res = _extract_predicates(sheerka, node.get_value("left"), variables_to_include, variables_to_exclude)
|
||||
if len(res) > 0:
|
||||
predicates.append(node)
|
||||
elif node.node_type == "Call":
|
||||
"""Simple case predicate"""
|
||||
call_node = node if isinstance(node, CallNodeConcept) else CallNodeConcept().update_from(node)
|
||||
args = list(call_node.get_args_names(sheerka))
|
||||
if _matches(args, variables_to_include, variables_to_exclude):
|
||||
predicates.append(node)
|
||||
elif node.node_type == "UnaryOp" and node.get_value("op").node_type == "Not":
|
||||
"""Simple case of negation"""
|
||||
res = _extract_predicates(sheerka, node.get_value("operand"), variables_to_include, variables_to_exclude)
|
||||
if len(res) > 0:
|
||||
predicates.append(node)
|
||||
elif node.node_type == "BinOp":
|
||||
names = get_names(sheerka, node)
|
||||
if _matches(names, variables_to_include, variables_to_exclude):
|
||||
predicates.append(node)
|
||||
elif node.node_type == "BoolOp":
|
||||
all_op = True
|
||||
temp_res = []
|
||||
for op in node.get_value("values").body:
|
||||
res = _extract_predicates(sheerka, op, variables_to_include, variables_to_exclude)
|
||||
if len(res) == 0:
|
||||
all_op = False
|
||||
else:
|
||||
temp_res.extend(res)
|
||||
|
||||
if all_op:
|
||||
predicates.append(node)
|
||||
else:
|
||||
for res in temp_res:
|
||||
predicates.append(res)
|
||||
|
||||
return predicates
|
||||
concept.get_metadata().is_evaluated = True
|
||||
|
||||
|
||||
def add_to_ret_val(sheerka, context, return_values, concept_key):
|
||||
@@ -732,8 +613,38 @@ def set_is_evaluated(concepts, check_nb_variables=False):
|
||||
|
||||
if hasattr(concepts, "__iter__"):
|
||||
for c in concepts:
|
||||
if not check_nb_variables or check_nb_variables and len(c.metadata.variables) > 0:
|
||||
c.metadata.is_evaluated = True
|
||||
if not check_nb_variables or check_nb_variables and len(c.get_metadata().variables) > 0:
|
||||
c.get_metadata().is_evaluated = True
|
||||
else:
|
||||
if not check_nb_variables or check_nb_variables and len(concepts.metadata.variables) > 0:
|
||||
concepts.metadata.is_evaluated = True
|
||||
if not check_nb_variables or check_nb_variables and len(concepts.get_metadata().variables) > 0:
|
||||
concepts.get_metadata().is_evaluated = True
|
||||
|
||||
|
||||
def ensure_concept(*concepts):
|
||||
if hasattr(concepts, "__iter__"):
|
||||
for concept in concepts:
|
||||
if not isinstance(concept, Concept):
|
||||
raise TypeError(f"'{concept}' must be a concept")
|
||||
else:
|
||||
if not isinstance(concepts, Concept):
|
||||
raise TypeError(f"'{concepts}' must be a concept")
|
||||
|
||||
|
||||
def ensure_rule(*rules):
|
||||
if hasattr(rules, "__iter__"):
|
||||
for rule in rules:
|
||||
if not isinstance(rule, Rule):
|
||||
raise TypeError(f"'{rule}' must be a rule")
|
||||
else:
|
||||
if not isinstance(rules, Rule):
|
||||
raise TypeError(f"'{rules}' must be a rule")
|
||||
|
||||
|
||||
def ensure_concept_or_rule(*items):
|
||||
if hasattr(items, "__iter__"):
|
||||
for item in items:
|
||||
if not isinstance(item, (Concept, Rule)):
|
||||
raise TypeError(f"'{item}' must be a concept or rule")
|
||||
else:
|
||||
if not isinstance(items, (Concept, Rule)):
|
||||
raise TypeError(f"'{items}' must be a concept or rule")
|
||||
|
||||
+173
-124
@@ -2,11 +2,9 @@ import hashlib
|
||||
from collections import namedtuple
|
||||
from copy import deepcopy
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Union
|
||||
|
||||
import core.utils
|
||||
from core.sheerka_logger import get_logger
|
||||
from core.tokenizer import Tokenizer, TokenKind
|
||||
|
||||
PROPERTIES_FOR_DIGEST = ("name", "key",
|
||||
@@ -28,23 +26,29 @@ class NotInitialized:
|
||||
def __repr__(self):
|
||||
return self.value
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, NotInitialized)
|
||||
|
||||
|
||||
NotInit = NotInitialized()
|
||||
|
||||
|
||||
class ConceptParts(Enum):
|
||||
class ConceptParts:
|
||||
"""
|
||||
Lists metadata that can contains some code
|
||||
"""
|
||||
WHERE = "where"
|
||||
PRE = "pre"
|
||||
POST = "post"
|
||||
BODY = "body"
|
||||
RET = "ret"
|
||||
WHERE = "#where#"
|
||||
PRE = "#pre#"
|
||||
POST = "#post#"
|
||||
BODY = "#body#"
|
||||
RET = "#ret#"
|
||||
|
||||
@staticmethod
|
||||
def get_parts():
|
||||
return set(item.value for item in ConceptParts)
|
||||
|
||||
AllConceptParts = [v for k, v in ConceptParts.__dict__.items() if not k.startswith("__")]
|
||||
|
||||
|
||||
def concept_part_value(c):
|
||||
return c[1:-1]
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -69,6 +73,28 @@ class ConceptMetadata:
|
||||
full_serialization: bool = False # If True, the full object will be serialized, rather than just the diff
|
||||
|
||||
|
||||
ALL_ATTRIBUTES = {}
|
||||
|
||||
|
||||
def get_concept_attrs(concept):
|
||||
if concept.ALL_ATTRIBUTES is not None:
|
||||
return concept.ALL_ATTRIBUTES
|
||||
|
||||
try:
|
||||
return ALL_ATTRIBUTES[concept.id]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
all_attributes = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"]
|
||||
if concept.id:
|
||||
ALL_ATTRIBUTES[concept.id] = all_attributes
|
||||
return all_attributes
|
||||
|
||||
|
||||
def freeze_concept_attrs(concept):
|
||||
ALL_ATTRIBUTES[concept.id] = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"]
|
||||
|
||||
|
||||
class Concept:
|
||||
"""
|
||||
Default concept object
|
||||
@@ -76,6 +102,8 @@ class Concept:
|
||||
Everything is a concept
|
||||
"""
|
||||
|
||||
ALL_ATTRIBUTES = None
|
||||
|
||||
def __init__(self, name=None,
|
||||
is_builtin=False,
|
||||
is_unique=False,
|
||||
@@ -90,13 +118,14 @@ class Concept:
|
||||
desc=None,
|
||||
id=None,
|
||||
props=None,
|
||||
variables=None):
|
||||
variables=None,
|
||||
bound_body=None):
|
||||
|
||||
metadata = ConceptMetadata(
|
||||
str(name) if name else None,
|
||||
name if name else None,
|
||||
is_builtin,
|
||||
is_unique,
|
||||
str(key) if key else None,
|
||||
key if key else None,
|
||||
body,
|
||||
where,
|
||||
pre,
|
||||
@@ -110,17 +139,16 @@ class Concept:
|
||||
variables or []
|
||||
)
|
||||
|
||||
self.metadata = metadata
|
||||
self.compiled = {} # cached ast for the where, pre, post and body parts and variables
|
||||
self.values = {} # resolved values. As compiled, it's used both for metadata and variables
|
||||
self.bnf = None # parsing expression
|
||||
self.log = get_logger("core." + self.__class__.__name__)
|
||||
self.init_log = get_logger("init.core." + self.__class__.__name__)
|
||||
self.original_definition_hash = None # concept hash before any alteration of the metadata
|
||||
self._metadata = metadata
|
||||
self._bound_body = bound_body
|
||||
self._compiled = {} # cached ast for the where, pre, post and body parts and variables
|
||||
self._bnf = None # parsing expression
|
||||
self._original_definition_hash = None # concept hash before any alteration of the metadata
|
||||
self._format = None # how to print the concept
|
||||
|
||||
def __repr__(self):
|
||||
text = f"({self.metadata.id}){self.metadata.name}"
|
||||
return text + " (" + self.metadata.pre + ")" if self.metadata.pre else text
|
||||
text = f"({self._metadata.id}){self._metadata.name}"
|
||||
return text + " (" + self._metadata.pre + ")" if self._metadata.pre else text
|
||||
|
||||
def __eq__(self, other):
|
||||
|
||||
@@ -139,18 +167,18 @@ class Concept:
|
||||
# check the metadata
|
||||
for prop in PROPERTIES_TO_SERIALIZE:
|
||||
# print(prop) # use full to know which id does not match
|
||||
my_value = getattr(self.metadata, prop)
|
||||
other_value = getattr(other.metadata, prop)
|
||||
my_value = getattr(self._metadata, prop)
|
||||
other_value = getattr(other._metadata, prop)
|
||||
if isinstance(my_value, Concept) and isinstance(other_value, Concept):
|
||||
# need to check if circular references
|
||||
if id(self) == id(other):
|
||||
continue
|
||||
|
||||
sub_value = getattr(other_value.metadata, prop)
|
||||
sub_value = getattr(other_value._metadata, prop)
|
||||
while isinstance(sub_value, Concept):
|
||||
if id(self) == id(sub_value):
|
||||
return False # circular reference
|
||||
sub_value = getattr(sub_value.metadata, prop)
|
||||
sub_value = getattr(sub_value._metadata, prop)
|
||||
|
||||
if my_value != other_value:
|
||||
return False
|
||||
@@ -160,26 +188,28 @@ class Concept:
|
||||
return False
|
||||
|
||||
# checks the values
|
||||
if len(self.values) != len(other.values):
|
||||
self_values = self.values()
|
||||
other_values = other.values()
|
||||
if len(self_values) != len(other_values):
|
||||
return False
|
||||
|
||||
for name in self.values:
|
||||
if self.get_value(name) != other.get_value(name):
|
||||
for name, value in self_values.items():
|
||||
if value != other.get_value(name):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.metadata.name)
|
||||
return hash(self._metadata.name)
|
||||
|
||||
def __getattr__(self, item):
|
||||
# I have this complicated implementation because of the usage of Pickle
|
||||
|
||||
if 'values' in vars(self) and item in self.values:
|
||||
return self.get_value(item)
|
||||
|
||||
name = self.name if 'metadata' in vars(self) else 'Concept'
|
||||
raise AttributeError(f"'{name}' concept has no attribute '{item}'")
|
||||
# def __getattr__(self, item):
|
||||
# # I have this complicated implementation because of the usage of Pickle
|
||||
#
|
||||
# if 'values' in vars(self) and item in self.values:
|
||||
# return self.get_value(item)
|
||||
#
|
||||
# name = self.name if 'metadata' in vars(self) else 'Concept'
|
||||
# raise AttributeError(f"'{name}' concept has no attribute '{item}'")
|
||||
|
||||
def def_var(self, var_name, default_value=None):
|
||||
"""
|
||||
@@ -196,7 +226,7 @@ class Concept:
|
||||
# - list of concepts is used by ISA
|
||||
assert default_value is None or isinstance(default_value, str)
|
||||
|
||||
self.metadata.variables.append((var_name, default_value))
|
||||
self._metadata.variables.append((var_name, default_value))
|
||||
|
||||
self.set_value(var_name, NotInit) # do not set the default value
|
||||
# why not setting variables to the default values ?
|
||||
@@ -212,21 +242,40 @@ class Concept:
|
||||
:return:
|
||||
"""
|
||||
assert value is None or isinstance(value, str) # default properties will have to be evaluated
|
||||
var_name = self.metadata.variables[index]
|
||||
self.metadata.variables[index] = (var_name[0], value) # change the default value
|
||||
var_name = self._metadata.variables[index]
|
||||
self._metadata.variables[index] = (var_name[0], value) # change the default value
|
||||
return self
|
||||
|
||||
def get_metadata(self):
|
||||
return self._metadata
|
||||
|
||||
def get_compiled(self):
|
||||
return self._compiled
|
||||
|
||||
def set_compiled(self, compiled):
|
||||
self._compiled = compiled
|
||||
|
||||
def get_bnf(self):
|
||||
return self._bnf
|
||||
|
||||
def set_bnf(self, value):
|
||||
self._bnf = value
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.metadata.name
|
||||
return self._metadata.name
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
return self.metadata.id
|
||||
return self._metadata.id
|
||||
|
||||
@property
|
||||
def str_id(self):
|
||||
return core.utils.str_concept(self)
|
||||
|
||||
@property
|
||||
def key(self):
|
||||
return self.metadata.key
|
||||
return self._metadata.key
|
||||
|
||||
def init_key(self, tokens=None):
|
||||
"""
|
||||
@@ -237,16 +286,16 @@ class Concept:
|
||||
:param tokens:
|
||||
:return:
|
||||
"""
|
||||
if self.metadata.key is not None:
|
||||
if self._metadata.key is not None:
|
||||
return self
|
||||
|
||||
if tokens is None:
|
||||
if self.metadata.definition_type == DEFINITION_TYPE_DEF:
|
||||
tokens = list(Tokenizer(self.metadata.definition))
|
||||
if self._metadata.definition_type == DEFINITION_TYPE_DEF:
|
||||
tokens = list(Tokenizer(self._metadata.definition))
|
||||
else:
|
||||
tokens = list(Tokenizer(self.metadata.name))
|
||||
tokens = list(Tokenizer(self._metadata.name))
|
||||
|
||||
variables = [p[0] for p in self.metadata.variables] if len(core.utils.strip_tokens(tokens, True)) > 1 else []
|
||||
variables = [p[0] for p in self._metadata.variables] if len(core.utils.strip_tokens(tokens, True)) > 1 else []
|
||||
|
||||
key = ""
|
||||
first = True
|
||||
@@ -264,7 +313,7 @@ class Concept:
|
||||
key += token.value.value if token.type == TokenKind.KEYWORD else token.value
|
||||
first = False
|
||||
|
||||
self.metadata.key = key
|
||||
self._metadata.key = key
|
||||
return self
|
||||
|
||||
@property
|
||||
@@ -304,9 +353,9 @@ class Concept:
|
||||
props_as_dict = {}
|
||||
for prop in props_to_use:
|
||||
if prop == "props": # no need to copy variables as the ref won't be used in from_dict
|
||||
props_as_dict[prop] = deepcopy(getattr(self.metadata, prop))
|
||||
props_as_dict[prop] = deepcopy(getattr(self._metadata, prop))
|
||||
else:
|
||||
props_as_dict[prop] = getattr(self.metadata, prop)
|
||||
props_as_dict[prop] = getattr(self._metadata, prop)
|
||||
return props_as_dict
|
||||
|
||||
def from_dict(self, as_dict):
|
||||
@@ -321,7 +370,7 @@ class Concept:
|
||||
for name, value in as_dict[prop]:
|
||||
self.def_var(name, value)
|
||||
else:
|
||||
setattr(self.metadata, prop, as_dict[prop])
|
||||
setattr(self._metadata, prop, as_dict[prop])
|
||||
return self
|
||||
|
||||
def update_from(self, other, update_value=True):
|
||||
@@ -339,16 +388,25 @@ class Concept:
|
||||
if id(other) == id(self):
|
||||
return self
|
||||
|
||||
# update metadata
|
||||
self.from_dict(other.to_dict())
|
||||
for prop in PROPERTIES_TO_SERIALIZE:
|
||||
if prop == "variables":
|
||||
for name, value in other.get_metadata().variables:
|
||||
self.def_var(name, value)
|
||||
elif prop == "props":
|
||||
self._metadata.props = deepcopy(other.get_metadata().props)
|
||||
else:
|
||||
setattr(self._metadata, prop, getattr(other.get_metadata(), prop))
|
||||
|
||||
# # update metadata
|
||||
# self.from_dict(other.to_dict())
|
||||
|
||||
# update values
|
||||
if update_value:
|
||||
for k in other.values:
|
||||
self.set_value(k, other.get_value(k))
|
||||
for k, v in other.values().items():
|
||||
self.set_value(k, v)
|
||||
|
||||
# update bnf definition
|
||||
self.bnf = other.bnf
|
||||
self._bnf = other.get_bnf()
|
||||
|
||||
# origin
|
||||
from sdp.sheerkaSerializer import Serializer
|
||||
@@ -365,10 +423,10 @@ class Concept:
|
||||
:param value:
|
||||
:return:
|
||||
"""
|
||||
if property_name in self.metadata.props:
|
||||
self.metadata.props[property_name].add(value)
|
||||
if property_name in self._metadata.props:
|
||||
self._metadata.props[property_name].add(value)
|
||||
else:
|
||||
self.metadata.props[property_name] = {value} # a set
|
||||
self._metadata.props[property_name] = {value} # a set
|
||||
return self
|
||||
|
||||
def set_prop(self, property_name, value):
|
||||
@@ -379,7 +437,7 @@ class Concept:
|
||||
:param value:
|
||||
:return:
|
||||
"""
|
||||
self.metadata.props[property_name] = value
|
||||
self._metadata.props[property_name] = value
|
||||
|
||||
def get_prop(self, concept_key):
|
||||
"""
|
||||
@@ -387,7 +445,7 @@ class Concept:
|
||||
:param concept_key: name of the behaviour
|
||||
:return:
|
||||
"""
|
||||
return self.metadata.props.get(concept_key, None)
|
||||
return self._metadata.props.get(concept_key, None)
|
||||
|
||||
def set_value(self, name, value):
|
||||
"""
|
||||
@@ -396,10 +454,14 @@ class Concept:
|
||||
:param value:
|
||||
:return:
|
||||
"""
|
||||
if name in self.values:
|
||||
self.values[name].value = value
|
||||
else:
|
||||
self.values[name] = Property(name, value)
|
||||
try:
|
||||
setattr(self, name, value)
|
||||
if name == self._bound_body:
|
||||
setattr(self, ConceptParts.BODY, value)
|
||||
elif self._bound_body and name == ConceptParts.BODY:
|
||||
setattr(self, self._bound_body, value)
|
||||
except AttributeError:
|
||||
print(f"Cannot set {name}")
|
||||
return self
|
||||
|
||||
def get_value(self, prop_name):
|
||||
@@ -408,13 +470,29 @@ class Concept:
|
||||
:param prop_name:
|
||||
:return:
|
||||
"""
|
||||
if prop_name not in self.values:
|
||||
try:
|
||||
return getattr(self, prop_name)
|
||||
except AttributeError:
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
return BuiltinConcepts.NOT_INITIALIZED
|
||||
return self.values[prop_name].value
|
||||
return NotInit
|
||||
|
||||
def values(self):
|
||||
try:
|
||||
values = {k: getattr(self, k) for k in get_concept_attrs(self)}
|
||||
except AttributeError as err:
|
||||
print(f"{err}, {self=}")
|
||||
raise err
|
||||
|
||||
for prop_name in AllConceptParts:
|
||||
try:
|
||||
values[prop_name] = getattr(self, prop_name)
|
||||
except AttributeError:
|
||||
pass
|
||||
return values
|
||||
|
||||
def variables(self):
|
||||
return dict([(k, v) for k, v in self.values.items() if isinstance(k, str)])
|
||||
return {k: v for k, v in self.values().items() if not k[0] == "#"}
|
||||
# return dict([(k, v) for k, v in self.values.items() if isinstance(k, str)])
|
||||
|
||||
def auto_init(self):
|
||||
"""
|
||||
@@ -424,25 +502,25 @@ class Concept:
|
||||
:return:
|
||||
"""
|
||||
|
||||
if self.metadata.is_evaluated:
|
||||
if self._metadata.is_evaluated:
|
||||
return self
|
||||
|
||||
for metadata in ConceptParts:
|
||||
value = getattr(self.metadata, metadata.value)
|
||||
for metadata in AllConceptParts:
|
||||
value = getattr(self._metadata, concept_part_value(metadata))
|
||||
if value is not None:
|
||||
self.set_value(metadata, value)
|
||||
|
||||
for var, value in self.metadata.variables:
|
||||
for var, value in self._metadata.variables:
|
||||
self.set_value(var, value)
|
||||
|
||||
self.metadata.is_evaluated = True
|
||||
self._metadata.is_evaluated = True
|
||||
return self
|
||||
|
||||
def freeze_definition_hash(self):
|
||||
self.original_definition_hash = self.get_definition_hash()
|
||||
self._original_definition_hash = self.get_definition_hash()
|
||||
|
||||
def get_original_definition_hash(self):
|
||||
return self.original_definition_hash
|
||||
return self._original_definition_hash
|
||||
|
||||
def as_bag(self):
|
||||
"""
|
||||
@@ -450,11 +528,8 @@ class Concept:
|
||||
It quicker to implement than creating the actual property mechanism with @property
|
||||
And it removes the visibility from the other attributes/methods
|
||||
"""
|
||||
bag = {}
|
||||
for var in self.values:
|
||||
if isinstance(var, str):
|
||||
bag[var] = self.get_value(var)
|
||||
bag["var." + var] = self.get_value(var)
|
||||
bag = self.variables()
|
||||
|
||||
for prop in ("id", "name", "key", "body"):
|
||||
bag[prop] = getattr(self, prop)
|
||||
return bag
|
||||
@@ -467,29 +542,14 @@ class Concept:
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
self.set_prop(BuiltinConcepts.FORMAT_INSTRUCTIONS, instructions)
|
||||
|
||||
def set_format_instr(self, **kwargs):
|
||||
self._format = kwargs
|
||||
|
||||
class Property:
|
||||
"""
|
||||
Defines the variables of a concept
|
||||
It as its specific class, because from experience,
|
||||
property management is more complex than a key/value pair
|
||||
"""
|
||||
def get_format_instr(self, key):
|
||||
if self._format is None:
|
||||
return None
|
||||
|
||||
def __init__(self, name, value):
|
||||
self.name = name
|
||||
self.value = value
|
||||
|
||||
def __repr__(self):
|
||||
return f"{self.name}={self.value}"
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Property):
|
||||
return False
|
||||
|
||||
return self.name == other.name and self.value == other.value
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.name, self.value))
|
||||
return self._format.get(key, None)
|
||||
|
||||
|
||||
@dataclass()
|
||||
@@ -500,7 +560,7 @@ class DoNotResolve:
|
||||
|
||||
For example, if you want to set a value to the BODY that will not change when
|
||||
when the concept will be evaluated,
|
||||
set concept.compiled[BODY] to DoNotResolve(value)
|
||||
set concept._compiled[BODY] to DoNotResolve(value)
|
||||
"""
|
||||
value: object
|
||||
|
||||
@@ -514,16 +574,6 @@ class InfiniteRecursionResolved:
|
||||
return self.value
|
||||
|
||||
|
||||
def ensure_concept(*concepts):
|
||||
if hasattr(concepts, "__iter__"):
|
||||
for concept in concepts:
|
||||
if not isinstance(concept, Concept):
|
||||
raise TypeError(f"'{concept}' must be a concept")
|
||||
else:
|
||||
if not isinstance(concepts, Concept):
|
||||
raise TypeError(f"'{concepts}' must be a concept")
|
||||
|
||||
|
||||
# ################################
|
||||
#
|
||||
# Class created for tests purpose
|
||||
@@ -563,9 +613,9 @@ class CC:
|
||||
if other.key != self.concept_key:
|
||||
return False
|
||||
if self.exclude_body:
|
||||
to_compare = {k: v for k, v in other.compiled.items() if k != ConceptParts.BODY}
|
||||
to_compare = {k: v for k, v in other.get_compiled().items() if k != ConceptParts.BODY}
|
||||
else:
|
||||
to_compare = other.compiled
|
||||
to_compare = other.get_compiled()
|
||||
if self.compiled == to_compare:
|
||||
return True
|
||||
else:
|
||||
@@ -647,10 +697,9 @@ class CV:
|
||||
self.concept = concept if isinstance(concept, Concept) else None
|
||||
self.values = {}
|
||||
for k, v in kwargs.items():
|
||||
try:
|
||||
concept_part = ConceptParts(k)
|
||||
self.values[concept_part] = v
|
||||
except ValueError:
|
||||
if f"#{k}#" in AllConceptParts:
|
||||
self.values[f"#{k}#"] = v
|
||||
else:
|
||||
self.values[k] = v
|
||||
|
||||
def __eq__(self, other):
|
||||
@@ -694,10 +743,10 @@ class CMV:
|
||||
if other.key != self.concept_key:
|
||||
return False
|
||||
|
||||
if len(other.metadata.variables) != len(self.variables):
|
||||
if len(other._metadata.variables) != len(self.variables):
|
||||
return False
|
||||
|
||||
for name, value in other.metadata.variables:
|
||||
for name, value in other._metadata.variables:
|
||||
if self.variables[name] != value:
|
||||
return False
|
||||
return True
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
class ErrorObj:
|
||||
"""
|
||||
To indicate that somehow, the underlying object is (or has) an error
|
||||
"""
|
||||
pass
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
# events
|
||||
CONCEPT_PRECEDENCE_MODIFIED = "cpm"
|
||||
RULE_PRECEDENCE_MODIFIED = "rpm"
|
||||
CONTEXT_DISPOSED = "cd"
|
||||
|
||||
# comparison context
|
||||
RULE_COMPARISON_CONTEXT = "Rule"
|
||||
CONCEPT_COMPARISON_CONTEXT = "Sya"
|
||||
+28
-6
@@ -6,19 +6,41 @@ import pstats
|
||||
from cProfile import Profile
|
||||
|
||||
|
||||
def profile(sort_args=None, print_args=None):
|
||||
sort_args = sort_args or ['cumulative']
|
||||
print_args = print_args or [20]
|
||||
# sort by
|
||||
# 'calls' : call count
|
||||
# 'cumulative' : cumulative time
|
||||
# 'cumtime' : cumulative time
|
||||
# 'file' : file name
|
||||
# 'filename' : file name
|
||||
# 'module' : file name
|
||||
# 'ncalls' : call count
|
||||
# 'pcalls' : primitive call count
|
||||
# 'line' : line number
|
||||
# 'name' : function name
|
||||
# 'nfl' : name / file / line
|
||||
# 'stdname' : standard name
|
||||
# 'time' : internal time
|
||||
# 'tottime' : internal time
|
||||
|
||||
def profile(sort_args=None, print_args=None, filename=None):
|
||||
sort_args = sort_args or ["cumulative"]
|
||||
print_args = print_args or [2000]
|
||||
profiler = Profile()
|
||||
|
||||
def decorator(fn):
|
||||
def inner(*args, **kwargs):
|
||||
result = None
|
||||
try:
|
||||
result = profiler.runcall(fn, *args, **kwargs)
|
||||
finally:
|
||||
stats = pstats.Stats(profiler)
|
||||
stats.strip_dirs().sort_stats(*sort_args).print_stats(*print_args)
|
||||
if filename:
|
||||
with open(filename + ".txt", "w") as out:
|
||||
stats = pstats.Stats(profiler, stream=out)
|
||||
stats.strip_dirs().sort_stats(*sort_args).print_stats(*print_args)
|
||||
profiler.dump_stats(filename + ".prof")
|
||||
else:
|
||||
stats = pstats.Stats(profiler)
|
||||
stats.strip_dirs().sort_stats(*sort_args).print_stats(*print_args)
|
||||
|
||||
return result
|
||||
|
||||
return inner
|
||||
|
||||
@@ -0,0 +1,84 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Union
|
||||
|
||||
import core.utils
|
||||
|
||||
ACTION_TYPE_PRINT = "print"
|
||||
ACTION_TYPE_EXEC = "exec"
|
||||
ACTION_TYPE_DEFERRED = "deferred"
|
||||
|
||||
|
||||
@dataclass
|
||||
class RuleMetadata:
|
||||
action_type: str # print, exec, deferred
|
||||
name: Union[str, None]
|
||||
predicate: str
|
||||
action: str
|
||||
|
||||
id: str = None
|
||||
is_compiled: bool = False
|
||||
is_enabled: bool = False
|
||||
|
||||
|
||||
class Rule:
|
||||
def __init__(self,
|
||||
action_type=ACTION_TYPE_EXEC,
|
||||
name=None,
|
||||
predicate=None,
|
||||
action=None,
|
||||
priority=None,
|
||||
is_enabled=None):
|
||||
self.metadata = RuleMetadata(action_type, name, predicate, action, is_enabled=is_enabled)
|
||||
self.compiled_predicate = None
|
||||
self.compiled_action = None
|
||||
from core.sheerka.services.SheerkaComparisonManager import SheerkaComparisonManager
|
||||
self.priority = priority if priority is not None else SheerkaComparisonManager.DEFAULT_COMPARISON_VALUE
|
||||
self.error_sink = None
|
||||
|
||||
def __repr__(self):
|
||||
return f"Rule(#{self.metadata.id}, when '{self.metadata.predicate}' {self.metadata.action_type} '{self.metadata.action}', priority={self.priority})"
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(other) == id(self):
|
||||
return True
|
||||
|
||||
if not isinstance(other, Rule):
|
||||
return False
|
||||
|
||||
for p in ["name", "predicate", "action_type", "action", "id"]:
|
||||
if getattr(self.metadata, p) != getattr(other.metadata, p):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.metadata.name,
|
||||
self.metadata.predicate,
|
||||
self.metadata.action_type,
|
||||
self.metadata.action))
|
||||
|
||||
def set_id(self, rule_id):
|
||||
self.metadata.id = rule_id
|
||||
return self
|
||||
|
||||
def to_tuple_id(self):
|
||||
return self.metadata.name, self.id
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
return self.metadata.id
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.metadata.name
|
||||
|
||||
@property
|
||||
def key(self):
|
||||
return self.metadata.name
|
||||
|
||||
@property
|
||||
def str_id(self):
|
||||
return core.utils.str_concept(self, drop_name=True, prefix="r:")
|
||||
|
||||
def short_str(self):
|
||||
return f"Rule(#{self.metadata.id}, '{self.metadata.predicate}', priority={self.priority})"
|
||||
@@ -1,17 +1,26 @@
|
||||
import logging
|
||||
import os
|
||||
import pprint
|
||||
import time
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts, ParserResultConcept
|
||||
from core.concept import Concept
|
||||
from core.concept import Concept, get_concept_attrs
|
||||
from core.global_symbols import CONTEXT_DISPOSED
|
||||
from core.sheerka.services.SheerkaExecute import NO_MATCH
|
||||
from core.sheerka.services.SheerkaMemory import SheerkaMemory
|
||||
from core.sheerka_logger import get_logger
|
||||
from core.utils import CONSOLE_COLORS_MAP as CCM
|
||||
from sdp.sheerkaDataProvider import Event
|
||||
|
||||
try:
|
||||
rows, columns = os.popen('stty size', 'r').read().split()
|
||||
except ValueError:
|
||||
rows, columns = 50, 80
|
||||
|
||||
pp = pprint.PrettyPrinter(indent=2, width=columns)
|
||||
|
||||
DEBUG_TAB_SIZE = 4
|
||||
|
||||
PROPERTIES_TO_SERIALIZE = ("_id",
|
||||
"_bag",
|
||||
"_children",
|
||||
"_start",
|
||||
"_stop",
|
||||
@@ -50,19 +59,17 @@ class ExecutionContext:
|
||||
logger=None,
|
||||
global_hints=None,
|
||||
errors=None,
|
||||
**kwargs):
|
||||
obj=None,
|
||||
concepts=None):
|
||||
|
||||
self._id = ExecutionContext.get_id(event.get_digest()) if event else None
|
||||
self._parent = None
|
||||
self._children = []
|
||||
self._tab = ""
|
||||
self._bag = {} # context variables
|
||||
self._start = 0 # when the execution starts (to measure elapsed time)
|
||||
self._stop = 0 # when the execution stops (to measure elapses time)
|
||||
self._logger = logger
|
||||
self._format_instructions = None # how to print the execution context
|
||||
self._stat_log = get_logger("stats")
|
||||
self._show_stats = False
|
||||
self._push = None
|
||||
|
||||
self.who = who # who is asking
|
||||
self.event = event # what was the (original) trigger
|
||||
@@ -70,6 +77,8 @@ class ExecutionContext:
|
||||
self.action = action
|
||||
self.action_context = action_context
|
||||
self.desc = desc # human description of what is going on
|
||||
self.preprocess_parsers = None
|
||||
self.preprocess_evaluators = None
|
||||
self.preprocess = None
|
||||
self.stm = False # True if the context has short term memory entries
|
||||
|
||||
@@ -80,13 +89,11 @@ class ExecutionContext:
|
||||
|
||||
self.inputs = {} # what were the parameters of the execution context
|
||||
self.values = {} # what was produced by the execution context
|
||||
self.obj = kwargs.pop("obj", None) # current obj we are working on
|
||||
self.obj = obj
|
||||
self.concepts = concepts
|
||||
|
||||
self.concepts = kwargs.pop("concepts", {}) # known concepts specific to this context
|
||||
|
||||
# update the other elements
|
||||
for k, v in kwargs.items():
|
||||
self._bag[k] = v
|
||||
self_debug, self.debug_mode = sheerka.get_context_debug_mode(self.id)
|
||||
self.debug_enabled = self_debug is not None
|
||||
|
||||
@property
|
||||
def elapsed(self):
|
||||
@@ -117,24 +124,19 @@ class ExecutionContext:
|
||||
"""
|
||||
return self._children
|
||||
|
||||
def __getattr__(self, item):
|
||||
if item in self._bag:
|
||||
return self._bag[item]
|
||||
|
||||
raise AttributeError(f"'ExecutionContext' object has no attribute '{item}'")
|
||||
|
||||
def __enter__(self):
|
||||
self._start = time.time_ns()
|
||||
self.log_new()
|
||||
# self.log_new()
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
if self._push:
|
||||
return
|
||||
|
||||
if self.stm:
|
||||
self.sheerka.services[SheerkaMemory.NAME].remove_context(self)
|
||||
self.sheerka.publish(self, CONTEXT_DISPOSED)
|
||||
|
||||
self._stop = time.time_ns()
|
||||
if self._show_stats:
|
||||
self._stat_log.debug(f"[{self._id:2}]" + self._tab + "Execution time: " + self.elapsed_str)
|
||||
|
||||
def __repr__(self):
|
||||
msg = f"ExecutionContext(who={self.who}, id={self._id}, action={self.action}, context={self.action_context}"
|
||||
@@ -143,11 +145,6 @@ class ExecutionContext:
|
||||
msg += ")"
|
||||
return msg
|
||||
|
||||
# def __str__(self):
|
||||
# msg = self.desc or "New Context"
|
||||
# msg += f", who={self.who}, id={self.id}"
|
||||
# return msg
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
@@ -168,12 +165,12 @@ class ExecutionContext:
|
||||
|
||||
return True
|
||||
|
||||
def push(self, action: BuiltinConcepts, action_context, who=None, desc=None, logger=None, **kwargs):
|
||||
def push(self, action: BuiltinConcepts, action_context, who=None, desc=None, logger=None, obj=None, concepts=None):
|
||||
if self._push:
|
||||
return self._push
|
||||
|
||||
who = who or self.who
|
||||
logger = logger or self._logger
|
||||
_kwargs = {"obj": self.obj, "concepts": self.concepts}
|
||||
_kwargs.update(self._bag)
|
||||
_kwargs.update(kwargs)
|
||||
new = ExecutionContext(
|
||||
who,
|
||||
self.event,
|
||||
@@ -184,19 +181,40 @@ class ExecutionContext:
|
||||
logger,
|
||||
self.global_hints,
|
||||
self.errors,
|
||||
**_kwargs)
|
||||
obj or self.obj,
|
||||
concepts or self.concepts)
|
||||
new._parent = self
|
||||
new._tab = self._tab + " " * DEBUG_TAB_SIZE
|
||||
new.preprocess = self.preprocess
|
||||
new.preprocess_parsers = self.preprocess_parsers
|
||||
new.preprocess_evaluators = self.preprocess_evaluators
|
||||
new.protected_hints.update(self.protected_hints)
|
||||
|
||||
if new.debug_mode is None and self.debug_mode == "protected":
|
||||
new.debug_mode = "protected"
|
||||
new.debug_enabled = True
|
||||
|
||||
self._children.append(new)
|
||||
|
||||
return new
|
||||
|
||||
def deactivate_push(self):
|
||||
self._push = self.push(BuiltinConcepts.NOP, None)
|
||||
self._push._push = self._push
|
||||
if self.stm:
|
||||
bag = self.sheerka.services[SheerkaMemory.NAME].get_all_short_term_memory(self)
|
||||
self.sheerka.add_many_to_short_term_memory(self._push, bag)
|
||||
|
||||
def activate_push(self):
|
||||
if self._push:
|
||||
if self._push.stm:
|
||||
self.sheerka.publish(self._push, CONTEXT_DISPOSED)
|
||||
self._push._stop = time.time_ns()
|
||||
|
||||
self._push = None
|
||||
|
||||
def add_preprocess(self, name, **kwargs):
|
||||
preprocess = self.sheerka.new(BuiltinConcepts.EVALUATOR_PRE_PROCESS)
|
||||
preprocess.set_value("name", name)
|
||||
preprocess.set_value("preprocess_name", name)
|
||||
for k, v in kwargs.items():
|
||||
preprocess.set_value(k, v)
|
||||
|
||||
@@ -206,13 +224,17 @@ class ExecutionContext:
|
||||
return self
|
||||
|
||||
def add_inputs(self, **kwargs):
|
||||
for k, v in kwargs.items():
|
||||
self.inputs[k] = v
|
||||
if self._push:
|
||||
return
|
||||
|
||||
self.inputs.update(kwargs)
|
||||
return self
|
||||
|
||||
def add_values(self, **kwargs):
|
||||
for k, v in kwargs.items():
|
||||
self.values[k] = v
|
||||
if self._push:
|
||||
return
|
||||
|
||||
self.values.update(kwargs)
|
||||
return self
|
||||
|
||||
def add_to_short_term_memory(self, key, concept):
|
||||
@@ -224,6 +246,9 @@ class ExecutionContext:
|
||||
"""
|
||||
self.sheerka.add_to_short_term_memory(self, key, concept)
|
||||
|
||||
def clear_short_term_memory(self):
|
||||
self.sheerka.clear_short_term_memory(self)
|
||||
|
||||
def get_from_short_term_memory(self, key):
|
||||
"""
|
||||
|
||||
@@ -237,11 +262,10 @@ class ExecutionContext:
|
||||
if isinstance(self.obj, Concept):
|
||||
if self.obj.key == key:
|
||||
return self.obj
|
||||
for var_name in self.obj.values:
|
||||
if var_name == key:
|
||||
value = self.obj.get_value(var_name)
|
||||
if isinstance(value, Concept):
|
||||
return value
|
||||
if key in get_concept_attrs(self.obj):
|
||||
value = self.obj.get_value(key)
|
||||
if isinstance(value, Concept):
|
||||
return value
|
||||
|
||||
# search in concepts
|
||||
if self.concepts:
|
||||
@@ -296,8 +320,34 @@ class ExecutionContext:
|
||||
to_str = self.return_value_to_str(r)
|
||||
self._logger.debug(f"[{self._id:2}]" + self._tab + "-> " + to_str)
|
||||
|
||||
def debug(self, text):
|
||||
print(text)
|
||||
def get_debugger(self, who, method_name):
|
||||
return self.sheerka.get_debugger(self, who, method_name)
|
||||
|
||||
def debug(self, who, method_name, variable_name, text, is_error=False):
|
||||
activated = self.sheerka.debug_activated_for(who)
|
||||
if activated:
|
||||
str_text = pp.pformat(text)
|
||||
color = 'red' if is_error else 'green'
|
||||
if "\n" not in str(str_text):
|
||||
self.sheerka.debug(
|
||||
f"[{self._id:3}] {CCM[color]}{who}.{method_name}.{variable_name}: {CCM['reset']}{str_text}")
|
||||
else:
|
||||
self.sheerka.debug(f"[{self._id:3}] {CCM[color]}{who}.{method_name}.{variable_name}: {CCM['reset']}")
|
||||
self.sheerka.debug(str_text)
|
||||
|
||||
def debug_entering(self, who, method_name, **kwargs):
|
||||
if self.sheerka.debug_activated_for(who):
|
||||
str_text = pp.pformat(kwargs)
|
||||
if "\n" not in str(str_text):
|
||||
self.sheerka.debug(
|
||||
f"[{self._id:3}] {CCM['blue']}Entering {who}.{method_name} with {CCM['reset']}{str_text}")
|
||||
else:
|
||||
self.sheerka.debug(f"[{self._id:3}] {CCM['blue']}Entering {who}.{method_name}:{CCM['reset']}")
|
||||
self.sheerka.debug(f"[{self._id:3}] {str_text}")
|
||||
|
||||
def debug_log(self, who, text):
|
||||
if self.sheerka.debug_activated_for(who):
|
||||
self.sheerka.debug(f"[{self._id:3}] {CCM['blue']}{text}{CCM['reset']}")
|
||||
|
||||
def get_parent(self):
|
||||
return self._parent
|
||||
@@ -384,9 +434,6 @@ class ExecutionContext:
|
||||
And it removes the visibility from the other attributes/methods
|
||||
"""
|
||||
bag = {}
|
||||
for k, v in self._bag.items():
|
||||
bag[k] = v
|
||||
bag["bag." + k] = v
|
||||
for prop in ("id", "who", "action", "desc", "obj", "inputs", "values", "concepts"):
|
||||
bag[prop] = getattr(self, prop)
|
||||
bag["context"] = self.action_context
|
||||
@@ -396,6 +443,7 @@ class ExecutionContext:
|
||||
bag["elapsed"] = self.elapsed
|
||||
bag["elapsed_str"] = self.elapsed_str
|
||||
bag["digest"] = self.event.get_digest() if self.event else None
|
||||
bag["_children"] = self._children
|
||||
return bag
|
||||
|
||||
@staticmethod
|
||||
@@ -438,3 +486,17 @@ class ExecutionContext:
|
||||
break
|
||||
|
||||
current = current._parent
|
||||
|
||||
def has_parent(self, context_id):
|
||||
current = self
|
||||
|
||||
while current._parent:
|
||||
current = current._parent
|
||||
if current.id == context_id:
|
||||
return True
|
||||
if current.id < context_id:
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
||||
+167
-81
@@ -11,8 +11,10 @@ from cache.IncCache import IncCache
|
||||
from cache.ListIfNeededCache import ListIfNeededCache
|
||||
from cache.SetCache import SetCache
|
||||
from core.builtin_concepts import BuiltinConcepts, ErrorConcept, ReturnValueConcept, BuiltinErrors, BuiltinUnique, \
|
||||
UnknownConcept
|
||||
from core.concept import Concept, ConceptParts, PROPERTIES_FOR_NEW
|
||||
UnknownConcept, AllBuiltinConcepts
|
||||
from core.concept import Concept, ConceptParts, NotInit, get_concept_attrs
|
||||
from core.error import ErrorObj
|
||||
from core.profiling import profile
|
||||
from core.sheerka.ExecutionContext import ExecutionContext
|
||||
from core.sheerka_logger import console_handler
|
||||
from core.tokenizer import Token, TokenKind
|
||||
@@ -66,15 +68,18 @@ class Sheerka(Concept):
|
||||
MAX_EXECUTION_HISTORY = 100
|
||||
MAX_RETURN_VALUES_HISTORY = 100
|
||||
|
||||
ALL_ATTRIBUTES = []
|
||||
|
||||
def __init__(self, cache_only=False, debug=False, loggers=None):
|
||||
self.init_logging(debug, loggers)
|
||||
self.loggers = loggers
|
||||
|
||||
super().__init__(BuiltinConcepts.SHEERKA, True, True, BuiltinConcepts.SHEERKA)
|
||||
self.log.debug("Starting Sheerka.")
|
||||
# self.log.debug("Starting Sheerka.")
|
||||
|
||||
self.bnp = None # reference to the BaseNodeParser class (to compute first keyword token)
|
||||
self.return_value_concept_id = None
|
||||
self.error_concept_id = None
|
||||
|
||||
# a concept can be instantiated
|
||||
# ex: File is a concept, but File('foo.txt') is an instance
|
||||
@@ -85,7 +90,7 @@ class Sheerka(Concept):
|
||||
# ex: hello => say('hello')
|
||||
self.rules = []
|
||||
|
||||
self.sdp: SheerkaDataProvider = None # SheerkaDataProvider
|
||||
self.sdp: SheerkaDataProvider = None
|
||||
self.cache_manager = CacheManager(cache_only)
|
||||
|
||||
self.services = {} # sheerka plugins
|
||||
@@ -103,6 +108,7 @@ class Sheerka(Concept):
|
||||
self._builtins_classes_cache = None
|
||||
|
||||
self.save_execution_context = True
|
||||
self.enable_process_return_values = False
|
||||
|
||||
self.methods_with_context = {"test_using_context"} # only the names, the method is defined in sheerka_methods
|
||||
self.sheerka_methods = {
|
||||
@@ -171,17 +177,22 @@ class Sheerka(Concept):
|
||||
"""
|
||||
self.sheerka_pipeables[func_name] = SheerkaMethod(function, has_side_effect)
|
||||
|
||||
def initialize(self, root_folder: str = None, save_execution_context=True):
|
||||
def initialize(self, root_folder: str = None, save_execution_context=None, enable_process_return_values=None):
|
||||
"""
|
||||
Starting Sheerka
|
||||
Loads the current configuration
|
||||
Notes that when it's the first time, it also create the needed working folders
|
||||
:param root_folder: root configuration folder
|
||||
:param save_execution_context:
|
||||
:param enable_process_return_values:
|
||||
:return: ReturnValue(Success or Error)
|
||||
"""
|
||||
|
||||
self.save_execution_context = save_execution_context
|
||||
if save_execution_context is not None:
|
||||
self.save_execution_context = save_execution_context
|
||||
|
||||
if enable_process_return_values is not None:
|
||||
self.enable_process_return_values = enable_process_return_values
|
||||
|
||||
try:
|
||||
from sheerkapickle.sheerka_handlers import initialize_pickle_handlers
|
||||
@@ -189,7 +200,10 @@ class Sheerka(Concept):
|
||||
|
||||
self.sdp = SheerkaDataProvider(root_folder, self)
|
||||
self.initialize_caching()
|
||||
self.get_builtin_parsers()
|
||||
self.get_builtin_evaluators()
|
||||
self.initialize_services()
|
||||
self.initialize_builtin_evaluators()
|
||||
|
||||
event = Event("Initializing Sheerka.", user_id=self.name)
|
||||
self.sdp.save_event(event)
|
||||
@@ -198,25 +212,24 @@ class Sheerka(Concept):
|
||||
self,
|
||||
BuiltinConcepts.INIT_SHEERKA,
|
||||
None,
|
||||
desc="Initializing Sheerka.",
|
||||
logger=self.init_log) as exec_context:
|
||||
desc="Initializing Sheerka.") as exec_context:
|
||||
if self.sdp.first_time:
|
||||
self.first_time_initialisation(exec_context)
|
||||
|
||||
self.initialize_builtin_parsers()
|
||||
self.initialize_builtin_evaluators()
|
||||
self.initialize_builtin_concepts()
|
||||
self.initialize_concept_node_parsing(exec_context)
|
||||
res = ReturnValueConcept(self, True, self)
|
||||
|
||||
self.initialize_services_deferred(exec_context, self.sdp.first_time)
|
||||
|
||||
res = ReturnValueConcept(self, True, self)
|
||||
exec_context.add_values(return_values=res)
|
||||
|
||||
if self.cache_manager.is_dirty:
|
||||
self.cache_manager.commit(exec_context)
|
||||
|
||||
if save_execution_context:
|
||||
if self.save_execution_context:
|
||||
self.sdp.save_result(exec_context, is_admin=True)
|
||||
self.init_log.debug(f"Sheerka successfully initialized")
|
||||
# self.init_log.debug(f"Sheerka successfully initialized")
|
||||
|
||||
except IOError as e:
|
||||
res = ReturnValueConcept(self, False, self.get(BuiltinConcepts.ERROR), e)
|
||||
@@ -276,7 +289,7 @@ class Sheerka(Concept):
|
||||
Introspect to find services and bind them
|
||||
:return:
|
||||
"""
|
||||
self.init_log.debug("Initializing services")
|
||||
# self.init_log.debug("Initializing services")
|
||||
|
||||
core.utils.import_module_and_sub_module('core.sheerka.services')
|
||||
base_class = "core.sheerka.services.sheerka_service.BaseService"
|
||||
@@ -286,49 +299,63 @@ class Sheerka(Concept):
|
||||
instance.initialize()
|
||||
self.services[service.NAME] = instance
|
||||
|
||||
def initialize_services_deferred(self, context, is_first_time):
|
||||
"""
|
||||
Initialize part of services that may takes some time or that need the execution context
|
||||
TODO: Create a separate thread for these initialisations as they may take time
|
||||
:return:
|
||||
"""
|
||||
# self.init_log.debug("Initializing services (deferred)")
|
||||
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "initialize_deferred"):
|
||||
service.initialize_deferred(context, is_first_time)
|
||||
|
||||
def first_time_initialisation(self, context):
|
||||
|
||||
self.cache_manager.put(self.CONCEPTS_KEYS_ENTRY, self.USER_CONCEPTS_KEYS, 1000)
|
||||
self.record(context, self.name, "save_execution_context", True)
|
||||
self.record_var(context, self.name, "save_execution_context", True)
|
||||
|
||||
def initialize_builtin_concepts(self):
|
||||
"""
|
||||
Initializes the builtin concepts
|
||||
:return: None
|
||||
"""
|
||||
self.init_log.debug("Initializing builtin concepts")
|
||||
# self.init_log.debug("Initializing builtin concepts")
|
||||
builtins_classes = self.get_builtins_classes_as_dict()
|
||||
|
||||
# this all initialization of the builtins seems to be little bit complicated
|
||||
# why do we need to update it from DB ?
|
||||
for key in BuiltinConcepts:
|
||||
for key in AllBuiltinConcepts:
|
||||
concept = self if key == BuiltinConcepts.SHEERKA \
|
||||
else builtins_classes[str(key)]() if str(key) in builtins_classes \
|
||||
else Concept(key, True, False, key)
|
||||
|
||||
if key in BuiltinUnique:
|
||||
concept.metadata.is_unique = True
|
||||
concept.metadata.is_evaluated = True
|
||||
concept._metadata.is_unique = True
|
||||
concept._metadata.is_evaluated = True
|
||||
|
||||
if not concept.metadata.is_unique and str(key) in builtins_classes:
|
||||
if not concept._metadata.is_unique and str(key) in builtins_classes:
|
||||
self.builtin_cache[key] = builtins_classes[str(key)]
|
||||
|
||||
from_db = self.cache_manager.get(self.CONCEPTS_BY_KEY_ENTRY, concept.metadata.key)
|
||||
from_db = self.cache_manager.get(self.CONCEPTS_BY_KEY_ENTRY, concept._metadata.key)
|
||||
if from_db is None:
|
||||
self.init_log.debug(f"'{concept.name}' concept is not found in db. Adding.")
|
||||
# self.init_log.debug(f"'{concept.name}' concept is not found in db. Adding.")
|
||||
self.set_id_if_needed(concept, True)
|
||||
self.cache_manager.add_concept(concept)
|
||||
|
||||
if key == BuiltinConcepts.RETURN_VALUE:
|
||||
self.return_value_concept_id = concept.id
|
||||
elif key == BuiltinConcepts.ERROR:
|
||||
self.error_concept_id = concept.id
|
||||
|
||||
else:
|
||||
self.init_log.debug(f"Found concept '{from_db}' in db. Updating.")
|
||||
# self.init_log.debug(f"Found concept '{from_db}' in db. Updating.")
|
||||
concept.update_from(from_db)
|
||||
|
||||
return
|
||||
|
||||
def initialize_builtin_parsers(self):
|
||||
def get_builtin_parsers(self):
|
||||
"""
|
||||
Init the parsers
|
||||
:return:
|
||||
@@ -343,7 +370,7 @@ class Sheerka(Concept):
|
||||
continue
|
||||
|
||||
qualified_name = core.utils.get_full_qualified_name(parser)
|
||||
self.init_log.debug(f"Adding builtin parser '{qualified_name}'")
|
||||
# self.init_log.debug(f"Adding builtin parser '{qualified_name}'")
|
||||
temp_result[qualified_name] = parser
|
||||
|
||||
# keep a reference to base_node_parser
|
||||
@@ -361,22 +388,29 @@ class Sheerka(Concept):
|
||||
|
||||
self.parsers[name] = temp_result[name]
|
||||
|
||||
def get_builtin_evaluators(self):
|
||||
"""
|
||||
get all evaluators
|
||||
:return:
|
||||
"""
|
||||
core.utils.import_module_and_sub_module("evaluators")
|
||||
evaluators = core.utils.get_sub_classes("evaluators", "evaluators.BaseEvaluator.OneReturnValueEvaluator")
|
||||
evaluators.extend(core.utils.get_sub_classes("evaluators", "evaluators.BaseEvaluator.AllReturnValuesEvaluator"))
|
||||
|
||||
for evaluator in evaluators:
|
||||
self.evaluators.append(evaluator)
|
||||
|
||||
def initialize_builtin_evaluators(self):
|
||||
"""
|
||||
Init the evaluators
|
||||
:return:
|
||||
"""
|
||||
core.utils.import_module_and_sub_module("evaluators")
|
||||
for evaluator in core.utils.get_sub_classes("evaluators", "evaluators.BaseEvaluator.OneReturnValueEvaluator"):
|
||||
self.init_log.debug(f"Adding builtin evaluator '{evaluator.__name__}'")
|
||||
self.evaluators.append(evaluator)
|
||||
|
||||
for evaluator in core.utils.get_sub_classes("evaluators", "evaluators.BaseEvaluator.AllReturnValuesEvaluator"):
|
||||
self.init_log.debug(f"Adding builtin evaluator '{evaluator.__name__}'")
|
||||
self.evaluators.append(evaluator)
|
||||
for evaluator in self.evaluators:
|
||||
if hasattr(evaluator, "initialize"):
|
||||
evaluator.initialize(self)
|
||||
|
||||
def initialize_concept_node_parsing(self, context):
|
||||
self.init_log.debug("siInitializing concepts by first keyword.")
|
||||
# self.init_log.debug("Initializing concepts by first keyword.")
|
||||
|
||||
concepts_by_first_keyword = self.cache_manager.copy(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY)
|
||||
res = self.bnp.resolve_concepts_by_first_keyword(context, concepts_by_first_keyword)
|
||||
@@ -391,11 +425,16 @@ class Sheerka(Concept):
|
||||
service.initialize()
|
||||
else:
|
||||
self.cache_manager.clear()
|
||||
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "reset"):
|
||||
service.reset()
|
||||
|
||||
self.printer_handler.reset()
|
||||
self.sdp.reset()
|
||||
self.locals = {}
|
||||
|
||||
# @profile()
|
||||
# @profile(filename="profile_80")
|
||||
def evaluate_user_input(self, text: str, user_name="kodjo"):
|
||||
"""
|
||||
Note to KSI: If you try to add execution context to this function,
|
||||
@@ -404,19 +443,20 @@ class Sheerka(Concept):
|
||||
:param user_name:
|
||||
:return:
|
||||
"""
|
||||
self.log.debug(f"Processing user input '{text}', {user_name=}.")
|
||||
# self.log.debug(f"Processing user input '{text}', {user_name=}.")
|
||||
event = Event(text, user_name)
|
||||
evt_digest = self.sdp.save_event(event)
|
||||
self.log.debug(f"{evt_digest=}")
|
||||
self.sdp.save_event(event)
|
||||
|
||||
with ExecutionContext(self.key,
|
||||
event,
|
||||
self,
|
||||
BuiltinConcepts.PROCESS_INPUT,
|
||||
text,
|
||||
desc=f"Evaluating '{text}'",
|
||||
logger=self.log) as execution_context:
|
||||
desc=f"Evaluating '{text}'") as execution_context:
|
||||
|
||||
user_input = self.ret(self.name, True, self.new(BuiltinConcepts.USER_INPUT, body=text, user_name=user_name))
|
||||
|
||||
# TODO. Must be a context hint, not a return value
|
||||
reduce_requested = self.ret(self.name, True, self.new(BuiltinConcepts.REDUCE_REQUESTED))
|
||||
|
||||
ret = self.execute(execution_context, [user_input, reduce_requested], EXECUTE_STEPS)
|
||||
@@ -425,17 +465,21 @@ class Sheerka(Concept):
|
||||
if self.cache_manager.is_dirty:
|
||||
self.cache_manager.commit(execution_context)
|
||||
|
||||
try:
|
||||
if self.save_execution_context and self.load(self.name, "save_execution_context"):
|
||||
# exec_count = ExecutionContext.ids[execution_context.event.get_digest()]
|
||||
# print("Execution Context Count:", exec_count)
|
||||
if self.save_execution_context:
|
||||
try:
|
||||
# if exec_count > 3400:
|
||||
# print("Saving result. digest=", execution_context.event.get_digest())
|
||||
self.sdp.save_result(execution_context)
|
||||
except Exception as ex:
|
||||
self.log.error(f"Failed to save execution context. Reason: {ex}")
|
||||
except Exception as ex:
|
||||
print(f"Failed to save execution context. Reason: {ex}")
|
||||
pass
|
||||
# self.log.error(f"Failed to save execution context. Reason: {ex}")
|
||||
|
||||
# # hack to save valid concept definition
|
||||
# if not self.during_restore:
|
||||
# if len(ret) == 1 and ret[0].status and self.isinstance(ret[0].value, BuiltinConcepts.NEW_CONCEPT):
|
||||
# with open(CONCEPTS_FILE, "a") as f:
|
||||
# f.write(text + "\n")
|
||||
# Do not save execution contexts from process_return_values
|
||||
if self.enable_process_return_values:
|
||||
self.process_return_values(execution_context, ret)
|
||||
|
||||
self.execution_count += 1
|
||||
self._last_execution = execution_context
|
||||
@@ -461,17 +505,16 @@ class Sheerka(Concept):
|
||||
def set_id_if_needed(self, obj: Concept, is_builtin: bool):
|
||||
"""
|
||||
Set the key for the concept if needed
|
||||
For test purpose only !!!!!
|
||||
:param obj:
|
||||
:param is_builtin:
|
||||
:return:
|
||||
"""
|
||||
if obj.metadata.id is not None:
|
||||
if obj._metadata.id is not None:
|
||||
return
|
||||
|
||||
key = self.BUILTIN_CONCEPTS_KEYS if is_builtin else self.USER_CONCEPTS_KEYS
|
||||
obj.metadata.id = str(self.cache_manager.get(self.CONCEPTS_KEYS_ENTRY, key))
|
||||
self.log.debug(f"Setting id '{obj.metadata.id}' to concept '{obj.metadata.name}'.")
|
||||
obj._metadata.id = str(self.cache_manager.get(self.CONCEPTS_KEYS_ENTRY, key))
|
||||
# self.log.debug(f"Setting id '{obj.metadata.id}' to concept '{obj.metadata.name}'.")
|
||||
|
||||
def force_sya_def(self, context, list_of_def):
|
||||
"""
|
||||
@@ -588,9 +631,9 @@ class Sheerka(Concept):
|
||||
# ##############
|
||||
# if the entry is a concept token, use its values.
|
||||
if isinstance(concept, Token):
|
||||
if concept.type != TokenKind.CONCEPT:
|
||||
if concept.type == TokenKind.RULE: # do not recognize rules !!!
|
||||
return None
|
||||
concept = concept.value
|
||||
concept = concept.value # concept is now a tuple
|
||||
|
||||
if isinstance(concept, str) and \
|
||||
concept.startswith("c:") and \
|
||||
@@ -607,7 +650,7 @@ class Sheerka(Concept):
|
||||
if concept[1]:
|
||||
if self.is_known(found := self.get_by_id(concept[1])):
|
||||
instance = self.new_from_template(found, found.key)
|
||||
instance.metadata.is_evaluated = True
|
||||
instance._metadata.is_evaluated = True
|
||||
return instance
|
||||
elif concept[0]:
|
||||
if self.is_known(found := self.get_by_name(concept[0])):
|
||||
@@ -626,6 +669,28 @@ class Sheerka(Concept):
|
||||
|
||||
return None
|
||||
|
||||
def fast_resolve(self, key, return_new=True):
|
||||
def new_instances(concepts):
|
||||
if hasattr(concepts, "__iter__"):
|
||||
return [self.new_from_template(c, c.key) for c in concepts]
|
||||
return self.new_from_template(concepts, concepts.key)
|
||||
|
||||
if isinstance(key, Token):
|
||||
if key.type == TokenKind.RULE: # do not recognize rules !!!
|
||||
return None
|
||||
|
||||
if key.value[1]:
|
||||
concept = self.cache_manager.get(self.CONCEPTS_BY_ID_ENTRY, key.value[1])
|
||||
else:
|
||||
concept = self.cache_manager.get(self.CONCEPTS_BY_NAME_ENTRY, key.value[0])
|
||||
|
||||
else:
|
||||
concept = self.cache_manager.get(self.CONCEPTS_BY_NAME_ENTRY, key)
|
||||
|
||||
if concept is None:
|
||||
return None
|
||||
return new_instances(concept) if return_new else concept
|
||||
|
||||
def has_id(self, concept_id):
|
||||
"""
|
||||
Returns True if a concept with this id exists in cache
|
||||
@@ -694,13 +759,13 @@ class Sheerka(Concept):
|
||||
def new_from_template(self, template, key, **kwargs):
|
||||
# core.utils.my_debug(f"Created {template}, {key=}, {kwargs=}")
|
||||
# manage singleton
|
||||
if template.metadata.is_unique:
|
||||
if template.get_metadata().is_unique:
|
||||
return template
|
||||
|
||||
# otherwise, create another instance
|
||||
concept = self.builtin_cache[key]() if key in self.builtin_cache else Concept()
|
||||
concept.update_from(template, update_value=False)
|
||||
concept.freeze_definition_hash()
|
||||
# concept.freeze_definition_hash()
|
||||
|
||||
if len(kwargs) == 0:
|
||||
return concept
|
||||
@@ -708,17 +773,17 @@ class Sheerka(Concept):
|
||||
# update the properties, values, attributes
|
||||
# Not quite sure that this is the correct process order
|
||||
for k, v in kwargs.items():
|
||||
if k in concept.values:
|
||||
if k in get_concept_attrs(concept):
|
||||
concept.set_value(k, v)
|
||||
elif k in PROPERTIES_FOR_NEW:
|
||||
concept.set_value(ConceptParts(k), v)
|
||||
elif k == "body":
|
||||
concept.set_value(ConceptParts.BODY, v)
|
||||
elif hasattr(concept, k):
|
||||
setattr(concept, k, v)
|
||||
else:
|
||||
return self.new(BuiltinConcepts.UNKNOWN_PROPERTY, body=k, concept=concept)
|
||||
|
||||
# TODO : add the concept to the list of known concepts (self.instances)
|
||||
concept.metadata.is_evaluated = True # because we have manually set the variables
|
||||
concept._metadata.is_evaluated = True # because we have manually set the variables
|
||||
return concept
|
||||
|
||||
def ret(self, who: str, status: bool, value, message=None, parents=None):
|
||||
@@ -732,22 +797,16 @@ class Sheerka(Concept):
|
||||
:return:
|
||||
"""
|
||||
|
||||
# 1 second saved every twenty seconds in unit tests
|
||||
return ReturnValueConcept(
|
||||
who=who,
|
||||
status=status,
|
||||
value=value,
|
||||
message=message,
|
||||
parents=parents,
|
||||
concept_id=self.return_value_concept_id
|
||||
)
|
||||
# return self.new(
|
||||
# BuiltinConcepts.RETURN_VALUE,
|
||||
# who=who,
|
||||
# status=status,
|
||||
# value=value,
|
||||
# message=message,
|
||||
# parents=parents)
|
||||
|
||||
def err(self, body):
|
||||
return ErrorConcept(body, self.error_concept_id)
|
||||
|
||||
def objvalue(self, obj, reduce_simple_list=False):
|
||||
if obj is None:
|
||||
@@ -759,7 +818,7 @@ class Sheerka(Concept):
|
||||
if not isinstance(obj, Concept):
|
||||
return obj
|
||||
|
||||
if obj.body is BuiltinConcepts.NOT_INITIALIZED:
|
||||
if obj.body is NotInit:
|
||||
return obj
|
||||
|
||||
if reduce_simple_list and (isinstance(obj.body, list) or isinstance(obj.body, set)) and len(obj.body) == 1:
|
||||
@@ -796,7 +855,7 @@ class Sheerka(Concept):
|
||||
return self.value_by_concept(obj.body, concept)
|
||||
|
||||
def get_error(self, obj):
|
||||
if isinstance(obj, Concept) and obj.metadata.is_builtin and obj.key in BuiltinErrors:
|
||||
if isinstance(obj, Concept) and obj._metadata.is_builtin and obj.key in BuiltinErrors:
|
||||
return obj
|
||||
|
||||
if isinstance(obj, (list, set, tuple)):
|
||||
@@ -848,9 +907,9 @@ class Sheerka(Concept):
|
||||
def test(self):
|
||||
return f"I have access to Sheerka !"
|
||||
|
||||
def test_using_context(self, context, param1, param2):
|
||||
def test_using_context(self, context, param):
|
||||
event = context.event.get_digest()
|
||||
return f"I have access to Sheerka ! {param1=}, {param2=}, {event=}."
|
||||
return f"I have access to Sheerka ! {param=}, {event=}."
|
||||
|
||||
def test_error(self):
|
||||
raise Exception("I can raise an error")
|
||||
@@ -863,14 +922,17 @@ class Sheerka(Concept):
|
||||
if isinstance(obj, ReturnValueConcept):
|
||||
return obj.status
|
||||
|
||||
if isinstance(obj, ErrorObj):
|
||||
return False
|
||||
|
||||
# other cases ?
|
||||
# ...
|
||||
|
||||
# manage internal errors
|
||||
if isinstance(obj, Concept) and obj.metadata.is_builtin and obj.key in BuiltinErrors:
|
||||
if isinstance(obj, Concept) and obj._metadata.is_builtin and obj.key in BuiltinErrors:
|
||||
return False
|
||||
|
||||
return obj
|
||||
return bool(obj)
|
||||
|
||||
@staticmethod
|
||||
def is_known(obj):
|
||||
@@ -914,9 +976,7 @@ class Sheerka(Concept):
|
||||
|
||||
unknown_concept = UnknownConcept() # don't use new() for prevent circular reference
|
||||
unknown_concept.set_value(ConceptParts.BODY, metadata)
|
||||
for meta in (metadata if isinstance(metadata, list) else [metadata]):
|
||||
unknown_concept.set_value(meta[0], meta[1])
|
||||
unknown_concept.metadata.is_evaluated = True
|
||||
unknown_concept._metadata.is_evaluated = True
|
||||
return unknown_concept
|
||||
|
||||
@staticmethod
|
||||
@@ -924,7 +984,7 @@ class Sheerka(Concept):
|
||||
res = {}
|
||||
for c in core.utils.get_classes("core.builtin_concepts"):
|
||||
if issubclass(c, Concept) and c != Concept:
|
||||
res[c().metadata.key] = c
|
||||
res[c()._metadata.key] = c
|
||||
|
||||
return res
|
||||
|
||||
@@ -967,3 +1027,29 @@ class Sheerka(Concept):
|
||||
logging.addLevelName(logging.ERROR, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.ERROR))
|
||||
# uncomment the following line to enable colors
|
||||
# logging.StreamHandler.emit = add_coloring_to_emit_ansi(logging.StreamHandler.emit)
|
||||
|
||||
|
||||
def to_profile():
|
||||
sheerka = Sheerka()
|
||||
sheerka.initialize(save_execution_context=False, enable_process_return_values=False)
|
||||
event = Event("test", "kodjoko")
|
||||
execution_context = ExecutionContext(sheerka.name,
|
||||
event,
|
||||
sheerka,
|
||||
BuiltinConcepts.PROCESS_INPUT,
|
||||
None)
|
||||
|
||||
profile_push(execution_context)
|
||||
|
||||
|
||||
@profile(filename="profile_push")
|
||||
def profile_push(execution_context):
|
||||
for i in range(177942):
|
||||
execution_context.push(BuiltinConcepts.NOP,
|
||||
{"action": "fake"},
|
||||
execution_context.sheerka.name,
|
||||
desc="a proper description")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
to_profile()
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
import sys
|
||||
import time
|
||||
from os import path
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.builtin_concepts import BuiltinConcepts, BuiltinContainers
|
||||
from core.concept import Concept
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
|
||||
CONCEPTS_FILE_LITE = "_concepts_lite.txt"
|
||||
CONCEPTS_FILE_ALL_CONCEPTS = "_concepts.txt"
|
||||
CONCEPTS_FILE_TO_USE = CONCEPTS_FILE_ALL_CONCEPTS
|
||||
CONCEPTS_FILE_FULL = "_concepts_full.txt"
|
||||
CONCEPTS_FILE_TO_USE = CONCEPTS_FILE_FULL
|
||||
|
||||
|
||||
class SheerkaAdmin(BaseService):
|
||||
@@ -22,6 +24,10 @@ class SheerkaAdmin(BaseService):
|
||||
self.sheerka.bind_service_method(self.concepts, False)
|
||||
self.sheerka.bind_service_method(self.last_created_concept, False)
|
||||
self.sheerka.bind_service_method(self.last_ret, False)
|
||||
self.sheerka.bind_service_method(self.last_error_ret, False)
|
||||
self.sheerka.bind_service_method(self.extended_isinstance, False)
|
||||
self.sheerka.bind_service_method(self.is_container, False)
|
||||
self.sheerka.bind_service_method(self.format_rules, False)
|
||||
|
||||
def caches_names(self):
|
||||
"""
|
||||
@@ -53,18 +59,19 @@ class SheerkaAdmin(BaseService):
|
||||
|
||||
def restore_from_file(file_name):
|
||||
_nb_lines, _nb_instructions, _nb_lines_in_error = 0, 0, 0
|
||||
if not path.exists(file_name):
|
||||
self.sheerka.log.error(f"\u001b[31mFile '{file_name}' is not found !\u001b[0m")
|
||||
file_path = path.join(path.dirname(sys.argv[0]), file_name)
|
||||
if not path.exists(file_path):
|
||||
print(f"\u001b[31mFile '{file_path}' is not found !\u001b[0m")
|
||||
return 0, 0, 1
|
||||
|
||||
with open(file_name, "r") as f:
|
||||
with open(file_path, "r") as f:
|
||||
for line in f.readlines():
|
||||
_nb_lines += 1
|
||||
line = line.strip()
|
||||
|
||||
if line.startswith("#import "):
|
||||
to_import = "_concepts_" + line[8:] + ".txt"
|
||||
self.sheerka.log.info(f"Importing {to_import}")
|
||||
print(f"Importing {to_import}")
|
||||
res = restore_from_file(to_import)
|
||||
_nb_lines += res[0]
|
||||
_nb_instructions += res[1]
|
||||
@@ -74,42 +81,49 @@ class SheerkaAdmin(BaseService):
|
||||
if line == "" or line.startswith("#"):
|
||||
continue
|
||||
|
||||
self.sheerka.log.info(line)
|
||||
print(line)
|
||||
_nb_instructions += 1
|
||||
res = self.sheerka.evaluate_user_input(line)
|
||||
if len(res) > 1 or not res[0].status:
|
||||
_nb_lines_in_error += 1
|
||||
self.sheerka.log.error("\u001b[31mError detected !\u001b[0m")
|
||||
print("\u001b[31mError detected !\u001b[0m")
|
||||
|
||||
return _nb_lines, _nb_instructions, _nb_lines_in_error
|
||||
|
||||
if concept_file == "full":
|
||||
concept_file = CONCEPTS_FILE_ALL_CONCEPTS
|
||||
|
||||
elif not concept_file.startswith("_concepts"):
|
||||
if not concept_file.startswith("_concepts"):
|
||||
concept_file = f"_concepts_{concept_file}.txt"
|
||||
|
||||
try:
|
||||
start = time.time_ns()
|
||||
self.sheerka.during_restore = True
|
||||
self.sheerka.save_execution_context = False
|
||||
enable_process_return_values_previous_value = self.sheerka.enable_process_return_values
|
||||
self.sheerka.enable_process_return_values = False
|
||||
|
||||
nb_lines, nb_instructions, nb_lines_in_error = restore_from_file(concept_file)
|
||||
|
||||
self.sheerka.enable_process_return_values = enable_process_return_values_previous_value
|
||||
self.sheerka.save_execution_context = True
|
||||
self.sheerka.during_restore = False
|
||||
stop = time.time_ns()
|
||||
|
||||
nano_sec = stop - start
|
||||
dt = nano_sec / 1e6
|
||||
elapsed = f"{dt} ms" if dt < 1000 else f"{dt / 1000} s"
|
||||
self.sheerka.log.info(f"Imported {nb_lines} line(s) in {elapsed}.")
|
||||
self.sheerka.log.info(f"{nb_instructions} instruction(s).")
|
||||
print(f"Imported {nb_lines} line(s) in {elapsed}.")
|
||||
print(f"{nb_instructions} instruction(s).")
|
||||
if nb_lines_in_error > 0:
|
||||
self.sheerka.log.info(f"\u001b[31m{nb_lines_in_error} errors(s) found.\u001b[0m")
|
||||
print(f"\u001b[31m{nb_lines_in_error} errors(s) found.\u001b[0m")
|
||||
else:
|
||||
self.sheerka.log.info(f"No error.")
|
||||
print(f"No error.")
|
||||
except IOError as e:
|
||||
raise e
|
||||
|
||||
def concepts(self):
|
||||
return self.sheerka.sdp.list(self.sheerka.CONCEPTS_BY_ID_ENTRY)
|
||||
return self.sheerka.new(BuiltinConcepts.TO_LIST, body=self.sheerka.sdp.list(self.sheerka.CONCEPTS_BY_ID_ENTRY))
|
||||
|
||||
def format_rules(self):
|
||||
return self.sheerka.new(BuiltinConcepts.TO_LIST, items=self.sheerka.get_format_rules())
|
||||
|
||||
def last_created_concept(self, use_history=False):
|
||||
for exec_result in reversed(self.sheerka.last_executions):
|
||||
@@ -124,4 +138,53 @@ class SheerkaAdmin(BaseService):
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND)
|
||||
|
||||
def last_ret(self, context, index=-1):
|
||||
return self.sheerka.last_return_values[index]
|
||||
try:
|
||||
last = self.sheerka.last_return_values[index]
|
||||
return last[0] if isinstance(last, list) and len(last) == 1 else last
|
||||
except IndexError:
|
||||
return None
|
||||
|
||||
def last_error_ret(self, context, index=-1):
|
||||
while index >= -len(self.sheerka.last_return_values):
|
||||
last = self.sheerka.last_return_values[index]
|
||||
last = [last] if not hasattr(last, "__iter__") else last
|
||||
last = [ret_val for ret_val in last if not ret_val.status]
|
||||
if len(last) == 0:
|
||||
index -= 1
|
||||
continue
|
||||
|
||||
if len(last) > 1:
|
||||
return context.sheerka.ret(SheerkaAdmin.NAME,
|
||||
False,
|
||||
context.sheerka.new(BuiltinConcepts.TOO_MANY_ERRORS, body=last))
|
||||
|
||||
return last[0]
|
||||
|
||||
return context.sheerka.ret(SheerkaAdmin.NAME,
|
||||
False,
|
||||
context.sheerka.new(BuiltinConcepts.NOT_FOUND))
|
||||
|
||||
def extended_isinstance(self, a, b):
|
||||
"""
|
||||
switch between sheerka.isinstance and builtin.isinstance
|
||||
:param a:
|
||||
:param b:
|
||||
:return:
|
||||
"""
|
||||
|
||||
if isinstance(b, (type, tuple)):
|
||||
return isinstance(a, b)
|
||||
|
||||
return self.sheerka.isinstance(a, b)
|
||||
|
||||
@staticmethod
|
||||
def is_container(obj):
|
||||
"""
|
||||
A container concept is a builtin concept that embed a result
|
||||
:param obj:
|
||||
:return:
|
||||
"""
|
||||
if not isinstance(obj, Concept):
|
||||
return False
|
||||
|
||||
return obj.key in BuiltinContainers
|
||||
|
||||
@@ -3,7 +3,11 @@ from dataclasses import dataclass
|
||||
from cache.Cache import Cache
|
||||
from cache.ListCache import ListCache
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import ensure_concept, Concept
|
||||
from core.global_symbols import CONCEPT_PRECEDENCE_MODIFIED, RULE_PRECEDENCE_MODIFIED, RULE_COMPARISON_CONTEXT, \
|
||||
CONCEPT_COMPARISON_CONTEXT
|
||||
from core.builtin_helpers import ensure_concept_or_rule
|
||||
from core.concept import Concept
|
||||
from core.sheerka.services.SheerkaRuleManager import SheerkaRuleManager
|
||||
from core.sheerka.services.sheerka_service import ServiceObj, BaseService
|
||||
|
||||
|
||||
@@ -43,7 +47,7 @@ class SheerkaComparisonManager(BaseService):
|
||||
:return:
|
||||
"""
|
||||
if isinstance(prop_name, Concept):
|
||||
prefix = prop_name.key if prop_name.metadata.is_builtin else prop_name.id
|
||||
prefix = prop_name.key if prop_name.get_metadata().is_builtin else prop_name.id
|
||||
else:
|
||||
prefix = prop_name
|
||||
|
||||
@@ -66,9 +70,11 @@ class SheerkaComparisonManager(BaseService):
|
||||
for _ in range(len(comparison_objs)):
|
||||
for comparison_obj in comparison_objs:
|
||||
if comparison_obj.op == ">":
|
||||
values[comparison_obj.a] = values[comparison_obj.b] + 1
|
||||
if values[comparison_obj.a] <= values[comparison_obj.b]:
|
||||
values[comparison_obj.a] = values[comparison_obj.b] + 1
|
||||
else:
|
||||
values[comparison_obj.b] = values[comparison_obj.a] + 1
|
||||
if values[comparison_obj.b] <= values[comparison_obj.a]:
|
||||
values[comparison_obj.b] = values[comparison_obj.a] + 1
|
||||
|
||||
return values
|
||||
|
||||
@@ -128,17 +134,17 @@ class SheerkaComparisonManager(BaseService):
|
||||
res.setdefault(v, []).append(k)
|
||||
return res
|
||||
|
||||
def _add_comparison(self, comparison_obj):
|
||||
def _add_comparison(self, context, comparison_obj):
|
||||
key = self._compute_key(comparison_obj.property, comparison_obj.context)
|
||||
previous = self.sheerka.cache_manager.get(self.COMPARISON_ENTRY, key)
|
||||
new = previous.copy() if previous else []
|
||||
|
||||
for co in new:
|
||||
if co.property == comparison_obj.property and \
|
||||
co.a == comparison_obj.a and \
|
||||
co.b == comparison_obj.b and \
|
||||
co.op == comparison_obj.op and \
|
||||
co.context == comparison_obj.context:
|
||||
co.a == comparison_obj.a and \
|
||||
co.b == comparison_obj.b and \
|
||||
co.op == comparison_obj.op and \
|
||||
co.context == comparison_obj.context:
|
||||
return self.sheerka.ret(self.NAME, False, self.sheerka.new(BuiltinConcepts.CONCEPT_ALREADY_DEFINED))
|
||||
|
||||
new.append(comparison_obj)
|
||||
@@ -166,7 +172,7 @@ class SheerkaComparisonManager(BaseService):
|
||||
|
||||
cycles = self.detect_cycles(new)
|
||||
if cycles:
|
||||
concepts_in_cycle = [self.sheerka.get_by_id(c) for c in cycles]
|
||||
concepts_in_cycle = [self.sheerka.resolve(c) for c in cycles]
|
||||
chicken_an_egg = self.sheerka.new(BuiltinConcepts.CHICKEN_AND_EGG, body=concepts_in_cycle)
|
||||
return self.sheerka.ret(self.NAME, False, chicken_an_egg)
|
||||
|
||||
@@ -175,6 +181,12 @@ class SheerkaComparisonManager(BaseService):
|
||||
lesser_objs_ids,
|
||||
greatest_objs_ids))
|
||||
|
||||
if comparison_obj.property == BuiltinConcepts.PRECEDENCE:
|
||||
if comparison_obj.context == CONCEPT_COMPARISON_CONTEXT:
|
||||
self.sheerka.publish(context, CONCEPT_PRECEDENCE_MODIFIED)
|
||||
elif comparison_obj.context == RULE_COMPARISON_CONTEXT:
|
||||
self.sheerka.publish(context, RULE_PRECEDENCE_MODIFIED)
|
||||
|
||||
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
|
||||
|
||||
def initialize(self):
|
||||
@@ -191,41 +203,51 @@ class SheerkaComparisonManager(BaseService):
|
||||
self.sheerka.bind_service_method(self.get_partition, False)
|
||||
self.sheerka.bind_service_method(self.get_concepts_weights, False)
|
||||
|
||||
def set_is_greater_than(self, context, prop_name, concept_a, concept_b, comparison_context="#"):
|
||||
def set_is_greater_than(self, context, prop_name, item_a, item_b, comparison_context="#"):
|
||||
"""
|
||||
Records that the property of concept a is greater than concept b's one
|
||||
:param context:
|
||||
:param prop_name:
|
||||
:param concept_a:
|
||||
:param concept_b:
|
||||
:param item_a:
|
||||
:param item_b:
|
||||
:param comparison_context:
|
||||
:return:
|
||||
"""
|
||||
context.log(f"Setting concept {concept_a} is greater than {concept_b}", who=self.NAME)
|
||||
ensure_concept(concept_a, concept_b)
|
||||
context.log(f"Setting item {item_a} is greater than {item_b}", who=self.NAME)
|
||||
ensure_concept_or_rule(item_a, item_b)
|
||||
|
||||
event_digest = context.event.get_digest()
|
||||
comparison_obj = ComparisonObj(event_digest, prop_name, concept_a.id, concept_b.id, ">", comparison_context)
|
||||
return self._add_comparison(comparison_obj)
|
||||
comparison_obj = ComparisonObj(event_digest,
|
||||
prop_name,
|
||||
item_a.str_id,
|
||||
item_b.str_id,
|
||||
">",
|
||||
comparison_context)
|
||||
return self._add_comparison(context, comparison_obj)
|
||||
|
||||
def set_is_less_than(self, context, prop_name, concept_a, concept_b, comparison_context="#"):
|
||||
def set_is_less_than(self, context, prop_name, item_a, item_b, comparison_context="#"):
|
||||
"""
|
||||
Records that the property of concept a is lesser than concept b's one
|
||||
:param context:
|
||||
:param prop_name:
|
||||
:param concept_a:
|
||||
:param concept_b:
|
||||
:param item_a:
|
||||
:param item_b:
|
||||
:param comparison_context:
|
||||
:return:
|
||||
"""
|
||||
context.log(f"Setting concept {concept_a} is less than {concept_b}", who=self.NAME)
|
||||
ensure_concept(concept_a, concept_b)
|
||||
context.log(f"Setting item {item_a} is less than {item_b}", who=self.NAME)
|
||||
ensure_concept_or_rule(item_a, item_b)
|
||||
|
||||
event_digest = context.event.get_digest()
|
||||
comparison_obj = ComparisonObj(event_digest, prop_name, concept_a.id, concept_b.id, "<", comparison_context)
|
||||
return self._add_comparison(comparison_obj)
|
||||
comparison_obj = ComparisonObj(event_digest,
|
||||
prop_name,
|
||||
item_a.str_id,
|
||||
item_b.str_id,
|
||||
"<",
|
||||
comparison_context)
|
||||
return self._add_comparison(context, comparison_obj)
|
||||
|
||||
def set_is_lesser(self, context, prop_name, concept, comparison_context="#"):
|
||||
def set_is_lesser(self, context, prop_name, item, comparison_context="#"):
|
||||
"""
|
||||
Records that the concept is less than any other concept if no direct comparison is given
|
||||
|
||||
@@ -235,18 +257,23 @@ class SheerkaComparisonManager(BaseService):
|
||||
* All lesser concepts that have no comparison directive are greater than the others (and share the same weight)
|
||||
:param context:
|
||||
:param prop_name:
|
||||
:param concept:
|
||||
:param item:
|
||||
:param comparison_context:
|
||||
:return:
|
||||
"""
|
||||
context.log(f"Setting concept {concept} is lesser", who=self.NAME)
|
||||
ensure_concept(concept)
|
||||
context.log(f"Setting item {item} is lesser", who=self.NAME)
|
||||
ensure_concept_or_rule(item)
|
||||
|
||||
event_digest = context.event.get_digest()
|
||||
comparison_obj = ComparisonObj(event_digest, prop_name, concept.id, None, "<<", comparison_context)
|
||||
return self._add_comparison(comparison_obj)
|
||||
comparison_obj = ComparisonObj(event_digest,
|
||||
prop_name,
|
||||
item.str_id,
|
||||
None,
|
||||
"<<",
|
||||
comparison_context)
|
||||
return self._add_comparison(context, comparison_obj)
|
||||
|
||||
def set_is_greatest(self, context, prop_name, concept, comparison_context="#"):
|
||||
def set_is_greatest(self, context, prop_name, item, comparison_context="#"):
|
||||
"""
|
||||
Records that the concept is greater than any other concept if no direct comparison is given
|
||||
|
||||
@@ -256,16 +283,21 @@ class SheerkaComparisonManager(BaseService):
|
||||
* All greatest concepts that have no comparison directive are less than the others (and share the same weight)
|
||||
:param context:
|
||||
:param prop_name:
|
||||
:param concept:
|
||||
:param item:
|
||||
:param comparison_context:
|
||||
:return:
|
||||
"""
|
||||
context.log(f"Setting concept {concept} is greatest", who=self.NAME)
|
||||
ensure_concept(concept)
|
||||
context.log(f"Setting item {item} is greatest", who=self.NAME)
|
||||
ensure_concept_or_rule(item)
|
||||
|
||||
event_digest = context.event.get_digest()
|
||||
comparison_obj = ComparisonObj(event_digest, prop_name, concept.id, None, ">>", comparison_context)
|
||||
return self._add_comparison(comparison_obj)
|
||||
comparison_obj = ComparisonObj(event_digest,
|
||||
prop_name,
|
||||
item.str_id,
|
||||
None,
|
||||
">>",
|
||||
comparison_context)
|
||||
return self._add_comparison(context, comparison_obj)
|
||||
|
||||
def set_are_equivalent(self, context, prop_name, concept_a, concept_b, comparison_context="#"):
|
||||
"""
|
||||
@@ -281,9 +313,6 @@ class SheerkaComparisonManager(BaseService):
|
||||
"""
|
||||
pass
|
||||
|
||||
def set_are_equiv(self, context, prop_name, concept_a, concept_b, comparison_context="#"):
|
||||
pass
|
||||
|
||||
def get_partition(self, prop_name, comparison_context="#"):
|
||||
"""
|
||||
Returns the equivalent classes for the property, using the comparison_context
|
||||
|
||||
@@ -2,7 +2,8 @@ from dataclasses import dataclass
|
||||
from operator import attrgetter
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, ensure_concept
|
||||
from core.builtin_helpers import ensure_concept
|
||||
from core.concept import Concept
|
||||
from core.sheerka.Sheerka import Sheerka
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
|
||||
@@ -81,13 +82,13 @@ class SheerkaConceptsAlgebra(BaseService):
|
||||
:param key:
|
||||
:return:
|
||||
"""
|
||||
if key not in source.metadata.props:
|
||||
if key not in source.get_metadata().props:
|
||||
return
|
||||
|
||||
if key in destination.metadata.props:
|
||||
destination.metadata.props[key].update(source.metadata.props[key])
|
||||
if key in destination.get_metadata().props:
|
||||
destination.get_metadata().props[key].update(source.get_metadata().props[key])
|
||||
else:
|
||||
destination.metadata.props[key] = source.metadata.props[key].copy()
|
||||
destination.get_metadata().props[key] = source.get_metadata().props[key].copy()
|
||||
|
||||
def sub_props(self, destination, source, key):
|
||||
"""
|
||||
@@ -97,11 +98,11 @@ class SheerkaConceptsAlgebra(BaseService):
|
||||
:param key:
|
||||
:return:
|
||||
"""
|
||||
if key not in source.metadata.props or key not in destination.metadata.props:
|
||||
if key not in source.get_metadata().props or key not in destination.get_metadata().props:
|
||||
return
|
||||
|
||||
for item in source.metadata.props[key]:
|
||||
destination.metadata.props[key].discard(item)
|
||||
for item in source.get_metadata().props[key]:
|
||||
destination.get_metadata().props[key].discard(item)
|
||||
|
||||
def recognize(self, concept, all_scores=False):
|
||||
"""
|
||||
@@ -118,7 +119,7 @@ class SheerkaConceptsAlgebra(BaseService):
|
||||
return res
|
||||
|
||||
all_concepts = self.sheerka.cache_manager.copy(Sheerka.CONCEPTS_BY_ID_ENTRY).values() \
|
||||
if self.sheerka.cache_manager.cache_only else self.sheerka.concepts()
|
||||
if self.sheerka.cache_manager.cache_only else self.sheerka.sdp.list(self.sheerka.CONCEPTS_BY_ID_ENTRY)
|
||||
|
||||
for c in all_concepts:
|
||||
score = self._compute_score(c, concept, step_b=round(1 / nb_props, 2))
|
||||
@@ -127,8 +128,8 @@ class SheerkaConceptsAlgebra(BaseService):
|
||||
|
||||
if len(res) == 0:
|
||||
props = []
|
||||
for p in [p for p in PROPERTIES_TO_COMPUTE if p in concept.metadata.props]:
|
||||
props.append((p, concept.metadata.props[p]))
|
||||
for p in [p for p in PROPERTIES_TO_COMPUTE if p in concept.get_metadata().props]:
|
||||
props.append((p, concept.get_metadata().props[p]))
|
||||
return self.sheerka.get_unknown(props)
|
||||
|
||||
res.sort(key=attrgetter('score'), reverse=True)
|
||||
@@ -158,9 +159,9 @@ class SheerkaConceptsAlgebra(BaseService):
|
||||
|
||||
# adds step_b for every property that are in both a and b
|
||||
for prop in PROPERTIES_TO_COMPUTE:
|
||||
if prop in b.metadata.props and prop in a.metadata.props:
|
||||
for prop_value in b.metadata.props[prop]:
|
||||
if prop_value in a.metadata.props[prop]:
|
||||
if prop in b.get_metadata().props and prop in a.get_metadata().props:
|
||||
for prop_value in b.get_metadata().props[prop]:
|
||||
if prop_value in a.get_metadata().props[prop]:
|
||||
score += step_b
|
||||
|
||||
if not step_a:
|
||||
@@ -171,11 +172,11 @@ class SheerkaConceptsAlgebra(BaseService):
|
||||
|
||||
# remove step_a for every property that is in a, but not in b
|
||||
for prop in PROPERTIES_TO_COMPUTE:
|
||||
if prop in a.metadata.props and prop not in a.metadata.props:
|
||||
score += step_a * len(a.metadata.props)
|
||||
elif prop in a.metadata.props and prop in a.metadata.props:
|
||||
for prop_value in a.metadata.props[prop]:
|
||||
if prop_value not in b.metadata.props[prop]:
|
||||
if prop in a.get_metadata().props and prop not in a.get_metadata().props:
|
||||
score += step_a * len(a.get_metadata().props)
|
||||
elif prop in a.get_metadata().props and prop in a.get_metadata().props:
|
||||
for prop_value in a.get_metadata().props[prop]:
|
||||
if prop_value not in b.get_metadata().props[prop]:
|
||||
score -= step_b
|
||||
|
||||
return score
|
||||
@@ -189,6 +190,6 @@ class SheerkaConceptsAlgebra(BaseService):
|
||||
"""
|
||||
nb_props = 0
|
||||
for prop in PROPERTIES_TO_COMPUTE:
|
||||
if prop in concept.metadata.props:
|
||||
nb_props += len(concept.metadata.props[prop])
|
||||
if prop in concept.get_metadata().props:
|
||||
nb_props += len(concept.get_metadata().props[prop])
|
||||
return nb_props
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import core.utils
|
||||
from core.builtin_concepts import BuiltinConcepts, ErrorConcept
|
||||
from core.concept import Concept, DEFINITION_TYPE_DEF, ensure_concept, DEFINITION_TYPE_BNF
|
||||
from core.builtin_helpers import ensure_concept
|
||||
from core.concept import Concept, DEFINITION_TYPE_DEF, DEFINITION_TYPE_BNF, freeze_concept_attrs
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from sdp.sheerkaDataProvider import SheerkaDataProviderDuplicateKeyError
|
||||
|
||||
@@ -28,7 +29,6 @@ class SheerkaCreateNewConcept(BaseService):
|
||||
:param concept: DefConceptNode
|
||||
:return: digest of the new concept
|
||||
"""
|
||||
|
||||
ensure_concept(concept)
|
||||
|
||||
sheerka = self.sheerka
|
||||
@@ -50,6 +50,9 @@ class SheerkaCreateNewConcept(BaseService):
|
||||
# set id before saving in db
|
||||
sheerka.set_id_if_needed(concept, False)
|
||||
|
||||
# freeze attributes
|
||||
freeze_concept_attrs(concept)
|
||||
|
||||
# compute new concepts_by_first_keyword
|
||||
init_ret_value = self.bnp.get_concepts_by_first_token(context, [concept], True)
|
||||
if not init_ret_value.status:
|
||||
@@ -68,9 +71,9 @@ class SheerkaCreateNewConcept(BaseService):
|
||||
cache_manager.put(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, concepts_by_first_keyword)
|
||||
cache_manager.put(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, resolved_concepts_by_first_keyword)
|
||||
|
||||
if concept.metadata.definition_type == DEFINITION_TYPE_DEF and concept.metadata.definition != concept.name:
|
||||
if concept.get_metadata().definition_type == DEFINITION_TYPE_DEF and concept.get_metadata().definition != concept.name:
|
||||
# allow search by definition when definition relevant
|
||||
cache_manager.put(self.sheerka.CONCEPTS_BY_NAME_ENTRY, concept.metadata.definition, concept)
|
||||
cache_manager.put(self.sheerka.CONCEPTS_BY_NAME_ENTRY, concept.get_metadata().definition, concept)
|
||||
|
||||
# update references
|
||||
for ref in self.compute_references(concept):
|
||||
@@ -78,7 +81,7 @@ class SheerkaCreateNewConcept(BaseService):
|
||||
|
||||
# TODO : this line seems to be useless
|
||||
# The grammar is never reset
|
||||
if concept.bnf and init_bnf_ret_value is not None and init_bnf_ret_value.status:
|
||||
if concept.get_bnf() and init_bnf_ret_value is not None and init_bnf_ret_value.status:
|
||||
sheerka.cache_manager.clear(sheerka.CONCEPTS_GRAMMARS_ENTRY)
|
||||
|
||||
# process the return if needed
|
||||
@@ -94,10 +97,10 @@ class SheerkaCreateNewConcept(BaseService):
|
||||
"""
|
||||
refs = set()
|
||||
|
||||
if concept.metadata.definition_type == DEFINITION_TYPE_BNF:
|
||||
if concept.get_metadata().definition_type == DEFINITION_TYPE_BNF:
|
||||
from parsers.BnfNodeParser import BnfNodeConceptExpressionVisitor
|
||||
other_concepts_visitor = BnfNodeConceptExpressionVisitor()
|
||||
other_concepts_visitor.visit(concept.bnf)
|
||||
other_concepts_visitor.visit(concept.get_bnf())
|
||||
|
||||
for concept in other_concepts_visitor.references:
|
||||
if isinstance(concept, str):
|
||||
|
||||
@@ -0,0 +1,440 @@
|
||||
import os
|
||||
import pprint
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.sheerka.ExecutionContext import ExecutionContext
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.utils import CONSOLE_COLORS_MAP as CCM
|
||||
from core.utils import evaluate_expression, as_bag
|
||||
|
||||
# Terminal geometry: ask the tty, fall back to 50x80 when no terminal is
# attached (e.g. running under a test harness or with stdin redirected).
try:
    rows, columns = os.popen('stty size', 'r').read().split()
    # BUG FIX: `stty` output splits into *strings*; PrettyPrinter's `width`
    # must be an int, so convert before use.  int() failures (unexpected
    # stty output) fall through to the default via the same ValueError.
    rows, columns = int(rows), int(columns)
except ValueError:
    rows, columns = 50, 80

pp = pprint.PrettyPrinter(indent=2, width=columns)
|
||||
|
||||
|
||||
class BaseDebugLogger:
    """
    Base interface for debug loggers.  The debug_* hooks are no-ops here;
    concrete subclasses override them.  The class also hands out
    monotonically increasing debug ids, one counter per *hint* string.
    """

    # Per-hint counters, shared by every logger instance.
    ids = {}

    @staticmethod
    def next_id(hint):
        """Return the next id for *hint*; the first call for a hint yields 0."""
        BaseDebugLogger.ids[hint] = BaseDebugLogger.ids.get(hint, -1) + 1
        return BaseDebugLogger.ids[hint]

    def __init__(self, debug_manager, who, method_name, context_id, debug_id):
        pass

    def debug_entering(self, **kwargs):
        """Hook: a traced service method is being entered with *kwargs*."""
        pass

    def debug_var(self, name, value, is_error=False):
        """Hook: trace a single named value."""
        pass

    def debug_rule(self, rule, results):
        """Hook: trace the results produced by a rule."""
        pass

    def debug_log(self, text):
        """Hook: free-form debug message."""
        pass
|
||||
|
||||
|
||||
class NullDebugLogger(BaseDebugLogger):
    """
    Logger handed out when debugging is disabled: every debug_* hook
    inherited from BaseDebugLogger is already a no-op.
    """

    def __init__(self):
        # Deliberately skips BaseDebugLogger.__init__ — a null logger
        # needs no manager/context information.
        pass
|
||||
|
||||
|
||||
class ConsoleDebugLogger(BaseDebugLogger):
    """
    Debug logger that writes colored lines to the console through the
    debug manager.  Every line is prefixed with the context id and the
    debug id so interleaved traces can be told apart.
    """

    def __init__(self, debug_manager, service_name, method_name, context_id, debug_id):
        BaseDebugLogger.__init__(self, debug_manager, service_name, method_name, context_id, debug_id)
        self.debug_manager = debug_manager
        self.service_name = service_name
        self.method_name = method_name
        self.context_id = context_id
        self.debug_id = debug_id
        self.is_highlighted = ""

    def _emit(self, str_text, str_vars):
        # Helper shared by the debug_* hooks: a single-line payload goes on
        # the same line as its label, a multi-line payload gets its own
        # prefixed line(s).
        if "\n" not in str(str_vars):
            self.debug_manager.debug(self.prefix() + str_text + str_vars)
        else:
            self.debug_manager.debug(self.prefix() + str_text)
            self.debug_manager.debug(self.prefix() + str_vars)

    def debug_entering(self, **kwargs):
        """Trace entry into a service method together with its arguments."""
        super().debug_entering(**kwargs)

        str_text = f"{CCM['blue']}Entering {self.service_name}.{self.method_name} with {CCM['reset']}"
        self._emit(str_text, pp.pformat(kwargs))

    def debug_var(self, name, value, is_error=False):
        """
        Trace one variable when the debug manager enables it for this site.

        :param name: variable name
        :param value: variable value (pretty-printed)
        :param is_error: render in red instead of green
        """
        # BUG FIX: the fifth positional parameter of compute_var_debug is
        # debug_id; the original passed self.context_id there a second time,
        # so per-debug-id settings never matched.
        enabled = self.debug_manager.compute_var_debug(self.service_name,
                                                       self.method_name,
                                                       self.context_id,
                                                       name,
                                                       self.debug_id)
        if enabled is False:
            return

        color = 'red' if is_error else 'green'
        str_text = f"{CCM[color]}..{name}={CCM['reset']}"
        # A str `enabled` means "trace the name only, not the value".
        str_vars = "" if isinstance(enabled, str) else pp.pformat(value)
        self._emit(str_text, str_vars)

    def debug_rule(self, rule, results):
        """Trace the results of a rule when a matching debug request exists."""
        if not self.debug_manager.compute_debug_rule(rule.id, self.context_id, self.debug_id):
            return

        str_text = f"{CCM['green']}..results({rule.id})={CCM['reset']}"
        self._emit(str_text, pp.pformat(results))

    def debug_log(self, text):
        """Emit a free-form blue debug message."""
        self.debug_manager.debug(self.prefix() + f"{CCM['blue']}..{text}{CCM['reset']}")

    def prefix(self):
        """Line prefix: [context_id][debug_id] plus the highlight marker."""
        return f"[{self.context_id:2}][{self.debug_id:2}] {self.is_highlighted}"
|
||||
|
||||
|
||||
@dataclass
class DebugVarSetting:
    """
    One recorded variable-tracing request.  A field left as None acts as a
    wildcard when the setting is matched against a trace site
    (see SheerkaDebugManager.compute_var_debug).
    """
    service_name: str       # service to match, or None for any
    method_name: str        # method to match, or None for any
    variable_name: str      # variable to match; "*" or None matches any
    context_id: int         # execution context to match, or None for any
    context_children: bool  # also match children of context_id
    debug_id: int           # debugger instance id to match, or None for any
    debug_children: bool    # also match child debug ids

    enabled: bool           # may also hold a str meaning "name only, no value"
|
||||
|
||||
|
||||
@dataclass
class DebugRuleSetting:
    """
    One recorded rule-tracing request.  A field left as None acts as a
    wildcard when matched (see SheerkaDebugManager.compute_debug_rule).
    """
    rule_id: str    # rule to match (stored as str), or None for any
    context_id: int  # execution context to match, or None for any
    debug_id: int    # debugger instance id to match, or None for any

    enabled: bool
|
||||
|
||||
|
||||
class SheerkaDebugManager(BaseService):
    """
    Service that decides what gets traced: holds the global debug switch,
    per-context and per-variable activation caches, and the recorded
    variable/rule debug settings.  Hands out debug loggers to services.
    """

    NAME = "Debug"
    PREFIX = "debug."

    # "<context_id>+" means: also debug the children of that context.
    children_activation_regex = re.compile(r"(\d+)\+")

    def __init__(self, sheerka):
        super().__init__(sheerka)
        self.activated = False        # master switch: is debug activated
        self.explicit = False         # when True, contexts must be opted in explicitly
        self.context_cache = set()    # debug for specific contexts
        self.variable_cache = set()   # debug for specific variables
        self.debug_vars_settings = []   # recorded DebugVarSetting requests
        self.debug_rules_settings = []  # recorded DebugRuleSetting requests

    def initialize(self):
        """Expose the debug API as bound service methods."""
        self.sheerka.bind_service_method(self.set_debug, True)
        self.sheerka.bind_service_method(self.set_explicit, True)
        self.sheerka.bind_service_method(self.activate_debug_for, True)
        self.sheerka.bind_service_method(self.deactivate_debug_for, True)
        self.sheerka.bind_service_method(self.debug_activated, False)
        self.sheerka.bind_service_method(self.debug_activated_for, False)
        self.sheerka.bind_service_method(self.get_context_debug_mode, False)
        self.sheerka.bind_service_method(self.debug_rule, True)
        self.sheerka.bind_service_method(self.debug_rule_activated, False)
        self.sheerka.bind_service_method(self.inspect, False)
        self.sheerka.bind_service_method(self.debug, False, visible=False)
        self.sheerka.bind_service_method(self.get_debugger, False)
        self.sheerka.bind_service_method(self.debug_var, False)
        self.sheerka.bind_service_method(self.reset_debug, False)
        self.sheerka.bind_service_method(self.get_debug_settings, False, as_name="debug_settings")

    def initialize_deferred(self, context, is_first_time):
        """Restore the persisted debug state after startup."""
        self.restore_values("activated",
                            "explicit",
                            "context_cache",
                            "variable_cache",
                            "debug_vars_settings",
                            "debug_rules_settings")

    def reset(self):
        """
        For test purpose.
        NOTE(review): `explicit` is not reset here — confirm this is intentional.
        :return:
        """
        self.activated = False
        self.context_cache.clear()
        self.variable_cache.clear()
        self.debug_vars_settings.clear()
        self.debug_rules_settings.clear()

    def set_debug(self, context, value=True):
        """Turn the master debug switch on/off and persist it."""
        self.activated = value
        self.sheerka.record_var(context, self.NAME, "activated", self.activated)
        return self.sheerka.ret(SheerkaDebugManager.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))

    def set_explicit(self, context, value=True):
        """Turn explicit mode on/off: when on, contexts must opt in to debug."""
        self.explicit = value
        self.sheerka.record_var(context, self.NAME, "explicit", self.explicit)
        return self.sheerka.ret(SheerkaDebugManager.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))

    def activate_debug_for(self, context, debug_id, children=False):
        """
        Activate debug for a variable (str id) or a context (int id).

        :param context:
        :param debug_id: if debug_id is str, activate variable cache, context_cache otherwise;
                         the special form "<n>+" activates context n and its children
        :param children: also activate the context's children
        :return:
        """
        # Preprocess the "<n>+" shorthand into (int id, children=True).
        if isinstance(debug_id, str) and (m := self.children_activation_regex.match(debug_id)):
            debug_id = int(m.group(1))
            children = True

        if isinstance(debug_id, str):
            self.variable_cache.add(debug_id)
            self.sheerka.record_var(context, self.NAME, "variable_cache", self.variable_cache)
        else:
            self.context_cache.add(debug_id)
            if children:
                # Children activation is stored as the "<n>+" string form.
                self.context_cache.add(str(debug_id) + "+")
            self.sheerka.record_var(context, self.NAME, "context_cache", self.context_cache)
        return self.sheerka.ret(SheerkaDebugManager.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))

    def deactivate_debug_for(self, context, debug_id, children=False):
        """Inverse of activate_debug_for: remove the cache entries."""
        if isinstance(debug_id, str):
            self.variable_cache.discard(debug_id)
            self.sheerka.record_var(context, self.NAME, "variable_cache", self.variable_cache)
        else:
            self.context_cache.discard(debug_id)
            if children:
                self.context_cache.discard(str(debug_id) + "+")
            self.sheerka.record_var(context, self.NAME, "context_cache", self.context_cache)
        return self.sheerka.ret(SheerkaDebugManager.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))

    def debug_activated(self):
        """Return the master debug switch."""
        return self.activated

    def debug_activated_for(self, debug_id):
        """
        Return whether debug is activated for this variable id,
        or None when debug is globally off.
        """
        if not self.activated:
            return None

        return debug_id in self.variable_cache

    def debug_rule_activated(self, rule_id, context_id):
        """
        Tell whether a rule-debug request matches this rule in this context.

        :param rule_id:
        :param context_id:
        :return:
        """
        if not self.activated:
            return False

        # BUG FIX: the original consulted `self.rules_cache`, an attribute
        # that is never initialized anywhere in this class, and therefore
        # always raised AttributeError.  Match the recorded rule settings
        # instead, with the same wildcard semantics as compute_debug_rule.
        rule_id = str(rule_id)
        for setting in self.debug_rules_settings:
            if (setting.rule_id is None or setting.rule_id == rule_id) and \
                    (setting.context_id is None or setting.context_id == context_id):
                if setting.enabled:
                    return True
        return False

    def get_context_debug_mode(self, context_id):
        """
        Return (debug_for_self, debug_for_children) markers for a context,
        or (None, None) when debug is globally off.
        """
        if not self.activated:
            return None, None

        debug_for_children = "protected" if str(context_id) + "+" in self.context_cache else None
        # In non-explicit mode every context is debugged; otherwise only
        # contexts that were explicitly activated.
        debug_for_self = "private" if not self.explicit or context_id in self.context_cache else None

        return debug_for_self, debug_for_children

    def inspect(self, context, context_id, *props):
        """
        Pretty-print selected properties of an execution context.

        :param context:
        :param context_id: id of the execution item to inspect
        :param props: dotted property expressions; defaults to inputs and
                      return values
        :return: None on success, or the lookup error value
        """
        to_inspect = self.sheerka.get_execution_item(context, context_id)
        if not isinstance(to_inspect, ExecutionContext):
            # Lookup failed: propagate the error value returned by sheerka.
            return to_inspect

        if not props:
            props = ["inputs", "values.return_values"]

        bag = as_bag(to_inspect)
        res = {prop: evaluate_expression(prop, bag) for prop in props}

        pp.pprint(res)
        return None

    def debug(self, *args, **kwargs):
        """Low-level output primitive used by the loggers (console print)."""
        print(*args, **kwargs)

    def get_debugger(self, context, who, method_name):
        """
        Return a ConsoleDebugLogger when debug is enabled for this
        service/method/context, a NullDebugLogger otherwise.
        """
        if self.compute_debug(who, method_name, context):
            debug_id = ConsoleDebugLogger.next_id(context.event.get_digest() + str(context.id))
            return ConsoleDebugLogger(self, who, method_name, context.id, debug_id)

        return NullDebugLogger()

    def debug_var(self, context,
                  service=None,
                  method=None,
                  variable=None,
                  context_id=None,
                  context_children=False,
                  debug_id=None,
                  debug_children=False,
                  enabled=True):
        """
        Record (or update in place) a variable-debug request; any filter
        left as None acts as a wildcard at match time.
        """
        for setting in self.debug_vars_settings:
            if setting.service_name == service and \
                    setting.method_name == method and \
                    setting.variable_name == variable and \
                    setting.context_id == context_id and \
                    setting.context_children == context_children and \
                    setting.debug_id == debug_id and \
                    setting.debug_children == debug_children:
                # Same filters already recorded: just update the flag.
                setting.enabled = enabled
                break
        else:
            self.debug_vars_settings.append(DebugVarSetting(service,
                                                            method,
                                                            variable,
                                                            context_id,
                                                            context_children,
                                                            debug_id,
                                                            debug_children,
                                                            enabled))

        self.sheerka.record_var(context, self.NAME, "debug_vars_settings", self.debug_vars_settings)
        return self.sheerka.ret(SheerkaDebugManager.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))

    def reset_debug(self, context):
        """Clear every recorded variable and rule debug setting."""
        self.debug_vars_settings.clear()
        self.debug_rules_settings.clear()
        self.sheerka.record_var(context, self.NAME, "debug_vars_settings", self.debug_vars_settings)
        # BUG FIX: the rules entry was recorded with debug_vars_settings
        # (copy-paste), leaving stale rule settings in the store.
        self.sheerka.record_var(context, self.NAME, "debug_rules_settings", self.debug_rules_settings)
        return self.sheerka.ret(SheerkaDebugManager.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))

    def compute_debug(self, service_name, method_name, context):
        """
        Decide whether a service method should receive a real debugger:
        all matching settings must be enabled.
        """
        if not self.activated:
            return False

        selected = []
        for setting in self.debug_vars_settings:
            # Settings with no service/method/context filter are pure
            # variable filters; they don't drive method-level debug.
            if setting.service_name is None and setting.method_name is None and setting.context_id is None:
                continue

            if (setting.service_name is None or setting.service_name == service_name) and \
                    (setting.method_name is None or setting.method_name == method_name) and \
                    (setting.context_id is None or setting.context_id == context.id or (
                            setting.context_children and context.has_parent(setting.context_id))):
                selected.append(setting.enabled)

        if len(selected) == 0:
            return False

        # All matched settings must be enabled.  BUG FIX: the original
        # folded with `res &= enabled`, which raises TypeError when a
        # matched setting holds a str `enabled` (legal elsewhere); use
        # truthiness instead.
        return all(bool(enabled) for enabled in selected)

    def compute_var_debug(self, service_name, method_name, context_id, variable_name, debug_id):
        """
        Decide whether one variable trace is emitted.  Returns False,
        True, or a str (meaning "trace the name only, not the value").
        """
        if not self.activated:
            return False

        selected = []
        for setting in self.debug_vars_settings:
            # Settings with neither a variable nor a debug-id filter are
            # method-level settings; skip them here.
            if setting.variable_name is None and setting.debug_id is None:
                continue

            if (setting.service_name is None or setting.service_name == service_name) and \
                    (setting.method_name is None or setting.method_name == method_name) and \
                    (setting.context_id is None or setting.context_id == context_id) and \
                    (setting.variable_name is None or
                     setting.variable_name == "*" or
                     setting.variable_name == variable_name) and \
                    (setting.debug_id is None or setting.debug_id == debug_id):
                selected.append(setting.enabled)

        if len(selected) == 0:
            return False

        res = selected[0]
        for enabled in selected[1:]:
            # Any explicit False wins.
            if res is False or enabled is False:
                return False

            # A str result ("name only") sticks once set.
            if isinstance(res, str):
                continue

            res = enabled

        return res

    def debug_rule(self, context, rule=None, context_id=None, debug_id=None, enabled=True):
        """
        Add a debug rule request
        :param context:
        :param rule:
        :param context_id:
        :param debug_id:
        :param enabled:
        :return:
        """
        # Rule ids are stored as strings so int/str callers match alike.
        rule = str(rule) if rule is not None else None
        for setting in self.debug_rules_settings:
            if setting.rule_id == rule and \
                    setting.context_id == context_id and \
                    setting.debug_id == debug_id:
                setting.enabled = enabled
                break
        else:
            self.debug_rules_settings.append(DebugRuleSetting(rule,
                                                              context_id,
                                                              debug_id,
                                                              enabled))

        self.sheerka.record_var(context, self.NAME, "debug_rules_settings", self.debug_rules_settings)
        return self.sheerka.ret(SheerkaDebugManager.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))

    def compute_debug_rule(self, rule_id, context_id, debug_id):
        """
        Decide whether a rule's results are traced: all settings matching
        (rule_id, context_id, debug_id) must be enabled.
        """
        if not self.activated:
            return False

        selected = []
        for setting in self.debug_rules_settings:
            if (setting.rule_id is None or setting.rule_id == rule_id) and \
                    (setting.context_id is None or setting.context_id == context_id) and \
                    (setting.debug_id is None or setting.debug_id == debug_id):
                selected.append(setting.enabled)

        if len(selected) == 0:
            return False

        return all(bool(enabled) for enabled in selected)

    def reset_debug_rules(self, context):
        """Clear only the recorded rule debug settings."""
        self.debug_rules_settings.clear()
        self.sheerka.record_var(context, self.NAME, "debug_rules_settings", self.debug_rules_settings)
        return self.sheerka.ret(SheerkaDebugManager.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))

    def get_debug_settings(self):
        # NOTE(review): bound as "debug_settings" but returns only the
        # variable settings, not the rule settings — confirm intentional.
        return self.debug_vars_settings
|
||||
@@ -21,7 +21,7 @@ class SheerkaDump(BaseService):
|
||||
super().__init__(sheerka)
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.dump_desc, True, "desc") # because concept is evaluated
|
||||
self.sheerka.bind_service_method(self.dump_desc, True, "desc") # has_side_effect 'cause concept is evaluated
|
||||
self.sheerka.bind_service_method(self.dump_sdp, False, "dump_sdp")
|
||||
|
||||
def dump_desc(self, *concept_names, eval=False):
|
||||
@@ -34,7 +34,7 @@ class SheerkaDump(BaseService):
|
||||
else:
|
||||
concepts = self.sheerka.get_by_key(concept_name)
|
||||
if self.sheerka.isinstance(concepts, BuiltinConcepts.UNKNOWN_CONCEPT):
|
||||
self.sheerka.log.error(f"Concept '{concept_name}' is unknown")
|
||||
print(f"Concept '{concept_name}' is unknown")
|
||||
return False
|
||||
|
||||
if not hasattr(concepts, "__iter__"):
|
||||
@@ -46,36 +46,35 @@ class SheerkaDump(BaseService):
|
||||
value = evaluated.body if evaluated.key == c.key else evaluated
|
||||
|
||||
if not first:
|
||||
self.sheerka.log.info("")
|
||||
self.sheerka.log.info(f"id : {c.id}")
|
||||
self.sheerka.log.info(f"name : {c.name}")
|
||||
self.sheerka.log.info(f"key : {c.key}")
|
||||
self.sheerka.log.info(f"definition : {c.metadata.definition}")
|
||||
self.sheerka.log.info(f"type : {c.metadata.definition_type}")
|
||||
self.sheerka.log.info(f"body : {c.metadata.body}")
|
||||
self.sheerka.log.info(f"where : {c.metadata.where}")
|
||||
self.sheerka.log.info(f"pre : {c.metadata.pre}")
|
||||
self.sheerka.log.info(f"post : {c.metadata.post}")
|
||||
self.sheerka.log.info(f"ret : {c.metadata.ret}")
|
||||
self.sheerka.log.info(f"vars : {c.metadata.variables}")
|
||||
self.sheerka.log.info(f"props : {c.metadata.props}")
|
||||
print("")
|
||||
print(f"id : {c.id}")
|
||||
print(f"name : {c.name}")
|
||||
print(f"key : {c.key}")
|
||||
print(f"definition : {c.get_metadata().definition}")
|
||||
print(f"type : {c.get_metadata().definition_type}")
|
||||
print(f"body : {c.get_metadata().body}")
|
||||
print(f"where : {c.get_metadata().where}")
|
||||
print(f"pre : {c.get_metadata().pre}")
|
||||
print(f"post : {c.get_metadata().post}")
|
||||
print(f"ret : {c.get_metadata().ret}")
|
||||
print(f"vars : {c.get_metadata().variables}")
|
||||
print(f"props : {c.get_metadata().props}")
|
||||
if eval:
|
||||
self.sheerka.log.info(f"value : {value}")
|
||||
if c.values:
|
||||
for v in c.values:
|
||||
self.sheerka.log.info(f"{v}: {c.get_value(v)}")
|
||||
print(f"value : {value}")
|
||||
for v in c.values():
|
||||
print(f"{v}: {c.get_value(v)}")
|
||||
else:
|
||||
self.sheerka.log.info("No variable")
|
||||
print("No variable")
|
||||
|
||||
self.sheerka.log.info(f"digest : {c.get_origin()}")
|
||||
print(f"digest : {c.get_origin()}")
|
||||
|
||||
if self.sheerka.isaset(context, c):
|
||||
items = self.sheerka.get_set_elements(context, c)
|
||||
self.sheerka.log.info(f"elements : {items}")
|
||||
print(f"elements : {items}")
|
||||
|
||||
first = False
|
||||
|
||||
def dump_sdp(self):
|
||||
snapshot = self.sheerka.sdp.get_snapshot(SheerkaDataProvider.HeadFile)
|
||||
state = self.sheerka.sdp.load_state(snapshot)
|
||||
self.sheerka.log.info(get_pp().pformat(state.data))
|
||||
print(get_pp().pformat(state.data))
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.builtin_helpers import expect_one, only_successful, parse_unrecognized, evaluate
|
||||
from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved, NotInit, ensure_concept
|
||||
from core.builtin_helpers import expect_one, only_successful, parse_unrecognized, evaluate, ensure_concept
|
||||
from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved, NotInit, AllConceptParts, \
|
||||
concept_part_value
|
||||
from core.sheerka.services.SheerkaExecute import ParserInput
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.tokenizer import Tokenizer
|
||||
@@ -47,8 +48,10 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
|
||||
parent = context.get_parent()
|
||||
while parent is not None:
|
||||
if parent.who == context.who and parent.action == BuiltinConcepts.EVALUATING_CONCEPT and \
|
||||
parent.obj == concept and parent.obj.compiled == concept.compiled:
|
||||
if (parent.who == context.who and
|
||||
parent.action == BuiltinConcepts.EVALUATING_CONCEPT and
|
||||
parent.obj == concept and
|
||||
parent.obj.get_compiled() == concept.get_compiled()):
|
||||
return True
|
||||
|
||||
parent = parent.get_parent()
|
||||
@@ -90,15 +93,15 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
vars_needed = False
|
||||
body_needed = False
|
||||
|
||||
if concept_part in concept.compiled and concept.compiled[concept_part] is not None:
|
||||
concept_part_source = getattr(concept.metadata, concept_part.value)
|
||||
if concept_part in concept.get_compiled() and concept.get_compiled()[concept_part] is not None:
|
||||
concept_part_source = getattr(concept.get_metadata(), concept_part_value(concept_part))
|
||||
|
||||
assert concept_part_source is not None
|
||||
|
||||
tokens = [t.str_value for t in Tokenizer(concept_part_source)]
|
||||
|
||||
if check_vars:
|
||||
for var_name in (v[0] for v in concept.metadata.variables):
|
||||
for var_name in (v[0] for v in concept.get_metadata().variables):
|
||||
if var_name in tokens:
|
||||
vars_needed = True
|
||||
ret.append("variables")
|
||||
@@ -106,9 +109,9 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
|
||||
if check_body and "self" in tokens:
|
||||
body_needed = True
|
||||
ret.append("body")
|
||||
ret.append(ConceptParts.BODY)
|
||||
|
||||
ret.append(concept_part.value)
|
||||
ret.append(concept_part)
|
||||
|
||||
return ret, vars_needed, body_needed
|
||||
|
||||
@@ -121,16 +124,16 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
:param var_name:
|
||||
:return:
|
||||
"""
|
||||
if concept.metadata.where is None or concept.metadata.where.strip() == "":
|
||||
if concept.get_metadata().where is None or concept.get_metadata().where.strip() == "":
|
||||
return None
|
||||
|
||||
ret = ExpressionParser().parse(context, ParserInput(concept.metadata.where))
|
||||
ret = ExpressionParser().parse(context, ParserInput(concept.get_metadata().where))
|
||||
if not ret.status:
|
||||
# TODO: manage invalid where clause
|
||||
return None
|
||||
expr = ret.body.body
|
||||
|
||||
to_trueify = [v[0] for v in concept.metadata.variables if v[0] != var_name]
|
||||
to_trueify = [v[0] for v in concept.get_metadata().variables if v[0] != var_name]
|
||||
trueified_where = str(TrueifyVisitor(to_trueify, [var_name]).visit(expr))
|
||||
|
||||
tokens = [t.str_value for t in Tokenizer(trueified_where)]
|
||||
@@ -140,7 +143,7 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
compiled = compile(trueified_where, "<where clause>", "eval")
|
||||
except Exception:
|
||||
pass
|
||||
return WhereClauseDef(concept, concept.metadata.where, trueified_where, var_name, compiled)
|
||||
return WhereClauseDef(concept, concept.get_metadata().where, trueified_where, var_name, compiled)
|
||||
else:
|
||||
return None
|
||||
|
||||
@@ -153,7 +156,8 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
:return:
|
||||
"""
|
||||
ret = []
|
||||
for r in [r for r in return_values if r.status]:
|
||||
valid_return_values = [r for r in return_values if r.status]
|
||||
for r in valid_return_values:
|
||||
if where_clause_def.compiled:
|
||||
try:
|
||||
if eval(where_clause_def.compiled, {where_clause_def.prop: self.sheerka.objvalue(r)}):
|
||||
@@ -177,10 +181,13 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
if len(ret) > 0:
|
||||
return ret
|
||||
|
||||
reason = [r.body for r in return_values] if len(valid_return_values) == 0 else None
|
||||
|
||||
return self.sheerka.new(BuiltinConcepts.CONDITION_FAILED,
|
||||
body=where_clause_def.clause,
|
||||
concept=where_clause_def.concept,
|
||||
prop=where_clause_def.prop)
|
||||
prop=where_clause_def.prop,
|
||||
reason=reason)
|
||||
|
||||
def manage_infinite_recursion(self, context):
|
||||
"""
|
||||
@@ -194,7 +201,7 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
concepts_found = set()
|
||||
while parent and parent.obj:
|
||||
if parent.who == context.who and parent.action == BuiltinConcepts.EVALUATING_CONCEPT:
|
||||
body = parent.obj.metadata.body
|
||||
body = parent.obj.get_metadata().body
|
||||
try:
|
||||
return self.sheerka.ret(self.NAME, True, InfiniteRecursionResolved(eval(body)))
|
||||
except Exception:
|
||||
@@ -226,11 +233,11 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
return self.sheerka.resolve(identifier)
|
||||
return None
|
||||
|
||||
for part_key in ConceptParts:
|
||||
if part_key in concept.compiled:
|
||||
for part_key in AllConceptParts:
|
||||
if part_key in concept.get_compiled():
|
||||
continue
|
||||
|
||||
source = getattr(concept.metadata, part_key.value)
|
||||
source = getattr(concept.get_metadata(), concept_part_value(part_key))
|
||||
if source is None: # or not isinstance(source, str):
|
||||
continue
|
||||
|
||||
@@ -238,22 +245,22 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
raise Exception("Invalid concept init. metadata must be a string")
|
||||
|
||||
if source.strip() == "":
|
||||
concept.compiled[part_key] = DoNotResolve(source)
|
||||
concept.get_compiled()[part_key] = DoNotResolve(source)
|
||||
else:
|
||||
# first case, when the metadata references another concept via c:xxx: keyword
|
||||
if concept_found := parse_token_concept(source):
|
||||
context.log(f"Recognized concept '{concept_found}'", self.NAME)
|
||||
concept.compiled[part_key] = concept_found
|
||||
concept.get_compiled()[part_key] = concept_found
|
||||
else:
|
||||
res = parse_unrecognized(context,
|
||||
source,
|
||||
parsers="all",
|
||||
prop=part_key,
|
||||
filter_func=only_successful)
|
||||
concept.compiled[part_key] = res.body.body if is_only_successful(res) else res
|
||||
concept.get_compiled()[part_key] = res.body.body if is_only_successful(res) else res
|
||||
|
||||
for var_name, default_value in concept.metadata.variables:
|
||||
if var_name in concept.compiled:
|
||||
for var_name, default_value in concept.get_metadata().variables:
|
||||
if var_name in concept.get_compiled():
|
||||
continue
|
||||
|
||||
if default_value is None:
|
||||
@@ -263,23 +270,23 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
raise Exception("Invalid concept init. variable metadata must be a string")
|
||||
|
||||
if default_value.strip() == "":
|
||||
concept.compiled[var_name] = DoNotResolve(default_value)
|
||||
concept.get_compiled()[var_name] = DoNotResolve(default_value)
|
||||
else:
|
||||
# first case, when the metadata references another concept via c:xxx: keyword
|
||||
if concept_found := parse_token_concept(default_value):
|
||||
context.log(f"Recognized concept '{concept_found}'", self.NAME)
|
||||
concept.compiled[var_name] = concept_found
|
||||
concept.get_compiled()[var_name] = concept_found
|
||||
else:
|
||||
res = parse_unrecognized(context,
|
||||
default_value,
|
||||
parsers="all",
|
||||
prop=var_name,
|
||||
filter_func=only_successful)
|
||||
concept.compiled[var_name] = res.body.body if is_only_successful(res) else res
|
||||
concept.get_compiled()[var_name] = res.body.body if is_only_successful(res) else res
|
||||
|
||||
# Updates the cache of concepts when possible
|
||||
if self.sheerka.has_id(concept.id):
|
||||
self.sheerka.get_by_id(concept.id).compiled = concept.compiled
|
||||
self.sheerka.get_by_id(concept.id).set_compiled(concept.get_compiled())
|
||||
|
||||
def resolve(self,
|
||||
context,
|
||||
@@ -325,8 +332,8 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
with context.push(BuiltinConcepts.EVALUATING_ATTRIBUTE,
|
||||
current_prop,
|
||||
desc=desc,
|
||||
obj=current_concept,
|
||||
path=path) as sub_context:
|
||||
obj=current_concept) as sub_context:
|
||||
sub_context.add_inputs(path=path)
|
||||
|
||||
if force_evaluation:
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_BODY_REQUESTED)
|
||||
@@ -352,7 +359,7 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
else:
|
||||
# update short term memory with current concept variables
|
||||
if current_concept:
|
||||
for var in current_concept.metadata.variables:
|
||||
for var in current_concept.get_metadata().variables:
|
||||
value = current_concept.get_value(var[0])
|
||||
if value != NotInit:
|
||||
sub_context.add_to_short_term_memory(var[0], current_concept.get_value(var[0]))
|
||||
@@ -433,23 +440,23 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
:return: value of the evaluation or error
|
||||
"""
|
||||
|
||||
if concept.metadata.is_evaluated:
|
||||
if concept.get_metadata().is_evaluated:
|
||||
return concept
|
||||
|
||||
# I cannot use cache because of concept like 'number'.
|
||||
# They don't have variables, but their values change every time they are instantiated
|
||||
# TODO: Need to find a way to cache despite of them
|
||||
# need_body = eval_body or context.in_context(BuiltinConcepts.EVAL_BODY_REQUESTED)
|
||||
# if need_body and len(concept.metadata.variables) == 0 and context.sheerka.has_id(concept.id):
|
||||
# if need_body and len(concept.get_metadata().variables) == 0 and context.sheerka.has_id(concept.id):
|
||||
# from_cache = context.sheerka.get_by_id(concept.id)
|
||||
# if from_cache.metadata.is_evaluated:
|
||||
# if from_cache.get_metadata().is_evaluated:
|
||||
# concept.set_value(ConceptParts.BODY, from_cache.body)
|
||||
# concept.metadata.is_evaluated = True
|
||||
# concept.get_metadata().is_evaluated = True
|
||||
# return concept
|
||||
|
||||
desc = f"Evaluating concept {concept}"
|
||||
with context.push(BuiltinConcepts.EVALUATING_CONCEPT, concept, desc=desc, eval_body=eval_body) as sub_context:
|
||||
|
||||
with context.push(BuiltinConcepts.EVALUATING_CONCEPT, concept, desc=desc) as sub_context:
|
||||
sub_context.add_inputs(eval_body=eval_body)
|
||||
if eval_body:
|
||||
# ask for body evaluation
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_BODY_REQUESTED)
|
||||
@@ -466,8 +473,8 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
|
||||
for metadata_to_eval in all_metadata_to_eval:
|
||||
if metadata_to_eval == "variables":
|
||||
for var_name in (v for v in concept.variables() if v in concept.compiled):
|
||||
prop_ast = concept.compiled[var_name]
|
||||
for var_name in (v for v in concept.variables() if v in concept.get_compiled()):
|
||||
prop_ast = concept.get_compiled()[var_name]
|
||||
|
||||
w_clause = self.get_where_clause_def(context, concept, var_name)
|
||||
# TODO, manage when the where clause cannot be parsed
|
||||
@@ -485,17 +492,17 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
else:
|
||||
concept.set_value(var_name, resolved)
|
||||
else:
|
||||
part_key = ConceptParts(metadata_to_eval)
|
||||
part_key = metadata_to_eval
|
||||
|
||||
# do not evaluate where when the body is a set
|
||||
# Indeed, the way that the where clause is expressed is not a valid python or concept code
|
||||
if part_key == ConceptParts.WHERE and self.sheerka.isaset(sub_context, concept.body):
|
||||
continue
|
||||
|
||||
if part_key not in concept.compiled or concept.compiled[part_key] is None:
|
||||
if part_key not in concept.get_compiled() or concept.get_compiled()[part_key] is None:
|
||||
continue
|
||||
|
||||
metadata_ast = concept.compiled[part_key]
|
||||
metadata_ast = concept.get_compiled()[part_key]
|
||||
|
||||
# if part_key is PRE, POST or WHERE, the concept need to be evaluated
|
||||
# if we want the predicates to be resolved => so force_eval = True
|
||||
@@ -514,7 +521,7 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
# validate PRE and WHERE condition
|
||||
if part_key in (ConceptParts.PRE, ConceptParts.WHERE) and not self.sheerka.objvalue(resolved):
|
||||
return self.sheerka.new(BuiltinConcepts.CONDITION_FAILED,
|
||||
body=getattr(concept.metadata, metadata_to_eval),
|
||||
body=getattr(concept.get_metadata(), concept_part_value(metadata_to_eval)),
|
||||
concept=concept,
|
||||
prop=part_key)
|
||||
|
||||
@@ -524,19 +531,19 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
|
||||
concept.init_key() # Necessary for old unit tests. To remove someday
|
||||
|
||||
if "body" in all_metadata_to_eval:
|
||||
concept.metadata.is_evaluated = True
|
||||
if ConceptParts.BODY in all_metadata_to_eval:
|
||||
concept.get_metadata().is_evaluated = True
|
||||
|
||||
# # update the cache for concepts with no variables
|
||||
# Cannot use cache. See the comment at the beginning of this method
|
||||
# if len(concept.metadata.variables) == 0:
|
||||
# if len(concept.get_metadata().variables) == 0:
|
||||
# self.sheerka.cache_manager.put(self.sheerka.CONCEPTS_BY_ID_ENTRY, concept.id, concept)
|
||||
|
||||
if not concept.metadata.is_builtin:
|
||||
if not concept.get_metadata().is_builtin:
|
||||
self.sheerka.register_object(sub_context, concept.name, concept)
|
||||
|
||||
# manage RET metadata
|
||||
if sub_context.in_context(BuiltinConcepts.EVAL_BODY_REQUESTED) and ConceptParts.RET in concept.values:
|
||||
if sub_context.in_context(BuiltinConcepts.EVAL_BODY_REQUESTED) and ConceptParts.RET in concept.values():
|
||||
return concept.get_value(ConceptParts.RET)
|
||||
else:
|
||||
return concept
|
||||
@@ -547,7 +554,7 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
needed, variables, body = self.get_needed_metadata(concept, ConceptParts.PRE, True, True)
|
||||
to_eval.extend(needed)
|
||||
|
||||
if context.in_context(BuiltinConcepts.EVAL_WHERE_REQUESTED) or concept.metadata.need_validation:
|
||||
if context.in_context(BuiltinConcepts.EVAL_WHERE_REQUESTED) or concept.get_metadata().need_validation:
|
||||
# What are the cases where we do not need a validation ?
|
||||
# see test_sheerka_non_reg::test_i_can_evaluate_bnf_concept_with_where_clause()
|
||||
# res = sheerka.evaluate_user_input("foobar")
|
||||
@@ -572,7 +579,7 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
to_eval.append('variables')
|
||||
|
||||
if not body:
|
||||
to_eval.append("body")
|
||||
to_eval.append(ConceptParts.BODY)
|
||||
|
||||
return to_eval
|
||||
|
||||
|
||||
@@ -0,0 +1,114 @@
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.builtin_helpers import expect_one
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from evaluators.ConceptEvaluator import ConceptEvaluator
|
||||
|
||||
DISABLED_RULES = "#disabled#"
|
||||
LOW_PRIORITY_RULES = "#low_priority#"
|
||||
|
||||
|
||||
class SheerkaEvaluateRules(BaseService):
|
||||
NAME = "EvaluateRules"
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.evaluators_by_name = None
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.evaluate_format_rules, False)
|
||||
self.reset_evaluators()
|
||||
|
||||
def reset_evaluators(self):
|
||||
# instantiate evaluators, once for all, only keep when it's enabled
|
||||
evaluators = [e_class() for e_class in self.sheerka.evaluators]
|
||||
evaluators = [e for e in evaluators if e.enabled]
|
||||
self.evaluators_by_name = {e.short_name: e for e in evaluators}
|
||||
|
||||
def evaluate_format_rules(self, context, bag, disabled):
|
||||
return self.evaluate_rules(context, self.sheerka.get_format_rules(), bag, disabled)
|
||||
|
||||
def evaluate_rules(self, context, rules, bag, disabled):
|
||||
"""
|
||||
evaluate the format rules, in the context of 'bag'
|
||||
CAUTION : the rules MUST be sorted by priority
|
||||
:param context:
|
||||
:param rules:
|
||||
:param bag:
|
||||
:param disabled: disabled rules (because they have already been fired or whatever)
|
||||
:return: { True : list of success, False :list of failed, '#disabled"': list of disabled...}
|
||||
"""
|
||||
with context.push(BuiltinConcepts.EVALUATING_RULES, bag, desc="Evaluating rules...") as sub_context:
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_BODY_REQUESTED)
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_WHERE_REQUESTED)
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_UNTIL_SUCCESS_REQUESTED)
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_QUESTION_REQUESTED)
|
||||
sub_context.add_inputs(bag=bag)
|
||||
|
||||
debugger = sub_context.get_debugger(SheerkaEvaluateRules.NAME, "evaluate_rules")
|
||||
debugger.debug_entering(bag=bag)
|
||||
|
||||
results = {}
|
||||
|
||||
sub_context.sheerka.add_many_to_short_term_memory(sub_context, bag)
|
||||
success_priority = None
|
||||
for rule in rules:
|
||||
if not rule.metadata.is_enabled or rule.id in disabled:
|
||||
results.setdefault(DISABLED_RULES, []).append(rule)
|
||||
continue
|
||||
|
||||
if success_priority and rule.priority != success_priority:
|
||||
results.setdefault(LOW_PRIORITY_RULES, []).append(rule)
|
||||
continue
|
||||
|
||||
res = self.evaluate_rule(sub_context, rule, bag)
|
||||
ok = res.status and self.sheerka.is_success(self.sheerka.objvalue(res))
|
||||
results.setdefault(ok, []).append(rule)
|
||||
if ok and success_priority is None:
|
||||
success_priority = rule.priority
|
||||
|
||||
debugger.debug_var("results", self.get_debug_format(results))
|
||||
|
||||
sub_context.add_values(rules_result=results)
|
||||
return results
|
||||
|
||||
def evaluate_rule(self, context, rule, bag):
|
||||
"""
|
||||
Evaluate all the predicate
|
||||
:param context:
|
||||
:param rule:
|
||||
:param bag:
|
||||
:return:
|
||||
"""
|
||||
|
||||
results = []
|
||||
for rule_predicate in rule.compiled_predicate:
|
||||
|
||||
if rule_predicate.source in bag:
|
||||
# simple case where the rule is an item of the bag. No need of complicate evaluation
|
||||
results.append(context.sheerka.ret(self.NAME, True, bag[rule_predicate.source]))
|
||||
|
||||
else:
|
||||
|
||||
# do not forget to reset the 'is_evaluated' in the case of a concept
|
||||
if rule_predicate.evaluator == ConceptEvaluator.NAME:
|
||||
rule_predicate.concept.get_metadata().is_evaluated = False
|
||||
|
||||
evaluator = self.evaluators_by_name[rule_predicate.evaluator]
|
||||
results.append(evaluator.eval(context, rule_predicate.predicate))
|
||||
|
||||
debugger = context.get_debugger(SheerkaEvaluateRules.NAME, "evaluate_rule")
|
||||
debugger.debug_rule(rule, results)
|
||||
# if context.sheerka.debug_rule_activated(rule_id, context.id):
|
||||
# context.debug(SheerkaEvaluateRules.NAME, "evaluate_rules", f"result(#{rule_id})", results)
|
||||
|
||||
return expect_one(context, results)
|
||||
|
||||
@staticmethod
|
||||
def get_debug_format(result):
|
||||
"""
|
||||
Return the same dictionary, the with the short formatting of the rules
|
||||
eg without the action clause
|
||||
:param result:
|
||||
:return:
|
||||
"""
|
||||
return {key: [str(r) if key == True else r.short_str() for r in rules] for key, rules in result.items()}
|
||||
@@ -0,0 +1,60 @@
|
||||
from threading import RLock
|
||||
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
|
||||
|
||||
class SheerkaEventManager(BaseService):
|
||||
"""
|
||||
This class implement a very basic publish and subscribe mechanism
|
||||
It supposes that the subscriber has a little knowledge of how the publisher works
|
||||
"""
|
||||
NAME = "EventManager"
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self._lock = RLock()
|
||||
self.subscribers = {}
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.subscribe, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.publish, True, visible=False)
|
||||
|
||||
def subscribe(self, topic, callback):
|
||||
"""
|
||||
To subscribe to a topic, just give the callback to call
|
||||
Note that the callback must be a function whose first argument is a context
|
||||
:param topic:
|
||||
:param callback:
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
self.subscribers.setdefault(topic, []).append(callback)
|
||||
|
||||
def publish(self, context, topic, data=None):
|
||||
"""
|
||||
Publish on a topic
|
||||
The data is not mandatory
|
||||
:param context:
|
||||
:param topic:
|
||||
:param data:
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
try:
|
||||
subscribers = self.subscribers[topic]
|
||||
if data:
|
||||
for callback in subscribers:
|
||||
callback(context, data)
|
||||
else:
|
||||
for callback in subscribers:
|
||||
callback(context)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def reset_topic(self, topic):
|
||||
"""
|
||||
Remove all subsccribers from a given topic
|
||||
:param topic:
|
||||
:return:
|
||||
"""
|
||||
self.subscribers[topic].clear()
|
||||
@@ -5,6 +5,16 @@ from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.tokenizer import Tokenizer, TokenKind, Token
|
||||
|
||||
NO_MATCH = "** No Match **"
|
||||
EVALUATOR_STEPS = [
|
||||
BuiltinConcepts.BEFORE_PARSING,
|
||||
BuiltinConcepts.AFTER_PARSING,
|
||||
BuiltinConcepts.BEFORE_EVALUATION,
|
||||
BuiltinConcepts.EVALUATION,
|
||||
BuiltinConcepts.AFTER_EVALUATION,
|
||||
BuiltinConcepts.BEFORE_RENDERING,
|
||||
BuiltinConcepts.RENDERING,
|
||||
BuiltinConcepts.AFTER_RENDERING,
|
||||
]
|
||||
|
||||
|
||||
class ParserInput:
|
||||
@@ -74,10 +84,10 @@ class ParserInput:
|
||||
if self.start == 0 and self.end == self.length:
|
||||
self.sub_text = self.text
|
||||
return self.sub_text
|
||||
self.sub_text = self.get_text_from_tokens(self.tokens[self.start:self.end])
|
||||
self.sub_text = core.utils.get_text_from_tokens(self.tokens[self.start:self.end])
|
||||
return self.sub_text
|
||||
else:
|
||||
return self.get_text_from_tokens(self.as_tokens(), custom_switcher, tracker)
|
||||
return core.utils.get_text_from_tokens(self.as_tokens(), custom_switcher, tracker)
|
||||
|
||||
def as_tokens(self):
|
||||
if self.sub_tokens:
|
||||
@@ -145,36 +155,6 @@ class ParserInput:
|
||||
return True
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def get_text_from_tokens(tokens, custom_switcher=None, tracker=None):
|
||||
"""
|
||||
Create the source code, from the list of token
|
||||
:param tokens: list of tokens
|
||||
:param custom_switcher: to override the behaviour (the return value) of some token
|
||||
:param tracker: keep track of the original token value when custom switched
|
||||
:return:
|
||||
"""
|
||||
if tokens is None:
|
||||
return ""
|
||||
res = ""
|
||||
|
||||
if not hasattr(tokens, "__iter__"):
|
||||
tokens = [tokens]
|
||||
|
||||
switcher = {
|
||||
TokenKind.CONCEPT: lambda t: core.utils.str_concept(t.value),
|
||||
}
|
||||
|
||||
if custom_switcher:
|
||||
switcher.update(custom_switcher)
|
||||
|
||||
for token in tokens:
|
||||
value = switcher.get(token.type, lambda t: t.value)(token)
|
||||
res += value
|
||||
if tracker is not None and token.type in custom_switcher:
|
||||
tracker[value] = token.value
|
||||
return res
|
||||
|
||||
|
||||
class SheerkaExecute(BaseService):
|
||||
"""
|
||||
@@ -187,18 +167,156 @@ class SheerkaExecute(BaseService):
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.pi_cache = Cache(default=lambda key: ParserInput(key), max_size=20)
|
||||
self.instantiated_evaluators = None
|
||||
self.evaluators_by_name = None
|
||||
self.grouped_evaluators_cache = {} # key=step, value=tuple(evaluators for this step, sorted priorities)
|
||||
self.old_values = []
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.execute, True)
|
||||
|
||||
self.sheerka.cache_manager.register_cache(self.PARSERS_INPUTS_ENTRY, self.pi_cache, False)
|
||||
self.reset_evaluators()
|
||||
|
||||
def reset_evaluators(self):
|
||||
# instantiate evaluators, once for all, only keep when it's enabled
|
||||
self.instantiated_evaluators = [e_class() for e_class in self.sheerka.evaluators]
|
||||
self.instantiated_evaluators = [e for e in self.instantiated_evaluators if e.enabled]
|
||||
self.evaluators_by_name = {e.short_name: e for e in self.instantiated_evaluators}
|
||||
|
||||
# get default evaluators by process step
|
||||
for process_step in EVALUATOR_STEPS:
|
||||
self.grouped_evaluators_cache[f"{process_step}|__default"] = self.get_grouped_evaluators(
|
||||
[e for e in self.instantiated_evaluators if process_step in e.steps])
|
||||
|
||||
# @staticmethod
|
||||
# def get_grouped_evaluators(instantiated_evaluators, process_step):
|
||||
# """
|
||||
# For a given list of evaluators and a given process step
|
||||
# Computes
|
||||
# * the evaluators eligible for this step
|
||||
# * the list of sorted priorities for theses evaluators
|
||||
# :param instantiated_evaluators:
|
||||
# :param process_step:
|
||||
# :return:
|
||||
# """
|
||||
# grouped = {}
|
||||
# for evaluator in [e for e in instantiated_evaluators if e.enabled and process_step in e.steps]:
|
||||
# grouped.setdefault(evaluator.priority, []).append(evaluator)
|
||||
#
|
||||
# sorted_groups = sorted(grouped.keys(), reverse=True)
|
||||
# return grouped, sorted_groups
|
||||
|
||||
@staticmethod
|
||||
def get_grouped_evaluators(evaluators):
|
||||
"""
|
||||
For a given list of evaluators,
|
||||
group them by priorities
|
||||
sort the priorities
|
||||
:param evaluators:
|
||||
:return: tuple({priority: List of evaluators with this priority}, list of sorted priorities)
|
||||
"""
|
||||
grouped = {}
|
||||
for evaluator in evaluators:
|
||||
grouped.setdefault(evaluator.priority, []).append(evaluator)
|
||||
|
||||
sorted_groups = sorted(grouped.keys(), reverse=True)
|
||||
return grouped, sorted_groups
|
||||
|
||||
def preprocess(self, items, preprocess_definitions):
|
||||
for preprocess in preprocess_definitions:
|
||||
for item in items:
|
||||
if self.matches(item.name, preprocess.get_value("preprocess_name")):
|
||||
for var_name, value in preprocess.values().items():
|
||||
if var_name == "preprocess_name":
|
||||
continue
|
||||
if hasattr(item, var_name):
|
||||
self.old_values.append((item, var_name, getattr(item, var_name)))
|
||||
setattr(item, var_name, value)
|
||||
|
||||
def preprocess_old(self, context, parsers_or_evaluators, mode):
|
||||
if mode == "parsers":
|
||||
if not context.preprocess and not context.preprocess_parsers:
|
||||
return parsers_or_evaluators
|
||||
items = context.preprocess_parsers
|
||||
elif mode == "evaluators":
|
||||
if not context.preprocess and not context.preprocess_evaluators:
|
||||
return parsers_or_evaluators
|
||||
items = context.preprocess_evaluators
|
||||
else:
|
||||
raise ValueError(mode)
|
||||
|
||||
if not hasattr(parsers_or_evaluators, "__iter__"):
|
||||
single_one = True
|
||||
parsers_or_evaluators = [parsers_or_evaluators]
|
||||
else:
|
||||
single_one = False
|
||||
|
||||
if items:
|
||||
res = []
|
||||
for item in items:
|
||||
for e in parsers_or_evaluators:
|
||||
if item == e.name:
|
||||
res.append(e)
|
||||
break
|
||||
else:
|
||||
raise ValueError(f"{item} not found.")
|
||||
parsers_or_evaluators = res
|
||||
|
||||
if context.preprocess:
|
||||
for preprocess in context.preprocess:
|
||||
for e in parsers_or_evaluators:
|
||||
if self.matches(e.name, preprocess.get_value("name")):
|
||||
for var_name in preprocess.values:
|
||||
if var_name == "name":
|
||||
continue
|
||||
if hasattr(e, var_name):
|
||||
self.old_values.append((e, var_name, getattr(e, var_name)))
|
||||
setattr(e, var_name, preprocess.get_value(var_name))
|
||||
|
||||
return parsers_or_evaluators[0] if single_one else parsers_or_evaluators
|
||||
|
||||
def get_evaluators(self, context, process_step):
|
||||
"""
|
||||
Returns the list of evaluators to use for a specific test
|
||||
:param context:
|
||||
:param process_step:
|
||||
:return:
|
||||
"""
|
||||
# Normal case, the evaluators are the default one
|
||||
if not context.preprocess_evaluators and not context.preprocess:
|
||||
return self.grouped_evaluators_cache[f"{process_step}|__default"]
|
||||
|
||||
# First case, only use a subset of evaluators
|
||||
if context.preprocess_evaluators and not context.preprocess:
|
||||
key = str(process_step) + "|" + "|".join(context.preprocess_evaluators)
|
||||
try:
|
||||
return self.grouped_evaluators_cache[key]
|
||||
except KeyError:
|
||||
evaluators = [self.evaluators_by_name[e] for e in context.preprocess_evaluators]
|
||||
grouped = self.get_grouped_evaluators(evaluators)
|
||||
self.grouped_evaluators_cache[key] = grouped
|
||||
return grouped
|
||||
|
||||
# final case, evaluators attributes are modified by the context
|
||||
# So first, get the modified evaluators
|
||||
evaluators = [self.evaluators_by_name[e] for e in
|
||||
context.preprocess_evaluators] if context.preprocess_evaluators else self.instantiated_evaluators
|
||||
self.preprocess(evaluators, context.preprocess)
|
||||
evaluators = [e for e in evaluators if e.enabled] # make sure they are still enabled
|
||||
key = str(process_step) + "|" + "|".join([e.name for e in evaluators if e.enabled])
|
||||
try:
|
||||
return self.grouped_evaluators_cache[key]
|
||||
except KeyError:
|
||||
grouped = self.get_grouped_evaluators(evaluators)
|
||||
self.grouped_evaluators_cache[key] = grouped
|
||||
return grouped
|
||||
|
||||
def get_parser_input(self, text, tokens=None):
|
||||
"""
|
||||
Returns new or existing parser input
|
||||
:param text:
|
||||
:param tokens:
|
||||
:param length:
|
||||
:return:
|
||||
"""
|
||||
|
||||
@@ -212,7 +330,7 @@ class SheerkaExecute(BaseService):
|
||||
self.pi_cache.put(text, pi)
|
||||
return pi
|
||||
|
||||
key = text or ParserInput.get_text_from_tokens(tokens)
|
||||
key = text or core.utils.get_text_from_tokens(tokens)
|
||||
pi = ParserInput(key, tokens)
|
||||
self.pi_cache.put(key, pi)
|
||||
return pi
|
||||
@@ -251,7 +369,7 @@ class SheerkaExecute(BaseService):
|
||||
|
||||
# group the parsers by priorities
|
||||
instantiated_parsers = [parser(sheerka=self.sheerka) for parser in self.sheerka.parsers.values()]
|
||||
instantiated_parsers = self.preprocess(context, instantiated_parsers)
|
||||
instantiated_parsers = self.preprocess_old(context, instantiated_parsers, "parsers")
|
||||
|
||||
grouped_parsers = {}
|
||||
for parser in [p for p in instantiated_parsers if p.enabled]:
|
||||
@@ -272,13 +390,12 @@ class SheerkaExecute(BaseService):
|
||||
|
||||
# if self.sheerka.log.isEnabledFor(logging.DEBUG):
|
||||
# debug_text = "'" + to_parse + "'" if isinstance(to_parse, str) \
|
||||
# else "'" + BaseParser.get_text_from_tokens(to_parse) + "' as tokens"
|
||||
# else "'" + core.utils.get_text_from_tokens(to_parse) + "' as tokens"
|
||||
# context.log(f"Parsing {debug_text}")
|
||||
|
||||
with context.push(BuiltinConcepts.PARSING,
|
||||
{"parser": parser.name},
|
||||
desc=f"Parsing using {parser.name}",
|
||||
logger=parser.verbose_log) as sub_context:
|
||||
desc=f"Parsing using {parser.name}") as sub_context:
|
||||
sub_context.add_inputs(to_parse=to_parse)
|
||||
res = parser.parse(sub_context, to_parse)
|
||||
if res is not None:
|
||||
@@ -318,27 +435,13 @@ class SheerkaExecute(BaseService):
|
||||
if not isinstance(return_values, list):
|
||||
return_values = [return_values]
|
||||
|
||||
# group the evaluators by priority and sort them
|
||||
# The first one to be applied will be the one with the highest priority
|
||||
grouped_evaluators = {}
|
||||
instantiated_evaluators = [e_class() for e_class in self.sheerka.evaluators]
|
||||
grouped_evaluators, sorted_priorities = self.get_evaluators(context, process_step)
|
||||
|
||||
# pre-process evaluators if needed
|
||||
instantiated_evaluators = self.preprocess(context, instantiated_evaluators)
|
||||
|
||||
for evaluator in [e for e in instantiated_evaluators if e.enabled and process_step in e.steps]:
|
||||
grouped_evaluators.setdefault(evaluator.priority, []).append(evaluator)
|
||||
|
||||
# order the groups by priority, the higher first
|
||||
sorted_priorities = sorted(grouped_evaluators.keys(), reverse=True)
|
||||
|
||||
# process
|
||||
iteration = 0
|
||||
while True:
|
||||
with context.push(process_step,
|
||||
{"iteration": iteration},
|
||||
desc=f"iteration #{iteration}",
|
||||
iteration=iteration) as iteration_context:
|
||||
{"step": process_step, "iteration": iteration},
|
||||
desc=f"iteration #{iteration}") as iteration_context:
|
||||
simple_digest = return_values[:]
|
||||
iteration_context.add_inputs(return_values=simple_digest)
|
||||
|
||||
@@ -348,13 +451,14 @@ class SheerkaExecute(BaseService):
|
||||
evaluated_items = []
|
||||
to_delete = []
|
||||
for evaluator in grouped_evaluators[priority]:
|
||||
evaluator = self.preprocess(context, evaluator.__class__()) # fresh copy
|
||||
evaluator.reset()
|
||||
|
||||
sub_context_desc = f"Evaluating using {evaluator.name} ({priority=})"
|
||||
with iteration_context.push(process_step,
|
||||
{"iteration": iteration, "evaluator": evaluator.name},
|
||||
desc=sub_context_desc,
|
||||
logger=evaluator.verbose_log) as sub_context:
|
||||
{"step": process_step,
|
||||
"iteration": iteration,
|
||||
"evaluator": evaluator.name},
|
||||
desc=sub_context_desc) as sub_context:
|
||||
sub_context.add_inputs(return_values=original_items)
|
||||
|
||||
# process evaluators that work on one simple return value at the time
|
||||
@@ -365,6 +469,8 @@ class SheerkaExecute(BaseService):
|
||||
if evaluator.matches(sub_context, item):
|
||||
|
||||
# init the evaluator is possible
|
||||
# KSI. 20201102 : Evaluators are now instantiated at startup,
|
||||
# Can we move this section into reset_evaluators()
|
||||
if hasattr(evaluator, "init_evaluator") and not evaluator.is_initialized:
|
||||
evaluator.init_evaluator(sub_context, original_items)
|
||||
|
||||
@@ -401,6 +507,7 @@ class SheerkaExecute(BaseService):
|
||||
# process evaluators that work on all return values
|
||||
else:
|
||||
if evaluator.matches(sub_context, original_items):
|
||||
|
||||
results = evaluator.eval(sub_context, original_items)
|
||||
if results is None:
|
||||
continue
|
||||
@@ -427,6 +534,8 @@ class SheerkaExecute(BaseService):
|
||||
# inc the iteration and continue
|
||||
iteration += 1
|
||||
|
||||
self.undo_preprocess()
|
||||
|
||||
return return_values
|
||||
|
||||
def execute(self, context, return_values, execution_steps):
|
||||
@@ -441,40 +550,30 @@ class SheerkaExecute(BaseService):
|
||||
for step in execution_steps:
|
||||
copy = return_values[:] if hasattr(return_values, "__iter__") else [return_values]
|
||||
with context.push(BuiltinConcepts.PROCESSING,
|
||||
{"step": step},
|
||||
step=step, iteration=0, desc=f"{step=}") as sub_context:
|
||||
{"step": step, "iteration": 0},
|
||||
desc=f"{step=}") as sub_context:
|
||||
|
||||
sub_context.add_inputs(return_values=copy)
|
||||
|
||||
if step == BuiltinConcepts.PARSING:
|
||||
return_values = self.call_parsers(sub_context, return_values)
|
||||
else:
|
||||
return_values = self.call_evaluators(sub_context, return_values, step)
|
||||
|
||||
if copy != return_values:
|
||||
has_changed = copy != return_values
|
||||
if has_changed:
|
||||
sub_context.log_result(return_values)
|
||||
|
||||
sub_context.add_values(return_values=return_values)
|
||||
sub_context.add_values(has_changed=has_changed)
|
||||
|
||||
return return_values
|
||||
|
||||
def preprocess(self, context, parsers_or_evaluators):
|
||||
if not context.preprocess:
|
||||
return parsers_or_evaluators
|
||||
def undo_preprocess(self):
|
||||
for item, var_name, value in self.old_values:
|
||||
setattr(item, var_name, value)
|
||||
|
||||
if not hasattr(parsers_or_evaluators, "__iter__"):
|
||||
single_one = True
|
||||
parsers_or_evaluators = [parsers_or_evaluators]
|
||||
else:
|
||||
single_one = False
|
||||
|
||||
for preprocess in context.preprocess:
|
||||
for e in parsers_or_evaluators:
|
||||
if self.matches(e.name, preprocess.get_value("name")):
|
||||
for var_name in preprocess.values:
|
||||
if var_name == "name":
|
||||
continue
|
||||
if hasattr(e, var_name):
|
||||
setattr(e, var_name, preprocess.get_value(var_name))
|
||||
return parsers_or_evaluators[0] if single_one else parsers_or_evaluators
|
||||
self.old_values.clear()
|
||||
|
||||
@staticmethod
|
||||
def matches(parser_or_evaluator_name, preprocessor_name):
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import ensure_concept
|
||||
from core.builtin_helpers import ensure_concept
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
|
||||
|
||||
@@ -25,8 +25,8 @@ class SheerkaHasAManager(BaseService):
|
||||
context.log(f"Setting concept {concept_a} has a {concept_b}", who=self.NAME)
|
||||
ensure_concept(concept_a, concept_b)
|
||||
|
||||
if (BuiltinConcepts.HASA in concept_a.metadata.props and
|
||||
concept_b in concept_a.metadata.props[BuiltinConcepts.HASA]):
|
||||
if (BuiltinConcepts.HASA in concept_a.get_metadata().props and
|
||||
concept_b in concept_a.get_metadata().props[BuiltinConcepts.HASA]):
|
||||
return self.sheerka.ret(
|
||||
self.NAME,
|
||||
False,
|
||||
@@ -49,5 +49,5 @@ class SheerkaHasAManager(BaseService):
|
||||
"""
|
||||
|
||||
ensure_concept(concept_a, concept_b)
|
||||
return (BuiltinConcepts.HASA in concept_a.metadata.props and
|
||||
concept_b in concept_a.metadata.props[BuiltinConcepts.HASA])
|
||||
return (BuiltinConcepts.HASA in concept_a.get_metadata().props and
|
||||
concept_b in concept_a.get_metadata().props[BuiltinConcepts.HASA])
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
from cache.FastCache import FastCache
|
||||
from cache.ListIfNeededCache import ListIfNeededCache
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import CONTEXT_DISPOSED
|
||||
from core.sheerka.services.sheerka_service import BaseService, ServiceObj
|
||||
|
||||
|
||||
@@ -13,48 +15,72 @@ class MemoryObject(ServiceObj):
|
||||
|
||||
class SheerkaMemory(BaseService):
|
||||
NAME = "Memory"
|
||||
GLOBAL = "global"
|
||||
|
||||
SHORT_TERM_OBJECTS_ENTRY = "Memory:ShortTermMemoryObjects"
|
||||
OBJECTS_ENTRY = "Memory:Objects"
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.short_term_objects = ListIfNeededCache()
|
||||
self.short_term_objects = FastCache()
|
||||
self.objects = ListIfNeededCache(default=lambda k: self.sheerka.sdp.get(self.OBJECTS_ENTRY, k))
|
||||
self.registration = {}
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.get_from_short_term_memory, False, visible=False)
|
||||
self.sheerka.bind_service_method(self.add_to_short_term_memory, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.remove_context, True, as_name="clear_short_term_memory", visible=False)
|
||||
self.sheerka.bind_service_method(self.add_to_memory, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.add_many_to_short_term_memory, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.get_from_memory, False)
|
||||
self.sheerka.bind_service_method(self.register_object, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.unregister_object, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.add_registered_objects, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.memory, False)
|
||||
|
||||
self.sheerka.cache_manager.register_cache(self.SHORT_TERM_OBJECTS_ENTRY, self.short_term_objects, persist=False)
|
||||
self.sheerka.cache_manager.register_cache(self.OBJECTS_ENTRY, self.objects, persist=True, use_ref=True)
|
||||
|
||||
def reset(self):
|
||||
self.short_term_objects.clear()
|
||||
|
||||
def initialize_deferred(self, context, is_first_time):
|
||||
self.sheerka.subscribe(CONTEXT_DISPOSED, self.remove_context)
|
||||
|
||||
def get_from_short_term_memory(self, context, key):
|
||||
while True:
|
||||
key_to_use = (str(context.id) if context else "") + ":" + key
|
||||
if (obj := self.sheerka.cache_manager.get(self.SHORT_TERM_OBJECTS_ENTRY, key_to_use)) is not None:
|
||||
return obj
|
||||
try:
|
||||
id_to_use = context.id if context else self.GLOBAL
|
||||
return self.short_term_objects.cache[id_to_use][key]
|
||||
except KeyError:
|
||||
if context is None:
|
||||
return None
|
||||
|
||||
if context is None:
|
||||
return None
|
||||
context = context.get_parent()
|
||||
|
||||
context = context.get_parent()
|
||||
def get_all_short_term_memory(self, context):
|
||||
return self.short_term_objects.get(context.id)
|
||||
|
||||
def add_to_short_term_memory(self, context, key, concept):
|
||||
def add_to_short_term_memory(self, context, key, value):
|
||||
if context:
|
||||
context.stm = True
|
||||
key_to_use = (str(context.id) if context else "") + ":" + key
|
||||
return self.sheerka.cache_manager.put(self.SHORT_TERM_OBJECTS_ENTRY, key_to_use, concept)
|
||||
id_to_use = context.id
|
||||
else:
|
||||
id_to_use = SheerkaMemory.GLOBAL
|
||||
|
||||
if id_to_use in self.short_term_objects.cache:
|
||||
self.short_term_objects.cache[id_to_use][key] = value
|
||||
else:
|
||||
self.short_term_objects.put(id_to_use, {key: value})
|
||||
|
||||
def add_many_to_short_term_memory(self, context, bag):
|
||||
context.stm = True
|
||||
self.short_term_objects.put(context.id if context else self.GLOBAL, bag)
|
||||
|
||||
def remove_context(self, context):
|
||||
self.short_term_objects.evict_by_key(lambda k: k.startswith(str(context.id) + ":"))
|
||||
try:
|
||||
del self.short_term_objects.cache[context.id]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def add_to_memory(self, context, key, concept):
|
||||
"""
|
||||
@@ -74,6 +100,11 @@ class SheerkaMemory(BaseService):
|
||||
def register_object(self, context, key, concept):
|
||||
"""
|
||||
Before adding objects to memory, they first need to be registered
|
||||
More:
|
||||
We don't want to add all evaluated concept into memory
|
||||
(because some of them may be ref to concept already in memory)
|
||||
So we first register them, and add the end of sheerka.evaluate_user_input()
|
||||
all remaining registered concepts will be added to memory
|
||||
:param context:
|
||||
:param key:
|
||||
:param concept:
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import ensure_concept
|
||||
from core.builtin_helpers import ensure_concept
|
||||
from core.concept import NotInit, freeze_concept_attrs, Concept
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from parsers.BnfParser import BnfParser
|
||||
from parsers.BnfDefinitionParser import BnfDefinitionParser
|
||||
|
||||
|
||||
class SheerkaModifyConcept(BaseService):
|
||||
@@ -47,6 +48,9 @@ class SheerkaModifyConcept(BaseService):
|
||||
BuiltinConcepts.CONCEPT_ALREADY_DEFINED,
|
||||
body=concept))
|
||||
|
||||
# update attributes
|
||||
freeze_concept_attrs(concept)
|
||||
|
||||
self.sheerka.cache_manager.update_concept(old_version, concept)
|
||||
|
||||
# TODO : update concept by first keyword : have a look at update_references() below
|
||||
@@ -72,8 +76,8 @@ class SheerkaModifyConcept(BaseService):
|
||||
for concept_id in refs:
|
||||
concept = self.sheerka.get_by_id(concept_id)
|
||||
|
||||
if concept.bnf is not None:
|
||||
BnfParser.update_recurse_id(context, concept_id, concept.bnf)
|
||||
if concept.get_bnf() is not None:
|
||||
BnfDefinitionParser.update_recurse_id(context, concept_id, concept.get_bnf())
|
||||
|
||||
# remove the grammar entry so that it can be recreated
|
||||
self.sheerka.cache_manager.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_id)
|
||||
@@ -88,7 +92,9 @@ class SheerkaModifyConcept(BaseService):
|
||||
:return:
|
||||
"""
|
||||
ensure_concept(concept)
|
||||
concept.set_value(attribute, value)
|
||||
|
||||
attr = attribute.str_id if isinstance(attribute, Concept) else attribute
|
||||
concept.set_value(attr, value)
|
||||
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
|
||||
|
||||
def get_attr(self, concept, attribute):
|
||||
@@ -103,6 +109,8 @@ class SheerkaModifyConcept(BaseService):
|
||||
if not self.sheerka.is_success(concept):
|
||||
return concept
|
||||
|
||||
if (value := concept.get_value(attribute)) == BuiltinConcepts.NOT_INITIALIZED:
|
||||
attr = attribute.str_id if isinstance(attribute, Concept) else attribute
|
||||
|
||||
if (value := concept.get_value(attr)) == NotInit:
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"#concept": concept, "#attr": attribute})
|
||||
return value
|
||||
|
||||
@@ -0,0 +1,100 @@
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.utils import as_bag
|
||||
from out.ConsoleVisistor import ConsoleVisitor
|
||||
from out.DeveloperVisitor import DeveloperVisitor
|
||||
|
||||
|
||||
class SheerkaOut(BaseService):
|
||||
NAME = "Out"
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.out_visitors = [ConsoleVisitor()]
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.process_return_values, False)
|
||||
|
||||
def create_out_tree(self, context, obj):
|
||||
return self.create_out_tree_recursive(context, {'__obj': obj}, DeveloperVisitor(self, set(), 0))
|
||||
|
||||
def create_out_tree_recursive(self, context, bag, visitor):
|
||||
debugger = context.get_debugger(SheerkaOut.NAME, "create_out_tree")
|
||||
debugger.debug_entering(bag=bag)
|
||||
|
||||
current_obj = bag["__obj"]
|
||||
bag = self.update_bag(bag, visitor.list_recursion_depth)
|
||||
|
||||
valid_rules = self.sheerka.evaluate_format_rules(context, bag, visitor.already_seen).get(True, None)
|
||||
res = None
|
||||
if valid_rules:
|
||||
if len(valid_rules) > 1:
|
||||
# TODO manage when too many rules
|
||||
pass
|
||||
|
||||
rule = valid_rules[0]
|
||||
if rule.id in visitor.already_seen:
|
||||
debugger.debug_log(f"Rule #{rule.id} already fired.")
|
||||
else:
|
||||
debugger.debug_log(f"Applying rule {rule}.")
|
||||
visitor.already_seen.add(rule.id)
|
||||
|
||||
bag.update(as_bag(current_obj)) # update with the current obj attributes
|
||||
visitor.visit(context, rule.compiled_action, bag)
|
||||
res = visitor.get_result()
|
||||
|
||||
if res is None:
|
||||
debugger.debug_log(f"No matching rule.")
|
||||
res = current_obj
|
||||
|
||||
debugger.debug_var("out_tree", res)
|
||||
|
||||
return res
|
||||
|
||||
def process_return_values(self, context, ret):
|
||||
with context.push(BuiltinConcepts.BEFORE_RENDERING,
|
||||
None,
|
||||
desc=f"step='{BuiltinConcepts.BEFORE_RENDERING}'") as sub_context:
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_BODY_REQUESTED)
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_WHERE_REQUESTED)
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_UNTIL_SUCCESS_REQUESTED)
|
||||
sub_context.protected_hints.add(BuiltinConcepts.EVAL_QUESTION_REQUESTED)
|
||||
# sub_context.deactivate_push()
|
||||
|
||||
out_tree = self.create_out_tree(sub_context, ret)
|
||||
|
||||
# sub_context.activate_push()
|
||||
|
||||
if out_tree:
|
||||
for visitor in self.out_visitors:
|
||||
visitor.visit(context, out_tree, None)
|
||||
if hasattr(visitor, "finalize"):
|
||||
visitor.finalize()
|
||||
|
||||
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
|
||||
return self.sheerka.ret(self.NAME, False, self.sheerka.new(BuiltinConcepts.NO_RESULT))
|
||||
|
||||
def update_bag(self, bag, depth):
|
||||
obj = bag["__obj"]
|
||||
bag["__tab"] = " " * depth
|
||||
|
||||
if self.sheerka.isinstance(obj, BuiltinConcepts.RETURN_VALUE):
|
||||
bag["__ret"] = obj
|
||||
if self.sheerka.is_container(obj.body):
|
||||
bag["__ret_container"] = obj.body
|
||||
bag["__ret_value"] = self.simplify_list(obj.body.body)
|
||||
bag["__ret_val"] = bag["__ret_value"]
|
||||
else:
|
||||
bag["__ret_value"] = self.simplify_list(obj.body)
|
||||
bag["__ret_val"] = bag["__ret_value"]
|
||||
elif isinstance(obj, list) and len(obj) > 0 and self.sheerka.isinstance(obj[0], BuiltinConcepts.RETURN_VALUE):
|
||||
bag["__rets"] = obj
|
||||
|
||||
return bag
|
||||
|
||||
@staticmethod
|
||||
def simplify_list(item):
|
||||
try:
|
||||
return item[0] if hasattr(item, "__len__") and len(item) == 1 else item
|
||||
except KeyError:
|
||||
return item # Caution. it's a dict, not a list !
|
||||
@@ -1,5 +1,8 @@
|
||||
import ast
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.utils import as_bag
|
||||
|
||||
|
||||
class SheerkaResultConcept(BaseService):
|
||||
@@ -14,10 +17,31 @@ class SheerkaResultConcept(BaseService):
|
||||
self.sheerka.bind_service_method(self.get_results_by_command, True) # digest is recorded
|
||||
self.sheerka.bind_service_method(self.get_last_results, True) # digest is recorded
|
||||
self.sheerka.bind_service_method(self.get_results, False)
|
||||
self.sheerka.bind_service_method(self.get_execution_item, False)
|
||||
|
||||
def get_results_by_digest(self, context, digest, record_digest=True):
|
||||
@staticmethod
|
||||
def get_predicate(**kwargs):
|
||||
if len(kwargs) == 0:
|
||||
return None
|
||||
res = []
|
||||
if "filter" in kwargs:
|
||||
res.append(kwargs["filter"])
|
||||
kwargs.pop("filter")
|
||||
|
||||
for k, v in kwargs.items():
|
||||
if k in ("depth", "recursion_depth"):
|
||||
continue
|
||||
|
||||
if isinstance(v, str):
|
||||
v = '"' + v.translate(str.maketrans({'"': r'\"'})) + '"'
|
||||
res.append(f"{k} == {v}")
|
||||
predicate = " and ".join(res)
|
||||
return compile(ast.parse(predicate, mode="eval"), "<SheerkaResultManager.get_predicate>", mode="eval")
|
||||
|
||||
def get_results_by_digest(self, context, digest, filter=None, record_digest=True, **kwargs):
|
||||
"""
|
||||
Gets the entire execution tree for the given event digest
|
||||
:param filter:
|
||||
:param context:
|
||||
:param digest:
|
||||
:param record_digest:
|
||||
@@ -26,27 +50,39 @@ class SheerkaResultConcept(BaseService):
|
||||
if digest is None:
|
||||
return None
|
||||
|
||||
if filter is not None:
|
||||
kwargs["filter"] = filter
|
||||
|
||||
try:
|
||||
result = self.sheerka.sdp.load_result(digest)
|
||||
event = self.sheerka.sdp.load_event(digest)
|
||||
|
||||
if record_digest:
|
||||
context.log(f"Recording digest '{digest}'")
|
||||
self.sheerka.record(context, self.NAME, "digest", digest)
|
||||
self.sheerka.record_var(context, self.NAME, "digest", digest)
|
||||
|
||||
explanation = self.sheerka.new(BuiltinConcepts.EXPLANATION,
|
||||
digest=event.get_digest(),
|
||||
command=event.message,
|
||||
body=self.as_list(result, self.get_predicate(**kwargs)))
|
||||
|
||||
# add format instructions if applicable
|
||||
if (depth := kwargs.get("depth", None)) is not None or \
|
||||
(depth := kwargs.get("recursion_depth", None)) is not None:
|
||||
explanation.set_format_instr(recursion_depth=depth, recurse_on="_children")
|
||||
|
||||
return explanation
|
||||
|
||||
return self.sheerka.new(BuiltinConcepts.EXPLANATION,
|
||||
digest=event.get_digest(),
|
||||
command=event.message,
|
||||
body=self.as_list(result))
|
||||
except FileNotFoundError as ex:
|
||||
context.log_error(f"Digest {digest} is not found.", self.NAME, ex)
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"digest": digest})
|
||||
|
||||
def get_results_by_command(self, context, command, record_digest=True):
|
||||
def get_results_by_command(self, context, command, filter=None, record_digest=True, **kwargs):
|
||||
"""
|
||||
Get the result of the command that starts with command
|
||||
:param context:
|
||||
:param command:
|
||||
:param filter:
|
||||
:param record_digest:
|
||||
:return:
|
||||
"""
|
||||
@@ -59,7 +95,7 @@ class SheerkaResultConcept(BaseService):
|
||||
for event in self.sheerka.sdp.load_events(self.page_size, start):
|
||||
consumed += 1
|
||||
if event.message.startswith(command):
|
||||
return self.get_results_by_digest(context, event.get_digest(), record_digest)
|
||||
return self.get_results_by_digest(context, event.get_digest(), filter, record_digest, **kwargs)
|
||||
|
||||
if consumed < self.page_size:
|
||||
break
|
||||
@@ -69,10 +105,11 @@ class SheerkaResultConcept(BaseService):
|
||||
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"command": command})
|
||||
|
||||
def get_last_results(self, context, record_digest=True):
|
||||
def get_last_results(self, context, filter=None, record_digest=True, **kwargs):
|
||||
"""
|
||||
Gets the results of the last command
|
||||
:param context:
|
||||
:param filter:
|
||||
:param record_digest:
|
||||
:return:
|
||||
"""
|
||||
@@ -84,7 +121,7 @@ class SheerkaResultConcept(BaseService):
|
||||
for event in self.sheerka.sdp.load_events(page_size, start):
|
||||
consumed += 1
|
||||
if self.sheerka.sdp.has_result(event.get_digest()):
|
||||
return self.get_results_by_digest(context, event.get_digest(), record_digest)
|
||||
return self.get_results_by_digest(context, event.get_digest(), filter, record_digest, **kwargs)
|
||||
|
||||
if consumed < page_size:
|
||||
break
|
||||
@@ -97,27 +134,45 @@ class SheerkaResultConcept(BaseService):
|
||||
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"query": "last"})
|
||||
|
||||
def get_results(self, context):
|
||||
def get_results(self, context, filter=None, **kwargs):
|
||||
"""
|
||||
Use the last digest saved to get the execution results
|
||||
:param context:
|
||||
:param filter:
|
||||
:return:
|
||||
"""
|
||||
|
||||
digest = self.sheerka.load(self.NAME, "digest")
|
||||
digest = self.sheerka.load_var(self.NAME, "digest")
|
||||
if digest is None:
|
||||
context.log("No recorded digest found.")
|
||||
return None
|
||||
|
||||
return self.get_results_by_digest(context, digest, False)
|
||||
return self.get_results_by_digest(context, digest, filter, False, **kwargs)
|
||||
|
||||
def get_execution_item(self, context, item_id):
|
||||
digest = self.sheerka.load_var(self.NAME, "digest")
|
||||
if digest is None:
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body="no digest")
|
||||
|
||||
try:
|
||||
result = self.sheerka.sdp.load_result(digest)
|
||||
items = list(self.as_list(result, self.get_predicate(id=item_id)))
|
||||
|
||||
if len(items) == 0:
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"id": item_id})
|
||||
|
||||
return items[0]
|
||||
|
||||
except FileNotFoundError as ex:
|
||||
context.log_error(f"Digest {digest} is not found.", self.NAME, ex)
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"digest": digest})
|
||||
|
||||
@staticmethod
|
||||
def as_list(execution_context):
|
||||
|
||||
def as_list(execution_context, predicate):
|
||||
def _yield_result(lst):
|
||||
|
||||
for e in lst:
|
||||
yield e
|
||||
if predicate is None or eval(predicate, as_bag(e)):
|
||||
yield e
|
||||
|
||||
if e._children:
|
||||
yield from _yield_result(e._children)
|
||||
|
||||
@@ -0,0 +1,642 @@
|
||||
import operator
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from typing import Union
|
||||
|
||||
from cache.Cache import Cache
|
||||
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
|
||||
from core.builtin_helpers import parse_unrecognized, only_successful, ensure_rule
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import RULE_PRECEDENCE_MODIFIED, RULE_COMPARISON_CONTEXT
|
||||
from core.rule import Rule
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.tokenizer import Keywords, TokenKind, Token, IterParser
|
||||
from core.utils import index_tokens, COLORS, get_text_from_tokens
|
||||
from evaluators.ConceptEvaluator import ConceptEvaluator
|
||||
from evaluators.PythonEvaluator import PythonEvaluator
|
||||
from parsers.BaseNodeParser import SourceCodeWithConceptNode, ConceptNode, SourceCodeNode
|
||||
from parsers.PythonParser import PythonNode
|
||||
|
||||
identifier_regex = re.compile(r"[\w _.]+")
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatRuleError:
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class BraceMismatch(FormatRuleError):
|
||||
lbrace: Token
|
||||
|
||||
|
||||
@dataclass
|
||||
class UnexpectedEof(FormatRuleError):
|
||||
message: str
|
||||
token: Token = None
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
|
||||
if not isinstance(other, UnexpectedEof):
|
||||
return False
|
||||
|
||||
return self.message == other.message and (other.token is None or other.token == self.token)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.message, self.token)
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatRuleSyntaxError(FormatRuleError):
|
||||
message: str
|
||||
token: Token
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatAstNode:
|
||||
@staticmethod
|
||||
def repr_value(items):
|
||||
if items is None:
|
||||
return ""
|
||||
|
||||
return ", ".join(repr(item) for item in items)
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatAstRawText(FormatAstNode):
|
||||
text: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatAstVariable(FormatAstNode):
|
||||
name: str
|
||||
format: Union[str, None] = None
|
||||
value: object = None
|
||||
index: object = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatAstVariableNotFound(FormatAstNode):
|
||||
name: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatAstGrid(FormatAstNode):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatAstList(FormatAstNode):
|
||||
variable: str
|
||||
items_prop: str = None # where to search the list if variable does not resolve to an iterable
|
||||
recurse_on: str = None
|
||||
recursion_depth: int = 0
|
||||
items: object = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatAstColor(FormatAstNode):
|
||||
def __init__(self, color, format_ast):
|
||||
self.color = color
|
||||
self.format_ast = format_ast
|
||||
|
||||
def __repr__(self):
|
||||
return f"{self.color}({self.format_ast})"
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
|
||||
if not isinstance(other, FormatAstColor):
|
||||
return False
|
||||
|
||||
return self.color == other.color and self.format_ast == other.format_ast
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.color, self.format_ast))
|
||||
|
||||
|
||||
@dataclass
|
||||
class FormatAstFunction(FormatAstNode):
|
||||
name: str
|
||||
args: list = None
|
||||
kwargs: dict = None
|
||||
|
||||
|
||||
class FormatAstSequence(FormatAstNode):
|
||||
def __init__(self, items):
|
||||
self.items = items
|
||||
|
||||
def __repr__(self):
|
||||
return "FormatAstSequence(" + self.repr_value(self.items) + ")"
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
|
||||
if not isinstance(other, FormatAstSequence):
|
||||
return False
|
||||
|
||||
return self.items == other.items
|
||||
|
||||
|
||||
class FormatRuleParser(IterParser):
    """Parses a format-rule source string into a FormatAst* tree.

    Informal grammar: format ::= {variable[:spec]} | function(...) | rawtext,
    with a backslash escaping the following token. Errors are reported
    through self.error_sink rather than raised.
    """

    @staticmethod
    def to_text(list_or_dict_of_tokens):
        """
        Works on list of list of tokens
        or dict of list of tokens
        :param list_or_dict_of_tokens: list or dict whose values are token lists
        :return: same container shape with each token list rendered to text
        """
        get_text = get_text_from_tokens
        if isinstance(list_or_dict_of_tokens, list):
            return [get_text(i) for i in list_or_dict_of_tokens]
        if isinstance(list_or_dict_of_tokens, dict):
            return {k: get_text(v) for k, v in list_or_dict_of_tokens.items()}
        raise NotImplementedError("")

    def to_value(self, tokens):
        """
        Works on list of tokens
        return string or numeric value of the tokens
        :return: str (quotes stripped), int or float; implicitly None with
            error_sink set when the text is neither quoted nor numeric
        """

        value = get_text_from_tokens(tokens)
        # NOTE(review): assumes value is non-empty; an empty token list would
        # raise IndexError here — confirm callers never pass one.
        if value[0] in ("'", '"'):
            return value[1:-1]

        try:
            return int(value)
        except ValueError:
            pass

        try:
            return float(value)
        except ValueError:
            self.error_sink = FormatRuleSyntaxError(f"'{value}' is not numeric", None)

    def parse(self):
        """
        Parses a format rule
        format ::= {variable'} | function(...) | rawtext
        :return: [] when nothing parsed, a single node, or a FormatAstSequence
        """

        if self.source == "":
            return FormatAstRawText("")

        buffer = []      # raw-text tokens accumulated so far
        result = []      # parsed FormatAst nodes, in order
        res = None
        escaped = False  # True right after a backslash token

        def _flush_buffer():
            # Turn pending raw tokens into a FormatAstRawText node.
            if len(buffer) > 0:
                result.append(FormatAstRawText(get_text_from_tokens(buffer)))
                buffer.clear()

        while self.next_token(skip_whitespace=False):
            if not escaped:
                # An identifier immediately followed by '(' starts a function call.
                if self.token.type == TokenKind.IDENTIFIER and self.the_token_after().type == TokenKind.LPAR:
                    _flush_buffer()
                    res = self.parse_function(self.token)
                elif self.token.type == TokenKind.LBRACE:
                    _flush_buffer()
                    res = self.parse_variable(self.token)
                elif self.token.type == TokenKind.BACK_SLASH:
                    escaped = True
                else:
                    buffer.append(self.token)
            else:
                # The escaped token is kept literally as raw text.
                escaped = False
                buffer.append(self.token)

            if self.error_sink:
                break

            if res:
                result.append(res)
                res = None

        _flush_buffer()

        return [] if len(result) == 0 else result[0] if len(result) == 1 else FormatAstSequence(result)

    def parse_function(self, func_name):
        """Parse 'name(arg, ..., key=value, ...)'; self.token sits on the identifier.

        :param func_name: the identifier token naming the function
        :return: a FormatAst* node, or None with error_sink set
        """
        # Skip the identifier, then the '('.
        self.next_token()
        self.next_token()

        if self.token.type == TokenKind.EOF:
            self.error_sink = UnexpectedEof("while parsing function", func_name)
            return None

        param_buffer = []
        args = []
        kwargs = {}
        get_text = get_text_from_tokens

        def _process_parameters():
            # Classify the buffered tokens as a kwarg ('=' present) or a positional arg.
            if len(param_buffer) == 0:
                self.error_sink = FormatRuleSyntaxError("no parameter found", self.token)
                return None
            if (index := index_tokens(param_buffer, "=")) > 0:
                kwargs[get_text(param_buffer[:index])] = param_buffer[index + 1:]
            else:
                args.append(param_buffer.copy())
            param_buffer.clear()

        while True:
            if self.token.type == TokenKind.RPAR:
                if len(param_buffer) > 0:
                    _process_parameters()
                break

            elif self.token.type == TokenKind.COMMA:
                _process_parameters()
                if self.error_sink:
                    break

            else:
                param_buffer.append(self.token)

            if not self.next_token():
                break

        if self.error_sink:
            return None

        if self.token.type != TokenKind.RPAR:
            self.error_sink = UnexpectedEof("while parsing function", func_name)
            return None

        # Colors and list() get dedicated node types.
        if func_name.value in COLORS:
            return self.return_color(func_name.value, args, kwargs)
        elif func_name.value == "list":
            return self.return_list(args, kwargs)

        return FormatAstFunction(func_name.value, self.to_text(args), self.to_text(kwargs))

    def parse_variable(self, lbrace):
        """Parse '{name[:format]}'; self.token sits on the '{'.

        :param lbrace: the opening-brace token, used for error reporting
        :return: a FormatAstVariable, or None with error_sink set
        """
        self.next_token()

        if self.token.type == TokenKind.EOF:
            self.error_sink = UnexpectedEof("while parsing variable", lbrace)
            return None

        buffer = []
        while True:
            if self.token.type == TokenKind.RBRACE:
                break
            buffer.append(self.token)

            if not self.next_token():
                break

        # if self.error_sink:
        #     return None

        if self.token.type != TokenKind.RBRACE:
            self.error_sink = UnexpectedEof("while parsing variable", lbrace)
            return None

        if len(buffer) == 0:
            self.error_sink = FormatRuleSyntaxError("variable name not found", None)
            return None

        variable = get_text_from_tokens(buffer)
        try:
            # An optional ':' separates the variable name from its format spec.
            index = variable.index(":")
            return FormatAstVariable(variable[:index], variable[index + 1:])
        except ValueError:
            return FormatAstVariable(variable)

    def return_color(self, color, args, kwargs):
        """Build a FormatAstColor from a color-function call such as red(...).

        :param color: the color name (guaranteed to be in COLORS by the caller)
        :param args: positional arguments as token lists (at most one allowed)
        :param kwargs: must be empty; rejected otherwise
        :return: a FormatAstColor, or None with error_sink set
        """
        if len(kwargs) > 0:
            self.error_sink = FormatRuleSyntaxError("keyword arguments are not supported", None)
            return None

        if len(args) == 0:
            return FormatAstColor(color, FormatAstRawText(""))

        if len(args) > 1:
            self.error_sink = FormatRuleSyntaxError("only one parameter supported", args[1][0])
            return None

        source = get_text_from_tokens(args[0])
        if len(source) > 1 and source[0] in ("'", '"') and source[-1] in ("'", '"'):
            # Quoted argument: strip the quotes and parse it as a nested format rule.
            source = source[1:-1]
            parser = FormatRuleParser(source)
            res = parser.parse()
            self.error_sink = parser.error_sink
            return FormatAstColor(color, res)
        else:
            # Bare argument: a variable name with an optional ':' format spec.
            try:
                index = source.index(":")
                variable, vformat = source[:index], source[index + 1:]
            except ValueError:
                variable, vformat = source, None

            if not identifier_regex.fullmatch(variable):
                self.error_sink = FormatRuleSyntaxError("Invalid identifier", None)
                return None
            return FormatAstColor(color, FormatAstVariable(variable, vformat))

    def return_list(self, args, kwargs):
        """Build a FormatAstList from a list(...) call.

        Positional form: list(variable[, recursion_depth[, recurse_on]]);
        keyword arguments recurse_on / recursion_depth / items_prop override
        the positional values.

        :return: a FormatAstList, or None with error_sink set
        """
        len_args = len(args)
        if len_args < 1:
            self.error_sink = FormatRuleSyntaxError("variable name not found", None)
            return None

        if len_args > 3:
            self.error_sink = FormatRuleSyntaxError("too many positional arguments", args[3][0])
            return None

        variable_name = get_text_from_tokens(args[0])
        recurse_on, recursion_depth, items_prop = None, 0, None

        if len_args == 2:
            recursion_depth = self.to_value(args[1])
        elif len_args == 3:
            recursion_depth = self.to_value(args[1])
            recurse_on = self.to_value(args[2])

        if "recurse_on" in kwargs:
            recurse_on = self.to_value(kwargs["recurse_on"])

        if "recursion_depth" in kwargs:
            recursion_depth = self.to_value(kwargs["recursion_depth"])

        if "items_prop" in kwargs:
            items_prop = self.to_value(kwargs["items_prop"])

        if self.error_sink:
            return None

        if not isinstance(recursion_depth, int):
            self.error_sink = FormatRuleSyntaxError("'recursion_depth' must be an integer", None)
            return None

        return FormatAstList(variable_name, items_prop, recurse_on, recursion_depth)
|
||||
|
||||
|
||||
@dataclass()
class RulePredicate:
    """Binds one parsed rule predicate to the evaluator that should run it."""
    # Original (uncompiled) predicate source text.
    source: str
    # Name of the evaluator service to use (e.g. PythonEvaluator.NAME).
    evaluator: str
    # The parsed predicate result.
    predicate: ReturnValueConcept
    # Concept the predicate applies to; None for Python-evaluated predicates.
    concept: Union[Concept, None]
|
||||
|
||||
|
||||
class SheerkaRuleManager(BaseService):
    """Service owning the format/exec rules: storage, compilation and ordering.

    Format ("print") rules control how return values are rendered; exec rules
    are stored here but executed elsewhere. Both kinds live in caches backed
    by the SDP store.
    """

    NAME = "RuleManager"
    RULE_IDS = "Rules_Ids"
    FORMAT_RULE_ENTRY = "RuleManager:FormatRules"
    EXEC_RULE_ENTRY = "RuleManager:ExecRules"

    def __init__(self, sheerka):
        super().__init__(sheerka)
        # Caches fall back to the SDP store on a miss.
        self.format_rule_cache = Cache(default=lambda k: self.sheerka.sdp.get(self.FORMAT_RULE_ENTRY, k))
        self.exec_rule_cache = Cache(default=lambda k: self.sheerka.sdp.get(self.EXEC_RULE_ENTRY, k))

        self._format_rules = None  # sorted by priority; None means "needs rebuild"

    def initialize(self):
        """Bind the service methods and register the rule caches."""
        self.sheerka.bind_service_method(self.create_new_rule, True, visible=False)
        self.sheerka.bind_service_method(self.get_rule_by_id, False)
        self.sheerka.bind_service_method(self.dump_desc_rule, False, as_name="desc_rule")
        self.sheerka.bind_service_method(self.get_format_rules, False, visible=False)

        self.sheerka.cache_manager.register_cache(self.FORMAT_RULE_ENTRY, self.format_rule_cache, True, True)
        self.sheerka.cache_manager.register_cache(self.EXEC_RULE_ENTRY, self.exec_rule_cache, True, True)

    def initialize_deferred(self, context, is_first_time):
        """Load, compile and prioritize the rules once the core services are up.

        :param context: execution context
        :param is_first_time: True on the very first Sheerka initialization
        """

        if is_first_time:
            # add builtin rules if it's the first initialization of Sheerka
            self.init_builtin_rules(context)

        # adds the other rules (when it's not the first time)
        self.format_rule_cache.populate(lambda: self.sheerka.sdp.list(self.FORMAT_RULE_ENTRY), lambda rule: rule.id)
        self.exec_rule_cache.populate(lambda: self.sheerka.sdp.list(self.EXEC_RULE_ENTRY), lambda rule: rule.id)

        # compile all the rules (the return value is not needed here)
        for rule_id in self.format_rule_cache:
            self.init_rule(context, self.format_rule_cache.get(rule_id))

        # update rules priorities
        self.update_rules_priorities(context)

        self.sheerka.subscribe(RULE_PRECEDENCE_MODIFIED, self.update_rules_priorities)

    def update_rules_priorities(self, context):
        """
        Ask the SheerkaComparisonManager for the priorities
        :return:
        """
        # get the priorities
        rules_weights = self.sheerka.get_concepts_weights(BuiltinConcepts.PRECEDENCE, RULE_COMPARISON_CONTEXT)

        # apply them to every cached format rule
        for rule_id in self.format_rule_cache:
            rule = self.format_rule_cache.get(rule_id)
            if rule.str_id in rules_weights:
                rule.priority = rules_weights[rule.str_id]

        # invalidate the sorted-rules cache
        self._format_rules = None

    def init_rule(self, context, rule: Rule):
        """Compile a rule's predicate and action; errors go to rule.error_sink.

        :param context: execution context
        :param rule: the rule to compile (no-op when already compiled)
        :return: the rule on success; None when skipped or on error
        """
        if rule.metadata.is_compiled:
            return

        if rule.compiled_predicate is None:
            res = self.compile_when(context, self.NAME, rule.metadata.predicate)
            # compile_when returns a list of RulePredicate on success,
            # a failed ReturnValue otherwise
            if not isinstance(res, list):
                rule.error_sink = [res.body]
                return
            rule.compiled_predicate = res

        if rule.compiled_action is None:
            res = self.compile_print(context, rule.metadata.action)
            if not res.status:
                rule.error_sink = [res.body]
                return
            rule.compiled_action = res.body

        # rule.variables = self.get_variables()

        rule.metadata.is_compiled = True
        rule.metadata.is_enabled = True
        return rule

    def compile_when(self, context, name, source):
        """Parse a rule predicate and attach the evaluator to use for each result.

        :return: a list of RulePredicate, or the failed parse ReturnValue
        """
        # parser_input = self.sheerka.services[SheerkaExecute.NAME].get_parser_input(source)
        parsed = parse_unrecognized(context,
                                    source,
                                    parsers="all",
                                    who=name,
                                    prop=Keywords.WHEN,
                                    filter_func=only_successful)

        if not parsed.status:
            return parsed

        if self.sheerka.isinstance(parsed.body, BuiltinConcepts.ONLY_SUCCESSFUL):
            parsed = parsed.body.body

        return self.add_evaluators(source, parsed if hasattr(parsed, "__iter__") else [parsed])

    def compile_print(self, context, source):
        """Parse a 'print' action with the FormatRuleParser.

        :return: a ReturnValue whose body is the FormatAst tree on success
        """
        parser = FormatRuleParser(source)
        parsed = parser.parse()
        if parser.error_sink:
            return self.sheerka.ret(self.NAME,
                                    False,
                                    self.sheerka.new(BuiltinConcepts.ERROR, body=[parser.error_sink]))
        else:
            return self.sheerka.ret(self.NAME, True, parsed)

    def set_id_if_needed(self, rule: Rule):
        """
        Set the id for the concept if needed
        :param rule:
        :return:
        """
        if rule.metadata.id is not None:
            return

        rule.metadata.id = str(self.sheerka.cache_manager.get(self.sheerka.CONCEPTS_KEYS_ENTRY, self.RULE_IDS))

    def create_new_rule(self, context, rule):
        """
        Saves the new rule in DB
        :param context:
        :param rule:
        :return: a ReturnValue wrapping a NEW_RULE concept
        """
        sheerka = self.sheerka

        # set id before saving in db
        self.set_id_if_needed(rule)
        if rule.compiled_predicate and rule.compiled_action:
            rule.metadata.is_compiled = True
            rule.metadata.is_enabled = True

        # save it
        if rule.metadata.action_type == "print":
            self.sheerka.cache_manager.put(self.FORMAT_RULE_ENTRY, rule.metadata.id, rule)
            # the sorted list must be rebuilt on next access
            self._format_rules = None
        else:
            self.sheerka.cache_manager.put(self.EXEC_RULE_ENTRY, rule.metadata.id, rule)

        # process the return if needed
        ret = sheerka.ret(self.NAME, True, sheerka.new(BuiltinConcepts.NEW_RULE, body=rule))
        return ret

    def init_builtin_rules(self, context):
        """Create the default print rules and declare their relative precedence."""
        # self.sheerka.init_log.debug("Initializing default rules")
        rules = [
            Rule("print", "Print return values", "__rets", "list(__rets)"),
            Rule("print", "Print ReturnValue",
                 "__ret",
                 "\\ReturnValue(who={__ret.who}, status={__ret.status}, value={__ret.value})"),
            Rule("print", "Failed ReturnValue in red",
                 "__ret and not __ret.status",
                 "red(__ret)"),
            Rule("print", "List explanations",
                 "isinstance(__ret_container, BuiltinConcepts.EXPLANATION)",
                 "blue(__ret_container.digest) : {__ret_container.command}\nlist(__ret_container)"),
            Rule("print", "Print ExecutionContext",
                 "isinstance(__obj, ExecutionContext)",
                 "[{id:3}] {__tab}{desc} ({status})"),
            Rule("print", "Display formatted list",
                 "isinstance(__ret_container, BuiltinConcepts.TO_LIST)",
                 "list(__ret_container)"),
        ]

        for r in rules:
            self.create_new_rule(context, r)

        # relative precedence among the builtin rules
        self.sheerka.set_is_less_than(context, BuiltinConcepts.PRECEDENCE, rules[1], rules[2], RULE_COMPARISON_CONTEXT)
        self.sheerka.set_is_less_than(context, BuiltinConcepts.PRECEDENCE, rules[1], rules[3], RULE_COMPARISON_CONTEXT)
        self.sheerka.set_is_less_than(context, BuiltinConcepts.PRECEDENCE, rules[1], rules[5], RULE_COMPARISON_CONTEXT)
        self.sheerka.set_is_greatest(context, BuiltinConcepts.PRECEDENCE, rules[0], RULE_COMPARISON_CONTEXT)

    def get_rule_by_id(self, rule_id):
        """
        Looks in the caches for a specific rule id
        :param rule_id:
        :return: the rule, None for a None id, or an UNKNOWN_RULE concept
        """
        if rule_id is None:
            return None

        rule = self.format_rule_cache.get(rule_id)
        if rule:
            return rule

        rule = self.exec_rule_cache.get(rule_id)
        if rule:
            return rule

        metadata = [("id", rule_id)]
        return self.sheerka.new(BuiltinConcepts.UNKNOWN_RULE, body=metadata)

    def dump_desc_rule(self, rules):
        """
        dumps the definition of a rule
        :param rules: a single Rule or an iterable of Rules
        :return:
        """
        ensure_rule(rules)

        if not hasattr(rules, "__iter__"):
            rules = [rules]

        first = True
        for rule in rules:
            if not first:
                # blank line between consecutive rule descriptions
                self.sheerka.log.info("")
            # bug fix: 'first' was never cleared, so the separator above
            # never printed when dumping several rules
            first = False
            self.sheerka.log.info(f"id : {rule.id}")
            self.sheerka.log.info(f"name : {rule.metadata.name}")
            self.sheerka.log.info(f"type : {rule.metadata.action_type}")
            self.sheerka.log.info(f"predicate : {rule.metadata.predicate}")
            self.sheerka.log.info(f"action : {rule.metadata.action}")
            self.sheerka.log.info(f"compiled : {rule.metadata.is_compiled}")
            self.sheerka.log.info(f"enabled : {rule.metadata.is_enabled}")

    def get_format_rules(self):
        """Return the format rules sorted by priority, highest first (cached)."""
        # bug fix: the previous truthiness test meant an empty result was
        # re-sorted on every call; compare against None, the explicit
        # "needs rebuild" marker.
        if self._format_rules is not None:
            return self._format_rules

        self._format_rules = sorted(self.format_rule_cache.get_all(), key=operator.attrgetter('priority'), reverse=True)
        return self._format_rules

    def add_evaluators(self, source, ret_vals):
        """
        Browse the ReturnValueConcepts to determine the evaluator to use
        Returns a list of tuple (evaluator_name, return_value)
        :param source: the original predicate source text
        :param ret_vals: parse results to classify
        :return: list of RulePredicate
        :raises NotImplementedError: for an unrecognized underlying value
        """
        res = []
        for r in ret_vals:
            underlying = self.sheerka.objvalue(r)
            # All three source-code node kinds go to the Python evaluator
            # (the three branches were previously identical).
            if isinstance(underlying, (PythonNode, SourceCodeWithConceptNode, SourceCodeNode)):
                res.append(RulePredicate(source, PythonEvaluator.NAME, r, None))
            elif isinstance(underlying, Concept):
                res.append(RulePredicate(source, ConceptEvaluator.NAME, r, underlying))
            elif hasattr(underlying, "__iter__") and len(underlying) == 1 and isinstance(underlying[0], ConceptNode):
                res.append(RulePredicate(source, ConceptEvaluator.NAME, r, underlying[0].concept))
            else:
                raise NotImplementedError(r)
        return res
|
||||
@@ -1,9 +1,9 @@
|
||||
import core.builtin_helpers
|
||||
from cache.Cache import Cache
|
||||
from cache.SetCache import SetCache
|
||||
from core.ast.nodes import python_to_concept
|
||||
from core.ast_helpers import UnreferencedVariablesVisitor
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, ConceptParts, ensure_concept, DEFINITION_TYPE_BNF
|
||||
from core.concept import Concept, ConceptParts, DEFINITION_TYPE_BNF
|
||||
from core.sheerka.services.SheerkaModifyConcept import SheerkaModifyConcept
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
|
||||
@@ -41,9 +41,10 @@ class SheerkaSetsManager(BaseService):
|
||||
"""
|
||||
|
||||
context.log(f"Setting concept {concept} is a {concept_set}", who=self.NAME)
|
||||
ensure_concept(concept, concept_set)
|
||||
core.builtin_helpers.ensure_concept(concept, concept_set)
|
||||
|
||||
if BuiltinConcepts.ISA in concept.metadata.props and concept_set in concept.metadata.props[BuiltinConcepts.ISA]:
|
||||
if BuiltinConcepts.ISA in concept.get_metadata().props and concept_set in concept.get_metadata().props[
|
||||
BuiltinConcepts.ISA]:
|
||||
return self.sheerka.ret(
|
||||
self.NAME,
|
||||
False,
|
||||
@@ -71,7 +72,7 @@ class SheerkaSetsManager(BaseService):
|
||||
"""
|
||||
|
||||
context.log(f"Adding concept {concept} to set {concept_set}", who=self.NAME)
|
||||
ensure_concept(concept, concept_set)
|
||||
core.builtin_helpers.ensure_concept(concept, concept_set)
|
||||
|
||||
set_elements = self.sets.get(concept_set.id)
|
||||
if set_elements and concept.id in set_elements:
|
||||
@@ -98,7 +99,7 @@ class SheerkaSetsManager(BaseService):
|
||||
"""
|
||||
|
||||
context.log(f"Adding concepts {concepts} to set {concept_set}", who=self.NAME)
|
||||
ensure_concept(concept_set)
|
||||
core.builtin_helpers.ensure_concept(concept_set)
|
||||
already_in_set = []
|
||||
for concept in concepts:
|
||||
res = self.add_concept_to_set(context, concept, concept_set)
|
||||
@@ -124,7 +125,7 @@ class SheerkaSetsManager(BaseService):
|
||||
:return:
|
||||
"""
|
||||
|
||||
ensure_concept(concept)
|
||||
core.builtin_helpers.ensure_concept(concept)
|
||||
|
||||
def _get_set_elements(sub_concept):
|
||||
if not self.isaset(context, sub_concept):
|
||||
@@ -146,8 +147,8 @@ class SheerkaSetsManager(BaseService):
|
||||
concepts.extend(other_concepts)
|
||||
|
||||
# apply the where clause if any
|
||||
if sub_concept.metadata.where:
|
||||
new_condition = self._validate_where_clause(sub_concept)
|
||||
if sub_concept.get_metadata().where:
|
||||
new_condition = self._validate_where_clause(context, sub_concept)
|
||||
if not new_condition:
|
||||
return self.sheerka.new(BuiltinConcepts.CONDITION_FAILED, body=sub_concept)
|
||||
|
||||
@@ -179,7 +180,7 @@ class SheerkaSetsManager(BaseService):
|
||||
:return:
|
||||
"""
|
||||
|
||||
ensure_concept(a, b)
|
||||
core.builtin_helpers.ensure_concept(a, b)
|
||||
|
||||
# TODO, first check the 'isa' property of a
|
||||
if not (a.id and b.id):
|
||||
@@ -190,11 +191,11 @@ class SheerkaSetsManager(BaseService):
|
||||
|
||||
def isa(self, a, b):
|
||||
|
||||
ensure_concept(a, b)
|
||||
if BuiltinConcepts.ISA not in a.metadata.props:
|
||||
core.builtin_helpers.ensure_concept(a, b)
|
||||
if BuiltinConcepts.ISA not in a.get_metadata().props:
|
||||
return False
|
||||
|
||||
for c in a.metadata.props[BuiltinConcepts.ISA]:
|
||||
for c in a.get_metadata().props[BuiltinConcepts.ISA]:
|
||||
if c == b:
|
||||
return True
|
||||
if self.isa(self.sheerka.get_by_id(c.id), b):
|
||||
@@ -216,7 +217,7 @@ class SheerkaSetsManager(BaseService):
|
||||
|
||||
# KSI 20200629
|
||||
# To resolve infinite recursion between group concepts and BNF concepts
|
||||
if concept.metadata.definition_type == DEFINITION_TYPE_BNF:
|
||||
if concept.get_metadata().definition_type == DEFINITION_TYPE_BNF:
|
||||
return False
|
||||
|
||||
# check if it has a group
|
||||
@@ -231,18 +232,18 @@ class SheerkaSetsManager(BaseService):
|
||||
|
||||
return self.isaset(context, concept.body)
|
||||
|
||||
def _validate_where_clause(self, concept):
|
||||
python_parser_result = [r for r in concept.compiled[ConceptParts.WHERE] if r.who == "parsers.Python"]
|
||||
def _validate_where_clause(self, context, concept):
|
||||
python_parser_result = [r for r in concept.get_compiled()[ConceptParts.WHERE] if r.who == "parsers.Python"]
|
||||
if not python_parser_result or not python_parser_result[0].status:
|
||||
return None
|
||||
|
||||
ast_ = python_parser_result[0].body.body.ast_
|
||||
ast_as_concepts = python_to_concept(ast_)
|
||||
names = core.builtin_helpers.get_names(self.sheerka, ast_as_concepts)
|
||||
if len(names) != 1 or names[0] != concept.metadata.body:
|
||||
visitor = UnreferencedVariablesVisitor(context)
|
||||
names = list(visitor.get_names(ast_))
|
||||
if len(names) != 1 or names[0] != concept.get_metadata().body:
|
||||
return None
|
||||
|
||||
condition = concept.metadata.where.replace(concept.metadata.body, "sheerka.objvalue(x)")
|
||||
condition = concept.get_metadata().where.replace(concept.get_metadata().body, "sheerka.objvalue(x)")
|
||||
expression = f"""
|
||||
result=[]
|
||||
for x in xx__concepts__xx:
|
||||
@@ -277,7 +278,7 @@ for x in xx__concepts__xx:
|
||||
errors = []
|
||||
for element_id in ids:
|
||||
concept = self.sheerka.get_by_id(element_id)
|
||||
if len(concept.metadata.variables) == 0:
|
||||
if len(concept.get_metadata().variables) == 0:
|
||||
# The concepts are directly taken from Sheerka.get_by_id, so variable cannot be filled
|
||||
# It's the reason why we only evaluate concept with no variable
|
||||
evaluated = self.sheerka.evaluate_concept(sub_context, concept)
|
||||
|
||||
@@ -19,6 +19,9 @@ class Variable(ServiceObj):
|
||||
def get_key(self):
|
||||
return f"{self.who}|{self.key}"
|
||||
|
||||
def __str__(self):
|
||||
return f"({self.who}){self.key}={self.value}"
|
||||
|
||||
|
||||
class SheerkaVariableManager(BaseService):
|
||||
NAME = "VariableManager"
|
||||
@@ -26,18 +29,28 @@ class SheerkaVariableManager(BaseService):
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.bound = {
|
||||
"sheerka.enable_process_return_values": "enable_process_return_values",
|
||||
"sheerka.save_execution_context": "save_execution_context"
|
||||
}
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.record, True)
|
||||
self.sheerka.bind_service_method(self.load, False)
|
||||
self.sheerka.bind_service_method(self.delete, True)
|
||||
self.sheerka.bind_service_method(self.set, True)
|
||||
self.sheerka.bind_service_method(self.get, False)
|
||||
self.sheerka.bind_service_method(self.record_var, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.load_var, False, visible=False)
|
||||
self.sheerka.bind_service_method(self.delete_var, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.set_var, True)
|
||||
self.sheerka.bind_service_method(self.get_var, False)
|
||||
self.sheerka.bind_service_method(self.list_vars, False)
|
||||
|
||||
cache = Cache(default=lambda k: self.sheerka.sdp.get(self.VARIABLES_ENTRY, k))
|
||||
cache = Cache()
|
||||
cache.populate(lambda: self.sheerka.sdp.list(self.VARIABLES_ENTRY), lambda var: var.get_key())
|
||||
self.sheerka.cache_manager.register_cache(self.VARIABLES_ENTRY, cache, True, True)
|
||||
|
||||
def record(self, context, who, key, value):
|
||||
for variable in cache.get_all():
|
||||
if variable.key in self.bound:
|
||||
setattr(self.sheerka, self.bound[variable.key], variable.value)
|
||||
|
||||
def record_var(self, context, who, key, value):
|
||||
"""
|
||||
|
||||
:param context:
|
||||
@@ -49,20 +62,33 @@ class SheerkaVariableManager(BaseService):
|
||||
|
||||
variable = Variable(context.event.get_digest(), who, key, value, None)
|
||||
self.sheerka.cache_manager.put(self.VARIABLES_ENTRY, variable.get_key(), variable)
|
||||
|
||||
# TODO: manage credentials
|
||||
if key in self.bound:
|
||||
setattr(self.sheerka, self.bound[key], value)
|
||||
|
||||
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
|
||||
|
||||
def load(self, who, key):
|
||||
def load_var(self, who, key):
|
||||
variable = self.sheerka.cache_manager.get(self.VARIABLES_ENTRY, who + "|" + key)
|
||||
if variable is None:
|
||||
return None
|
||||
|
||||
return variable.value
|
||||
|
||||
def delete(self, context, who, key):
|
||||
def delete_var(self, context, who, key):
|
||||
self.sheerka.cache_manager.delete(self.VARIABLES_ENTRY, who + "|" + key)
|
||||
|
||||
def set(self, context, key, value):
|
||||
return self.record(context, context.event.user_id, key, value)
|
||||
def set_var(self, context, key, value):
|
||||
return self.record_var(context, context.event.user_id, key, value)
|
||||
|
||||
def get(self, context, key):
|
||||
return self.load(context.event.user_id, key)
|
||||
def get_var(self, context, key):
|
||||
return self.load_var(context.event.user_id, key)
|
||||
|
||||
def list_vars(self, context, all_vars=False):
|
||||
if all_vars:
|
||||
res = [str(v) for v in self.sheerka.cache_manager.copy(self.VARIABLES_ENTRY).values()]
|
||||
else:
|
||||
res = [str(v) for v in self.sheerka.cache_manager.copy(self.VARIABLES_ENTRY).values() if
|
||||
v.who == context.event.user_id]
|
||||
return res
|
||||
|
||||
@@ -10,6 +10,7 @@ class BaseService:
|
||||
"""
|
||||
Base class for services
|
||||
"""
|
||||
|
||||
def __init__(self, sheerka):
|
||||
self.sheerka = sheerka
|
||||
|
||||
@@ -19,3 +20,13 @@ class BaseService:
|
||||
:return:
|
||||
"""
|
||||
pass
|
||||
|
||||
def restore_values(self, *args):
|
||||
"""
|
||||
Use Variable Manager to restore the state of a service
|
||||
:param args:
|
||||
:return:
|
||||
"""
|
||||
for prop_name in args:
|
||||
if (value := self.sheerka.load_var(self.NAME, prop_name)) is not None:
|
||||
setattr(self, prop_name, value)
|
||||
|
||||
+98
-26
@@ -9,13 +9,14 @@ class TokenKind(Enum):
|
||||
KEYWORD = "keyword"
|
||||
IDENTIFIER = "identifier"
|
||||
CONCEPT = "concept"
|
||||
RULE = "rule"
|
||||
STRING = "string"
|
||||
NUMBER = "number"
|
||||
TRUE = "true"
|
||||
FALSE = "false"
|
||||
LPAR = "lpar"
|
||||
RPAR = "rpar"
|
||||
LBRACKET = "lbrace"
|
||||
LBRACKET = "lbracket"
|
||||
RBRACKET = "rbracket"
|
||||
LBRACE = "lbrace"
|
||||
RBRACE = "rbrace"
|
||||
@@ -49,7 +50,7 @@ class TokenKind(Enum):
|
||||
WORD = "word"
|
||||
EQUALSEQUALS = "=="
|
||||
VAR_DEF = "__var__"
|
||||
REGEX = "r'xxx' or r\"xxx\" or r:xxx: or r|xxx| or r/xxx/"
|
||||
REGEX = "r'xxx' or r\"xxx\" or r|xxx| or r/xxx/"
|
||||
|
||||
|
||||
@dataclass()
|
||||
@@ -65,18 +66,7 @@ class Token:
|
||||
_repr_value: str = field(default=None, repr=False, compare=False, hash=None)
|
||||
|
||||
def __repr__(self):
|
||||
if self.type == TokenKind.IDENTIFIER:
|
||||
value = str(self.value)
|
||||
elif self.type == TokenKind.WHITESPACE:
|
||||
value = "<ws!>" if self.value == "" else "<tab>" if self.value[0] == "\t" else "<ws>"
|
||||
elif self.type == TokenKind.NEWLINE:
|
||||
value = "<nl>"
|
||||
elif self.type == TokenKind.EOF:
|
||||
value = "<EOF>"
|
||||
else:
|
||||
value = self.value
|
||||
|
||||
return f"Token({value})"
|
||||
return f"Token({self.repr_value})"
|
||||
|
||||
@property
|
||||
def strip_quote(self):
|
||||
@@ -102,9 +92,15 @@ class Token:
|
||||
if self.type == TokenKind.EOF:
|
||||
self._repr_value = "<EOF>"
|
||||
elif self.type == TokenKind.WHITESPACE:
|
||||
self._repr_value = "<ws>"
|
||||
self._repr_value = "<ws!>" if self.value == "" else "<tab>" if self.value[0] == "\t" else "<ws>"
|
||||
elif self.type == TokenKind.NEWLINE:
|
||||
self._repr_value = "<nl>"
|
||||
elif self.type == TokenKind.CONCEPT:
|
||||
from core.utils import str_concept
|
||||
self._repr_value = str_concept(self.value)
|
||||
elif self.type == TokenKind.RULE:
|
||||
from core.utils import str_concept
|
||||
self._repr_value = str_concept(self.value, prefix="r:")
|
||||
else:
|
||||
self._repr_value = self.str_value
|
||||
return self._repr_value
|
||||
@@ -121,6 +117,9 @@ class Token:
|
||||
elif self.type == TokenKind.CONCEPT:
|
||||
from core.utils import str_concept
|
||||
return str_concept(self.value)
|
||||
elif self.type == TokenKind.RULE:
|
||||
from core.utils import str_concept
|
||||
return str_concept(self.value, prefix="r:")
|
||||
else:
|
||||
return str(self.value)
|
||||
|
||||
@@ -192,18 +191,18 @@ class Tokenizer:
|
||||
self.column += 1
|
||||
elif c == "_":
|
||||
from core.concept import VARIABLE_PREFIX
|
||||
if self.i + 1 < self.text_len and self.text[self.i + 1].isalpha():
|
||||
identifier = self.eat_identifier(self.i)
|
||||
yield Token(TokenKind.IDENTIFIER, identifier, self.i, self.line, self.column)
|
||||
self.i += len(identifier)
|
||||
self.column += len(identifier)
|
||||
elif self.i + 7 < self.text_len and \
|
||||
if self.i + 7 < self.text_len and \
|
||||
self.text[self.i: self.i + 7] == VARIABLE_PREFIX and \
|
||||
self.text[self.i + 7].isdigit():
|
||||
number = self.eat_number(self.i + 7)
|
||||
yield Token(TokenKind.VAR_DEF, VARIABLE_PREFIX + number, self.i, self.line, self.column)
|
||||
self.i += 7 + len(number)
|
||||
self.column += 7 + len(number)
|
||||
elif self.i + 1 < self.text_len and (self.text[self.i + 1].isalpha() or self.text[self.i + 1] == "_"):
|
||||
identifier = self.eat_identifier(self.i)
|
||||
yield Token(TokenKind.IDENTIFIER, identifier, self.i, self.line, self.column)
|
||||
self.i += len(identifier)
|
||||
self.column += len(identifier)
|
||||
else:
|
||||
yield Token(TokenKind.UNDERSCORE, "_", self.i, self.line, self.column)
|
||||
self.i += 1
|
||||
@@ -341,7 +340,12 @@ class Tokenizer:
|
||||
yield Token(TokenKind.CONCEPT, (name, id), self.i, self.line, self.column)
|
||||
self.i += length + 2
|
||||
self.column += length + 2
|
||||
elif c == "r" and self.i + 1 < self.text_len and self.text[self.i + 1] in "'\":|/":
|
||||
elif c == "r" and self.i + 1 < self.text_len and self.text[self.i + 1] == ":":
|
||||
name, id, length = self.eat_concept(self.i + 2, self.line, self.column + 2)
|
||||
yield Token(TokenKind.RULE, (name, id), self.i, self.line, self.column)
|
||||
self.i += length + 2
|
||||
self.column += length + 2
|
||||
elif c == "r" and self.i + 1 < self.text_len and self.text[self.i + 1] in "'\"|/":
|
||||
string, newlines, column_index = self.eat_string(self.i + 1, self.line, self.column)
|
||||
yield Token(TokenKind.REGEX, string, self.i, self.line, self.column) # quotes are kept
|
||||
self.i += len(string) + 1
|
||||
@@ -368,10 +372,10 @@ class Tokenizer:
|
||||
self.i += len(string)
|
||||
self.column = column_index # 1 if newlines > 0 else self.column + len(string)
|
||||
self.line += newlines
|
||||
elif c == "_":
|
||||
yield Token(TokenKind.UNDERSCORE, "_", self.i, self.line, self.column)
|
||||
self.i += 1
|
||||
self.column += 1
|
||||
# elif c == "_":
|
||||
# yield Token(TokenKind.UNDERSCORE, "_", self.i, self.line, self.column)
|
||||
# self.i += 1
|
||||
# self.column += 1
|
||||
else:
|
||||
raise LexerError(f"Unknown token '{c}'", self.text, self.i, self.line, self.column)
|
||||
|
||||
@@ -518,3 +522,71 @@ class Tokenizer:
|
||||
break
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class IterParser:
    """Minimal pull-parser base: wraps a Tokenizer with one-token state and lookahead.

    Attributes:
        source: the raw text being parsed
        token: the current token (None before the first next_token call)
        error_sink: set by subclasses when parsing fails
    """

    def __init__(self, source):
        self.source = source
        self.iterator = iter(Tokenizer(source))
        # Tokens already read by the_token_after() but not yet consumed.
        self.tokens_after = []
        self.token = None
        self.error_sink = None

    def _take(self):
        # Consume from the lookahead queue first, then from the tokenizer.
        if len(self.tokens_after) > 0:
            return self.tokens_after.pop(0)
        return next(self.iterator)

    def next_token(self, skip_whitespace=True):
        """Advance self.token.

        :param skip_whitespace: skip WHITESPACE/NEWLINE tokens
        :return: False at EOF or when the tokenizer is exhausted
        """
        try:
            self.token = self._take()
            if skip_whitespace:
                while self.token.type in (TokenKind.WHITESPACE, TokenKind.NEWLINE):
                    # Bug fix: keep draining the lookahead queue while
                    # skipping whitespace — the original pulled straight
                    # from the iterator here and silently dropped tokens
                    # previously queued by the_token_after().
                    self.token = self._take()
            return self.token.type != TokenKind.EOF
        except StopIteration:
            return False

    def the_token_after(self, skip_whitespace=True):
        """Peek at the next token without consuming it.

        Every token read (including skipped whitespace) is queued so that
        subsequent next_token() calls still see it.

        :return: the peeked token, or an EOF token when exhausted
        """
        try:
            token_after = next(self.iterator)
            self.tokens_after.append(token_after)
            if skip_whitespace:
                while token_after.type in (TokenKind.WHITESPACE, TokenKind.NEWLINE):
                    token_after = next(self.iterator)
                    self.tokens_after.append(token_after)

            return token_after
        except StopIteration:
            return Token(TokenKind.EOF, -1, -1, -1, -1)
|
||||
|
||||
|
||||
# @dataclass
|
||||
# class PropDef:
|
||||
# prop: str
|
||||
# index: int
|
||||
#
|
||||
#
|
||||
# class SimpleExpressionParser(IterParser):
|
||||
# def __init__(self, source):
|
||||
# super().__init__(source)
|
||||
# self.properties = []
|
||||
#
|
||||
# def parse(self):
|
||||
#
|
||||
# prop, index, key = None, None, None
|
||||
# while self.next_token():
|
||||
# if self.token.type == TokenKind.DOT:
|
||||
# self.properties.append(PropDef(prop, index, key))
|
||||
# prop, index, key = None, None, None
|
||||
# continue
|
||||
#
|
||||
# if self.token.type == TokenKind.LBRACKET:
|
||||
# index = self.parse_index()
|
||||
# elif self.token.type == TokenKind.LBRACE:
|
||||
# key = self.parse_key()
|
||||
# else:
|
||||
# prop = self.token.value
|
||||
#
|
||||
# if prop is not None:
|
||||
# self.properties.append(PropDef(prop, index, key))
|
||||
#
|
||||
# def parse_i
|
||||
|
||||
+187
-13
@@ -1,15 +1,54 @@
|
||||
import ast
|
||||
import importlib
|
||||
import inspect
|
||||
import pkgutil
|
||||
import re
|
||||
|
||||
from core.tokenizer import TokenKind
|
||||
from cache.Cache import Cache
|
||||
from core.ast_helpers import ast_to_props
|
||||
from core.tokenizer import TokenKind, Tokenizer
|
||||
|
||||
default_debug_name = "*default*"
|
||||
debug_activated = set()
|
||||
|
||||
COLORS = {
|
||||
"black",
|
||||
"red",
|
||||
"green",
|
||||
"yellow",
|
||||
"blue",
|
||||
"magenta",
|
||||
"cyan",
|
||||
"white",
|
||||
}
|
||||
|
||||
CONSOLE_COLORS_MAP = {
|
||||
"reset": "\u001b[0m",
|
||||
"black": "\u001b[30m",
|
||||
"red": "\u001b[31m",
|
||||
"green": "\u001b[32m",
|
||||
"yellow": "\u001b[33m",
|
||||
"blue": "\u001b[34m",
|
||||
"magenta": "\u001b[35m",
|
||||
"cyan": "\u001b[36m",
|
||||
"white": "\u001b[37m",
|
||||
}
|
||||
|
||||
PRIMITIVES_TYPES = (str, bool, type(None), int, float, list, dict, set, bytes, tuple, type)
|
||||
|
||||
expressions_cache = Cache()
|
||||
|
||||
|
||||
def my_debug(*args, check_started=None):
|
||||
"""
|
||||
Write one line per arg in 'debug.txt'
|
||||
:param args:
|
||||
:param check_started:
|
||||
True : first check if start_debug() was called
|
||||
<name> : first check if start_debug(name) was called
|
||||
list of <names> : first check if start_debug() is called for all names
|
||||
:return:
|
||||
"""
|
||||
if check_started and default_debug_name not in debug_activated:
|
||||
return
|
||||
|
||||
@@ -30,14 +69,14 @@ def my_debug(*args, check_started=None):
|
||||
f.write(f"{arg}\n")
|
||||
|
||||
|
||||
def start_debug(msg=None, debug_name=default_debug_name):
|
||||
def start_debug(debug_name=default_debug_name, msg=None):
|
||||
debug_activated.add(debug_name)
|
||||
if msg:
|
||||
with open("debug.txt", "a") as f:
|
||||
f.write(f"{msg}\n")
|
||||
|
||||
|
||||
def stop_debug(msg=None, debug_name=default_debug_name):
|
||||
def stop_debug(debug_name=default_debug_name, msg=None):
|
||||
if msg:
|
||||
with open("debug.txt", "a") as f:
|
||||
f.write(f"{msg}\n")
|
||||
@@ -357,6 +396,26 @@ def strip_tokens(tokens, strip_eof=False):
|
||||
return tokens[start: end + 1]
|
||||
|
||||
|
||||
def index_tokens(tokens, value):
|
||||
"""
|
||||
Returns the index of the token whose value equals 'value'
|
||||
>>> assert index_tokens(Tokenizer("xxx=yyy"), "=") == 1
|
||||
>>> assert index_tokens(Tokenizer("xxx = yyy"), "=") == 2
|
||||
>>> assert index_tokens(Tokenizer("yyy"), "=") == -1
|
||||
>>> assert index_tokens(Tokenizer("xxx = yyy"), " = ") == -1 # " = " is not valid token
|
||||
:param tokens:
|
||||
:param value:
|
||||
:return:
|
||||
"""
|
||||
if not tokens:
|
||||
return -1
|
||||
|
||||
for i, t in enumerate(tokens):
|
||||
if t.value == value:
|
||||
return i
|
||||
return -1
|
||||
|
||||
|
||||
def escape_char(text, to_escape):
|
||||
res = ""
|
||||
|
||||
@@ -392,7 +451,7 @@ def decode_enum(enum_repr: str):
|
||||
return None
|
||||
|
||||
|
||||
def str_concept(t, drop_name=None):
|
||||
def str_concept(t, drop_name=None, prefix="c:"):
|
||||
"""
|
||||
The key,id identifiers of a concept are stored in a tuple
|
||||
we want to return the key and the id, separated by a pipe
|
||||
@@ -404,25 +463,29 @@ def str_concept(t, drop_name=None):
|
||||
>>> assert str_concept((None, None)) == ""
|
||||
>>> assert str_concept(Concept(name="foo", id="bar")) == "c:foo|bar:"
|
||||
>>> assert str_concept(Concept(name="foo", id="bar"), drop_name=True) == "c:|bar:"
|
||||
>>> assert str_concept(("key", "id"), prefix='r:') == "r:key|id:"
|
||||
:param t:
|
||||
:param drop_name: True if we only want the id (and not the key)
|
||||
:param prefix:
|
||||
:return:
|
||||
"""
|
||||
if isinstance(t, tuple):
|
||||
name, id_ = t[0], t[1]
|
||||
elif prefix == "r:":
|
||||
name, id_ = t.metadata.name, t.id
|
||||
else:
|
||||
name, id_ = t.key, t.id
|
||||
|
||||
if name is None and id_ is None:
|
||||
return ""
|
||||
|
||||
result = 'c:' if (name is None or drop_name) else "c:" + name
|
||||
result = prefix if (name is None or drop_name) else prefix + name
|
||||
if id_:
|
||||
result += "|" + id_
|
||||
return result + ":"
|
||||
|
||||
|
||||
def unstr_concept(concept_repr):
|
||||
def unstr_concept(concept_repr, prefix='c:'):
|
||||
"""
|
||||
if concept_repr is like :c:key:id:
|
||||
return the key and the id
|
||||
@@ -430,6 +493,7 @@ def unstr_concept(concept_repr):
|
||||
>>> assert unstr_concept("c:key|id:") == ("key", "id")
|
||||
>>> assert unstr_concept("c:|id:") == ("None", "id")
|
||||
>>> assert unstr_concept("c:key|:") == ("key", "None")
|
||||
>>> assert unstr_concept("r:key|id:", prefix='r:') == ("key", "id")
|
||||
>>> # Otherwise, return (None,None)
|
||||
|
||||
:param concept_repr:
|
||||
@@ -437,7 +501,7 @@ def unstr_concept(concept_repr):
|
||||
"""
|
||||
if not (concept_repr and
|
||||
isinstance(concept_repr, str) and
|
||||
concept_repr.startswith("c:") and
|
||||
concept_repr.startswith(prefix) and
|
||||
concept_repr.endswith(":")):
|
||||
return None, None
|
||||
|
||||
@@ -470,7 +534,7 @@ def unstr_concept(concept_repr):
|
||||
return key if key != "" else None, id if id != "" else None
|
||||
|
||||
|
||||
def encode_concept(t):
|
||||
def encode_concept(t, wrapper="C"):
|
||||
"""
|
||||
Given a tuple of concept id, concept id
|
||||
Create a valid Python identifier that can be parsed back
|
||||
@@ -480,24 +544,27 @@ def encode_concept(t):
|
||||
>>> assert encode_concept(("key", None)) == "__C__KEY_key__ID_00None00__C__"
|
||||
|
||||
:param t:
|
||||
:param wrapper:
|
||||
:return:
|
||||
"""
|
||||
|
||||
key, id_ = (t[0], t[1]) if isinstance(t, tuple) else (t.key, t.id)
|
||||
prefix = "__C"
|
||||
sanitized_key = "".join(c if c.isalnum() else "0" for c in key) if key else "00None00"
|
||||
return prefix + f"__KEY_{sanitized_key}__ID_{id_ or '00None00'}__C__"
|
||||
return f"__{wrapper}__KEY_{sanitized_key}__ID_{id_ or '00None00'}__{wrapper}__"
|
||||
|
||||
|
||||
decode_regex = re.compile(r"__KEY_(\w+)__ID_(\w+)__C__")
|
||||
concept_decode_regex = re.compile(r"__KEY_(\w+)__ID_(\w+)__C__") # it is compiled only once
|
||||
rule_decode_regex = re.compile(r"__KEY_(\w+)__ID_(\w+)__R__") # it is compiled only once
|
||||
|
||||
|
||||
def decode_concept(text):
|
||||
def decode_concept(text, wrapper="C"):
|
||||
"""
|
||||
Decode what was encoded by encode_concept_key_id
|
||||
:param text:
|
||||
:param wrapper:
|
||||
:return:
|
||||
"""
|
||||
decode_regex = concept_decode_regex if wrapper == "C" else rule_decode_regex
|
||||
m = decode_regex.search(text)
|
||||
lookup = {"00None00": None}
|
||||
if m:
|
||||
@@ -539,7 +606,114 @@ def as_bag(obj):
|
||||
if hasattr(obj, "as_bag"):
|
||||
bag = obj.as_bag()
|
||||
else:
|
||||
bag = {prop: getattr(obj, prop) for prop in dir(obj) if not prop.startswith("_")}
|
||||
bag = {} if type(obj) in PRIMITIVES_TYPES else {prop: getattr(obj, prop)
|
||||
for prop in dir(obj) if not prop.startswith("_")}
|
||||
|
||||
bag["self"] = obj
|
||||
return bag
|
||||
|
||||
|
||||
def flatten_all_children(item, get_children):
|
||||
"""
|
||||
Return a list containing the current item and all its children, recursively
|
||||
:param item:
|
||||
:param get_children: lambda to get the children
|
||||
:return:
|
||||
"""
|
||||
|
||||
def inner_get_all_children(inner_item):
|
||||
yield inner_item
|
||||
for child in get_children(inner_item):
|
||||
yield from inner_get_all_children(child)
|
||||
|
||||
return inner_get_all_children(item)
|
||||
|
||||
|
||||
def evaluate_expression(expr, bag):
|
||||
"""
|
||||
Try to evaluate expr in context of bag
|
||||
:param expr:
|
||||
:param bag:
|
||||
:return:
|
||||
"""
|
||||
|
||||
if expr is None or expr.strip() == "":
|
||||
return None
|
||||
|
||||
if expr in bag:
|
||||
return bag[expr]
|
||||
|
||||
props_definitions = expressions_cache.get(expr)
|
||||
if props_definitions is None:
|
||||
_ast = ast.parse(expr, mode="eval")
|
||||
props_definitions = []
|
||||
ast_to_props(props_definitions, _ast.body, None)
|
||||
props_definitions.reverse()
|
||||
expressions_cache.put(expr, props_definitions)
|
||||
|
||||
return evaluate_object(bag, props_definitions)
|
||||
|
||||
|
||||
def evaluate_object(bag, properties):
|
||||
"""
|
||||
Evaluate the properties of an object
|
||||
Works with evaluate_expression
|
||||
:param bag:
|
||||
:param properties: List of ast_helpers.PropDef
|
||||
:return:
|
||||
"""
|
||||
for prop in properties:
|
||||
try:
|
||||
obj = bag[prop.prop]
|
||||
except KeyError:
|
||||
try:
|
||||
obj = bag["self"][prop.prop]
|
||||
except Exception:
|
||||
raise NameError(prop.prop)
|
||||
|
||||
if obj is None:
|
||||
return None
|
||||
|
||||
if prop.index is not None:
|
||||
obj = obj[prop.index]
|
||||
|
||||
bag = as_bag(obj)
|
||||
|
||||
return obj
|
||||
|
||||
|
||||
def get_text_from_tokens(tokens, custom_switcher=None, tracker=None):
|
||||
"""
|
||||
Create the source code, from the list of token
|
||||
:param tokens: list of tokens
|
||||
:param custom_switcher: to override the behaviour (the return value) of some token
|
||||
:param tracker: keep track of the original token value when custom switched
|
||||
:return:
|
||||
"""
|
||||
if tokens is None:
|
||||
return ""
|
||||
res = ""
|
||||
|
||||
if not hasattr(tokens, "__iter__"):
|
||||
tokens = [tokens]
|
||||
|
||||
switcher = {
|
||||
# TokenKind.CONCEPT: lambda t: core.utils.str_concept(t.value),
|
||||
}
|
||||
|
||||
if custom_switcher:
|
||||
switcher.update(custom_switcher)
|
||||
|
||||
for token in tokens:
|
||||
value = switcher.get(token.type, lambda t: t.str_value)(token)
|
||||
res += value
|
||||
if tracker is not None and token.type in custom_switcher:
|
||||
tracker[value] = token
|
||||
return res
|
||||
|
||||
|
||||
def dump_ast(node):
|
||||
dump = ast.dump(node)
|
||||
for to_remove in [", ctx=Load()", ", kind=None", ", type_ignores=[]"]:
|
||||
dump = dump.replace(to_remove, "")
|
||||
return dump
|
||||
|
||||
Reference in New Issue
Block a user