Implemented some enhancement requests and fixed some bugs

Fixed #2 : Variables are not recognized when inside a rule token
Fixed #15 : Rule: rete attributes are lost when a new ontology is created
Fixed #14 : ReteNetwork: Format rules must not be added to Rete network
Fixed #16 : DefConcept: Variables are not recognized when they are keyword arguments
Fixed #4 : Comparisons are not correctly set when the comparison property is a concept
Fixed #14 : Parser: merge FunctionParser.NamesNode and ExpressionParser.NamesNode
Fixed #18 : Parser: Add SourceCodeNode test to UnrecognizedNodeParser
Fixed #20 : At startup the Number concept is saved in db numerous times
Fixed #21 : CacheManager: I can remove all elements from a ListIfNeededCache and fill it again
Fixed #22 : CacheManager: I can remove all elements from a SetCache and fill it again
Fixed #23 : HistoryManager: history() no longer works
Fixed #24 : HistoryManager: history() no longer works after creating an exec rule
Fixed #25 : SheerkaMemory: Use MemoryObject instead of sheerka.local
Fixed #26 : Debugger: add the ability to list all available services
Fixed #27 : CONCEPTS_GRAMMARS_ENTRY does not seem to be in use anymore
Fixed #28 : Give order to services
This commit is contained in:
2021-02-12 15:15:31 +01:00
parent 3a12ea58df
commit cac2dad17f
62 changed files with 1182 additions and 480 deletions
+2 -2
View File
@@ -15,7 +15,7 @@ class ListIfNeededCache(BaseCache):
if isinstance(self._cache[key], list):
self._cache[key].append(value)
else:
self._cache[key] = [self._cache[key], value]
self._cache[key] = value if self._cache[key] is Removed else [self._cache[key], value]
else:
self._sync(key)
@@ -28,7 +28,7 @@ class ListIfNeededCache(BaseCache):
if isinstance(self._cache[key], list):
self._cache[key].append(value)
else:
self._cache[key] = [self._cache[key], value]
self._cache[key] = value if self._cache[key] is Removed else [self._cache[key], value]
else:
self._cache[key] = value
self._add_to_add(key)
+9 -3
View File
@@ -19,9 +19,12 @@ class SetCache(BaseCache):
def _put(self, key, value, alt_sdp):
if key in self._cache:
if value in self._cache[key]:
if self._cache[key] is Removed:
self._cache[key] = {value}
elif value in self._cache[key]:
return False
self._cache[key].add(value)
else:
self._cache[key].add(value)
else:
self._sync(key)
@@ -31,7 +34,10 @@ class SetCache(BaseCache):
self._cache[key] = sheerka_deepcopy(previous)
if key in self._cache:
self._cache[key].add(value)
if self._cache[key] == Removed:
self._cache[key] = {value}
else:
self._cache[key].add(value)
else:
self._cache[key] = {value}
+5 -1
View File
@@ -88,7 +88,11 @@ class UnreferencedVariablesVisitor(UnreferencedNamesVisitor):
"""
def visit_Call(self, node: ast.Call):
self.visit_selected(node, ["args"])
self.visit_selected(node, ["args", "keywords"])
def visit_keyword(self, node: ast.keyword):
self.names.add(node.arg)
self.visit_selected(node, ["value"])
class NamesWithAttributesVisitor(ast.NodeVisitor):
+3 -113
View File
@@ -7,13 +7,10 @@ from core.builtin_concepts import BuiltinConcepts
from core.concept import Concept, ConceptParts, DEFINITION_TYPE_BNF, concept_part_value
from core.global_symbols import NotInit, NotFound
from core.rule import Rule
from core.sheerka.services.SheerkaExecute import SheerkaExecute
from core.tokenizer import Keywords
from core.utils import as_bag
from parsers.BaseNodeParser import SourceCodeNode, ConceptNode, UnrecognizedTokensNode, SourceCodeWithConceptNode, \
RuleNode
from parsers.BaseParser import ParsingError
from parsers.PythonParser import PythonParser
PARSE_STEPS = [BuiltinConcepts.BEFORE_PARSING, BuiltinConcepts.PARSING, BuiltinConcepts.AFTER_PARSING]
EVAL_STEPS = PARSE_STEPS + [BuiltinConcepts.BEFORE_EVALUATION, BuiltinConcepts.EVALUATION,
@@ -287,109 +284,6 @@ def only_parsers_results(context, return_values):
parents=return_values)
def parse_unrecognized(context, source, parsers, who=None, prop=None, filter_func=None):
"""
Try to recognize concepts or code from source using the given parsers
:param context:
:param source: ParserInput if possible
:param parsers:
:param who: who is asking the parsing ?
:param prop: Extra info, when parsing a property
:param filter_func: Once the result are found, call this function to filter them
:return:
"""
sheerka = context.sheerka
if prop:
action_context = {"prop": prop, "source": source}
desc = f"Parsing attribute '{prop}'"
else:
action_context = source
desc = f"Parsing '{source}'"
with context.push(BuiltinConcepts.PARSING, action_context, who=who, desc=desc) as sub_context:
# disable all parsers but the requested ones
if parsers != "all":
sub_context.preprocess_parsers = parsers
# sub_context.add_preprocess(BaseParser.PREFIX + "*", enabled=False)
# for parser in parsers:
# sub_context.add_preprocess(BaseParser.PREFIX + parser, enabled=True)
if prop in (Keywords.WHERE, Keywords.PRE, ConceptParts.WHERE, ConceptParts.PRE, Keywords.WHEN):
sub_context.protected_hints.add(BuiltinConcepts.EVAL_QUESTION_REQUESTED)
sub_context.add_inputs(source=source)
to_parse = sheerka.ret(context.who,
True,
sheerka.new(BuiltinConcepts.USER_INPUT, body=source))
res = sheerka.execute(sub_context, to_parse, PARSE_STEPS)
if filter_func:
res = filter_func(sub_context, res)
sub_context.add_values(return_values=res)
return res
def parse_function(context, source, tokens=None, start=0):
"""
Helper function that parses what is supposed to be a function
:param context:
:param source:
:param tokens:
:param start: start index for the source code node
:return:
"""
sheerka = context.sheerka
from parsers.FunctionParser import FunctionParser
parser = FunctionParser()
desc = f"Parsing function '{source}'"
with context.push(BuiltinConcepts.PARSE_CODE, source, desc=desc) as sub_context:
sheerka_execution = sheerka.services[SheerkaExecute.NAME]
res = parser.parse(sub_context, sheerka_execution.get_parser_input(source, tokens))
if not isinstance(res, list):
res = [res]
for r in [r for r in res if sheerka.isinstance(r.body, BuiltinConcepts.PARSER_RESULT)]:
r.body.body.start += start
r.body.body.end += start
if isinstance(r.body.body, SourceCodeWithConceptNode):
for n in [r.body.body.first, r.body.body.last] + r.body.body.nodes:
n.start += start
n.end += start
return res
def parse_python(context, source, desc=None):
"""
Helper function that parses what is known to be Python source code
:param context:
:param source:
:param desc: option description when creating the sub context
"""
desc = desc or f"Compiling python '{source}'"
with context.push(BuiltinConcepts.PARSE_CODE,
{"language": "Python", "source": source},
desc) as sub_context:
parser_input = context.sheerka.services[SheerkaExecute.NAME].get_parser_input(source)
python_parser = PythonParser()
return python_parser.parse(sub_context, parser_input)
def parse_expression(context, source, desc=None):
"""
Helper function to parser expressions with AND, OR and NOT
"""
desc = desc or f"Parsing expression '{source}'"
with context.push(BuiltinConcepts.PARSE_CODE, source, desc) as sub_context:
parser_input = context.sheerka.services[SheerkaExecute.NAME].get_parser_input(source)
from parsers.ExpressionParser import ExpressionParser
expr_parser = ExpressionParser()
return expr_parser.parse(sub_context, parser_input)
def evaluate(context,
source,
evaluators="all",
@@ -588,7 +482,7 @@ def get_lexer_nodes_from_unrecognized(context, unrecognized_tokens_node, parsers
:return:
"""
res = parse_unrecognized(context, unrecognized_tokens_node.source, parsers)
res = context.sheerka.parse_unrecognized(context, unrecognized_tokens_node.source, parsers)
res = only_parsers_results(context, res)
if not res.status:
@@ -633,7 +527,7 @@ def update_compiled(context, concept, errors, parsers=None):
errors.append(sheerka.new(BuiltinConcepts.ERROR, body=f"Cannot parse '{v.source}'"))
elif isinstance(v, UnrecognizedTokensNode):
res = parse_unrecognized(context, v.source, parsers)
res = context.sheerka.parse_unrecognized(context, v.source, parsers)
res = only_successful(context, res) # only key successful parsers
if res.status:
c.get_compiled()[k] = res.body.body
@@ -830,11 +724,7 @@ def is_a_question(context, concept):
if pre in (None, NotInit, ""):
return False
parser_input_service = context.sheerka.services[SheerkaExecute.NAME]
from parsers.ExpressionParser import ExpressionParser
parser = ExpressionParser()
res = parser.parse(context, parser_input_service.get_parser_input(pre))
res = context.sheerka.parse_expression(context, pre)
if not res.status:
return False
+2
View File
@@ -17,6 +17,8 @@ EVENT_ONTOLOGY_DELETED = "evt_o_d"
RULE_COMPARISON_CONTEXT = "Rule"
CONCEPT_COMPARISON_CONTEXT = "Sya"
NO_MATCH = "** No Match **"
class CustomType:
+4
View File
@@ -83,6 +83,10 @@ class Rule:
copy.metadata.id_is_unresolved = self.metadata.id_is_unresolved
# copy.error_sink = self.error_sink # Uncomment this line if necessary
copy.rete_net = self.rete_net
copy.rete_p_nodes = self.rete_p_nodes
copy.rete_disjunctions = self.rete_disjunctions
return copy
def __copy__(self):
+1 -2
View File
@@ -4,8 +4,7 @@ import time
from core.builtin_concepts import BuiltinConcepts, ParserResultConcept
from core.concept import Concept, get_concept_attrs
from core.global_symbols import EVENT_CONTEXT_DISPOSED
from core.sheerka.services.SheerkaExecute import NO_MATCH
from core.global_symbols import EVENT_CONTEXT_DISPOSED, NO_MATCH
from core.sheerka.services.SheerkaMemory import SheerkaMemory
from core.utils import CONSOLE_COLORS_MAP as CCM, CONSOLE_COLUMNS
from sdp.sheerkaDataProvider import Event
-22
View File
@@ -6,7 +6,6 @@ from operator import attrgetter
import core.builtin_helpers
import core.utils
from cache.Cache import Cache
from cache.DictionaryCache import DictionaryCache
from cache.IncCache import IncCache
from core.builtin_concepts import ErrorConcept, ReturnValueConcept, UnknownConcept
from core.builtin_concepts_ids import BuiltinErrors, BuiltinConcepts
@@ -66,9 +65,6 @@ class Sheerka(Concept):
CONCEPTS_BY_ID_ENTRY = "ConceptManager:Concepts_By_ID"
CONCEPTS_BY_NAME_ENTRY = "ConceptManager:Concepts_By_Name"
CONCEPTS_SYA_DEFINITION_ENTRY = "Concepts_Sya_Definitions"
RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY = "Resolved_Concepts_Sya_Definitions"
CONCEPTS_GRAMMARS_ENTRY = "Concepts_Grammars"
CHICKEN_AND_EGG_CONCEPTS_ENTRY = "Chicken_And_Egg_Concepts"
OBJECTS_IDS_ENTRY = "Objects_Ids"
@@ -119,7 +115,6 @@ class Sheerka(Concept):
"test_error": SheerkaMethod(self.test_error, False),
}
self.locals = {}
self.concepts_ids = None
def __copy__(self):
@@ -128,13 +123,6 @@ class Sheerka(Concept):
def __deepcopy__(self, memodict={}):
return self
@property
def concepts_grammars(self):
"""
Quick access to BNF grammars
"""
return self.om.current_cache_manager().caches[self.CONCEPTS_GRAMMARS_ENTRY].cache
@property
def chicken_and_eggs(self):
return self.om.current_cache_manager().caches[self.CHICKEN_AND_EGG_CONCEPTS_ENTRY].cache
@@ -237,16 +225,6 @@ class Sheerka(Concept):
cache = IncCache().auto_configure(self.OBJECTS_IDS_ENTRY)
self.om.register_cache(self.OBJECTS_IDS_ENTRY, cache)
cache = DictionaryCache().auto_configure(self.CONCEPTS_SYA_DEFINITION_ENTRY)
self.om.register_cache(self.CONCEPTS_SYA_DEFINITION_ENTRY, cache)
self.om.get(self.CONCEPTS_SYA_DEFINITION_ENTRY, None) # to init from sdp
cache = DictionaryCache().auto_configure(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY)
self.om.register_cache(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY, cache, persist=False)
cache = Cache().auto_configure(self.CONCEPTS_GRAMMARS_ENTRY)
self.om.register_cache(self.CONCEPTS_GRAMMARS_ENTRY, cache, persist=False)
cache = Cache().auto_configure(self.CHICKEN_AND_EGG_CONCEPTS_ENTRY)
self.om.register_cache(self.CHICKEN_AND_EGG_CONCEPTS_ENTRY, cache, persist=False)
@@ -5,7 +5,6 @@ from cache.SetCache import SetCache
from core.concept import copy_concepts_attrs, load_concepts_attrs
from core.global_symbols import NotFound, Removed, EVENT_CONCEPT_CREATED, EVENT_CONCEPT_DELETED, EVENT_RULE_CREATED, \
EVENT_RULE_DELETED, EVENT_CONCEPT_ID_DELETED, EVENT_RULE_ID_DELETED
from core.utils import sheerka_deepcopy
from sdp.sheerkaDataProvider import SheerkaDataProvider
@@ -83,7 +82,6 @@ class Ontology:
self.cache_manager = cache_manager
self.alt_sdp = alt_sdp
self.concepts_attributes = None
self.local_variables = None
def __repr__(self):
return f"Ontology('{self.name}')"
@@ -253,13 +251,10 @@ class SheerkaOntologyManager:
"""
# TODO persist these information ?
self.current_ontology().concepts_attributes = copy_concepts_attrs()
self.current_ontology().local_variables = sheerka_deepcopy(self.sheerka.locals)
def reset_sheerka_state(self):
if self.current_ontology().concepts_attributes is not None:
load_concepts_attrs(self.current_ontology().concepts_attributes)
if self.current_ontology().local_variables is not None:
self.sheerka.locals = self.current_ontology().local_variables
def current_cache_manager(self) -> CacheManager:
return self.ontologies[0].cache_manager
@@ -5,6 +5,7 @@ from os import path
from core.builtin_concepts_ids import BuiltinConcepts, BuiltinContainers
from core.builtin_helpers import ensure_concept_or_rule
from core.concept import Concept
from core.sheerka.services.SheerkaHistoryManager import SheerkaHistoryManager
from core.sheerka.services.SheerkaMemory import SheerkaMemory
from core.sheerka.services.sheerka_service import BaseService
@@ -33,6 +34,7 @@ class SheerkaAdmin(BaseService):
self.sheerka.bind_service_method(self.admin_pop_ontology, True, as_name="pop_ontology")
self.sheerka.bind_service_method(self.ontologies, False)
self.sheerka.bind_service_method(self.in_memory, False)
self.sheerka.bind_service_method(self.admin_history, False, as_name="history")
def caches_names(self):
"""
@@ -214,3 +216,7 @@ class SheerkaAdmin(BaseService):
res[k] = obj.obj
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.TO_DICT, body=res))
def admin_history(self, depth=10, start=0):
history = self.sheerka.services[SheerkaHistoryManager.NAME].history(depth, start)
return self.sheerka.new(BuiltinConcepts.TO_LIST, body=history)
@@ -98,6 +98,8 @@ class SheerkaConceptManager(BaseService):
CONCEPTS_BY_FIRST_KEYWORD_ENTRY = "ConceptManager:Concepts_By_First_Keyword"
RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY = "ConceptManager:Resolved_Concepts_By_First_Keyword"
CONCEPTS_BNF_DEFINITIONS_ENTRY = "ConceptManager:Concepts_BNF_Definitions"
def __init__(self, sheerka):
super().__init__(sheerka, order=11)
self.forbidden_meta = {"is_builtin", "key", "id", "props", "variables"}
@@ -117,6 +119,8 @@ class SheerkaConceptManager(BaseService):
self.sheerka.bind_service_method(self.get_by_id, False, visible=False)
self.sheerka.bind_service_method(self.is_not_a_variable, False, visible=False)
self.sheerka.bind_service_method(self.get_concepts_by_first_token, False, visible=False)
self.sheerka.bind_service_method(self.get_concepts_bnf_definitions, False, visible=False)
self.sheerka.bind_service_method(self.clear_bnf_definition, True, visible=False)
register_concept_cache = self.sheerka.om.register_concept_cache
@@ -141,6 +145,9 @@ class SheerkaConceptManager(BaseService):
cache = DictionaryCache().auto_configure(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY)
self.sheerka.om.register_cache(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache, persist=False)
cache = Cache().auto_configure(self.CONCEPTS_BNF_DEFINITIONS_ENTRY)
self.sheerka.om.register_cache(self.CONCEPTS_BNF_DEFINITIONS_ENTRY, cache, persist=False)
def initialize_deferred(self, context, is_first_time):
if is_first_time:
self.sheerka.om.put(self.sheerka.OBJECTS_IDS_ENTRY, self.USER_CONCEPTS_IDS, 1000)
@@ -244,7 +251,7 @@ class SheerkaConceptManager(BaseService):
# TODO : this line seems to be useless
# The grammar is never reset
if concept.get_bnf() and init_bnf_ret_value is not None and init_bnf_ret_value.status:
sheerka.cache_manager.clear(sheerka.CONCEPTS_GRAMMARS_ENTRY)
sheerka.cache_manager.clear(self.CONCEPTS_BNF_DEFINITIONS_ENTRY)
# publish the new concept
sheerka.publish(context, EVENT_CONCEPT_CREATED, concept)
@@ -509,7 +516,7 @@ class SheerkaConceptManager(BaseService):
for concept_id in refs:
# remove the grammar entry so that it can be recreated
self.sheerka.om.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_id)
self.sheerka.om.delete(self.CONCEPTS_BNF_DEFINITIONS_ENTRY, concept_id)
# reset the bnf definition if needed
if modified_concept:
@@ -559,6 +566,12 @@ class SheerkaConceptManager(BaseService):
"""
return self.sheerka.om.get(self.sheerka.CONCEPTS_BY_NAME_ENTRY, name) is NotFound
def clear_bnf_definition(self, concept_id=None):
if concept_id:
self.sheerka.om.delete(self.CONCEPTS_BNF_DEFINITIONS_ENTRY, concept_id)
else:
self.sheerka.om.clear(self.CONCEPTS_BNF_DEFINITIONS_ENTRY)
@staticmethod
def _name_has_changed(to_add):
if to_add is None or "meta" not in to_add:
@@ -839,3 +852,6 @@ class SheerkaConceptManager(BaseService):
return core.utils.make_unique(result + custom_concepts,
lambda c: c.concept.id if hasattr(c, "concept") else c.id)
def get_concepts_bnf_definitions(self):
return self.sheerka.om.current_cache_manager().caches[self.CONCEPTS_BNF_DEFINITIONS_ENTRY].cache
@@ -280,6 +280,9 @@ class DebugItem:
class SheerkaDebugManager(BaseService):
NAME = "Debug"
PREFIX = "debug."
VARS_DEBUG_TYPE = "vars"
RULES_DEBUG_TYPE = "rules"
CONCEPTS_DEBUG_TYPE = "concepts"
children_activation_regex = re.compile(r"(\d+)\+")
@@ -292,6 +295,10 @@ class SheerkaDebugManager(BaseService):
self.debug_vars_settings = []
self.debug_rules_settings = []
self.debug_concepts_settings = []
self.registered_vars = [] # list of all variables that can be debugged
self.registered_rules = [] # list of all rules that can be debugged
self.registered_concepts = [] # list of all concept that can be debugged
self.state_vars = [
"activated",
"explicit", # to remove ?
@@ -302,16 +309,43 @@ class SheerkaDebugManager(BaseService):
"debug_concepts_settings"
]
self.item_name = {
self.VARS_DEBUG_TYPE: "variable",
self.RULES_DEBUG_TYPE: "rule",
self.CONCEPTS_DEBUG_TYPE: "concept",
}
def initialize(self):
self.sheerka.bind_service_method(self.set_debug, True)
self.sheerka.bind_service_method(self.inspect, False)
self.sheerka.bind_service_method(self.get_debugger, False)
self.sheerka.bind_service_method(self.reset_debug, False)
self.sheerka.bind_service_method(self.debug_var, True)
self.sheerka.bind_service_method(self.debug_rule, True)
self.sheerka.bind_service_method(self.debug_concept, True)
self.sheerka.bind_service_method(self.set_debug_var, True)
self.sheerka.bind_service_method(self.set_debug_rule, True)
self.sheerka.bind_service_method(self.set_debug_concept, True)
self.sheerka.bind_service_method(self.list_debug_vars, True)
self.sheerka.bind_service_method(self.list_debug_rules, True)
self.sheerka.bind_service_method(self.list_debug_concepts, True)
self.sheerka.bind_service_method(self.register_debug_vars, True, visible=False)
self.sheerka.bind_service_method(self.register_debug_rules, True, visible=False)
self.sheerka.bind_service_method(self.register_debug_concepts, True, visible=False)
# self.sheerka.bind_service_method(self.get_debug_settings, False, as_name="debug_settings")
# register what can be registered
from parsers.BnfNodeParser import BnfNodeParser
from evaluators.DefConceptEvaluator import DefConceptEvaluator
from evaluators.PythonEvaluator import PythonEvaluator
from parsers.SyaNodeParser import SyaNodeParser
self.register_debug_vars(BnfNodeParser.NAME, "parse", "result")
self.register_debug_concepts(BnfNodeParser.NAME, "parse", "*")
self.register_debug_vars(DefConceptEvaluator.NAME, "matches", "*")
self.register_debug_vars(DefConceptEvaluator.NAME, "eval", "*")
self.register_debug_vars(DefConceptEvaluator.NAME, "get_variables", "names")
self.register_debug_vars(PythonEvaluator.NAME, "eval", "globals")
self.register_debug_vars(PythonEvaluator.NAME, "eval", "ret")
self.register_debug_vars("Exceptions", PythonEvaluator.NAME+"-eval", "exception")
self.register_debug_vars("Exceptions", PythonEvaluator.NAME+"-eval", "trace")
self.register_debug_vars(SyaNodeParser.NAME, "parse", "*")
def initialize_deferred(self, context, is_first_time):
self.restore_state()
@@ -334,6 +368,124 @@ class SheerkaDebugManager(BaseService):
def restore_state(self):
self.restore_values(*self.state_vars)
def register_debug(self, item_type, service, method, item):
"""
Register a debug item, in order to east their discovery
:param item_type:
:param service:
:param method:
:param item:
:return:
"""
if item_type == self.VARS_DEBUG_TYPE:
self.registered_vars.append((service, method, item))
elif item_type == self.RULES_DEBUG_TYPE:
self.registered_rules.append((service, method, item))
elif item_type == self.CONCEPTS_DEBUG_TYPE:
self.registered_concepts.append((service, method, item))
else:
raise NotImplementedError()
def register_debug_vars(self, service, method, item):
return self.register_debug(self.VARS_DEBUG_TYPE, service, method, item)
def register_debug_rules(self, service, method, item):
return self.register_debug(self.RULES_DEBUG_TYPE, service, method, item)
def register_debug_concepts(self, service, method, item):
return self.register_debug(self.CONCEPTS_DEBUG_TYPE, service, method, item)
def filter_registered_debug(self, item_type, pattern=None, **kwargs):
"""
Filter the list of the debug item
:param pattern:
:param item_type:
:return:
"""
item_name = self.item_name[item_type]
service, method_name, item = "*", "*", "*"
if pattern:
tokens = pattern.split(".")
service = tokens[0]
method_name = "*"
item = "*"
if len(tokens) > 1:
method_name = tokens[1]
if len(tokens) > 2:
item = tokens[2]
# override with kwargs
service = kwargs.get("service", service)
method_name = kwargs.get("method", method_name)
item = kwargs.get(item_name, item)
if item_type == self.VARS_DEBUG_TYPE:
lst = self.registered_vars
elif item_type == self.RULES_DEBUG_TYPE:
lst = self.registered_rules
elif item_type == self.CONCEPTS_DEBUG_TYPE:
lst = self.registered_concepts
else:
raise NotImplementedError()
for registered in lst:
if service != "*" and service != registered[0]:
continue
if method_name != "*" and method_name != registered[1]:
continue
if item != "*" and item != registered[2]:
continue
yield registered
def list_registered(self, item_type, pattern=None, **kwargs):
"""
Return a formatted list of available registered debug items
:param item_type:
:param pattern:
:param kwargs:
:return:
"""
pattern = pattern.strip() if pattern is not None else None
lst = self.filter_registered_debug(item_type, pattern, **kwargs)
# for (service, method, item_name)
# level == 1 -> print 'service'
# level == 2 -> print 'service.method'
# level == 2 -> print 'service.method.item_name'
if pattern:
if not pattern.endswith(".*"):
pattern += ".*"
tokens = pattern.split(".")
level = len(tokens)
else:
level = 1
if "service" in kwargs.keys():
level = 2
elif {"method", "variable", "rule", "concept"}.intersection(set(kwargs.keys())):
level = 3
res = set()
for filtered in lst:
if level == 1:
res.add(filtered[0])
elif level == 2:
res.add(f"{filtered[0]}.{filtered[1]}")
else:
res.add(f"{filtered[0]}.{filtered[1]}.{filtered[2]}")
return self.sheerka.new(BuiltinConcepts.TO_LIST, body=sorted(res))
def list_debug_vars(self, pattern=None, **kwargs):
return self.list_registered(self.VARS_DEBUG_TYPE, pattern, **kwargs)
def list_debug_rules(self, pattern=None, **kwargs):
return self.list_registered(self.RULES_DEBUG_TYPE, pattern, **kwargs)
def list_debug_concepts(self, pattern=None, **kwargs):
return self.list_registered(self.CONCEPTS_DEBUG_TYPE, pattern, **kwargs)
def set_debug(self, context, value=True):
self.activated = value
self.sheerka.record_var(context, self.NAME, "activated", self.activated)
@@ -491,7 +643,7 @@ class SheerkaDebugManager(BaseService):
return self.sheerka.ret(SheerkaDebugManager.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
def debug_var(self, context, *args, **kwargs):
def set_debug_var(self, context, *args, **kwargs):
"""
Adds debug item for variables
debug_var(<service>.<method>.<var>, <context_id>[+], <debug_id>)
@@ -504,7 +656,7 @@ class SheerkaDebugManager(BaseService):
i, s, m, c_id, c_children, d, e = self.parse_debug_args("variable", *args, **kwargs)
return self.add_or_update_debug_item(context, "vars", i, s, m, c_id, c_children, d, False, e)
def debug_rule(self, context, *args, **kwargs):
def set_debug_rule(self, context, *args, **kwargs):
"""
Adds debug item for rules
debug_var(<service>.<method>.<rule>, <context_id>[+], <debug_id>)
@@ -518,7 +670,7 @@ class SheerkaDebugManager(BaseService):
i, s, m, c_id, c_children, d, e = self.parse_debug_args("rule", *args, **kwargs)
return self.add_or_update_debug_item(context, "rules", i, s, m, c_id, c_children, d, False, e)
def debug_concept(self, context, *args, **kwargs):
def set_debug_concept(self, context, *args, **kwargs):
"""
Adds debug item for concepts
debug_var(<service>.<method>.<concept>, <context_id>[+], <debug_id>)
@@ -546,7 +698,7 @@ class SheerkaDebugManager(BaseService):
Print
:param context:
:param args: 1st parameter is what to display, the other are the properties to display
:param kwargs: how to display the result
:param kwargs: how to display the result (as_bag=True, values=True)
:return:
"""
@@ -1,7 +1,7 @@
from dataclasses import dataclass
from core.builtin_concepts import BuiltinConcepts
from core.builtin_helpers import expect_one, only_successful, parse_unrecognized, evaluate, ensure_concept
from core.builtin_helpers import expect_one, only_successful, evaluate, ensure_concept
from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved, AllConceptParts, \
concept_part_value
from core.global_symbols import NotInit
@@ -282,11 +282,11 @@ class SheerkaEvaluateConcept(BaseService):
:return:
"""
while True:
return_value = parse_unrecognized(current_context,
s,
parsers="all",
prop=p,
filter_func=only_successful)
return_value = current_context.sheerka.parse_unrecognized(current_context,
s,
parsers="all",
prop=p,
filter_func=only_successful)
if not return_value.status:
if current_context.preprocess:
@@ -26,6 +26,11 @@ class SheerkaEvaluateRules(BaseService):
self.sheerka.subscribe(EVENT_RULE_DELETED, self.on_rule_deleted)
self.sheerka.subscribe(EVENT_RULE_ID_DELETED, self.on_rule_deleted)
self.sheerka.register_debug_vars(self.NAME, "evaluate_rules", "results")
self.sheerka.register_debug_rules(self.NAME, "evaluate_rule", "*")
def reset_evaluators(self):
# instantiate evaluators, once for all, only keep when it's enabled
evaluators = [e_class() for e_class in self.sheerka.evaluators]
+150 -34
View File
@@ -1,23 +1,20 @@
import core.utils
from cache.FastCache import FastCache
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
from core.global_symbols import NotFound
from core.concept import ConceptParts
from core.global_symbols import NotFound, NO_MATCH
from core.sheerka.services.sheerka_service import BaseService
from core.tokenizer import Tokenizer, TokenKind, Token
from core.tokenizer import Tokenizer, TokenKind, Token, Keywords
NO_MATCH = "** No Match **"
EVALUATOR_STEPS = [
BuiltinConcepts.BEFORE_PARSING,
BuiltinConcepts.AFTER_PARSING,
BuiltinConcepts.BEFORE_EVALUATION,
BuiltinConcepts.EVALUATION,
BuiltinConcepts.AFTER_EVALUATION,
BuiltinConcepts.BEFORE_RENDERING,
BuiltinConcepts.RENDERING,
BuiltinConcepts.AFTER_RENDERING,
BuiltinConcepts.BEFORE_RULES_EVALUATION,
BuiltinConcepts.AFTER_RULES_EVALUATION,
]
PARSE_STEPS = [BuiltinConcepts.BEFORE_PARSING, BuiltinConcepts.PARSING, BuiltinConcepts.AFTER_PARSING]
PARSE_AND_EVAL_STEPS = PARSE_STEPS + [BuiltinConcepts.BEFORE_EVALUATION,
BuiltinConcepts.EVALUATION,
BuiltinConcepts.AFTER_EVALUATION]
ALL_STEPS = PARSE_AND_EVAL_STEPS + [BuiltinConcepts.BEFORE_RENDERING,
BuiltinConcepts.RENDERING,
BuiltinConcepts.AFTER_RENDERING,
BuiltinConcepts.BEFORE_RULES_EVALUATION,
BuiltinConcepts.AFTER_RULES_EVALUATION]
class ParserInput:
@@ -173,14 +170,15 @@ class SheerkaExecute(BaseService):
def __init__(self, sheerka):
# order must be after SheerkaEvaluateRules because of self.rules_evaluation_service
super().__init__(sheerka, order=5)
# order must be after ConceptManager because it needs concept bnf definitions
super().__init__(sheerka, order=15)
self.pi_cache = FastCache(default=lambda key: ParserInput(key), max_size=20)
self.instantiated_evaluators = None
self.evaluators_by_name = None
self.instantiated_parsers = None
self.parsers_by_name = None
self.old_values = []
self.preprocessed_items_old_values = []
# cache for all preregistered evaluator combination
# the key is the concatenation of the step and the name of evaluators in the group
@@ -202,6 +200,10 @@ class SheerkaExecute(BaseService):
def initialize(self):
self.sheerka.bind_service_method(self.execute, True, visible=False)
self.sheerka.bind_service_method(self.execute_rules, True, visible=False)
self.sheerka.bind_service_method(self.parse_unrecognized, False, visible=False)
self.sheerka.bind_service_method(self.parse_function, False, visible=False)
self.sheerka.bind_service_method(self.parse_python, False, visible=False)
self.sheerka.bind_service_method(self.parse_expression, False, visible=False)
self.reset_registered_evaluators()
self.reset_registered_parsers()
@@ -219,7 +221,7 @@ class SheerkaExecute(BaseService):
self.evaluators_by_name = {e.short_name: e for e in self.instantiated_evaluators}
# get default evaluators by process step
for process_step in EVALUATOR_STEPS:
for process_step in ALL_STEPS:
self.grouped_evaluators_cache[f"{process_step}|__default"] = self.get_grouped(
[e for e in self.instantiated_evaluators if process_step in e.steps])
@@ -270,7 +272,7 @@ class SheerkaExecute(BaseService):
if var_name == "preprocess_name":
continue
if hasattr(item, var_name):
self.old_values.append((item, var_name, getattr(item, var_name)))
self.preprocessed_items_old_values.append((item, var_name, getattr(item, var_name)))
setattr(item, var_name, value)
def get_evaluators(self, context, process_step):
@@ -319,29 +321,23 @@ class SheerkaExecute(BaseService):
for priority, parsers_classes in from_cache[0].items()}
return grouped_instances, from_cache[1]
# Normal case, use all registered parsers
if not context.preprocess_parsers and not context.preprocess:
return get_instances(self.grouped_parsers_cache["__default"])
# Other case, only use a subset of parsers
# This case is heavily used by lexer node parsers, thru parse_unrecognized
if context.preprocess_parsers and not context.preprocess:
key = "|".join(context.preprocess_parsers)
key = self.get_parsers_key(context)
if key:
try:
return get_instances(self.grouped_parsers_cache[key])
return key, *get_instances(self.grouped_parsers_cache[key])
except KeyError:
parsers = [self.parsers_by_name[p] for p in context.preprocess_parsers if p in self.parsers_by_name]
self.grouped_parsers_cache[key] = self.get_grouped(parsers, use_classes=True)
return get_instances(self.grouped_parsers_cache[key])
return key, *get_instances(self.grouped_parsers_cache[key])
# final case, parsers attributes are modified by the context
# else, case where parsers attributes are modified by the context
# This a the case when we want to disable a specific parser, or change the order of priority
parsers = [self.parsers_by_name[p] for p in context.preprocess_parsers if p in self.parsers_by_name] \
if context.preprocess_parsers else self.instantiated_parsers
self.preprocess(parsers, context.preprocess)
parsers = [p for p in parsers if p.enabled] # only keep those that are still enabled
groups, sorted_priorities = self.get_grouped(parsers, use_classes=True)
return get_instances((groups, sorted_priorities))
return key, *get_instances((groups, sorted_priorities))
def get_parser_input(self, text, tokens=None):
"""
@@ -366,6 +362,22 @@ class SheerkaExecute(BaseService):
self.pi_cache.put(key, pi)
return pi
@staticmethod
def get_parsers_key(context):
"""
From the context.preprocess_parsers and context.preprocess,
try to find a key to store the further results of the parsings
:param context:
:return:
"""
if not context.preprocess_parsers and not context.preprocess:
return "__default"
if context.preprocess_parsers and not context.preprocess:
return "|".join(context.preprocess_parsers)
return None
def call_parsers(self, context, return_values):
"""
Call all the parsers, ordered by priority
@@ -398,7 +410,7 @@ class SheerkaExecute(BaseService):
# keep track of the originals user inputs, as they need to be removed at the end
user_inputs = to_process[:]
grouped_parsers, sorted_priorities = self.get_parsers(context)
parsers_key, grouped_parsers, sorted_priorities = self.get_parsers(context)
stop_processing = False
for priority in sorted_priorities:
@@ -644,10 +656,10 @@ class SheerkaExecute(BaseService):
return return_values
def undo_preprocess(self):
    """
    Restore attributes that a preprocess pass modified.

    Each entry in self.preprocessed_items_old_values is an
    (item, attribute_name, previous_value) triple saved before the
    preprocess changed the attribute; every one is restored, then the
    saved list is cleared so a later undo is a no-op.
    """
    # Defect fixed: the diff left both the old (self.old_values) and new
    # (self.preprocessed_items_old_values) loop headers in place; only the
    # renamed attribute is used now.
    for item, var_name, value in self.preprocessed_items_old_values:
        setattr(item, var_name, value)
    self.preprocessed_items_old_values.clear()
@staticmethod
def matches(parser_or_evaluator_name, preprocessor_name):
@@ -655,3 +667,107 @@ class SheerkaExecute(BaseService):
return parser_or_evaluator_name.startswith(preprocessor_name[:-1])
else:
return parser_or_evaluator_name == preprocessor_name
def parse_unrecognized(self, context, source, parsers, who=None, prop=None, filter_func=None):
    """
    Try to recognize concepts or code from source using the given parsers.

    Runs the full parsing pipeline inside a PARSING sub-context pushed onto
    the current context, optionally restricting which parsers are active.

    :param context: current execution context; a sub-context is pushed onto it
    :param source: ParserInput if possible
    :param parsers: "all" to keep every parser enabled, otherwise the list of
        parser names installed as the sub-context's preprocess_parsers
    :param who: who is asking the parsing ?
    :param prop: Extra info, when parsing a property; also selects the
        sub-context description and, for WHERE/PRE/WHEN, protects a hint
    :param filter_func: Once the result are found, call this function to filter them
    :return: the (possibly filtered) return values produced by sheerka.execute
    """
    sheerka = context.sheerka
    if prop:
        action_context = {"prop": prop, "source": source}
        desc = f"Parsing attribute '{prop}'"
    else:
        action_context = source
        desc = f"Parsing '{source}'"
    with context.push(BuiltinConcepts.PARSING, action_context, who=who, desc=desc) as sub_context:
        # disable all parsers but the requested ones
        if parsers != "all":
            sub_context.preprocess_parsers = parsers
            # sub_context.add_preprocess(BaseParser.PREFIX + "*", enabled=False)
            # for parser in parsers:
            #     sub_context.add_preprocess(BaseParser.PREFIX + parser, enabled=True)
        if prop in (Keywords.WHERE, Keywords.PRE, ConceptParts.WHERE, ConceptParts.PRE, Keywords.WHEN):
            # NOTE(review): presumably prevents question evaluation while
            # compiling rule conditions — confirm against hint consumers
            sub_context.protected_hints.add(BuiltinConcepts.EVAL_QUESTION_REQUESTED)
        sub_context.add_inputs(source=source)
        # wrap the source as a USER_INPUT return value so the execute pipeline
        # treats it like a fresh input going through PARSE_STEPS only
        to_parse = sheerka.ret(context.who,
                               True,
                               sheerka.new(BuiltinConcepts.USER_INPUT, body=source))
        res = sheerka.execute(sub_context, to_parse, PARSE_STEPS)
        if filter_func:
            res = filter_func(sub_context, res)
        sub_context.add_values(return_values=res)
        return res
def parse_function(self, context, source, tokens=None, start=0):
    """
    Helper function that parses what is supposed to be a function.

    Parses with a fresh FunctionParser inside a PARSE_CODE sub-context,
    then shifts the positions of every PARSER_RESULT node by ``start`` so
    they line up with the caller's token stream.

    :param context: current execution context
    :param source: source text of the function call
    :param tokens: optional pre-tokenized form of ``source``
    :param start: start index for the source code node; every resulting
        node's start/end is offset by this amount
    :return: list of return values produced by the parser
    """
    from parsers.BaseNodeParser import SourceCodeWithConceptNode
    sheerka = context.sheerka
    from parsers.FunctionParser import FunctionParser
    parser = FunctionParser()
    desc = f"Parsing function '{source}'"
    with context.push(BuiltinConcepts.PARSE_CODE, source, desc=desc) as sub_context:
        sheerka_execution = sheerka.services[SheerkaExecute.NAME]
        res = parser.parse(sub_context, sheerka_execution.get_parser_input(source, tokens))
        if not isinstance(res, list):
            # normalize: callers always iterate a list of return values
            res = [res]
        for r in [r for r in res if sheerka.isinstance(r.body, BuiltinConcepts.PARSER_RESULT)]:
            # re-base the recognized node onto the caller's coordinates
            r.body.body.start += start
            r.body.body.end += start
            if isinstance(r.body.body, SourceCodeWithConceptNode):
                # composite nodes also carry first/last and inner nodes to shift
                for n in [r.body.body.first, r.body.body.last] + r.body.body.nodes:
                    n.start += start
                    n.end += start
        return res
def parse_python(self, context, source, desc=None):
    """
    Helper function that parses what is known to be Python source code.

    :param context: current execution context
    :param source: the Python source text to compile
    :param desc: option description when creating the sub context
    :return: result of PythonParser.parse on the given source
    """
    from parsers.PythonParser import PythonParser
    if not desc:
        desc = f"Compiling python '{source}'"
    action = {"language": "Python", "source": source}
    with context.push(BuiltinConcepts.PARSE_CODE, action, desc) as sub_context:
        execute_service = context.sheerka.services[SheerkaExecute.NAME]
        return PythonParser().parse(sub_context, execute_service.get_parser_input(source))
def parse_expression(self, context, source, desc=None):
    """
    Helper function to parse expressions with AND, OR and NOT.

    :param context: current execution context
    :param source: expression text to parse
    :param desc: optional description for the sub-context
    :return: result of ExpressionParser.parse on the given source
    """
    from parsers.ExpressionParser import ExpressionParser
    if not desc:
        desc = f"Parsing expression '{source}'"
    with context.push(BuiltinConcepts.PARSE_CODE, source, desc) as sub_context:
        pi = context.sheerka.services[SheerkaExecute.NAME].get_parser_input(source)
        return ExpressionParser().parse(sub_context, pi)
@@ -35,6 +35,9 @@ class History:
return self.event == other.event and self.result == other.result
def __hash__(self):
return hash((self.event, self.result, self.status))
@property
def status(self):
if self._status:
@@ -56,9 +59,6 @@ class SheerkaHistoryManager(BaseService):
def __init__(self, sheerka):
super().__init__(sheerka)
def initialize(self):
self.sheerka.bind_service_method(self.history, False)
def history(self, depth=10, start=0):
"""
Load history
@@ -90,7 +90,7 @@ class SheerkaIsAManager(BaseService):
self.sheerka.services[SheerkaConceptManager.NAME].update_references(context, concept_set)
# remove the grammar entry so that it can be recreated
self.sheerka.om.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_set.id)
self.sheerka.clear_bnf_definition(concept_set.id)
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
+33 -1
View File
@@ -31,6 +31,7 @@ class SheerkaMemory(BaseService):
self.sheerka.bind_service_method(self.add_to_memory, True, visible=False)
self.sheerka.bind_service_method(self.add_many_to_short_term_memory, True, visible=False)
self.sheerka.bind_service_method(self.get_from_memory, False)
self.sheerka.bind_service_method(self.get_last_from_memory, False)
self.sheerka.bind_service_method(self.register_object, True, visible=False)
self.sheerka.bind_service_method(self.unregister_object, True, visible=False)
self.sheerka.bind_service_method(self.commit_registered_objects, True, visible=False)
@@ -39,7 +40,7 @@ class SheerkaMemory(BaseService):
cache = ListIfNeededCache().auto_configure(self.OBJECTS_ENTRY)
self.sheerka.om.register_cache(self.OBJECTS_ENTRY, cache, persist=True, use_ref=True)
self.sheerka.subscribe(EVENT_CONTEXT_DISPOSED, self.remove_context)
def reset(self):
@@ -94,6 +95,21 @@ class SheerkaMemory(BaseService):
:param concept:
:return:
"""
last = self.sheerka.om.get(SheerkaMemory.OBJECTS_ENTRY, key)
if last is NotFound:
self.sheerka.om.put(SheerkaMemory.OBJECTS_ENTRY, key, MemoryObject(context.event.get_digest(), concept))
return
if not isinstance(last, list) and last.obj == concept:
self.sheerka.om.delete(SheerkaMemory.OBJECTS_ENTRY, key, last)
self.sheerka.om.put(SheerkaMemory.OBJECTS_ENTRY, key, MemoryObject(context.event.get_digest(), concept))
return
if isinstance(last, list) and last[-1].obj == concept:
self.sheerka.om.delete(SheerkaMemory.OBJECTS_ENTRY, key, last[-1])
self.sheerka.om.put(SheerkaMemory.OBJECTS_ENTRY, key, MemoryObject(context.event.get_digest(), concept))
return
self.sheerka.om.put(SheerkaMemory.OBJECTS_ENTRY, key, MemoryObject(context.event.get_digest(), concept))
def get_from_memory(self, context, key):
@@ -101,6 +117,20 @@ class SheerkaMemory(BaseService):
"""
return self.sheerka.om.get(SheerkaMemory.OBJECTS_ENTRY, key)
def get_last_from_memory(self, context, key):
    """
    Return an object from memory.

    When the entry holds multiple items, the most recent one (last of the
    list) is returned; NotFound is passed through unchanged.

    :param context: current execution context (unused here)
    :param key: memory key to look up
    :return: the stored object, the last of a stored list, or NotFound
    """
    stored = self.sheerka.om.get(SheerkaMemory.OBJECTS_ENTRY, key)
    if stored is NotFound or not isinstance(stored, list):
        return stored
    return stored[-1]
def register_object(self, context, key, concept):
    """
    Before adding memory_objects to memory, they first need to be registered.

    Registration is skipped entirely while sheerka is initializing, so
    startup objects are never queued for commit.

    :param context: current execution context (unused here)
    :param key: memory key under which the object will later be committed
    :param concept: the object to register
    :return: None
    """
    # Defect fixed: a diff hunk marker was embedded inside this method's
    # docstring; the method body itself is unchanged.
    if self.sheerka.during_initialisation:
        return
    self.registration[key] = concept
def unregister_object(self, context, key):
+2
View File
@@ -14,6 +14,8 @@ class SheerkaOut(BaseService):
def initialize(self):
self.sheerka.bind_service_method(self.process_return_values, False)
self.sheerka.register_debug_vars("Visitor", "create_out_tree", "Exception")
self.sheerka.register_debug_vars(SheerkaOut.NAME, "create_out_tree", "out_tree")
def create_out_tree(self, context, obj):
debugger = context.get_debugger("Visitor", "create_out_tree")
+12 -10
View File
@@ -6,8 +6,7 @@ from typing import Union, Set, List
from cache.Cache import Cache
from cache.ListIfNeededCache import ListIfNeededCache
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
from core.builtin_helpers import parse_unrecognized, is_a_question, parse_python, \
ensure_evaluated, expect_one, parse_expression
from core.builtin_helpers import is_a_question, ensure_evaluated, expect_one
from core.concept import Concept
from core.global_symbols import EVENT_RULE_PRECEDENCE_MODIFIED, RULE_COMPARISON_CONTEXT, NotFound, ErrorObj, \
EVENT_RULE_CREATED, EVENT_RULE_DELETED
@@ -765,7 +764,7 @@ class SheerkaRuleManager(BaseService):
parsed = []
errors = []
all_rete_disjunctions = []
parsed_expr_ret = parse_expression(context, source)
parsed_expr_ret = context.sheerka.parse_expression(context, source)
if parsed_expr_ret.status:
conjunctions = parsed_expr_ret.body.body.parts if isinstance(parsed_expr_ret.body.body, AndNode) else \
[parsed_expr_ret.body.body]
@@ -812,12 +811,12 @@ class SheerkaRuleManager(BaseService):
return self.sheerka.ret(self.NAME, True, parsed)
def compile_exec(self, context, source):
    """
    Compile the exec (THEN) part of a rule.

    Delegates to sheerka.parse_unrecognized with every parser enabled and
    keeps exactly one result via expect_one.

    :param context: current execution context
    :param source: source text of the exec clause
    :return: the single parsed return value (or an error result)
    """
    # Defect fixed: the diff left both the old core.builtin_helpers call and
    # the new context.sheerka method call in place; only the method-based
    # call (the post-commit form used elsewhere in this change) is kept.
    parsed = context.sheerka.parse_unrecognized(context,
                                                source,
                                                parsers="all",
                                                who=self.NAME,
                                                prop=Keywords.THEN,
                                                filter_func=expect_one)
    return parsed
@@ -1014,7 +1013,10 @@ class SheerkaRuleManager(BaseService):
return RuleCompiledPredicate(source, action, ConceptEvaluator.NAME, r, c)
else:
to_parse = PythonCodeEmitter(context, "__ret.status").recognize_concept(c, "__ret.body").get_text()
return RuleCompiledPredicate(source, action, PythonEvaluator.NAME, parse_python(context, to_parse),
return RuleCompiledPredicate(source,
action,
PythonEvaluator.NAME,
context.sheerka.parse_python(context, to_parse),
None)
res = []
-6
View File
@@ -23,11 +23,5 @@ class AddToMemoryEvaluator(OneReturnValueEvaluator):
return len(context.sheerka.services[SheerkaMemory.NAME].registration) > 0
def eval(self, context, return_value):
if context.sheerka.during_initialisation:
from core.sheerka.services.SheerkaMemory import SheerkaMemory
service = context.sheerka.services[SheerkaMemory.NAME]
service.registration.clear()
return None
context.sheerka.commit_registered_objects(context)
return None # no need to have a second pass
+9 -2
View File
@@ -168,9 +168,16 @@ class DefConceptEvaluator(OneReturnValueEvaluator):
source = ret_value.value.source.as_text() if isinstance(ret_value.value.source,
ParserInput) else ret_value.value.source
tokens = ret_value.value.tokens or list(Tokenizer(source, yield_eof=False))
tokens = [t.str_value for t in tokens]
possible_vars = set()
for t in tokens:
if t.type == TokenKind.RULE:
for v in [v for v in t.value if v is not None]:
possible_vars.add(v)
else:
possible_vars.add(t.str_value)
for identifier in [i for i in concept_name if str(i).isalnum()]:
if identifier in tokens:
if identifier in possible_vars:
variables.add(identifier)
debugger.debug_var("names", variables, hint="from concept")
return variables
+11 -9
View File
@@ -11,9 +11,10 @@ from core.concept import ConceptParts, Concept
from core.global_symbols import NotInit, NotFound
from core.rule import Rule
from core.sheerka.ExecutionContext import ExecutionContext
from core.sheerka.services.SheerkaMemory import SheerkaMemory
from core.tokenizer import Token, TokenKind
from evaluators.BaseEvaluator import OneReturnValueEvaluator
from parsers.PythonParser import PythonNode, get_python_node
from parsers.PythonParser import PythonNode
TO_DISABLED = ["breakpoint", "callable", "compile", "delattr", "eval", "exec", "exit", "input", "locals", "open",
"print", "quit", "setattr"]
@@ -97,7 +98,7 @@ class PythonEvaluator(OneReturnValueEvaluator):
debugger = context.get_debugger(PythonEvaluator.NAME, "eval")
debugger.debug_entering(node=node)
exception_debugger = context.get_debugger("Exceptions", PythonEvaluator.NAME + ".eval")
exception_debugger = context.get_debugger("Exceptions", PythonEvaluator.NAME + "-eval")
get_trace_back = exception_debugger.is_enabled()
context.log(f"Evaluating python node {node}.", self.name)
@@ -140,8 +141,8 @@ class PythonEvaluator(OneReturnValueEvaluator):
context.log("Evaluating using 'exec'.", self.name)
evaluated = self.exec_with_return(node.ast_, globals_, my_locals)
# TODO find a better implementation using SheerkaMemory
sheerka.locals.update(my_locals)
for k, v in my_locals.items():
sheerka.services[SheerkaMemory.NAME].add_to_memory(context, k, v)
if not expect_success or evaluated:
break # in this first version, we stop once a success is found
@@ -231,17 +232,18 @@ class PythonEvaluator(OneReturnValueEvaluator):
my_globals[name] = Expando("sheerka", bag)
continue
# search in local variables. To remove when local variables will be merged with memory
if name in context.sheerka.locals:
my_globals[name] = context.sheerka.locals[name]
continue
# search in short term memory
if (obj := context.get_from_short_term_memory(name)) is not NotFound:
context.log(f"Resolving '{name}'. Using value found in STM.", self.name)
my_globals[name] = obj
continue
# search in memory
if (obj := context.sheerka.get_last_from_memory(context, name)) is not NotFound:
context.log(f"Resolving '{name}'. Using value found in Long Term Memory.", self.name)
my_globals[name] = obj.obj
continue
# search in sheerka methods
if (method := self.get_sheerka_method(context, name, expression_only)) is not None:
my_globals[name] = method
+13 -2
View File
@@ -231,8 +231,7 @@ class ConceptNode(LexerNode):
def clone(self):
    """Return a shallow copy of this node sharing the same concept.

    NOTE(review): do we need to clone the concept as well ?
    """
    # Defect fixed: the diff left both the old assign-then-return form and
    # the new direct return in place; the direct return is kept.
    return ConceptNode(self.concept, self.start, self.end, self.tokens, self.source, self.underlying)
def as_bag(self):
"""
@@ -305,6 +304,16 @@ class SourceCodeNode(LexerNode):
def get_source_to_parse(self):
return self.python_node.source
def clone(self):
    """Return a shallow copy carrying the same tokens, python node and return value."""
    return SourceCodeNode(self.start,
                          self.end,
                          self.tokens,
                          self.source,
                          self.python_node,
                          self.return_value)
class SourceCodeWithConceptNode(LexerNode):
"""
@@ -413,6 +422,8 @@ class SourceCodeWithConceptNode(LexerNode):
def clone(self):
    """Return a copy with a fresh nodes list, sharing python_node and return_value."""
    duplicate = SourceCodeWithConceptNode(self.first, self.last, self.nodes.copy(), self.has_unrecognized)
    duplicate.python_node = self.python_node
    duplicate.return_value = self.return_value
    return duplicate
def to_short_str(self):
+1 -1
View File
@@ -1189,7 +1189,7 @@ class BnfNodeParser(BaseNodeParser):
if 'sheerka' in kwargs:
sheerka = kwargs.get("sheerka")
self.concepts_grammars = sheerka.concepts_grammars
self.concepts_grammars = sheerka.get_concepts_bnf_definitions()
self.sheerka = sheerka
else:
self.concepts_grammars = Cache()
+6 -6
View File
@@ -211,12 +211,12 @@ class DefConceptParser(BaseCustomGrammarParser):
return None
source = self.sheerka.services[SheerkaExecute.NAME].get_parser_input(None, tokens[1:])
parsed = core.builtin_helpers.parse_unrecognized(self.context,
source,
parsers="all",
who=self.name,
prop=keyword,
filter_func=core.builtin_helpers.expect_one)
parsed = self.sheerka.parse_unrecognized(self.context,
source,
parsers="all",
who=self.name,
prop=keyword,
filter_func=core.builtin_helpers.expect_one)
if not parsed.status:
self.add_error(parsed.value)
+3 -4
View File
@@ -1,8 +1,7 @@
from itertools import product
from core.builtin_concepts import BuiltinConcepts
from core.builtin_helpers import only_successful, parse_unrecognized, get_inner_body, parse_python, \
get_lexer_nodes_using_positions
from core.builtin_helpers import only_successful, get_inner_body, get_lexer_nodes_using_positions
from core.sheerka.services.SheerkaExecute import ParserInput
from core.sheerka.services.sheerka_service import FailedToCompileError
from core.tokenizer import TokenKind, Tokenizer, Keywords
@@ -247,7 +246,7 @@ class ExpressionParser(BaseParser):
for conjunction in conjunctions:
# try to recognize conjunction, one by one
# negative conjunction can be a concept starting with 'not'
parsed_ret = parse_unrecognized(
parsed_ret = context.sheerka.parse_unrecognized(
context,
conjunction.get_value(), # we remove the 'NOT' part when needed to ease the recognition
parsers="all",
@@ -280,7 +279,7 @@ class ExpressionParser(BaseParser):
return_values.append(recognized_conjunctions[0])
elif len(recognized_conjunctions) == 1 and recognized_conjunctions[0].who == "parsers.Python":
# it is a negated python Node. Need to parse again
ret = parse_python(context, source=str(conjunctions[0]))
ret = context.sheerka.parse_python(context, source=str(conjunctions[0]))
if ret.status:
return_values.append(ret)
else:
+18 -54
View File
@@ -1,22 +1,20 @@
from dataclasses import dataclass
from typing import List
from core.builtin_concepts import BuiltinConcepts
from core.builtin_helpers import get_lexer_nodes_from_unrecognized, update_compiled
from core.concept import Concept
from core.sheerka.services.SheerkaExecute import ParserInput
from core.tokenizer import TokenKind, Token
from core.tokenizer import TokenKind
from core.utils import get_n_clones
from parsers.SequenceNodeParser import SequenceNodeParser
from parsers.BaseNodeParser import SourceCodeNode, SourceCodeWithConceptNode, UnrecognizedTokensNode
from parsers.BaseParser import BaseParser, UnexpectedTokenParsingError, UnexpectedEofParsingError, Node
from parsers.BnfNodeParser import BnfNodeParser
from parsers.PythonWithConceptsParser import PythonWithConceptsParser
from parsers.RuleParser import RuleParser
from parsers.SequenceNodeParser import SequenceNodeParser
from parsers.SyaNodeParser import SyaNodeParser
from parsers.expressions import NameExprNode
# No need to check for Python code as the source code node will resolve to python code anyway
# I only look for concepts, so
PARSERS = [RuleParser.NAME,
SequenceNodeParser.NAME,
BnfNodeParser.NAME,
@@ -28,49 +26,16 @@ class FunctionParserNode(Node):
pass
@dataclass()
class NamesNode(FunctionParserNode):
    """A contiguous span of raw tokens captured while parsing a function."""

    start: int  # index of the first token
    end: int  # index of the last token
    tokens: List[Token]  # the raw tokens of the span; may be None

    def __repr__(self):
        return f"NameNode('{self.str_value()}')"

    def str_value(self):
        # concatenation of the raw token texts; None when no tokens were captured
        if self.tokens is None:
            return None
        return "".join([t.str_value for t in self.tokens])

    def to_unrecognized(self):
        """
        UnrecognizedTokensNode with all tokens
        """
        return UnrecognizedTokensNode(self.start, self.end, self.tokens).fix_source()

    def to_str_unrecognized(self):
        """
        UnrecognizedTokensNode with one token, which is a string token of all the tokens
        """
        # position info (index/line/column) is taken from the first token
        token = Token(TokenKind.STRING,
                      "'" + self.str_value() + "'",
                      self.tokens[0].index,
                      self.tokens[0].line,
                      self.tokens[0].column)
        return UnrecognizedTokensNode(self.start, self.end, [token]).fix_source()
@dataclass()
class FunctionParameter:
    """
    Class that represents the result of parsing one function parameter.
    """
    # Defect fixed: the diff left both the old NamesNode and the new
    # NameExprNode annotations/usages in place; only the NameExprNode
    # (post-merge of FunctionParser.NamesNode into ExpressionParser) is kept.
    value: NameExprNode  # value parsed
    separator: NameExprNode = None  # holds the value and the position of the separator

    def add_sep(self, start, end, tokens):
        """Record the separator tokens that followed this parameter."""
        self.separator = NameExprNode(start, end, tokens)

    def value_to_unrecognized(self):
        """Wrap this parameter's value tokens into an UnrecognizedTokensNode."""
        return UnrecognizedTokensNode(self.value.start, self.value.end, self.value.tokens).fix_source()
@@ -83,8 +48,8 @@ class FunctionParameter:
@dataclass
class FunctionNode(FunctionParserNode):
    """Parsed function call: opening name part, closing part, and parameters."""
    # Defect fixed: the diff left both the old NamesNode and new NameExprNode
    # field annotations in place; only the NameExprNode annotations are kept.
    first: NameExprNode  # beginning of the function (it should represent the name of the function)
    last: NameExprNode  # last part of the function (it should be the trailing parenthesis)
    parameters: list
@@ -95,11 +60,11 @@ class FN(FunctionNode):
Thereby,
FN("first", "last", ["param1," ...]) can be compared to
FunctionNode(NamesNode("first"), NamesNode("second"), [FunctionParameter(NamesNodes("param1"), NamesNodes(", ")])
FunctionNode(NameExprNode("first"), NameExprNode("second"), [FunctionParameter(NamesNodes("param1"), NamesNodes(", ")])
Note that FunctionParameter can easily be defined with a single string
* "param" -> FunctionParameter(NamesNode("param"), None)
* "param, " -> FunctionParameter(NamesNode("param"), NamesNode(", "))
* "param" -> FunctionParameter(NameExprNode("param"), None)
* "param, " -> FunctionParameter(NameExprNode("param"), NameExprNode(", "))
For more complicated situations, you can use a tuple (value, sep) to define the value part and the separator part
"""
@@ -123,14 +88,13 @@ class FN(FunctionNode):
return self.first == other.first and self.last == other.last and self.parameters == other.parameters
if isinstance(other, FunctionNode):
if self.first != other.first.str_value() or self.last != other.last.str_value():
if self.first != other.first.value or self.last != other.last.value:
return False
if len(self.parameters) != len(other.parameters):
return False
for self_parameter, other_parameter in zip(self.parameters, other.parameters):
value = other_parameter.value.str_value() if isinstance(self_parameter[0],
str) else other_parameter.value
sep = other_parameter.separator.str_value() if other_parameter.separator else None
value = other_parameter.value.value if isinstance(self_parameter[0], str) else other_parameter.value
sep = other_parameter.separator.value if other_parameter.separator else None
if self_parameter[0] != value or self_parameter[1] != sep:
return False
@@ -244,7 +208,7 @@ class FunctionParser(BaseParser):
[TokenKind.LPAR]))
return None
start_node = NamesNode(start, start + 1, self.parser_input.tokens[start:start + 2])
start_node = NameExprNode(start, start + 1, self.parser_input.tokens[start:start + 2])
if not self.parser_input.next_token():
self.add_error(UnexpectedEofParsingError(f"Unexpected EOF after left parenthesis"))
return FunctionNode(start_node, None, None)
@@ -261,7 +225,7 @@ class FunctionParser(BaseParser):
return FunctionNode(start_node, None, params)
return FunctionNode(start_node,
NamesNode(self.parser_input.pos, self.parser_input.pos, [token]),
NameExprNode(self.parser_input.pos, self.parser_input.pos, [token]),
params)
def parse_parameters(self):
@@ -319,7 +283,7 @@ class FunctionParser(BaseParser):
if not self.parser_input.next_token(skip_whitespace=False):
break
return NamesNode(start_pos, self.parser_input.pos - 1, tokens) if len(tokens) else None
return NameExprNode(start_pos, self.parser_input.pos - 1, tokens) if len(tokens) else None
def to_source_code_node(self, function_node: FunctionNode):
python_parser = PythonWithConceptsParser()
@@ -350,7 +314,7 @@ class FunctionParser(BaseParser):
# try to recognize every parameter, one by one
for param in function_node.parameters:
if isinstance(param.value, NamesNode):
if isinstance(param.value, NameExprNode):
# try to recognize concepts
unrecognized = param.value.to_unrecognized()
nodes_sequences = get_lexer_nodes_from_unrecognized(self.context,
+2 -2
View File
@@ -1,5 +1,5 @@
from core.builtin_concepts import BuiltinConcepts
from core.builtin_helpers import parse_python, CreateObjectIdentifiers
from core.builtin_helpers import CreateObjectIdentifiers
from parsers.BaseNodeParser import ConceptNode, RuleNode
from parsers.BaseNodeParser import SourceCodeWithConceptNode
from parsers.BaseParser import BaseParser
@@ -71,7 +71,7 @@ class PythonWithConceptsParser(BaseParser):
if hasattr(node, "get_python_node"):
python_ids_mappings.update(node.get_python_node().objects)
result = parse_python(context, to_parse, "Trying Python for '" + to_parse + "'")
result = context.sheerka.parse_python(context, to_parse, "Trying Python for '" + to_parse + "'")
if result.status:
python_node = result.body.body
+4 -5
View File
@@ -5,7 +5,6 @@ from typing import List
from core import builtin_helpers
from core.builtin_concepts import BuiltinConcepts
from core.builtin_helpers import parse_function
from core.concept import Concept, DEFINITION_TYPE_BNF
from core.global_symbols import CONCEPT_COMPARISON_CONTEXT
from core.sheerka.services.SheerkaComparisonManager import SheerkaComparisonManager
@@ -833,10 +832,10 @@ class InFixToPostFix:
if self.unrecognized_tokens.parenthesis_count == 0:
self.unrecognized_tokens.fix_source()
res = parse_function(self.context,
self.unrecognized_tokens.source,
self.unrecognized_tokens.tokens[:],
self.unrecognized_tokens.start)
res = self.context.sheerka.parse_function(self.context,
self.unrecognized_tokens.source,
self.unrecognized_tokens.tokens[:],
self.unrecognized_tokens.start)
instances = get_n_clones(self, len(res))
self.forked.extend(instances[1:])
+3 -3
View File
@@ -2,11 +2,11 @@ from dataclasses import dataclass
import core.utils
from core.builtin_concepts import BuiltinConcepts
from core.builtin_helpers import only_successful, parse_unrecognized, get_lexer_nodes, update_compiled
from parsers.SequenceNodeParser import SequenceNodeParser
from core.builtin_helpers import only_successful, get_lexer_nodes, update_compiled
from parsers.BaseNodeParser import ConceptNode, UnrecognizedTokensNode, SourceCodeNode, SourceCodeWithConceptNode
from parsers.BaseParser import BaseParser, ParsingError
from parsers.BnfNodeParser import BnfNodeParser
from parsers.SequenceNodeParser import SequenceNodeParser
from parsers.SyaNodeParser import SyaNodeParser
PARSERS = ["EmptyString",
@@ -56,7 +56,7 @@ class UnrecognizedNodeParser(BaseParser):
sequences_found = core.utils.sheerka_product(sequences_found, [res.body])
elif isinstance(node, UnrecognizedTokensNode):
res = parse_unrecognized(context, node.source, PARSERS)
res = context.sheerka.parse_unrecognized(context, node.source, PARSERS)
res = only_successful(context, res)
if res.status:
lexer_nodes = get_lexer_nodes(res.body.body, node.start, node.tokens)
+18
View File
@@ -3,6 +3,7 @@ from typing import List, Tuple
from core.tokenizer import Token, TokenKind, Tokenizer
from core.utils import tokens_are_matching
from parsers.BaseNodeParser import UnrecognizedTokensNode
from parsers.BaseParser import Node, ParsingError
@@ -86,6 +87,23 @@ class NameExprNode(ExprNode):
def __hash__(self):
return super().__hash__()
def to_unrecognized(self):
    """
    UnrecognizedTokensNode with all tokens.

    The resulting node spans the same start/end as this node and has its
    source text recomputed via fix_source().
    """
    return UnrecognizedTokensNode(self.start, self.end, self.tokens).fix_source()
def to_str_unrecognized(self):
    """
    UnrecognizedTokensNode with one token, which is a string token of all the tokens.
    """
    # single STRING token quoting the concatenated token text; position info
    # (index/line/column) is taken from the first token of this node
    token = Token(TokenKind.STRING,
                  "'" + self.str_value() + "'",
                  self.tokens[0].index,
                  self.tokens[0].line,
                  self.tokens[0].column)
    return UnrecognizedTokensNode(self.start, self.end, [token]).fix_source()
@dataclass(init=False)
class AndNode(ExprNode):
+4 -12
View File
@@ -2,10 +2,9 @@ import hashlib
import json
import shutil
import time
from dataclasses import dataclass
from datetime import datetime, date
from threading import RLock
from os import path
from threading import RLock
from core.global_symbols import NotFound
from core.sheerka_logger import get_logger
@@ -115,16 +114,6 @@ class SheerkaDataProviderDuplicateKeyError(Exception):
self.obj = obj
@dataclass
class SheerkaDataProviderResult:
"""
Object that is returned after adding, setting or modifying an entry
"""
entry: str # entry where the object is put
key: str # key to use to retrieve the object
digest: str # digest used to store the reference
class SheerkaDataProviderTransaction:
def __init__(self, sdp, event):
@@ -231,6 +220,9 @@ class SheerkaDataProvider:
snapshot = self.get_snapshot(SheerkaDataProvider.HeadFile)
self.state = self.load_state(snapshot)
def __repr__(self):
return f"SheerkaDataProvider(name={self.name})"
@staticmethod
def get_stream_digest(stream):
sha256_hash = hashlib.sha256()
+24
View File
@@ -8,6 +8,7 @@ from core.sheerka.Sheerka import Sheerka
from core.sheerka.services.SheerkaExecute import ParserInput
from evaluators.BaseEvaluator import BaseEvaluator
from parsers.BaseParser import BaseParser
from parsers.DefRuleParser import DefRuleNode, DefExecRuleNode, DefFormatRuleNode
from parsers.PythonParser import PythonNode
from sheerkapickle.handlers import BaseHandler, registry
@@ -213,6 +214,26 @@ class PythonNodeHandler(BaseHandler):
return instance
class DefRuleNodeHandler(BaseHandler):
    """sheerkapickle handler for serializing/restoring DefRuleNode objects."""

    def flatten(self, obj, data):
        # serialize the extra fields a DefRuleNode carries on top of its base data
        ctx = self.context
        for attr in ("tokens", "name"):
            data[attr] = ctx.flatten(getattr(obj, attr))
        return data

    def new(self, data):
        # allocate without running __init__; restore() re-initializes explicitly
        return DefRuleNode.__new__(DefRuleNode)

    def restore(self, data, instance):
        ctx = self.context
        instance.__init__(data["source"], objects=ctx.restore(data["objects"]))
        instance.tokens = ctx.restore(data["tokens"])
        instance.name = ctx.restore(data["name"])
        return instance
def initialize_pickle_handlers():
registry.register(Concept, ConceptHandler, True)
registry.register(UserInputConcept, UserInputHandler, True)
@@ -221,3 +242,6 @@ def initialize_pickle_handlers():
registry.register(ExecutionContext, ExecutionContextHandler, True)
registry.register(Rule, RuleContextHandler, True)
registry.register(PythonNode, PythonNodeHandler, True)
registry.register(DefRuleNode, DefRuleNodeHandler, True)
registry.register(DefExecRuleNode, DefRuleNodeHandler, True) # TODO: fix inheritance that does not work
registry.register(DefFormatRuleNode, DefRuleNodeHandler, True) # TODO: fix inheritance that does not work
+4 -2
View File
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Generator, Union
from core.concept import Concept
from core.global_symbols import NotInit
from core.rule import Rule
from core.rule import Rule, ACTION_TYPE_PRINT
from core.utils import as_bag
from sheerkarete.alpha import AlphaMemory
from sheerkarete.beta import ReteNode, BetaMemory
@@ -282,7 +282,9 @@ class ReteNetwork:
if rule.id is None:
raise ValueError("Rule has no id, cannot add")
if not rule.metadata.is_enabled or not rule.metadata.is_compiled:
if (not rule.metadata.is_enabled or
not rule.metadata.is_compiled or
rule.metadata.action_type == ACTION_TYPE_PRINT):
return
if rule.rete_net: