Added first version of DebugManager. Implemented draft of the rule engine.
This commit is contained in:
@@ -5,6 +5,16 @@ from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.tokenizer import Tokenizer, TokenKind, Token
|
||||
|
||||
# Sentinel string used to signal/display that nothing matched.
NO_MATCH = "** No Match **"

# Ordered list of pipeline steps at which evaluators may run.
# PARSING itself is not listed: it is dispatched to the parsers, not to
# evaluators (see the execute() step dispatch below).
EVALUATOR_STEPS = [
    BuiltinConcepts.BEFORE_PARSING,
    BuiltinConcepts.AFTER_PARSING,
    BuiltinConcepts.BEFORE_EVALUATION,
    BuiltinConcepts.EVALUATION,
    BuiltinConcepts.AFTER_EVALUATION,
    BuiltinConcepts.BEFORE_RENDERING,
    BuiltinConcepts.RENDERING,
    BuiltinConcepts.AFTER_RENDERING,
]
|
||||
|
||||
|
||||
class ParserInput:
|
||||
@@ -74,10 +84,10 @@ class ParserInput:
|
||||
if self.start == 0 and self.end == self.length:
|
||||
self.sub_text = self.text
|
||||
return self.sub_text
|
||||
self.sub_text = self.get_text_from_tokens(self.tokens[self.start:self.end])
|
||||
self.sub_text = core.utils.get_text_from_tokens(self.tokens[self.start:self.end])
|
||||
return self.sub_text
|
||||
else:
|
||||
return self.get_text_from_tokens(self.as_tokens(), custom_switcher, tracker)
|
||||
return core.utils.get_text_from_tokens(self.as_tokens(), custom_switcher, tracker)
|
||||
|
||||
def as_tokens(self):
|
||||
if self.sub_tokens:
|
||||
@@ -145,36 +155,6 @@ class ParserInput:
|
||||
return True
|
||||
return False
|
||||
|
||||
@staticmethod
def get_text_from_tokens(tokens, custom_switcher=None, tracker=None):
    """
    Rebuild the source text from a list of tokens.

    :param tokens: a single token, an iterable of tokens, or None
    :param custom_switcher: dict {token type: callable(token) -> str} used to
        override the text produced for some token types
    :param tracker: optional dict; when a token's text came from
        custom_switcher, tracker[produced_text] is set to the original
        token value
    :return: the reconstructed text ("" when tokens is None)
    """
    if tokens is None:
        return ""

    # Accept a single token as well as an iterable of tokens.
    if not hasattr(tokens, "__iter__"):
        tokens = [tokens]

    switcher = {
        TokenKind.CONCEPT: lambda t: core.utils.str_concept(t.value),
    }
    if custom_switcher:
        switcher.update(custom_switcher)

    default = lambda t: t.value  # hoisted so it is not rebuilt per token
    parts = []
    for token in tokens:
        value = switcher.get(token.type, default)(token)
        parts.append(value)
        # Bug fix: the original evaluated `token.type in custom_switcher`
        # whenever tracker was supplied, raising TypeError when
        # custom_switcher is None; guard on custom_switcher first.
        if tracker is not None and custom_switcher and token.type in custom_switcher:
            tracker[value] = token.value
    # str.join is linear; repeated `res += value` can be quadratic.
    return "".join(parts)
|
||||
|
||||
|
||||
class SheerkaExecute(BaseService):
|
||||
"""
|
||||
@@ -187,18 +167,156 @@ class SheerkaExecute(BaseService):
|
||||
def __init__(self, sheerka):
    """
    Service executing the Sheerka parse/evaluate pipeline.

    :param sheerka: the owning Sheerka application/service container
    """
    super().__init__(sheerka)
    # Bounded cache (20 entries) of ParserInput objects keyed by source text.
    self.pi_cache = Cache(default=lambda key: ParserInput(key), max_size=20)
    # Evaluator instances are created once in reset_evaluators() and reused.
    self.instantiated_evaluators = None
    self.evaluators_by_name = None
    self.grouped_evaluators_cache = {}  # key=step, value=tuple(evaluators for this step, sorted priorities)
    # (item, attribute_name, original_value) triples recorded by preprocess()
    # so undo_preprocess() can restore the overridden attributes.
    self.old_values = []
|
||||
|
||||
def initialize(self):
    """Register the execute entry point and the parser-input cache, then build the evaluator groups."""
    # NOTE(review): the boolean flag's meaning comes from BaseService --
    # presumably marks execute as the main/public service method; confirm.
    self.sheerka.bind_service_method(self.execute, True)

    self.sheerka.cache_manager.register_cache(self.PARSERS_INPUTS_ENTRY, self.pi_cache, False)
    self.reset_evaluators()
|
||||
|
||||
def reset_evaluators(self):
    """
    Instantiate every configured evaluator once, keep only the enabled ones,
    and pre-compute the default priority grouping for each processing step.
    """
    enabled = [ev for ev in (cls() for cls in self.sheerka.evaluators) if ev.enabled]
    self.instantiated_evaluators = enabled
    self.evaluators_by_name = {ev.short_name: ev for ev in enabled}

    # Pre-fill the cache with the default (un-preprocessed) group per step.
    for step in EVALUATOR_STEPS:
        eligible = [ev for ev in enabled if step in ev.steps]
        self.grouped_evaluators_cache[f"{step}|__default"] = self.get_grouped_evaluators(eligible)
|
||||
|
||||
# @staticmethod
|
||||
# def get_grouped_evaluators(instantiated_evaluators, process_step):
|
||||
# """
|
||||
# For a given list of evaluators and a given process step
|
||||
# Computes
|
||||
# * the evaluators eligible for this step
|
||||
# * the list of sorted priorities for theses evaluators
|
||||
# :param instantiated_evaluators:
|
||||
# :param process_step:
|
||||
# :return:
|
||||
# """
|
||||
# grouped = {}
|
||||
# for evaluator in [e for e in instantiated_evaluators if e.enabled and process_step in e.steps]:
|
||||
# grouped.setdefault(evaluator.priority, []).append(evaluator)
|
||||
#
|
||||
# sorted_groups = sorted(grouped.keys(), reverse=True)
|
||||
# return grouped, sorted_groups
|
||||
|
||||
@staticmethod
|
||||
def get_grouped_evaluators(evaluators):
|
||||
"""
|
||||
For a given list of evaluators,
|
||||
group them by priorities
|
||||
sort the priorities
|
||||
:param evaluators:
|
||||
:return: tuple({priority: List of evaluators with this priority}, list of sorted priorities)
|
||||
"""
|
||||
grouped = {}
|
||||
for evaluator in evaluators:
|
||||
grouped.setdefault(evaluator.priority, []).append(evaluator)
|
||||
|
||||
sorted_groups = sorted(grouped.keys(), reverse=True)
|
||||
return grouped, sorted_groups
|
||||
|
||||
def preprocess(self, items, preprocess_definitions):
    """
    Apply attribute overrides from preprocess definitions to matching items.

    For each definition whose "preprocess_name" matches an item's name,
    set every remaining (var_name, value) pair on that item.  The original
    attribute values are recorded in self.old_values so undo_preprocess()
    can restore them later.

    :param items: parser or evaluator instances, modified in place
    :param preprocess_definitions: definition objects exposing get_value()
        and values()
    """
    for preprocess in preprocess_definitions:
        for item in items:
            if self.matches(item.name, preprocess.get_value("preprocess_name")):
                # NOTE(review): values() is called as a method returning a
                # dict here, while preprocess_old iterates `preprocess.values`
                # as an attribute -- confirm which protocol the definition
                # objects actually implement.
                for var_name, value in preprocess.values().items():
                    if var_name == "preprocess_name":
                        continue
                    # Only override attributes that already exist on the
                    # item; unknown names are silently ignored.
                    if hasattr(item, var_name):
                        self.old_values.append((item, var_name, getattr(item, var_name)))
                        setattr(item, var_name, value)
|
||||
|
||||
def preprocess_old(self, context, parsers_or_evaluators, mode):
    """
    Legacy preprocessing: filter and/or override parsers or evaluators
    according to the context, returning the (possibly reduced) collection.

    Two context mechanisms are honoured:
      * context.preprocess_parsers / context.preprocess_evaluators:
        a list of names restricting which items are kept;
      * context.preprocess: per-item attribute overrides (matched by "name").

    :param context: execution context carrying the preprocess settings
    :param parsers_or_evaluators: one item or an iterable of items
    :param mode: "parsers" or "evaluators"
    :return: a single item if a single item was passed in, else a list
    :raises ValueError: on an unknown mode, or when a requested name is
        not found among the supplied items
    """
    if mode == "parsers":
        # Nothing to do: neither overrides nor a name filter were given.
        if not context.preprocess and not context.preprocess_parsers:
            return parsers_or_evaluators
        items = context.preprocess_parsers
    elif mode == "evaluators":
        if not context.preprocess and not context.preprocess_evaluators:
            return parsers_or_evaluators
        items = context.preprocess_evaluators
    else:
        raise ValueError(mode)

    # Normalise to a list, remembering whether a single item was passed
    # so the same shape can be returned.
    if not hasattr(parsers_or_evaluators, "__iter__"):
        single_one = True
        parsers_or_evaluators = [parsers_or_evaluators]
    else:
        single_one = False

    # Keep only the named items, preserving the order of `items`.
    if items:
        res = []
        for item in items:
            for e in parsers_or_evaluators:
                if item == e.name:
                    res.append(e)
                    break
            else:
                raise ValueError(f"{item} not found.")
        parsers_or_evaluators = res

    # Apply attribute overrides; original values are saved in
    # self.old_values for undo_preprocess().
    if context.preprocess:
        for preprocess in context.preprocess:
            for e in parsers_or_evaluators:
                if self.matches(e.name, preprocess.get_value("name")):
                    # NOTE(review): `preprocess.values` is iterated as an
                    # attribute here, unlike preprocess() which calls
                    # values() -- confirm the definition objects' protocol.
                    for var_name in preprocess.values:
                        if var_name == "name":
                            continue
                        if hasattr(e, var_name):
                            self.old_values.append((e, var_name, getattr(e, var_name)))
                            setattr(e, var_name, preprocess.get_value(var_name))

    return parsers_or_evaluators[0] if single_one else parsers_or_evaluators
|
||||
|
||||
def get_evaluators(self, context, process_step):
    """
    Returns the list of evaluators to use for a specific test.

    Resolution order:
      1. no context customisation -> pre-computed default group for the step;
      2. only a name subset (context.preprocess_evaluators) -> cached group
         keyed by step + the requested names;
      3. attribute overrides (context.preprocess) -> apply them, then group.

    :param context: execution context carrying preprocess settings
    :param process_step: the pipeline step being executed
    :return: tuple({priority: evaluators}, sorted priorities) as produced
        by get_grouped_evaluators()
    """
    # Normal case, the evaluators are the default one
    if not context.preprocess_evaluators and not context.preprocess:
        return self.grouped_evaluators_cache[f"{process_step}|__default"]

    # First case, only use a subset of evaluators
    if context.preprocess_evaluators and not context.preprocess:
        key = str(process_step) + "|" + "|".join(context.preprocess_evaluators)
        try:
            return self.grouped_evaluators_cache[key]
        except KeyError:
            evaluators = [self.evaluators_by_name[e] for e in context.preprocess_evaluators]
            grouped = self.get_grouped_evaluators(evaluators)
            self.grouped_evaluators_cache[key] = grouped
            return grouped

    # final case, evaluators attributes are modified by the context
    # So first, get the modified evaluators
    evaluators = [self.evaluators_by_name[e] for e in
                  context.preprocess_evaluators] if context.preprocess_evaluators else self.instantiated_evaluators
    self.preprocess(evaluators, context.preprocess)
    evaluators = [e for e in evaluators if e.enabled]  # make sure they are still enabled
    # NOTE(review): this cache key encodes only the evaluator names, not the
    # attribute overrides applied above -- two contexts with the same
    # evaluator set but different overrides (e.g. different priorities)
    # would share a cache entry; confirm this is intended.
    key = str(process_step) + "|" + "|".join([e.name for e in evaluators if e.enabled])
    try:
        return self.grouped_evaluators_cache[key]
    except KeyError:
        grouped = self.get_grouped_evaluators(evaluators)
        self.grouped_evaluators_cache[key] = grouped
        return grouped
|
||||
|
||||
def get_parser_input(self, text, tokens=None):
|
||||
"""
|
||||
Returns new or existing parser input
|
||||
:param text:
|
||||
:param tokens:
|
||||
:param length:
|
||||
:return:
|
||||
"""
|
||||
|
||||
@@ -212,7 +330,7 @@ class SheerkaExecute(BaseService):
|
||||
self.pi_cache.put(text, pi)
|
||||
return pi
|
||||
|
||||
key = text or ParserInput.get_text_from_tokens(tokens)
|
||||
key = text or core.utils.get_text_from_tokens(tokens)
|
||||
pi = ParserInput(key, tokens)
|
||||
self.pi_cache.put(key, pi)
|
||||
return pi
|
||||
@@ -251,7 +369,7 @@ class SheerkaExecute(BaseService):
|
||||
|
||||
# group the parsers by priorities
|
||||
instantiated_parsers = [parser(sheerka=self.sheerka) for parser in self.sheerka.parsers.values()]
|
||||
instantiated_parsers = self.preprocess(context, instantiated_parsers)
|
||||
instantiated_parsers = self.preprocess_old(context, instantiated_parsers, "parsers")
|
||||
|
||||
grouped_parsers = {}
|
||||
for parser in [p for p in instantiated_parsers if p.enabled]:
|
||||
@@ -272,13 +390,12 @@ class SheerkaExecute(BaseService):
|
||||
|
||||
# if self.sheerka.log.isEnabledFor(logging.DEBUG):
|
||||
# debug_text = "'" + to_parse + "'" if isinstance(to_parse, str) \
|
||||
# else "'" + BaseParser.get_text_from_tokens(to_parse) + "' as tokens"
|
||||
# else "'" + core.utils.get_text_from_tokens(to_parse) + "' as tokens"
|
||||
# context.log(f"Parsing {debug_text}")
|
||||
|
||||
with context.push(BuiltinConcepts.PARSING,
|
||||
{"parser": parser.name},
|
||||
desc=f"Parsing using {parser.name}",
|
||||
logger=parser.verbose_log) as sub_context:
|
||||
desc=f"Parsing using {parser.name}") as sub_context:
|
||||
sub_context.add_inputs(to_parse=to_parse)
|
||||
res = parser.parse(sub_context, to_parse)
|
||||
if res is not None:
|
||||
@@ -318,27 +435,13 @@ class SheerkaExecute(BaseService):
|
||||
if not isinstance(return_values, list):
|
||||
return_values = [return_values]
|
||||
|
||||
# group the evaluators by priority and sort them
|
||||
# The first one to be applied will be the one with the highest priority
|
||||
grouped_evaluators = {}
|
||||
instantiated_evaluators = [e_class() for e_class in self.sheerka.evaluators]
|
||||
grouped_evaluators, sorted_priorities = self.get_evaluators(context, process_step)
|
||||
|
||||
# pre-process evaluators if needed
|
||||
instantiated_evaluators = self.preprocess(context, instantiated_evaluators)
|
||||
|
||||
for evaluator in [e for e in instantiated_evaluators if e.enabled and process_step in e.steps]:
|
||||
grouped_evaluators.setdefault(evaluator.priority, []).append(evaluator)
|
||||
|
||||
# order the groups by priority, the higher first
|
||||
sorted_priorities = sorted(grouped_evaluators.keys(), reverse=True)
|
||||
|
||||
# process
|
||||
iteration = 0
|
||||
while True:
|
||||
with context.push(process_step,
|
||||
{"iteration": iteration},
|
||||
desc=f"iteration #{iteration}",
|
||||
iteration=iteration) as iteration_context:
|
||||
{"step": process_step, "iteration": iteration},
|
||||
desc=f"iteration #{iteration}") as iteration_context:
|
||||
simple_digest = return_values[:]
|
||||
iteration_context.add_inputs(return_values=simple_digest)
|
||||
|
||||
@@ -348,13 +451,14 @@ class SheerkaExecute(BaseService):
|
||||
evaluated_items = []
|
||||
to_delete = []
|
||||
for evaluator in grouped_evaluators[priority]:
|
||||
evaluator = self.preprocess(context, evaluator.__class__()) # fresh copy
|
||||
evaluator.reset()
|
||||
|
||||
sub_context_desc = f"Evaluating using {evaluator.name} ({priority=})"
|
||||
with iteration_context.push(process_step,
|
||||
{"iteration": iteration, "evaluator": evaluator.name},
|
||||
desc=sub_context_desc,
|
||||
logger=evaluator.verbose_log) as sub_context:
|
||||
{"step": process_step,
|
||||
"iteration": iteration,
|
||||
"evaluator": evaluator.name},
|
||||
desc=sub_context_desc) as sub_context:
|
||||
sub_context.add_inputs(return_values=original_items)
|
||||
|
||||
# process evaluators that work on one simple return value at the time
|
||||
@@ -365,6 +469,8 @@ class SheerkaExecute(BaseService):
|
||||
if evaluator.matches(sub_context, item):
|
||||
|
||||
# init the evaluator is possible
|
||||
# KSI. 20201102 : Evaluators are now instantiated at startup,
|
||||
# Can we move this section into reset_evaluators()
|
||||
if hasattr(evaluator, "init_evaluator") and not evaluator.is_initialized:
|
||||
evaluator.init_evaluator(sub_context, original_items)
|
||||
|
||||
@@ -401,6 +507,7 @@ class SheerkaExecute(BaseService):
|
||||
# process evaluators that work on all return values
|
||||
else:
|
||||
if evaluator.matches(sub_context, original_items):
|
||||
|
||||
results = evaluator.eval(sub_context, original_items)
|
||||
if results is None:
|
||||
continue
|
||||
@@ -427,6 +534,8 @@ class SheerkaExecute(BaseService):
|
||||
# inc the iteration and continue
|
||||
iteration += 1
|
||||
|
||||
self.undo_preprocess()
|
||||
|
||||
return return_values
|
||||
|
||||
def execute(self, context, return_values, execution_steps):
|
||||
@@ -441,40 +550,30 @@ class SheerkaExecute(BaseService):
|
||||
for step in execution_steps:
|
||||
copy = return_values[:] if hasattr(return_values, "__iter__") else [return_values]
|
||||
with context.push(BuiltinConcepts.PROCESSING,
|
||||
{"step": step},
|
||||
step=step, iteration=0, desc=f"{step=}") as sub_context:
|
||||
{"step": step, "iteration": 0},
|
||||
desc=f"{step=}") as sub_context:
|
||||
|
||||
sub_context.add_inputs(return_values=copy)
|
||||
|
||||
if step == BuiltinConcepts.PARSING:
|
||||
return_values = self.call_parsers(sub_context, return_values)
|
||||
else:
|
||||
return_values = self.call_evaluators(sub_context, return_values, step)
|
||||
|
||||
if copy != return_values:
|
||||
has_changed = copy != return_values
|
||||
if has_changed:
|
||||
sub_context.log_result(return_values)
|
||||
|
||||
sub_context.add_values(return_values=return_values)
|
||||
sub_context.add_values(has_changed=has_changed)
|
||||
|
||||
return return_values
|
||||
|
||||
def preprocess(self, context, parsers_or_evaluators):
|
||||
if not context.preprocess:
|
||||
return parsers_or_evaluators
|
||||
def undo_preprocess(self):
|
||||
for item, var_name, value in self.old_values:
|
||||
setattr(item, var_name, value)
|
||||
|
||||
if not hasattr(parsers_or_evaluators, "__iter__"):
|
||||
single_one = True
|
||||
parsers_or_evaluators = [parsers_or_evaluators]
|
||||
else:
|
||||
single_one = False
|
||||
|
||||
for preprocess in context.preprocess:
|
||||
for e in parsers_or_evaluators:
|
||||
if self.matches(e.name, preprocess.get_value("name")):
|
||||
for var_name in preprocess.values:
|
||||
if var_name == "name":
|
||||
continue
|
||||
if hasattr(e, var_name):
|
||||
setattr(e, var_name, preprocess.get_value(var_name))
|
||||
return parsers_or_evaluators[0] if single_one else parsers_or_evaluators
|
||||
self.old_values.clear()
|
||||
|
||||
@staticmethod
|
||||
def matches(parser_or_evaluator_name, preprocessor_name):
|
||||
|
||||
Reference in New Issue
Block a user