First version of explain. Creating a new parser was a wrong approach. Need to reimplement
This commit is contained in:
+2
-1
@@ -4,4 +4,5 @@ venv
|
||||
__pycache__
|
||||
build
|
||||
prof
|
||||
tests/_concepts.txt
|
||||
tests/_concepts.txt
|
||||
tests/**/*result_test
|
||||
+7
-4
@@ -69,7 +69,10 @@ def concept ninety as 90
|
||||
ninety isa number
|
||||
def concept nineties from bnf ninety number where number < 10 as ninety + number
|
||||
nineties isa number
|
||||
#def concept hundreds from number1 hundred and number2 where number1 < 10 and number2 < 100 as number1 * 100 + number2
|
||||
#def concept one hundred as 100
|
||||
#one hundred isa number
|
||||
#hundreds isa number
|
||||
def concept hundreds1 from number hundred where number1 < 10 as number1 * 100
|
||||
def concept hundreds2 from number1 hundred and number2 where number1 < 10 and number2 < 100 as number1 * 100 + number2
|
||||
def concept one hundred as 100
|
||||
one hundred isa number
|
||||
hundreds1 isa number
|
||||
hundreds2 isa number
|
||||
def concept history as history()
|
||||
|
||||
@@ -31,9 +31,10 @@ def main(argv):
|
||||
|
||||
_in = core.utils.sysarg_to_string(args)
|
||||
result = sheerka.evaluate_user_input(_in)
|
||||
sheerka.print(result)
|
||||
|
||||
for res in result:
|
||||
logging.info(res)
|
||||
# for res in result:
|
||||
# logging.info(res)
|
||||
|
||||
return result[-1].status if len(result) > 0 else 1
|
||||
except getopt.GetoptError:
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
ReturnValue(who=evaluators.TooManySuccess, status=False, value=(21)__TOO_MANY_SUCCESS, message=None)
|
||||
@@ -58,6 +58,7 @@ class BuiltinConcepts(Enum):
|
||||
WHERE_CLAUSE_FAILED = "where clause failed" # failed to validate where clause during evaluation
|
||||
CHICKEN_AND_EGG = "chicken and egg" # infinite recursion when declaring concept
|
||||
ISA = "is a" # builtin concept to express that a concept is an instance of another one
|
||||
EXPLANATION = "explanation"
|
||||
|
||||
NODE = "node"
|
||||
GENERIC_NODE = "generic node"
|
||||
@@ -436,3 +437,17 @@ class NotForMeConcept(Concept):
|
||||
|
||||
def __repr__(self):
|
||||
return f"NotForMeConcept(source={self.body}, reason={self.get_prop('reason')})"
|
||||
|
||||
|
||||
class ExplanationConcept(Concept):
|
||||
def __init__(self, digest=None, command=None, title=None, instructions=None, execution_result=None):
|
||||
super().__init__(BuiltinConcepts.EXPLANATION,
|
||||
True,
|
||||
False,
|
||||
BuiltinConcepts.EXPLANATION)
|
||||
self.def_prop("digest", digest) # event digest
|
||||
self.def_prop("command", command) # explain command parameters
|
||||
self.def_prop("title", title) # a title to the explanation
|
||||
self.def_prop("instructions", instructions) # instructions for SheerkaPrint
|
||||
self.set_metadata_value(ConceptParts.BODY, execution_result) # list of results
|
||||
self.metadata.is_evaluated = True
|
||||
|
||||
@@ -2,7 +2,7 @@ import ast
|
||||
import logging
|
||||
|
||||
import core.ast.nodes
|
||||
from core.ast.nodes import CallNodeConcept, GenericNodeConcept
|
||||
from core.ast.nodes import CallNodeConcept
|
||||
from core.ast.visitors import UnreferencedNamesVisitor
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept
|
||||
|
||||
@@ -276,6 +276,10 @@ class Concept:
|
||||
def to_dict(self, props_to_use=None):
|
||||
"""
|
||||
Returns a dict representing 'self'
|
||||
to_dict() is used for serializing the definition of the concept
|
||||
You will not that it does not dump the actual values of the properties, nor the body
|
||||
|
||||
If you need a dictionary version of the Concept, use to_bag()
|
||||
:return:
|
||||
"""
|
||||
|
||||
@@ -368,6 +372,7 @@ class Concept:
|
||||
:return:
|
||||
"""
|
||||
self.values[metadata] = value
|
||||
return self
|
||||
|
||||
def get_metadata_value(self, metadata: ConceptParts):
|
||||
"""
|
||||
@@ -407,6 +412,20 @@ class Concept:
|
||||
def get_original_definition_hash(self):
|
||||
return self.original_definition_hash
|
||||
|
||||
def to_bag(self):
|
||||
"""
|
||||
Creates a dictionary with the useful properties of the concept
|
||||
It quicker to implement than creating the actual property mechanism with @property
|
||||
And it removes the visibility from the other attributes/methods
|
||||
"""
|
||||
bag = {}
|
||||
for prop in self.props:
|
||||
bag[prop] = self.get_prop(prop)
|
||||
bag["prop." + prop] = self.get_prop(prop)
|
||||
for prop in ("id", "name", "key", "body"):
|
||||
bag[prop] = getattr(self, prop)
|
||||
return bag
|
||||
|
||||
|
||||
class Property:
|
||||
"""
|
||||
|
||||
@@ -3,6 +3,7 @@ import time
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept
|
||||
from core.sheerka.Services.SheerkaExecute import NO_MATCH
|
||||
from core.sheerka_logger import get_logger
|
||||
from sdp.sheerkaDataProvider import Event
|
||||
|
||||
@@ -261,6 +262,74 @@ class ExecutionContext:
|
||||
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def _is_return_value(obj):
|
||||
return isinstance(obj, Concept) and obj.key == str(BuiltinConcepts.RETURN_VALUE)
|
||||
|
||||
def _at_least_one_success(self, return_values):
|
||||
status = False
|
||||
for ret_val in return_values:
|
||||
if not self._is_return_value(ret_val):
|
||||
return None
|
||||
status |= ret_val.status
|
||||
return status
|
||||
|
||||
def _all_success(self, return_values):
|
||||
status = True
|
||||
for ret_val in return_values:
|
||||
if not self._is_return_value(ret_val):
|
||||
return None
|
||||
status &= ret_val.status
|
||||
return status
|
||||
|
||||
def get_status(self):
|
||||
# In the function, I cannot use sheerka.isinstance() as self.sheerka may not be initialized
|
||||
# This is the case when ExecutionContext is deserialized
|
||||
|
||||
if "return_values" not in self.values:
|
||||
return None
|
||||
|
||||
if hasattr(self.values["return_values"], "__iter__"):
|
||||
values = self.values["return_values"]
|
||||
if len(values) == 0:
|
||||
return None
|
||||
|
||||
if isinstance(values, str):
|
||||
return "No Match" if values == NO_MATCH else values
|
||||
|
||||
if isinstance(values[0], dict):
|
||||
for result in values:
|
||||
if "return_value" not in result:
|
||||
return None
|
||||
if self._is_return_value(result["return_value"]):
|
||||
return result["return_value"].status
|
||||
return "No Match"
|
||||
else:
|
||||
return self._at_least_one_success(self.values["return_values"])
|
||||
|
||||
else:
|
||||
ret_val = self.values["return_values"]
|
||||
if not isinstance(ret_val, Concept) or not ret_val.key == str(BuiltinConcepts.RETURN_VALUE):
|
||||
return None
|
||||
return ret_val.status
|
||||
|
||||
def to_bag(self):
|
||||
"""
|
||||
Creates a dictionary with the useful properties of the concept
|
||||
It quicker to implement than creating the actual property mechanism with @property
|
||||
And it removes the visibility from the other attributes/methods
|
||||
"""
|
||||
bag = {}
|
||||
for k, v in self._bag.items():
|
||||
bag[k] = v
|
||||
bag["bag." + k] = v
|
||||
for prop in ("id", "who", "desc", "obj", "inputs", "values", "concepts"):
|
||||
bag[prop] = getattr(self, prop)
|
||||
bag["status"] = self.get_status()
|
||||
bag["elapsed"] = self.elapsed
|
||||
bag["digest"] = self.event.get_digest() if self.event else None
|
||||
return bag
|
||||
|
||||
@staticmethod
|
||||
def return_value_to_str(r):
|
||||
value = str(r.value)
|
||||
|
||||
@@ -86,7 +86,7 @@ class SheerkaDump:
|
||||
|
||||
while True:
|
||||
try:
|
||||
if h.event.user != self.sheerka.name:
|
||||
if h.result:
|
||||
self.sheerka.log.info(h)
|
||||
count += 1
|
||||
h = next(history)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
|
||||
import core.utils
|
||||
|
||||
NO_MATCH = "** No Match **"
|
||||
|
||||
class SheerkaExecute:
|
||||
"""
|
||||
@@ -159,7 +160,7 @@ class SheerkaExecute:
|
||||
evaluated_items.append(result)
|
||||
debug_result.append({"input": item, "return_value": result})
|
||||
else:
|
||||
debug_result.append({"input": item, "return_value": "** No Match **"})
|
||||
debug_result.append({"input": item, "return_value": NO_MATCH})
|
||||
sub_context.add_values(return_values=debug_result)
|
||||
|
||||
# process evaluators that work on all return values
|
||||
@@ -175,7 +176,7 @@ class SheerkaExecute:
|
||||
to_delete.extend(result.parents)
|
||||
sub_context.add_values(return_values=results)
|
||||
else:
|
||||
sub_context.add_values(return_values="** No Match **")
|
||||
sub_context.add_values(return_values=NO_MATCH)
|
||||
|
||||
return_values = evaluated_items
|
||||
return_values.extend([item for item in original_items if item not in to_delete])
|
||||
|
||||
@@ -2,7 +2,7 @@ from collections import namedtuple
|
||||
|
||||
from sdp.sheerkaDataProvider import Event
|
||||
|
||||
hist = namedtuple("History", "text status") # tests purposes only
|
||||
hist = namedtuple("HistoryTest", "text status") # tests purposes only
|
||||
|
||||
|
||||
class History:
|
||||
@@ -38,34 +38,23 @@ class History:
|
||||
if self._status:
|
||||
return self._status
|
||||
|
||||
if not self.result or "return_values" not in self.result.values:
|
||||
return
|
||||
|
||||
if hasattr(self.result.values["return_values"], "__iter__"):
|
||||
if len(self.result.values["return_values"]) != 1:
|
||||
self._status = False
|
||||
return self._status
|
||||
else:
|
||||
self._status = self.result.values["return_values"][0].status
|
||||
return self._status
|
||||
else:
|
||||
self._status = self.result.values["return_values"].status
|
||||
return self._status
|
||||
self._status = self.result.get_status() if self.result else None
|
||||
return self._status
|
||||
|
||||
|
||||
class SheerkaHistoryManager:
|
||||
def __init__(self, sheerka):
|
||||
self.sheerka = sheerka
|
||||
|
||||
def history(self, depth_or_digest, start):
|
||||
def history(self, depth, start):
|
||||
"""
|
||||
Load history
|
||||
:param depth_or_digest: number of items or digest
|
||||
:param depth: number of items
|
||||
:param start:
|
||||
:return:
|
||||
"""
|
||||
|
||||
events = list(self.sheerka.sdp.load_events(depth_or_digest, start))
|
||||
events = list(self.sheerka.sdp.load_events(depth, start))
|
||||
for event in events:
|
||||
try:
|
||||
result = self.sheerka.sdp.load_result(event.get_digest())
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import List
|
||||
|
||||
from sdp.sheerkaSerializer import Serializer
|
||||
|
||||
|
||||
@dataclass
|
||||
class Variable:
|
||||
"""
|
||||
Variable to store
|
||||
"""
|
||||
event_id: str # event where the variable is modified
|
||||
who: str # who is the modifier
|
||||
key: str # key of the variable
|
||||
value: object # value
|
||||
parents: List[str] # previous references of the variable (Note that there should be only one parent)
|
||||
|
||||
def get_key(self):
|
||||
return f"{self.who}.{self.key}"
|
||||
|
||||
|
||||
class SheerkaVariableManager:
|
||||
VARIABLES_ENTRY = "All_Variables" # to store all the concepts
|
||||
|
||||
def __init__(self, sheerka):
|
||||
self.sheerka = sheerka
|
||||
|
||||
def record(self, context, who, key, value):
|
||||
"""Persist a variable"""
|
||||
# first check if there is a previous version of the variable
|
||||
try:
|
||||
old = self.sheerka.sdp.get(self.VARIABLES_ENTRY, who + "." + key)
|
||||
if old.value == value:
|
||||
return
|
||||
|
||||
parent = getattr(old, Serializer.ORIGIN)
|
||||
except IndexError:
|
||||
parent = None
|
||||
|
||||
variable = Variable(context.event.get_digest(), who, key, value, [parent] if parent else None)
|
||||
self.sheerka.sdp.set(context.event.get_digest(), self.VARIABLES_ENTRY, variable, use_ref=True)
|
||||
|
||||
def load(self, who, key):
|
||||
variable = self.sheerka.sdp.get_safe(self.VARIABLES_ENTRY, who + "." + key)
|
||||
if variable is None:
|
||||
return None
|
||||
|
||||
return variable.value
|
||||
|
||||
def delete(self, context, who, key):
|
||||
self.sheerka.sdp.remove(
|
||||
context.event.get_digest(),
|
||||
self.VARIABLES_ENTRY,
|
||||
lambda _key, _var: _key == who + "." + key)
|
||||
@@ -1,3 +1,7 @@
|
||||
import logging
|
||||
|
||||
import core.builtin_helpers
|
||||
import core.utils
|
||||
from core.builtin_concepts import BuiltinConcepts, ErrorConcept, ReturnValueConcept, BuiltinErrors, BuiltinUnique, \
|
||||
UnknownConcept
|
||||
from core.concept import Concept, ConceptParts, PROPERTIES_FOR_NEW
|
||||
@@ -9,13 +13,10 @@ from core.sheerka.Services.SheerkaExecute import SheerkaExecute
|
||||
from core.sheerka.Services.SheerkaHistoryManager import SheerkaHistoryManager
|
||||
from core.sheerka.Services.SheerkaModifyConcept import SheerkaModifyConcept
|
||||
from core.sheerka.Services.SheerkaSetsManager import SheerkaSetsManager
|
||||
from sdp.sheerkaDataProvider import SheerkaDataProvider, Event
|
||||
import core.utils
|
||||
import core.builtin_helpers
|
||||
|
||||
from core.sheerka.Services.SheerkaVariableManager import SheerkaVariableManager
|
||||
from core.sheerka_logger import console_handler
|
||||
|
||||
import logging
|
||||
from printer.SheerkaPrinter import SheerkaPrinter
|
||||
from sdp.sheerkaDataProvider import SheerkaDataProvider, Event
|
||||
|
||||
CONCEPT_LEXER_PARSER_CLASS = "parsers.BnfNodeParser.BnfNodeParser"
|
||||
BNF_PARSER_CLASS = "parsers.BnfParser.BnfParser"
|
||||
@@ -93,6 +94,8 @@ class Sheerka(Concept):
|
||||
self.sets_handler = SheerkaSetsManager(self)
|
||||
self.evaluate_concept_handler = SheerkaEvaluateConcept(self)
|
||||
self.history_handler = SheerkaHistoryManager(self)
|
||||
self.printer_handler = SheerkaPrinter(self)
|
||||
self.variable_handler = SheerkaVariableManager(self)
|
||||
|
||||
self.during_restore = False
|
||||
self._builtins_classes_cache = None
|
||||
@@ -127,7 +130,7 @@ class Sheerka(Concept):
|
||||
|
||||
exec_context.add_values(return_values=res)
|
||||
if not self.skip_builtins_in_db:
|
||||
self.sdp.save_result(exec_context)
|
||||
self.sdp.save_result(exec_context, is_admin=True)
|
||||
self.init_log.debug(f"Sheerka successfully initialized")
|
||||
|
||||
except IOError as e:
|
||||
@@ -299,9 +302,26 @@ class Sheerka(Concept):
|
||||
# if len(ret) == 1 and ret[0].status and self.isinstance(ret[0].value, BuiltinConcepts.NEW_CONCEPT):
|
||||
# with open(CONCEPTS_FILE, "a") as f:
|
||||
# f.write(text + "\n")
|
||||
|
||||
return ret
|
||||
|
||||
def print(self, result, instructions=None):
|
||||
"""
|
||||
Print the result to output
|
||||
:param result:
|
||||
:param instructions:
|
||||
:return:
|
||||
"""
|
||||
self.printer_handler.print(result, instructions)
|
||||
|
||||
def record(self, context, who, key, value):
|
||||
return self.variable_handler.record(context, who, key, value)
|
||||
|
||||
def load(self, who, key):
|
||||
return self.variable_handler.load(who, key)
|
||||
|
||||
def delete(self, context, who, key):
|
||||
return self.variable_handler.delete(context, who, key)
|
||||
|
||||
def execute(self, execution_context, return_values, execution_steps):
|
||||
"""
|
||||
Executes process for all initial contexts
|
||||
@@ -639,12 +659,27 @@ class Sheerka(Concept):
|
||||
|
||||
return self.value(body_to_use)
|
||||
|
||||
def value_by_concept(self, obj, concept):
|
||||
if obj is None:
|
||||
return None
|
||||
|
||||
if not isinstance(obj, Concept):
|
||||
return None
|
||||
|
||||
if isinstance(concept, tuple) and obj.key in [str(key) for key in concept]:
|
||||
return obj
|
||||
|
||||
if obj.key == str(concept):
|
||||
return obj
|
||||
|
||||
return self.value_by_concept(obj.body, concept)
|
||||
|
||||
def get_error(self, obj):
|
||||
if isinstance(obj, Concept) and obj.metadata.is_builtin and obj.key in BuiltinErrors:
|
||||
return obj
|
||||
|
||||
if isinstance(obj, list):
|
||||
return obj
|
||||
return obj
|
||||
|
||||
if self.isinstance(obj, BuiltinConcepts.RETURN_VALUE):
|
||||
if obj.status:
|
||||
|
||||
+30
-4
@@ -46,6 +46,8 @@ class TokenKind(Enum):
|
||||
TILDE = "tilde" # ~
|
||||
UNDERSCORE = "underscore" # _
|
||||
DEGREE = "degree" # °
|
||||
WORD = "word"
|
||||
EQUALSEQUALS = "=="
|
||||
|
||||
|
||||
@dataclass()
|
||||
@@ -99,12 +101,13 @@ class Tokenizer:
|
||||
|
||||
KEYWORDS = set(x.value for x in Keywords)
|
||||
|
||||
def __init__(self, text):
|
||||
def __init__(self, text, parse_word=False):
|
||||
self.text = text
|
||||
self.text_len = len(text)
|
||||
self.column = 1
|
||||
self.line = 1
|
||||
self.i = 0
|
||||
self.parse_word = parse_word
|
||||
|
||||
def __iter__(self):
|
||||
|
||||
@@ -175,9 +178,14 @@ class Tokenizer:
|
||||
self.i += 1
|
||||
self.column += 1
|
||||
elif c == "=":
|
||||
yield Token(TokenKind.EQUALS, "=", self.i, self.line, self.column)
|
||||
self.i += 1
|
||||
self.column += 1
|
||||
if self.i + 1 < self.text_len and self.text[self.i + 1] == "=":
|
||||
yield Token(TokenKind.EQUALSEQUALS, "==", self.i, self.line, self.column)
|
||||
self.i += 2
|
||||
self.column += 2
|
||||
else:
|
||||
yield Token(TokenKind.EQUALS, "=", self.i, self.line, self.column)
|
||||
self.i += 1
|
||||
self.column += 1
|
||||
elif c == " " or c == "\t":
|
||||
whitespace = self.eat_whitespace(self.i)
|
||||
yield Token(TokenKind.WHITESPACE, whitespace, self.i, self.line, self.column)
|
||||
@@ -270,6 +278,11 @@ class Tokenizer:
|
||||
yield Token(TokenKind.CONCEPT, (name, id), self.i, self.line, self.column)
|
||||
self.i += length + 2
|
||||
self.column += length + 2
|
||||
elif self.parse_word and (c.isalpha() or c.isdigit()):
|
||||
word = self.eat_word(self.i)
|
||||
yield Token(TokenKind.WORD, word, self.i, self.line, self.column)
|
||||
self.i += len(word)
|
||||
self.column += len(word)
|
||||
elif c.isalpha() or c == "_":
|
||||
identifier = self.eat_identifier(self.i)
|
||||
token_type = TokenKind.KEYWORD if identifier in self.KEYWORDS else TokenKind.IDENTIFIER
|
||||
@@ -419,3 +432,16 @@ class Tokenizer:
|
||||
1 if lines_count > 0 else start_column + len(result))
|
||||
|
||||
return result, lines_count
|
||||
|
||||
def eat_word(self, start):
|
||||
result = self.text[start]
|
||||
i = start + 1
|
||||
while i < self.text_len:
|
||||
c = self.text[i]
|
||||
if c.isalpha() or c.isdigit():
|
||||
result += c
|
||||
i += 1
|
||||
else:
|
||||
break
|
||||
|
||||
return result
|
||||
|
||||
+1
-11
@@ -176,7 +176,7 @@ def product(a, b):
|
||||
res = []
|
||||
for item_b in b:
|
||||
for item_a in a:
|
||||
#items = item_a + [item_b]
|
||||
# items = item_a + [item_b]
|
||||
items = item_a[:]
|
||||
if hasattr(item_b, "__iter__"):
|
||||
items.extend(item_b)
|
||||
@@ -235,16 +235,6 @@ def escape_char(text, to_escape):
|
||||
return res
|
||||
|
||||
|
||||
def pp(items):
|
||||
if not hasattr(items, "__iter__"):
|
||||
return str(items)
|
||||
|
||||
if len(items) == 0:
|
||||
return str(items)
|
||||
|
||||
return " \n" + " \n".join(str(item) for item in items)
|
||||
|
||||
|
||||
def decode_enum(enum_repr: str):
|
||||
"""
|
||||
Tries to transform ClassName.Name into an enum
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
from typing import List
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts, ParserResultConcept
|
||||
from core.sheerka.ExecutionContext import ExecutionContext
|
||||
from evaluators.BaseEvaluator import OneReturnValueEvaluator
|
||||
from parsers.ExplainParser import ExplanationNode, FilterNode, RecurseDefNode, FormatLNode, FormatDNode
|
||||
from parsers.ExpressionParser import ExpressionVisitor, IsaNode
|
||||
from printer.SheerkaPrinter import FormatInstructions
|
||||
|
||||
|
||||
class ExplainExpressionVisitor(ExpressionVisitor):
|
||||
def __init__(self):
|
||||
self.instructions = FormatInstructions()
|
||||
|
||||
def visit_RecurseDefNode(self, expr_node):
|
||||
self.instructions.set_recurse("children", expr_node.depth)
|
||||
|
||||
def visit_FormatLNode(self, expr_node):
|
||||
self.instructions.set_format_l(ExecutionContext, expr_node.template)
|
||||
|
||||
|
||||
class ExplainEvaluator(OneReturnValueEvaluator):
|
||||
NAME = "Explain"
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(self.NAME, [BuiltinConcepts.EVALUATION], 60)
|
||||
|
||||
def get_event_digest(self, sheerka, explanation_node):
|
||||
if explanation_node.digest and sheerka.sdp.has_result(explanation_node.digest):
|
||||
return explanation_node.digest
|
||||
|
||||
if not explanation_node.digest and not explanation_node.record_digest:
|
||||
# use a previous digest if found
|
||||
digest = sheerka.load(self.name, "digest")
|
||||
if digest is not None:
|
||||
return digest
|
||||
|
||||
start = 0
|
||||
while True:
|
||||
events = list(sheerka.sdp.load_events(5, start))
|
||||
if not events:
|
||||
break
|
||||
|
||||
for event in events:
|
||||
if not sheerka.sdp.has_result(event.get_digest()):
|
||||
continue
|
||||
if not explanation_node.digest or explanation_node.digest == event.message:
|
||||
# maybe explanation_node.digest is not a real digest, but the command we want to explain
|
||||
return event.get_digest()
|
||||
|
||||
start += 5
|
||||
if start > 20:
|
||||
break
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def get_execution_result(sheerka, digest):
|
||||
if digest is None:
|
||||
# the test is done here to ease the unit tests
|
||||
return None
|
||||
return [sheerka.sdp.load_result(digest)]
|
||||
|
||||
@staticmethod
|
||||
def get_instructions(filter_node: FilterNode):
|
||||
instructions = FormatInstructions()
|
||||
for directive in filter_node.directives:
|
||||
if isinstance(directive, RecurseDefNode):
|
||||
instructions.set_recurse("children", directive.depth)
|
||||
elif isinstance(directive, FormatLNode):
|
||||
instructions.set_format_l(ExecutionContext, directive.template)
|
||||
elif isinstance(directive, FormatDNode):
|
||||
instructions.add_format_d(IsaNode(ExecutionContext), directive.properties)
|
||||
return instructions
|
||||
|
||||
@staticmethod
|
||||
def get_title(filter_node):
|
||||
return "<title>"
|
||||
|
||||
def matches(self, context, return_value):
|
||||
if not return_value.status:
|
||||
return False
|
||||
|
||||
if not isinstance(return_value.value, ParserResultConcept):
|
||||
return False
|
||||
|
||||
return isinstance(return_value.value.value, ExplanationNode)
|
||||
|
||||
def eval(self, context, return_value):
|
||||
sheerka = context.sheerka
|
||||
explanation_node = return_value.value.value
|
||||
|
||||
if explanation_node.digest and not explanation_node.record_digest:
|
||||
context.log(f"Deleting recorded digest")
|
||||
sheerka.delete(context, self.name, "digest")
|
||||
|
||||
digest = self.get_event_digest(sheerka, explanation_node)
|
||||
executions_results = self.get_execution_result(sheerka, digest)
|
||||
if executions_results is None and not digest:
|
||||
res = sheerka.new(BuiltinConcepts.ERROR, body=f"No result found (digest={explanation_node.digest})")
|
||||
|
||||
else:
|
||||
# record the digest if needed
|
||||
if explanation_node.record_digest:
|
||||
context.log(f"Recording digest '{digest}'")
|
||||
sheerka.record(context, self.name, "digest", digest)
|
||||
|
||||
filter_nodes = explanation_node.expr.filters
|
||||
global_instructions = self.get_instructions(filter_nodes[0])
|
||||
if len(filter_nodes) == 1:
|
||||
filtered = [[]]
|
||||
self.filter(executions_results, filter_nodes, filtered)
|
||||
res = sheerka.new(BuiltinConcepts.EXPLANATION,
|
||||
digest=digest,
|
||||
command=explanation_node.command,
|
||||
title="<all>",
|
||||
body=filtered[0],
|
||||
instructions=global_instructions)
|
||||
else:
|
||||
res = []
|
||||
filter_nodes = filter_nodes[1:] # remove the first filter_node (which always returns True)
|
||||
filtered = []
|
||||
for i in range(len(filter_nodes)):
|
||||
filtered.append([])
|
||||
self.filter(executions_results, filter_nodes, filtered)
|
||||
for i, filter_node in enumerate(filter_nodes):
|
||||
instructions = global_instructions.clone().merge(self.get_instructions(filter_node))
|
||||
res.append(sheerka.new(BuiltinConcepts.EXPLANATION,
|
||||
digest=digest,
|
||||
command=explanation_node.command,
|
||||
title=self.get_title(filter_node),
|
||||
body=filtered[i],
|
||||
instructions=instructions))
|
||||
|
||||
if len(res) == 1:
|
||||
res = res[0]
|
||||
|
||||
return sheerka.ret(self.name, not sheerka.isinstance(res, BuiltinConcepts.ERROR), res, parents=[return_value])
|
||||
|
||||
def filter(self, executions_results, filter_nodes: List[FilterNode], res):
|
||||
|
||||
for execution_result in executions_results:
|
||||
for i, filter_node in enumerate(filter_nodes):
|
||||
if filter_node.expr.eval(execution_result):
|
||||
res[i].append(execution_result)
|
||||
|
||||
if execution_result.children:
|
||||
self.filter(execution_result.children, filter_nodes, res)
|
||||
|
||||
return res
|
||||
@@ -469,6 +469,50 @@ class CNC(CN):
|
||||
return txt + ")"
|
||||
|
||||
|
||||
class UTN(HelperWithPos):
|
||||
"""
|
||||
Tester class for UnrecognizedTokenNode
|
||||
compare the source, and start, end if defined
|
||||
"""
|
||||
|
||||
def __init__(self, source, start=None, end=None):
|
||||
"""
|
||||
:param concept: Concept or concept_key (only the key is used anyway)
|
||||
:param start:
|
||||
:param end:
|
||||
:param source:
|
||||
"""
|
||||
super().__init__(start, end)
|
||||
self.source = source
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
|
||||
if isinstance(other, UnrecognizedTokensNode):
|
||||
return self.start == other.start and \
|
||||
self.end == other.end and \
|
||||
self.source == other.source
|
||||
|
||||
if not isinstance(other, UTN):
|
||||
return False
|
||||
|
||||
return self.start == other.start and \
|
||||
self.end == other.end and \
|
||||
self.source == other.source
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.source, self.start, self.end))
|
||||
|
||||
def __repr__(self):
|
||||
txt = f"UTN( source='{self.source}'"
|
||||
if self.start is not None:
|
||||
txt += f", start={self.start}"
|
||||
if self.end is not None:
|
||||
txt += f", end={self.end}"
|
||||
return txt + ")"
|
||||
|
||||
|
||||
class BaseNodeParser(BaseParser):
|
||||
def __init__(self, name, priority, **kwargs):
|
||||
super().__init__(name, priority)
|
||||
@@ -623,47 +667,3 @@ class BaseNodeParser(BaseParser):
|
||||
return token.value.value
|
||||
else:
|
||||
return token.value
|
||||
|
||||
|
||||
class UTN(HelperWithPos):
|
||||
"""
|
||||
Tester class for UnrecognizedTokenNode
|
||||
compare the source, and start, end if defined
|
||||
"""
|
||||
|
||||
def __init__(self, source, start=None, end=None):
|
||||
"""
|
||||
:param concept: Concept or concept_key (only the key is used anyway)
|
||||
:param start:
|
||||
:param end:
|
||||
:param source:
|
||||
"""
|
||||
super().__init__(start, end)
|
||||
self.source = source
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
|
||||
if isinstance(other, UnrecognizedTokensNode):
|
||||
return self.start == other.start and \
|
||||
self.end == other.end and \
|
||||
self.source == other.source
|
||||
|
||||
if not isinstance(other, UTN):
|
||||
return False
|
||||
|
||||
return self.start == other.start and \
|
||||
self.end == other.end and \
|
||||
self.source == other.source
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.source, self.start, self.end))
|
||||
|
||||
def __repr__(self):
|
||||
txt = f"UTN( source='{self.source}'"
|
||||
if self.start is not None:
|
||||
txt += f", start={self.start}"
|
||||
if self.end is not None:
|
||||
txt += f", end={self.end}"
|
||||
return txt + ")"
|
||||
|
||||
+166
-3
@@ -1,11 +1,12 @@
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
|
||||
import core.utils
|
||||
from core.builtin_concepts import BuiltinConcepts, ParserResultConcept
|
||||
from core.concept import Concept
|
||||
from core.tokenizer import TokenKind, Keywords, Token, Tokenizer
|
||||
from core.sheerka.ExecutionContext import ExecutionContext
|
||||
from core.sheerka_logger import get_logger
|
||||
import core.utils
|
||||
import logging
|
||||
from core.tokenizer import TokenKind, Keywords, Token, Tokenizer
|
||||
|
||||
|
||||
@dataclass()
|
||||
@@ -65,6 +66,11 @@ class UnexpectedTokenErrorNode(ErrorNode):
|
||||
return hash((self.message, self.token, self.expected_tokens))
|
||||
|
||||
|
||||
@dataclass()
|
||||
class UnexpectedEof(ErrorNode):
|
||||
message: str
|
||||
|
||||
|
||||
class BaseParser:
|
||||
PREFIX = "parsers."
|
||||
|
||||
@@ -203,3 +209,160 @@ class BaseParser:
|
||||
value = switcher.get(token.type, lambda t: t.value)(token)
|
||||
res += value
|
||||
return res
|
||||
|
||||
|
||||
class BaseTokenizerIterParser(BaseParser):
|
||||
|
||||
def __init__(self, name, priority, parse_word=False, none_on_eof=True):
|
||||
super().__init__(name, priority)
|
||||
self.lexer_iter = None
|
||||
self._current = None
|
||||
self.context: ExecutionContext = None
|
||||
self.text = None
|
||||
self.sheerka = None
|
||||
|
||||
self.parse_word = parse_word
|
||||
self.none_on_eof = none_on_eof
|
||||
|
||||
def reset_parser(self, context, text):
|
||||
self.context = context
|
||||
self.sheerka = context.sheerka
|
||||
|
||||
self.text = text
|
||||
self.lexer_iter = iter(Tokenizer(text, self.parse_word))
|
||||
self._current = None
|
||||
|
||||
self.next_token()
|
||||
|
||||
def add_error(self, error, next_token=True):
|
||||
self.error_sink.append(error)
|
||||
if next_token:
|
||||
self.next_token()
|
||||
return error
|
||||
|
||||
def get_token(self) -> Token:
|
||||
return self._current
|
||||
|
||||
def next_token(self, skip_whitespace=True):
|
||||
try:
|
||||
self._current = next(self.lexer_iter)
|
||||
|
||||
if self.none_on_eof and self._current.type == TokenKind.EOF:
|
||||
self._current = None
|
||||
return False
|
||||
|
||||
if skip_whitespace:
|
||||
while self._current.type == TokenKind.WHITESPACE or self._current.type == TokenKind.NEWLINE:
|
||||
self._current = next(self.lexer_iter)
|
||||
except StopIteration:
|
||||
self._current = None
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class BaseSplitIterParser(BaseParser):
    """
    Base parser that tokenizes its own input with a small splitter:
    whitespace separates words (and is discarded); '=', '==', '(', ')' and ','
    are emitted as their own tokens; a quote groups everything up to the
    matching quote into one word; backslash escapes the next character.
    """

    def __init__(self, name, priority, none_on_eof=False):
        super().__init__(name, priority)
        self._current = None  # token under the cursor
        self.context: ExecutionContext = None
        self.text = None  # raw input string being split
        self.sheerka = None
        self.iter_split = None  # generator created by split()
        self.split_and_eat_tokens = (" ", "\n", "\t")  # separators that are discarded
        self.split_and_keep_tokens = ("=", ")", "(", ",")  # separators emitted as tokens
        self.split_tokens = self.split_and_eat_tokens + self.split_and_keep_tokens

        self.none_on_eof = none_on_eof  # current token is set to None when EOF is hit

    def parse_word(self, c, index, line, column):
        """
        Consumes one word starting at position *index* (whose character is *c*)
        and returns (buffer, new_index, new_line, new_column).

        A backslash escapes the next character; an unescaped quote switches the
        end condition to the matching quote, so separators can appear inside.
        """
        end = self.split_tokens
        escaped = False
        buffer = ""

        while escaped or c not in end:
            if not escaped and c == "\\":
                escaped = True  # take the next character literally
            elif not escaped and c in ("'", '"'):
                end = [c]  # from now on only the matching quote ends the word
            else:
                buffer += c
                escaped = False

            index, column = index + 1, column + 1
            if index == len(self.text):
                break
            c = self.text[index]

        # the terminating character is a newline: move to the next line
        if c == "\n":
            line += 1
            column = 0

        # skip the terminating character unless it must be emitted as its own
        # token ('not in' so a closing quote is consumed as well)
        if c not in self.split_and_keep_tokens:
            index, column = index + 1, column + 1

        return buffer, index, line, column

    def split(self):
        """
        Generator yielding Token objects for self.text, always terminated by
        one EOF token.
        """
        index = 0
        line = 1
        column = 1

        while index < len(self.text):
            c = self.text[index]

            if c == "=":
                # lookahead to distinguish '==' from '='
                if index + 1 < len(self.text) and self.text[index + 1] == "=":
                    yield Token(TokenKind.EQUALSEQUALS, "==", index, line, column)
                    index, column = index + 2, column + 2
                else:
                    yield Token(TokenKind.EQUALS, "=", index, line, column)
                    index, column = index + 1, column + 1
            elif c == ")":
                yield Token(TokenKind.RPAR, ")", index, line, column)
                index, column = index + 1, column + 1
            elif c == "(":
                yield Token(TokenKind.LPAR, "(", index, line, column)
                index, column = index + 1, column + 1
            elif c == ",":
                yield Token(TokenKind.COMMA, ",", index, line, column)
                index, column = index + 1, column + 1
            else:
                # anything else (including whitespace) is handled by parse_word;
                # an empty buffer (pure separator) yields no WORD token
                buffer, end_index, end_line, end_column = self.parse_word(c, index, line, column)
                if buffer:
                    yield Token(TokenKind.WORD, buffer, index, line, column)
                index, line, column = end_index, end_line, end_column

        yield Token(TokenKind.EOF, "<eof>", index, line, column)

    def reset_parser(self, context, text):
        """Rebinds the parser to a new context/input and restarts the token stream."""
        self.context = context
        self.sheerka = context.sheerka if context else None

        self.text = text
        self._current = None
        self.iter_split = iter(self.split())

    def add_error(self, error, next_token=True):
        """
        Records *error* in the error sink.

        :param next_token: when True, also advances past the offending token
        :return: the error, so callers can 'return self.add_error(...)'
        """
        self.error_sink.append(error)
        if next_token:
            self.next_token()
        return error

    def get_token(self) -> Token:
        """Returns the token currently under the cursor (None once exhausted)."""
        return self._current

    def next_token(self):
        """
        Advances the cursor.

        :return: True when a regular token is available; False on EOF (the EOF
                 token is replaced by None only when none_on_eof is set) or
                 when the stream is exhausted
        """
        try:
            self._current = next(self.iter_split)
            if self._current.type == TokenKind.EOF:
                if self.none_on_eof:
                    self._current = None
                return False
        except StopIteration:
            self._current = None
            return False

        return True
||||
|
||||
@@ -0,0 +1,361 @@
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Dict
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.tokenizer import LexerError, Token
|
||||
from parsers.BaseParser import Node, UnexpectedTokenErrorNode, BaseSplitIterParser, UnexpectedEof, ErrorNode
|
||||
from parsers.ExpressionParser import ExprNode, TrueNode, PropertyEqualsNode, PropertyContainsNode, OrNode, AndNode
|
||||
|
||||
|
||||
@dataclass()
class ValueErrorNode(ErrorNode):
    """
    When the parsed value has an incorrect type or value
    (e.g. a recursion depth that is not an integer).
    """
    message: str  # human readable description of the problem
    token: Token  # token when the error is detected
||||
|
||||
|
||||
@dataclass()
class MultipleDigestError(ErrorNode):
    """Error emitted when more than one digest is given to the 'explain' command."""
    message: str  # human readable description of the problem
    token: Token  # token when the error is detected
||||
|
||||
|
||||
@dataclass()
class ExplanationNode(Node):
    """Result of parsing an 'explain' command."""
    digest: str  # digest of the event to explain
    command: str  # original explain command
    expr: ExprNode = None  # union of the filters (and their directives) parsed from the options
    record_digest: bool = False  # True when the digest was given explicitly with -d/--digest
||||
|
||||
|
||||
@dataclass
class FilterNode(ExprNode):
    """
    Wraps a predicate together with the display directives
    (RecurseDefNode / FormatLNode / FormatDNode) attached to it.
    """
    expr: ExprNode  # the wrapped predicate
    directives: List[ExprNode] = field(default_factory=list)  # display directives for this filter

    def eval(self, obj):
        # delegate the match to the wrapped predicate
        return self.expr.eval(obj)
||||
|
||||
|
||||
@dataclass
class RecurseDefNode(ExprNode):
    """
    Directive used to define the depth of the recursion.
    """
    depth: int  # maximum recursion depth
||||
|
||||
|
||||
@dataclass
class FormatLNode(ExprNode):
    """
    Define the template to use for ExecutionContext when printed in line.
    """
    template: str  # line template, e.g. "{desc} ({status})"
||||
|
||||
|
||||
@dataclass
class FormatDNode(ExprNode):
    """
    Defines the properties to display, and their format.
    """
    properties: Dict[str, str]  # property name -> format template
||||
|
||||
|
||||
@dataclass
class UnionNode(ExprNode):
    """
    OR-union of several filters: eval() is True as soon as one filter matches,
    False for an empty union.
    (The original docstring was copy-pasted from FormatLNode.)
    """
    filters: List[FilterNode]  # the filters combined with OR

    def eval(self, obj):
        # Bug fix: the original tested `len(self.filters) == 0` twice (the
        # single-filter shortcut was unreachable) and its loop started at
        # filters[1:] with res = False, so filters[0] was never evaluated.
        return any(f.eval(obj) for f in self.filters)
||||
|
||||
|
||||
class ExplainParser(BaseSplitIterParser):
    """
    Parser for the 'explain' command, e.g.:

        explain <digest> -f prop == value -r 2 --format_l "{desc}"

    On success it produces an ExplanationNode that carries the digest, the
    original command, and a UnionNode of FilterNode predicates with their
    display directives.
    """

    def __init__(self, **kwargs):
        # NOTE(review): extra kwargs are accepted but silently ignored --
        # confirm whether they should be forwarded to BaseSplitIterParser.
        super().__init__("Explain", 81, none_on_eof=True)

    def parse_explain(self):
        """
        Entry point: parses the whole 'explain ...' command.

        :return: an ExplanationNode on success; BuiltinConcepts.IS_EMPTY /
                 NOT_FOR_ME markers when the input is empty or not an explain
                 command; None when an error was recorded in the error sink.
        """
        token = self.get_token()
        if token is None:
            return BuiltinConcepts.IS_EMPTY

        if token.value != 'explain':
            self.add_error(UnexpectedTokenErrorNode("", token, ["explain"]))
            return BuiltinConcepts.NOT_FOR_ME

        digest = ""
        record_digest = False
        # start with an always-true filter so directives given before any -f
        # have a filter to attach to
        expr_node = UnionNode([FilterNode(TrueNode(), [])])
        self.next_token()
        while True:
            # no need to continue when error
            if self.has_error:
                return None

            token = self.get_token()
            if token is None:
                break

            if token.value == "-f" or token.value == "--filter":
                self.next_token()
                expr_node.filters.append(self.parse_filter())
            elif token.value in ("-r", "--recurse"):
                # directives always attach to the most recent filter
                self.next_token()
                expr_node.filters[-1].directives.append(self.parse_recurse())
            elif token.value == "--format_l":
                self.next_token()
                expr_node.filters[-1].directives.append(self.parse_format_l())
            elif token.value == "--format_d":
                self.next_token()
                expr_node.filters[-1].directives.append(self.parse_format_d())
            elif token.value in ("-d", "--digest"):
                self.next_token()
                digest = self.parse_digest(digest)
                record_digest = True
            elif token.value.startswith("-"):
                # unknown option
                self.add_error(UnexpectedTokenErrorNode("", token, []))
            else:
                # a bare word is taken as the digest
                digest = self.parse_digest(digest)

        return ExplanationNode(digest, self.text, expr=expr_node, record_digest=record_digest)

    def parse_digest(self, digest):
        """
        Parses the digest argument; errors when one was already given.

        :param digest: digest collected so far ("" when none yet)
        :return: the digest value, "" when the next token is an option or EOF,
                 None on error
        """
        token = self.get_token()
        if token is None or token.value.startswith("-"):
            # NOTE(review): returning "" here discards a digest parsed earlier
            # in the command -- confirm intended.
            return ""

        if digest != "":
            self.add_error(MultipleDigestError("Too many digest", token))
            return None

        digest = token.value
        self.next_token()
        return digest

    def parse_filter(self):
        """Parses one '-f' expression and wraps it in a FilterNode (None on error)."""
        node = self.parse_or()
        if node is None:
            return None
        return FilterNode(node)

    def parse_or(self):
        """Parses 'and-expr (or and-expr)*'; returns a single node or an OrNode."""
        parts = []

        node = self.parse_and()
        if node is None:
            return None

        parts.append(node)

        while True:
            token = self.get_token()
            if token is None or token.value != "or":
                break

            self.next_token()
            node = self.parse_and()
            if node is None:
                return None
            else:
                parts.append(node)

        return parts[0] if len(parts) == 1 else OrNode(*parts)

    def parse_and(self):
        """Parses 'predicate (and predicate)*'; returns a single node or an AndNode."""
        parts = []

        node = self.parse_predicate()
        if node is None:
            return None

        parts.append(node)

        while True:
            token = self.get_token()
            if token is None or token.value != "and":
                break

            self.next_token()
            node = self.parse_predicate()
            if node is None:
                return None
            else:
                parts.append(node)

        return parts[0] if len(parts) == 1 else AndNode(*parts)

    def parse_predicate(self):
        """Parses either a parenthesized expression or a property predicate."""
        token = self.get_token()
        if token is None:
            self.add_error(UnexpectedEof("Unexpected EOF while parsing filter"))
            return None

        if token.value == "(":
            self.next_token()
            expr = self.parse_or()

            token = self.get_token()
            if token is None:
                self.add_error(UnexpectedEof("Missing right parenthesis"))
                return None
            if token.value != ")":
                self.add_error(UnexpectedTokenErrorNode("Parenthesis mismatch", token, [")"]))
                return None
            self.next_token()

        else:
            expr = self.parse_property_predicate()

        return expr

    def parse_recurse(self):
        """Parses the '-r <depth>' argument into a RecurseDefNode (None on error)."""
        token = self.get_token()
        if token is None:
            self.add_error(UnexpectedEof("Unexpected EOF while parsing recurse"))
            return None

        try:
            depth = int(token.value)
            self.next_token()
            return RecurseDefNode(depth)
        except ValueError:
            self.add_error(ValueErrorNode(f"'{token.value}' is not an integer", token))
            return None

    def parse_format_l(self):
        """Parses the '--format_l <template>' argument into a FormatLNode (None on error)."""
        token = self.get_token()
        if token is None:
            self.add_error(UnexpectedEof("Unexpected EOF while parsing format_l"))
            return None

        if token.value.startswith("-"):
            self.add_error(UnexpectedTokenErrorNode("parsing format_l", token, ["<property name>"]))
            return None

        template = token.value
        self.next_token()
        return FormatLNode(template)

    def parse_format_d(self):
        """
        Parses '--format_d prop[:template] (, prop[:template])*' into a
        FormatDNode; a property without an explicit template gets '{prop}'.
        """
        props = {}

        # bug fix: this was 'while TrueNode:' -- the class object is always
        # truthy, so it only worked by accident
        while True:
            token = self.get_token()
            if token is None:
                self.add_error(UnexpectedEof("Unexpected EOF while parsing format_d"))
                return None

            if token.value.startswith("-"):
                self.add_error(UnexpectedTokenErrorNode("parsing format_d", token, ["<property name>"]))
                return None

            parts = token.value.split(':')
            if len(parts) == 1:
                props[token.value] = "{" + token.value + "}"
            else:
                props[parts[0]] = parts[1]

            self.next_token()
            token = self.get_token()

            if token is None or token.value.startswith("-"):
                break
            elif token.value == ",":
                self.next_token()
            else:
                self.add_error(UnexpectedTokenErrorNode("parsing format_d", token, ["<eof>", ","]))

        return FormatDNode(props)

    def parse_property_predicate(self):
        """
        Parses '<prop> (=|==) <value>'.

        '==' produces a PropertyEqualsNode (exact match on the string form);
        '=' produces a PropertyContainsNode (substring match).
        """
        token = self.get_token()
        if token is None:
            self.add_error(UnexpectedEof("Unexpected EOF while parsing predicate"))
            return None
        prop_name = token.value
        if prop_name.startswith("-"):
            self.add_error(UnexpectedTokenErrorNode("while parsing predicate", token, ["<property_name>"]))
            return None
        self.next_token()

        token = self.get_token()
        if token is None:
            self.add_error(UnexpectedEof("Unexpected EOF while parsing predicate"))
            return None
        operand = token.value

        if operand not in ("=", "=="):
            self.add_error(UnexpectedTokenErrorNode("Unexpected token when parsing predicate", token, ['=', "=="]))
            return None
        self.next_token()

        token = self.get_token()
        if token is None:
            self.add_error(UnexpectedEof("Unexpected EOF while parsing filter"))
            return None
        self.next_token()
        prop_value = token.value

        return PropertyEqualsNode(prop_name, prop_value) if operand == "==" else \
            PropertyContainsNode(prop_name, prop_value)

    def parse(self, context, parser_input):
        """
        Parses *parser_input* and wraps the result into a sheerka return value.

        :param context: execution context (provides sheerka and logging)
        :param parser_input: expected to be a string; anything else yields NOT_FOR_ME
        :return: the sheerka ReturnValue (PARSER_RESULT on success, NOT_FOR_ME
                 or ERROR otherwise)
        """

        context.log(f"Parsing '{parser_input}'", self.name)
        sheerka = context.sheerka

        if not isinstance(parser_input, str):
            return sheerka.ret(self.name, False, sheerka.new(BuiltinConcepts.NOT_FOR_ME, reason=parser_input))

        explanation_node = None
        try:
            self.reset_parser(context, parser_input)
            self.next_token()
            explanation_node = self.parse_explain()
        except LexerError as e:
            self.add_error(e, False)

        if self.has_error or not isinstance(explanation_node, ExplanationNode):
            if explanation_node in (BuiltinConcepts.NOT_FOR_ME, BuiltinConcepts.IS_EMPTY):
                # not an explain command (or empty input): let other parsers try
                error_body = sheerka.new(
                    BuiltinConcepts.NOT_FOR_ME,
                    body=parser_input,
                    reason=self.error_sink if self.has_error else BuiltinConcepts.IS_EMPTY)
            else:
                error_body = sheerka.new(
                    BuiltinConcepts.ERROR,
                    body=self.error_sink)
            ret = sheerka.ret(self.name, False, error_body)
        else:
            ret = sheerka.ret(self.name, True,
                              sheerka.new(
                                  BuiltinConcepts.PARSER_RESULT,
                                  parser=self,
                                  source=parser_input,
                                  body=explanation_node))

        self.log_result(context, parser_input, ret)
        return ret
||||
@@ -0,0 +1,177 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Tuple, Callable
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept
|
||||
from parsers.BaseParser import Node
|
||||
|
||||
|
||||
class ExprNode(Node):
    """
    Base ExprNode.
    eval() must be overridden; the base implementation matches anything.
    """

    def eval(self, obj):
        # default: accept every object
        return True
||||
|
||||
|
||||
@dataclass
class PropertyEqualsNode(ExprNode):
    """
    Predicate matching objects that expose the attribute *prop* and whose
    string form equals *value* exactly.
    """
    prop: str
    value: object

    def eval(self, obj):
        # a missing attribute never matches; otherwise compare on str()
        return hasattr(obj, self.prop) and str(getattr(obj, self.prop)) == self.value
||||
|
||||
|
||||
@dataclass()
class PropertyContainsNode(ExprNode):
    """
    Predicate matching objects that expose the attribute *prop* and whose
    string form contains *value* as a substring.
    """
    prop: str
    value: object

    def eval(self, obj):
        # a missing attribute never matches; otherwise substring test on str()
        return hasattr(obj, self.prop) and self.value in str(getattr(obj, self.prop))
||||
|
||||
|
||||
@dataclass
class PropertyEqualsSequenceNode(ExprNode):
    """
    Predicate tested across an object and its ancestors: values[-1] must match
    on the object itself, values[-2] on its parent, and so on down to values[0].
    """
    props: List[str]
    values: List[object]

    def eval(self, obj):
        current = obj
        idx = len(self.props) - 1

        while True:
            name = self.props[idx]
            # missing attribute or wrong value anywhere in the chain -> no match
            if not hasattr(current, name) or getattr(current, name) != self.values[idx]:
                return False

            if idx == 0:
                # every level matched
                return True

            idx -= 1
            # climb to the parent, through get_parent() when available
            current = current.get_parent() if hasattr(current, "get_parent") else current.parent
            if current is None:
                return False
||||
|
||||
|
||||
@dataclass()
class IsaNode(ExprNode):
    """
    Predicate replicating an 'is a' test:
    - plain isinstance() when obj_class is a python type
    - concept-key comparison when obj_class is a BuiltinConcepts member or a string
    """
    obj_class: object  # python type, BuiltinConcepts member, or concept key string

    def eval(self, obj):
        if isinstance(self.obj_class, type):
            return isinstance(obj, self.obj_class)

        if isinstance(self.obj_class, (BuiltinConcepts, str)):
            # concepts are matched by key, not by python type
            return isinstance(obj, Concept) and str(self.obj_class) == obj.key

        return False
||||
|
||||
|
||||
@dataclass()
class LambdaNode(ExprNode):
    """
    Generic predicate wrapping an arbitrary callable; mostly used to ease the tests.
    """
    lambda_exp: Callable[[object], bool]  # callable applied to the tested object

    def eval(self, obj):
        try:
            return self.lambda_exp(obj)
        except Exception:
            # bug fix: the original swallowed the exception and fell through,
            # implicitly returning None; a predicate must answer False here
            return False
||||
|
||||
|
||||
@dataclass(init=False)
class AndNode(ExprNode):
    """
    Logical AND over the sub-expressions.
    Requires at least two parts (the parsers only build it in that case).
    """
    parts: Tuple[ExprNode]  # the sub-expressions, all combined with AND

    def __init__(self, *parts: ExprNode):
        self.parts = parts

    def eval(self, obj):
        # parts[1] is short-circuited by 'and'; the remaining parts are always
        # evaluated ('&=' does not short-circuit)
        res = self.parts[0].eval(obj) and self.parts[1].eval(obj)
        for part in self.parts[2:]:
            res &= part.eval(obj)
        return res
||||
|
||||
|
||||
@dataclass(init=False)
class OrNode(ExprNode):
    """
    Logical OR over the sub-expressions.
    Requires at least two parts (the parsers only build it in that case).
    """
    parts: Tuple[ExprNode]  # the sub-expressions, all combined with OR

    def __init__(self, *parts: ExprNode):
        self.parts = parts

    def eval(self, obj):
        # parts[1] is short-circuited by 'or'; the remaining parts are always
        # evaluated ('|=' does not short-circuit)
        res = self.parts[0].eval(obj) or self.parts[1].eval(obj)
        for part in self.parts[2:]:
            res |= part.eval(obj)
        return res
||||
|
||||
|
||||
@dataclass()
class NotNode(ExprNode):
    """Logical negation of the wrapped expression."""
    node: ExprNode  # the expression to negate

    def eval(self, obj):
        return not self.node.eval(obj)
||||
|
||||
|
||||
class FalseNode(ExprNode):
    """Predicate that never matches."""

    def eval(self, obj):
        return False
||||
|
||||
|
||||
class TrueNode(ExprNode):
    """Predicate that always matches."""

    def eval(self, obj):
        return True
||||
|
||||
|
||||
class ExpressionParser:
    """
    Will parse logic expressions
    like `not (a and b or c)`.

    The nodes can be used for custom filtering (ex with ExplanationConcept)
    or to help to understand why a python expression returns True or False.

    NOTE: placeholder, not implemented yet.
    """
    pass
||||
|
||||
|
||||
class ExpressionVisitor:
    """
    Pythonic implementation of visitors for ExprNode: visit() dispatches to a
    'visit_<ClassName>' method when one exists, otherwise generic_visit()
    recurses into the node's ExprNode children.
    """

    def visit(self, expr_node):
        # dispatch on the concrete class name of the node
        name = expr_node.__class__.__name__

        method = 'visit_' + name
        visitor = getattr(self, method, self.generic_visit)
        return visitor(expr_node)

    def generic_visit(self, expr_node):
        """Called if no explicit visitor function exists for a node."""
        # walk every instance attribute, visiting nested ExprNode children
        # (directly or inside lists/tuples)
        for field, value in expr_node.__dict__.items():
            if isinstance(value, (list, tuple)):
                for item in value:
                    if isinstance(item, ExprNode):
                        self.visit(item)
            elif isinstance(value, ExprNode):
                self.visit(value)
||||
@@ -0,0 +1,120 @@
|
||||
import copy
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Dict
|
||||
|
||||
from core.concept import Concept
|
||||
from core.utils import get_full_qualified_name
|
||||
from parsers.ExpressionParser import ExprNode
|
||||
|
||||
|
||||
class FormatDetailType(Enum):
    """Layout used when printing the detail properties of an object."""
    Props_In_Line = "in line"  # the properties are listed line by line
    Props_In_Column = "in column"  # the properties are listed row by row
||||
|
||||
|
||||
@dataclass
class FormatDetailDesc:
    """
    class that describes how to print the details
    """
    predicate: ExprNode  # the detail will be printed if the predicate is matched
    format_type: FormatDetailType  # layout of the properties
    properties: Dict[str, str]  # name of the property, format to use
||||
|
||||
|
||||
class FormatInstructions:
    """
    Mutable bag of printing options (indentation, colors, per-property
    recursion depth, line/detail templates) passed down while printing.

    Every change is tracked in the *_modified sets so merge() can copy only
    what the other instance explicitly modified.
    """

    def __init__(self, tab_indent=None, tab=None, no_color=None):
        # defaults
        self._tab_indent = 2
        self._tab = ""
        self._no_color = False

        self.recursive_props = {}  # property name -> remaining recursion depth
        self.format_l = {}  # obj key -> template used when printed in line
        self.format_d = []  # list of FormatDetailDesc

        # keep track of the modifications (consumed by merge())
        self.modified = set()
        self.recursive_props_modified = set()
        self.format_l_modified = set()

        # go through the property setters so the changes are recorded
        if tab_indent is not None:
            self.tab_indent = tab_indent
        if tab:
            self.tab = tab
        if no_color:
            self.no_color = no_color

    @property
    def tab(self):
        """Current indentation prefix."""
        return self._tab

    @tab.setter
    def tab(self, value):
        self._tab = value
        self.modified.add("tab")

    @property
    def tab_indent(self):
        """Number of spaces added per recursion level."""
        return self._tab_indent

    @tab_indent.setter
    def tab_indent(self, value):
        self._tab_indent = value
        self.modified.add("tab_indent")

    @property
    def no_color(self):
        """When True, %color% placeholders are stripped instead of expanded."""
        return self._no_color

    @no_color.setter
    def no_color(self, value):
        self._no_color = value
        self.modified.add("no_color")

    def set_recurse(self, prop_name, depth):
        """Declares that *prop_name* must be printed recursively, up to *depth* levels."""
        self.recursive_props[prop_name] = depth
        self.recursive_props_modified.add(prop_name)
        return self

    def recurse(self, property_name):
        """Returns a copy one level deeper: indented once more, depth decremented."""
        clone = self.clone()
        clone.tab = self.tab + (" " * self.tab_indent)
        clone.recursive_props[property_name] -= 1
        return clone

    def set_format_l(self, obj, template):
        """Sets the line template for *obj* (keyed via get_obj_key)."""
        key = self.get_obj_key(obj)
        self.format_l[key] = template
        self.format_l_modified.add(key)
        return self

    def add_format_d(self, predicate, properties, format_type=None):
        """
        Adds a detail format. *properties* is either a dict {name: template}
        or a plain list of names (a '{name}' template is then generated).

        :param format_type: defaults to FormatDetailType.Props_In_Line; the
            default is late-bound (None sentinel) so the enum is only needed
            when the method is actually called.
        """
        if format_type is None:
            format_type = FormatDetailType.Props_In_Line
        if isinstance(properties, list):
            # bug fix: the generated template was "{" + p + " }" -- the stray
            # space made str.format look up the key 'p ' and always fail with
            # KeyError (inconsistent with Formatter.register_format_d)
            properties = dict((p, "{" + p + "}") for p in properties)
        self.format_d.append(FormatDetailDesc(predicate, format_type, properties))
        return self

    def clone(self):
        """Deep copy, so a recursion level cannot alter its parent's instructions."""
        clone = copy.deepcopy(self)
        return clone

    def merge(self, other):
        """Copies into self everything *other* explicitly modified."""
        for prop in other.modified:
            setattr(self, prop, getattr(other, prop))

        for prop in other.recursive_props_modified:
            self.set_recurse(prop, other.recursive_props[prop])

        for key in other.format_l_modified:
            self.set_format_l(key, other.format_l[key])

        self.format_d.extend(other.format_d)

        return self

    @staticmethod
    def get_obj_key(obj):
        """Template key: a concept's id, a string itself, or the fully qualified class name."""
        return obj.id if isinstance(obj, Concept) else \
            obj if isinstance(obj, str) else \
            get_full_qualified_name(obj)
||||
@@ -0,0 +1,78 @@
|
||||
from printer.FormatInstructions import FormatDetailDesc, FormatDetailType, FormatInstructions
|
||||
|
||||
|
||||
class Formatter:
|
||||
|
||||
def __init__(self):
|
||||
self.custom_l_formats = {}
|
||||
self.custom_d_formats = []
|
||||
|
||||
def register_format_l(self, obj, template):
|
||||
key = FormatInstructions.get_obj_key(obj)
|
||||
self.custom_l_formats[key] = template
|
||||
return self
|
||||
|
||||
def register_format_d(self, predicate, properties, format_type=FormatDetailType.Props_In_Line):
|
||||
if isinstance(properties, list):
|
||||
properties = dict([(p, "{" + p + "}") for p in properties])
|
||||
self.custom_d_formats.append(FormatDetailDesc(predicate, format_type, properties))
|
||||
return self
|
||||
|
||||
def compute_format_l(self, custom_formats_override, key):
|
||||
if custom_formats_override and key in custom_formats_override:
|
||||
custom_template = custom_formats_override[key]
|
||||
if custom_template in ("+", "\\+", "+\\"):
|
||||
return custom_template
|
||||
elif custom_template.startswith("+"):
|
||||
registered_template = self.custom_l_formats[key] if key in self.custom_l_formats else ""
|
||||
return registered_template + custom_template[1:]
|
||||
elif custom_template.startswith("\\+"):
|
||||
return custom_template[1:]
|
||||
elif custom_template.endswith("\\+"):
|
||||
return custom_template[:-2] + "+"
|
||||
elif custom_template.endswith("+"):
|
||||
registered_template = self.custom_l_formats[key] if key in self.custom_l_formats else ""
|
||||
return custom_template[:-1] + registered_template
|
||||
else:
|
||||
return custom_template
|
||||
elif key in self.custom_l_formats:
|
||||
return self.custom_l_formats[key]
|
||||
else:
|
||||
return None
|
||||
|
||||
def compute_format_d(self, custom_formats_override):
|
||||
if custom_formats_override and not self.custom_d_formats:
|
||||
return custom_formats_override
|
||||
if self.custom_d_formats and not custom_formats_override:
|
||||
return self.custom_d_formats
|
||||
if self.custom_d_formats and custom_formats_override:
|
||||
return self.custom_d_formats + custom_formats_override
|
||||
return []
|
||||
|
||||
def format_l(self, obj, custom_formats_override=None):
|
||||
key = FormatInstructions.get_obj_key(obj)
|
||||
format_l = self.compute_format_l(custom_formats_override, key)
|
||||
return self.to_string(obj, format_l) if format_l else str(obj)
|
||||
|
||||
def format_d(self, obj, format_d_desc: FormatDetailDesc):
|
||||
max_prop_length = self.get_properties_max_length(format_d_desc.properties.keys())
|
||||
res = ""
|
||||
for prop, template in format_d_desc.properties.items():
|
||||
if res:
|
||||
res += "\n"
|
||||
#value = getattr(obj, prop) if hasattr(obj, prop) else "*Undefined*"
|
||||
res += prop.ljust(max_prop_length) + ": " + self.to_string(obj, template)
|
||||
|
||||
return res
|
||||
|
||||
@staticmethod
|
||||
def get_properties_max_length(properties):
|
||||
return max((len(p) for p in properties))
|
||||
|
||||
@staticmethod
|
||||
def to_string(obj, template):
|
||||
try:
|
||||
bag = obj.to_bag() if hasattr(obj, "to_bag") else obj.__dict__
|
||||
return template.format(**bag)
|
||||
except KeyError:
|
||||
return "*Undefined*"
|
||||
@@ -0,0 +1,103 @@
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept
|
||||
from printer.FormatInstructions import FormatInstructions, FormatDetailType
|
||||
from printer.Formatter import Formatter
|
||||
|
||||
# ANSI escape sequences used to expand %color% placeholders in the templates
COLORS = {
    "reset": "\u001b[0m",
    "black": "\u001b[30m",
    "red": "\u001b[31m",
    "green": "\u001b[32m",
    "yellow": "\u001b[33m",
    "blue": "\u001b[34m",
    "magenta": "\u001b[35m",
    "cyan": "\u001b[36m",
    "white": "\u001b[37m",
}
# key under which the default line template for ExecutionContext objects is registered
EXECUTION_CONTEXT_CLASS = "core.sheerka.ExecutionContext.ExecutionContext"
||||
|
||||
|
||||
class SheerkaPrinter:
    """
    Class used to format the output: strings go through %color%/%tab%
    substitution, concepts with a registered custom printer are delegated,
    everything else goes through the Formatter templates, then details and
    recursive properties are printed.
    """

    # output function; stored as a class attribute so it can be overridden
    out = print

    def __init__(self, sheerka):
        self.sheerka = sheerka
        self.formatter = Formatter()
        self.formatter.register_format_l(EXECUTION_CONTEXT_CLASS, "[{id:3}] %tab%{desc} ({status})")
        # concept key -> callable(printer, instructions, item)
        self.custom_concepts_printers = {
            str(BuiltinConcepts.EXPLANATION): self.print_explanation,
            str(BuiltinConcepts.RETURN_VALUE): self.print_return_value,
        }

    def register_custom_printer(self, concept, custom_format):
        """Registers a custom printer callable for *concept* (Concept or key)."""
        key = concept.key if isinstance(concept, Concept) else concept
        self.custom_concepts_printers[str(key)] = custom_format
        return self

    def register_format_l(self, obj, template):
        """Registers the default line template for *obj* on the underlying Formatter."""
        self.formatter.register_format_l(obj, template)

    def register_format_d(self, predicate, properties, format_type=FormatDetailType.Props_In_Line):
        """Registers a detail format on the underlying Formatter."""
        self.formatter.register_format_d(predicate, properties, format_type)

    def print(self, to_print, instructions=None):
        """Prints *to_print* using *instructions* (defaults are used when None)."""
        instructions = instructions or FormatInstructions()
        self.fp(instructions, to_print)

    def fp(self, instructions, item):
        """
        fp stands for format and print
        :param instructions: FormatInstructions driving colors/indent/recursion
        :param item: list/tuple (each element printed), str (placeholders
            expanded), Concept with a custom printer, or any other object
            (printed through the line template)
        :return:
        """
        if isinstance(item, (list, tuple)):
            # print each element; no details/recursion at the collection level
            for i in item:
                self.fp(instructions, i)
            return
        elif isinstance(item, str):
            # expand (or strip, when no_color) the %color% placeholders
            for color in COLORS:
                item = item.replace("%" + color + "%", "" if instructions.no_color else COLORS[color])
            if "%tab%" in item:
                self.out(item.replace("%tab%", instructions.tab))
            else:
                self.out(instructions.tab + item)
            return

        elif isinstance(item, Concept) and item.key in self.custom_concepts_printers:
            self.custom_concepts_printers[item.key](self, instructions, item)
        else:
            # fall back to the registered line template (or str())
            self.fp(instructions, self.formatter.format_l(item, instructions.format_l))

        # print details
        format_d = self.formatter.compute_format_d(instructions.format_d)
        # last matching description wins (overrides come after registered ones)
        for format_d_desc in reversed(format_d):
            if format_d_desc.predicate.eval(item):
                self.fp(instructions, self.formatter.format_d(item, format_d_desc))
                break

        if instructions.recursive_props:
            # recurse into the declared properties while their depth budget lasts
            for k, v in instructions.recursive_props.items():
                if hasattr(item, k) and v > 0 and (value := getattr(item, k)) is not None:
                    self.fp(instructions.recurse(k), value)

    @staticmethod
    def print_explanation(printer, instructions, item):
        """Custom printer for EXPLANATION concepts: header line then the body."""
        # the explanation carries its own instructions, merged over the current ones
        explanation_instructions = instructions.clone().merge(item.instructions)
        printer.fp(explanation_instructions, f"%blue%{item.digest}%reset% : {item.command}")
        printer.fp(explanation_instructions, item.body)

    @staticmethod
    def print_return_value(printer, instructions, item):
        """Custom printer for RETURN_VALUE concepts; failures are printed in red."""
        if printer.sheerka.isinstance(item.body, BuiltinConcepts.EXPLANATION):
            return printer.fp(instructions, item.body)

        if isinstance(item.body, (list, tuple)):
            return printer.fp(instructions, item.body)

        status = item.status
        return printer.fp(instructions, str(item) if status else f"%red%{item}%reset%")
||||
+3
-2
@@ -13,6 +13,7 @@
|
||||
- C : concept (with history management)
|
||||
- D : concept definitions (no history management)
|
||||
- R : executionContext ('R' stands for Result or ReturnValue, no history management)
|
||||
- V : variable (from pickle)
|
||||
|
||||
## How concepts are serialized ?
|
||||
- get the id of the concept
|
||||
@@ -35,9 +36,9 @@ structure of the serialisation:
|
||||
## Idea to manage ObjectSerializer
|
||||
Problem:
|
||||
During serialization, there is no issue: the match() method is the only way to get the correct serializer.
|
||||
During the deserialisation, all Object serializer have type = '0' and version = 1.
|
||||
During the deserialisation, all Object serializer have type = 'O' and version = 1.
|
||||
So how to choose the correct one ?
|
||||
A possible solution will be to add the type of the object to deserialize to the saved stream
|
||||
--> SHA256 for every object. Too much data saved.
|
||||
The id exists so that the Serializer can increment the version automatically (during registration) and so that the mapping is kept within sdp.state
|
||||
|
||||
No, finally, I use a different char for each object type
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, date
|
||||
import hashlib
|
||||
import json
|
||||
import zlib
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, date
|
||||
|
||||
from core.sheerka_logger import get_logger
|
||||
from sdp.sheerkaDataProviderIO import SheerkaDataProviderIO
|
||||
from sdp.sheerkaSerializer import Serializer, SerializerContext
|
||||
from core.sheerka_logger import get_logger
|
||||
|
||||
|
||||
def json_default_converter(o):
|
||||
@@ -76,9 +76,10 @@ class Event(object):
|
||||
|
||||
class ObjToUpdate:
|
||||
"""
|
||||
Internal key value class to hold the key (and the value)
|
||||
when it is detected
|
||||
It's created to distinguish from {key, value}
|
||||
Internal key value class;
|
||||
You give it an obj, and it tries to figure out what is the key of the obj
|
||||
Note that you can force the key if you want
|
||||
It was first create to make the difference between an object that has a key and {key, value}
|
||||
"""
|
||||
|
||||
def __init__(self, obj, key=None, digest=None):
|
||||
@@ -310,18 +311,24 @@ class SheerkaDataProviderDuplicateKeyError(Exception):
|
||||
|
||||
@dataclass
|
||||
class SheerkaDataProviderResult:
|
||||
obj: object
|
||||
entry: str
|
||||
key: str
|
||||
digest: str
|
||||
already_exists: bool = False
|
||||
"""
|
||||
Object that is returned after adding, setting or modifying an entry
|
||||
"""
|
||||
obj: object # obj that was given to store/modify
|
||||
entry: str # entry where the object is put
|
||||
key: str # key to use to retrieve the object
|
||||
digest: str # digest used to store the reference
|
||||
already_exists: bool = False # the same object was already persisted
|
||||
|
||||
|
||||
@dataclass
|
||||
class SheerkaDataProviderRef:
|
||||
key: str
|
||||
target: str
|
||||
original_target: str = None
|
||||
"""
|
||||
Object that tells where an object is store (target is the digest of the reference)
|
||||
"""
|
||||
key: str # key of the object
|
||||
target: str # digest of the reference
|
||||
original_target: str = None # when the object is modified, previous digest
|
||||
|
||||
def get_digest(self):
|
||||
return self.original_target
|
||||
@@ -361,8 +368,8 @@ class SheerkaDataProvider:
|
||||
:param obj:
|
||||
:return: String version of that is found, None otherwise
|
||||
"""
|
||||
return str(obj.key) if hasattr(obj, "key") \
|
||||
else str(obj.get_key()) if hasattr(obj, "get_key") \
|
||||
return str(obj.get_key()) if hasattr(obj, "get_key") \
|
||||
else str(obj.key) if hasattr(obj, "key") \
|
||||
else None
|
||||
|
||||
@staticmethod
|
||||
@@ -640,12 +647,13 @@ class SheerkaDataProvider:
|
||||
if filter_to_use(element):
|
||||
yield self.load_ref_if_needed(element)[0]
|
||||
|
||||
def remove(self, event_digest, entry, filter=None):
|
||||
def remove(self, event_digest, entry, filter=None, silent_remove=True):
|
||||
"""
|
||||
Removes elements under the entry 'entry'
|
||||
:param event_digest: event that triggers the deletion
|
||||
:param entry:
|
||||
:param filter: filter to use
|
||||
:param silent_remove: Do not throw exception if entry does not exist
|
||||
:return: new sha256 of the state
|
||||
TODO: Remove by key
|
||||
"""
|
||||
@@ -653,7 +661,10 @@ class SheerkaDataProvider:
|
||||
state = self.load_state(snapshot)
|
||||
|
||||
if entry not in state.data:
|
||||
raise IndexError(entry)
|
||||
if silent_remove:
|
||||
return snapshot
|
||||
else:
|
||||
raise IndexError(entry)
|
||||
|
||||
state.parents = [] if snapshot is None else [snapshot]
|
||||
state.events = [event_digest]
|
||||
@@ -710,6 +721,38 @@ class SheerkaDataProvider:
|
||||
|
||||
return self.load_ref_if_needed(item, load_origin)[0]
|
||||
|
||||
def get_ref(self, entry, key=None):
|
||||
"""
|
||||
Returns the reference of an object if the object exists
|
||||
This function allows to retrieve obj.##origin## without loading the object
|
||||
:param entry:
|
||||
:param key:
|
||||
:return:
|
||||
"""
|
||||
|
||||
snapshot = self.get_snapshot(SheerkaDataProvider.HeadFile)
|
||||
state = self.load_state(snapshot)
|
||||
|
||||
if entry not in state.data:
|
||||
raise IndexError(entry)
|
||||
|
||||
if key is not None and key not in state.data[entry]:
|
||||
raise IndexError(f"{entry}.{key}")
|
||||
|
||||
item = state.data[entry] if key is None else state.data[entry][key]
|
||||
if isinstance(item, list):
|
||||
res = []
|
||||
for element in item:
|
||||
if not self.is_reference(element):
|
||||
raise SheerkaDataProviderError("Not a reference", f"{entry}.{key}")
|
||||
res.append(self.get_obj_digest(element))
|
||||
return res
|
||||
|
||||
if not self.is_reference(item):
|
||||
raise SheerkaDataProviderError("Not a reference", f"{entry}.{key}")
|
||||
|
||||
return self.get_obj_digest(item)
|
||||
|
||||
def exists(self, entry, key=None, digest=None):
|
||||
"""
|
||||
Returns true if the entry is defined
|
||||
@@ -805,18 +848,33 @@ class SheerkaDataProvider:
|
||||
digest = event.parents[0]
|
||||
count += 1
|
||||
|
||||
def save_result(self, execution_context):
|
||||
def get_result_file_path(self, digest, is_admin):
|
||||
ext = "_admin_result" if is_admin else "_result"
|
||||
return self.io.get_obj_path(SheerkaDataProvider.EventFolder, digest) + ext
|
||||
|
||||
def has_result(self, digest, is_admin=False):
|
||||
"""
|
||||
Check is a result file was created for a specific event
|
||||
:param digest:
|
||||
:param is_admin: True is the result is an internal admin result file
|
||||
:return:
|
||||
"""
|
||||
target_path = self.get_result_file_path(digest, is_admin)
|
||||
return self.io.exists(target_path)
|
||||
|
||||
def save_result(self, execution_context, is_admin=False):
|
||||
"""
|
||||
Save the execution context associated with an event
|
||||
To make a long story short,
|
||||
for every single user input, there is an event (which is the first thing that is created)
|
||||
and a result (the ExecutionContext created by sheerka.evaluate_user_input()
|
||||
:param execution_context:
|
||||
:param is_admin: True is the result is an internal admin result file
|
||||
:return:
|
||||
"""
|
||||
digest = execution_context.event.get_digest()
|
||||
self.log.debug(f"Saving execution context. digest={digest}")
|
||||
target_path = self.io.get_obj_path(SheerkaDataProvider.EventFolder, digest) + "_result"
|
||||
target_path = self.get_result_file_path(digest, is_admin)
|
||||
if self.io.exists(target_path):
|
||||
return digest
|
||||
|
||||
@@ -824,8 +882,14 @@ class SheerkaDataProvider:
|
||||
self.io.write_binary(target_path, self.serializer.serialize(execution_context, context).read())
|
||||
return digest
|
||||
|
||||
def load_result(self, digest):
|
||||
target_path = self.io.get_obj_path(SheerkaDataProvider.EventFolder, digest) + "_result"
|
||||
def load_result(self, digest, is_admin=False):
|
||||
"""
|
||||
Load and deserialize a result file
|
||||
:param digest:
|
||||
:param is_admin: True is the result is an internal admin result file
|
||||
:return:
|
||||
"""
|
||||
target_path = self.get_result_file_path(digest, is_admin)
|
||||
|
||||
with self.io.open(target_path, "rb") as f:
|
||||
context = SerializerContext(sheerka=self.sheerka)
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
import json
|
||||
import datetime
|
||||
import io
|
||||
import json
|
||||
import pickle
|
||||
import struct
|
||||
import io
|
||||
from dataclasses import dataclass
|
||||
|
||||
import sheerkapickle
|
||||
from core.sheerka_logger import get_logger
|
||||
from enum import Enum
|
||||
|
||||
import core.utils
|
||||
|
||||
import sheerkapickle
|
||||
from core.concept import Concept
|
||||
from core.sheerka_logger import get_logger
|
||||
from core.utils import get_full_qualified_name, get_class
|
||||
|
||||
|
||||
def json_default_converter(o):
|
||||
@@ -62,6 +60,7 @@ class Serializer:
|
||||
self.register(ConceptSerializer())
|
||||
self.register(DictionarySerializer())
|
||||
self.register(ExecutionContextSerializer())
|
||||
self.register(VariableSerializer())
|
||||
|
||||
def register(self, serializer):
|
||||
"""
|
||||
@@ -158,7 +157,7 @@ class EventSerializer(BaseSerializer):
|
||||
BaseSerializer.__init__(self, "E", 1)
|
||||
|
||||
def matches(self, obj):
|
||||
return core.utils.get_full_qualified_name(obj) == "sdp.sheerkaDataProvider.Event"
|
||||
return get_full_qualified_name(obj) == "sdp.sheerkaDataProvider.Event"
|
||||
|
||||
def dump(self, stream, obj, context):
|
||||
stream.write(json.dumps(obj.to_dict(), default=json_default_converter).encode("utf-8"))
|
||||
@@ -168,7 +167,7 @@ class EventSerializer(BaseSerializer):
|
||||
def load(self, stream, context):
|
||||
json_stream = stream.read().decode("utf-8")
|
||||
as_dict = json.loads(json_stream)
|
||||
event = core.utils.get_class("sdp.sheerkaDataProvider.Event")()
|
||||
event = get_class("sdp.sheerkaDataProvider.Event")()
|
||||
event.from_dict(as_dict)
|
||||
return event
|
||||
|
||||
@@ -180,7 +179,7 @@ class JsonSerializer(BaseSerializer):
|
||||
self.fully_qualified_name = fully_qualified_name
|
||||
|
||||
def matches(self, obj):
|
||||
return core.utils.get_full_qualified_name(obj) == self.fully_qualified_name
|
||||
return get_full_qualified_name(obj) == self.fully_qualified_name
|
||||
|
||||
def dump(self, stream, obj, context):
|
||||
as_json = obj.to_dict()
|
||||
@@ -197,7 +196,7 @@ class JsonSerializer(BaseSerializer):
|
||||
def load(self, stream, context):
|
||||
json_stream = stream.read().decode("utf-8")
|
||||
json_message = json.loads(json_stream)
|
||||
obj = core.utils.get_class(self.fully_qualified_name)()
|
||||
obj = get_class(self.fully_qualified_name)()
|
||||
obj.from_dict(json_message)
|
||||
setattr(obj, Serializer.HISTORY, json_message[Serializer.HISTORY])
|
||||
|
||||
@@ -224,9 +223,8 @@ class PickleSerializer(BaseSerializer):
|
||||
|
||||
class StateSerializer(PickleSerializer):
|
||||
def __init__(self):
|
||||
PickleSerializer.__init__(
|
||||
self,
|
||||
lambda obj: core.utils.get_full_qualified_name(obj) == "sdp.sheerkaDataProvider.State",
|
||||
super().__init__(
|
||||
lambda obj: get_full_qualified_name(obj) == "sdp.sheerkaDataProvider.State",
|
||||
"S",
|
||||
1)
|
||||
|
||||
@@ -276,7 +274,7 @@ class ExecutionContextSerializer(BaseSerializer):
|
||||
BaseSerializer.__init__(self, "R", 1)
|
||||
|
||||
def matches(self, obj):
|
||||
return core.utils.get_full_qualified_name(obj) == self.CLASS_NAME
|
||||
return get_full_qualified_name(obj) == self.CLASS_NAME
|
||||
|
||||
def dump(self, stream, obj, context):
|
||||
stream.write(sheerkapickle.encode(context.sheerka, obj).encode("utf-8"))
|
||||
@@ -289,6 +287,14 @@ class ExecutionContextSerializer(BaseSerializer):
|
||||
# json_message = json.loads(json_stream)
|
||||
return obj
|
||||
|
||||
|
||||
class VariableSerializer(PickleSerializer):
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
lambda obj: get_full_qualified_name(obj) == "core.sheerka.Services.SheerkaVariableManager.Variable",
|
||||
"V",
|
||||
1)
|
||||
|
||||
#
|
||||
# class SheerkaSerializer(ObjectSerializer):
|
||||
# def __init__(self):
|
||||
|
||||
@@ -42,6 +42,9 @@ class SheerkaPickler:
|
||||
if utils.is_primitive(obj):
|
||||
return obj
|
||||
|
||||
if utils.is_type(obj):
|
||||
return str(obj)
|
||||
|
||||
if utils.is_tuple(obj):
|
||||
return {tags.TUPLE: [self.flatten(v) for v in obj]}
|
||||
|
||||
|
||||
@@ -49,6 +49,10 @@ def is_tuple(obj):
|
||||
return type(obj) is tuple
|
||||
|
||||
|
||||
def is_class(obj):
|
||||
return type(obj) is type
|
||||
|
||||
|
||||
def b64encode(data):
|
||||
"""
|
||||
Encode binary data to ascii text in base64. Data must be bytes.
|
||||
|
||||
@@ -16,7 +16,7 @@ class TestUsingMemoryBasedSheerka(BaseTest):
|
||||
skip_builtins_in_db = kwargs.get("skip_builtins_in_db", True)
|
||||
use_singleton = kwargs.get("singleton", False)
|
||||
|
||||
sheerka = kwargs.get("sheerka", False)
|
||||
sheerka = kwargs.get("sheerka", None)
|
||||
if sheerka:
|
||||
return sheerka
|
||||
|
||||
|
||||
BIN
Binary file not shown.
BIN
Binary file not shown.
@@ -31,7 +31,7 @@ class TestSheerkaHistoryManager(TestUsingMemoryBasedSheerka):
|
||||
hist("xxx", False),
|
||||
hist("one", True),
|
||||
hist("def concept one as 1", True),
|
||||
hist("Initializing Sheerka.", True)]
|
||||
hist("Initializing Sheerka.", None)]
|
||||
|
||||
h = list(sheerka.history(2))
|
||||
assert h == [
|
||||
|
||||
@@ -0,0 +1,71 @@
|
||||
from core.concept import Concept, ConceptParts
|
||||
from core.sheerka.Services.SheerkaVariableManager import SheerkaVariableManager
|
||||
|
||||
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
|
||||
|
||||
|
||||
class TestSheerkaVariable(TestUsingMemoryBasedSheerka):
|
||||
def test_i_can_record_and_load_a_constant(self):
|
||||
sheerka = self.get_sheerka()
|
||||
context = self.get_context(sheerka)
|
||||
|
||||
sheerka.record(context, "TestSheerkaVariable", "my_variable", 1)
|
||||
res = sheerka.load("TestSheerkaVariable", "my_variable")
|
||||
assert res == 1
|
||||
|
||||
assert sheerka.sdp.exists(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable.my_variable")
|
||||
loaded = sheerka.sdp.get(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable.my_variable")
|
||||
assert loaded.event_id == context.event.get_digest()
|
||||
assert loaded.key == "my_variable"
|
||||
assert loaded.value == 1
|
||||
assert loaded.who == "TestSheerkaVariable"
|
||||
assert loaded.parents is None
|
||||
|
||||
def test_i_can_record_and_load_a_concept(self):
|
||||
sheerka = self.get_sheerka()
|
||||
context = self.get_context(sheerka)
|
||||
|
||||
concept = Concept("foo").set_prop("a", "alpha").set_metadata_value(ConceptParts.BODY, 3.14)
|
||||
|
||||
sheerka.record(context, "TestSheerkaVariable", "my_variable", concept)
|
||||
res = sheerka.load("TestSheerkaVariable", "my_variable")
|
||||
|
||||
assert res == concept
|
||||
assert res.body == concept.body
|
||||
|
||||
def test_i_can_get_the_parent_when_modified(self):
|
||||
sheerka = self.get_sheerka()
|
||||
context = self.get_context(sheerka)
|
||||
|
||||
sheerka.record(context, "TestSheerkaVariable", "my_variable", 1)
|
||||
sheerka.record(context, "TestSheerkaVariable", "my_variable", 2)
|
||||
res = sheerka.load("TestSheerkaVariable", "my_variable")
|
||||
assert res == 2
|
||||
|
||||
loaded = sheerka.sdp.get(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable.my_variable")
|
||||
assert loaded.event_id == context.event.get_digest()
|
||||
assert loaded.key == "my_variable"
|
||||
assert loaded.value == 2
|
||||
assert loaded.who == "TestSheerkaVariable"
|
||||
assert loaded.parents == ['8c9ada7bf488d84229f6539f76042431638f16d600fe3b7ec7e7161043a40d59']
|
||||
|
||||
parent = sheerka.sdp.load_obj(loaded.parents[0])
|
||||
assert parent.event_id == context.event.get_digest()
|
||||
assert parent.key == "my_variable"
|
||||
assert parent.value == 1
|
||||
assert parent.who == "TestSheerkaVariable"
|
||||
assert parent.parents is None
|
||||
|
||||
def test_variable_is_not_persisted_if_the_value_is_the_same(self):
|
||||
sheerka = self.get_sheerka()
|
||||
context = self.get_context(sheerka)
|
||||
|
||||
sheerka.record(context, "TestSheerkaVariable", "my_variable", 1)
|
||||
sheerka.record(context, "TestSheerkaVariable", "my_variable", 1)
|
||||
|
||||
loaded = sheerka.sdp.get(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable.my_variable")
|
||||
assert loaded.event_id == context.event.get_digest()
|
||||
assert loaded.key == "my_variable"
|
||||
assert loaded.value == 1
|
||||
assert loaded.who == "TestSheerkaVariable"
|
||||
assert loaded.parents is None
|
||||
@@ -0,0 +1,278 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
import pytest
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, ConceptParts
|
||||
from parsers.ExpressionParser import TrueNode, LambdaNode
|
||||
from printer.SheerkaPrinter import FormatInstructions
|
||||
|
||||
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
|
||||
|
||||
|
||||
@dataclass
|
||||
class Obj:
|
||||
a: object
|
||||
b: object
|
||||
|
||||
|
||||
@dataclass()
|
||||
class ObjLongProp:
|
||||
first_property_name: object
|
||||
second: object
|
||||
|
||||
|
||||
class TestSheerkaPrinter(TestUsingMemoryBasedSheerka):
|
||||
@pytest.mark.parametrize("text, expected", [
|
||||
("Hello world!", "Hello world!\n"),
|
||||
("%black%%red%%green%%yellow%%reset%", "\x1b[30m\x1b[31m\x1b[32m\x1b[33m\x1b[0m\n"),
|
||||
("%blue%%magenta%%cyan%%white%%reset%", "\x1b[34m\x1b[35m\x1b[36m\x1b[37m\x1b[0m\n"),
|
||||
(["Hello", "world!"], "Hello\nworld!\n"),
|
||||
(("Hello", "world!"), "Hello\nworld!\n"),
|
||||
])
|
||||
def test_i_can_print(self, capsys, text, expected):
|
||||
sheerka = self.get_sheerka()
|
||||
sheerka.print(text)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == expected
|
||||
|
||||
def test_i_can_disable_color(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
sheerka.print("%red%Hello world !%reset%", FormatInstructions(no_color=True))
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "Hello world !\n"
|
||||
|
||||
def test_i_can_print_concept(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = Concept("foo a b").def_prop("a").def_prop("b")
|
||||
sheerka.print(foo)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == str(foo) + "\n"
|
||||
|
||||
def test_i_can_use_custom_format(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = Concept("foo a b").def_prop("a").def_prop("b").init_key()
|
||||
sheerka.printer_handler.register_custom_printer(
|
||||
foo,
|
||||
lambda printer, instr, item: printer.fp(instr, f"foo a={item.a}, b={item.b}"))
|
||||
foo.set_prop("a", "value a").set_prop("b", "value b")
|
||||
sheerka.print(foo)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "foo a=value a, b=value b\n"
|
||||
|
||||
def test_i_can_print_and_recurse(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
level3 = Concept("level3")
|
||||
level2 = Concept("level2").set_metadata_value(ConceptParts.BODY, level3)
|
||||
level1 = Concept("level1").set_metadata_value(ConceptParts.BODY, level2)
|
||||
|
||||
sheerka.print(level1)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == f"{level1}\n"
|
||||
|
||||
sheerka.print(level1, FormatInstructions().set_recurse("body", 1))
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == f"{level1}\n {level2}\n"
|
||||
|
||||
sheerka.print(level1, FormatInstructions().set_recurse("body", 2))
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == f"{level1}\n {level2}\n {level3}\n"
|
||||
|
||||
sheerka.print(level1, FormatInstructions().set_recurse("body", 10))
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == f"{level1}\n {level2}\n {level3}\n"
|
||||
|
||||
def test_i_can_print_and_recurse_list(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
level31 = Concept("level31")
|
||||
level32 = Concept("level32")
|
||||
level33 = Concept("level33")
|
||||
level21 = Concept("level21").set_metadata_value(ConceptParts.BODY, [level31, level32])
|
||||
level22 = Concept("level22").set_metadata_value(ConceptParts.BODY, [level33])
|
||||
level1 = Concept("level1").set_metadata_value(ConceptParts.BODY, [level21, level22])
|
||||
|
||||
sheerka.print(level1)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == f"{level1}\n"
|
||||
|
||||
sheerka.print(level1, FormatInstructions().set_recurse("body", 1))
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == f"{level1}\n {level21}\n {level22}\n"
|
||||
|
||||
sheerka.print(level1, FormatInstructions().set_recurse("body", 3))
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == f"{level1}\n {level21}\n {level31}\n {level32}\n {level22}\n {level33}\n"
|
||||
|
||||
def test_explanation_concept_can_control_recursion(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
level31 = Concept("level31")
|
||||
level32 = Concept("level32")
|
||||
level33 = Concept("level33")
|
||||
level21 = Concept("level21").set_metadata_value(ConceptParts.BODY, [level31, level32])
|
||||
level22 = Concept("level22").set_metadata_value(ConceptParts.BODY, [level33])
|
||||
level1 = Concept("level1").set_metadata_value(ConceptParts.BODY, [level21, level22])
|
||||
|
||||
instructions = FormatInstructions(no_color=True)
|
||||
explanation = sheerka.new(
|
||||
BuiltinConcepts.EXPLANATION,
|
||||
digest="digest",
|
||||
command="command",
|
||||
title="title",
|
||||
instructions=instructions,
|
||||
body=[level1])
|
||||
|
||||
sheerka.print(explanation)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == f"digest : command\n(None)level1\n"
|
||||
|
||||
instructions = FormatInstructions(no_color=True).set_recurse("body", 2)
|
||||
explanation = sheerka.new(
|
||||
BuiltinConcepts.EXPLANATION,
|
||||
digest="digest",
|
||||
command="command",
|
||||
title="title",
|
||||
instructions=instructions,
|
||||
body=[level1])
|
||||
|
||||
sheerka.print(explanation)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == """digest : command
|
||||
(None)level1
|
||||
(None)level21
|
||||
(None)level31
|
||||
(None)level32
|
||||
(None)level22
|
||||
(None)level33
|
||||
"""
|
||||
|
||||
def test_i_can_format_concept(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = Concept("foo a b").def_prop("a").def_prop("b").init_key()
|
||||
foo.set_prop("a", "value a").set_prop("b", "value b")
|
||||
foo.set_metadata_value(ConceptParts.BODY, "body")
|
||||
sheerka.set_id_if_needed(foo, False)
|
||||
|
||||
sheerka.printer_handler.register_format_l(foo, "{id}-{name}-{key}-{body}-{a}-{b}")
|
||||
|
||||
sheerka.print(foo)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "1001-foo a b-foo __var__0 __var__1-body-value a-value b\n"
|
||||
|
||||
def test_i_can_format_object(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = Obj("value a", "value b")
|
||||
|
||||
sheerka.printer_handler.register_format_l(foo, "{a}-{b}")
|
||||
|
||||
sheerka.print(foo)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "value a-value b\n"
|
||||
|
||||
def test_i_can_register_a_custom_format_by_its_name(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = Obj("value a", "value b")
|
||||
|
||||
sheerka.printer_handler.register_format_l("tests.core.test_sheerka_printer.Obj", "{a}-{b}")
|
||||
|
||||
sheerka.print(foo)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "value a-value b\n"
|
||||
|
||||
def test_i_can_define_format_in_print_instruction(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = Obj("value a", "value b")
|
||||
|
||||
instructions = FormatInstructions().set_format_l("tests.core.test_sheerka_printer.Obj", "{a}-{b}")
|
||||
|
||||
sheerka.print(foo, instructions)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "value a-value b\n"
|
||||
|
||||
def test_format_print_instruction_override_register_format(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = Obj("value a", "value b")
|
||||
|
||||
sheerka.printer_handler.register_format_l("tests.core.test_sheerka_printer.Obj", "{a}-{b}")
|
||||
instructions = FormatInstructions().set_format_l("tests.core.test_sheerka_printer.Obj", "a={a} <> b={b}")
|
||||
|
||||
sheerka.print(foo, instructions)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == "a=value a <> b=value b\n"
|
||||
|
||||
def test_i_can_format_d(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = [Obj("value a", "value b"), Obj("value c", "value d")]
|
||||
|
||||
sheerka.printer_handler.register_format_d(TrueNode(), ["a", "b"])
|
||||
sheerka.print(foo)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == """Obj(a='value a', b='value b')
|
||||
a: value a
|
||||
b: value b
|
||||
Obj(a='value c', b='value d')
|
||||
a: value c
|
||||
b: value d
|
||||
"""
|
||||
|
||||
def test_i_can_format_d_and_align_properties(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = [ObjLongProp("value a", "value b"), ObjLongProp("value c", "value d")]
|
||||
|
||||
sheerka.printer_handler.register_format_d(TrueNode(), ["first_property_name", "second"])
|
||||
sheerka.print(foo)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == """ObjLongProp(first_property_name='value a', second='value b')
|
||||
first_property_name: value a
|
||||
second : value b
|
||||
ObjLongProp(first_property_name='value c', second='value d')
|
||||
first_property_name: value c
|
||||
second : value d
|
||||
"""
|
||||
|
||||
def test_i_can_manage_when_property_does_not_exist(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = Obj("value a", "value b")
|
||||
|
||||
sheerka.printer_handler.register_format_d(TrueNode(), ["foo", "bar"])
|
||||
sheerka.print(foo)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == """Obj(a='value a', b='value b')
|
||||
foo: *Undefined*
|
||||
bar: *Undefined*
|
||||
"""
|
||||
|
||||
def test_i_can_select_the_object_to_format_d(self, capsys):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = [Obj("value a", "value b"), ObjLongProp("value c", "value d")]
|
||||
|
||||
sheerka.printer_handler.register_format_d(LambdaNode(lambda o: isinstance(o, Obj)), ["a", "b"])
|
||||
sheerka.print(foo)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == """Obj(a='value a', b='value b')
|
||||
a: value a
|
||||
b: value b
|
||||
ObjLongProp(first_property_name='value c', second='value d')
|
||||
"""
|
||||
|
||||
@pytest.mark.parametrize("template, expected", [
|
||||
("+-{b}", "value a-value b\n"),
|
||||
("{b}-+", "value b-value a\n"),
|
||||
("\\+{b}", "+value b\n"),
|
||||
("{b}\\+", "value b+\n"),
|
||||
("+", "+\n"),
|
||||
("\\+", "\\+\n"),
|
||||
("+\\", "+\\\n"),
|
||||
])
|
||||
def test_i_can_concat_print_instruction_and_register_format(self, capsys, template, expected):
|
||||
sheerka = self.get_sheerka()
|
||||
foo = Obj("value a", "value b")
|
||||
|
||||
sheerka.printer_handler.register_format_l("tests.core.test_sheerka_printer.Obj", "{a}")
|
||||
instructions = FormatInstructions().set_format_l("tests.core.test_sheerka_printer.Obj", template)
|
||||
|
||||
sheerka.print(foo, instructions)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == expected
|
||||
@@ -4,7 +4,7 @@ from core.tokenizer import Tokenizer, Token, TokenKind, LexerError, Keywords
|
||||
|
||||
def test_i_can_tokenize():
|
||||
source = "+*-/{}[]() ,;:.?\n\n\r\r\r\nidentifier_0\t \t10.15 10 'string\n' \"another string\"=|&<>c:name:"
|
||||
source += "$£€!_identifier°~_^\\`#"
|
||||
source += "$£€!_identifier°~_^\\`==#"
|
||||
tokens = list(Tokenizer(source))
|
||||
assert tokens[0] == Token(TokenKind.PLUS, "+", 0, 1, 1)
|
||||
assert tokens[1] == Token(TokenKind.STAR, "*", 1, 1, 2)
|
||||
@@ -52,9 +52,10 @@ def test_i_can_tokenize():
|
||||
assert tokens[43] == Token(TokenKind.CARAT, '^', 106, 6, 48)
|
||||
assert tokens[44] == Token(TokenKind.BACK_SLASH, '\\', 107, 6, 49)
|
||||
assert tokens[45] == Token(TokenKind.BACK_QUOTE, '`', 108, 6, 50)
|
||||
assert tokens[46] == Token(TokenKind.HASH, '#', 109, 6, 51)
|
||||
assert tokens[46] == Token(TokenKind.EQUALSEQUALS, '==', 109, 6, 51)
|
||||
assert tokens[47] == Token(TokenKind.HASH, '#', 111, 6, 53)
|
||||
|
||||
assert tokens[47] == Token(TokenKind.EOF, '', 110, 6, 52)
|
||||
assert tokens[48] == Token(TokenKind.EOF, '', 112, 6, 54)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected", [
|
||||
@@ -74,6 +75,19 @@ def test_i_can_tokenize_identifiers(text, expected):
|
||||
assert comparison == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text", [
|
||||
"123abc",
|
||||
"123",
|
||||
"abc",
|
||||
"abc123"
|
||||
])
|
||||
def test_i_can_parse_word(text):
|
||||
tokens = list(Tokenizer(text, parse_word=True))
|
||||
assert tokens[0].type == TokenKind.WORD
|
||||
assert tokens[0].value == text
|
||||
assert tokens[1].index == len(text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, message, error_text, index, line, column", [
|
||||
("'string", "Missing Trailing quote", "'string", 7, 1, 8),
|
||||
('"string', "Missing Trailing quote", '"string', 7, 1, 8),
|
||||
|
||||
@@ -0,0 +1,317 @@
|
||||
import os
|
||||
|
||||
import pytest
|
||||
from core.builtin_concepts import ParserResultConcept, ReturnValueConcept, BuiltinConcepts
|
||||
from core.concept import Concept
|
||||
from core.sheerka.ExecutionContext import ExecutionContext
|
||||
from evaluators.ExplainEvaluator import ExplainEvaluator
|
||||
from parsers.ExplainParser import ExplanationNode, RecurseDefNode, FormatLNode, UnionNode, FilterNode, FormatDNode
|
||||
from parsers.ExpressionParser import PropertyEqualsNode, PropertyEqualsSequenceNode, TrueNode, IsaNode
|
||||
from printer.FormatInstructions import FormatDetailDesc, FormatDetailType
|
||||
from pytest import fixture
|
||||
from sdp.sheerkaDataProvider import Event
|
||||
from sdp.sheerkaSerializer import Serializer, SerializerContext
|
||||
|
||||
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
|
||||
|
||||
|
||||
@fixture(scope="module")
|
||||
def serializer():
|
||||
"""
|
||||
Return a :class:`sdp.sheerkaSerializer.Serializer` instance for the module
|
||||
"""
|
||||
return Serializer()
|
||||
|
||||
|
||||
class EC:
|
||||
"""
|
||||
Helper to create execution context (AKA execution result)
|
||||
"""
|
||||
|
||||
def __init__(self, children=None, **props):
|
||||
self.props = props
|
||||
self.children = children
|
||||
|
||||
|
||||
def get_return_value(expr):
|
||||
if isinstance(expr, ExplanationNode):
|
||||
value = expr
|
||||
else:
|
||||
value = ExplanationNode("xxx_test_explain_evaluator_xxx", "", expr=expr)
|
||||
|
||||
return ReturnValueConcept(
|
||||
"TestEvaluator",
|
||||
True,
|
||||
ParserResultConcept(parser="parser", value=value))
|
||||
|
||||
|
||||
def create_executions_results(context, list_of_ecs):
|
||||
def update(execution_context, ec):
|
||||
for prop_name, pro_value in ec.props.items():
|
||||
setattr(execution_context, prop_name, pro_value)
|
||||
|
||||
if ec.children:
|
||||
for child_ec in ec.children:
|
||||
child_execution_context = execution_context.push("TestEvaluator")
|
||||
update(child_execution_context, child_ec)
|
||||
|
||||
res = []
|
||||
for ec in list_of_ecs:
|
||||
execution_context = ExecutionContext("TestEvaluator", context.event, context.sheerka)
|
||||
update(execution_context, ec)
|
||||
res.append(execution_context)
|
||||
|
||||
return res
|
||||
|
||||
|
||||
def get_execution_result_from_file(sheerka, digest, serializer):
|
||||
target_path = os.path.join("../_fixture/", digest) + "_result"
|
||||
with open(target_path, "rb") as f:
|
||||
context = SerializerContext(sheerka=sheerka)
|
||||
return serializer.deserialize(f, context)
|
||||
|
||||
|
||||
def get_execution_result_from_list(executions_result):
|
||||
return executions_result
|
||||
|
||||
|
||||
class TestExplainEvaluator(TestUsingMemoryBasedSheerka):
|
||||
|
||||
@staticmethod
|
||||
def init_evaluator_with_file(self, serializer):
|
||||
sheerka = self.get_sheerka()
|
||||
context = self.get_context(sheerka)
|
||||
evaluator = ExplainEvaluator()
|
||||
evaluator.get_execution_result = lambda s, d: get_execution_result_from_file(s, d, serializer)
|
||||
|
||||
return sheerka, context, evaluator
|
||||
|
||||
def init_evaluator_with_list(self, list_of_ecs):
|
||||
sheerka = self.get_sheerka()
|
||||
context = self.get_context(sheerka)
|
||||
evaluator = ExplainEvaluator()
|
||||
|
||||
executions_result = create_executions_results(context, list_of_ecs)
|
||||
evaluator.get_execution_result = lambda s, d: get_execution_result_from_list(executions_result)
|
||||
|
||||
return sheerka, context, evaluator, executions_result
|
||||
|
||||
@pytest.mark.parametrize("ret_val, expected", [
|
||||
(ReturnValueConcept("some_name", True, ParserResultConcept(value=ExplanationNode("", ""))), True),
|
||||
(ReturnValueConcept("some_name", True, ParserResultConcept(value="other thing")), False),
|
||||
(ReturnValueConcept("some_name", False, "not relevant"), False),
|
||||
(ReturnValueConcept("some_name", True, Concept()), False)
|
||||
])
|
||||
def test_i_can_match(self, ret_val, expected):
|
||||
context = self.get_context()
|
||||
assert ExplainEvaluator().matches(context, ret_val) == expected
|
||||
|
||||
def test_i_can_eval_in_list(self, serializer):
|
||||
sheerka, context, evaluator, execution_results = self.init_evaluator_with_list(
|
||||
[
|
||||
EC(desc="correct desc"),
|
||||
EC(desc="wrong desc"),
|
||||
]
|
||||
)
|
||||
ret_val = get_return_value(UnionNode(
|
||||
[
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyEqualsNode("desc", "correct desc")),
|
||||
]))
|
||||
|
||||
res = evaluator.eval(context, ret_val)
|
||||
assert res.status
|
||||
assert sheerka.isinstance(res.body, BuiltinConcepts.EXPLANATION)
|
||||
|
||||
filtered = res.body.body
|
||||
assert filtered == [execution_results[0]]
|
||||
|
||||
def test_i_can_eval_in_children(self):
|
||||
sheerka, context, evaluator, execution_results = self.init_evaluator_with_list(
|
||||
[
|
||||
EC(desc="wrong desc", children=[EC(desc="wrong sub"), EC(desc="good sub")]),
|
||||
EC(desc="wrong desc", children=[EC(desc="good sub")]),
|
||||
]
|
||||
)
|
||||
ret_val = get_return_value(UnionNode(
|
||||
[
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyEqualsNode("desc", "good sub")),
|
||||
]))
|
||||
|
||||
res = evaluator.eval(context, ret_val)
|
||||
assert res.status
|
||||
assert sheerka.isinstance(res.body, BuiltinConcepts.EXPLANATION)
|
||||
|
||||
filtered = res.body.body
|
||||
assert filtered == [
|
||||
execution_results[0].children[1],
|
||||
execution_results[1].children[0],
|
||||
]
|
||||
|
||||
def test_i_can_evaluate_multiple_filter_node(self):
|
||||
sheerka, context, evaluator, execution_results = self.init_evaluator_with_list(
|
||||
[
|
||||
EC(desc="parent1", _id=1, children=[EC(desc="wrong sub"), EC(desc="good sub")]),
|
||||
EC(desc="parent2", children=[EC(desc="wrong sub"), EC(desc="good sub")]),
|
||||
EC(desc="good sub")
|
||||
])
|
||||
ret_val = get_return_value(UnionNode(
|
||||
[
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyEqualsNode("id", "1")),
|
||||
FilterNode(PropertyEqualsNode("desc", "good sub")),
|
||||
]))
|
||||
|
||||
res = evaluator.eval(context, ret_val)
|
||||
assert res.status
|
||||
assert len(res.body) == 2
|
||||
|
||||
assert sheerka.isinstance(res.body[0], BuiltinConcepts.EXPLANATION)
|
||||
assert sheerka.isinstance(res.body[1], BuiltinConcepts.EXPLANATION)
|
||||
|
||||
assert res.body[0].body == [execution_results[0]]
|
||||
assert res.body[1].body == [
|
||||
execution_results[0].children[1],
|
||||
execution_results[1].children[1],
|
||||
execution_results[2]
|
||||
]
|
||||
|
||||
def test_i_can_eval_parent_and_child(self):
|
||||
sheerka, context, evaluator, execution_results = self.init_evaluator_with_list(
|
||||
[
|
||||
EC(desc="parent1", children=[EC(desc="wrong sub"), EC(desc="good sub")]),
|
||||
EC(desc="parent2", children=[EC(desc="wrong sub"), EC(desc="good sub")]),
|
||||
EC(desc="good sub")
|
||||
]
|
||||
)
|
||||
ret_val = get_return_value(UnionNode(
|
||||
[
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyEqualsSequenceNode(["desc", "desc"], ["parent1", "good sub"])),
|
||||
]))
|
||||
|
||||
res = evaluator.eval(context, ret_val)
|
||||
assert res.status
|
||||
assert sheerka.isinstance(res.body, BuiltinConcepts.EXPLANATION)
|
||||
|
||||
filtered = res.body.body
|
||||
assert filtered == [
|
||||
execution_results[0].children[1],
|
||||
]
|
||||
|
||||
def test_i_correctly_create_format_instructions(self):
|
||||
sheerka, context, evaluator, execution_results = self.init_evaluator_with_list([])
|
||||
ret_val = get_return_value(UnionNode(
|
||||
[
|
||||
FilterNode(TrueNode(), [
|
||||
RecurseDefNode(2),
|
||||
FormatLNode("abc"),
|
||||
FormatDNode({"a": "{a}", "b": "{b}"})
|
||||
]),
|
||||
]))
|
||||
|
||||
res = evaluator.eval(context, ret_val)
|
||||
assert res.status
|
||||
assert sheerka.isinstance(res.body, BuiltinConcepts.EXPLANATION)
|
||||
|
||||
instructions = res.body.instructions
|
||||
assert instructions.recursive_props == {"children": 2}
|
||||
assert instructions.format_l == {'core.sheerka.ExecutionContext.ExecutionContext': 'abc'}
|
||||
assert instructions.format_d == [FormatDetailDesc(
|
||||
IsaNode(ExecutionContext),
|
||||
FormatDetailType.Props_In_Line,
|
||||
{"a": "{a}", "b": "{b}"})]
|
||||
|
||||
def test_i_correctly_create_format_instructions_with_filtering(self):
|
||||
sheerka, context, evaluator, execution_results = self.init_evaluator_with_list([])
|
||||
|
||||
ret_val = get_return_value(UnionNode(
|
||||
[
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyEqualsNode("id", "1"), [RecurseDefNode(2), FormatLNode("abc")]),
|
||||
]))
|
||||
res = evaluator.eval(context, ret_val)
|
||||
assert res.status
|
||||
assert sheerka.isinstance(res.body, BuiltinConcepts.EXPLANATION)
|
||||
|
||||
instructions = res.body.instructions
|
||||
assert instructions.format_l == {'core.sheerka.ExecutionContext.ExecutionContext': 'abc'}
|
||||
assert instructions.recursive_props == {"children": 2}
|
||||
|
||||
def test_i_can_have_different_instructions_for_different_filtering(self):
|
||||
sheerka, context, evaluator, execution_results = self.init_evaluator_with_list([])
|
||||
ret_val = get_return_value(UnionNode(
|
||||
[
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyEqualsNode("id", "1"), [RecurseDefNode(2)]),
|
||||
FilterNode(PropertyEqualsNode("desc", "good sub"), [FormatLNode("abc")]),
|
||||
]))
|
||||
|
||||
res = evaluator.eval(context, ret_val)
|
||||
assert res.status
|
||||
assert len(res.body) == 2
|
||||
|
||||
assert res.body[0].instructions.recursive_props == {"children": 2}
|
||||
assert res.body[1].instructions.format_l == {'core.sheerka.ExecutionContext.ExecutionContext': 'abc'}
|
||||
|
||||
def test_filtering_instructions_inherit_from_the_first_filtering_node(self):
|
||||
sheerka, context, evaluator, execution_results = self.init_evaluator_with_list([])
|
||||
ret_val = get_return_value(UnionNode(
|
||||
[
|
||||
FilterNode(TrueNode(), [RecurseDefNode(2)]),
|
||||
FilterNode(PropertyEqualsNode("id", "1"), [RecurseDefNode(1)]),
|
||||
FilterNode(PropertyEqualsNode("desc", "good sub"), [FormatLNode("abc")]),
|
||||
]))
|
||||
|
||||
res = evaluator.eval(context, ret_val)
|
||||
assert res.status
|
||||
assert len(res.body) == 2
|
||||
|
||||
assert res.body[0].instructions.recursive_props == {"children": 1} # overridden
|
||||
|
||||
assert res.body[1].instructions.format_l == {'core.sheerka.ExecutionContext.ExecutionContext': 'abc'}
|
||||
assert res.body[1].instructions.recursive_props == {"children": 2}
|
||||
|
||||
def test_i_can_reuse_a_recorded_digest(self):
|
||||
sheerka, context, evaluator, execution_results = self.init_evaluator_with_list([])
|
||||
expr = UnionNode([FilterNode(TrueNode(), [RecurseDefNode(2)])])
|
||||
|
||||
# need a valid result to test this feature
|
||||
event = Event("fake message")
|
||||
execution_context = ExecutionContext("TestExplainEvaluator", event, sheerka)
|
||||
sheerka.sdp.save_result(execution_context)
|
||||
|
||||
# save another result
|
||||
event2 = Event("fake message")
|
||||
execution_context = ExecutionContext("TestExplainEvaluator", event2, sheerka)
|
||||
sheerka.sdp.save_result(execution_context)
|
||||
|
||||
# digest is recorded during the first call
|
||||
explanation_node = ExplanationNode(event.get_digest(), "", expr=expr, record_digest=True)
|
||||
ret_val = get_return_value(explanation_node)
|
||||
evaluator.eval(context, ret_val)
|
||||
|
||||
# the next call to get_event_digest will load the recorded digest
|
||||
explanation_node = ExplanationNode("", "", expr=expr, record_digest=False) # digest is not provided
|
||||
digest = evaluator.get_event_digest(sheerka, explanation_node)
|
||||
assert digest == event.get_digest()
|
||||
|
||||
# test I can record another digest
|
||||
explanation_node = ExplanationNode(event2.get_digest(), "", expr=expr, record_digest=True)
|
||||
ret_val = get_return_value(explanation_node)
|
||||
evaluator.eval(context, ret_val)
|
||||
|
||||
explanation_node = ExplanationNode("", "", expr=expr, record_digest=False) # digest is not provided
|
||||
digest = evaluator.get_event_digest(sheerka, explanation_node)
|
||||
assert digest == event2.get_digest()
|
||||
|
||||
# test can now reset the recorded digest
|
||||
# (a digest is provided, but record_digest is set to False)
|
||||
explanation_node = ExplanationNode(event.get_digest(), "", expr=expr, record_digest=False)
|
||||
ret_val = get_return_value(explanation_node)
|
||||
evaluator.eval(context, ret_val)
|
||||
|
||||
explanation_node = ExplanationNode("", "", expr=expr, record_digest=False) # digest is not provided
|
||||
digest = evaluator.get_event_digest(sheerka, explanation_node)
|
||||
assert digest is None
|
||||
@@ -1,7 +1,7 @@
|
||||
import pytest
|
||||
|
||||
from core.tokenizer import Tokenizer, TokenKind
|
||||
from parsers.BaseParser import BaseParser
|
||||
from core.tokenizer import Tokenizer, TokenKind, Token
|
||||
from parsers.BaseParser import BaseParser, BaseSplitIterParser
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected_text", [
|
||||
@@ -23,3 +23,45 @@ def test_i_can_get_text_from_tokens(text, expected_text):
|
||||
def test_i_can_get_text_from_tokens_with_custom_switcher(text, custom, expected_text):
|
||||
tokens = list(Tokenizer(text))
|
||||
assert BaseParser.get_text_from_tokens(tokens, custom) == expected_text
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected", [
|
||||
("", ["<eof>"]),
|
||||
("one two -f --file", ["one", "two", "-f", "--file", "<eof>"]),
|
||||
("one 'two three'", ["one", "two three", "<eof>"]),
|
||||
('one "two three"', ["one", "two three", "<eof>"]),
|
||||
('one\\ two three"', ["one two", "three", "<eof>"]),
|
||||
("one 'two\\' three'", ["one", "two' three", "<eof>"]),
|
||||
("one\\\\two three", ["one\\two", "three", "<eof>"]),
|
||||
("one\ntwo three", ["one", "two", "three", "<eof>"]),
|
||||
("one \n two three", ["one", "two", "three", "<eof>"]),
|
||||
("'one \n two' three", ["one \n two", "three", "<eof>"]),
|
||||
("a=b", ["a", "=", "b", "<eof>"]),
|
||||
("a = b", ["a", "=", "b", "<eof>"]),
|
||||
("a==b", ["a", "==", "b", "<eof>"]),
|
||||
("a == b", ["a", "==", "b", "<eof>"]),
|
||||
])
|
||||
def test_i_can_split_using_base_split_iterparser_class(text, expected):
|
||||
parser = BaseSplitIterParser("BaseSplitIterParser", 0)
|
||||
parser.reset_parser(None, text)
|
||||
res = [t.value for t in parser.split()]
|
||||
|
||||
assert res == expected
|
||||
|
||||
|
||||
def test_i_can_test_split_iter_parser_indexes():
|
||||
parser = BaseSplitIterParser("BaseSplitIterParser", 0)
|
||||
text = "one two \n three = ==(),"
|
||||
parser.reset_parser(None, text)
|
||||
res = []
|
||||
while parser.next_token():
|
||||
res.append(parser.get_token())
|
||||
|
||||
assert res[0] == Token(TokenKind.WORD, "one", 0, 1, 1)
|
||||
assert res[1] == Token(TokenKind.WORD, "two", 4, 1, 5)
|
||||
assert res[2] == Token(TokenKind.WORD, "three", 10, 2, 2)
|
||||
assert res[3] == Token(TokenKind.EQUALS, "=", 16, 2, 8)
|
||||
assert res[4] == Token(TokenKind.EQUALSEQUALS, "==", 18, 2, 10)
|
||||
assert res[5] == Token(TokenKind.LPAR, "(", 20, 2, 12)
|
||||
assert res[6] == Token(TokenKind.RPAR, ")", 21, 2, 13)
|
||||
assert res[7] == Token(TokenKind.COMMA, ",", 22, 2, 14)
|
||||
|
||||
@@ -73,7 +73,7 @@ def cprop(concept, prop_name):
|
||||
return concept.compiled[prop_name]
|
||||
|
||||
|
||||
class TestBnfConceptLexerParser(TestUsingMemoryBasedSheerka):
|
||||
class TestBnfNodeParser(TestUsingMemoryBasedSheerka):
|
||||
|
||||
def init(self, concepts, grammar):
|
||||
sheerka = self.get_sheerka(singleton=True)
|
||||
@@ -0,0 +1,205 @@
|
||||
import pytest
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from parsers.BaseParser import UnexpectedTokenErrorNode, UnexpectedEof
|
||||
from parsers.ExplainParser import ExplainParser, ExplanationNode, MultipleDigestError, ValueErrorNode, \
|
||||
RecurseDefNode, FormatLNode, UnionNode, FilterNode, FormatDNode
|
||||
from parsers.ExpressionParser import PropertyContainsNode, PropertyEqualsNode, TrueNode, AndNode, OrNode
|
||||
|
||||
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
|
||||
|
||||
|
||||
class TestExplainParser(TestUsingMemoryBasedSheerka):
|
||||
def init_parser(self, **kwargs):
|
||||
sheerka = self.get_sheerka(singleton=True, **kwargs)
|
||||
context = self.get_context(sheerka)
|
||||
parser = ExplainParser()
|
||||
return sheerka, context, parser
|
||||
|
||||
def test_i_cannot_parse_empty_string(self):
|
||||
sheerka, context, parser = self.init_parser()
|
||||
|
||||
res = parser.parse(context, "")
|
||||
|
||||
assert not res.status
|
||||
assert sheerka.isinstance(res.body, BuiltinConcepts.NOT_FOR_ME)
|
||||
|
||||
def test_i_cannot_parse_if_not_for_me(self):
|
||||
sheerka, context, parser = self.init_parser()
|
||||
|
||||
text = "foo"
|
||||
res = parser.parse(context, text)
|
||||
not_for_me = res.body
|
||||
|
||||
assert not res.status
|
||||
assert sheerka.isinstance(not_for_me, BuiltinConcepts.NOT_FOR_ME)
|
||||
assert not_for_me.body == text
|
||||
assert isinstance(not_for_me.reason[0], UnexpectedTokenErrorNode)
|
||||
|
||||
@pytest.mark.parametrize("text, digest, command, directives", [
|
||||
# ("explain", "", "explain", []),
|
||||
("explain digest", "digest", "explain digest", []),
|
||||
("explain -r 3", "", "explain -r 3", [RecurseDefNode(3)]),
|
||||
("explain digest -r 3", "digest", "explain digest -r 3", [RecurseDefNode(3)]),
|
||||
])
|
||||
def test_i_can_parse_explain_without_filter(self, text, digest, command, directives):
|
||||
sheerka, context, parser = self.init_parser()
|
||||
|
||||
res = parser.parse(context, text)
|
||||
parser_result = res.body
|
||||
explanation_node = res.body.body
|
||||
|
||||
assert res.status
|
||||
assert sheerka.isinstance(parser_result, BuiltinConcepts.PARSER_RESULT)
|
||||
assert parser_result.parser.name == "parsers.Explain"
|
||||
assert parser_result.source == text
|
||||
|
||||
assert explanation_node.digest == digest
|
||||
assert explanation_node.command == command
|
||||
assert explanation_node.expr == UnionNode([FilterNode(TrueNode(), directives)])
|
||||
|
||||
def test_i_can_parse_using_filter(self):
|
||||
sheerka, context, parser = self.init_parser()
|
||||
|
||||
text = "explain -f a=b"
|
||||
res = parser.parse(context, text)
|
||||
parser_result = res.body
|
||||
explanation_node = res.body.body
|
||||
|
||||
assert res.status
|
||||
assert sheerka.isinstance(parser_result, BuiltinConcepts.PARSER_RESULT)
|
||||
assert parser_result.parser.name == "parsers.Explain"
|
||||
assert parser_result.source == text
|
||||
|
||||
assert explanation_node.expr == UnionNode([
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyContainsNode("a", "b"))])
|
||||
|
||||
@pytest.mark.parametrize("text, expected", [
|
||||
("-f a==b", PropertyEqualsNode("a", "b")),
|
||||
("--filter a==b", PropertyEqualsNode("a", "b")),
|
||||
("-f a==b and c=d", AndNode(PropertyEqualsNode("a", "b"), PropertyContainsNode("c", "d"))),
|
||||
("-f a==b or c=d", OrNode(PropertyEqualsNode("a", "b"), PropertyContainsNode("c", "d"))),
|
||||
("-f a==b or c==d and e==f", OrNode(
|
||||
PropertyEqualsNode("a", "b"),
|
||||
AndNode(PropertyEqualsNode("c", "d"), PropertyEqualsNode("e", "f")))),
|
||||
("-f a==b and c==d or e==f", OrNode(
|
||||
AndNode(PropertyEqualsNode("a", "b"), PropertyEqualsNode("c", "d")),
|
||||
PropertyEqualsNode("e", "f"))),
|
||||
("-f (a==b or c==d) and e==f", AndNode(
|
||||
OrNode(PropertyEqualsNode("a", "b"), PropertyEqualsNode("c", "d")),
|
||||
PropertyEqualsNode("e", "f"))),
|
||||
])
|
||||
def test_i_can_parse_filter_expressions(self, text, expected):
|
||||
sheerka, context, parser = self.init_parser()
|
||||
|
||||
res = parser.parse(context, "explain " + text)
|
||||
parser_result = res.body
|
||||
explanation_node = res.body.body
|
||||
expr_node = explanation_node.expr.filters[-1].expr
|
||||
|
||||
assert res.status
|
||||
assert sheerka.isinstance(parser_result, BuiltinConcepts.PARSER_RESULT)
|
||||
assert isinstance(explanation_node, ExplanationNode)
|
||||
|
||||
assert expr_node == expected
|
||||
|
||||
@pytest.mark.parametrize("text, expected", [
|
||||
("-r 2", [
|
||||
FilterNode(TrueNode(), [RecurseDefNode(2)])
|
||||
]),
|
||||
("--format_l 'abc'", [
|
||||
FilterNode(TrueNode(), [FormatLNode('abc')])
|
||||
]),
|
||||
("--format_d 'abc'", [
|
||||
FilterNode(TrueNode(), [FormatDNode({"abc": "{abc}"})])
|
||||
]),
|
||||
("--format_d a,b,c", [
|
||||
FilterNode(TrueNode(), [FormatDNode({"a": "{a}", "b": "{b}", "c": "{c}"})])
|
||||
]),
|
||||
("--format_d a , b , c", [
|
||||
FilterNode(TrueNode(), [FormatDNode({"a": "{a}", "b": "{b}", "c": "{c}"})])
|
||||
]),
|
||||
("-r 2 --format_l 'abc'", [
|
||||
FilterNode(TrueNode(), [RecurseDefNode(2), FormatLNode('abc')])
|
||||
]),
|
||||
("--format_d a, b -r 2", [
|
||||
FilterNode(TrueNode(), [FormatDNode({"a": "{a}", "b": "{b}"}), RecurseDefNode(2)])
|
||||
]),
|
||||
("-f a==b -r 3", [
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyEqualsNode("a", "b"), [RecurseDefNode(3)]),
|
||||
]),
|
||||
("-f a==b --format_l 'abc'", [
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyEqualsNode("a", "b"), [FormatLNode("abc")]),
|
||||
]),
|
||||
("-r 3 -f a==b", [
|
||||
FilterNode(TrueNode(), [RecurseDefNode(3)]),
|
||||
FilterNode(PropertyEqualsNode("a", "b"), []),
|
||||
]),
|
||||
("--format_l 'abc' -f a==b", [
|
||||
FilterNode(TrueNode(), [FormatLNode("abc")]),
|
||||
FilterNode(PropertyEqualsNode("a", "b"), []),
|
||||
]),
|
||||
("-f a==b -f c==d", [
|
||||
FilterNode(TrueNode()),
|
||||
FilterNode(PropertyEqualsNode("a", "b")),
|
||||
FilterNode(PropertyEqualsNode("c", "d"))
|
||||
]),
|
||||
("-r 1 -f a==b -r 2 -f c==d -r 3", [
|
||||
FilterNode(TrueNode(), [RecurseDefNode(1)]),
|
||||
FilterNode(PropertyEqualsNode("a", "b"), [RecurseDefNode(2)]),
|
||||
FilterNode(PropertyEqualsNode("c", "d"), [RecurseDefNode(3)])
|
||||
]),
|
||||
])
|
||||
def test_i_can_parse_other_directives(self, text, expected):
|
||||
sheerka, context, parser = self.init_parser()
|
||||
|
||||
res = parser.parse(context, "explain " + text)
|
||||
parser_result = res.body
|
||||
explanation_node = res.body.body
|
||||
expr_node = explanation_node.expr
|
||||
|
||||
assert res.status
|
||||
assert sheerka.isinstance(parser_result, BuiltinConcepts.PARSER_RESULT)
|
||||
assert isinstance(explanation_node, ExplanationNode)
|
||||
|
||||
assert expr_node.filters == expected
|
||||
|
||||
@pytest.mark.parametrize("text, expected", [
|
||||
("explain -d digest", "digest"),
|
||||
("explain -d", ""),
|
||||
("explain -d -f a=b", "")
|
||||
])
|
||||
def test_i_can_parse_record_digest(self, text, expected):
|
||||
sheerka, context, parser = self.init_parser()
|
||||
|
||||
res = parser.parse(context, text)
|
||||
explanation_node = res.body.body
|
||||
|
||||
assert explanation_node.digest == expected
|
||||
assert explanation_node.record_digest
|
||||
|
||||
@pytest.mark.parametrize("text, expected_error_type", [
|
||||
("explain digest1 digest2", MultipleDigestError),
|
||||
("explain -r", UnexpectedEof),
|
||||
("explain -r foo", ValueErrorNode),
|
||||
("explain -r 1.2", ValueErrorNode),
|
||||
("explain -f -r 1.2", UnexpectedTokenErrorNode),
|
||||
("explain -f", UnexpectedEof),
|
||||
("explain --format_d", UnexpectedEof),
|
||||
("explain --format_l", UnexpectedEof),
|
||||
("explain --format_l -r foo", UnexpectedTokenErrorNode),
|
||||
("explain --format_d -r foo", UnexpectedTokenErrorNode),
|
||||
])
|
||||
def test_i_cannot_parse(self, text, expected_error_type):
|
||||
sheerka, context, parser = self.init_parser()
|
||||
|
||||
res = parser.parse(context, text)
|
||||
error = res.body
|
||||
errors = res.body.body
|
||||
|
||||
assert not res.status
|
||||
assert sheerka.isinstance(error, BuiltinConcepts.ERROR)
|
||||
assert len(errors) == 1
|
||||
assert isinstance(errors[0], expected_error_type)
|
||||
@@ -0,0 +1,103 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
|
||||
from core.concept import Concept
|
||||
from parsers.ExpressionParser import PropertyEqualsNode, PropertyEqualsSequenceNode, PropertyContainsNode, AndNode, \
|
||||
OrNode, NotNode, LambdaNode, IsaNode
|
||||
|
||||
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
|
||||
|
||||
|
||||
@dataclass
|
||||
class Obj:
|
||||
prop_a: object
|
||||
prop_b: object = None
|
||||
prop_c: object = None
|
||||
parent: object = None
|
||||
|
||||
|
||||
class TestExpressionParser(TestUsingMemoryBasedSheerka):
|
||||
|
||||
def test_i_can_test_property_equals(self):
|
||||
node = PropertyEqualsNode("prop_a", "good value")
|
||||
|
||||
assert node.eval(Obj(prop_a="good value"))
|
||||
assert not node.eval(Obj(prop_a="other value"))
|
||||
|
||||
def test_i_can_test_property_equals_for_int(self):
|
||||
node = PropertyEqualsNode("prop_a", "1")
|
||||
|
||||
assert node.eval(Obj(prop_a=1))
|
||||
assert node.eval(Obj(prop_a="1"))
|
||||
|
||||
def test_i_can_test_property_equals_sequence(self):
|
||||
node = PropertyEqualsSequenceNode(["prop_b", "prop_a"], ["good parent", "good child"])
|
||||
|
||||
assert node.eval(Obj(prop_a="good child", parent=Obj(prop_a="Don't care", prop_b="good parent")))
|
||||
assert not node.eval(Obj(prop_a="good child", parent=Obj(prop_a="Don't care", prop_b="wrong parent")))
|
||||
assert not node.eval(Obj(prop_a="good child"))
|
||||
assert not node.eval(Obj(prop_a="wrong child", parent=Obj(prop_a="Don't care", prop_b="good parent")))
|
||||
|
||||
def test_i_can_test_property_contains(self):
|
||||
node = PropertyContainsNode("prop_a", "substring")
|
||||
|
||||
assert node.eval(Obj(prop_a="it contains substring in it"))
|
||||
assert not node.eval(Obj(prop_a="it does not"))
|
||||
|
||||
def test_i_can_test_property_contains_for_int(self):
|
||||
node = PropertyContainsNode("prop_a", "44")
|
||||
|
||||
assert node.eval(Obj(prop_a=123445))
|
||||
assert not node.eval(Obj(prop_a=12435))
|
||||
|
||||
def test_i_can_test_and(self):
|
||||
left = PropertyEqualsNode("prop_a", "good a")
|
||||
right = PropertyEqualsNode("prop_b", "good b")
|
||||
other = PropertyEqualsNode("prop_c", "good c")
|
||||
and_node = AndNode(left, right, other)
|
||||
|
||||
assert and_node.eval(Obj("good a", "good b", "good c"))
|
||||
assert not and_node.eval(Obj("wrong a", "good b", "good c"))
|
||||
assert not and_node.eval(Obj("good a", "wrong b", "good c"))
|
||||
assert not and_node.eval(Obj("good a", "good b", "wrong c"))
|
||||
|
||||
def test_i_can_test_or(self):
|
||||
left = PropertyEqualsNode("prop_a", "good a")
|
||||
right = PropertyEqualsNode("prop_b", "good b")
|
||||
other = PropertyEqualsNode("prop_c", "good c")
|
||||
or_node = OrNode(left, right, other)
|
||||
|
||||
assert or_node.eval(Obj("wrong a", "good b", "good c"))
|
||||
assert or_node.eval(Obj("good a", "wrong b", "good c"))
|
||||
assert or_node.eval(Obj("good a", "good b", "wrong c"))
|
||||
assert not or_node.eval(Obj("wrong a", "wrong b", "wrong c"))
|
||||
|
||||
def test_i_can_test_not(self):
|
||||
node = PropertyEqualsNode("prop_a", "good value")
|
||||
not_node = NotNode(node)
|
||||
|
||||
assert not not_node.eval(Obj(prop_a="good value"))
|
||||
assert not_node.eval(Obj(prop_a="wrong value"))
|
||||
|
||||
def test_i_can_test_lambda_node(self):
|
||||
node = LambdaNode(lambda o: o.prop_a + o.prop_b == "ab")
|
||||
|
||||
assert node.eval(Obj(prop_a="a", prop_b="b"))
|
||||
assert not node.eval(Obj(prop_a="wrong value", prop_b="wrong value"))
|
||||
assert not node.eval(Obj(prop_a="wrong value")) # exception is caught
|
||||
|
||||
def test_i_can_test_isa_node(self):
|
||||
class_node = IsaNode(Obj)
|
||||
assert class_node.eval(Obj(prop_a="value"))
|
||||
assert not class_node.eval(TestExpressionParser())
|
||||
|
||||
concept_node = IsaNode(BuiltinConcepts.RETURN_VALUE)
|
||||
assert concept_node.eval(ReturnValueConcept())
|
||||
assert concept_node.eval(Concept(name="foo", key=BuiltinConcepts.RETURN_VALUE))
|
||||
assert not concept_node.eval(Obj)
|
||||
assert not concept_node.eval(Concept())
|
||||
|
||||
concept_node2 = IsaNode("foo")
|
||||
assert concept_node2.eval(Concept("foo").init_key())
|
||||
assert not concept_node2.eval(Obj)
|
||||
assert not concept_node2.eval(Concept())
|
||||
@@ -191,7 +191,7 @@ class TestMultipleConceptsParser(TestUsingMemoryBasedSheerka):
|
||||
|
||||
@pytest.mark.parametrize("text, expected_source, expected_end", [
|
||||
("True", "True", 0),
|
||||
("1 == 1", "1 == 1", 5),
|
||||
("1 == 1", "1 == 1", 4),
|
||||
("1!xdf", "1", 0),
|
||||
("1", "1", 0),
|
||||
])
|
||||
|
||||
@@ -1,16 +1,15 @@
|
||||
import hashlib
|
||||
|
||||
import pytest
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from datetime import date, datetime
|
||||
from os import path
|
||||
|
||||
import core.utils
|
||||
import pytest
|
||||
from sdp.sheerkaDataProvider import SheerkaDataProvider, Event, SheerkaDataProviderError, \
|
||||
SheerkaDataProviderDuplicateKeyError, SheerkaDataProviderResult, SheerkaDataProviderRef
|
||||
from datetime import date, datetime
|
||||
import shutil
|
||||
import json
|
||||
|
||||
from sdp.sheerkaSerializer import JsonSerializer, Serializer, PickleSerializer
|
||||
import core.utils
|
||||
|
||||
tests_root = path.abspath("../../build/tests")
|
||||
evt_digest = "3a571cb6034ef6fc8d7fe91948d0d29728eed74de02bac7968b0e9facca2c2d7"
|
||||
@@ -1021,14 +1020,14 @@ def test_i_can_set_a_reference(root):
|
||||
def test_i_cannot_set_using_use_ref_and_is_ref():
|
||||
sdp = SheerkaDataProvider("mem://")
|
||||
|
||||
with pytest.raises(SheerkaDataProviderError) as error:
|
||||
with pytest.raises(SheerkaDataProviderError):
|
||||
sdp.set(evt_digest, "entry", ObjWithDigestWithKey("a", "b"), use_ref=True, is_ref=True)
|
||||
|
||||
|
||||
def test_i_cannot_set_using_is_ref_if_obj_is_not_a_dictionary():
|
||||
sdp = SheerkaDataProvider("mem://")
|
||||
|
||||
with pytest.raises(SheerkaDataProviderError) as error:
|
||||
with pytest.raises(SheerkaDataProviderError):
|
||||
sdp.set(evt_digest, "entry", ObjWithDigestWithKey("a", "b"), is_ref=True)
|
||||
|
||||
|
||||
@@ -1435,8 +1434,17 @@ def test_i_can_remove_when_only_one_element(root):
|
||||
def test_i_cannot_remove_if_entry_does_not_exist(root):
|
||||
sdp = SheerkaDataProvider(root)
|
||||
with pytest.raises(IndexError) as e:
|
||||
sdp.remove(evt_digest, "entry")
|
||||
assert str(e) == "entry"
|
||||
sdp.remove(evt_digest, "entry", silent_remove=False)
|
||||
assert str(e.value) == "entry"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("root", [
|
||||
".sheerka",
|
||||
"mem://"
|
||||
])
|
||||
def test_no_exception_is_raise_when_remove_in_silent_mode(root):
|
||||
sdp = SheerkaDataProvider(root)
|
||||
sdp.remove(evt_digest, "entry", silent_remove=True) # default
|
||||
|
||||
|
||||
@pytest.mark.parametrize("root", [
|
||||
@@ -1773,7 +1781,7 @@ def test_i_cannot_modify_a_key_that_does_not_exist(root):
|
||||
|
||||
with pytest.raises(IndexError) as e:
|
||||
sdp.modify(evt_digest, "entry1", "2", "bar")
|
||||
assert str(e) == "entry1.2"
|
||||
assert str(e.value) == "entry1.2"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("root", [
|
||||
@@ -1938,7 +1946,7 @@ def test_i_cannot_get_an_entry_that_does_not_exist(root):
|
||||
assert sdp.get_safe("entry") is None
|
||||
with pytest.raises(IndexError) as e:
|
||||
sdp.get("entry")
|
||||
assert str(e) == "entry"
|
||||
assert str(e.value) == "entry"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("root", [
|
||||
@@ -1952,7 +1960,7 @@ def test_i_cannot_get_a_key_that_does_not_exist(root):
|
||||
assert sdp.get_safe("entry1", "2") is None
|
||||
with pytest.raises(IndexError) as e:
|
||||
sdp.get("entry1", "2")
|
||||
assert str(e) == "entry.1"
|
||||
assert str(e.value) == "entry1.2"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("root", [
|
||||
@@ -2257,3 +2265,50 @@ def test_i_get_safe_object_without_origin(root):
|
||||
from_db_no_origin = sdp.get_safe(result.entry, result.key, load_origin=False)
|
||||
assert from_db_no_origin == obj
|
||||
assert not hasattr(from_db_no_origin, Serializer.ORIGIN)
|
||||
|
||||
|
||||
def test_i_can_get_ref():
|
||||
sdp = SheerkaDataProvider("mem://")
|
||||
obj = ObjDumpJson("my_key", "value1")
|
||||
|
||||
obj_serializer = JsonSerializer(core.utils.get_full_qualified_name(obj))
|
||||
sdp.serializer.register(obj_serializer)
|
||||
|
||||
result = sdp.add(evt_digest, "entry", obj, use_ref=True)
|
||||
|
||||
ref = sdp.get_ref(result.entry, result.key)
|
||||
assert ref == "076f0df0f110c304982242a88088efacce71f361e49f065db75919a7f72c2821"
|
||||
|
||||
|
||||
def test_i_can_get_ref_when_list():
|
||||
sdp = SheerkaDataProvider("mem://")
|
||||
|
||||
obj_serializer = JsonSerializer(core.utils.get_full_qualified_name(ObjDumpJson))
|
||||
sdp.serializer.register(obj_serializer)
|
||||
|
||||
sdp.add(evt_digest, "entry", ObjDumpJson("my_key", "value1"), use_ref=True)
|
||||
result = sdp.add(evt_digest, "entry", ObjDumpJson("my_key", "value2"), use_ref=True)
|
||||
|
||||
ref = sdp.get_ref(result.entry, result.key)
|
||||
assert ref == [
|
||||
"076f0df0f110c304982242a88088efacce71f361e49f065db75919a7f72c2821",
|
||||
"e6bf5b56428cfce0f08c94f2c3625dc3b3a8180d7229eaa9f8aa967fb16e5256"
|
||||
]
|
||||
|
||||
|
||||
def test_i_cannot_get_ref_if_the_saved_item_is_not_a_ref():
|
||||
sdp = SheerkaDataProvider("mem://")
|
||||
obj = ObjDumpJson("my_key", "value1")
|
||||
result = sdp.add(evt_digest, "entry", obj, use_ref=False)
|
||||
|
||||
with pytest.raises(SheerkaDataProviderError) as e:
|
||||
sdp.get_ref(result.entry, result.key)
|
||||
|
||||
assert e.value.args[0] == "Not a reference"
|
||||
assert e.value.obj == f"{result.entry}.{result.key}"
|
||||
|
||||
|
||||
def test_i_cannot_get_ref_if_the_item_does_not_exist():
|
||||
sdp = SheerkaDataProvider("mem://")
|
||||
with pytest.raises(IndexError):
|
||||
sdp.get_ref("fake", "fake")
|
||||
|
||||
Reference in New Issue
Block a user