Fixed #20: I can parse simple concepts

This commit is contained in:
2023-07-09 18:08:47 +02:00
parent ba397b0b72
commit 57f9ce2bbb
44 changed files with 2462 additions and 149 deletions
+4 -3
View File
@@ -18,8 +18,9 @@ class BaseCache:
self._max_size = max_size
self._default = default # default value to return when key is not found. It can be a callable of key
self._extend_exists = extend_exists # search in remote
self._alt_sdp_get = alt_sdp_get # How to get the value when called by alt_sdp
self._sdp = sdp # current instance of SheerkaDataProvider
self._sdp = sdp # How to get the value from the associated database (persisted values)
self._alt_sdp_get = alt_sdp_get # How to get the value when other ontologies
self._lock = RLock()
self._current_size = 0
self._initialized_keys = set() # to keep the list of the keys already requested (using get())
@@ -88,7 +89,7 @@ class BaseCache:
def disable_default(self):
self._default = (lambda sdp, key: NotFound) if self._sdp else (lambda key: NotFound)
def put(self, key: str, value: object, alt_sdp=None):
def put(self, key: str | bool, value: object, alt_sdp=None):
"""
Add a new entry in cache
:param key:
+84 -2
View File
@@ -5,10 +5,20 @@ from common.global_symbols import NotFound
class DictionaryCache(BaseCache):
"""
Kind of all or nothing dictionary database
You can get the values key by by
It's a kind of 'all or nothing' dictionary database
You can get the values key by key
But when you want to put, you must put the whole database
For this reason, alt_sdp is not supported. The top ontology layer contains the whole database
>>> cache = DictionaryCache()
>>> cache.put(True, {"key1": "value1", "key2": "value2"}) # put the whole dictionary
>>> assert cache.copy() == {"key1": "value1", "key2": "value2"}
>>> assert cache.get("key1") == "value1"
>>> cache.put(True, {"key3": "value3"})
>>> assert cache.copy() == {"key1": "value1", "key2": "value2", "key3": "value3"}
>>> cache.put(False, {"key4": "value4"})
>>> assert cache.copy() == {"key4": "value4"}
"""
def auto_configure(self, cache_name):
@@ -86,3 +96,75 @@ class DictionaryCache(BaseCache):
self._current_size = 0
for v in self._cache.values():
self._current_size += len(v) if hasattr(v, "__len__") and not isinstance(v, str) else 1
def add_path(self, path: list, value):
    """
    Use the path (a list of strings) to build a tree inside the cache;
    the leaf of the tree holds the list of all values sharing that path.
    :param path: successive keys, from the root down to the leaf
    :param value: value to append at the leaf
    """
    with self._lock:
        node = self._cache
        for segment in path:
            # setdefault both creates the child node (if missing) and returns it
            node = node.setdefault(segment, {})
        node.setdefault("#values#", []).append(value)
        self._current_size += 1
def remove_path(self, path: list, value):
    """
    Remove a value, and its path if needed
    :param path: successive keys, from the root down to the leaf
    :type path: list
    :param value: value to remove from the leaf's "#values#" list
    :return: None
    """
    parents = []  # visited nodes, innermost first, used for pruning below
    with self._lock:
        current = self._cache
        try:
            for item in path:
                parents.insert(0, current)
                current = current[item]
            current["#values#"].remove(value)
            self._current_size -= 1
        except (KeyError, ValueError):
            # missing path segment, or value not present at the leaf: nothing to remove
            pass
        if "#values#" in current:
            # clean leaf
            if len(current["#values#"]) == 0:
                del current["#values#"]
        # clean tree
        # NOTE(review): nesting reconstructed from a whitespace-stripped diff —
        # the pruning loop is assumed to run under the lock, after the leaf
        # cleanup; confirm against the original file.
        for item in parents:
            to_remove = [k for k, v in item.items() if v == {}]
            for k in to_remove:
                del item[k]
def get_from_path(self, path: list):
    """
    Return the list of values stored at the given path.
    :param path: successive keys, from the root down to the leaf
    :return: the leaf's value list, or NotFound when the path does not exist
    """
    with self._lock:
        node = self._cache
        try:
            for segment in path:
                node = node[segment]
            return node["#values#"]
        except KeyError:
            # a path segment (or the leaf marker) is missing
            return NotFound
+35 -2
View File
@@ -12,6 +12,7 @@ class FastCache:
self.lru = []
self.default = default
self.calls = {}
self.restore_points = []
def __contains__(self, item):
return self.has(item)
@@ -35,6 +36,8 @@ class FastCache:
self.cache[key] = value
self.lru.append(key)
self.calls[key] = 0
if self.restore_points:
self.restore_points[0].append(key)
def has(self, key):
return key in self.cache
@@ -52,6 +55,18 @@ class FastCache:
return NotFound
def remove(self, key):
    """
    Remove an entry: drop the key from the LRU order, from the value
    store and from the per-key call counter.
    Raises (ValueError/KeyError) when the key is unknown.
    :param key: key of the entry to remove
    """
    self.lru.remove(key)
    self.cache.pop(key)
    self.calls.pop(key)
def evict_by_key(self, predicate):
to_remove = []
@@ -60,8 +75,7 @@ class FastCache:
to_remove.append(k)
for k in to_remove:
self.lru.remove(k)
del self.cache[k]
self.remove(k)
def copy(self):
return self.cache.copy()
@@ -69,3 +83,22 @@ class FastCache:
def clear(self):
self.cache.clear()
self.lru.clear()
self.restore_points.clear()
def snapshot(self):
    """
    From now on, all new added key will be recorded
    (so they can be undone by revert_snapshot()).
    :return: None
    """
    # newest restore point first: put() records new keys into restore_points[0]
    self.restore_points.insert(0, [])
def revert_snapshot(self):
    """
    Undo the most recent snapshot: every key recorded since that
    snapshot() call is removed from the cache.
    :return: None
    """
    if not self.restore_points:
        return
    recorded = self.restore_points.pop(0)
    for key in recorded:
        self.remove(key)
+47 -1
View File
@@ -3,7 +3,6 @@ from common.global_symbols import NotFound, Removed
from common.utils import sheerka_deepcopy
class ListCache(BaseCache):
"""
An in memory FIFO cache object
@@ -13,7 +12,11 @@ class ListCache(BaseCache):
def _put(self, key, value, alt_sdp):
if key in self._cache:
if isinstance(self._cache[key], list): # to deal with the case when entry is Removed
self._cache[key].append(value)
else:
self._cache[key] = [value]
else:
self._sync(key)
@@ -64,3 +67,46 @@ class ListCache(BaseCache):
self._cache[new_key][i] = new_value # avoid add and remove in dict
break # only the first one is affected
self._add_to_add(new_key)
def _delete(self, key, value, alt_sdp):
    """
    Delete *value* from the list stored under *key* (or the whole entry
    when value is None), keeping _current_size and the add/remove
    journals consistent.  When the key still exists in a lower ontology
    layer (alt_sdp), the entry is masked with Removed instead of deleted.
    Always returns True.
    NOTE(review): body reconstructed from a whitespace-stripped diff;
    the nesting below is the most plausible reading — confirm against
    the original file.
    """
    if value is None:
        # delete the whole entry
        if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key):
            # key also exists in another layer: mask it with Removed
            # (the conditional covers the whole expression: += (1 - len) or += 1)
            self._current_size += 1 - len(self._cache[key]) if key in self._cache else 1
            self._cache[key] = Removed
            self._add_to_add(key)
        else:
            self._current_size -= len(self._cache[key])
            del self._cache[key]
            self._add_to_remove(key)
    else:
        try:
            self._cache[key].remove(value)
            if len(self._cache[key]) == 0:
                if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key):
                    self._cache[key] = Removed
                    self._add_to_add(key)
                    # self._current_size -= 1 # Do not decrease size, as it's replaced by 'Removed'
                else:
                    del self._cache[key]
                    self._add_to_remove(key)
                    self._current_size -= 1
            else:
                self._add_to_add(key)
                self._current_size -= 1
        except (KeyError, ValueError) as ex:  # NOTE(review): 'ex' is unused
            # key not cached locally (or value absent): fall back to the
            # alternate data provider
            previous = self._alt_sdp_get(alt_sdp, key) if not self._is_cleared and alt_sdp else NotFound
            if previous in (NotFound, Removed):
                return True
            previous = sheerka_deepcopy(previous)
            previous.remove(value)  # will raise a ValueError if value is not in the set
            if len(previous) == 0:
                self._cache[key] = Removed
                self._current_size += 1
            else:
                self._cache[key] = previous
                self._current_size += len(previous)
            self._add_to_add(key)
    return True
+1
View File
@@ -99,6 +99,7 @@ class ListIfNeededCache(BaseCache):
try:
previous = self._cache[key]
if isinstance(previous, list):
if value in previous:
previous.remove(value)
self._cache[key] = previous[0] if len(previous) == 1 else previous
self._current_size -= 1
+3
View File
@@ -163,6 +163,9 @@ def str_concept(t, drop_name=None, prefix="c:"):
:param prefix:
:return:
"""
if t is None:
return ""
if isinstance(t, tuple):
name, id_ = t[0], t[1]
else:
+2
View File
@@ -3,9 +3,11 @@ class BuiltinConcepts:
NEW_CONCEPT = "__NEW_CONCEPT" # when the definition of a new concept is added
UNKNOWN_CONCEPT = "__UNKNOWN_CONCEPT" # Failed to find the requested concept
USER_INPUT = "__USER_INPUT" # user command
PARSER_INPUT = "__PARSER_INPUT" # command that will be parsed
PYTHON_CODE = "__PYTHON_CODE" # command that is parsed
PARSER_RESULT = "__PARSER_RESULT" # incomplete recognition of the concepts
INVALID_CONCEPT = "__INVALID_CONCEPT" # failed to parse concept attributes
EVALUATION_ERROR = "__EVALUATION_ERROR" # failed to evaluate concept
-16
View File
@@ -120,13 +120,11 @@ class Sheerka:
# initialize_pickle_handlers()
self.om = SheerkaOntologyManager(self, root_folder)
# self.builtin_cache, self.builtin_cache_by_class_name = self.get_builtins_classes_as_dict()
self.initialize_bind_methods()
self.initialize_caching()
self.initialize_evaluators()
self.initialize_services()
# self.initialize_builtin_evaluators()
# self.om.init_subscriptions()
event = Event("Initializing Sheerka.", user_id=self.name)
@@ -269,20 +267,6 @@ class Sheerka:
"config": self.config.__dict__
}
def publish(self, context, topic, data=None):
"""
To be removed as it must be part of the EventManager service
:param context:
:type context:
:param topic:
:type topic:
:param data:
:type data:
:return:
:rtype:
"""
pass
def evaluate_user_input(self, command: str, user: User):
self.log.info("Processing '%s' from '%s'", command, user.email)
+10 -5
View File
@@ -27,7 +27,12 @@ class DefinitionType:
class ConceptMetadata:
"""
Static information of the Concept
What is the difference between variable and parameter ?
A variable is an attribute of the concept
A parameter is a variable that must be set upon instantiation
for example :
def concept a plus b def_var a b => a and b are parameters (and also variables)
def concept color def_var color_name => color_name is a variable, but not a parameter
"""
id: str # unique identifier for a concept. The id will never be modified (but the key can)
name: str
@@ -45,8 +50,8 @@ class ConceptMetadata:
autouse: bool # indicates if eval must be automatically called on the concept once validated
bound_body: str # which property must be considered have default value for the concept
props: dict # hashmap of properties, values
variables: tuple # list of concept variables(tuple), with their default values
parameters: tuple # list of variables that are part of the name of the concept
variables: list # list of concept variables(tuple), with their default values
parameters: set # variables that are part of the definition of the concept
digest: str = None
all_attrs: tuple = None
@@ -138,7 +143,7 @@ class Concept:
return True
def __hash__(self):
return self._metadata.digest
return hash(self._metadata.digest)
@property
def id(self):
@@ -198,5 +203,5 @@ class Concept:
except AttributeError:
return NotInit if name in self.all_attrs() else NotFound
def get_runtime_info(self):
def get_runtime_info(self) -> ConceptRuntimeInfo:
return self._runtime_info
+16 -10
View File
@@ -2,7 +2,7 @@ from common.global_symbols import NotInit
from core.ExecutionContext import ContextActions, ExecutionContext
from core.ReturnValue import ReturnValue
from core.concept import DefinitionType
from core.error import ErrorContext, SheerkaException
from core.error import ErrorContext
from evaluators.base_evaluator import EvaluatorEvalResult, EvaluatorMatchResult, OneReturnValueEvaluator
from parsers.BnfDefinitionParser import BnfDefinitionParser
from parsers.ConceptDefinitionParser import ConceptDefinition
@@ -42,7 +42,7 @@ class DefConceptEvaluator(OneReturnValueEvaluator):
try:
concept_def = return_value.value
variables = self._get_variables(context, concept_def)
parameters = None
parameters = {item[0] for item in variables} & set(self._get_possible_vars_from_def(context, concept_def))
if concept_def.definition_type == DefinitionType.BNF:
self._validate_bnf(context, concept_def)
@@ -76,10 +76,9 @@ class DefConceptEvaluator(OneReturnValueEvaluator):
def _get_variables(self, context: ExecutionContext, concept_def: ConceptDefinition):
variables_found = set() # list of names, there is no tuple
definition = concept_def.definition or concept_def.name
possible_vars_from_name = self._get_possible_vars_from_def(context, definition)
possible_parameters_from_name = self._get_possible_vars_from_def(context, concept_def)
possible_vars_from_name_as_set = set(possible_vars_from_name)
possible_parameters_from_name_as_set = set(possible_parameters_from_name)
for part in CONCEPT_PARTS_TO_USE:
# if these possibles variables are referenced in other parts of the definition, they may be variables
part_value = getattr(concept_def, part)
@@ -87,7 +86,7 @@ class DefConceptEvaluator(OneReturnValueEvaluator):
continue
possible_vars_from_part = self._get_possible_vars_from_part(context, part_value)
variables_found.update(possible_vars_from_name_as_set & possible_vars_from_part)
variables_found.update(possible_parameters_from_name_as_set & possible_vars_from_part)
# add variables from add_var
if concept_def.def_var:
@@ -97,24 +96,31 @@ class DefConceptEvaluator(OneReturnValueEvaluator):
# variables are sorted
sorted_vars = []
for possible_var in possible_vars_from_name:
for possible_var in possible_parameters_from_name:
for found in with_default_value:
if possible_var == found[0]:
sorted_vars.append(found)
# force variables from def_var if they were filtered
variables_names = {item[0] for item in sorted_vars}
for item in with_default_value:
if item[0] not in variables_names:
sorted_vars.append(item)
return sorted_vars
@staticmethod
def _get_possible_vars_from_def(context, definition):
def _get_possible_vars_from_def(context, concept_def: ConceptDefinition):
"""
:param context:
:type context:
:param definition:
:type definition:
:param concept_def:
:type concept_def:
:return: list of names
:rtype:
"""
definition = concept_def.definition or concept_def.name
names = (str(t.value) for t in Tokenizer(definition) if t.type in NAMES_TOKEN_TYPES)
possible_vars = filter(lambda x: not context.sheerka.is_a_concept_name(x), names)
+30
View File
@@ -0,0 +1,30 @@
from core.ExecutionContext import ContextActions, ExecutionContext
from core.ReturnValue import ReturnValue
from evaluators.base_evaluator import AllReturnValuesEvaluator, EvaluatorEvalResult, EvaluatorMatchResult
class FilterSuccessful(AllReturnValuesEvaluator):
    """
    Once everything has been evaluated, drop every return value
    whose evaluation was not successful.
    """
    NAME = "FilterSuccessful"

    def __init__(self):
        super().__init__(self.NAME, ContextActions.AFTER_EVALUATION, 80)

    def matches(self, context: ExecutionContext, return_values: list[ReturnValue]) -> EvaluatorMatchResult:
        # Partition by status; filtering only makes sense when both groups
        # are non-empty (otherwise there is nothing to drop, or nothing to keep).
        successful = [rv for rv in return_values if rv.status]
        failed = [rv for rv in return_values if not rv.status]
        return EvaluatorMatchResult(bool(successful) and bool(failed),
                                    {"to_keep": successful, "to_drop": failed})

    def eval(self, context: ExecutionContext,
             evaluation_context: dict,
             return_values: list[ReturnValue]) -> EvaluatorEvalResult:
        # matches() already did the work: just report both partitions.
        return EvaluatorEvalResult(evaluation_context["to_keep"], evaluation_context["to_drop"])
+8
View File
@@ -13,6 +13,10 @@ from parsers.tokenizer import TokenKind
@dataclass()
class PythonErrorNode(ErrorObj):
"""
Error object when failed to parse the source code
Contains the source code and the associated exception found when tried to compile
"""
source: str
exception: Exception
@@ -30,6 +34,10 @@ class PythonErrorNode(ErrorObj):
class PythonParser(OneReturnValueEvaluator):
"""
Tries to parse Python source code
Return Concept(PythonCode) with PythonFragment if python code is recognized
"""
NAME = "PythonParser"
def __init__(self):
+1 -1
View File
@@ -8,7 +8,7 @@ from parsers.ConceptDefinitionParser import ConceptDefinitionParser
class RecognizeDefConcept(OneReturnValueEvaluator):
"""
class the recognize input 'def concept <name> [as <body>] [where <where>] [pre <pre>] [ret <ret>]'
class that recognizes input 'def concept <name> [as <body>] [where <where>] [pre <pre>] [ret <ret>]'
"""
NAME = "RecognizeDefConcept"
+60
View File
@@ -0,0 +1,60 @@
from core.BuiltinConcepts import BuiltinConcepts
from core.ExecutionContext import ContextActions, ExecutionContext
from core.ReturnValue import ReturnValue
from core.concept import Concept
from evaluators.base_evaluator import EvaluatorEvalResult, EvaluatorMatchResult, NotForMe, OneReturnValueEvaluator
from parsers.SimpleParserParser import SimpleConceptsParser
from parsers.state_machine import MetadataToken
class RecognizeSimpleConcept(OneReturnValueEvaluator):
    """
    class that recognizes concepts in the input
    It only focuses on concepts that do not require parameters
    """
    NAME = "RecognizeSimpleConcept"

    def __init__(self):
        super().__init__(self.NAME, ContextActions.PARSING, 80)
        self.parser = SimpleConceptsParser()

    def matches(self, context: ExecutionContext, return_value: ReturnValue) -> EvaluatorMatchResult:
        # only successful values holding a PARSER_INPUT concept are candidates
        return EvaluatorMatchResult(return_value.status and
                                    context.sheerka.isinstance(return_value.value, BuiltinConcepts.PARSER_INPUT))

    def eval(self, context: ExecutionContext,
             evaluation_context: object,
             return_value: ReturnValue) -> EvaluatorEvalResult:
        """
        Parse the input and turn each candidate sequence into either a
        successful ReturnValue (everything recognized) or a failed
        PARSER_RESULT one (some text left unrecognized).
        """
        parser_input = return_value.value.body
        parser_input.reset()  # rewind the token cursor before parsing
        parsed = self.parser.parse(context, parser_input)
        if len(parsed.items) == 0:
            # nothing recognized at all: tell the pipeline this input is not for us
            not_for_me = ReturnValue(self.NAME, False, NotForMe(self.NAME, return_value.value))
            return EvaluatorEvalResult([not_for_me], [])
        new = []
        for sequence in parsed.items:
            instantiated = []
            has_unrecognized = False
            for item in sequence:
                # instantiate the concept
                if isinstance(item, MetadataToken):
                    concept = context.sheerka.newi(item.metadata.id)
                    # keep track of whether the concept was resolved by name, key or id
                    concept.get_runtime_info().info["resolution_method"] = item.resolution_method
                    instantiated.append(concept)
                else:
                    instantiated.append(item.buffer)
                    has_unrecognized = True
            if has_unrecognized:
                # partial recognition: wrap everything in a PARSER_RESULT concept
                parser_result = context.sheerka.newn(BuiltinConcepts.PARSER_RESULT, result=instantiated)
                new.append(ReturnValue(self.NAME, False, parser_result, [return_value]))
            else:
                # remove whitespaces first
                instantiated = [item for item in instantiated if isinstance(item, Concept) or not item.isspace()]
                to_return = instantiated[0] if len(instantiated) == 1 else instantiated
                new.append(ReturnValue(self.NAME, True, to_return, [return_value]))
        return EvaluatorEvalResult(new, [return_value])
@@ -0,0 +1,51 @@
from core.ExecutionContext import ContextActions, ExecutionContext
from core.ReturnValue import ReturnValue
from evaluators.PythonParser import PythonParser
from evaluators.RecognizeSimpleConcept import RecognizeSimpleConcept
from evaluators.base_evaluator import AllReturnValuesEvaluator, EvaluatorEvalResult, EvaluatorMatchResult
class ResolvePythonVsSimpleConcept(AllReturnValuesEvaluator):
    """
    A single long-name concept may be recognized both by PythonParser and
    by RecognizeSimpleConcept; this evaluator resolves that conflict.
    The rule is simple: always prefer the SimpleConceptParser result.
    """
    NAME = "ResolvePythonVsSimpleConcept"

    def __init__(self):
        super().__init__(self.NAME, ContextActions.AFTER_PARSING, 90)

    def matches(self, context: ExecutionContext, return_values: list[ReturnValue]) -> EvaluatorMatchResult:
        """
        Scan the return values; when both PythonParser and
        RecognizeSimpleConcept succeeded, report a match and remember
        which value to keep and which to drop.
        :param context: current execution context
        :param return_values: values produced during parsing
        :return: EvaluatorMatchResult (with the keep/drop partition on match)
        """
        keep = None
        drop = None
        rest = []
        for candidate in return_values:
            if not candidate.status:
                rest.append(candidate)
            elif candidate.who == PythonParser.NAME:
                drop = candidate
            elif candidate.who == RecognizeSimpleConcept.NAME:
                keep = candidate
            else:
                rest.append(candidate)
        if keep and drop:
            return EvaluatorMatchResult(True, {"to_keep": keep, "to_drop": drop, "others": rest})
        return EvaluatorMatchResult(False)

    def eval(self, context: ExecutionContext,
             evaluation_context: dict,
             return_values: list[ReturnValue]) -> EvaluatorEvalResult:
        # keep the simple-concept result (plus untouched values), drop the python one
        kept = [evaluation_context["to_keep"]] + evaluation_context["others"]
        return EvaluatorEvalResult(kept, [evaluation_context["to_drop"]])
+53 -8
View File
@@ -1,13 +1,15 @@
from dataclasses import dataclass
from typing import Any
from core.ExecutionContext import ExecutionContext, ContextActions
from core.ExecutionContext import ContextActions, ExecutionContext
from core.ReturnValue import ReturnValue
from core.error import ErrorObj
@dataclass
class EvaluatorMatchResult:
status: bool
obj: object = None
obj: dict = None
@dataclass
@@ -48,12 +50,11 @@ class OneReturnValueEvaluator(BaseEvaluator):
Evaluate one specific return value
"""
def matches(self, context: ExecutionContext,
return_value: ReturnValue) -> EvaluatorMatchResult:
def matches(self, context: ExecutionContext, return_value: ReturnValue) -> EvaluatorMatchResult:
pass
def eval(self, context: ExecutionContext,
evaluation_context: object,
evaluation_context: dict,
return_value: ReturnValue) -> EvaluatorEvalResult:
pass
@@ -63,11 +64,55 @@ class AllReturnValuesEvaluator(BaseEvaluator):
Evaluates the groups of ReturnValues
"""
def matches(self, context: ExecutionContext,
return_values: list[ReturnValue]) -> EvaluatorMatchResult:
def matches(self, context: ExecutionContext, return_values: list[ReturnValue]) -> EvaluatorMatchResult:
pass
def eval(self, context: ExecutionContext,
evaluation_context: object,
evaluation_context: dict,
return_values: list[ReturnValue]) -> EvaluatorEvalResult:
pass
class MultipleChoices:
    """
    Ordered collection of alternative results.

    Wraps a list so callers can distinguish "several possible
    interpretations" from a plain list value.  Equality and hashing are
    element-wise and order-sensitive.
    """

    def __init__(self, items: list):
        self.items = items

    def __iter__(self):
        return iter(self.items)

    def __len__(self):
        return len(self.items)

    def __repr__(self):
        if self.items:
            return f"MultipleChoices({', '.join([repr(item) for item in self.items])})"
        # fix: this was an f-string with no placeholders (ruff F541)
        return "MultipleChoices( **empty** )"

    def __eq__(self, other):
        if not isinstance(other, MultipleChoices):
            return False
        if len(other.items) != len(self.items):
            return False
        # idiom: all/zip replaces the manual element-by-element loop
        return all(mine == theirs for mine, theirs in zip(self.items, other.items))

    def __hash__(self):
        # consistent with __eq__: based on the (ordered) elements
        return hash(tuple(self.items))
@dataclass
class NotForMe(ErrorObj):
    """
    Returned by an Evaluator after execution when the input was not for it
    """
    who: str  # who issued the NotForMe
    items: Any  # ReturnValue value(s)

    def get_error_msg(self) -> str:
        # human-readable description used by error reporting
        return f"{self.items} is not for '{self.who}'"
+9 -6
View File
@@ -3,9 +3,8 @@ from caching.CacheManager import CacheManager
from caching.DictionaryCache import DictionaryCache
from caching.FastCache import FastCache
from caching.SetCache import SetCache
from common.global_symbols import EVENT_CONCEPT_ID_DELETED, \
EVENT_RULE_ID_DELETED, NotFound, \
Removed
from common.global_symbols import EVENT_CONCEPT_ID_DELETED, EVENT_ONTOLOGY_CREATED, EVENT_ONTOLOGY_DELETED, \
EVENT_RULE_ID_DELETED, NotFound, Removed
from ontologies.Exceptions import OntologyAlreadyExists, OntologyManagerCannotPopLatest, OntologyManagerFrozen, \
OntologyManagerNotFrozen, OntologyNotFound
from sdp.sheerkaDataProvider import SheerkaDataProvider
@@ -84,7 +83,7 @@ class Ontology:
class SheerkaOntologyManager:
ROOT_ONTOLOGY_NAME = "__default__"
SELF_CACHE_MANAGER = "__ontology_manager__" # cache to store SheerkaOntologyManager info
CONCEPTS_BY_ONTOLOGY_ENTRY = "ConceptsByOntologyEntry"
CONCEPTS_BY_ONTOLOGY_ENTRY = "ConceptsByOntologyEntry" # stores concepts id created in ontologies
RULES_BY_ONTOLOGY_ENTRY = "RulesByOntologyEntry"
ONTOLOGY_BY_CONCEPT_ENTRY = "OntologyByConceptEntry"
ONTOLOGY_BY_RULE_ENTRY = "OntologyByRuleEntry"
@@ -156,7 +155,6 @@ class SheerkaOntologyManager:
"""
Add an ontology layer
:param name: name of the layer
:param cache_only:
"""
if not self.frozen:
raise OntologyManagerNotFrozen()
@@ -178,6 +176,8 @@ class SheerkaOntologyManager:
alt_sdp = AlternateSdp(self.ontologies)
new_ontology = Ontology(name, len(self.ontologies), cache_manager, alt_sdp)
self.ontologies.insert(0, new_ontology)
self.sheerka.publish(None, EVENT_ONTOLOGY_CREATED, new_ontology)
return new_ontology
def pop_ontology(self, context):
@@ -206,7 +206,9 @@ class SheerkaOntologyManager:
self.internal_cache_manager.delete(self.ONTOLOGY_BY_RULE_ENTRY, rule)
self.internal_cache_manager.delete(self.RULES_BY_ONTOLOGY_ENTRY, ontology_name)
return self.ontologies.pop(0)
ontology = self.ontologies.pop(0)
self.sheerka.publish(context, EVENT_ONTOLOGY_DELETED, ontology)
return ontology
def add_ontology(self, ontology: Ontology):
"""
@@ -221,6 +223,7 @@ class SheerkaOntologyManager:
for cache_def in ontology.cache_manager.caches.values():
cache_def.cache.reset_initialized_keys()
self.sheerka.publish(None, EVENT_ONTOLOGY_CREATED, ontology)
return self
def revert_ontology(self, context, ontology) -> Ontology:
+10
View File
@@ -90,5 +90,15 @@ class ParserInput:
self.token = self.all_tokens[self.pos]
return True
def clone(self):
    """
    Duplicate this parser input: the token list (and parsing state) is
    shared, but the cursor attributes are copied so each clone can
    advance independently.
    :return: a new ParserInput positioned exactly like this one
    """
    duplicate = ParserInput(self.original_text)
    for attr in ("all_tokens", "exception", "pos", "end", "token"):
        setattr(duplicate, attr, getattr(self, attr))
    return duplicate
def __repr__(self):
    # debug-friendly form: the raw text plus how many tokens it produced
    return f"ParserInput('{self.original_text}', len={len(self.all_tokens)})"
+114
View File
@@ -0,0 +1,114 @@
from core.concept import DefinitionType
from evaluators.base_evaluator import MultipleChoices
from parsers.state_machine import ConceptToRecognize, End, ManageUnrecognized, MetadataToken, PrepareReadTokens, \
ReadConcept, ReadTokens, Start, StateMachine, StateMachineContext, UnrecognizedToken
from parsers.tokenizer import Token, TokenKind, Tokenizer
class SimpleConceptsParser:
    """
    This class parses concepts with no variables
    It parses a sequence of concepts
    """

    def __init__(self):
        # Token-reading workflow: buffer tokens until EOF, forking into the
        # concept workflow whenever a token could start a known concept.
        tokens_wkf = {
            Start("start", next_states=["prepare read tokens"]),
            PrepareReadTokens("prepare read tokens", next_states=["read tokens"]),
            ReadTokens("read tokens", next_states=["read tokens", "eof", "concepts found"]),
            ManageUnrecognized("eof", next_states=["end"]),
            ManageUnrecognized("concepts found", next_states=["#concept_wkf"]),
            End("end", next_states=None)
        }
        # Concept-matching workflow: read one full concept, then go back to
        # the token-reading workflow.
        concept_wkf = {
            Start("start", next_states=["read concept"]),
            ReadConcept("read concept", next_states=["#tokens_wkf"]),
        }
        self.workflows = {
            "#tokens_wkf": {t.name: t for t in tokens_wkf},
            "#concept_wkf": {t.name: t for t in concept_wkf},
        }
        self.error_sink = []  # NOTE(review): not used inside this class — confirm callers

    @staticmethod
    def get_metadata_from_first_token(context, token: Token):
        """
        Return the ConceptToRecognize candidates that could start at *token*:
        either an explicit CONCEPT token (resolved by id or by name), or any
        parameter-less concept whose key or name begins with this token.
        """
        def _get_expected_tokens(_metadata, attr):
            # tokens of the concept's key/name, minus the first (already read)
            return [t.strip_quote for t in Tokenizer(getattr(_metadata, attr), yield_eof=False)][1:]

        if token.type == TokenKind.CONCEPT:
            name, concept_id = token.value
            if concept_id:
                return [ConceptToRecognize(context.sheerka.get_by_id(concept_id), [], "id")]
            else:
                # a name may resolve to one metadata or to a list of them
                metadata = context.sheerka.get_by_name(name)
                return [ConceptToRecognize(metadata, [], "name")] if not isinstance(metadata, list) else \
                    [ConceptToRecognize(m, [], "name") for m in metadata]
        # only DEFAULT-type, parameter-less concepts are candidates by key
        concepts_by_key = [ConceptToRecognize(m, _get_expected_tokens(m, "key"), "key")
                           for m in context.sheerka.get_metadatas_from_first_token("key", token.value)
                           if m.definition_type == DefinitionType.DEFAULT and len(m.parameters) == 0]
        concepts_by_name = [ConceptToRecognize(m, _get_expected_tokens(m, "name"), "name")
                            for m in context.sheerka.get_metadatas_from_first_token("name", token.value)]
        return concepts_by_key + concepts_by_name

    def parse(self, context, parser_input):
        """Run the state machine on *parser_input* and return the best parses."""
        sm = StateMachine(self.workflows)
        sm_context = StateMachineContext(context, parser_input, self.get_metadata_from_first_token)
        sm.run("#tokens_wkf", "start", sm_context)
        selected = self.select_best_paths(sm)
        return MultipleChoices(selected)

    def select_best_paths(self, sm):
        """
        Returns a list of sequence: the error-free paths sharing the best
        score (ties are all kept; a path must score at least 1 to be kept)
        :param sm: the StateMachine after run()
        :type sm: StateMachine
        :return: list of result sequences
        :rtype: list
        """
        selected = []
        best_score = 1  # baseline: zero-score paths are never selected
        for path in sm.paths:
            if path.execution_context.errors:
                continue
            score = self._compute_path_score(path)
            if score > best_score:
                selected.clear()
                selected.append(path.execution_context.result)
                best_score = score
            elif score == best_score:
                selected.append(path.execution_context.result)
        return selected

    @staticmethod
    def _compute_path_score(path):
        """
        To compute the score of a path
        We look at the MetadataToken, that represent the concepts that are recognized
        The first idea was to look at the concepts that use the maximum of token in a row
        example :
            Concept("I am a concept") is better than Concept("I am") + Unrecognized(" a concept")
        but :
            Concept("one two") should be equivalent to Concept("one") followed by Concept("two")
        :param path: an execution path of the state machine
        :type path:
        :return: the path score (higher is better)
        :rtype: int
        """
        score = 0
        for token in path.execution_context.result:
            if isinstance(token, MetadataToken):
                # one point per input token covered by the recognized concept
                score += token.end - token.start + 1
            elif isinstance(token, UnrecognizedToken) and token.buffer.isspace():
                # whitespace-only gaps between concepts do not penalize the path
                score += len(token.buffer)
        return score
+1 -1
View File
@@ -83,7 +83,7 @@ class KeywordNotFound(ErrorObj):
@dataclass()
class UnexpectedEof(ErrorObj):
keyword: str
keyword: str # expected keyword or token
last_token: Token | None
def get_error_msg(self):
+332
View File
@@ -0,0 +1,332 @@
from dataclasses import dataclass, field
from typing import Any, Literal
from common.utils import str_concept
from core.ExecutionContext import ExecutionContext
from core.concept import ConceptMetadata
from parsers.ParserInput import ParserInput
from parsers.parser_utils import UnexpectedEof, UnexpectedToken, get_text_from_tokens
from parsers.tokenizer import Token
@dataclass
class MetadataToken:
    """
    Class that represents a text that is recognized as a concept
    We keep track of the start and the end position
    """
    metadata: ConceptMetadata  # the concept definition that was recognized
    start: int  # position of the first token covered
    end: int  # position of the last token covered
    resolution_method: Literal["name", "key", "id"]  # attribute used to resolve the concept
    parser: str  # which parser produced this token

    def __repr__(self):
        return f"(MetadataToken metadata={str_concept(self.metadata, drop_name=True)}, " + \
               f"start={self.start}, end={self.end}, method={self.resolution_method}, origin={self.parser})"

    def __eq__(self, other):
        # resolution_method is not part of equality (kept consistent with __hash__):
        # the same span resolved by name or by key compares equal
        if not isinstance(other, MetadataToken):
            return False
        return self.metadata.id == other.metadata.id \
            and self.start == other.start \
            and self.end == other.end \
            and self.parser == other.parser

    def __hash__(self):
        # same fields as __eq__, resolution_method excluded
        return hash((self.metadata.id, self.start, self.end, self.parser))
@dataclass
class UnrecognizedToken:
    """
    Class that represents a text that is not recognized
    We keep track of the start and the end position
    """
    buffer: str  # the raw, unrecognized text
    start: int  # position of the first token of the span
    end: int  # position of the last token of the span
@dataclass
class StateResult:
    # Outcome of a State.run() call.
    next_state: str | None  # name of the state to enter next; None terminates the path
    forks: list = None  # optional list of (state name, StateMachineContext) tuples to explore
@dataclass
class ConceptToRecognize:
    """
    Holds information about the concept to recognize
    """
    metadata: ConceptMetadata  # candidate concept definition
    expected_tokens: list  # remaining tokens that must match for full recognition
    resolution_method: Literal["name", "key", "id"]  # which attribute was used to resolve the concept
@dataclass
class StateMachineContext:
    # Mutable state threaded through the parsing state machine (one per path).
    context: ExecutionContext  # global execution context (gives access to sheerka)
    parser_input: ParserInput  # token stream being consumed
    get_metadata_from_first_token: Any  # callable(context, token) -> candidate concepts
    buffer: list[Token] = field(default_factory=list)  # tokens read but not yet classified
    buffer_start_pos: int = -1  # position of the first buffered token
    concept_to_recognize: ConceptToRecognize | None = None  # candidate currently being matched
    result: list = field(default_factory=list)  # MetadataToken / UnrecognizedToken sequence
    errors: list = field(default_factory=list)  # parse errors collected along this path

    def get_clones(self, concepts_to_recognize):
        """
        Fork this context once per candidate concept: the parser input and
        the accumulated lists are copied so each fork evolves independently.
        NOTE(review): arguments are positional — keep them in field order.
        """
        return [StateMachineContext(self.context,
                                    self.parser_input.clone(),
                                    self.get_metadata_from_first_token,
                                    self.buffer.copy(),
                                    self.buffer_start_pos,
                                    concept,
                                    self.result.copy(),
                                    self.errors.copy())
                for concept in concepts_to_recognize]

    def to_debug(self):
        """Compact snapshot of this context, for path history / debugging."""
        return {"pos": self.parser_input.pos,
                "token": self.parser_input.token,
                "buffer": [token.value for token in self.buffer],
                "concept": str_concept(self.concept_to_recognize.metadata) if self.concept_to_recognize else None,
                "result": self.result.copy()}
class State:
    """
    Base class for the state-machine states: a named node with the list
    of states it may transition to.  Subclasses implement run().
    """

    def __init__(self, name, next_states):
        self.name = name
        self.next_states = next_states

    def run(self, state_context: StateMachineContext) -> StateResult:
        # to be overridden by concrete states
        pass

    @staticmethod
    def get_forks(next_state, states_contexts: list[StateMachineContext]):
        """
        Create one fork item for every state context.
        :param next_state: state name every fork will enter
        :param states_contexts: one cloned context per fork
        :return: list of (next_state, context) tuples
        """
        forks = []
        for forked_context in states_contexts:
            forks.append((next_state, forked_context))
        return forks

    def __repr__(self):
        return "(State '{}' -> {})".format(self.name, self.next_states)
class Start(State):
    """Entry state: immediately transitions to its single successor."""

    def run(self, state_context) -> StateResult:
        # Start state
        # give some logs and ask for the next state
        return StateResult(self.next_states[0])

    def __repr__(self):
        return f"(StartState '{self.name}' -> '{self.next_states[0]}')"
class PrepareReadTokens(State):
    """Reset the token buffer before a new read-tokens phase."""

    def run(self, state_context: StateMachineContext) -> StateResult:
        state_context.buffer.clear()
        # the next token to be read will start the new buffer
        state_context.buffer_start_pos = state_context.parser_input.pos + 1
        return StateResult(self.next_states[0])
class ReadTokens(State):
    """
    Consume tokens one by one, buffering them; every token that could start
    a known concept forks the path (one 'concepts found' branch per candidate).
    """

    def run(self, state_context) -> StateResult:
        if not state_context.parser_input.next_token(False):
            # NOTE(review): the 'eof' successor is hard-coded by name here
            # rather than taken from next_states
            return StateResult("eof")
        # try to get the possible concepts to recognize
        concepts = state_context.get_metadata_from_first_token(state_context.context,
                                                               state_context.parser_input.token)
        forks = self.get_forks("concepts found", state_context.get_clones(concepts)) if concepts else None
        state_context.buffer.append(state_context.parser_input.token)
        # loop on this same state, possibly with forks to explore
        return StateResult(self.name, forks)
class ManageUnrecognized(State):
    """
    Flush the token buffer as an UnrecognizedToken, merging it with the
    previous unrecognized span when they are adjacent in the result list.
    """

    def run(self, state_context) -> StateResult:
        if state_context.buffer:
            buffer_as_str = get_text_from_tokens(state_context.buffer)
            if len(state_context.result) > 0 and isinstance(old := state_context.result[-1], UnrecognizedToken):
                # extend the previous unrecognized span instead of adding a new one
                state_context.result[-1] = UnrecognizedToken(old.buffer + buffer_as_str,
                                                             old.start,
                                                             state_context.parser_input.pos - 1)
            else:
                state_context.result.append(UnrecognizedToken(buffer_as_str,
                                                              state_context.buffer_start_pos,
                                                              state_context.parser_input.pos - 1))
        return StateResult(self.next_states[0])
class ReadConcept(State):
    """Try to match the remaining expected tokens of the candidate concept.

    On premature end of input or a token mismatch, an error is recorded and
    the candidate is dropped; on success a MetadataToken is appended to the
    result. In every case the candidate slot is cleared before transitioning.
    """

    def run(self, state_context) -> StateResult:
        candidate = state_context.concept_to_recognize
        start = state_context.parser_input.pos
        for expected in candidate.expected_tokens:
            if not state_context.parser_input.next_token(False):
                # Input exhausted before the concept was fully recognized.
                state_context.errors.append(UnexpectedEof(expected, state_context.parser_input.token))
                state_context.concept_to_recognize = None
                return StateResult(self.next_states[0])
            token = state_context.parser_input.token
            if token.value != expected:
                # The next token is not the one the concept requires.
                state_context.errors.append(UnexpectedToken(token, expected))
                state_context.concept_to_recognize = None
                return StateResult(self.next_states[0])
        # Every expected token matched: record the recognized concept.
        state_context.result.append(MetadataToken(candidate.metadata,
                                                  start,
                                                  state_context.parser_input.pos,
                                                  candidate.resolution_method,
                                                  "simple"))
        state_context.concept_to_recognize = None
        return StateResult(self.next_states[0])
class End(State):
    """Terminal state: returning no next state ends the execution path."""

    def run(self, state_context) -> StateResult:
        # A None next_state tells the state machine this path is finished.
        return StateResult(None)

    def __repr__(self):
        return f"(EndState '{self.name}')"
@dataclass
class ExecutionPathHistory:
from_state: str
execution_context_debug: dict
to_state: str = ""
forks: list[tuple] = None
parents: list = None
def clone(self, parent_path_id):
parents = self.parents.copy() if self.parents else []
parents.append(parent_path_id)
return ExecutionPathHistory(self.from_state,
self.execution_context_debug.copy(),
self.to_state,
self.forks.copy() if self.forks else None,
parents)
def __repr__(self):
return "History(from '{0}', to '{1}', using {2}, forks={3}, parents={4}".format(
self.from_state,
self.to_state,
self.execution_context_debug,
len(self.forks) if self.forks else 0,
self.parents)
@dataclass
class ExecutionPath:
    """One branch of the state-machine execution, with its own context and trace."""

    path_id: int
    execution_context: Any
    current_workflow: str
    current_state: str
    history: list[ExecutionPathHistory]
    ended: bool = False

    def clone(self, path_id, new_execution_path, new_workflow, new_state):
        """Build a forked path, duplicating the history with this path as parent."""
        cloned_history = [entry.clone(self.path_id) for entry in self.history]
        return ExecutionPath(path_id,
                             new_execution_path,
                             new_workflow,
                             new_state,
                             cloned_history,
                             self.ended)

    def __repr__(self):
        return f"(Path id={self.path_id}, workflow='{self.current_workflow}', state='{self.current_state}')"

    def get_audit_trail(self):
        """List the 'workflow:state' names this path went through, in order."""
        return [entry.from_state for entry in self.history]
class StateMachine:
    """Drive a set of workflows (mappings of state name -> State), with forking.

    Each fork creates a new ExecutionPath; run() loops until every path has
    ended. A history entry is recorded for each transition, for traceability.
    """
    def __init__(self, workflows):
        # workflows: mapping of workflow name -> mapping of state name -> State
        self.workflows = workflows
        self.paths = None  # list of ExecutionPath, (re)built by run()
        self.last_path_id = -1  # last allocated path id
    def run(self, workflow_name: str, state_name: str, execution_context):
        """
        Run the workflow from the state given in parameter
        :param workflow_name: name of the workflow to start in
        :type workflow_name:
        :param state_name: name of the initial state inside that workflow
        :type state_name:
        :param execution_context: context object passed to every state's run()
        :type execution_context:
        :return:
        :rtype:
        """
        self.last_path_id = -1  # reset the path ids
        self.paths = [ExecutionPath(self._get_new_path_id(),
                                    execution_context,
                                    workflow_name,
                                    state_name,
                                    [],
                                    False)]
        while True:
            # keep stepping only the paths that have not ended yet
            to_review = [p for p in self.paths if not p.ended]
            if len(to_review) == 0:
                break
            for path in to_review:
                # add traceability (recorded BEFORE the state runs; to_state/forks filled later)
                history = ExecutionPathHistory(f"{path.current_workflow}:{path.current_state}",
                                               path.execution_context.to_debug())
                path.history.append(history)
                current_state = self.workflows[path.current_workflow][path.current_state]
                res = current_state.run(path.execution_context)
                if res.next_state is None:
                    # a None next_state marks the end of this path
                    path.ended = True
                    continue  # not possible to fork !
                path.current_workflow, path.current_state = self._compute_next_workflow_and_state(path.current_workflow,
                                                                                                 res.next_state)
                # update traceability
                history.to_state = f"{path.current_workflow}:{path.current_state}"
                # add forks: clone the current path once per fork descriptor;
                # new paths are picked up by the next to_review pass
                if res.forks:
                    new_paths = []
                    for next_state, next_execution_context in res.forks:
                        next_workflow, next_state = self._compute_next_workflow_and_state(path.current_workflow,
                                                                                         next_state)
                        new_paths.append(path.clone(self._get_new_path_id(),
                                                    next_execution_context,
                                                    next_workflow,
                                                    next_state))
                    self.paths.extend(new_paths)
                    history.forks = [p.path_id for p in new_paths]
    def _get_new_path_id(self):
        # Path ids are sequential, starting at 0 after each run().
        self.last_path_id += 1
        return self.last_path_id
    @staticmethod
    def _compute_next_workflow_and_state(workflow, state):
        # A state name prefixed with '#' is a jump to another workflow's 'start' state.
        if state.startswith("#"):
            return state, "start"
        else:
            return workflow, state
+116 -16
View File
@@ -1,20 +1,22 @@
import ast
from dataclasses import dataclass
from caching.FastCache import FastCache
from common.ast_utils import WhereConstraintVisitor
from common.global_symbols import CustomType, NotFound, NotInit
from common.global_symbols import CustomType, EVENT_ONTOLOGY_CREATED, EVENT_ONTOLOGY_DELETED, NotFound, NotInit
from core.BuiltinConcepts import BuiltinConcepts
from core.ExecutionContext import ContextActions, ExecutionContext
from core.ReturnValue import ReturnValue
from core.concept import Concept, ConceptDefaultProps, ConceptDefaultPropsAttrs, ConceptMetadata
from core.error import ErrorObj, SheerkaException
from core.error import ErrorConcepts, ErrorObj, SheerkaException
from core.python_fragment import PythonFragment
from services.BaseService import BaseService
from services.SheerkaPython import EvalMethod, EvaluationContext, EvaluationRef, MultipleResults
from services.SheerkaPython import ConceptRef, EvalMethod, EvaluationContext, MultipleResults, ObjectRef
PARSING_STEPS = [
ContextActions.BEFORE_PARSING,
ContextActions.PARSING,
ContextActions.AFTER_PARSING,
]
CONDITIONAL_ATTR = [ConceptDefaultProps.WHERE, ConceptDefaultProps.PRE]
@@ -48,6 +50,14 @@ class PredicateIsFalse(ErrorObj):
return f"Failed to match condition '{self.predicate}' with namespace {self.namespace}."
class ConceptEvalError(ErrorObj):
def __init__(self, message):
self.message = message
def get_error_msg(self) -> str:
return self.message
@dataclass
class InfiniteRecursion(ErrorObj):
"""
@@ -56,6 +66,19 @@ class InfiniteRecursion(ErrorObj):
ids: list
@dataclass
class TooManySuccess(ErrorObj):
values: list
@dataclass
class TooManyErrors(ErrorObj):
values: list
def get_error_msg(self) -> str:
return "\n".join([e.get_error_msg() for e in self.values])
@dataclass
class PredicateIsTrue:
"""
@@ -78,12 +101,16 @@ class ConceptEvaluator(BaseService):
def __init__(self, sheerka):
super().__init__(sheerka)
self.compiled_cache = FastCache()
self.where_constraints_cache = FastCache(default=None)
self.compiled_cache = FastCache(max_size=2048)
self.where_constraints_cache = FastCache(max_size=2048)
def initialize(self):
self.sheerka.bind_service_method(self.NAME, self.evaluate_concept, True)
def initialize_deferred(self, context, first_time):
self.sheerka.subscribe(EVENT_ONTOLOGY_CREATED, self._on_ontology_created)
self.sheerka.subscribe(EVENT_ONTOLOGY_DELETED, self._on_ontology_removed)
def evaluate_concept(self, context: ExecutionContext,
concept: Concept,
hints: ConceptEvaluationHints = None):
@@ -141,11 +168,20 @@ class ConceptEvaluator(BaseService):
compiled = ConceptCompiled()
with context.push(self.NAME, ContextActions.BUILD_CONCEPT, {"metadata": action_context}) as sub_context:
variables = {k for k, v in metadata.variables}
for attr, source_code in action_context.items():
if source_code is None or source_code == "":
setattr(compiled, attr, None)
continue
if source_code in variables:
# Reference to internal variable
python_fragment = self._ensure_python_fragment(context, source_code)
setattr(compiled, attr, python_fragment)
continue
# else, we need to parse the source code
with sub_context.push(self.NAME, ContextActions.BUILD_CONCEPT_ATTR, {"attr": attr}) as attr_context:
start = ReturnValue(self.NAME,
True,
@@ -156,15 +192,17 @@ class ConceptEvaluator(BaseService):
ret = sheerka.execute(attr_context, [start], PARSING_STEPS)
attr_context.add_values(return_values=ret)
value = ret[0].value
only_successful = self._only_one_successful(ret)
value = self._ensure_python_fragment(context, only_successful)
if isinstance(value, ErrorObj):
setattr(compiled, attr, value)
compiled.errors[attr] = value.get_error_msg()
else:
# Add reference to internal variables
python_fragment = value.pf
python_fragment = value
for k, v in metadata.variables:
python_fragment.namespace[k] = EvaluationRef("self", k)
python_fragment.namespace[k] = ObjectRef("self", k)
setattr(compiled, attr, python_fragment)
@@ -225,10 +263,8 @@ class ConceptEvaluator(BaseService):
if (attr_constraints := self._get_where_constraints(concept, attr)) is not None:
res = self._apply_attr_constraints(context, attr_constraints, attr, res)
if isinstance(res, ErrorObj):
if isinstance(res, ErrorObj) or isinstance(res, Concept) and res.name in ErrorConcepts:
errors[attr] = res
concept.set_value(attr, NotInit)
res = NotInit
concept.set_value(attr, res)
@@ -244,20 +280,84 @@ class ConceptEvaluator(BaseService):
concept.get_runtime_info().is_evaluated = True
if errors:
if context.sheerka.isinstance(error_in_body := concept.body, BuiltinConcepts.EVALUATION_ERROR):
# if the body is an 'EVALUATION_ERROR', it needs to be propagated.
# There is no need to create a new EVALUATION_ERROR concept
concept.get_runtime_info().error = error_in_body.reason
return error_in_body
elif errors:
# if some new errors are detected, We must return an EVALUATION_ERROR concept
error_concept = sheerka.newn(BuiltinConcepts.EVALUATION_ERROR, concept=concept, reason=errors)
concept.get_runtime_info().error = errors
return error_concept
elif context.sheerka.isinstance(error_in_body := concept.body, BuiltinConcepts.EVALUATION_ERROR):
# if the body is an 'evaluation_error', it needs to be propagated
concept.get_runtime_info().error = error_in_body.reason
return error_in_body
if (ret := concept.get_value(ConceptDefaultProps.RET)) is NotInit:
return concept
else:
return ret
def _on_ontology_created(self, context, ontology):
self.compiled_cache.snapshot()
self.where_constraints_cache.snapshot()
def _on_ontology_removed(self, context, ontology):
self.compiled_cache.revert_snapshot()
self.where_constraints_cache.revert_snapshot()
@staticmethod
def _ensure_python_fragment(context, obj) -> PythonFragment | ErrorObj:
"""
We can evaluate only python code
Concepts found must be transformed into python fragment of code
The python fragment will be an identifier
and the real value of the concept will be stored in the namespace of the PythonFragment
:param obj:
:type obj:
:return:
:rtype:
"""
if isinstance(obj, (ErrorObj, PythonFragment)):
return obj
if context.sheerka.isinstance(obj, BuiltinConcepts.PYTHON_CODE):
return obj.pf
if isinstance(obj, (Concept, MultipleResults)):
concept_ref = f"__REF__{id(obj)}"
ast_tree = ast.parse(concept_ref, "<user input>", 'eval')
ref = ConceptRef(obj) if isinstance(obj, Concept) else MultipleResults(*(ConceptRef(o) for o in obj.items))
return PythonFragment(concept_ref, ast_tree=ast_tree, namespace={concept_ref: ref})
if isinstance(obj, str):
ast_tree = ast.parse(obj, "<user input>", 'eval')
return PythonFragment(obj, ast_tree=ast_tree, namespace={obj: ObjectRef("self", obj)})
return ConceptEvalError(f"Cannot process intput '{obj}'")
@staticmethod
def _only_one_successful(return_values):
"""
After parsing the source code, we may find multiple possible results
First, disqualify all failed return values.
Return MultipleResults if we cannot find out which one to choose
:param return_values:
:type return_values:
:return:
:rtype:
"""
if len(return_values) == 1:
return return_values[0].value
only_successful = [r for r in return_values if r.status]
if len(only_successful) == 1:
return only_successful[0].value
if len(only_successful) > 1:
# TODO: make sure those are concepts !
return MultipleResults(*(r.value for r in only_successful))
return TooManyErrors([r.value for r in return_values])
@staticmethod
def _detect_recursion(context, current_concept_id):
ids = []
+131 -18
View File
@@ -1,9 +1,11 @@
import hashlib
import logging
from dataclasses import dataclass
from typing import Literal
from caching.Cache import Cache
from caching.FastCache import FastCache
from caching.ListCache import ListCache
from caching.ListIfNeededCache import ListIfNeededCache
from common.global_symbols import NotFound, NotInit, VARIABLE_PREFIX
from common.utils import get_logger_name, unstr_concept
@@ -41,10 +43,24 @@ class InvalidBnf(ErrorObj):
@dataclass
class FirstItemError(ErrorObj):
class NoFirstItemError(ErrorObj):
pass
@dataclass
class ConceptRef:
concept: Concept
def __eq__(self, other):
if not isinstance(other, ConceptRef):
return False
return self.concept.id == other.concept.id
def __hash__(self):
return hash(self.concept.id)
class ConceptManager(BaseService):
"""
The service is used for the administration of concepts
@@ -60,7 +76,10 @@ class ConceptManager(BaseService):
CONCEPTS_BY_ID_ENTRY = "ConceptManager:Concepts_By_ID" # to store all the concepts
CONCEPTS_BY_KEY_ENTRY = "ConceptManager:Concepts_By_Key"
CONCEPTS_BY_NAME_ENTRY = "ConceptManager:Concepts_By_Name"
CONCEPTS_BY_HASH_ENTRY = "ConceptManager:Concepts_By_Hash" # sto
CONCEPTS_BY_HASH_ENTRY = "ConceptManager:Concepts_By_Hash"
CONCEPT_BY_FIRST_TOKEN_IN_KEY = "ConceptManager:Concepts_By_First_Token_In_Key"
CONCEPT_BY_FIRST_TOKEN_IN_NAME = "ConceptManager:Concepts_By_First_Token_In_Name"
def __init__(self, sheerka):
super().__init__(sheerka, order=11)
@@ -78,7 +97,9 @@ class ConceptManager(BaseService):
self.sheerka.bind_service_method(self.NAME, self.get_by_name, False)
self.sheerka.bind_service_method(self.NAME, self.get_by_id, False)
self.sheerka.bind_service_method(self.NAME, self.get_by_key, False)
self.sheerka.bind_service_method(self.NAME, self.get_by_digest, False)
self.sheerka.bind_service_method(self.NAME, self.is_a_concept_name, False)
self.sheerka.bind_service_method(self.NAME, self.get_metadatas_from_first_token, False)
register_concept_cache = self.sheerka.om.register_concept_cache
@@ -95,6 +116,12 @@ class ConceptManager(BaseService):
cache = ListIfNeededCache().auto_configure(self.CONCEPTS_BY_HASH_ENTRY)
register_concept_cache(self.CONCEPTS_BY_HASH_ENTRY, cache, lambda c: c.digest, True)
cache = ListCache().auto_configure(self.CONCEPT_BY_FIRST_TOKEN_IN_KEY)
self.sheerka.om.register_cache(self.CONCEPT_BY_FIRST_TOKEN_IN_KEY, cache)
cache = ListCache().auto_configure(self.CONCEPT_BY_FIRST_TOKEN_IN_NAME)
self.sheerka.om.register_cache(self.CONCEPT_BY_FIRST_TOKEN_IN_NAME, cache)
def initialize_deferred(self, context, is_first_time):
if is_first_time:
self.sheerka.om.put(self.sheerka.OBJECTS_IDS_ENTRY, self.USER_CONCEPTS_IDS, 1000)
@@ -102,12 +129,13 @@ class ConceptManager(BaseService):
_ = self._create_builtin_concept
_(1, BuiltinConcepts.SHEERKA, desc="Sheerka")
_(2, BuiltinConcepts.NEW_CONCEPT, desc="On new concept creation", variables=("metadata",))
_(3, BuiltinConcepts.UNKNOWN_CONCEPT, desc="Unknown concept", variables=("requested_name", "requested_id"))
_(3, BuiltinConcepts.UNKNOWN_CONCEPT, desc="Unknown concept", variables=("requested",))
_(4, BuiltinConcepts.USER_INPUT, desc="Any external input", variables=("command",))
_(5, BuiltinConcepts.PARSER_INPUT, desc="tokenized input", variables=("pi",))
_(6, BuiltinConcepts.PYTHON_CODE, desc="python code", variables=("pf",)) # pf for PythonFragment
_(7, BuiltinConcepts.INVALID_CONCEPT, desc="invalid concept", variables=("concept_id", "reason"))
_(8, BuiltinConcepts.EVALUATION_ERROR, desc="evaluation error", variables=("concept", "reason"))
_(7, BuiltinConcepts.PARSER_RESULT, desc="parser result", variables=("result",))
_(8, BuiltinConcepts.INVALID_CONCEPT, desc="invalid concept", variables=("concept_id", "reason"))
_(9, BuiltinConcepts.EVALUATION_ERROR, desc="evaluation error", variables=("concept", "reason"))
self.init_log.debug('%s builtin concepts created',
len(self.sheerka.om.current_cache_manager().concept_caches))
@@ -129,7 +157,7 @@ class ConceptManager(BaseService):
desc: str = "", # possible description for the concept
props: dict = None, # hashmap of default properties
variables: list = None, # list of concept variables(tuple), with their default values
parameters: list = None # list of variables that are part of the name of the concept
parameters: set = None # list of variables that are part of the name of the concept
) -> ReturnValue:
"""
Adds the definition of a new concept
@@ -151,14 +179,13 @@ class ConceptManager(BaseService):
post,
ret,
definition,
definition_type,
DefinitionType.DEFAULT if definition_type is None else definition_type,
desc,
autouse,
bound_body,
props or {},
variables or (),
parameters or (),
)
{} if props is None else props,
[] if variables is None else variables,
set() if parameters is None else parameters)
digest = self.compute_metadata_digest(metadata)
if self.sheerka.om.exists_in_current(self.CONCEPTS_BY_HASH_ENTRY, digest):
@@ -177,15 +204,28 @@ class ConceptManager(BaseService):
# error = ErrorContext(self.NAME, context, ex)
# return ReturnValue(self.NAME, False, error)
# try:
# first_item_res = self.recompute_first_items(context, None, [metadata])
# except FirstItemError as ex:
# return ReturnValue(self.NAME, False, ex)
first_token_by_key = self._get_concept_first_token(concept_key)
if first_token_by_key is None:
return ReturnValue(self.NAME, False, self.newn(BuiltinConcepts.INVALID_CONCEPT,
concept_id=concept_id,
reason=NoFirstItemError()))
first_token_by_name = self._get_concept_first_token(name)
if first_token_by_name is None:
return ReturnValue(self.NAME, False, self.newn(BuiltinConcepts.INVALID_CONCEPT,
concept_id=concept_id,
reason=NoFirstItemError()))
# at this point everything is fine. let's get the id and save everything
om = self.sheerka.om
metadata.id = str(self.sheerka.om.get(self.sheerka.OBJECTS_IDS_ENTRY, self.USER_CONCEPTS_IDS))
om.add_concept(metadata)
# add the first token to the
om.put(self.CONCEPT_BY_FIRST_TOKEN_IN_KEY, first_token_by_key, metadata.id)
if first_token_by_name != first_token_by_key:
om.put(self.CONCEPT_BY_FIRST_TOKEN_IN_NAME, first_token_by_name, metadata.id)
# self.update_first_items_caches(context, first_item_res)
# if bnf_expr:
# self.bnf_expr_cache.put(metadata.id, bnf_expr)
@@ -208,7 +248,7 @@ class ConceptManager(BaseService):
"""
metadata = self.get_by_name(concept_name)
if metadata is NotFound:
return self._inner_new(self.get_by_name(BuiltinConcepts.UNKNOWN_CONCEPT), requested_name=concept_name)
return self._inner_new(self.get_by_name(BuiltinConcepts.UNKNOWN_CONCEPT), requested=concept_name)
if isinstance(metadata, list):
return [self._inner_new(m, **kwargs) for m in metadata]
@@ -228,7 +268,7 @@ class ConceptManager(BaseService):
"""
metadata = self.get_by_id(concept_id)
if metadata is NotFound:
return self._inner_new(self.get_by_name(BuiltinConcepts.UNKNOWN_CONCEPT), requested_id=concept_id)
return self._inner_new(self.get_by_name(BuiltinConcepts.UNKNOWN_CONCEPT), requested=f"#{concept_id}")
return self._inner_new(metadata, **kwargs)
def new(self, identifier, **kwargs):
@@ -244,6 +284,29 @@ class ConceptManager(BaseService):
if isinstance(identifier, (ConceptMetadata, Concept)):
return self._inner_new(identifier.get_metadata(), **kwargs)
if isinstance(identifier, ConceptRef):
# first, try the digest
resolved_identifier = identifier.concept.get_definition_digest()
metadata = self.get_by_digest(resolved_identifier)
if metadata is NotFound:
# used the same method that was used when the concept was first recognized
match identifier.concept.get_runtime_info().info["resolution_method"]:
case "id":
resolved_identifier = f"#{identifier.concept.id}"
metadata = self.get_by_id(resolved_identifier)
case "key":
resolved_identifier = identifier.concept.key
metadata = self.get_by_key(resolved_identifier)
case _:
resolved_identifier = identifier.concept.name
metadata = self.get_by_name(resolved_identifier)
if metadata is NotFound:
return self._inner_new(self.get_by_name(BuiltinConcepts.UNKNOWN_CONCEPT), requested=resolved_identifier)
else:
return [self.new(item, **kwargs) for item in metadata] if \
isinstance(metadata, list) else self._inner_new(metadata, **kwargs)
if isinstance(identifier, list):
return [self.new(item, **kwargs) for item in identifier]
@@ -257,7 +320,8 @@ class ConceptManager(BaseService):
if isinstance(identifier, str):
return self.newn(identifier, **kwargs)
return self._inner_new(self.get_by_name(BuiltinConcepts.UNKNOWN_CONCEPT), requested_name=identifier)
# failed to instantiate the concept
return self._inner_new(self.get_by_name(BuiltinConcepts.UNKNOWN_CONCEPT), requested=identifier)
def get_by_name(self, key: str):
"""
@@ -289,9 +353,36 @@ class ConceptManager(BaseService):
"""
return self.sheerka.om.get(self.CONCEPTS_BY_KEY_ENTRY, key)
def get_by_digest(self, digest: str):
"""
Returns a concept metadata, using its digest
:param digest:
:type digest:
:return: NotFound if not found
:rtype:
"""
return self.sheerka.om.get(self.CONCEPTS_BY_HASH_ENTRY, digest)
def get_all_concepts(self):
return list(sorted(self.sheerka.om.list(self.CONCEPTS_BY_ID_ENTRY), key=lambda item: int(item.id)))
def get_metadatas_from_first_token(self, attr: Literal["key", "name"], token: str):
"""
Get the list of the concepts that start with token
:param attr: "key" or "name"
:type attr:
:param token:
:type token:
:return:
:rtype:
"""
cache_name = self.CONCEPT_BY_FIRST_TOKEN_IN_NAME if attr == "name" else self.CONCEPT_BY_FIRST_TOKEN_IN_KEY
concepts_ids = self.sheerka.om.get(cache_name, token)
if concepts_ids is NotFound:
return []
return [self.get_by_id(c_id) for c_id in concepts_ids]
def is_a_concept_name(self, name):
return self.sheerka.om.exists(self.CONCEPTS_BY_NAME_ENTRY, name)
@@ -385,6 +476,28 @@ class ConceptManager(BaseService):
metadata.all_attrs = self.compute_all_attrs(variables_to_use)
self.sheerka.om.add_concept(metadata)
@staticmethod
def _get_concept_first_token(concept_key):
"""
Return the first token of a concept key that is not a variable placeholder
>>> assert _get_concept_first_token("I am a concept") == "I"
>>> assert _get_concept_first_token("__var__1 multiplied by __var__2") == "multiplied"
:param concept_key:
:type concept_key:
:return:
:rtype:
"""
keywords = concept_key.split()
# trim first variables
res = []
for keyword in keywords:
if keyword.startswith(VARIABLE_PREFIX):
continue
return keyword
return None
@staticmethod
def _inner_new(_metadata_def: ConceptMetadata, **kwargs):
concept = Concept(_metadata_def)
+60
View File
@@ -0,0 +1,60 @@
from multiprocessing import RLock
from services.BaseService import BaseService
class SheerkaDummyEventManager(BaseService):
    """
    Manage simple publish and subscribe functions
    Need to be replaced by a standard in the industry (Redis?)
    """
    NAME = "DummyEventManager"
    def __init__(self, sheerka):
        super().__init__(sheerka, order=2)
        self._lock = RLock()  # guards self.subscribers; note this is multiprocessing.RLock, not threading's
        self.subscribers = {}  # topic -> list of callbacks
    def initialize(self):
        # Expose subscribe/publish as service methods (hidden from the user-facing API).
        self.sheerka.bind_service_method(self.NAME, self.subscribe, True, visible=False)
        self.sheerka.bind_service_method(self.NAME, self.publish, True, visible=False)
    def subscribe(self, topic, callback):
        """
        To subscribe to a topic, just give the callback to call
        Note that the callback must be a function whose first argument is a context
        :param topic: topic name to listen to
        :param callback: callable(context) or callable(context, data)
        :return:
        """
        with self._lock:
            self.subscribers.setdefault(topic, []).append(callback)
    def publish(self, context, topic, data=None):
        """
        Publish on a topic, calling every subscriber synchronously
        The data is not mandatory
        :param context: execution context, passed as first argument to callbacks
        :param topic: topic name
        :param data: optional payload forwarded as second argument
        :return:
        """
        with self._lock:
            try:
                subscribers = self.subscribers[topic]
                # NOTE(review): 'if data:' treats falsy payloads (0, '', {}, []) the same
                # as no payload, so callbacks get no data argument -- confirm intended.
                if data:
                    for callback in subscribers:
                        callback(context, data)
                else:
                    for callback in subscribers:
                        callback(context)
            except KeyError:
                # No subscribers for this topic: publishing is a no-op.
                pass
    def test_only_reset_service(self):
        """
        Remove all subscribers (every topic is cleared)
        TO REMOVE once sheerka ontology is fully implemented
        :return:
        """
        self.subscribers.clear()
+35 -4
View File
@@ -13,6 +13,7 @@ from core.error import ErrorConcepts, ErrorContext, ErrorObj, MethodAccessError
from core.python_fragment import PythonFragment
from parsers.tokenizer import Token, TokenKind
from services.BaseService import BaseService
from services.SheerkaConceptManager import ConceptRef
TO_DISABLED = ["breakpoint", "callable", "compile", "delattr", "eval", "exec", "exit", "input", "locals", "open",
"print", "quit", "setattr"]
@@ -98,12 +99,12 @@ class PythonEvalError(ErrorObj):
@dataclass
class EvaluationRef:
class ObjectRef:
root: str
attr: str
def __eq__(self, other):
if not isinstance(other, EvaluationRef):
if not isinstance(other, ObjectRef):
return False
return self.root == other.root and self.attr == other.attr
@@ -126,7 +127,12 @@ class EvaluationContext:
class MultipleResults:
def __init__(self, *args):
self.items = args
self.items = []
for item in args:
if isinstance(item, MultipleResults):
self.items.extend(item.items)
else:
self.items.append(item)
def __iter__(self):
return iter(self.items)
@@ -153,6 +159,10 @@ class MultipleResults:
def concepts_only(self):
return MultipleResults(*[item for item in self.items if isinstance(item, Concept)])
def unique(self):
seen = set()
return MultipleResults(*[x for x in self.items if x not in seen and not seen.add(x)])
class SheerkaPython(BaseService):
"""
@@ -368,7 +378,18 @@ class SheerkaPython(BaseService):
return result
def resolve_object(self, context, attr_name, to_resolve, global_namespace):
if isinstance(to_resolve, EvaluationRef):
if isinstance(to_resolve, MultipleResults):
return MultipleResults(*(self.resolve_object(context, attr_name, item, global_namespace)
for item in to_resolve.items)).unique()
if isinstance(to_resolve, Concept):
to_resolve = context.sheerka.evaluate_concept(context, to_resolve)
return to_resolve
if isinstance(to_resolve, ConceptRef):
return self.new_concept(context, to_resolve)
if isinstance(to_resolve, ObjectRef):
return getattr(global_namespace[to_resolve.root], to_resolve.attr)
if isinstance(to_resolve, Token) and to_resolve.type == TokenKind.CONCEPT:
@@ -519,6 +540,16 @@ class SheerkaPython(BaseService):
@staticmethod
def new_concept(context, identifier):
"""
Instantiate and evaluate a new concept, from its identifier
This method can return MultipleResult
:param context:
:type context:
:param identifier:
:type identifier:
:return:
:rtype:
"""
new_concept = context.sheerka.new(identifier)
if isinstance(new_concept, list):
+60 -2
View File
@@ -142,7 +142,7 @@ class TestDictionaryCache(BaseTest):
assert cache.get("key") is NotFound
assert cache._cache == {}
def test_auto_configure_retrieves_the_whole_remote_repository(self, sdp, context):
def test_auto_configure_retrieves_the_whole_remote_repository(self, context, sdp):
cache = DictionaryCache(sdp=sdp).auto_configure("test")
with sdp.get_transaction(context.event) as transaction:
transaction.add("test", "key1", "value1")
@@ -153,7 +153,7 @@ class TestDictionaryCache(BaseTest):
assert cache.copy() == {'key1': 'value1', 'key2': 'value2'}
def test_we_do_no_go_twice_in_repo_when_not_found(self, sdp, context):
def test_we_do_no_go_twice_in_repo_when_not_found(self, context, sdp):
cache = DictionaryCache(sdp=sdp).auto_configure("test")
assert cache.get("key") is NotFound
@@ -163,3 +163,61 @@ class TestDictionaryCache(BaseTest):
transaction.add("test", "key", "value")
assert cache.get("key") is NotFound # the key was previously requested
def test_i_can_add_path(self):
cache = DictionaryCache()
cache.add_path(["a", "b", "c"], "c_value")
cache.add_path(["a", "b", "d", "e"], "e_value")
assert cache.copy() == {'a': {'b': {'c': {"#values#": ['c_value']},
'd': {'e': {"#values#": ['e_value']}}}}}
assert len(cache) == 2
def test_i_can_get_multiple_values_in_the_same_path(self):
cache = DictionaryCache()
cache.add_path(["a", "b", "c"], "value1")
cache.add_path(["a", "b", "c"], "value2")
cache.add_path(["a", "b", "c", "d"], "value3")
assert cache.copy() == {'a': {'b': {'c': {'d': {'#values#': ['value3']},
'#values#': ["value1", "value2"]}}}}
assert len(cache) == 3
def test_i_can_remove_path(self):
cache = DictionaryCache()
cache.add_path(["a", "b", "c"], "value1")
cache.add_path(["a", "b", "c"], "value2")
cache.remove_path(["a", "b", "c"], "value1")
assert cache.copy() == {'a': {'b': {'c': {"#values#": ['value2']}}}}
assert len(cache) == 1
cache.remove_path(["a", "b", "c"], "value2")
assert cache.copy() == {}
assert len(cache) == 0
def test_i_can_remove_when_not_exist(self):
# remove an entry that does not exist does not cause error
cache = DictionaryCache()
cache.add_path(["a", "b", "c"], "value1")
cache.add_path(["a", "b", "c"], "value2")
cache.remove_path(["a", "b", "c"], "value3")
cache.remove_path(["a", "b"], "value1")
assert cache.copy() == {'a': {'b': {'c': {"#values#": ['value1', 'value2']}}}}
assert len(cache) == 2
def test_i_can_get_from_path(self):
cache = DictionaryCache()
cache.add_path(["a", "b", "c"], "value1")
cache.add_path(["a", "b", "c"], "value2")
assert cache.get_from_path(["a", "b"]) is NotFound
assert cache.get_from_path(["a", "b", "c"]) == ["value1", "value2"]
+39
View File
@@ -51,6 +51,15 @@ def test_not_found_is_returned_when_not_found():
assert cache.get("foo") is NotFound
def test_i_can_remove_an_item():
cache = FastCache()
cache.put("key1", "value1")
cache.put("to_keep1", "to_keep_value1")
cache.remove("key1")
assert cache.cache == {"to_keep1": "to_keep_value1"}
def test_i_can_evict_by_key():
cache = FastCache()
cache.put("key1", "value1")
@@ -109,3 +118,33 @@ def test_i_can_copy():
cache.put("key3", "value3")
assert cache.copy() == {"key1": "value1", "key2": "value2", "key3": "value3"}
def test_i_can_take_snapshots_and_revert():
# Test that I can create restoration points
# and come back later to them
cache = FastCache()
cache.put("key1", "value1")
cache.snapshot()
cache.put("key2", "value2")
cache.put("key3", "value3")
cache.snapshot()
cache.put("key4", "value4")
cache.put("key5", "value5")
assert cache.cache == {"key1": "value1",
"key2": "value2",
"key3": "value3",
"key4": "value4",
"key5": "value5"}
cache.revert_snapshot()
assert cache.cache == {"key1": "value1",
"key2": "value2",
"key3": "value3"}
cache.revert_snapshot()
assert cache.cache == {"key1": "value1"}
cache.revert_snapshot() # no effect if nothing to revert
assert cache.cache == {"key1": "value1"}
+257
View File
@@ -279,3 +279,260 @@ class TestListCache(BaseTest):
cache.update("old_key", "old_value", "new_key", "new_value")
assert cache.get("old_key") == ["other old value"]
assert cache.get("new_key") == ["other new", "new_value"]
def test_i_can_delete_from_list_cache(self):
cache = ListCache()
cache.put("key", "value")
cache.put("key", "value2") # we can append to this list
cache.delete("key", "value2")
assert len(cache) == 1
assert cache.get("key") == ["value"]
cache.delete("key", "value")
assert len(cache) == 0
assert cache.get("key") is NotFound
def test_delete_an_entry_that_does_not_exist_has_no_effect(self):
cache = ListCache()
cache.put("key", "value")
cache.delete("key", "value2")
assert len(cache) == 1
assert cache.get("key") == ["value"]
def test_i_can_delete_when_alt_sdp_a_key_from_cache(self):
# There is a value in alt_cache_manager,
# No remaining value in current cache after deletion
# The key must be flagged as Removed
cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
cache.put("key", "value")
cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
assert cache.copy() == {"key": Removed}
assert cache.to_add == {"key"}
assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_a_value_from_cache(self):
# There is a value in alt_cache_manager,
# No remaining value in current cache after deletion
# The key must be flagged as Removed
cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
cache.put("key", "value")
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
cache.delete("key", value="value", alt_sdp=alt_sdp)
assert cache.copy() == {"key": Removed}
assert cache.to_remove == set()
assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_value_from_cache_and_then_put_back(self):
    # There is a value in alt_cache_manager: the delete first marks the key
    # as Removed, then a subsequent put() replaces the marker with the new
    # value (the key stays flagged for persistence in to_add).
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    cache.put("key", "value")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value", alt_sdp=alt_sdp)  # remove all values
    cache.put("key", "value")
    assert cache.copy() == {"key": ["value"]}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_value_from_cache_remaining_one_value(self):
    # There is a value in alt_cache_manager,
    # But this time, there are remaining values in current cache after deletion
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    cache.put("key", "value")
    cache.put("key", "value2")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value", alt_sdp=alt_sdp)
    # the key is NOT flagged Removed since one value is still present
    assert cache.copy() == {"key": ["value2"]}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_value_from_cache_remaining_values(self):
    # There is a value in alt_cache_manager,
    # But this time, there are remaining values in current cache after deletion
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    cache.put("key", "value")
    cache.put("key", "value2")
    cache.put("key", "value3")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value", alt_sdp=alt_sdp)
    # two values survive, in insertion order
    assert cache.copy() == {"key": ['value2', 'value3']}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_key_from_remote_repository(self):
    # There is a value in alt_cache_manager,
    # No remaining value in current cache after deletion
    # The key must be flagged as Removed
    # Here the values come from the remote repository (sdp), not from put()
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: ["value1", "value2"])).auto_configure(
        "cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
    cache.delete("key", value=None, alt_sdp=alt_sdp)
    assert cache.copy() == {"key": Removed}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository(self):
    # There is a value in alt_cache_manager,
    # No remaining value in current cache after deletion
    # The key must be flagged as Removed
    # The single remote value is deleted by value, emptying the key
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: ["value"])).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value", alt_sdp=alt_sdp)
    assert cache.copy() == {"key": Removed}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_key_from_remote_repository_and_then_put_back(self):
    # Delete all remote values for the key, then put() a new one:
    # the Removed marker is replaced and the key stays flagged in to_add.
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: ["value1", "value2"])).auto_configure(
        "cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["xxx"], extend_exists=lambda cache_name, key: True)
    cache.delete("key", value=None, alt_sdp=alt_sdp)  # remove all values
    cache.put("key", "value")
    assert cache.copy() == {"key": ["value"]}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository_remaining_one_value(self):
    # There is a value in alt_cache_manager,
    # But this time, there are remaining values in current cache after deletion
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: ["value1", "value2"])).auto_configure(
        "cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    # one of the two remote values survives the deletion
    assert cache.copy() == {"key": ["value2"]}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_key_from_alt_sdp(self):
    # alt_cache_manager is used because no value in cache or in remote repository
    # After value deletion, the key is empty
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1, value2"],
                      extend_exists=lambda cache_name, key: True)
    cache.delete("key", value=None, alt_sdp=alt_sdp)
    assert cache.copy() == {"key": Removed}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp(self):
    # alt_cache_manager is used because no value in cache or in remote repository
    # After value deletion, the key is empty
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1"],
                      extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    assert cache.copy() == {"key": Removed}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_and_then_put_back(self):
    # alt_cache_manager is used because no value in cache or in remote repository
    # After deletion the key is Removed; a subsequent put() replaces the
    # marker with the new value, and the key stays flagged in to_add.
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1"],
                      extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    cache.put("key", "value")
    assert cache.copy() == {"key": ["value"]}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_one_value_remaining(self):
    # alt_cache_manager is used because no value in cache or in remote repository
    # After value deletion, one value remains in the cache
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1", "value2"],
                      extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    assert cache.copy() == {"key": ["value2"]}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_multiple_values_remaining(self):
    # alt_cache_manager is used because no value in cache or in remote repository
    # After value deletion, two values remain in the cache
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1", "value2", "value3"],
                      extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    assert cache.copy() == {"key": ["value2", "value3"]}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_an_already_removed_value_from_alt_sdp(self):
    # alt_cache_manager is used because no value in cache or in remote repository
    # But the alternate sdp returns Removed, which means that previous value was deleted
    # It's like there is nothing to delete
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: Removed,
                      extend_exists=lambda cache_name, key: False)
    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    # no entry, no event: the deletion is a complete no-op
    assert cache.copy() == {}
    assert cache.to_add == set()
    assert cache.to_remove == set()
def test_deleting_an_entry_that_does_not_exist_is_not_an_error(self):
    # Unknown key, unknown key+value, and known key with unknown value:
    # none of them may raise or record an add/remove event.
    cache = ListCache()
    cache.put("key", "value1")
    cache.reset_events()
    cache.delete("key3")
    assert len(cache) == 1
    assert cache.to_add == set()
    assert cache.to_remove == set()
    cache.delete("key3", "value")
    assert len(cache) == 1
    assert cache.to_add == set()
    assert cache.to_remove == set()
    cache.delete("key", "value2")
    assert len(cache) == 1
    assert cache.to_add == set()
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_and_cache_is_cleared(self):
    # After clear(), deletions (by key or by value) are no-ops even when the
    # alternate sdp still reports a value for the key.
    cache = ListCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value",
                      extend_exists=lambda cache_name, key: True)
    cache.clear()
    cache.delete("key", value=None, alt_sdp=alt_sdp)
    assert cache.copy() == {}
    assert cache.to_add == set()
    assert cache.to_remove == set()
    cache.delete("key", value="value", alt_sdp=alt_sdp)
    assert cache.copy() == {}
    assert cache.to_add == set()
    assert cache.to_remove == set()
+11
View File
@@ -610,6 +610,17 @@ class TestListIfNeededCache(BaseTest):
assert cache.to_add == set()
assert cache.to_remove == set()
def test_deleting_an_entry_that_does_not_exist_from_a_list_is_not_an_error(self):
    # Deleting an unknown value must neither raise nor record any event.
    cache = ListIfNeededCache()
    cache.put("key", "value1")
    cache.put("key", "value2")
    cache.reset_events()
    cache.delete("key", "value3")
    assert len(cache) == 2
    assert cache.to_add == set()
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_and_cache_is_cleared(self):
cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value",
+8 -1
View File
@@ -1,8 +1,12 @@
import inspect
import pytest
from helpers import GetNextId
from server.authentication import User
DEFAULT_ONTOLOGY_NAME = "current_test_"
@pytest.fixture(scope="session")
def sheerka():
@@ -75,12 +79,15 @@ class NewOntology:
"""
from core.ExecutionContext import ExecutionContext
def __init__(self, context: ExecutionContext, name="current_test"):
def __init__(self, context: ExecutionContext, name=None):
    """
    :param context: current execution context (provides the sheerka instance)
    :param name: ontology name; when None it defaults to the caller's
        function name, so each test gets its own dedicated ontology
    """
    self.sheerka = context.sheerka
    self.context = context
    self.name = name
    self.ontology = None
    if self.name is None:
        # stack()[1] is the direct caller's frame record; index 3 is its function name
        self.name = inspect.stack()[1][3]
def __enter__(self):
    # push a fresh ontology for the duration of the with-block
    self.ontology = self.sheerka.om.push_ontology(self.name)
    return self.ontology
@@ -92,3 +92,16 @@ class TestDefConceptEvaluator(BaseTest):
new_concept = res.new[0].value
assert context.sheerka.isinstance(new_concept, BuiltinConcepts.NEW_CONCEPT)
assert new_concept.body.variables == expected
assert new_concept.body.parameters == set(item[0] for item in expected)
def test_i_can_define_variables_that_are_not_parameters(self, context, evaluator):
    # def_var declares a variable of the new concept that is NOT a parameter
    with NewOntology(context, "test_i_can_define_variables_that_are_not_parameters"):
        ret_val_input = get_ret_val_from(context, "def concept color def_var color_name")
        res = evaluator.eval(context, None, ret_val_input)
        assert len(res.new) == 1
        assert res.new[0].status
        new_concept = res.new[0].value
        assert context.sheerka.isinstance(new_concept, BuiltinConcepts.NEW_CONCEPT)
        # the variable exists (uninitialized) but the parameter set is empty
        assert new_concept.body.variables == [("color_name", NotInit)]
        assert new_concept.body.parameters == set()
+51
View File
@@ -0,0 +1,51 @@
import pytest
from base import BaseTest
from evaluators.FilterSuccessful import FilterSuccessful
from helpers import _rv, _rvf
class TestFilterSuccessful(BaseTest):
    """Tests for the FilterSuccessful evaluator.

    It only matches when the batch mixes successful and failed return values;
    eval() then keeps the successes and eats the failures.
    """

    @pytest.fixture()
    def evaluator(self, sheerka):
        # the evaluator instance registered under FilterSuccessful.NAME
        return sheerka.evaluators[FilterSuccessful.NAME]

    def test_i_can_match_and_eval(self, context, evaluator):
        true1 = _rv("some_value1")
        true2 = _rv("some_value2")
        false1 = _rvf("some_value1")
        false2 = _rvf("some_value2")
        # only successes: nothing to filter out
        return_values = [true1]
        m = evaluator.matches(context, return_values)
        assert m.status is False
        return_values = [true1, true2]
        m = evaluator.matches(context, return_values)
        assert m.status is False
        # only failures: nothing worth keeping
        return_values = [false1]
        m = evaluator.matches(context, return_values)
        assert m.status is False
        return_values = [false1, false2]
        m = evaluator.matches(context, return_values)
        assert m.status is False
        # mixed: the evaluator matches and splits keep/drop
        return_values = [true1, false1]
        m = evaluator.matches(context, return_values)
        assert m.status is True
        assert m.obj == {'to_keep': [true1], 'to_drop': [false1]}
        r = evaluator.eval(context, m.obj, return_values)
        assert r.new == [true1]
        assert r.eaten == [false1]
        return_values = [true1, true2, false1, false2]
        m = evaluator.matches(context, return_values)
        assert m.status is True
        assert m.obj == {'to_keep': [true1, true2], 'to_drop': [false1, false2]}
        r = evaluator.eval(context, m.obj, return_values)
        assert r.new == [true1, true2]
        assert r.eaten == [false1, false2]
@@ -0,0 +1,48 @@
import pytest
from base import BaseParserTest
from conftest import NewOntology
from core.BuiltinConcepts import BuiltinConcepts
from evaluators.RecognizeSimpleConcept import RecognizeSimpleConcept
from evaluators.base_evaluator import NotForMe
from helpers import _rv, _rvf, get_concepts
from parsers.ParserInput import ParserInput
class TestRecognizeSimpleConcept(BaseParserTest):
    """Tests for the RecognizeSimpleConcept evaluator.

    It reacts to PARSER_INPUT return values and replaces them with the
    recognized concept (or a NotForMe failure when nothing is recognized).
    """

    @pytest.fixture()
    def evaluator(self, sheerka):
        # the evaluator instance registered under RecognizeSimpleConcept.NAME
        return sheerka.evaluators[RecognizeSimpleConcept.NAME]

    def test_i_can_match(self, sheerka, context, evaluator):
        # matches on a successful PARSER_INPUT return value only
        ret_val = _rv(sheerka.newn(BuiltinConcepts.PARSER_INPUT, pi=ParserInput("some text")))
        assert evaluator.matches(context, ret_val).status is True
        ret_val = _rv(sheerka.newn(BuiltinConcepts.UNKNOWN_CONCEPT))  # it responds to USER_INPUT only
        assert evaluator.matches(context, ret_val).status is False
        ret_val = _rvf(sheerka.newn(BuiltinConcepts.PARSER_INPUT, pi=ParserInput("some text")))  # status is false
        assert evaluator.matches(context, ret_val).status is False

    def test_i_can_recognize_a_concept(self, context, evaluator):
        # Fixed: the ontology name now matches the test name; it was copied
        # from another test ("test_i_can_recognize_a_def_concept"), which
        # risked ontology-name collisions between tests.
        with NewOntology(context, "test_i_can_recognize_a_concept"):
            concept, = get_concepts(context, "I am a new concept", use_sheerka=True)
            ret_val_input = self.get_parser_input(context, "I am a new concept")
            res = evaluator.eval(context, None, ret_val_input)
            assert len(res.new) == 1
            assert res.new[0].status
            assert context.sheerka.isinstance(res.new[0].value, concept)
            # the parser input is consumed once the concept is recognized
            assert res.eaten == [ret_val_input]

    def test_i_do_not_eat_when_not_for_me(self, context, evaluator):
        # Fixed: ontology name aligned with this test's name (copy-paste).
        with NewOntology(context, "test_i_do_not_eat_when_not_for_me"):
            ret_val_input = self.get_parser_input(context, "unknown concept")
            res = evaluator.eval(context, None, ret_val_input)
            assert len(res.new) == 1
            assert not res.new[0].status
            assert isinstance(res.new[0].value, NotForMe)
            # the input must stay available for other evaluators
            assert len(res.eaten) == 0
@@ -0,0 +1,57 @@
import pytest
from base import BaseTest
from evaluators.PythonParser import PythonParser
from evaluators.RecognizeDefConcept import RecognizeDefConcept
from evaluators.RecognizeSimpleConcept import RecognizeSimpleConcept
from evaluators.ResolvePythonVsSimpleConcept import ResolvePythonVsSimpleConcept
from helpers import _rv, _rvf
class TestResolvePythonVsSimpleConcept(BaseTest):
    """Tests for ResolvePythonVsSimpleConcept.

    When the python parser and the simple-concept recognizer BOTH succeed on
    the same input, the concept wins and the python result is eaten; results
    from other evaluators pass through untouched.
    """

    @pytest.fixture()
    def evaluator(self, sheerka):
        # the evaluator instance registered under ResolvePythonVsSimpleConcept.NAME
        return sheerka.evaluators[ResolvePythonVsSimpleConcept.NAME]

    def test_i_can_match_and_eval(self, context, evaluator):
        python = _rv("some_value", who=PythonParser.NAME)
        concept = _rv("some_value", who=RecognizeSimpleConcept.NAME)
        other = _rv("some_value", who=RecognizeDefConcept.NAME)
        python_nok = _rvf("some_value", who=PythonParser.NAME)
        concept_nok = _rvf("some_value", who=RecognizeSimpleConcept.NAME)
        other_nok = _rvf("some_value", who=RecognizeDefConcept.NAME)
        # at least the two
        return_values = [python, concept]
        m = evaluator.matches(context, return_values)
        assert m.status is True
        assert m.obj == {'to_keep': concept, 'to_drop': python, 'others': []}
        r = evaluator.eval(context, m.obj, return_values)
        assert r.new == [concept]
        assert r.eaten == [python]
        # the two and other successful
        return_values = [python, concept, other, other_nok]
        m = evaluator.matches(context, return_values)
        assert m.status is True
        assert m.obj == {'to_keep': concept, 'to_drop': python, 'others': [other, other_nok]}
        r = evaluator.eval(context, m.obj, return_values)
        assert r.new == [concept, other, other_nok]
        assert r.eaten == [python]
        # python is not ok
        return_values = [python_nok, concept]
        m = evaluator.matches(context, return_values)
        assert m.status is False
        # concept is not ok
        return_values = [python, concept_nok]
        m = evaluator.matches(context, return_values)
        assert m.status is False
        # neither is ok
        return_values = [python_nok, concept_nok]
        m = evaluator.matches(context, return_values)
        assert m.status is False
+109
View File
@@ -1,7 +1,11 @@
from common.global_symbols import NotInit
from common.utils import unstr_concept
from core.ExecutionContext import ExecutionContext
from core.ReturnValue import ReturnValue
from core.concept import Concept, ConceptDefaultProps, ConceptMetadata, DefinitionType
from parsers.ParserInput import ParserInput
from parsers.state_machine import MetadataToken, UnrecognizedToken
from parsers.tokenizer import Tokenizer
from services.SheerkaConceptManager import ConceptManager
ATTR_MAP = {
@@ -122,7 +126,34 @@ def get_evaluated_concept(blueprint: Concept | ConceptMetadata, **kwargs):
:return:
:rtype:
"""
def _isfloat(num):
try:
float(num)
return True
except ValueError:
return False
res = Concept(blueprint.get_metadata())
for attr in ATTR_MAP:
source_code = getattr(res.get_metadata(), attr)
if source_code == "" or source_code is None:
value = NotInit
elif source_code[0] in ("'", '"'):
value = source_code[1:-1]
elif source_code in ("True", "False"):
value = source_code == "True"
elif source_code.isdecimal():
value = int(source_code)
elif _isfloat(source_code):
value = float(source_code)
else:
raise Exception(f"Cannot manage {attr=}, {source_code=}")
setattr(res, ATTR_MAP[attr], value)
# force values
for k, v in kwargs.items():
res.set_value(ATTR_MAP.get(k, k), v)
@@ -347,6 +378,13 @@ def get_concepts(context: ExecutionContext, *concepts, **kwargs) -> list[Concept
return res
def get_evaluated_concepts(context, *concepts, use_sheerka=False) -> list[Concept]:
    """Evaluate several concept blueprints and return the evaluated copies.

    :param context: current execution context
    :param concepts: blueprints to evaluate
    :param use_sheerka: when True, delegate evaluation to the sheerka engine
        instead of the local get_evaluated_concept() helper
    :return: one evaluated Concept per input blueprint, in order
    """
    if not use_sheerka:
        return [get_evaluated_concept(blueprint) for blueprint in concepts]
    sheerka = context.sheerka
    return [sheerka.evaluate_concept(context, Concept(blueprint.get_metadata()))
            for blueprint in concepts]
def define_new_concept(context: ExecutionContext, c: str | Concept | ConceptMetadata) -> Concept:
sheerka = context.sheerka
if isinstance(c, str):
@@ -381,6 +419,43 @@ def get_file_content(file_name):
return f.read()
def get_parser_input(text):
    """Build a ParserInput from *text*, asserting that it initializes cleanly."""
    parser_input = ParserInput(text)
    assert parser_input.init()
    return parser_input
def get_from(*args, **kwargs):
    """
    Rebuild a sequence of tokens with consistent start/end positions.

    Each MetadataToken/UnrecognizedToken in *args* is re-emitted with start/end
    computed from the number of lexical tokens its text produces, so expected
    parser outputs can be written without counting positions by hand.

    :param args: tokens (MetadataToken or UnrecognizedToken) in source order
    :param kwargs: optional `resolution_method` / `parser` overrides applied to
        every MetadataToken
    :return: the repositioned tokens
    :rtype: list
    """
    cache = {}  # I keep the name in cache to avoid having to remind it everytime
    pos = 0
    res = []
    for item in args:
        start = pos
        if isinstance(item, MetadataToken):
            if item.metadata.name:
                cache[item.metadata.id] = item.metadata.name
            # spans are measured in lexical tokens, not characters
            tokens = list(Tokenizer(cache[item.metadata.id], yield_eof=False))
            pos += len(tokens)
            resolution_method = kwargs.get("resolution_method", item.resolution_method)
            parser = kwargs.get("parser", item.parser)
            res.append(MetadataToken(item.metadata, start, pos - 1, resolution_method, parser))
        elif isinstance(item, UnrecognizedToken):
            tokens = list(Tokenizer(item.buffer, yield_eof=False))
            pos += len(tokens)
            res.append(UnrecognizedToken(item.buffer, start, pos - 1))
    return res
def _rv(value, who="Test"):
    """Shorthand for a successful (status=True) ReturnValue."""
    return ReturnValue(who=who, status=True, value=value)
@@ -400,3 +475,37 @@ def _rvf(value, who="Test"):
:rtype:
"""
return ReturnValue(who=who, status=False, value=value)
def _ut(buffer, start=0, end=-1):
    """
    helper to UnrecognizedToken
    :param buffer: the raw unrecognized text
    :type buffer: str
    :param start: start position (in lexical tokens)
    :type start: int
    :param end: end position (in lexical tokens), inclusive
    :type end: int
    :return: the built token
    :rtype: UnrecognizedToken
    """
    return UnrecognizedToken(buffer, start, end)
def _mt(concept_id, start=0, end=-1, resolution_method="id", parser="simple"):
    """
    helper to MetadataToken
    :param concept_id: either a bare id (e.g. "1003") or a "c:name#id:" string
    :type concept_id: str
    :param start: start position (in lexical tokens)
    :type start: int
    :param end: end position (in lexical tokens), inclusive
    :type end: int
    :param resolution_method: how the concept was resolved (e.g. "id")
    :param parser: name of the parser that produced the token
    :return: the built token
    :rtype: MetadataToken
    """
    name, _id = unstr_concept(concept_id)
    if _id is None:
        # no "#id" part: the whole string is used as the id
        return MetadataToken(get_metadata(id=concept_id), start, end, resolution_method, parser)
    else:
        return MetadataToken(get_metadata(id=_id, name=name), start, end, resolution_method, parser)
+52 -2
View File
@@ -1,4 +1,6 @@
from base import BaseTest
from conftest import NewOntology
from core.BuiltinConcepts import BuiltinConcepts
def get_ret_val(res):
@@ -17,9 +19,9 @@ class TestNonReg1(BaseTest):
def test_i_cannot_evaluate_variable_that_is_not_defined(self, sheerka, user):
    res = sheerka.evaluate_user_input("a", user)
    ret_val = get_ret_val(res)
    assert ret_val.status is False
    # every returned value must be a failure
    assert len(res) == 2
    assert all([not ret_val.status for ret_val in res])
def test_i_can_remember_variables(self, sheerka, user):
sheerka.evaluate_user_input("a = 10", user)
@@ -28,3 +30,51 @@ class TestNonReg1(BaseTest):
ret_val = get_ret_val(res)
assert ret_val.value == 10
def test_i_can_define_a_new_concept(self, context, sheerka, user):
    # "def concept ... as ..." must yield a successful NEW_CONCEPT value
    with NewOntology(context, "test_i_can_define_a_new_concept"):
        res = sheerka.evaluate_user_input("def concept one as 1", user)
        ret_val = get_ret_val(res)
        assert ret_val.status
        assert sheerka.isinstance(ret_val.value, BuiltinConcepts.NEW_CONCEPT)
def test_i_can_define_a_new_concept_and_use_it(self, context, sheerka, user):
    # a freshly defined concept can be referenced by its bare name
    with NewOntology(context, "test_i_can_define_a_new_concept_and_use_it"):
        sheerka.evaluate_user_input("def concept one as 1", user)
        res = sheerka.evaluate_user_input("one", user)
        ret_val = get_ret_val(res)
        assert ret_val.status
        assert sheerka.isinstance(ret_val.value, "one")
        # recognition returns the concept unevaluated
        assert not ret_val.value.get_runtime_info().is_evaluated
def test_i_can_get_i_concept_using_c_name_form(self, context, sheerka, user):
    # the "c:<name>:" form resolves a concept by its name
    with NewOntology(context):
        sheerka.evaluate_user_input("def concept one as 1", user)
        res = sheerka.evaluate_user_input("c:one:", user)
        ret_val = get_ret_val(res)
        assert ret_val.status
        assert sheerka.isinstance(ret_val.value, "one")
        assert not ret_val.value.get_runtime_info().is_evaluated
def test_i_can_get_i_concept_using_c_id_form(self, context, sheerka, user):
    # the "c:#<id>:" form resolves a concept by its id
    # assumes ids start at 1001 in a fresh ontology — TODO confirm
    with NewOntology(context):
        sheerka.evaluate_user_input("def concept one as 1", user)
        res = sheerka.evaluate_user_input("c:#1001:", user)
        ret_val = get_ret_val(res)
        assert ret_val.status
        assert sheerka.isinstance(ret_val.value, "one")
        assert not ret_val.value.get_runtime_info().is_evaluated
def test_i_can_recognize_concepts_with_long_name(self, context, sheerka, user):
    # multi-word concept names are recognized as a whole
    with NewOntology(context):
        sheerka.evaluate_user_input("def concept i am a concept", user)
        res = sheerka.evaluate_user_input("i am a concept", user)
        ret_val = get_ret_val(res)
        assert ret_val.status
        assert sheerka.isinstance(ret_val.value, "i am a concept")
        assert not ret_val.value.get_runtime_info().is_evaluated
@@ -2,22 +2,15 @@ import pytest
from common.global_symbols import NotInit
from core.concept import DefinitionType
from helpers import get_parser_input
from parsers.ConceptDefinitionParser import ConceptDefinition, ConceptDefinitionParser
from parsers.ParserInput import ParserInput
from parsers.parser_utils import ParsingError, UnexpectedEof, UnexpectedToken
from parsers.tokenizer import Keywords, Token, TokenKind
def get_parser_input(text):
    # NOTE(review): local duplicate of helpers.get_parser_input — prefer the
    # shared helper to avoid divergence.
    pi = ParserInput(text)
    assert pi.init()
    return pi
class TestRecognizeDefConcept:
class TestConceptDefinitionParser:
@pytest.fixture()
def parser(self, sheerka):
def parser(self):
return ConceptDefinitionParser()
@pytest.mark.parametrize("text", [
+142
View File
@@ -0,0 +1,142 @@
import pytest
from base import BaseTest
from conftest import NewOntology
from evaluators.base_evaluator import MultipleChoices
from helpers import _mt, _ut, get_concepts, get_from, get_metadata, get_parser_input
from parsers.SimpleParserParser import SimpleConceptsParser
class TestSimpleConceptsParser(BaseTest):
    """Tests for SimpleConceptsParser.

    The parser turns raw text into every possible tokenization, each one a
    sequence of recognized concept tokens (_mt) and unrecognized buffers (_ut);
    positions are expressed in lexical tokens, inclusive.
    """

    @pytest.fixture()
    def parser(self):
        # a fresh parser per test, so error_sink starts empty
        return SimpleConceptsParser()

    @pytest.mark.parametrize("text, expected", [
        ("I am a new concept", [_mt("1003", 0, 8)]),
        ("xxx yyy I am a new concept", [_ut("xxx yyy ", 0, 3), _mt("1003", 4, 12)]),
        ("I am a new concept xxx yyy", [_mt("1003", 0, 8), _ut(" xxx yyy", 9, 12)]),
        ("xxx I am a new concept yyy", [_ut("xxx ", 0, 1), _mt("1003", 2, 10), _ut(" yyy", 11, 12)]),
        ("c:#1003:", [_mt("1003", 0, 0)]),
        ("xxx c:#1003: yyy", [_ut("xxx ", 0, 1), _mt("1003", 2, 2), _ut(" yyy", 3, 4)]),
        ("xxx c:I am: yyy", [_ut("xxx ", 0, 1), _mt("1002", 2, 2), _ut(" yyy", 3, 4)]),
        (" I am a new concept", [_ut(" ", 0, 0), _mt("1003", 1, 9)])
    ])
    def test_i_can_recognize_a_concept(self, context, parser, text, expected):
        with NewOntology(context, "test_i_can_recognize_a_concept"):
            # longest-name concept wins even when prefixes ("I", "I am") exist
            get_concepts(context, "I", "I am", "I am a new concept", use_sheerka=True)
            pi = get_parser_input(text)
            res = parser.parse(context, pi)
            assert res == MultipleChoices([expected])
            assert not parser.error_sink

    @pytest.mark.parametrize("text, expected", [
        ("foo", [_mt("1001", 0, 0)]),
        ("I am a new concept", [_mt("1001", 0, 8)])
    ])
    def test_i_can_recognize_a_concept_by_its_name_and_its_definition(self, context, parser, text, expected):
        with NewOntology(context, "test_i_can_recognize_a_concept_by_its_name_and_its_definition"):
            get_concepts(context, get_metadata(name="foo", definition="I am a new concept"), use_sheerka=True)
            pi = get_parser_input(text)
            res = parser.parse(context, pi)
            assert res == MultipleChoices([expected])
            assert not parser.error_sink

    @pytest.mark.parametrize("text, expected", [
        ("long concept name", [_mt("1001", 0, 4)]),
        ("I am a new concept", [_mt("1001", 0, 8)])
    ])
    def test_i_can_recognize_a_concept_by_its_name_when_long_name(self, context, parser, text, expected):
        with NewOntology(context, "test_i_can_recognize_a_concept_by_its_name_when_long_name"):
            get_concepts(context, get_metadata(name="long concept name", definition="I am a new concept"),
                         use_sheerka=True)
            pi = get_parser_input(text)
            res = parser.parse(context, pi)
            assert res == MultipleChoices([expected])
            assert not parser.error_sink

    def test_i_can_parse_a_sequence_of_concept(self, context, parser):
        with NewOntology(context, "test_i_can_parse_a_sequence_of_concept"):
            get_concepts(context, "foo bar", "baz", "qux", use_sheerka=True)
            pi = get_parser_input("foo bar baz foo, qux")
            res = parser.parse(context, pi)
            # "foo," alone is not a concept, so it ends up in an unrecognized buffer
            expected = [_mt("1001", 0, 2),
                        _ut(" ", 3, 3),
                        _mt("1002", 4, 4),
                        _ut(" foo, ", 5, 8),
                        _mt("1003", 9, 9)]
            assert res == MultipleChoices([expected])
            assert not parser.error_sink

    def test_i_can_detect_multiple_choices(self, context, parser):
        with NewOntology(context, "test_i_can_detect_multiple_choices"):
            # "bar" belongs to two overlapping concepts: both tokenizations are kept
            get_concepts(context, "foo bar", "bar baz", use_sheerka=True)
            pi = get_parser_input("foo bar baz")
            res = parser.parse(context, pi)
            expected1 = [_mt("1001", 0, 2), _ut(" baz", 3, 4)]
            expected2 = [_ut("foo ", 0, 1), _mt("1002", 2, 4)]
            assert res == MultipleChoices([expected1, expected2])
            assert not parser.error_sink

    def test_i_can_detect_multiple_choices_2(self, context, parser):
        with NewOntology(context, "test_i_can_detect_multiple_choices_2"):
            # "one two" is both a single concept and a sequence of two
            get_concepts(context, "one two", "one", "two", use_sheerka=True)
            pi = get_parser_input("one two")
            res = parser.parse(context, pi)
            expected1 = [_mt("1001", 0, 2)]
            expected2 = [_mt("1002", 0, 0), _ut(" ", 1, 1), _mt("1003", 2, 2)]
            assert res == MultipleChoices([expected1, expected2])
            assert not parser.error_sink

    def test_i_can_detect_multiple_choices_3(self, context, parser):
        # Fixed: the ontology name was copied from
        # test_i_can_detect_multiple_choices_2; it now matches this test's name
        # so parallel tests cannot collide on a shared ontology.
        with NewOntology(context, "test_i_can_detect_multiple_choices_3"):
            get_concepts(context, "one two", "one", "two", use_sheerka=True)
            pi = get_parser_input("one two xxx one two")
            res = parser.parse(context, pi)
            # ambiguity on each side of "xxx" => 2 x 2 tokenizations
            e1 = get_from(_mt("c:one two#1001:"), _ut(" xxx "), _mt("c:#1001:"))
            e2 = get_from(_mt("c:one#1002:"), _ut(" "), _mt("c:two#1003:"), _ut(" xxx "), _mt("c:one two#1001:"))
            e3 = get_from(_mt("c:one two#1001:"), _ut(" xxx "), _mt("c:one#1002:"), _ut(" "), _mt("c:two#1003:"))
            e4 = get_from(_mt("c:one#1002:"), _ut(" "), _mt("c:two#1003:"), _ut(" xxx "), _mt("c:#1002:"), _ut(" "),
                          _mt("c:#1003:"))
            assert res == MultipleChoices([e1, e2, e3, e4])
            assert not parser.error_sink

    def test_nothing_is_return_is_no_concept_is_recognized(self, context, parser):
        # NOTE(review): name has a typo (should read
        # "test_nothing_is_returned_if_no_concept_is_recognized") — kept to
        # avoid changing the public test id.
        pi = get_parser_input("one two three")
        res = parser.parse(context, pi)
        assert res == MultipleChoices([])

    def test_i_can_manage_attribute_reference(self, context, parser):
        # Fixed: the ontology name was copied from
        # test_i_can_detect_multiple_choices_2; it now matches this test's name.
        with NewOntology(context, "test_i_can_manage_attribute_reference"):
            get_concepts(context, "foo", "i am a concept", use_sheerka=True)
            # the ".attribute" suffix is left as an unrecognized buffer
            pi = get_parser_input("foo.attribute")
            res = parser.parse(context, pi)
            expected = [_mt("1001", 0, 0), _ut(".attribute", 1, 2)]
            assert res == MultipleChoices([expected])
            pi = get_parser_input("i am a concept.attribute")
            res = parser.parse(context, pi)
            expected = [_mt("1002", 0, 6), _ut(".attribute", 7, 8)]
            assert res == MultipleChoices([expected])
+82
View File
@@ -0,0 +1,82 @@
from dataclasses import dataclass
from parsers.state_machine import End, Start, State, StateMachine, StateResult
@dataclass
class DummyExecutionContext:
    """Minimal stand-in for ExecutionContext used by the state-machine tests."""

    # the only payload the state machine needs to see
    count: int

    def to_debug(self):
        """Return the debug payload recorded in the audit trail."""
        return dict(count=self.count)
class GenericTestState(State):
    """Test state that always transitions to a fixed next state, optionally forking."""

    def __init__(self, name, next_state, fork=None):
        super().__init__(name=name, next_states=[next_state])
        self.next_state = next_state
        self.fork = fork

    def run(self, state_context) -> StateResult:
        # unconditionally move on, propagating the configured forks (if any)
        return StateResult(self.next_state, self.fork)

    def __repr__(self):
        fork_count = len(self.fork) if self.fork else 0
        return f"(GenericTestState {self.name} -> {self.next_state}, forks={fork_count})"
def test_i_can_execute_a_workflow():
    # A linear workflow start -> a -> b -> c -> end produces a single path
    # whose audit trail records every visited state.
    wkf_as_list = [Start("start", ["a"]),
                   GenericTestState("a", "b"),
                   GenericTestState("b", "c"),
                   GenericTestState("c", "end"),
                   End("end", None)]
    wkf = {state.name: state for state in wkf_as_list}
    state_machine = StateMachine({"#wkf": wkf})
    state_machine.run("#wkf", "start", DummyExecutionContext(0))
    assert len(state_machine.paths) == 1
    assert state_machine.paths[0].get_audit_trail() == ["#wkf:start", "#wkf:a", "#wkf:b", "#wkf:c", "#wkf:end"]
def test_i_can_change_workflow():
    # A "#wkf2" next-state reference jumps to the start of another workflow;
    # the audit trail shows the crossing from #wkf1 into #wkf2.
    wkf1_as_list = [Start("start", ["a"]),
                    GenericTestState("a", "#wkf2")]
    wkf2_as_list = [Start("start", ["c"]),
                    GenericTestState("c", "end"),
                    End("end", None)]
    wkfs = {
        "#wkf1": {state.name: state for state in wkf1_as_list},
        "#wkf2": {state.name: state for state in wkf2_as_list}
    }
    state_machine = StateMachine(wkfs)
    state_machine.run("#wkf1", "start", DummyExecutionContext(0))
    assert len(state_machine.paths) == 1
    assert state_machine.paths[0].get_audit_trail() == ["#wkf1:start", "#wkf1:a", "#wkf2:start", "#wkf2:c", "#wkf2:end"]
def test_i_can_fork_path():
    # State "a" forks three child paths through "b" while the original path
    # goes straight to "end"; each child records its parent path.
    wkf_as_list = [Start("start", ["a"]),
                   GenericTestState("a", "end", [("b", DummyExecutionContext(i)) for i in range(3)]),
                   GenericTestState("b", "end"),
                   End("end", None)]
    wkf = {state.name: state for state in wkf_as_list}
    state_machine = StateMachine({"#wkf": wkf})
    state_machine.run("#wkf", "start", DummyExecutionContext(0))
    assert len(state_machine.paths) == 4
    assert state_machine.paths[0].get_audit_trail() == ["#wkf:start", "#wkf:a", "#wkf:end"]
    # the forking step of the original path references the 3 children
    assert state_machine.paths[0].history[1].forks == [1, 2, 3]
    assert state_machine.paths[1].get_audit_trail() == ["#wkf:start", "#wkf:a", "#wkf:b", "#wkf:end"]
    assert state_machine.paths[1].history[0].parents == [0]
    assert state_machine.paths[2].get_audit_trail() == ["#wkf:start", "#wkf:a", "#wkf:b", "#wkf:end"]
    assert state_machine.paths[2].history[0].parents == [0]
    assert state_machine.paths[3].get_audit_trail() == ["#wkf:start", "#wkf:a", "#wkf:b", "#wkf:end"]
    assert state_machine.paths[3].history[0].parents == [0]
+55 -9
View File
@@ -5,11 +5,10 @@ from common.global_symbols import NotInit
from conftest import NewOntology
from core.BuiltinConcepts import BuiltinConcepts
from core.concept import ConceptDefaultProps
from core.error import ErrorContext
from core.python_fragment import PythonFragment
from helpers import define_new_concept, get_concept, get_concepts, get_metadata
from services.SheerkaConceptEvaluator import ConceptEvaluator
from services.SheerkaPython import EvaluationRef
from services.SheerkaConceptEvaluator import ConceptEvaluator, InfiniteRecursion, TooManyErrors
from services.SheerkaPython import ObjectRef
class TestConceptManager(BaseTest):
@@ -77,8 +76,8 @@ class TestConceptManager(BaseTest):
compiled = service._build_attributes(context, metadata)
pf = getattr(compiled, ConceptDefaultProps.BODY)
assert isinstance(pf, PythonFragment)
assert pf.namespace == {"a": EvaluationRef("self", "a"),
"b": EvaluationRef("self", "b")}
assert pf.namespace == {"a": ObjectRef("self", "a"),
"b": ObjectRef("self", "b")}
def test_i_can_manage_parsing_errors(self, context, service):
metadata = get_metadata(
@@ -98,7 +97,7 @@ class TestConceptManager(BaseTest):
assert pf.source_code == "NotInit"
error = getattr(compiled, ConceptDefaultProps.BODY)
assert isinstance(error, ErrorContext)
assert isinstance(error, TooManyErrors)
def test_i_can_eval_concept_attributes(self, context, service):
with NewOntology(context, "test_i_can_eval_concept_attributes"):
@@ -225,6 +224,9 @@ class TestConceptManager(BaseTest):
assert context.sheerka.objvalue(qux) == 1
def test_concept_variables_precede_global_concepts(self, context, service):
# In this test, there is a variable named "foo"
# Its value is the concept 'bar'
# So when the body is evaluated, we expect Concept(bar), not Concept(foo)
with NewOntology(context, "test_concept_variables_precede_global_concepts"):
foo, bar, baz = get_concepts(context,
get_concept("foo"),
@@ -237,6 +239,20 @@ class TestConceptManager(BaseTest):
assert context.sheerka.isinstance(res, baz)
assert context.sheerka.isinstance(res.body, bar)
def test_concept_variables_precede_global_concept_during_computation(self, context, service):
    # In this test, there is a variable named "foo" and a concept also named "foo".
    # When evaluated, "foo + 1" must use the variable 'foo' (value 1), not Concept("foo") (body 2).
    # Fix: the ontology name now matches the test name. It was copy-pasted from
    # test_concept_variables_precede_global_concepts, which made the two tests share
    # an ontology — the same bug this codebase fixes in the "multiple_choice" tests.
    with NewOntology(context, "test_concept_variables_precede_global_concept_during_computation"):
        foo, bar = get_concepts(context,
                                get_concept("foo", body="2"),
                                get_concept("bar", body="foo + 1", variables=(("foo", "1"),)),
                                use_sheerka=True)
        res = service.evaluate_concept(context, bar)
        assert context.sheerka.isinstance(res, bar)
        # 1 (the variable) + 1, not 2 (the global concept) + 1
        assert context.sheerka.objvalue(res) == 2
def test_i_can_evaluate_concept_when_variables_reference_others_concepts_with_body(self, context, service):
with NewOntology(context, "test_i_can_evaluate_concept_when_variables_reference_others_concepts_with_body"):
foo, bar, baz = get_concepts(context,
@@ -463,6 +479,7 @@ class TestConceptManager(BaseTest):
res = service.evaluate_concept(context, foo)
assert context.sheerka.isinstance(res, BuiltinConcepts.EVALUATION_ERROR)
assert context.sheerka.isinstance(res.concept, foo)
assert isinstance(res.reason, InfiniteRecursion)
assert res.reason.ids == [foo.id, bar.id, baz.id]
def test_i_can_detect_sub_infinite_loop(self, context, service):
@@ -476,6 +493,7 @@ class TestConceptManager(BaseTest):
res = service.evaluate_concept(context, foo)
assert context.sheerka.isinstance(res, BuiltinConcepts.EVALUATION_ERROR)
assert context.sheerka.isinstance(res.concept, bar)
assert isinstance(res.reason, InfiniteRecursion)
assert res.reason.ids == [bar.id, baz.id]
def test_i_can_detect_auto_infinite_loop(self, context, service):
@@ -487,10 +505,11 @@ class TestConceptManager(BaseTest):
res = service.evaluate_concept(context, foo)
assert context.sheerka.isinstance(res, BuiltinConcepts.EVALUATION_ERROR)
assert context.sheerka.isinstance(res.concept, foo)
assert isinstance(res.reason, InfiniteRecursion)
assert res.reason.ids == [foo.id]
def test_i_can_select_the_valid_result_when_multiple_choice_invalid_concept(self, context, service):
with NewOntology(context, "test_i_can_select_the_valid_result_when_multiple_choice"):
with NewOntology(context, "test_i_can_select_the_valid_result_when_multiple_choice_invalid_concept"):
foo, two_ok, two_nok = get_concepts(context,
get_concept("foo", body="two"),
get_concept("two", body="1 +"), # has to come before the other 'two'
@@ -502,7 +521,7 @@ class TestConceptManager(BaseTest):
assert context.sheerka.objvalue(foo) == 2
def test_i_can_select_the_valid_result_when_multiple_choice_evaluation_error(self, context, service):
with NewOntology(context, "test_i_can_select_the_valid_result_when_multiple_choice"):
with NewOntology(context, "test_i_can_select_the_valid_result_when_multiple_choice_evaluation_error"):
foo, two_ok, two_nok = get_concepts(context,
get_concept("foo", body="two"),
get_concept("two", body="1 / 0"), # has to come before the other 'two'
@@ -529,8 +548,35 @@ class TestConceptManager(BaseTest):
with NewOntology(context, "test_i_do_not_use_ret_in_case_of_error"):
foo, baz = get_concepts(context,
get_concept("foo"),
get_concept("baz", body="foo", ret="bar"),
get_concept("baz", body="foo", ret="bar"), # Concept("bar") is not defined
use_sheerka=True)
res = service.evaluate_concept(context, baz)
assert context.sheerka.isinstance(res, BuiltinConcepts.EVALUATION_ERROR)
@pytest.mark.skip("Cannot remove concept")
def test_i_do_not_use_ret_in_case_of_error_when_concept_was_removed(self, context, service):
    # Make sure that #ret# is not used as the result in case of UNKNOWN_CONCEPT error message.
    foo, bar, baz = get_concepts(context,
                                 get_concept("foo"),
                                 get_concept("bar"),
                                 get_concept("baz", body="foo", ret="bar"),  # Concept("bar") exists here; it is removed below
                                 use_sheerka=True)
    service.evaluate_concept(context, baz)  # creates the compiled for Concept("baz")
    context.sheerka.remove_concept(bar)  # Concept("bar") no longer exists, but compiled for "baz" remains the same
    res = service.evaluate_concept(context, baz)
    assert context.sheerka.isinstance(res, BuiltinConcepts.EVALUATION_ERROR)
    # The error context must record the failed #ret# instead of returning it.
    assert "#ret#" in res.reason
    assert res.reason["#ret#"].value == context.sheerka.newn(BuiltinConcepts.UNKNOWN_CONCEPT, requested="bar")
def test_i_cannot_evaluate_when_error(self, context, service):
    with NewOntology(context, "test_i_cannot_evaluate_when_error"):
        foo, = get_concepts(context,
                            get_concept("foo", body="I am a concept"),  # body cannot be evaluated (stale comment referred to "one")
                            use_sheerka=True)
        res = service.evaluate_concept(context, foo)
        # A concept whose body fails to evaluate is reported as INVALID_CONCEPT.
        assert context.sheerka.isinstance(res, BuiltinConcepts.INVALID_CONCEPT)
+101 -5
View File
@@ -6,8 +6,8 @@ from conftest import NewOntology
from core.BuiltinConcepts import BuiltinConcepts
from core.concept import ConceptMetadata
from core.error import ErrorContext
from helpers import get_concepts, get_metadata
from services.SheerkaConceptManager import ConceptAlreadyDefined, ConceptManager
from helpers import get_concept, get_concepts, get_metadata
from services.SheerkaConceptManager import ConceptAlreadyDefined, ConceptManager, ConceptRef
class TestConceptManager(BaseTest):
@@ -86,7 +86,7 @@ class TestConceptManager(BaseTest):
assert metadata.name == "name"
assert metadata.key == "name"
assert metadata.body == "body"
assert metadata.digest == "c75faa4efbc9ef9dbc5174c52786d5b066e2ece41486b81c27336e292917fecb"
assert metadata.digest == "f32363f42e698b1642c8f76f969d76d56f53f0e0732cb651e3360e3ede7b2b11"
assert metadata.all_attrs == ('#where#', '#pre#', '#post#', '#body#', '#ret#')
# is sorted in db
@@ -96,6 +96,60 @@ class TestConceptManager(BaseTest):
assert om.get(ConceptManager.CONCEPTS_BY_KEY_ENTRY, metadata.key) == metadata
assert om.get(ConceptManager.CONCEPTS_BY_HASH_ENTRY, metadata.digest) == metadata
# check first token
assert om.get(ConceptManager.CONCEPT_BY_FIRST_TOKEN_IN_KEY, "name") == ["1001"]
def test_i_can_define_a_new_concept_with_variables(self, context, service):
    """Defining a concept with variables rewrites them as __var__N in the key."""
    with NewOntology(context, "test_i_can_define_a_new_concept_with_variables"):
        result = service.define_new_concept(context,
                                            name="a multiplied by b",
                                            variables=[("a", NotInit), ("b", NotInit)])
        meta = result.value.metadata
        assert isinstance(meta, ConceptMetadata)
        assert meta.id == "1001"
        assert meta.name == "a multiplied by b"
        # Variables are anonymized in the lookup key.
        assert meta.key == "__var__0 multiplied by __var__1"
        assert meta.digest == "17d2360d82fc4264e2bcb75e4aa30ee3de87531acee72f5d939e23bff246b2dd"
        assert meta.all_attrs == ('#where#', '#pre#', '#post#', '#body#', '#ret#', "a", "b")
        # The metadata is indexed in the object manager under every lookup entry.
        manager = context.sheerka.om
        for entry, lookup in ((ConceptManager.CONCEPTS_BY_ID_ENTRY, meta.id),
                              (ConceptManager.CONCEPTS_BY_NAME_ENTRY, meta.name),
                              (ConceptManager.CONCEPTS_BY_KEY_ENTRY, meta.key),
                              (ConceptManager.CONCEPTS_BY_HASH_ENTRY, meta.digest)):
            assert manager.get(entry, lookup) == meta
        # First-token index: "multiplied", since "__var__0" is not a real token.
        assert manager.get(ConceptManager.CONCEPT_BY_FIRST_TOKEN_IN_KEY, "multiplied") == ["1001"]
def test_i_can_define_a_new_concept_using_definition(self, context, service):
    """A concept can carry a display name and a separate definition; the key comes from the definition."""
    with NewOntology(context, "test_i_can_define_a_new_concept_using_definition"):
        result = service.define_new_concept(context,
                                            name="multiplication",
                                            definition="a multiplied by b",
                                            variables=[("a", NotInit), ("b", NotInit)])
        meta = result.value.metadata
        assert isinstance(meta, ConceptMetadata)
        assert meta.id == "1001"
        assert meta.name == "multiplication"
        assert meta.definition == "a multiplied by b"
        # The key is derived from the definition, variables anonymized.
        assert meta.key == "__var__0 multiplied by __var__1"
        assert meta.digest == "b29007ea67bddc48329a2ae0124a320e26c86fb6b106aad6581bc75dfdf5ebeb"
        assert meta.all_attrs == ('#where#', '#pre#', '#post#', '#body#', '#ret#', "a", "b")
        # The metadata is indexed in the object manager under every lookup entry.
        manager = context.sheerka.om
        for entry, lookup in ((ConceptManager.CONCEPTS_BY_ID_ENTRY, meta.id),
                              (ConceptManager.CONCEPTS_BY_NAME_ENTRY, meta.name),
                              (ConceptManager.CONCEPTS_BY_KEY_ENTRY, meta.key),
                              (ConceptManager.CONCEPTS_BY_HASH_ENTRY, meta.digest)):
            assert manager.get(entry, lookup) == meta
        # First-token indexes: "multiplied" for the key, "multiplication" for the name.
        assert manager.get(ConceptManager.CONCEPT_BY_FIRST_TOKEN_IN_KEY, "multiplied") == ["1001"]
        assert manager.get(ConceptManager.CONCEPT_BY_FIRST_TOKEN_IN_NAME, "multiplication") == ["1001"]
def test_i_cannot_create_the_same_concept_twice(self, context, service):
with NewOntology(context, "test_i_cannot_create_the_same_concept_twice"):
res = service.define_new_concept(context, "name", body="body")
@@ -176,11 +230,11 @@ class TestConceptManager(BaseTest):
def test_i_cannot_instantiate_a_concept_which_does_not_exist(self, context, service):
foo = service.newn("foo", var1="value1", var2="value2")
assert foo.key == BuiltinConcepts.UNKNOWN_CONCEPT
assert foo.requested_name == "foo"
assert foo.requested == "foo"
foo = service.newi("1001", var1="value1", var2="value2")
assert foo.key == BuiltinConcepts.UNKNOWN_CONCEPT
assert foo.requested_id == "1001"
assert foo.requested == "#1001"
def test_i_can_instantiate_by_name_when_multiple_results(self, context, service):
with NewOntology(context, "test_i_can_instantiate_by_name_when_multiple_results"):
@@ -255,6 +309,48 @@ class TestConceptManager(BaseTest):
assert context.sheerka.isinstance(res[0], foo)
assert context.sheerka.isinstance(res[1], bar)
def test_i_can_new_using_concept_reference(self, context, service):
    """service.new(ConceptRef(c)) instantiates c whatever its resolution method."""
    with NewOntology(context, "test_i_can_new_using_concept_reference"):
        foo, bar, baz = get_concepts(context, "foo", "bar", "baz", use_sheerka=True)
        # Cover resolution by id, by key, and (for baz) the default method.
        foo.get_runtime_info().info["resolution_method"] = "id"
        bar.get_runtime_info().info["resolution_method"] = "key"
        for concept in (foo, bar, baz):
            instance = service.new(ConceptRef(concept))
            assert context.sheerka.isinstance(instance, concept)
def test_i_can_new_using_concept_reference_when_multiple_results(self, context, service):
    # Two concepts share the name "foo"; resolving a ConceptRef by name must
    # therefore return both matching instances, in definition order.
    # Fix: the ontology name now matches the test name. It was copy-pasted from
    # test_i_can_new_using_concept_reference, which made this test share (and
    # pollute) that test's ontology.
    with NewOntology(context, "test_i_can_new_using_concept_reference_when_multiple_results"):
        foo1, foo2 = get_concepts(context,
                                  get_concept("foo", body="1"),
                                  get_concept("foo", body="2"),
                                  use_sheerka=True)
        foo = get_concept("foo")  # blueprint, no need to be known by Sheerka
        foo.get_runtime_info().info["resolution_method"] = "name"
        foo_concept_ref = ConceptRef(foo)
        res = service.new(foo_concept_ref)
        assert res == [foo1, foo2]
def test_i_cannot_new_using_concept_reference_when_unknown(self, context, service):
    """Resolving a ConceptRef whose blueprint is not registered yields UNKNOWN_CONCEPT."""
    blueprint = get_concept("foo")  # not known by Sheerka
    blueprint.get_runtime_info().info["resolution_method"] = "name"
    res = service.new(ConceptRef(blueprint))
    assert context.sheerka.isinstance(res, BuiltinConcepts.UNKNOWN_CONCEPT)
    # The failed lookup records what was requested.
    assert res.requested == "foo"
def test_unknown_concept_is_return_if_the_identifier_is_not_found(self, service):
    # Instantiating an identifier Sheerka does not know yields an UNKNOWN_CONCEPT instance.
    created = service.new("unknown")
    assert created.name == BuiltinConcepts.UNKNOWN_CONCEPT
@@ -0,0 +1,66 @@
import pytest
from base import BaseTest
from services.SheerkaDummyEventManager import SheerkaDummyEventManager
def example_of_function(context):
    # Plain-function subscriber (no payload) used by the publish tests.
    # NOTE(review): the message text says "example_of_class_method" — presumably a
    # copy-paste from the method subscriber; the tests assert this exact output,
    # so both would have to change together.
    print(f"example_of_class_method. event={context.event.get_digest()}")
def example_of_function_with_data(context, data):
    # Plain-function subscriber that also receives the published payload.
    # NOTE(review): message text says "example_of_class_method" — presumably a
    # copy-paste; the tests assert this exact output.
    print(f"example_of_class_method. event={context.event.get_digest()}, {data=}")
class TestSheerkaEventManager(BaseTest):
    """Subscribe/publish mechanics of the dummy event manager."""

    @pytest.fixture()
    def service(self, sheerka):
        # Hand out the shared service instance, then wipe its subscriptions
        # after the test so the tests stay independent.
        service = sheerka.services[SheerkaDummyEventManager.NAME]
        yield service
        service.test_only_reset_service()

    def example_of_class_method(self, context):
        # Bound-method subscriber (no payload).
        print(f"example_of_class_method. event={context.event.get_digest()}")

    @staticmethod
    def example_of_static_method(context):
        # Static-method subscriber (no payload).
        print(f"example_of_static_method. event={context.event.get_digest()}")

    def example_of_class_method_with_data(self, context, data):
        # Bound-method subscriber that also receives the published payload.
        print(f"example_of_class_method. event={context.event.get_digest()}, {data=}")

    @staticmethod
    def example_of_static_method_with_data(context, data):
        # Static-method subscriber that also receives the published payload.
        print(f"example_of_static_method. event={context.event.get_digest()}, {data=}")

    def test_i_can_subscribe_and_publish(self, context, service, capsys):
        # Bound methods, static methods and plain functions can all subscribe;
        # publish() calls each one, in subscription order.
        topic = "my topic"
        service.subscribe(topic, self.example_of_class_method)
        service.subscribe(topic, self.example_of_static_method)
        service.subscribe(topic, example_of_function)
        service.publish(context, topic)
        captured = capsys.readouterr()
        # The third line also reads "example_of_class_method" because
        # example_of_function's message is copy-pasted from the method's.
        assert captured.out == """example_of_class_method. event=xxx
example_of_static_method. event=xxx
example_of_class_method. event=xxx
"""

    def test_i_can_subscribe_and_publish_with_data(self, context, service, capsys):
        # Same as above, but the published payload is forwarded to each subscriber.
        topic = "my topic"
        service.subscribe(topic, self.example_of_class_method_with_data)
        service.subscribe(topic, self.example_of_static_method_with_data)
        service.subscribe(topic, example_of_function_with_data)
        service.publish(context, topic, "42")
        captured = capsys.readouterr()
        assert captured.out == """example_of_class_method. event=xxx, data='42'
example_of_static_method. event=xxx, data='42'
example_of_class_method. event=xxx, data='42'
"""
+46 -15
View File
@@ -1,3 +1,5 @@
import ast
import pytest
from base import BaseTest, DummyObj
@@ -7,11 +9,13 @@ from core.BuiltinConcepts import BuiltinConcepts
from core.ExecutionContext import ContextActions
from core.concept import ConceptDefaultProps
from core.error import MethodAccessError
from core.python_fragment import PythonFragment
from evaluators.PythonParser import PythonParser
from helpers import _rv, define_new_concept, get_concepts, get_evaluated_concept, get_metadata
from helpers import _rv, define_new_concept, get_concepts, get_evaluated_concept, get_evaluated_concepts, get_metadata
from parsers.ParserInput import ParserInput
from parsers.tokenizer import Token, TokenKind
from services.SheerkaPython import EvalMethod, EvaluationContext, EvaluationRef, Expando, MultipleResults, SheerkaPython
from services.SheerkaConceptManager import ConceptRef
from services.SheerkaPython import EvalMethod, EvaluationContext, Expando, MultipleResults, ObjectRef, SheerkaPython
def get_python_fragment(sheerka, context, command):
@@ -70,7 +74,7 @@ class TestSheerkaPython(BaseTest):
def test_i_can_eval_using_eval_ref(self, sheerka, context, service):
python_fragment = get_python_fragment(sheerka, context, "a")
python_fragment.namespace = {"a": EvaluationRef("self", "a")}
python_fragment.namespace = {"a": ObjectRef("self", "a")}
ret = service.evaluate_python(context, EvaluationContext(), python_fragment,
{"self": DummyObj("my dummy value")})
@@ -95,6 +99,38 @@ class TestSheerkaPython(BaseTest):
ret = service.evaluate_python(context, EvaluationContext(), python_fragment)
assert ret == 3
def test_i_can_eval_when_multiple_concepts(self, sheerka, context, service):
    # Two concepts share the name "one": a string ('one') and an int (1).
    # Evaluating "one + 1" yields 2, i.e. the candidate that works (the int),
    # rather than failing on 'one' + 1.
    with NewOntology(context, "test_i_can_eval_when_multiple_concepts"):
        get_concepts(context,
                     get_metadata("one", body="'one'"),
                     get_metadata("one", body="1"),
                     use_sheerka=True)
        python_fragment = get_python_fragment(sheerka, context, "one + 1")
        ret = service.evaluate_python(context, EvaluationContext(), python_fragment)
        assert ret == 2
def test_i_can_eval_when_multiple_result_in_local_namespace(self, sheerka, context, service):
    # In the test, the PythonFragment contains a MultipleResults in its namespace
    # (normally, the MultipleResults is created inside evaluate_python).
    # We need to make sure that multiple results are created in the same way.
    with NewOntology(context, "test_i_can_eval_when_multiple_result_in_local_namespace"):
        one1, one2 = get_concepts(context,
                                  get_metadata("one", body="'one'"),
                                  get_metadata("one", body="1"),
                                  use_sheerka=True)
        concept_ref = "__concept_id__"
        # Hand-build the fragment: its source is only the placeholder name, and
        # the namespace pre-binds that name to a MultipleResults of both "one"s.
        ast_tree = ast.parse(concept_ref, "<user input>", 'eval')
        ref = MultipleResults(ConceptRef(one1), ConceptRef(one2))
        python_fragment = PythonFragment(concept_ref, ast_tree=ast_tree, namespace={concept_ref: ref})
        # EvalMethod.All: presumably every candidate is evaluated rather than
        # stopping at the first success — consistent with the expected result below.
        ret = service.evaluate_python(context, EvaluationContext(eval_method=EvalMethod.All), python_fragment)
        evaluated_one1, evaluated_one2 = get_evaluated_concepts(context, one1, one2, use_sheerka=True)
        # Each evaluated concept is paired with its value in the flattened result.
        assert ret == MultipleResults(evaluated_one1, "one", evaluated_one2, 1)
def test_i_can_remember_previous_results(self, sheerka, context, service):
python_fragment = get_python_fragment(sheerka, context, "a=10")
ret = service.evaluate_python(context, EvaluationContext(), python_fragment)
@@ -151,18 +187,6 @@ class TestSheerkaPython(BaseTest):
get_evaluated_concept(foo_3, body='bar'),
"bar")
def test_i_can_eval_when_multiple_concepts(self, sheerka, context, service):
with NewOntology(context, "test_i_can_eval_when_multiple_concepts"):
get_concepts(context,
get_metadata("one", body="'one'"),
get_metadata("one", body="1"),
use_sheerka=True)
python_fragment = get_python_fragment(sheerka, context, "one + 1")
ret = service.evaluate_python(context, EvaluationContext(), python_fragment)
assert ret == 2
def test_i_can_eval_until_a_successful_result_is_found(self, sheerka, context, service):
with NewOntology(context, "test_i_can_eval_when_multiple_concepts"):
get_concepts(context,
@@ -338,3 +362,10 @@ class TestSheerkaPython(BaseTest):
foo, bar = get_concepts(context, "foo", "bar")
assert MultipleResults(foo, "one", bar, 1).concepts_only() == MultipleResults(foo, bar)
assert MultipleResults("one", 1).concepts_only() == MultipleResults()
def test_i_can_add_multiple_results_of_multiple_results(self, context):
    """A MultipleResults passed to the constructor is flattened into the items."""
    foo, bar, baz, qux = get_concepts(context, "foo", "bar", "baz", "qux")
    inner = MultipleResults(foo, bar)
    outer = MultipleResults(bar, baz, inner)
    # The nested results are appended after the direct ones; duplicates are kept.
    assert outer.items == [bar, baz, foo, bar]
+39 -2
View File
@@ -2,7 +2,8 @@ import pytest
from common.global_symbols import NotInit
from core.concept import Concept, ConceptDefaultProps, ConceptMetadata, DefinitionType
from helpers import GetNextId, get_concept, get_concepts, get_metadata, get_metadatas, get_evaluated_concept
from helpers import GetNextId, _mt, _ut, get_concept, get_concepts, get_evaluated_concept, get_from, get_metadata, \
get_metadatas
def test_i_can_get_default_value_when_get_metadata():
@@ -233,7 +234,7 @@ def test_i_can_get_multiple_concepts_when_same_name(sheerka, context):
assert sheerka.isinstance(one_int, "one")
def test_i_can_create_test_concept(sheerka, context):
def test_i_can_create_test_concept():
concept = get_concept("one", body="'one'")
test_concept = get_evaluated_concept(concept, body='hello', a="value for a")
@@ -241,3 +242,39 @@ def test_i_can_create_test_concept(sheerka, context):
assert test_concept.get_metadata() == concept.get_metadata()
assert test_concept.get_value(ConceptDefaultProps.BODY) == "hello"
assert test_concept.get_value("a") == "value for a"
def test_i_can_dummy_evaluate_concept():
    """get_evaluated_concept dummy-evaluates each attribute's source string."""
    blueprint = get_concept("one", body="'one'", where="True", pre="False", ret="1", post="1.0")
    dummy = get_evaluated_concept(blueprint)
    assert dummy.get_value(ConceptDefaultProps.WHERE) is True
    assert dummy.get_value(ConceptDefaultProps.PRE) is False
    assert dummy.get_value(ConceptDefaultProps.BODY) == "one"
    assert dummy.get_value(ConceptDefaultProps.RET) == 1
    assert dummy.get_value(ConceptDefaultProps.POST) == 1.0
    # A keyword passed to get_evaluated_concept overrides the evaluated source,
    # and attributes that were never supplied stay NotInit.
    blueprint = get_concept("one", body='"one"', ret="'a value'")
    dummy = get_evaluated_concept(blueprint, ret='forced value')
    assert dummy.get_value(ConceptDefaultProps.BODY) == "one"
    assert dummy.get_value(ConceptDefaultProps.RET) == "forced value"
    for untouched in (ConceptDefaultProps.WHERE, ConceptDefaultProps.PRE, ConceptDefaultProps.POST):
        assert dummy.get_value(untouched) == NotInit
def test_i_can_get_from():
    """get_from turns matched/unmatched parts into tokens with running offsets."""
    # A single matched token starts at offset 0.
    assert get_from(_mt("c:i am a concept#1001:")) == [_mt("1001", 0, 6)]
    # A single unmatched token keeps its raw text.
    assert get_from(_ut("some unrecognized stuff")) == [_ut("some unrecognized stuff", 0, 4)]
    # Mixed input: the second token's offsets continue after the first one.
    assert get_from(_mt("c:i am a concept#1001:"), _ut("some unrecognized stuff")) == \
        [_mt("1001", 0, 6), _ut("some unrecognized stuff", 7, 11)]
    # A parser override is applied to the matched tokens only.
    assert get_from(_mt("c:i am a concept#1001:"), _ut("some unrecognized stuff"), parser="other") == \
        [_mt("1001", 0, 6, parser="other"), _ut("some unrecognized stuff", 7, 11)]
    # Two matched tokens both resolve to the same concept id.
    assert get_from(_mt("c:i am a concept#1001:"), _mt("c:#1001:")) == \
        [_mt("1001", 0, 6), _mt("1001", 7, 13)]