From e26c83a825309f5e59f520dd3263a96e16d788b0 Mon Sep 17 00:00:00 2001 From: Kodjo Sossouvi Date: Mon, 11 Jan 2021 15:36:03 +0100 Subject: [PATCH] Implemented SheerkaOntology --- Makefile | 1 + _concepts_admin.txt | 1 + _concepts_default.txt | 1 + _concepts_test-adjective.txt | 1 + conftest.py | 29 +- docs/source/blog/blog.rst | 2 +- docs/source/blog/persistence.rst | 2 +- setup.py | 2 +- src/cache/BaseCache.py | 159 +- src/cache/Cache.py | 28 +- src/cache/CacheManager.py | 147 +- src/cache/DictionaryCache.py | 55 +- src/cache/FastCache.py | 16 +- src/cache/IncCache.py | 14 +- src/cache/ListCache.py | 32 +- src/cache/ListIfNeededCache.py | 110 +- src/cache/SetCache.py | 81 +- src/core/ast_helpers.py | 11 +- src/core/builtin_concepts.py | 2 +- src/core/builtin_concepts_ids.py | 3 + src/core/builtin_helpers.py | 67 +- src/core/concept.py | 41 +- src/core/global_symbols.py | 32 + src/core/rule.py | 26 +- src/core/sheerka/Sheerka.py | 254 +-- src/core/sheerka/SheerkaOntologyManager.py | 470 ++++++ src/core/sheerka/services/SheerkaAdmin.py | 26 +- .../services/SheerkaComparisonManager.py | 48 +- .../sheerka/services/SheerkaConceptManager.py | 100 +- .../services/SheerkaConceptsAlgebra.py | 5 +- .../sheerka/services/SheerkaDebugManager.py | 47 +- .../services/SheerkaEvaluateConcept.py | 5 +- .../sheerka/services/SheerkaEventManager.py | 17 +- src/core/sheerka/services/SheerkaExecute.py | 10 +- .../SheerkaFunctionsParametersHistory.py | 21 +- .../sheerka/services/SheerkaHistoryManager.py | 4 +- ...rkaSetsManager.py => SheerkaIsAManager.py} | 49 +- src/core/sheerka/services/SheerkaMemory.py | 30 +- src/core/sheerka/services/SheerkaQuestion.py | 21 +- .../sheerka/services/SheerkaResultManager.py | 40 +- .../sheerka/services/SheerkaRuleManager.py | 77 +- .../services/SheerkaVariableManager.py | 80 +- src/core/sheerka/services/sheerka_service.py | 12 +- src/core/simple_debug.py | 14 +- src/core/utils.py | 63 +- src/evaluators/AddToMemoryEvaluator.py | 2 +- 
src/evaluators/DefConceptEvaluator.py | 4 +- src/evaluators/PostExecutionEvaluator.py | 7 +- src/evaluators/PythonEvaluator.py | 22 +- src/evaluators/ReturnBodyEvaluator.py | 3 +- src/evaluators/RuleEvaluator.py | 3 +- src/out/DeveloperVisitor.py | 3 +- src/parsers/BaseNodeParser.py | 42 +- src/parsers/ShortTermMemoryParser.py | 10 +- src/parsers/SyaNodeParser.py | 25 +- src/printer/SheerkaPrinter.py | 3 +- src/sdp/readme.md | 1 + src/sdp/sheerkaDataProvider.py | 85 +- src/sdp/sheerkaSerializer.py | 25 + src/sheerkapickle/SheerkaPickler.py | 23 +- src/sheerkapickle/SheerkaUnpickler.py | 17 + src/sheerkapickle/sheerka_handlers.py | 5 +- src/sheerkapickle/tags.py | 1 + src/sheerkapickle/utils.py | 8 +- tests/BaseTest.py | 180 +- tests/TestUsingFileBasedSheerka.py | 62 +- tests/TestUsingMemoryBasedSheerka.py | 45 +- tests/cache/__init__.py | 18 + tests/cache/test_CacheManager.py | 224 +++ tests/cache/test_DictionaryCache.py | 162 ++ tests/cache/test_FastCache.py | 14 +- tests/cache/test_IncCache.py | 84 + tests/cache/test_ListCache.py | 274 ++++ tests/cache/test_ListIfNeededCache.py | 569 +++++++ tests/cache/test_SetCache.py | 477 ++++++ tests/cache/test_cache.py | 655 +++----- tests/cache/test_cache_manager.py | 157 -- tests/core/test_SheerkaAdmin.py | 12 +- tests/core/test_SheerkaComparisonManager.py | 123 +- tests/core/test_SheerkaConceptAlgebra.py | 10 +- tests/core/test_SheerkaConceptManager.py | 227 ++- tests/core/test_SheerkaDebugManager.py | 72 +- tests/core/test_SheerkaEvaluateConcept.py | 10 +- tests/core/test_SheerkaEvaluateRules.py | 10 +- tests/core/test_SheerkaEventManager.py | 23 +- .../test_SheerkaFunctionsParametersHistory.py | 66 +- tests/core/test_SheerkaHistoryManager.py | 6 +- ...tsManager.py => test_SheerkaIsAManager.py} | 132 +- tests/core/test_SheerkaMemory.py | 43 +- tests/core/test_SheerkaRuleManager.py | 156 +- tests/core/test_SheerkaVariableManager.py | 102 +- tests/core/test_sheerka.py | 236 ++- tests/core/test_sheerkaResultManager.py | 98 
+- tests/core/test_sheerka_call_evaluators.py | 2 +- tests/core/test_sheerka_call_parsers.py | 2 +- tests/core/test_sheerka_ontology.py | 1460 +++++++++++++++++ tests/core/test_sheerka_printer.py | 22 +- tests/core/test_utils.py | 12 +- .../test_AddConceptInSetEvaluator.py | 8 +- tests/evaluators/test_DefConceptEvaluator.py | 2 +- tests/evaluators/test_EvalEvaluator.py | 4 +- tests/evaluators/test_LexerNodeEvaluator.py | 2 +- tests/evaluators/test_PythonEvaluator.py | 12 +- tests/non_reg/test_sheerka_display.py | 27 +- tests/non_reg/test_sheerka_non_reg.py | 63 +- tests/out/test_SheerkaOut.py | 25 +- tests/parsers/test_BaseNodeParser.py | 31 +- tests/parsers/test_BnfNodeParser.py | 116 +- tests/parsers/test_DefFormatRuleParser.py | 24 +- tests/parsers/test_FunctionParser.py | 24 +- tests/parsers/test_RuleParser.py | 21 +- ...msParser.py => test_SequenceNodeParser.py} | 5 +- tests/parsers/test_SyaNodeParser.py | 70 +- tests/parsers/test_UnrecognizedNodeParser.py | 41 +- tests/sdp/test_sheerkaDataProvider.py | 61 +- tests/sdp/test_sheerkaSerializer.py | 17 +- tests/sheerkapickle/test_SheerkaPickler.py | 29 + tests/sheerkapickle/test_sheerka_handlers.py | 2 +- utils/sheerka.rebuild.sh | 4 +- 119 files changed, 6876 insertions(+), 2002 deletions(-) create mode 100644 src/core/sheerka/SheerkaOntologyManager.py rename src/core/sheerka/services/{SheerkaSetsManager.py => SheerkaIsAManager.py} (85%) create mode 100644 tests/cache/test_CacheManager.py create mode 100644 tests/cache/test_DictionaryCache.py create mode 100644 tests/cache/test_IncCache.py create mode 100644 tests/cache/test_ListCache.py create mode 100644 tests/cache/test_ListIfNeededCache.py create mode 100644 tests/cache/test_SetCache.py delete mode 100644 tests/cache/test_cache_manager.py rename tests/core/{test_SheerkaSetsManager.py => test_SheerkaIsAManager.py} (74%) create mode 100644 tests/core/test_sheerka_ontology.py rename tests/parsers/{test_AtomsParser.py => test_SequenceNodeParser.py} (99%) diff 
--git a/Makefile b/Makefile index 86d4683..691efc3 100644 --- a/Makefile +++ b/Makefile @@ -12,6 +12,7 @@ clean: rm -rf docs/source/_build rm -rf prof rm -rf tests/prof + rm -rf tests/build rm -rf Untitled*.ipynb find . -name '.pytest_cache' -exec rm -rf {} + find . -name '__pycache__' -exec rm -rf {} + diff --git a/_concepts_admin.txt b/_concepts_admin.txt index 56602e4..1727684 100644 --- a/_concepts_admin.txt +++ b/_concepts_admin.txt @@ -1,4 +1,5 @@ # admin helpers +push_ontology("admin") def concept explain as get_results(id=0, depth=2) set_isa(c:explain:, __AUTO_EVAL) diff --git a/_concepts_default.txt b/_concepts_default.txt index c5c8212..b722a13 100644 --- a/_concepts_default.txt +++ b/_concepts_default.txt @@ -1,4 +1,5 @@ # question +push_ontology("english") def concept q from q ? as question(q) pre is_question() set_is_lesser(__PRECEDENCE, q, 'Sya') set_auto_eval(c:q:) diff --git a/_concepts_test-adjective.txt b/_concepts_test-adjective.txt index 0fd02a1..d98da66 100644 --- a/_concepts_test-adjective.txt +++ b/_concepts_test-adjective.txt @@ -1,4 +1,5 @@ #import full +push_ontology("test-adjective") def concept size def concept little def concept girl diff --git a/conftest.py b/conftest.py index 838358d..8685712 100644 --- a/conftest.py +++ b/conftest.py @@ -1,12 +1,33 @@ +import os +from os import path +import shutil + collect_ignore = [ "setup.py" ] collect_ignore_glob = [ + # "tests/cache/", # "tests/core/", - #"tests/evaluators/", + # "tests/evaluators/", # "tests/non_reg/", - #"tests/parsers/", - #"tests/sdp/", - #"tests/sheerkapickle/", + # "tests/out/", + # "tests/parsers/", + # "tests/repl/", + # "tests/sdp/", + # "tests/sheerkapickle/", ] + +TESTS_ROOT_FOLDER = path.abspath("../../build/tests") +SHEERKA_TEST_FOLDER = path.join(TESTS_ROOT_FOLDER, "init_folder") + + +def pytest_sessionstart(session): + """ + Called after the Session object has been created and + before performing collection and entering the run test loop. 
+ """ + if path.exists(TESTS_ROOT_FOLDER): + shutil.rmtree(TESTS_ROOT_FOLDER) + + os.makedirs(TESTS_ROOT_FOLDER) diff --git a/docs/source/blog/blog.rst b/docs/source/blog/blog.rst index 1570160..263a4cc 100644 --- a/docs/source/blog/blog.rst +++ b/docs/source/blog/blog.rst @@ -246,7 +246,7 @@ To better explain what I have in mind. let's say that I want to pretty print an pp = pprint.PrettyPrinter(indent=4) pp.pprint(stuff) -I need three line in oder to be able to pretty print. I will first try by dumping the +I need three lines in oder to be able to pretty print. I will first try by dumping the globals(), using pickle and load it back whenever needed. If it does not work as expected, I can find a way to save the commands a exec everything diff --git a/docs/source/blog/persistence.rst b/docs/source/blog/persistence.rst index 09ff634..b79388c 100644 --- a/docs/source/blog/persistence.rst +++ b/docs/source/blog/persistence.rst @@ -11,7 +11,7 @@ My simple idea for the persistence is that **everything** should be persisted. The actual main difference between an human being and a computer is that we have the ability to remember almost everything (at least everything that we have not forgotten). -On the contrary, we only allow computer to remember specific stuff that we thing (as of +On the contrary, we only allow computer to remember specific stuff that we think (as of today) will be relevant in the future. 
There are two majors issues with that: diff --git a/setup.py b/setup.py index c080e3f..7194c22 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ from setuptools import setup setup( name='sheerka', version='0.1', - license='', + license='All rights reserved.', description='A human/computer communication interface', long_description='', author='Kodjo Sossouvi', diff --git a/src/cache/BaseCache.py b/src/cache/BaseCache.py index 152e853..2e68869 100644 --- a/src/cache/BaseCache.py +++ b/src/cache/BaseCache.py @@ -1,5 +1,8 @@ from threading import RLock +from core.global_symbols import NotFound, Removed +from core.utils import sheerka_deepcopy + MAX_INITIALIZED_KEY = 100 @@ -10,14 +13,17 @@ class BaseCache: When you put the same key twice, the previous element is overridden """ - def __init__(self, max_size=None, default=None, extend_exists=None): + def __init__(self, max_size=None, default=NotFound, extend_exists=None, alt_sdp_get=None, sdp=None): self._cache = {} self._max_size = max_size self._default = default # default value to return when key is not found. 
It can be a callable of key self._extend_exists = extend_exists # search in remote + self._alt_sdp_get = alt_sdp_get # How to get the value when called by alt_sdp + self._sdp = sdp # current instance of SheerkaDataProvider self._lock = RLock() self._current_size = 0 self._initialized_keys = set() # to keep the list of the keys already requested (using get()) + self._is_cleared = False # indicate that clear() was called self.to_add = set() self.to_remove = set() @@ -49,42 +55,75 @@ class BaseCache: def __repr__(self): return f"{self.__class__.__name__}(size={self._current_size}, #keys={len(self._cache)})" - def configure(self, max_size=None, default=None, extend_exists=None): + def configure(self, max_size=None, default=NotFound, extend_exists=None, alt_sdp_get=None, sdp=None): if max_size is not None: self._max_size = max_size - if default is not None: + if default is not NotFound: self._default = default if extend_exists is not None: self._extend_exists = extend_exists - def disable_default(self): - self._default = None + if alt_sdp_get is not None: + self._alt_sdp_get = alt_sdp_get - def put(self, key, value): + if sdp is not None: + self._sdp = sdp + + return self + + def auto_configure(self, cache_name): + """ + Convenient way to configure the cache + :param cache_name: + :return: + """ + self._default = lambda sdp, key: sdp.get(cache_name, key) + self._extend_exists = lambda sdp, key: sdp.exists(cache_name, key) + self._alt_sdp_get = lambda sdp, key: sdp.alt_get(cache_name, key) # by default, same than get + + return self + + def disable_default(self): + self._default = (lambda sdp, key: NotFound) if self._sdp else (lambda key: NotFound) + + def put(self, key, value, alt_sdp=None): """ Add a new entry in cache :param key: :param value: + :param alt_sdp: :return: """ with self._lock: if self._max_size and self._current_size >= self._max_size: self.evict(self._max_size - self._current_size + 1) - if self._put(key, value): + if self._put(key, value, alt_sdp): 
self._current_size += 1 - def get(self, key): + def get(self, key, alt_sdp=None): """ Retrieve an entry from the cache If the entry does not exist, will use the 'default' value or delegate :param key: + :param alt_sdp: if not found in cache._sdp, look in other repositories :return: """ with self._lock: - return self._get(key) + return self._get(key, alt_sdp) + + def alt_get(self, key): + """ + Alternate way to get an entry, from concept cache + This is mainly used for IncCache, in order to get the value without increasing it + It used for another cache, it must return the value from key WITHOUT modifying the state of the cache + :param key: + :return: + """ + with self._lock: + return self._alt_get(key) def get_all(self): """ @@ -98,36 +137,63 @@ class BaseCache: def inner_get(self, key): return self._cache[key] - def update(self, old_key, old_value, new_key, new_value): + def update(self, old_key, old_value, new_key, new_value, alt_sdp=None): """ Update an entry in the cache :param old_key: key of the previous version of the entry :param old_value: previous version of the entry :param new_key: key of the entry :param new_value: new value + :param alt_sdp: new value :return: """ with self._lock: - self._update(old_key, old_value, new_key, new_value) + self._update(old_key, old_value, new_key, new_value, alt_sdp) - def delete(self, key, value=None): + def delete(self, key, value=None, alt_sdp=None): with self._lock: try: - self._delete(key, value) + self._sync(key) + self._delete(key, value, alt_sdp) + return True except KeyError: - pass + return False - def populate(self, populate_function, get_key_function): + def populate(self, populate_function, get_key_function, reset_events=False): """ Initialise the cache with a bunch of data :param populate_function: :param get_key_function: + :param reset_events: :return: """ with self._lock: - for item in populate_function(): + if reset_events: + to_add_copy = self.to_add.copy() + to_remove_copy = self.to_remove.copy() + + 
for item in (populate_function(self._sdp) if self._sdp else populate_function()): self.put(get_key_function(item), item) + if reset_events: + self.to_add = to_add_copy + self.to_remove = to_remove_copy + + def force_value(self, key, value): + """ + Force a value into a key without raising any event + """ + with self._lock: + self._cache[key] = value + + def remove_initialized_key(self, key): + """ + When a value is requested by alt_sdp, we should not keep track of the request + As the outcome is not known + """ + with self._lock: + self._initialized_keys.remove(key) + def has(self, key): """ Return True if the key is in the cache @@ -149,7 +215,10 @@ class BaseCache: if key in self._cache: return True - return self._extend_exists(key) if self._extend_exists else False + if self._extend_exists: + return self._extend_exists(self._sdp, key) if self._sdp else self._extend_exists(key) + else: + return False def evict(self, nb_items): """ @@ -195,13 +264,16 @@ class BaseCache: return len(to_delete) - def clear(self): + def clear(self, set_is_cleared=True): with self._lock: + # Seems that remote sdp is not correctly updated self._cache.clear() self._current_size = 0 self._initialized_keys.clear() self.to_add.clear() self.to_remove.clear() + if set_is_cleared: + self._is_cleared = True def dump(self): with self._lock: @@ -225,9 +297,32 @@ class BaseCache: self.to_add.clear() self.to_remove.clear() + def reset_initialized_keys(self): + """ + Use when an ontology is put back. 
Reset all the previous requests as alt_sdp is a new one + """ + with self._lock: + self._initialized_keys.clear() + + def is_cleared(self): + with self._lock: + return self._is_cleared + + def clone(self): + return type(self)(self._max_size, self._default, self._extend_exists, self._alt_sdp_get, self._sdp) + + def test_only_reset(self): + """ + Clears the cache, but does not set is_cleared to True + It's a convenient way to clear the cache without altering alt_sdp behaviour + """ + self.clear(set_is_cleared=False) + def _sync(self, *keys): + # KSI 2020-12-29. DO not try to use alt_sdp here + # Sync must only sync with the current sdp for key in keys: - if key not in self._initialized_keys and self._default: + if key not in self._initialized_keys and callable(self._default): # to keep sync with the remote repo is needed # first check self._initialized_keys to prevent infinite loop self.get(key) @@ -246,7 +341,7 @@ class BaseCache: except KeyError: pass - def _get(self, key): + def _get(self, key, alt_sdp=None): try: value = self._cache[key] except KeyError: @@ -254,11 +349,18 @@ class BaseCache: self._initialized_keys.clear() if callable(self._default): if key in self._initialized_keys: - return None + # it means that we have already asked the repository + return NotFound - value = self._default(key) - if value is not None: - self._cache[key] = value + simple_copy = True + value = self._default(self._sdp, key) if self._sdp else self._default(key) + if value is NotFound and alt_sdp and not self._is_cleared: + value = self._alt_sdp_get(alt_sdp, key) + simple_copy = False + + if value is not NotFound: + self._cache[key] = value if simple_copy else sheerka_deepcopy(value) + value = self._cache[key] # update _current_size if isinstance(value, (list, set)): @@ -271,11 +373,14 @@ class BaseCache: return value - def _put(self, key, value): + def _alt_get(self, key): + return self._get(key) # by default, point to _get + + def _put(self, key, value, alt_sdp): pass - def 
_update(self, old_key, old_value, new_key, new_value): + def _update(self, old_key, old_value, new_key, new_value, alt_sdp): pass - def _delete(self, key, value): + def _delete(self, key, value, alt_sdp): raise NotImplementedError() diff --git a/src/cache/Cache.py b/src/cache/Cache.py index 1295133..165cdee 100644 --- a/src/cache/Cache.py +++ b/src/cache/Cache.py @@ -1,6 +1,7 @@ from threading import RLock from cache.BaseCache import BaseCache +from core.global_symbols import Removed class Cache(BaseCache): @@ -10,23 +11,32 @@ class Cache(BaseCache): When you put the same key twice, the previous element is overridden """ - def _put(self, key, value): + def _put(self, key, value, alt_sdp): res = key not in self._cache self._cache[key] = value self._add_to_add(key) return res - def _update(self, old_key, old_value, new_key, new_value): + def _update(self, old_key, old_value, new_key, new_value, alt_sdp): self._cache[new_key] = new_value self._add_to_add(new_key) if new_key != old_key: self._sync(old_key) - del (self._cache[old_key]) - self._add_to_remove(old_key) - - def _delete(self, key, value): - del(self._cache[key]) - self._current_size -= 1 - self._add_to_remove(key) + if not self._is_cleared and alt_sdp and self._extend_exists and self._extend_exists(alt_sdp, old_key): + self._cache[old_key] = Removed + self._add_to_add(old_key) + self._current_size += 1 + else: + del (self._cache[old_key]) + self._add_to_remove(old_key) + def _delete(self, key, value, alt_sdp): + if not self._is_cleared and alt_sdp and self._extend_exists and self._extend_exists(alt_sdp, key): + self._cache[key] = Removed + self._add_to_add(key) + # do not decrease self._current_size as 'Removed' takes on slot + else: + del (self._cache[key]) + self._add_to_remove(key) + self._current_size -= 1 diff --git a/src/cache/CacheManager.py b/src/cache/CacheManager.py index ead772a..2f38fac 100644 --- a/src/cache/CacheManager.py +++ b/src/cache/CacheManager.py @@ -4,6 +4,7 @@ from typing import 
Callable from cache.BaseCache import BaseCache from core.concept import Concept +from core.global_symbols import NotFound @dataclass @@ -37,8 +38,9 @@ class CacheManager: Single class to manage all the caches """ - def __init__(self, cache_only): + def __init__(self, cache_only, sdp=None): self.cache_only = cache_only # if true disable all remote access when key not found + self.sdp = sdp self.caches = {} self.concept_caches = [] self.is_dirty = False # to indicate that the value of a cache has changed @@ -57,6 +59,8 @@ class CacheManager: with self._lock: if self.cache_only: cache.disable_default() + if self.sdp: + cache.configure(sdp=self.sdp) self.caches[name] = CacheDefinition(cache, use_ref, get_key) self.concept_caches.append(name) @@ -70,8 +74,13 @@ class CacheManager: :return: """ with self._lock: + if self.sdp: + cache.configure(sdp=self.sdp) + if self.cache_only: cache.disable_default() + persist = False + self.caches[name] = CacheDefinition(cache, use_ref, None, persist) def add_concept(self, concept): @@ -89,11 +98,12 @@ class CacheManager: self.is_dirty = True - def update_concept(self, old, new): + def update_concept(self, old, new, alt_sdp=None): """ Update a concept. :param old: old version of the concept :param new: new version of the concept + :param alt_sdp: if not found in self.sdp, look in other repositories :return: """ with self._lock: @@ -103,42 +113,15 @@ class CacheManager: old_key = cache_def.get_key(old) new_key = cache_def.get_key(new) - cache_def.cache.update(old_key, old, new_key, new) + cache_def.cache.update(old_key, old, new_key, new, alt_sdp=alt_sdp) self.is_dirty = True - # how can you update an entry it the key may have changed ? - # You need to have an invariant. 
By convention the keys in the first cache cannot change - # with self._lock: - # iter_cache_def = iter(self.caches) - # - # cache_def = next(iter_cache_def) - # old_key = cache_def.get_key(concept) - # - # try: - # while True: - # items = cache_def.cache[old_key] - # if isinstance(items, (list, set)): - # for item in items: - # if item.id == concept.id: - # break - # else: - # raise IndexError(f"{old_key=}, id={concept.id}") - # - # cache_def.cache.update(old_key, item, cache_def.get_key(concept), concept) - # - # else: - # cache_def.cache.update(old_key, items, cache_def.get_key(concept), concept) - # - # cache_def = next(iter_cache_def) - # except StopIteration: - # pass - # self.is_dirty = True - - def remove_concept(self, concept): + def remove_concept(self, concept, alt_sdp=None): """ Remove a concept from all caches :param concept: + :param alt_sdp: if not found in self.sdp, look in other repositories :return: """ with self._lock: @@ -148,25 +131,66 @@ class CacheManager: concept_id = ref_cache_def.get_key(concept) ref_concept = ref_cache_def.cache.get(concept_id) - if ref_concept is None: + if ref_concept is NotFound and alt_sdp: + ref_concept = alt_sdp.get(self.concept_caches[0], concept_id) + + if ref_concept is NotFound: raise ConceptNotFound(concept) for cache_name in self.concept_caches: cache_def = self.caches[cache_name] key = cache_def.get_key(ref_concept) - cache_def.cache.delete(key, ref_concept) + cache_def.cache.delete(key, ref_concept, alt_sdp=alt_sdp) self.is_dirty = True - def get(self, cache_name, key): + def get(self, cache_name, key, alt_sdp=None): """ From concept cache, get an entry :param cache_name: :param key: + :param alt_sdp: if not found in self.sdp, look in other repositories + :return: + """ + with self._lock: + return self.caches[cache_name].cache.get(key, alt_sdp) + + def alt_get(self, cache_name, key): + """ + Alternate way to get an entry, from concept cache + This is mainly used for IncCache, in order to get the value without 
increasing it + :param cache_name: + :param key: :return: """ with self._lock: - return self.caches[cache_name].cache.get(key) + return self.caches[cache_name].cache.alt_get(key) + + def put(self, cache_name, key, value, alt_sdp=None): + """ + Add to a cache + :param cache_name: + :param key: + :param value: + :param alt_sdp: if not found in self.sdp, look in other repositories + :return: + """ + with self._lock: + self.caches[cache_name].cache.put(key, value, alt_sdp) + self.is_dirty = True + + def delete(self, cache_name, key, value=None, alt_sdp=None): + """ + Delete an entry from the cache + :param cache_name: + :param key: + :param value: + :param alt_sdp: if not found in self.sdp, look in other repositories + :return: + """ + with self._lock: + if self.caches[cache_name].cache.delete(key, value, alt_sdp): + self.is_dirty = True def get_cache(self, cache_name): """ @@ -186,40 +210,31 @@ class CacheManager: """ return self.caches[cache_name].cache.copy() - def put(self, cache_name, key, value): - """ - Add to a cache - :param cache_name: - :param key: - :param value: - :return: - """ - with self._lock: - self.caches[cache_name].cache.put(key, value) - self.is_dirty = True - - def delete(self, cache_name, key, value=None): - """ - Delete an entry from the cache - :param cache_name: - :param key: - :param value: - :return: - """ - with self._lock: - self.caches[cache_name].cache.delete(key, value) - self.is_dirty = True - - def populate(self, cache_name, populate_function, get_key_function): + def populate(self, cache_name, populate_function, get_key_function, reset_events=False): """ Populate a specific cache with a bunch of items :param cache_name: :param populate_function: how to get the items :param get_key_function: how to get the key, out of an item + :param reset_events: reset to_add and to_remove events after populate :return: """ with self._lock: - self.caches[cache_name].cache.init(populate_function, get_key_function) + 
self.caches[cache_name].cache.populate(populate_function, get_key_function, reset_events) + + def force_value(self, cache_name, key, value): + """ + Update the content of the cache, but does not raise any event + """ + with self._lock: + self.caches[cache_name].cache.force_value(key, value) + + def remove_initialized_key(self, cache_name, key): + """ + + """ + with self._lock: + self.caches[cache_name].cache.remove_initialized_key(key) def has(self, cache_name, key): """ @@ -267,7 +282,7 @@ class CacheManager: return with self._lock: - with context.sheerka.sdp.get_transaction(context.event.get_digest()) as transaction: + with self.sdp.get_transaction(context.event.get_digest()) as transaction: for cache_name, cache_def in self.caches.items(): if not cache_def.persist: continue @@ -287,13 +302,13 @@ class CacheManager: cache_def.cache.reset_events() self.is_dirty = False - def clear(self, cache_name=None): + def clear(self, cache_name=None, set_is_cleared=True): with self._lock: if cache_name: - self.caches[cache_name].cache.clear() + self.caches[cache_name].cache.clear(set_is_cleared) else: for cache_def in self.caches.values(): - cache_def.cache.clear() + cache_def.cache.clear(set_is_cleared) def dump(self): """ diff --git a/src/cache/DictionaryCache.py b/src/cache/DictionaryCache.py index 7ae79cd..ab04baa 100644 --- a/src/cache/DictionaryCache.py +++ b/src/cache/DictionaryCache.py @@ -1,30 +1,62 @@ -from cache.BaseCache import BaseCache +from cache.BaseCache import BaseCache, MAX_INITIALIZED_KEY +from core.global_symbols import NotFound, Removed +from core.utils import sheerka_deepcopy class DictionaryCache(BaseCache): - def _get(self, key): + """ + Kind of all or nothing dictionary database + You can get the values key by by + But when you want to put, you must put the whole database + For this reason, alt_sdp is not supported. 
The top ontology layer contains the whole database + """ + + def auto_configure(self, cache_name): + """ + Convenient way to configure the cache + :param cache_name: + :return: + """ + self._default = lambda sdp, key: sdp.get(cache_name) # retrieve the whole entry + self._extend_exists = None # not used + self._alt_sdp_get = None # not used + + return self + + def _get(self, key, alt_sdp=None): """ Management of the default is different :param key: :return: """ try: - value = self._cache[key] - return value + return self._cache[key] except KeyError: + if key in self._initialized_keys: + return NotFound + + if len(self._initialized_keys) == MAX_INITIALIZED_KEY: + self._initialized_keys.clear() + + self._initialized_keys.add(key) + if callable(self._default): - self._cache = self._default(key) or {} + default_values = self._default(self._sdp, key) if self._sdp else self._default(key) else: - self._cache = self._default.copy() if self._default else {} + default_values = self._default + + if isinstance(default_values, dict): + self._cache.update(default_values) # update the whole cache dictionary to resync with remote sdp self._count_items() - return self._cache[key] if key in self._cache else None + return self._cache[key] if key in self._cache else NotFound - def _put(self, key, value): + def _put(self, key, value, alt_sdp): """ Adds a whole dictionary :param key: True to append, false to reset :param value: dictionary + :param alt_sdp: NOT SUPPORTED as the values from alt_sdp must be retrieved and computed BEFORE the put :return: """ if not isinstance(key, bool): @@ -33,12 +65,12 @@ class DictionaryCache(BaseCache): if not isinstance(value, dict): raise ValueError - if key: + if key: # update the current cache if self._cache is None: self._cache = value.copy() else: self._cache.update(value) - else: + else: # reset the current cache self._cache = value self._count_items() @@ -47,6 +79,9 @@ class DictionaryCache(BaseCache): self._add_to_add("*self*") return False + 
def _delete(self, key, value, alt_sdp): + raise NotImplementedError() + def _count_items(self): self._current_size = 0 for v in self._cache.values(): diff --git a/src/cache/FastCache.py b/src/cache/FastCache.py index f370c32..1fd9b74 100644 --- a/src/cache/FastCache.py +++ b/src/cache/FastCache.py @@ -1,12 +1,16 @@ +from core.global_symbols import NotFound + + class FastCache: """ Simplest LRU cache """ - def __init__(self, max_size=256): + def __init__(self, max_size=256, default=None): self.max_size = max_size self.cache = {} self.lru = [] + self.default = default def put(self, key, value): if len(self.cache) == self.max_size: @@ -18,11 +22,19 @@ class FastCache: self.cache[key] = value self.lru.append(key) + def has(self, key): + return key in self.cache + def get(self, key): try: return self.cache[key] except KeyError: - return None + if self.default: + value = self.default(key) + self.put(key, value) + return value + + return NotFound def evict_by_key(self, predicate): to_remove = [] diff --git a/src/cache/IncCache.py b/src/cache/IncCache.py index 11ada16..4ac1590 100644 --- a/src/cache/IncCache.py +++ b/src/cache/IncCache.py @@ -1,4 +1,5 @@ from cache.Cache import Cache +from core.global_symbols import NotFound, Removed class IncCache(Cache): @@ -6,13 +7,18 @@ class IncCache(Cache): Increment the value of the key every time it's accessed """ - def _get(self, key): - value = super()._get(key) or 0 + def _get(self, key, alt_sdp=None): + value = super()._get(key, alt_sdp=alt_sdp) + if value in (NotFound, Removed): + value = 0 value += 1 - self._put(key, value) + self._put(key, value, alt_sdp) return value - def _put(self, key, value): + def _put(self, key, value, alt_sdp): self._cache[key] = value self._add_to_add(key) return True + + def _alt_get(self, key): + return super()._get(key) # point to parent, not to self diff --git a/src/cache/ListCache.py b/src/cache/ListCache.py index 0e874b8..d881ca8 100644 --- a/src/cache/ListCache.py +++ b/src/cache/ListCache.py 
@@ -1,4 +1,6 @@ from cache.Cache import BaseCache +from core.global_symbols import Removed, NotFound +from core.utils import sheerka_deepcopy class ListCache(BaseCache): @@ -8,12 +10,17 @@ class ListCache(BaseCache): Items of this cache are list """ - def _put(self, key, value): + def _put(self, key, value, alt_sdp): if key in self._cache: self._cache[key].append(value) else: self._sync(key) + if key not in self._cache and alt_sdp and not self._is_cleared: + previous = self._alt_sdp_get(alt_sdp, key) + if previous not in (NotFound, Removed): + self._cache[key] = sheerka_deepcopy(previous) + if key in self._cache: self._cache[key].append(value) else: @@ -22,18 +29,33 @@ class ListCache(BaseCache): self._add_to_add(key) return True - def _update(self, old_key, old_value, new_key, new_value): + def _update(self, old_key, old_value, new_key, new_value, alt_sdp): self._sync(old_key, new_key) + if old_key not in self._cache and alt_sdp and not self._is_cleared: + # no value found in local cache or remote repository + # Use the values from alt_sdp + previous = self._alt_sdp_get(alt_sdp, old_key) + if previous in (NotFound, Removed): + raise KeyError(old_key) + + self._cache[old_key] = sheerka_deepcopy(previous) + self._current_size += len(previous) + if old_key != new_key: self._cache[old_key].remove(old_value) if len(self._cache[old_key]) == 0: - del (self._cache[old_key]) - self._add_to_remove(old_key) + if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, old_key): + self._cache[old_key] = Removed + self._add_to_add(old_key) + self._current_size += 1 + else: + del (self._cache[old_key]) + self._add_to_remove(old_key) else: self._add_to_add(old_key) - self._put(new_key, new_value) + self._put(new_key, new_value, alt_sdp) self._add_to_add(new_key) else: for i in range(len(self._cache[new_key])): diff --git a/src/cache/ListIfNeededCache.py b/src/cache/ListIfNeededCache.py index fa66b0b..8e79ab5 100644 --- a/src/cache/ListIfNeededCache.py +++ 
b/src/cache/ListIfNeededCache.py @@ -1,4 +1,6 @@ from cache.Cache import BaseCache +from core.global_symbols import Removed, NotFound +from core.utils import sheerka_deepcopy class ListIfNeededCache(BaseCache): @@ -8,7 +10,7 @@ class ListIfNeededCache(BaseCache): When you put the same key twice, you now have a list of two elements """ - def _put(self, key, value): + def _put(self, key, value, alt_sdp): if key in self._cache: if isinstance(self._cache[key], list): self._cache[key].append(value) @@ -17,6 +19,11 @@ class ListIfNeededCache(BaseCache): else: self._sync(key) + if key not in self._cache and alt_sdp and not self._is_cleared: + previous = self._alt_sdp_get(alt_sdp, key) + if previous not in (NotFound, Removed): + self._cache[key] = sheerka_deepcopy(previous) + if key in self._cache: if isinstance(self._cache[key], list): self._cache[key].append(value) @@ -27,23 +34,36 @@ class ListIfNeededCache(BaseCache): self._add_to_add(key) return True - def _update(self, old_key, old_value, new_key, new_value): + def _update(self, old_key, old_value, new_key, new_value, alt_sdp): self._sync(old_key, new_key) + if old_key not in self._cache and alt_sdp and not self._is_cleared: + # no value found in local cache or remote repository + # Use the values from alt_sdp + previous = self._alt_sdp_get(alt_sdp, old_key) + if previous in (NotFound, Removed): + raise KeyError(old_key) + + self._cache[old_key] = sheerka_deepcopy(previous) + self._current_size += len(previous) if isinstance(previous, list) else 1 + if old_key != new_key: if isinstance(self._cache[old_key], list): self._cache[old_key].remove(old_value) - if len(self._cache[old_key]) == 0: + if len(self._cache[old_key]) == 1: + self._cache[old_key] = self._cache[old_key][0] + self._add_to_add(old_key) + else: + if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, old_key): + self._cache[old_key] = Removed + self._add_to_add(old_key) + self._current_size += 1 + else: del (self._cache[old_key]) 
self._add_to_remove(old_key) - else: - self._add_to_add(old_key) - else: - del (self._cache[old_key]) - self._add_to_remove(old_key) - self._put(new_key, new_value) + self._put(new_key, new_value, alt_sdp) self._add_to_add(new_key) else: if isinstance(self._cache[new_key], list): @@ -55,22 +75,60 @@ class ListIfNeededCache(BaseCache): self._cache[new_key] = new_value self._add_to_add(new_key) - def _delete(self, key, value): + def _delete(self, key, value, alt_sdp): if value is None: - self._current_size -= len(self._cache[key]) - del self._cache[key] - self._add_to_remove(key) - else: - previous = self._cache[key] - if isinstance(previous, list): - previous.remove(value) - if len(previous) == 1: - self._cache[key] = previous[0] - self._current_size -= 1 - self.to_add.add(key) - else: - if previous == value: - del self._cache[key] - self._current_size -= 1 - self.to_remove.add(key) + # Remove the whole key + if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key): + if key in self._cache: + previous = self._cache[key] + if isinstance(previous, list): + self._current_size -= len(previous) + 1 + else: + self._current_size += 1 + self._cache[key] = Removed + self._add_to_add(key) + else: + previous = self._cache[key] + self._current_size -= len(previous) if isinstance(previous, list) else 1 + del self._cache[key] + self._add_to_remove(key) + + else: + # Remove a single value + try: + previous = self._cache[key] + if isinstance(previous, list): + previous.remove(value) + self._cache[key] = previous[0] if len(previous) == 1 else previous + self._current_size -= 1 + self.to_add.add(key) + else: + if previous == value: + # I am about to delete the entry + if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key): + self._cache[key] = Removed + self.to_add.add(key) + # self._current_size -= 1 # Do not decrease size, as it's replaced by 'Removed' + else: + del self._cache[key] + self._current_size -= 1 + self.to_remove.add(key) + except 
KeyError as ex: + previous = self._alt_sdp_get(alt_sdp, key) if not self._is_cleared and alt_sdp else NotFound + if previous in (NotFound, Removed): + raise ex + + if isinstance(previous, list): + previous = sheerka_deepcopy(previous) + previous.remove(value) # raise an exception if value in not in the list + self._cache[key] = previous[0] if len(previous) == 1 else previous + self._current_size -= 1 + self.to_add.add(key) + else: + if previous == value: + self._cache[key] = Removed + self.to_add.add(key) + self._current_size -= 1 + + return True diff --git a/src/cache/SetCache.py b/src/cache/SetCache.py index 3ea66c0..143ee0f 100644 --- a/src/cache/SetCache.py +++ b/src/cache/SetCache.py @@ -1,4 +1,6 @@ from cache.Cache import BaseCache +from core.global_symbols import NotFound, Removed +from core.utils import sheerka_deepcopy class SetCache(BaseCache): @@ -15,7 +17,7 @@ class SetCache(BaseCache): >> assert {'value1', 'value2'} == self.get('key') """ - def _put(self, key, value): + def _put(self, key, value, alt_sdp): if key in self._cache: if value in self._cache[key]: return False @@ -23,6 +25,11 @@ class SetCache(BaseCache): else: self._sync(key) + if key not in self._cache and alt_sdp and not self._is_cleared: + previous = self._alt_sdp_get(alt_sdp, key) + if previous not in (NotFound, Removed): + self._cache[key] = sheerka_deepcopy(previous) + if key in self._cache: self._cache[key].add(value) else: @@ -31,35 +38,79 @@ class SetCache(BaseCache): self._add_to_add(key) return True - def _update(self, old_key, old_value, new_key, new_value): + def _update(self, old_key, old_value, new_key, new_value, alt_sdp): self._sync(old_key, new_key) + if old_key not in self._cache and alt_sdp and not self._is_cleared: + # no value found in local cache or remote repository + # Use the values from alt_sdp + previous = self._alt_sdp_get(alt_sdp, old_key) + if previous in (NotFound, Removed): + raise KeyError(old_key) + + self._cache[old_key] = sheerka_deepcopy(previous) + 
self._current_size += len(previous) + if old_key != new_key: if isinstance(self._cache[old_key], set): self._cache[old_key].remove(old_value) if len(self._cache[old_key]) == 0: - del (self._cache[old_key]) - self._add_to_remove(old_key) + if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, old_key): + self._cache[old_key] = Removed + self._add_to_add(old_key) + self._current_size += 1 + else: + del (self._cache[old_key]) + self._add_to_remove(old_key) else: self._add_to_add(old_key) - self._put(new_key, new_value) + self._put(new_key, new_value, alt_sdp) self._add_to_add(new_key) else: self._cache[new_key].remove(old_value) - self._put(new_key, new_value) + self._put(new_key, new_value, alt_sdp) self._add_to_add(new_key) - def _delete(self, key, value): + def _delete(self, key, value, alt_sdp): if value is None: - self._current_size -= len(self._cache[key]) - del self._cache[key] - self._add_to_remove(key) - else: - self._cache[key].remove(value) - self._current_size -= 1 - if len(self._cache[key]) == 0: + if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key): + self._current_size += 1 - len(self._cache[key]) if key in self._cache else 1 + self._cache[key] = Removed + self._add_to_add(key) + else: + self._current_size -= len(self._cache[key]) del self._cache[key] self._add_to_remove(key) - else: + + else: + try: + self._cache[key].remove(value) + if len(self._cache[key]) == 0: + if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key): + self._cache[key] = Removed + self._add_to_add(key) + # self._current_size -= 1 # Do not decrease size, as it's replaced by 'Removed' + else: + del self._cache[key] + self._add_to_remove(key) + self._current_size -= 1 + else: + self._add_to_add(key) + self._current_size -= 1 + except KeyError as ex: + previous = self._alt_sdp_get(alt_sdp, key) if not self._is_cleared and alt_sdp else NotFound + if previous in (NotFound, Removed): + raise ex + + previous = 
sheerka_deepcopy(previous) + previous.remove(value) # will raise a KeyError if value is not in the set + if len(previous) == 0: + self._cache[key] = Removed + self._current_size += 1 + else: + self._cache[key] = previous + self._current_size += len(previous) self._add_to_add(key) + + return True diff --git a/src/core/ast_helpers.py b/src/core/ast_helpers.py index 36623fe..c93e84f 100644 --- a/src/core/ast_helpers.py +++ b/src/core/ast_helpers.py @@ -2,6 +2,7 @@ import ast from dataclasses import dataclass from cache.FastCache import FastCache +from core.global_symbols import NotFound @dataclass @@ -56,12 +57,12 @@ class UnreferencedNamesVisitor(ast.NodeVisitor): def get_names(self, node): names = UnreferencedNamesVisitor.cache.get(node) - if names is not None: - return names + if names is NotFound: + self.visit(node) + UnreferencedNamesVisitor.cache.put(node, self.names) + return self.names - self.visit(node) - UnreferencedNamesVisitor.cache.put(node, self.names) - return self.names + return names def visit_Name(self, node): self.names.add(node.id) diff --git a/src/core/builtin_concepts.py b/src/core/builtin_concepts.py index 4289f30..98a0422 100644 --- a/src/core/builtin_concepts.py +++ b/src/core/builtin_concepts.py @@ -351,7 +351,7 @@ class PythonSecurityError(Concept, ErrorObj): self._metadata.is_evaluated = True -class NotFound(Concept, ErrorObj): +class NotFoundConcept(Concept, ErrorObj): ALL_ATTRIBUTES = [] def __init__(self, body=None): diff --git a/src/core/builtin_concepts_ids.py b/src/core/builtin_concepts_ids.py index 74f11d5..1302e5e 100644 --- a/src/core/builtin_concepts_ids.py +++ b/src/core/builtin_concepts_ids.py @@ -95,6 +95,8 @@ class BuiltinConcepts: INVALID_GREATEST_OPERATION = "__INVALID_GREATEST_OPERATION" NEW_RULE = "__NEW_RULE" UNKNOWN_RULE = "__UNKNOWN_RULE" + ONTOLOGY_ALREADY_DEFINED = "__ONTOLOGY_ALREADY_DEFINED" + ONTOLOGY_REMOVED = "__ONTOLOGY_REMOVED" NODE = "__NODE" GENERIC_NODE = "__GENERIC_NODE" @@ -167,6 +169,7 @@ BuiltinErrors = 
[ BuiltinConcepts.NOT_FOUND, BuiltinConcepts.INVALID_LESSER_OPERATION, BuiltinConcepts.INVALID_GREATEST_OPERATION, + BuiltinConcepts.ONTOLOGY_ALREADY_DEFINED ] BuiltinContainers = [ diff --git a/src/core/builtin_helpers.py b/src/core/builtin_helpers.py index 90f4a02..2b00d65 100644 --- a/src/core/builtin_helpers.py +++ b/src/core/builtin_helpers.py @@ -1,10 +1,15 @@ +import ast import logging +from cache.Cache import Cache +from core.ast_helpers import ast_to_props from core.builtin_concepts import BuiltinConcepts -from core.concept import Concept, NotInit, ConceptParts, DEFINITION_TYPE_BNF, concept_part_value +from core.concept import Concept, ConceptParts, DEFINITION_TYPE_BNF, concept_part_value +from core.global_symbols import NotInit, NotFound from core.rule import Rule from core.sheerka.services.SheerkaExecute import SheerkaExecute from core.tokenizer import Keywords +from core.utils import as_bag from parsers.BaseNodeParser import SourceCodeNode, ConceptNode, UnrecognizedTokensNode, SourceCodeWithConceptNode, \ RuleNode from parsers.BaseParser import BaseParser, ParsingError @@ -510,11 +515,11 @@ def get_lexer_nodes_from_unrecognized(context, unrecognized_tokens_node, parsers def update_compiled(context, concept, errors, parsers=None): """ - recursively iterate thru concept.get_compiled() to replace LexerNode into concepts or list of ReturnValueConcept + recursively iterate over concept.get_compiled() to replace LexerNode into concepts or list of ReturnValueConcept When parsing using a LexerNodeParser (SyaNodeParser, BnfNodeParser...) the result will be a LexerNode. In the specific case of a ConceptNode, the compiled variables will also be LexerNode (UnrecognizedTokensNode...) 
- This function iterate thru the compile to transform these nodes into concept of compiled AST + This function iterate over the compile to transform these nodes into concept of compiled AST :param context: :param concept: :param errors: a list the must be initialized by the caller @@ -648,3 +653,59 @@ def ensure_concept_or_rule(*items): else: if not isinstance(items, (Concept, Rule)): raise TypeError(f"'{items}' must be a concept or rule") + + +expressions_cache = Cache() + + +def evaluate_expression(expr, bag): + """ + Try to evaluate expr in context of bag + :param expr: + :param bag: + :return: + """ + + if expr is None or expr.strip() == "": + return None + + if expr in bag: + return bag[expr] + + props_definitions = expressions_cache.get(expr) + if props_definitions is NotFound: + _ast = ast.parse(expr, mode="eval") + props_definitions = [] + ast_to_props(props_definitions, _ast.body, None) + props_definitions.reverse() + expressions_cache.put(expr, props_definitions) + + return evaluate_object(bag, props_definitions) + + +def evaluate_object(bag, properties): + """ + Evaluate the properties of an object + Works with evaluate_expression + :param bag: + :param properties: List of ast_helpers.PropDef + :return: + """ + for prop in properties: + try: + obj = bag[prop.prop] + except KeyError: + try: + obj = bag["self"][prop.prop] + except Exception: + raise NameError(prop.prop) + + if obj is None: + return None + + if prop.index is not None: + obj = obj[prop.index] + + bag = as_bag(obj) + + return obj diff --git a/src/core/concept.py b/src/core/concept.py index 635761b..b58f5e6 100644 --- a/src/core/concept.py +++ b/src/core/concept.py @@ -2,10 +2,12 @@ import hashlib from collections import namedtuple from copy import deepcopy from dataclasses import dataclass +from threading import RLock from typing import Union import core.utils from core.builtin_concepts_ids import BuiltinDynamicAttrs +from core.global_symbols import NotInit from core.tokenizer import 
Tokenizer, TokenKind PROPERTIES_FOR_DIGEST = ("name", "key", @@ -21,19 +23,6 @@ DEFINITION_TYPE_BNF = "bnf" DEFINITION_TYPE_DEF = "def" -class NotInitialized: - value = "**NotInit**" - - def __repr__(self): - return self.value - - def __eq__(self, other): - return isinstance(other, NotInitialized) - - -NotInit = NotInitialized() - - class ConceptParts: """ Lists metadata that can contains some code @@ -75,6 +64,7 @@ class ConceptMetadata: ALL_ATTRIBUTES = {} +all_attributes_lock = RLock() def get_concept_attrs(concept): @@ -86,15 +76,28 @@ def get_concept_attrs(concept): except KeyError: pass - all_attributes = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"] - if concept.id and concept.key not in BuiltinDynamicAttrs: - ALL_ATTRIBUTES[concept.id] = all_attributes - return all_attributes + with all_attributes_lock: + all_attributes = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"] + if concept.id and concept.key not in BuiltinDynamicAttrs: + ALL_ATTRIBUTES[concept.id] = all_attributes + return all_attributes def freeze_concept_attrs(concept): - if concept.key not in BuiltinDynamicAttrs: - ALL_ATTRIBUTES[concept.id] = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"] + with all_attributes_lock: + if concept.key not in BuiltinDynamicAttrs: + ALL_ATTRIBUTES[concept.id] = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"] + + +def copy_concepts_attrs(): + with all_attributes_lock: + return ALL_ATTRIBUTES.copy() + + +def load_concepts_attrs(attrs): + global ALL_ATTRIBUTES + with all_attributes_lock: + ALL_ATTRIBUTES = attrs class Concept: diff --git a/src/core/global_symbols.py b/src/core/global_symbols.py index 96772b0..df303ab 100644 --- a/src/core/global_symbols.py +++ b/src/core/global_symbols.py @@ -8,3 +8,35 @@ EVENT_CONCEPT_CREATED = "evt_cc" # comparison context RULE_COMPARISON_CONTEXT = "Rule" CONCEPT_COMPARISON_CONTEXT = "Sya" + + +class CustomType: + + def __init__(self, value): + self.value = value + + def 
__repr__(self): + return self.value + + def __eq__(self, other): + return isinstance(other, CustomType) and self.value == other.value + + +class NotInitType(CustomType): + def __init__(self): + super(NotInitType, self).__init__("**NotInit**") + + +class NotFoundType(CustomType): + def __init__(self): + super(NotFoundType, self).__init__("**NotFound**") + + +class RemovedType(CustomType): + def __init__(self): + super(RemovedType, self).__init__("**Removed**") + + +NotInit = NotInitType() +NotFound = NotFoundType() +Removed = RemovedType() diff --git a/src/core/rule.py b/src/core/rule.py index 4960bb8..1c8a38b 100644 --- a/src/core/rule.py +++ b/src/core/rule.py @@ -5,7 +5,7 @@ import core.utils ACTION_TYPE_PRINT = "print" ACTION_TYPE_EXEC = "exec" -ACTION_TYPE_DEFERRED = "deferred" +ACTION_TYPE_DEFERRED = "deferred" # KSI 2021-04-01 What is it for ? I definitely need some proper documentation @dataclass @@ -27,8 +27,9 @@ class Rule: predicate=None, action=None, priority=None, + rule_id=None, is_enabled=None): - self.metadata = RuleMetadata(action_type, name, predicate, action, is_enabled=is_enabled) + self.metadata = RuleMetadata(action_type, name, predicate, action, id=rule_id, is_enabled=is_enabled) self.compiled_predicate = None self.compiled_action = None from core.sheerka.services.SheerkaComparisonManager import SheerkaComparisonManager @@ -36,7 +37,10 @@ class Rule: self.error_sink = None def __repr__(self): - return f"Rule(#{self.metadata.id}, when '{self.metadata.predicate}' {self.metadata.action_type} '{self.metadata.action}', priority={self.priority})" + rule_id = f"#{self.metadata.id}" + if self.name: + rule_id += f" ({self.metadata.name})" + return f"Rule({rule_id}, when '{self.metadata.predicate}' {self.metadata.action_type} '{self.metadata.action}', priority={self.priority})" def __eq__(self, other): if id(other) == id(self): @@ -57,6 +61,22 @@ class Rule: self.metadata.action_type, self.metadata.action)) + def __deepcopy__(self, memodict={}): + copy 
= Rule(self.metadata.action_type, + self.name, + self.metadata.predicate, + self.metadata.action, + self.priority, + self.id, + self.metadata.is_enabled) + copy.compiled_predicate = self.compiled_predicate + copy.compiled_action = self.compiled_action + + return copy + + def __copy__(self): + return self.__deepcopy__() + def set_id(self, rule_id): self.metadata.id = rule_id return self diff --git a/src/core/sheerka/Sheerka.py b/src/core/sheerka/Sheerka.py index 9196d9f..ffa9ff8 100644 --- a/src/core/sheerka/Sheerka.py +++ b/src/core/sheerka/Sheerka.py @@ -5,21 +5,20 @@ from dataclasses import dataclass import core.builtin_helpers import core.utils from cache.Cache import Cache -from cache.CacheManager import CacheManager from cache.DictionaryCache import DictionaryCache from cache.IncCache import IncCache from core.builtin_concepts import ErrorConcept, ReturnValueConcept, UnknownConcept from core.builtin_concepts_ids import BuiltinErrors, BuiltinConcepts -from core.concept import Concept, ConceptParts, NotInit, get_concept_attrs +from core.concept import Concept, ConceptParts, get_concept_attrs from core.error import ErrorObj -from core.global_symbols import EVENT_USER_INPUT_EVALUATED +from core.global_symbols import EVENT_USER_INPUT_EVALUATED, NotInit, NotFound from core.profiling import profile from core.sheerka.ExecutionContext import ExecutionContext +from core.sheerka.SheerkaOntologyManager import SheerkaOntologyManager, OntologyAlreadyExists from core.sheerka_logger import console_handler -from core.simple_debug import my_debug from core.tokenizer import Token, TokenKind from printer.SheerkaPrinter import SheerkaPrinter -from sdp.sheerkaDataProvider import SheerkaDataProvider, Event +from sdp.sheerkaDataProvider import Event BASE_NODE_PARSER_CLASS = "parsers.BaseNodeParser.BaseNodeParser" EXIT_COMMANDS = ("quit", "exit", "bye") @@ -64,18 +63,19 @@ class Sheerka(Concept): ALL_ATTRIBUTES = [] def __init__(self, cache_only=False, debug=False, loggers=None): + 
super().__init__(BuiltinConcepts.SHEERKA, True, True, BuiltinConcepts.SHEERKA) + self.init_logging(debug, loggers) self.loggers = loggers + self.cache_only = cache_only - super().__init__(BuiltinConcepts.SHEERKA, True, True, BuiltinConcepts.SHEERKA) # self.log.debug("Starting Sheerka.") self.bnp = None # reference to the BaseNodeParser class (to compute first keyword token) self.return_value_concept_id = None self.error_concept_id = None - self.sdp: SheerkaDataProvider = None - self.cache_manager = CacheManager(cache_only) + self.om: SheerkaOntologyManager = None self.services = {} # sheerka plugins @@ -105,29 +105,20 @@ class Sheerka(Concept): self.locals = {} self.concepts_ids = None - @property - def resolved_concepts_by_first_keyword(self): - """ - We return the cache as we will be interested by statistics - :return: - """ - return self.cache_manager.caches[self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY].cache - - @property - def resolved_sya_def(self): - """ - - :return: - """ - return self.cache_manager.caches[self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY].cache - @property def concepts_grammars(self): - return self.cache_manager.caches[self.CONCEPTS_GRAMMARS_ENTRY].cache + """ + Quick access to BNF grammars + """ + return self.om.current_cache_manager().caches[self.CONCEPTS_GRAMMARS_ENTRY].cache @property def chicken_and_eggs(self): - return self.cache_manager.caches[self.CHICKEN_AND_EGG_CONCEPTS_ENTRY].cache + return self.om.current_cache_manager().caches[self.CHICKEN_AND_EGG_CONCEPTS_ENTRY].cache + + @property + def root_folder(self): + return self.om.root_folder def bind_service_method(self, bound_method, has_side_effect, as_name=None, visible=True): """ @@ -147,7 +138,7 @@ class Sheerka(Concept): self.methods_with_context.add(as_name) self.sheerka_methods[as_name] = SheerkaMethod(bound_method, has_side_effect) - setattr(self, as_name, bound_method) + setattr(self, bound_method.__name__, bound_method) def initialize(self, root_folder: str = None, 
save_execution_context=None, enable_process_return_values=None): """ @@ -171,7 +162,7 @@ class Sheerka(Concept): from sheerkapickle.sheerka_handlers import initialize_pickle_handlers initialize_pickle_handlers() - self.sdp = SheerkaDataProvider(root_folder, self) + self.om = SheerkaOntologyManager(self, root_folder, self.cache_only) self.builtin_cache = self.get_builtins_classes_as_dict() self.initialize_caching() @@ -181,33 +172,38 @@ class Sheerka(Concept): self.initialize_builtin_evaluators() event = Event("Initializing Sheerka.", user_id=self.name) - self.sdp.save_event(event) + self.om.save_event(event) with ExecutionContext(self.key, event, self, BuiltinConcepts.INIT_SHEERKA, None, desc="Initializing Sheerka.") as exec_context: - if self.sdp.first_time: + if self.om.current_sdp().first_time: self.first_time_initialisation(exec_context) self.initialize_builtin_concepts() self.initialize_concept_node_parsing(exec_context) - self.initialize_services_deferred(exec_context, self.sdp.first_time) + self.initialize_services_deferred(exec_context, self.om.current_sdp().first_time) res = ReturnValueConcept(self, True, self) exec_context.add_values(return_values=res) - if self.cache_manager.is_dirty: - self.cache_manager.commit(exec_context) + if self.om.is_dirty(): + self.om.commit(exec_context) if self.save_execution_context: - self.sdp.save_result(exec_context, is_admin=True) + self.om.save_result(exec_context, is_admin=True) + + # append the other ontologies if needed + self.om.freeze() + self.initialize_ontologies(exec_context) + # self.init_log.debug(f"Sheerka successfully initialized") except IOError as e: - res = ReturnValueConcept(self, False, self.get(BuiltinConcepts.ERROR), e) + res = ReturnValueConcept(self.name, False, self.new(BuiltinConcepts.ERROR, body=e)) finally: self.during_initialisation = False @@ -216,28 +212,28 @@ class Sheerka(Concept): def initialize_caching(self): - cache = IncCache(default=lambda k: self.sdp.get(self.OBJECTS_IDS_ENTRY, k)) - 
self.cache_manager.register_cache(self.OBJECTS_IDS_ENTRY, cache) + cache = IncCache().auto_configure(self.OBJECTS_IDS_ENTRY) + self.om.register_cache(self.OBJECTS_IDS_ENTRY, cache) - cache = DictionaryCache(default=lambda k: self.sdp.get(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, k)) - self.cache_manager.register_cache(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache) - self.cache_manager.get(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, None) # to init from sdp + cache = DictionaryCache().auto_configure(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) + self.om.register_cache(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache) + self.om.get(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, None) # to init from sdp - cache = DictionaryCache(default=lambda k: self.sdp.get(self.CONCEPTS_SYA_DEFINITION_ENTRY, k)) - self.cache_manager.register_cache(self.CONCEPTS_SYA_DEFINITION_ENTRY, cache) - self.cache_manager.get(self.CONCEPTS_SYA_DEFINITION_ENTRY, None) # to init from sdp + cache = DictionaryCache().auto_configure(self.CONCEPTS_SYA_DEFINITION_ENTRY) + self.om.register_cache(self.CONCEPTS_SYA_DEFINITION_ENTRY, cache) + self.om.get(self.CONCEPTS_SYA_DEFINITION_ENTRY, None) # to init from sdp - cache = DictionaryCache() - self.cache_manager.register_cache(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache, persist=False) + cache = DictionaryCache().auto_configure(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) + self.om.register_cache(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache, persist=False) - cache = DictionaryCache() - self.cache_manager.register_cache(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY, cache, persist=False) + cache = DictionaryCache().auto_configure(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY) + self.om.register_cache(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY, cache, persist=False) - cache = Cache() - self.cache_manager.register_cache(self.CONCEPTS_GRAMMARS_ENTRY, cache, persist=False) + cache = Cache().auto_configure(self.CONCEPTS_GRAMMARS_ENTRY) + 
self.om.register_cache(self.CONCEPTS_GRAMMARS_ENTRY, cache, persist=False) - cache = Cache() - self.cache_manager.register_cache(self.CHICKEN_AND_EGG_CONCEPTS_ENTRY, cache, persist=False) + cache = Cache().auto_configure(self.CHICKEN_AND_EGG_CONCEPTS_ENTRY) + self.om.register_cache(self.CHICKEN_AND_EGG_CONCEPTS_ENTRY, cache, persist=False) def initialize_services(self): """ @@ -261,13 +257,12 @@ class Sheerka(Concept): :return: """ # self.init_log.debug("Initializing services (deferred)") - for service in self.services.values(): if hasattr(service, "initialize_deferred"): service.initialize_deferred(context, is_first_time) def first_time_initialisation(self, context): - self.record_var(context, self.name, "save_execution_context", True) + self.record_var(context, self.name, "save_execution_context", self.save_execution_context) def initialize_builtin_concepts(self): """ @@ -339,27 +334,18 @@ class Sheerka(Concept): def initialize_concept_node_parsing(self, context): # self.init_log.debug("Initializing concepts by first keyword.") - concepts_by_first_keyword = self.cache_manager.copy(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) + concepts_by_first_keyword = self.om.current_cache_manager().copy(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) res = self.bnp.resolve_concepts_by_first_keyword(context, concepts_by_first_keyword) - self.cache_manager.put(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, res.body) + self.om.put(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, res.body) - def reset(self, cache_only=False): - if self.cache_manager.cache_only != cache_only: - self.cache_manager.reset(cache_only) - self.initialize_caching() - for service in self.services.values(): - if hasattr(service, "initialize"): - service.initialize() - else: - self.cache_manager.clear() + def initialize_ontologies(self, context): + ontologies = self.om.current_sdp().load_ontologies() + if not ontologies: + return - for service in self.services.values(): - if hasattr(service, "reset"): - 
service.reset() - - self.printer_handler.reset() - self.sdp.reset() - self.locals = {} + for ontology_name in list(reversed(ontologies))[1:]: + self.om.push_ontology(ontology_name, False) + self.initialize_services_deferred(context, False) # @profile(filename="profile_80") def evaluate_user_input(self, text: str, user_name="kodjo"): @@ -371,9 +357,9 @@ class Sheerka(Concept): :return: """ # self.log.debug(f"Processing user input '{text}', {user_name=}.") - my_debug(f"****************** Processing user input '{text}', {user_name=}.***********************************") + # my_debug(f"****************** Processing user input '{text}', {user_name=}.***********************************") event = Event(text, user_name) - self.sdp.save_event(event) + self.om.save_event(event) with ExecutionContext(self.key, event, @@ -391,8 +377,8 @@ class Sheerka(Concept): ret = self.execute(execution_context, [user_input, reduce_requested], EXECUTE_STEPS) execution_context.add_values(return_values=ret) - if self.cache_manager.is_dirty: - self.cache_manager.commit(execution_context) + if self.om.is_dirty: + self.om.commit(execution_context) self.publish(execution_context, EVENT_USER_INPUT_EVALUATED) @@ -489,13 +475,13 @@ class Sheerka(Concept): return None if key[1]: - concept = self.cache_manager.get(self.CONCEPTS_BY_ID_ENTRY, key[1]) + concept = self.om.get(self.CONCEPTS_BY_ID_ENTRY, key[1]) else: - concept = self.cache_manager.get(self.CONCEPTS_BY_NAME_ENTRY, key[0]) + concept = self.om.get(self.CONCEPTS_BY_NAME_ENTRY, key[0]) else: - concept = self.cache_manager.get(self.CONCEPTS_BY_NAME_ENTRY, key) + concept = self.om.get(self.CONCEPTS_BY_NAME_ENTRY, key) - if concept is None: + if concept is NotFound: return None return new_instances(concept) if return_new else concept @@ -556,6 +542,76 @@ class Sheerka(Concept): concept._metadata.is_evaluated = True # because we have manually set the variables return concept + def push_ontology(self, context, name, cache_only=False): + + try: + if 
self.om.already_on_top(name): + return self.ret(self.name, True, self.new(BuiltinConcepts.SUCCESS)) + except OntologyAlreadyExists: + return self.ret(self.name, False, self.new(BuiltinConcepts.ONTOLOGY_ALREADY_DEFINED, body=name)) + + # record sheerka and services states + self.om.record_sheerka_state() + for service in self.services.values(): + if hasattr(service, "save_state"): + service.save_state(context) + if hasattr(service, "reset_state"): + service.reset_state() + + self.om.push_ontology(name, cache_only) + + # Not the first time for this ontology. Update the services + if name in self.om.current_sdp().load_ontologies(): + self.initialize_services_deferred(context, False) + + self.om.save_ontologies() + + return self.ret(self.name, True, self.new(BuiltinConcepts.SUCCESS)) + + def pop_ontology(self): + ontology = self.om.pop_ontology() + + self.om.reset_sheerka_state() + for service in self.services.values(): + if hasattr(service, "restore_state"): + service.restore_state() + if hasattr(service, "reset_state"): + service.reset_state() + + self.om.save_ontologies() + return self.ret(self.name, True, self.new(BuiltinConcepts.ONTOLOGY_REMOVED, body=ontology)) + + def get_ontology(self, context): + self.om.record_sheerka_state() + for service in self.services.values(): + if hasattr(service, "save_state"): + service.save_state(context) + + return self.om.get_ontology() + + def add_ontology(self, context, ontology): + """ + Add the previously recorded ontology on the top + """ + + # save the state of the current ontology + self.om.record_sheerka_state() + for service in self.services.values(): + if hasattr(service, "save_state"): + service.save_state(context) + # if hasattr(service, "reset_state"): # no need to do it twice + # service.reset_state() + + self.om.add_ontology(ontology) + + # update sheerka with this new ontology + self.om.reset_sheerka_state() + for service in self.services.values(): + if hasattr(service, "restore_state"): + service.restore_state() + 
if hasattr(service, "reset_state"): + service.reset_state() + def ret(self, who: str, status: bool, value, parents=None): """ Creates and returns a ReturnValue concept @@ -665,7 +721,7 @@ class Sheerka(Concept): if not isinstance(obj, Concept): return True - return obj.key != str(BuiltinConcepts.UNKNOWN_CONCEPT) + return obj.key not in (BuiltinConcepts.UNKNOWN_CONCEPT, BuiltinConcepts.UNKNOWN_RULE) @staticmethod def isinstance(a, b): @@ -791,40 +847,6 @@ class Sheerka(Concept): def test_error(self): raise Exception("I can raise an error") - def test_only_force_sya_def(self, context, list_of_def): - """ - Set the precedence and/or the associativity of a concept - FOR TESTS PURPOSE. TO REMOVE EVENTUALLY - :param context: - :param list_of_def list of tuple(concept_id, precedence (int), SyaAssociativity) - :return: - """ - - # validate the entries - # If one entry is an invalid concept, rollback everything - for concept_id, precedence, associativity in list_of_def: - if concept_id == BuiltinConcepts.UNKNOWN_CONCEPT: - return self.ret(self.name, - False, - self.new(BuiltinConcepts.ERROR, body=f"Concept {concept_id} is not known")) - - sya_def = self.cache_manager.copy(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY) or {} - - # update the definitions - for concept_id, precedence, associativity in list_of_def: - if precedence is None and associativity is None: - try: - del self.sya_definitions[concept_id] - except KeyError: - pass - else: - sya_def[concept_id] = (precedence, associativity) - - # put in cache - self.cache_manager.put(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY, False, sya_def) - - return self.ret(self.name, True, self.new(BuiltinConcepts.SUCCESS)) - def test_only_add_in_cache(self, concept: Concept): """ Adds a concept template in cache. 
from cache.CacheManager import CacheManager
from cache.DictionaryCache import DictionaryCache
from core.concept import copy_concepts_attrs, load_concepts_attrs
from core.global_symbols import NotFound, Removed
from core.utils import sheerka_deepcopy
from sdp.sheerkaDataProvider import SheerkaDataProvider


class OntologyManagerFrozen(Exception):
    """
    Raised when you try to register a cache while the ontology manager is frozen.
    """
    pass


class OntologyManagerNotFrozen(Exception):
    """
    Raised when you try to push, pop or add an ontology while the manager is not frozen.
    """
    pass


class OntologyManagerCannotPopLatest(Exception):
    """
    Raised when you try to pop the last remaining (root) ontology layer.
    """
    pass


class OntologyAlreadyExists(Exception):
    """
    Raised when the ontology exists AND is not the top layer.
    """

    def __init__(self, name):
        # Feed args as well so that str(exc) shows the offending name.
        super().__init__(name)
        self.name = name


class AlternateSdp:
    """
    Read-only fallback over a stack of cache managers (top layer first).

    Handed to the top-level cache manager as an alternate data provider:
    a key missed at one layer is searched in the layers below it, in order.
    """

    def __init__(self, ontologies):
        # Snapshot of the layers at construction time (top first).
        self.names = [o.name for o in ontologies]
        self.cache_managers = [o.cache_manager for o in ontologies]

    def get(self, cache_name, key):
        """
        Return the first value found for 'key', browsing layers top to bottom.
        Returns NotFound when no layer holds the key.
        """
        last = len(self.cache_managers) - 1
        for i, cache_manager in enumerate(self.cache_managers):
            value = cache_manager.get(cache_name, key)
            if value is not NotFound:
                return value

            if i != last:
                # forget that the key was requested
                cache_manager.remove_initialized_key(cache_name, key)

        return NotFound

    def alt_get(self, cache_name, key):
        """
        Same as get(), but uses the caches' alt_get() accessor
        (needed e.g. for IncCache, whose get() has side effects).
        """
        last = len(self.cache_managers) - 1
        for i, cache_manager in enumerate(self.cache_managers):
            value = cache_manager.alt_get(cache_name, key)
            if value is not NotFound:
                return value

            if i != last:
                # forget that the key was requested
                cache_manager.remove_initialized_key(cache_name, key)

        return NotFound

    def exists(self, cache_name, key):
        """Return True when 'key' exists in at least one layer."""
        return any(cm.exists(cache_name, key) for cm in self.cache_managers)


class Ontology:
    """
    One layer of the ontology stack: a name, its cache manager and the
    AlternateSdp giving read access to the layers below it.
    """

    def __init__(self, name, cache_manager: CacheManager, alt_sdp: AlternateSdp):
        self.name = name
        self.cache_manager = cache_manager
        self.alt_sdp = alt_sdp
        # Extra sheerka state recorded when another layer is pushed on top
        # (see SheerkaOntologyManager.record_sheerka_state()).
        self.concepts_attributes = None
        self.local_variables = None

    def __repr__(self):
        return f"Ontology('{self.name}')"


class SheerkaOntologyManager:
    """
    Manages a stack of Ontology layers. The top layer (index 0) is the active
    one; reads fall through to lower layers via AlternateSdp, writes go to the
    top layer only.
    """

    ROOT_ONTOLOGY_NAME = "__default__"

    def __init__(self, sheerka, root_folder, cache_only):
        self.sheerka = sheerka
        self.root_folder = root_folder
        self.cache_only = cache_only
        self.frozen = False

        # The root layer has no fallback (alt_sdp=None): it is always last.
        ref_cache_manager = CacheManager(self.cache_only, sdp=SheerkaDataProvider(root_folder, self.sheerka))
        self.ontologies = [Ontology(self.ROOT_ONTOLOGY_NAME, ref_cache_manager, None)]

    @property
    def ontologies_names(self):
        """Names of all layers, top first."""
        return [o.name for o in self.ontologies]

    def freeze(self):
        """Forbid further cache registrations; allow push/pop/add of layers."""
        self.frozen = True
        return self

    def test_only_unfreeze(self):
        # To remove ASAP
        self.frozen = False
        return self

    def push_ontology(self, name, cache_only=None):
        """
        Add an ontology layer on top of the stack.
        :param name: name of the layer
        :param cache_only: override of the manager-wide cache_only flag;
                           None keeps the manager's default
        """
        if not self.frozen:
            raise OntologyManagerNotFrozen()

        # Explicit None test: 'cache_only or self.cache_only' would silently
        # discard an explicit cache_only=False passed by the caller.
        effective_cache_only = self.cache_only if cache_only is None else cache_only

        # pseudo-clone of the current cache manager
        top = self.current_cache_manager()
        cache_manager = CacheManager(effective_cache_only, sdp=self.get_sdp(name))
        for cache_name, cache_def in top.caches.items():
            clone = cache_def.cache.clone()
            if cache_name in top.concept_caches:
                cache_manager.register_concept_cache(cache_name, clone, cache_def.get_key, cache_def.use_ref)
            else:
                cache_manager.register_cache(cache_name, clone, cache_def.persist, cache_def.use_ref)

            # Dictionary cache special treatment: copy the whole payload and
            # drop the pending add/remove events of the clone.
            if isinstance(clone, DictionaryCache):
                clone.put(False, cache_def.cache.copy())  # only a shallow copy for now
                clone.reset_events()

        # Build the fallback over the layers as they are BEFORE the insert.
        alt_sdp = AlternateSdp(self.ontologies)
        self.ontologies.insert(0, Ontology(name, cache_manager, alt_sdp))
        return self

    def pop_ontology(self):
        """
        Remove and return the top ontology layer.
        :raises OntologyManagerCannotPopLatest: when only the root layer remains
        """
        if not self.frozen:
            raise OntologyManagerNotFrozen()

        if len(self.ontologies) == 1:
            raise OntologyManagerCannotPopLatest()

        return self.ontologies.pop(0)

    def add_ontology(self, ontology: Ontology):
        """
        Put back a previously created ontology as the new top layer.
        :param ontology: the layer to restore
        """
        if not self.frozen:
            raise OntologyManagerNotFrozen()

        # Rebuild its fallback against the current stack, then reset the
        # 'already requested' bookkeeping of every cache it carries.
        ontology.alt_sdp = AlternateSdp(self.ontologies)
        self.ontologies.insert(0, ontology)
        for cache_def in ontology.cache_manager.caches.values():
            cache_def.cache.reset_initialized_keys()

        return self

    def get_ontology(self, name=None):
        """
        Return the first ontology with the given name.
        When no name is given, return the top ontology.
        :raises KeyError: when no layer has that name
        """
        if name is None:
            return self.ontologies[0]

        for ontology in self.ontologies:
            if ontology.name == name:
                return ontology

        raise KeyError(name)

    def save_ontologies(self):
        """Persist the current list of layer names through the top sdp."""
        self.current_sdp().save_ontologies(self.ontologies_names)

    def already_on_top(self, name):
        """
        Return True if the ontology 'name' is already on the top.
        :raises OntologyAlreadyExists: if the ontology exists, but not at the top
        """
        if self.ontologies[0].name == name:
            return True

        if name in self.ontologies_names:
            raise OntologyAlreadyExists(name)

        return False

    def record_sheerka_state(self):
        """
        Snapshot extra sheerka state (concept attributes, local variables)
        into the current layer so it can be restored later.
        """
        # TODO persist this information ?
        self.current_ontology().concepts_attributes = copy_concepts_attrs()
        self.current_ontology().local_variables = sheerka_deepcopy(self.sheerka.locals)

    def reset_sheerka_state(self):
        """Restore the sheerka state previously recorded on the current layer."""
        if self.current_ontology().concepts_attributes is not None:
            load_concepts_attrs(self.current_ontology().concepts_attributes)
        if self.current_ontology().local_variables is not None:
            self.sheerka.locals = self.current_ontology().local_variables

    def current_cache_manager(self) -> CacheManager:
        """Cache manager of the top layer."""
        return self.ontologies[0].cache_manager

    def current_sdp(self) -> SheerkaDataProvider:
        """Data provider of the top layer."""
        return self.ontologies[0].cache_manager.sdp

    def current_ontology(self) -> Ontology:
        """The top layer itself."""
        return self.ontologies[0]

    def register_concept_cache(self, name, cache, get_key, use_ref):
        """
        Register a concept cache on the top layer.
        :param name: cache name
        :param cache: cache instance
        :param get_key: how to compute a key out of a concept
        :param use_ref: store references instead of copies
        :raises OntologyManagerFrozen: when the manager is frozen
        """
        if self.frozen:
            raise OntologyManagerFrozen()

        return self.current_cache_manager().register_concept_cache(name, cache, get_key, use_ref)

    def register_cache(self, name, cache, persist=True, use_ref=False):
        """
        Register a plain cache on the top layer.
        :param name: cache name
        :param cache: cache instance
        :param persist: persist the cache content on commit
        :param use_ref: store references instead of copies
        :raises OntologyManagerFrozen: when the manager is frozen
        """
        if self.frozen:
            raise OntologyManagerFrozen()

        return self.current_cache_manager().register_cache(name, cache, persist, use_ref)

    def add_concept(self, concept):
        """
        Dispatch a new concept into the multiple concept indexes (top layer).
        :param concept: concept to add
        """
        return self.current_cache_manager().add_concept(concept)

    def update_concept(self, old, new):
        """
        Update a concept on the top layer (lower layers reachable via alt_sdp).
        :param old: old version of the concept
        :param new: new version of the concept
        """
        return self.current_cache_manager().update_concept(old, new, self.ontologies[0].alt_sdp)

    def remove_concept(self, concept):
        """
        Remove a concept from all caches of the top layer.
        :param concept: concept to remove
        """
        return self.current_cache_manager().remove_concept(concept, self.ontologies[0].alt_sdp)

    def get(self, cache_name, key):
        """
        Browse the ontologies, looking for the data 'key' in entry 'cache_name'.
        If a value is found in a lower layer, the top-level cache is updated.
        A Removed marker is reported as NotFound to the caller.
        """
        value = self.current_cache_manager().get(cache_name, key, self.ontologies[0].alt_sdp)
        return NotFound if value is Removed else value

    def exists(self, cache_name, key):
        """
        Browse the ontologies to check if the data 'key' is defined in
        entry 'cache_name'.
        """
        return any(o.cache_manager.exists(cache_name, key) for o in self.ontologies)

    def list(self, entry, cache_only=False):
        """List all values of an entry, merged across layers."""
        return list(self.get_all(entry, cache_only).values())

    def list_by_key(self, entry, key):
        """
        List all entries of a given key, merged across layers (bottom first).
        List and dict values are concatenated/merged; anything else raises.
        :raises ValueError: when the layers disagree on the container type
        :raises NotImplementedError: for unsupported container types
        """
        res = None

        def merge(acc, values):
            # Fold one layer's values into the accumulator.
            if values is NotFound:
                return acc
            if values is Removed:
                # A removal marker wipes whatever the lower layers provided.
                if acc is not None:
                    acc.clear()
                return acc
            if isinstance(values, dict):
                if acc is None:
                    return values.copy()
                if isinstance(acc, dict):
                    acc.update(values)
                    return acc
                raise ValueError(f"Expecting dict while found '{values}'")
            if isinstance(values, list):
                if acc is None:
                    return values.copy()
                if isinstance(acc, list):
                    acc.extend(values)
                    return acc
                raise ValueError(f"Expecting list while found '{values}'")
            raise NotImplementedError()

        for ontology in reversed(self.ontologies):
            from_cache_values = ontology.cache_manager.get(entry, key)
            if from_cache_values is not NotFound:
                res = merge(res, from_cache_values)
            else:
                res = merge(res, ontology.cache_manager.sdp.get(entry, key))

        return res

    def get_all(self, entry, cache_only=False):
        """
        Return all key/value pairs from all ontologies.
        For each layer (bottom first), read the sdp first then override with
        the layer's cache, so upper layers shadow lower ones.
        :param entry: cache name / sdp entry
        :param cache_only: do not fetch data from the remote sdp
        """
        res = {}
        for ontology in reversed(self.ontologies):

            if not cache_only:
                # get values from sdp
                values = ontology.cache_manager.sdp.get(entry)
                if values is Removed:
                    res.clear()
                elif values is not NotFound:
                    for k, v in values.items():
                        if v is Removed:
                            # tolerate markers for keys absent from lower layers
                            res.pop(k, None)
                        else:
                            res[k] = v

            # override with the values from cache (layer may not define it)
            try:
                cache = ontology.cache_manager.get_cache(entry)
            except KeyError:
                continue

            if cache.is_cleared():
                res.clear()

            for k in cache:
                v = cache.alt_get(k)  # Do not use get(), because of IncCache()
                if v is Removed:
                    res.pop(k, None)
                else:
                    res[k] = v

        return res

    def put(self, cache_name, key, value):
        """
        Add a value to a cache of the top layer.
        :param cache_name: cache name
        :param key: key of the value
        :param value: value to store
        """
        return self.current_cache_manager().put(cache_name, key, value, self.ontologies[0].alt_sdp)

    def delete(self, cache_name, key, value=None):
        """
        Delete an entry from the top layer.
        :param cache_name: cache name
        :param key: key to delete
        :param value: optional specific value to remove under that key
        """
        return self.current_cache_manager().delete(cache_name, key, value, self.ontologies[0].alt_sdp)

    def populate(self, cache_name, populate_function, get_key_function, reset_events=False, all_ontologies=False):
        """
        Populate a specific cache with a bunch of items.
        :param cache_name: cache to fill
        :param populate_function: how to get the items
        :param get_key_function: how to get the key, out of an item
        :param reset_events: reset the to_add and to_remove events after the populate
        :param all_ontologies: populate all ontology layers
        """
        self.current_cache_manager().populate(cache_name, populate_function, get_key_function, reset_events)
        if all_ontologies:
            for ontology in self.ontologies[1:]:
                ontology.cache_manager.populate(cache_name, populate_function, get_key_function, reset_events)

    def copy(self, cache_name):
        """
        Get a copy of the content of the top ontology layer's cache.
        :param cache_name: cache to copy
        """
        return self.current_cache_manager().caches[cache_name].cache.copy()

    def commit(self, context):
        """
        Persist all the caches of the top layer into physical storage.
        :param context: execution context
        """
        return self.current_cache_manager().commit(context)

    def clear(self, cache_name=None):
        """Clear one cache of the top layer, or all of them when no name is given."""
        return self.current_cache_manager().clear(cache_name)

    def get_sdp(self, name=None):
        """
        Return a new SheerkaDataProvider for the given ontology name,
        or the current layer's sdp when no name is given.
        """
        if name:
            return SheerkaDataProvider(self.root_folder, self.sheerka, name)
        return self.current_sdp()

    def save_event(self, event):
        """Persist an event through the top sdp."""
        return self.current_sdp().save_event(event)

    def save_result(self, execution_context, is_admin):
        """Persist an execution result through the top sdp."""
        return self.current_sdp().save_result(execution_context, is_admin)

    def is_dirty(self):
        # NOTE(review): accessed without parentheses — CacheManager.is_dirty
        # appears to be a property/attribute; confirm against CacheManager.
        return self.current_cache_manager().is_dirty
False) self.sheerka.bind_service_method(self.format_rules, False) + self.sheerka.bind_service_method(self.admin_push_ontology, True, as_name="push_ontology") + self.sheerka.bind_service_method(self.admin_pop_ontology, True, as_name="pop_ontology") + self.sheerka.bind_service_method(self.ontologies, False) def caches_names(self): """ Returns the name of all the caches :return: """ - return list(self.sheerka.cache_manager.caches.keys()) + return list(self.sheerka.om.current_cache_manager().caches.keys()) def cache(self, name, *keys): """ @@ -42,13 +46,13 @@ class SheerkaAdmin(BaseService): :param keys: look for a specific key. May ask to sdp if the key is not in cache :return: """ - if name not in self.sheerka.cache_manager.caches: + if name not in self.sheerka.om.current_cache_manager().caches: return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"cache": name}) if not keys: - return self.sheerka.cache_manager.caches[name].cache.copy() + return self.sheerka.om.current_cache_manager().caches[name].cache.copy() - return {key: self.sheerka.cache_manager.get(name, key) for key in keys} + return {key: self.sheerka.om.get(name, key) for key in keys} def restore(self, concept_file=CONCEPTS_FILE_TO_USE): """ @@ -119,7 +123,8 @@ class SheerkaAdmin(BaseService): raise e def concepts(self): - return self.sheerka.new(BuiltinConcepts.TO_LIST, body=self.sheerka.sdp.list(self.sheerka.CONCEPTS_BY_ID_ENTRY)) + concepts = sorted(self.sheerka.om.list(self.sheerka.CONCEPTS_BY_ID_ENTRY), key=lambda item: int(item.id)) + return self.sheerka.new(BuiltinConcepts.TO_LIST, body=concepts) def desc(self, *concepts): ensure_concept(*concepts) @@ -146,7 +151,6 @@ class SheerkaAdmin(BaseService): def format_rules(self): return self.sheerka.new(BuiltinConcepts.TO_LIST, items=self.sheerka.get_format_rules()) - def extended_isinstance(self, a, b): """ switch between sheerka.isinstance and builtin.isinstance @@ -171,3 +175,13 @@ class SheerkaAdmin(BaseService): return False return obj.key in 
BuiltinContainers + + def admin_push_ontology(self, context, name): + return self.sheerka.push_ontology(context, name, False) + + def admin_pop_ontology(self): + return self.sheerka.pop_ontology() + + def ontologies(self): + ontologies = self.sheerka.om.ontologies_names + return self.sheerka.new(BuiltinConcepts.TO_LIST, body=ontologies) diff --git a/src/core/sheerka/services/SheerkaComparisonManager.py b/src/core/sheerka/services/SheerkaComparisonManager.py index 71a1cf6..9d43e2f 100644 --- a/src/core/sheerka/services/SheerkaComparisonManager.py +++ b/src/core/sheerka/services/SheerkaComparisonManager.py @@ -3,8 +3,9 @@ from dataclasses import dataclass from cache.Cache import Cache from cache.ListCache import ListCache from core.builtin_concepts import BuiltinConcepts -from core.global_symbols import EVENT_CONCEPT_PRECEDENCE_MODIFIED, EVENT_RULE_PRECEDENCE_MODIFIED, RULE_COMPARISON_CONTEXT, \ - CONCEPT_COMPARISON_CONTEXT +from core.global_symbols import EVENT_CONCEPT_PRECEDENCE_MODIFIED, EVENT_RULE_PRECEDENCE_MODIFIED, \ + RULE_COMPARISON_CONTEXT, \ + CONCEPT_COMPARISON_CONTEXT, NotFound from core.builtin_helpers import ensure_concept_or_rule from core.concept import Concept from core.sheerka.services.SheerkaRuleManager import SheerkaRuleManager @@ -136,8 +137,8 @@ class SheerkaComparisonManager(BaseService): def _add_comparison(self, context, comparison_obj): key = self._compute_key(comparison_obj.property, comparison_obj.context) - previous = self.sheerka.cache_manager.get(self.COMPARISON_ENTRY, key) - new = previous.copy() if previous else [] + previous = self.sheerka.om.get(self.COMPARISON_ENTRY, key) + new = previous.copy() if isinstance(previous, list) else [] for co in new: if co.property == comparison_obj.property and \ @@ -176,10 +177,10 @@ class SheerkaComparisonManager(BaseService): chicken_an_egg = self.sheerka.new(BuiltinConcepts.CHICKEN_AND_EGG, body=concepts_in_cycle) return self.sheerka.ret(self.NAME, False, chicken_an_egg) - 
self.sheerka.cache_manager.put(self.COMPARISON_ENTRY, key, comparison_obj) - self.sheerka.cache_manager.put(self.RESOLVED_COMPARISON_ENTRY, key, self._compute_weights(new, - lesser_objs_ids, - greatest_objs_ids)) + self.sheerka.om.put(self.COMPARISON_ENTRY, key, comparison_obj) + self.sheerka.om.put(self.RESOLVED_COMPARISON_ENTRY, key, self._compute_weights(new, + lesser_objs_ids, + greatest_objs_ids)) if comparison_obj.property == BuiltinConcepts.PRECEDENCE: if comparison_obj.context == CONCEPT_COMPARISON_CONTEXT: @@ -190,11 +191,11 @@ class SheerkaComparisonManager(BaseService): return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS)) def initialize(self): - cache = ListCache(default=lambda k: self.sheerka.sdp.get(self.COMPARISON_ENTRY, k)) - self.sheerka.cache_manager.register_cache(self.COMPARISON_ENTRY, cache, True, True) + cache = ListCache().auto_configure(self.COMPARISON_ENTRY) + self.sheerka.om.register_cache(self.COMPARISON_ENTRY, cache, True, True) - cache = Cache() - self.sheerka.cache_manager.register_cache(self.RESOLVED_COMPARISON_ENTRY, cache, persist=False) + cache = Cache().auto_configure(self.RESOLVED_COMPARISON_ENTRY) + self.sheerka.om.register_cache(self.RESOLVED_COMPARISON_ENTRY, cache, persist=False) self.sheerka.bind_service_method(self.set_is_greater_than, True) self.sheerka.bind_service_method(self.set_is_less_than, True) @@ -325,19 +326,26 @@ class SheerkaComparisonManager(BaseService): return self._get_partition(weighted_concept) def get_concepts_weights(self, prop_name, comparison_context="#"): - weighted_concepts = self.sheerka.cache_manager.get( - self.RESOLVED_COMPARISON_ENTRY, - self._compute_key(prop_name, comparison_context)) + # KSI 2021-01-10 This implementation seems to be too complicated + # Chances are that there is a better way to implement this. 
+ # Note that I don't want to use a DictionaryCache for the RESOLVED_COMPARISON_ENTRY + # as I don't need to have all the keys in memory at the same time + # Anyway... - if weighted_concepts is None: - key = self._compute_key(prop_name, comparison_context) - entries = self.sheerka.cache_manager.get(self.COMPARISON_ENTRY, key) + # If the weighted_concepts is in the TOP LAYER cache, we can use it + key_to_use = self._compute_key(prop_name, comparison_context) + if self.sheerka.om.current_cache_manager().has(self.RESOLVED_COMPARISON_ENTRY, key_to_use): + weighted_concepts = self.sheerka.om.get(self.RESOLVED_COMPARISON_ENTRY, key_to_use) + else: + # otherwise, either it's not computed yet or it does not include the info of the current layer + # In both case, it is safer to recompute the weights + entries = self.sheerka.om.list_by_key(self.COMPARISON_ENTRY, key_to_use) if entries is None: - return {} + weighted_concepts = {} # Why not put it in cache ??? else: weighted_concepts = self._compute_weights(entries) - self.sheerka.cache_manager.put(self.RESOLVED_COMPARISON_ENTRY, key, weighted_concepts) + self.sheerka.om.put(self.RESOLVED_COMPARISON_ENTRY, key_to_use, weighted_concepts) return weighted_concepts diff --git a/src/core/sheerka/services/SheerkaConceptManager.py b/src/core/sheerka/services/SheerkaConceptManager.py index 1cfbd16..adf0637 100644 --- a/src/core/sheerka/services/SheerkaConceptManager.py +++ b/src/core/sheerka/services/SheerkaConceptManager.py @@ -8,10 +8,9 @@ from cache.SetCache import SetCache from core.builtin_concepts import ErrorConcept from core.builtin_concepts_ids import BuiltinConcepts, AllBuiltinConcepts, BuiltinUnique from core.builtin_helpers import ensure_concept -from core.concept import Concept, DEFINITION_TYPE_DEF, DEFINITION_TYPE_BNF, freeze_concept_attrs, NotInit, \ - ConceptMetadata +from core.concept import Concept, DEFINITION_TYPE_DEF, DEFINITION_TYPE_BNF, freeze_concept_attrs, ConceptMetadata from core.error import ErrorObj -from 
core.global_symbols import EVENT_CONCEPT_CREATED +from core.global_symbols import EVENT_CONCEPT_CREATED, NotInit, NotFound from core.sheerka.services.sheerka_service import BaseService from core.tokenizer import Tokenizer, TokenKind from sdp.sheerkaDataProvider import SheerkaDataProviderDuplicateKeyError @@ -101,34 +100,28 @@ class SheerkaConceptManager(BaseService): self.sheerka.bind_service_method(self.get_by_name, False, visible=False) self.sheerka.bind_service_method(self.get_by_hash, False, visible=False) self.sheerka.bind_service_method(self.get_by_id, False, visible=False) - self.sheerka.bind_service_method(self.not_is_variable, False, visible=False) + self.sheerka.bind_service_method(self.is_not_a_variable, False, visible=False) - def params(cache_name): - return { - 'default': lambda k: self.sheerka.sdp.get(cache_name, k), - 'extend_exists': lambda k: self.sheerka.sdp.exists(cache_name, k) - } + register_concept_cache = self.sheerka.om.register_concept_cache - register_concept_cache = self.sheerka.cache_manager.register_concept_cache - - cache = Cache(**params(self.CONCEPTS_BY_ID_ENTRY)) + cache = Cache().auto_configure(self.CONCEPTS_BY_ID_ENTRY) register_concept_cache(self.CONCEPTS_BY_ID_ENTRY, cache, lambda c: c.id, True) - cache = ListIfNeededCache(**params(self.CONCEPTS_BY_KEY_ENTRY)) + cache = ListIfNeededCache().auto_configure(self.CONCEPTS_BY_KEY_ENTRY) register_concept_cache(self.CONCEPTS_BY_KEY_ENTRY, cache, lambda c: c.key, True) - cache = ListIfNeededCache(**params(self.CONCEPTS_BY_NAME_ENTRY)) + cache = ListIfNeededCache().auto_configure(self.CONCEPTS_BY_NAME_ENTRY) register_concept_cache(self.CONCEPTS_BY_NAME_ENTRY, cache, lambda c: c.name, True) - cache = ListIfNeededCache(**params(self.CONCEPTS_BY_HASH_ENTRY)) + cache = ListIfNeededCache().auto_configure(self.CONCEPTS_BY_HASH_ENTRY) register_concept_cache(self.CONCEPTS_BY_HASH_ENTRY, cache, lambda c: c.get_definition_hash(), True) - cache = SetCache(default=lambda k: 
self.sheerka.sdp.get(self.CONCEPTS_REFERENCES_ENTRY, k)) - self.sheerka.cache_manager.register_cache(self.CONCEPTS_REFERENCES_ENTRY, cache) + cache = SetCache().auto_configure(self.CONCEPTS_REFERENCES_ENTRY) + self.sheerka.om.register_cache(self.CONCEPTS_REFERENCES_ENTRY, cache) def initialize_deferred(self, context, is_first_time): if is_first_time: - self.sheerka.cache_manager.put(self.sheerka.OBJECTS_IDS_ENTRY, self.USER_CONCEPTS_IDS, 1000) + self.sheerka.om.put(self.sheerka.OBJECTS_IDS_ENTRY, self.USER_CONCEPTS_IDS, 1000) def initialize_builtin_concepts(self): """ @@ -146,11 +139,11 @@ class SheerkaConceptManager(BaseService): concept.get_metadata().is_unique = True concept.get_metadata().is_evaluated = True - from_db = self.sheerka.cache_manager.get(self.CONCEPTS_BY_KEY_ENTRY, concept.get_metadata().key) - if from_db is None: + from_db = self.sheerka.om.get(self.CONCEPTS_BY_KEY_ENTRY, concept.get_metadata().key) + if from_db is NotFound: # self.init_log.debug(f"'{concept.name}' concept is not found in db. Adding.") self.set_id_if_needed(concept, True) - self.sheerka.cache_manager.add_concept(concept) + self.sheerka.om.add_concept(concept) else: # self.init_log.debug(f"Found concept '{from_db}' in db. Updating.") concept.update_from(from_db) @@ -173,9 +166,9 @@ class SheerkaConceptManager(BaseService): concept.init_key() init_bnf_ret_value = None - cache_manager = sheerka.cache_manager + ontology = sheerka.om - if cache_manager.exists(self.CONCEPTS_BY_HASH_ENTRY, concept.get_definition_hash()): + if ontology.exists(self.CONCEPTS_BY_HASH_ENTRY, concept.get_definition_hash()): error = SheerkaDataProviderDuplicateKeyError(self.CONCEPTS_BY_KEY_ENTRY + "." 
+ concept.key, concept) return sheerka.ret( self.NAME, @@ -186,9 +179,6 @@ class SheerkaConceptManager(BaseService): # set id before saving in db sheerka.set_id_if_needed(concept, False) - # freeze attributes - freeze_concept_attrs(concept) - # check if the bnf definition is correctly computed try: self.bnp.ensure_bnf(context, concept) @@ -196,7 +186,7 @@ class SheerkaConceptManager(BaseService): return sheerka.ret(self.NAME, False, ex.args[0]) # compute new concepts_by_first_keyword - init_ret_value = self.bnp.get_concepts_by_first_token(context, [concept], True) + init_ret_value = self.bnp.compute_concepts_by_first_token(context, [concept], True) if not init_ret_value.status: return sheerka.ret(self.NAME, False, ErrorConcept(init_ret_value.value)) concepts_by_first_keyword = init_ret_value.body @@ -208,18 +198,20 @@ class SheerkaConceptManager(BaseService): resolved_concepts_by_first_keyword = init_ret_value.body # if everything is fine + freeze_concept_attrs(concept) concept.freeze_definition_hash() - cache_manager.add_concept(concept) - cache_manager.put(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, concepts_by_first_keyword) - cache_manager.put(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, resolved_concepts_by_first_keyword) + + ontology.add_concept(concept) + ontology.put(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, concepts_by_first_keyword) + ontology.put(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, resolved_concepts_by_first_keyword) if concept.get_metadata().definition_type == DEFINITION_TYPE_DEF and concept.get_metadata().definition != concept.name: # allow search by definition when definition relevant - cache_manager.put(self.sheerka.CONCEPTS_BY_NAME_ENTRY, concept.get_metadata().definition, concept) + ontology.put(self.sheerka.CONCEPTS_BY_NAME_ENTRY, concept.get_metadata().definition, concept) # update references for ref in self.compute_references(concept): - cache_manager.put(self.CONCEPTS_REFERENCES_ENTRY, ref, concept.id) 
+ ontology.put(self.CONCEPTS_REFERENCES_ENTRY, ref, concept.id) # TODO : this line seems to be useless # The grammar is never reset @@ -246,7 +238,7 @@ class SheerkaConceptManager(BaseService): # to_add is a dictionary # to_add = { - # 'meta' : {} of metadata to add/update, + # 'meta' : {} of metadata to update, # 'props' : {} of properties to add/update, # 'variables': {} of variables to add/update, # } @@ -259,12 +251,12 @@ class SheerkaConceptManager(BaseService): # } # sheerka = self.sheerka - cache_manager = self.sheerka.cache_manager + cache_manager = self.sheerka.om if not to_add and not to_remove: return sheerka.ret(self.NAME, False, sheerka.err(NoModificationFound(concept))) - if not sheerka.cache_manager.exists(self.CONCEPTS_BY_ID_ENTRY, concept.id): + if not sheerka.om.exists(self.CONCEPTS_BY_ID_ENTRY, concept.id): return sheerka.ret(self.NAME, False, sheerka.new(BuiltinConcepts.UNKNOWN_CONCEPT, body=concept)) # modify the metadata. Almost all ConceptMetadata attributes except variables and props @@ -274,8 +266,6 @@ class SheerkaConceptManager(BaseService): if res is not None: return res - freeze_concept_attrs(new_concept) - # To update concept by first keyword # first remove the old references keywords = self.bnp.get_first_tokens(sheerka, concept) # keyword of the old concept @@ -289,7 +279,7 @@ class SheerkaConceptManager(BaseService): pass # and then update - init_ret_value = self.bnp.get_concepts_by_first_token(context, [new_concept], False, concepts_by_first_keyword) + init_ret_value = self.bnp.compute_concepts_by_first_token(context, [new_concept], False, concepts_by_first_keyword) if not init_ret_value.status: return sheerka.ret(self.NAME, False, ErrorConcept(init_ret_value.value)) concepts_by_first_keyword = init_ret_value.body @@ -316,6 +306,10 @@ class SheerkaConceptManager(BaseService): cache_manager.put(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, resolved_concepts_by_first_keyword) + # everything seems to be fine. 
Update the list of attributes + # Caution. Must be done AFTER update_concept() + freeze_concept_attrs(new_concept) + # TODO : update when definition_type = DEFINITION_TYPE_DEF : have a look at update_references() below # TODO : Update concepts grammars : have a look at update_references() below if modify_source: @@ -332,13 +326,13 @@ class SheerkaConceptManager(BaseService): :return: """ sheerka = context.sheerka - refs = self.sheerka.cache_manager.get(self.CONCEPTS_REFERENCES_ENTRY, concept.id) - if refs: + refs = self.sheerka.om.get(self.CONCEPTS_REFERENCES_ENTRY, concept.id) + if refs is not NotFound: refs_instances = [sheerka.new_from_template(c, c.key) for c in [self.get_by_id(ref) for ref in refs]] return sheerka.ret(self.NAME, False, sheerka.err(ConceptIsReferenced(refs_instances))) try: - sheerka.cache_manager.remove_concept(concept) + sheerka.om.remove_concept(concept) return sheerka.ret(self.NAME, True, sheerka.new(BuiltinConcepts.SUCCESS)) except ConceptNotFound as ex: return sheerka.ret(self.NAME, False, sheerka.err(ex)) @@ -387,7 +381,7 @@ class SheerkaConceptManager(BaseService): return entry_key = self.BUILTIN_CONCEPTS_IDS if is_builtin else self.USER_CONCEPTS_IDS - obj.get_metadata().id = str(self.sheerka.cache_manager.get(self.sheerka.OBJECTS_IDS_ENTRY, entry_key)) + obj.get_metadata().id = str(self.sheerka.om.get(self.sheerka.OBJECTS_IDS_ENTRY, entry_key)) # self.log.debug(f"Setting id '{obj.metadata.id}' to concept '{obj.metadata.name}'.") def get_by_key(self, concept_key, concept_id=None): @@ -412,7 +406,7 @@ class SheerkaConceptManager(BaseService): """ if concept_id is None: return False - return self.sheerka.cache_manager.has(self.CONCEPTS_BY_ID_ENTRY, concept_id) + return self.sheerka.om.current_cache_manager().has(self.CONCEPTS_BY_ID_ENTRY, concept_id) def has_key(self, concept_key): """ @@ -421,7 +415,7 @@ class SheerkaConceptManager(BaseService): :param concept_key: :return: """ - return 
self.sheerka.cache_manager.has(self.CONCEPTS_BY_KEY_ENTRY, concept_key) + return self.sheerka.om.current_cache_manager().has(self.CONCEPTS_BY_KEY_ENTRY, concept_key) def has_name(self, concept_name): """ @@ -430,7 +424,7 @@ class SheerkaConceptManager(BaseService): :param concept_name: :return: """ - return self.sheerka.cache_manager.has(self.CONCEPTS_BY_NAME_ENTRY, concept_name) + return self.sheerka.om.current_cache_manager().has(self.CONCEPTS_BY_NAME_ENTRY, concept_name) def has_hash(self, concept_hash): """ @@ -439,7 +433,7 @@ class SheerkaConceptManager(BaseService): :param concept_hash: :return: """ - return self.sheerka.cache_manager.has(self.CONCEPTS_BY_HASH_ENTRY, concept_hash) + return self.sheerka.om.current_cache_manager().has(self.CONCEPTS_BY_HASH_ENTRY, concept_hash) def internal_get(self, index_name, key, cache_name, concept_id=None): """ @@ -454,8 +448,8 @@ class SheerkaConceptManager(BaseService): if key is None: return ErrorConcept(f"Concept '{key}' is undefined.") - concepts = self.sheerka.cache_manager.get(cache_name, key) - if concepts: + concepts = self.sheerka.om.get(cache_name, key) + if concepts is not NotFound: if concept_id is None: return concepts @@ -479,13 +473,13 @@ class SheerkaConceptManager(BaseService): :return: """ - refs = self.sheerka.cache_manager.get(self.CONCEPTS_REFERENCES_ENTRY, concept.id) - if not refs: + refs = self.sheerka.om.get(self.CONCEPTS_REFERENCES_ENTRY, concept.id) + if refs is NotFound: return for concept_id in refs: # remove the grammar entry so that it can be recreated - self.sheerka.cache_manager.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_id) + self.sheerka.om.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_id) # reset the bnf definition if needed if modified_concept: @@ -527,13 +521,13 @@ class SheerkaConceptManager(BaseService): return refs - def not_is_variable(self, name): + def is_not_a_variable(self, name): """ Given a name tells if it refers to a variable name :param name: :return: """ 
- return not self.sheerka.cache_manager.get(self.sheerka.CONCEPTS_BY_NAME_ENTRY, name) + return self.sheerka.om.get(self.sheerka.CONCEPTS_BY_NAME_ENTRY, name) is NotFound @staticmethod def _name_has_changed(to_add): diff --git a/src/core/sheerka/services/SheerkaConceptsAlgebra.py b/src/core/sheerka/services/SheerkaConceptsAlgebra.py index c973108..883fc42 100644 --- a/src/core/sheerka/services/SheerkaConceptsAlgebra.py +++ b/src/core/sheerka/services/SheerkaConceptsAlgebra.py @@ -119,10 +119,9 @@ class SheerkaConceptsAlgebra(BaseService): if nb_props == 0: return res - concepts_service = self.sheerka.services[SheerkaConceptManager.NAME] + concepts_manager = self.sheerka.services[SheerkaConceptManager.NAME] - all_concepts = self.sheerka.cache_manager.copy(concepts_service.CONCEPTS_BY_ID_ENTRY).values() \ - if self.sheerka.cache_manager.cache_only else self.sheerka.sdp.list(concepts_service.CONCEPTS_BY_ID_ENTRY) + all_concepts = self.sheerka.om.list(concepts_manager.CONCEPTS_BY_ID_ENTRY) for c in all_concepts: score = self._compute_score(c, concept, step_b=round(1 / nb_props, 2)) diff --git a/src/core/sheerka/services/SheerkaDebugManager.py b/src/core/sheerka/services/SheerkaDebugManager.py index 87b0573..8fc9d85 100644 --- a/src/core/sheerka/services/SheerkaDebugManager.py +++ b/src/core/sheerka/services/SheerkaDebugManager.py @@ -3,17 +3,17 @@ import re from dataclasses import dataclass from core.builtin_concepts import BuiltinConcepts -from core.concept import Concept, NotInit +from core.builtin_helpers import evaluate_expression +from core.concept import Concept +from core.global_symbols import NotInit, NotFound from core.sheerka.ExecutionContext import ExecutionContext from core.sheerka.services.sheerka_service import BaseService from core.utils import CONSOLE_COLORS_MAP as CCM, CONSOLE_COLUMNS, PRIMITIVES_TYPES -from core.utils import evaluate_expression, as_bag +from core.utils import as_bag from parsers.BaseNodeParser import SourceCodeWithConceptNode, 
UnrecognizedTokensNode pp = pprint.PrettyPrinter(indent=2, width=CONSOLE_COLUMNS) -NotFound = "** Not Found **" - class ConceptDebugObj: def __init__(self, concept, **kwargs): @@ -278,12 +278,21 @@ class SheerkaDebugManager(BaseService): def __init__(self, sheerka): super().__init__(sheerka) self.activated = False # is debug activated - self.explicit = False # No need to activate context debug when debug mode is on - self.context_cache = set() # debug for specific context - self.variable_cache = set() # debug for specific variable + self.explicit = False # No need to activate context debug when debug mode is on # to remove ? + self.context_cache = set() # debug for specific context # to remove ? + self.variable_cache = set() # debug for specific variable # to remove ? self.debug_vars_settings = [] self.debug_rules_settings = [] self.debug_concepts_settings = [] + self.state_vars = [ + "activated", + "explicit", # to remove ? + "context_cache", # to remove ? + "variable_cache", # to remove ? + "debug_vars_settings", + "debug_rules_settings", + "debug_concepts_settings" + ] def initialize(self): # TO REMOVE ??? 
@@ -307,13 +316,7 @@ class SheerkaDebugManager(BaseService): # self.sheerka.bind_service_method(self.get_debug_settings, False, as_name="debug_settings") def initialize_deferred(self, context, is_first_time): - self.restore_values("activated", - "explicit", - "context_cache", - "variable_cache", - "debug_vars_settings", - "debug_rules_settings", - "debug_concepts_settings") + self.restore_state() def reset(self): """ @@ -327,6 +330,12 @@ class SheerkaDebugManager(BaseService): self.debug_rules_settings.clear() self.debug_concepts_settings.clear() + def save_state(self, context): + self.store_values(context, *self.state_vars) + + def restore_state(self): + self.restore_values(*self.state_vars) + def set_debug(self, context, value=True): self.activated = value self.sheerka.record_var(context, self.NAME, "activated", self.activated) @@ -728,6 +737,16 @@ class SheerkaDebugManager(BaseService): @staticmethod def parse_debug_args(item_name, *args, **kwargs): + """ + Returns + i : item to debug. 
It can be a Concept, Rule or a variable + s : Service to debug (so far, it is SheerkaService) + m : Method within the serice + c_id : Context id + c_children : True / False to allow debugging of context children + d : Debug id + e : enable / disabled + """ service, method_name, context_id, context_children, item, debug_id, enabled = None, None, None, False, None, None, True if len(args) > 0: if args[0] is None or args[0] == "": diff --git a/src/core/sheerka/services/SheerkaEvaluateConcept.py b/src/core/sheerka/services/SheerkaEvaluateConcept.py index 76500bf..b066cf5 100644 --- a/src/core/sheerka/services/SheerkaEvaluateConcept.py +++ b/src/core/sheerka/services/SheerkaEvaluateConcept.py @@ -2,8 +2,9 @@ from dataclasses import dataclass from core.builtin_concepts import BuiltinConcepts from core.builtin_helpers import expect_one, only_successful, parse_unrecognized, evaluate, ensure_concept -from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved, NotInit, AllConceptParts, \ +from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved, AllConceptParts, \ concept_part_value +from core.global_symbols import NotInit from core.sheerka.services.SheerkaConceptManager import SheerkaConceptManager from core.sheerka.services.SheerkaExecute import ParserInput from core.sheerka.services.sheerka_service import BaseService @@ -605,7 +606,7 @@ class SheerkaEvaluateConcept(BaseService): # # update the cache for concepts with no variables # Cannot use cache. 
See the comment at the beginning of this method # if len(concept.get_metadata().variables) == 0: - # self.sheerka.cache_manager.put(self.sheerka.CONCEPTS_BY_ID_ENTRY, concept.id, concept) + # self.sheerka.om.put(self.sheerka.CONCEPTS_BY_ID_ENTRY, concept.id, concept) if not concept.get_metadata().is_builtin: self.sheerka.register_object(sub_context, concept.name, concept) diff --git a/src/core/sheerka/services/SheerkaEventManager.py b/src/core/sheerka/services/SheerkaEventManager.py index 71e43da..998d757 100644 --- a/src/core/sheerka/services/SheerkaEventManager.py +++ b/src/core/sheerka/services/SheerkaEventManager.py @@ -1,5 +1,5 @@ from threading import RLock - +from core.global_symbols import NotFound from core.sheerka.services.sheerka_service import BaseService @@ -19,6 +19,16 @@ class SheerkaEventManager(BaseService): self.sheerka.bind_service_method(self.subscribe, True, visible=False) self.sheerka.bind_service_method(self.publish, True, visible=False) + def save_state(self, context): + with self._lock: + copy = self.subscribers.copy() + self.sheerka.record_internal_var(context, self.NAME, "subscribers", copy) + + def restore_state(self): + with self._lock: + if (from_cache := self.sheerka.load_internal_var(self.NAME, "subscribers")) is not NotFound: + self.subscribers = from_cache + def subscribe(self, topic, callback): """ To subscribe to a topic, just give the callback to call @@ -51,9 +61,10 @@ class SheerkaEventManager(BaseService): except KeyError: pass - def reset_topic(self, topic): + def test_only_reset_topic(self, topic): """ - Remove all subsccribers from a given topic + Remove all subscribers from a given topic + TO REMOVE once sheerka ontology is fully implemented :param topic: :return: """ diff --git a/src/core/sheerka/services/SheerkaExecute.py b/src/core/sheerka/services/SheerkaExecute.py index c88cdd4..81abdd1 100644 --- a/src/core/sheerka/services/SheerkaExecute.py +++ b/src/core/sheerka/services/SheerkaExecute.py @@ -1,6 +1,8 @@ import 
core.utils from cache.Cache import Cache +from cache.FastCache import FastCache from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept +from core.global_symbols import NotFound from core.sheerka.services.sheerka_service import BaseService from core.tokenizer import Tokenizer, TokenKind, Token @@ -166,7 +168,7 @@ class SheerkaExecute(BaseService): def __init__(self, sheerka): super().__init__(sheerka) - self.pi_cache = Cache(default=lambda key: ParserInput(key), max_size=20) + self.pi_cache = FastCache(default=lambda key: ParserInput(key), max_size=20) self.instantiated_evaluators = None self.evaluators_by_name = None @@ -192,10 +194,12 @@ class SheerkaExecute(BaseService): def initialize(self): self.sheerka.bind_service_method(self.execute, True) - self.sheerka.cache_manager.register_cache(self.PARSERS_INPUTS_ENTRY, self.pi_cache, False) self.reset_registered_evaluators() self.reset_registered_parsers() + def reset_state(self): + self.pi_cache.clear() + def reset_registered_evaluators(self): # instantiate evaluators, once for all, only keep when it's enabled self.instantiated_evaluators = [e_class() for e_class in self.sheerka.evaluators] @@ -340,7 +344,7 @@ class SheerkaExecute(BaseService): if tokens is None or self.pi_cache.has(text): pi = self.pi_cache.get(text) - if pi is None: # when CacheManager.cache_only is True + if pi is NotFound: # when CacheManager.cache_only is True pi = ParserInput(text) self.pi_cache.put(text, pi) return pi diff --git a/src/core/sheerka/services/SheerkaFunctionsParametersHistory.py b/src/core/sheerka/services/SheerkaFunctionsParametersHistory.py index 86c6dd9..44f3b55 100644 --- a/src/core/sheerka/services/SheerkaFunctionsParametersHistory.py +++ b/src/core/sheerka/services/SheerkaFunctionsParametersHistory.py @@ -3,6 +3,7 @@ from operator import itemgetter from typing import Tuple, Dict, List from cache.Cache import Cache +from core.global_symbols import NotFound from core.sheerka.services.sheerka_service import 
BaseService, ServiceObj @@ -29,10 +30,10 @@ class SheerkaFunctionsParametersHistory(BaseService): def __init__(self, sheerka): super().__init__(sheerka) - self.cache = Cache(max_size=1024, default=lambda k: self.sheerka.sdp.get(self.FUNCTIONS_PARAMETERS_ENTRY, k)) def initialize(self): - self.sheerka.cache_manager.register_cache(self.FUNCTIONS_PARAMETERS_ENTRY, self.cache, True, True) + cache = Cache(max_size=1024).auto_configure(self.FUNCTIONS_PARAMETERS_ENTRY) + self.sheerka.om.register_cache(self.FUNCTIONS_PARAMETERS_ENTRY, cache, True, True) return self def record_function_parameter(self, context, func_name: str, param_number: int, param_value: str): @@ -44,8 +45,11 @@ class SheerkaFunctionsParametersHistory(BaseService): :param param_value: :return: """ - old = self.cache.get(func_name) - if old is not None: + old = self.sheerka.om.get(self.FUNCTIONS_PARAMETERS_ENTRY, func_name) + if old is NotFound: + obj = FunctionParametersObj(context.event.get_digest(), func_name, {param_number: [(param_value, 1)]}) + self.sheerka.om.put(self.FUNCTIONS_PARAMETERS_ENTRY, func_name, obj) + else: if param_number in old.params: lst = old.params[param_number] for i, value in enumerate(lst): # value is a tuple (param_value, counter) @@ -56,10 +60,7 @@ class SheerkaFunctionsParametersHistory(BaseService): lst.append((param_value, 1)) else: old.params[param_number] = [(param_value, 1)] - self.cache.put(func_name, old) - else: - obj = FunctionParametersObj(context.event.get_digest(), func_name, {param_number: [(param_value, 1)]}) - self.cache.put(func_name, obj) + self.sheerka.om.put(self.FUNCTIONS_PARAMETERS_ENTRY, func_name, old) def get_function_parameters(self, func_name: str, param_number: int): """ @@ -68,8 +69,8 @@ class SheerkaFunctionsParametersHistory(BaseService): :param param_number: :return: """ - values = self.cache.get(func_name) - if values is None: + values = self.sheerka.om.get(self.FUNCTIONS_PARAMETERS_ENTRY, func_name) + if values is NotFound: return [] if 
param_number not in values.params: diff --git a/src/core/sheerka/services/SheerkaHistoryManager.py b/src/core/sheerka/services/SheerkaHistoryManager.py index 6d1f463..d3c9525 100644 --- a/src/core/sheerka/services/SheerkaHistoryManager.py +++ b/src/core/sheerka/services/SheerkaHistoryManager.py @@ -67,10 +67,10 @@ class SheerkaHistoryManager(BaseService): :return: """ - events = list(self.sheerka.sdp.load_events(depth, start)) + events = list(self.sheerka.om.current_sdp().load_events(depth, start)) for event in events: try: - result = self.sheerka.sdp.load_result(event.get_digest()) + result = self.sheerka.om.current_sdp().load_result(event.get_digest()) except (IOError, KeyError): result = None yield History(event, result) diff --git a/src/core/sheerka/services/SheerkaSetsManager.py b/src/core/sheerka/services/SheerkaIsAManager.py similarity index 85% rename from src/core/sheerka/services/SheerkaSetsManager.py rename to src/core/sheerka/services/SheerkaIsAManager.py index 313d0fb..ac5b073 100644 --- a/src/core/sheerka/services/SheerkaSetsManager.py +++ b/src/core/sheerka/services/SheerkaIsAManager.py @@ -4,21 +4,18 @@ from cache.SetCache import SetCache from core.ast_helpers import UnreferencedVariablesVisitor from core.builtin_concepts import BuiltinConcepts from core.concept import Concept, ConceptParts, DEFINITION_TYPE_BNF +from core.global_symbols import NotFound from core.sheerka.services.SheerkaConceptManager import SheerkaConceptManager from core.sheerka.services.sheerka_service import BaseService -GROUP_PREFIX = 'All_' - -class SheerkaSetsManager(BaseService): - NAME = "SetsManager" - CONCEPTS_GROUPS_ENTRY = "SetsManager:Concepts_Groups" - CONCEPTS_IN_GROUPS_ENTRY = "SetsManager:Concepts_In_Groups" # cache for get_set_elements() +class SheerkaIsAManager(BaseService): + NAME = "IsAManager" + CONCEPTS_GROUPS_ENTRY = "IsAManager:Concepts_Groups" + CONCEPTS_IN_GROUPS_ENTRY = "IsAManager:Concepts_In_Groups" # cache for get_set_elements() def __init__(self, 
sheerka): super().__init__(sheerka) - self.sets = SetCache(default=lambda k: self.sheerka.sdp.get(self.CONCEPTS_GROUPS_ENTRY, k)) - self.concepts_in_set = Cache() def initialize(self): self.sheerka.bind_service_method(self.set_isa, True) @@ -28,8 +25,10 @@ class SheerkaSetsManager(BaseService): self.sheerka.bind_service_method(self.isa, False) self.sheerka.bind_service_method(self.isaset, True) # concept is evaluated, need to change the code - self.sheerka.cache_manager.register_cache(self.CONCEPTS_GROUPS_ENTRY, self.sets) - self.sheerka.cache_manager.register_cache(self.CONCEPTS_IN_GROUPS_ENTRY, self.concepts_in_set, persist=False) + cache = SetCache().auto_configure(self.CONCEPTS_GROUPS_ENTRY) + self.sheerka.om.register_cache(self.CONCEPTS_GROUPS_ENTRY, cache) + cache = Cache().auto_configure(self.CONCEPTS_IN_GROUPS_ENTRY) + self.sheerka.om.register_cache(self.CONCEPTS_IN_GROUPS_ENTRY, cache, persist=False) def set_isa(self, context, concept, concept_set): """ @@ -43,8 +42,8 @@ class SheerkaSetsManager(BaseService): context.log(f"Setting concept {concept} is a {concept_set}", who=self.NAME) core.builtin_helpers.ensure_concept(concept, concept_set) - if BuiltinConcepts.ISA in concept.get_metadata().props and concept_set in concept.get_metadata().props[ - BuiltinConcepts.ISA]: + if BuiltinConcepts.ISA in concept.get_metadata().props and \ + concept_set in concept.get_metadata().props[BuiltinConcepts.ISA]: return self.sheerka.ret( self.NAME, False, @@ -75,23 +74,23 @@ class SheerkaSetsManager(BaseService): context.log(f"Adding concept {concept} to set {concept_set}", who=self.NAME) core.builtin_helpers.ensure_concept(concept, concept_set) - set_elements = self.sets.get(concept_set.id) - if set_elements and concept.id in set_elements: + set_elements = self.sheerka.om.get(self.CONCEPTS_GROUPS_ENTRY, concept_set.id) + if set_elements is not NotFound and concept.id in set_elements: return self.sheerka.ret( self.NAME, False, 
self.sheerka.new(BuiltinConcepts.CONCEPT_ALREADY_IN_SET, body=concept, concept_set=concept_set)) - self.sets.put(concept_set.id, concept.id) + self.sheerka.om.put(self.CONCEPTS_GROUPS_ENTRY, concept_set.id, concept.id) # invalidate the cache of what contains concept_set - self.concepts_in_set.delete(concept_set.id) + self.sheerka.om.delete(self.CONCEPTS_IN_GROUPS_ENTRY, concept_set.id) # update concept_set references self.sheerka.services[SheerkaConceptManager.NAME].update_references(context, concept_set) # remove the grammar entry so that it can be recreated - self.sheerka.cache_manager.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_set.id) + self.sheerka.om.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_set.id) return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS)) @@ -116,7 +115,7 @@ class SheerkaSetsManager(BaseService): concept_set=concept_set) else: body = self.sheerka.new(BuiltinConcepts.SUCCESS) - self.concepts_in_set.delete(concept_set.id) + self.sheerka.om.delete(self.CONCEPTS_IN_GROUPS_ENTRY, concept_set.id) return self.sheerka.ret(self.NAME, len(already_in_set) != len(concepts), body) @@ -136,7 +135,7 @@ class SheerkaSetsManager(BaseService): return self.sheerka.new(BuiltinConcepts.NOT_A_SET, body=concept) # first, try to see if sub_concept has it's own group entry - ids = self.sets.get(sub_concept.id) + ids = self.sheerka.om.get(self.CONCEPTS_GROUPS_ENTRY, sub_concept.id) concepts = self._get_concepts(context, ids, True) # aggregate with en entries from its body @@ -166,13 +165,13 @@ class SheerkaSetsManager(BaseService): return concepts # already in cache ? 
- if res := self.concepts_in_set.get(concept.id): + if (res := self.sheerka.om.get(self.CONCEPTS_IN_GROUPS_ENTRY, concept.id)) is not NotFound: return res res = _get_set_elements(concept) # put in cache - self.concepts_in_set.put(concept.id, res) + self.sheerka.om.put(self.CONCEPTS_IN_GROUPS_ENTRY, concept.id, res) return res def isinset(self, a, b): @@ -190,8 +189,8 @@ class SheerkaSetsManager(BaseService): if not (a.id and b.id): return False - group_elements = self.sets.get(b.id) - return group_elements and a.id in group_elements + group_elements = self.sheerka.om.get(self.CONCEPTS_GROUPS_ENTRY, b.id) + return group_elements is not NotFound and a.id in group_elements def isa(self, a, b): @@ -226,7 +225,7 @@ class SheerkaSetsManager(BaseService): # check if it has a group # TODO: use cache instead of directly requesting sdp - if self.sets.get(concept.id): + if self.sheerka.om.get(self.CONCEPTS_GROUPS_ENTRY, concept.id) is not NotFound: return True # it may be a concept that references a set @@ -267,7 +266,7 @@ for x in xx__concepts__xx: :return: """ - if not ids: + if ids in (None, NotFound): return [] if not evaluate: diff --git a/src/core/sheerka/services/SheerkaMemory.py b/src/core/sheerka/services/SheerkaMemory.py index 5e60c71..3413832 100644 --- a/src/core/sheerka/services/SheerkaMemory.py +++ b/src/core/sheerka/services/SheerkaMemory.py @@ -4,7 +4,7 @@ from cache.FastCache import FastCache from cache.ListIfNeededCache import ListIfNeededCache from core.builtin_concepts import BuiltinConcepts from core.concept import Concept -from core.global_symbols import EVENT_CONTEXT_DISPOSED +from core.global_symbols import EVENT_CONTEXT_DISPOSED, NotFound from core.sheerka.services.sheerka_service import BaseService, ServiceObj @@ -17,13 +17,11 @@ class SheerkaMemory(BaseService): NAME = "Memory" GLOBAL = "global" - SHORT_TERM_OBJECTS_ENTRY = "Memory:ShortTermMemoryObjects" OBJECTS_ENTRY = "Memory:Objects" def __init__(self, sheerka): super().__init__(sheerka) 
self.short_term_objects = FastCache() - self.memory_objects = ListIfNeededCache(default=lambda k: self.sheerka.sdp.get(self.OBJECTS_ENTRY, k)) self.registration = {} def initialize(self): @@ -35,15 +33,20 @@ class SheerkaMemory(BaseService): self.sheerka.bind_service_method(self.get_from_memory, False) self.sheerka.bind_service_method(self.register_object, True, visible=False) self.sheerka.bind_service_method(self.unregister_object, True, visible=False) - self.sheerka.bind_service_method(self.add_registered_objects, True, visible=False) + self.sheerka.bind_service_method(self.commit_registered_objects, True, visible=False) self.sheerka.bind_service_method(self.memory, False) self.sheerka.bind_service_method(self.mem, False) - self.sheerka.cache_manager.register_cache(self.OBJECTS_ENTRY, self.memory_objects, persist=True, use_ref=True) + cache = ListIfNeededCache().auto_configure(self.OBJECTS_ENTRY) + self.sheerka.om.register_cache(self.OBJECTS_ENTRY, cache, persist=True, use_ref=True) def reset(self): self.short_term_objects.clear() - self.memory_objects.clear() + self.sheerka.om.clear(self.OBJECTS_ENTRY) + + def reset_state(self): + self.short_term_objects.clear() + self.registration.clear() def initialize_deferred(self, context, is_first_time): self.sheerka.subscribe(EVENT_CONTEXT_DISPOSED, self.remove_context) @@ -55,7 +58,7 @@ class SheerkaMemory(BaseService): return self.short_term_objects.cache[id_to_use][key] except KeyError: if context is None: - return None + return NotFound context = context.get_parent() @@ -92,12 +95,12 @@ class SheerkaMemory(BaseService): :param concept: :return: """ - self.memory_objects.put(key, MemoryObject(context.event.get_digest(), concept)) + self.sheerka.om.put(SheerkaMemory.OBJECTS_ENTRY, key, MemoryObject(context.event.get_digest(), concept)) def get_from_memory(self, context, key): """" """ - return self.memory_objects.get(key) + return self.sheerka.om.get(SheerkaMemory.OBJECTS_ENTRY, key) def register_object(self, context, 
key, concept): """ @@ -126,7 +129,7 @@ class SheerkaMemory(BaseService): except KeyError: pass - def add_registered_objects(self, context): + def commit_registered_objects(self, context): """ Adds all registered memory_objects :param context: @@ -147,7 +150,7 @@ class SheerkaMemory(BaseService): name_to_use = name.name if isinstance(name, Concept) else name self.unregister_object(context, name_to_use) obj = self.get_from_memory(context, name_to_use) - if obj is None: + if obj is NotFound: return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"#name": name}) if isinstance(obj, list): @@ -156,8 +159,7 @@ class SheerkaMemory(BaseService): return obj.obj res = {} - for k in self.memory_objects: - obj = self.memory_objects.get(k) + for k, obj in self.sheerka.om.get_all(SheerkaMemory.OBJECTS_ENTRY).items(): if isinstance(obj, list): obj = obj[-1] res[k] = obj.obj @@ -165,5 +167,5 @@ class SheerkaMemory(BaseService): return res def mem(self): - keys = sorted([k for k in self.memory_objects]) + keys = sorted([k for k in self.sheerka.om.list(SheerkaMemory.OBJECTS_ENTRY)]) return {"keys": keys, "len": len(keys)} diff --git a/src/core/sheerka/services/SheerkaQuestion.py b/src/core/sheerka/services/SheerkaQuestion.py index 242fd0e..b8ac4f6 100644 --- a/src/core/sheerka/services/SheerkaQuestion.py +++ b/src/core/sheerka/services/SheerkaQuestion.py @@ -10,27 +10,10 @@ class SheerkaQuestion(BaseService): super().__init__(sheerka) def initialize(self): - # self.sheerka.bind_service_method(self.question, False) self.sheerka.bind_service_method(self.is_question, False) - # def question(self, context, q): - # """ - # Evaluate q in the context in a question - # :param context: - # :param q: - # :return: - # """ - # - # if isinstance(q, Concept): - # with context.push(BuiltinConcepts.EVALUATE_CONCEPT, q, desc=f"Evaluating question '{q}'") as sub_context: - # sub_context.global_hints.add(BuiltinConcepts.EVAL_QUESTION_REQUESTED) - # 
sub_context.global_hints.add(BuiltinConcepts.EVAL_UNTIL_SUCCESS_REQUESTED) - # - # evaluated = self.sheerka.evaluate_concept(sub_context, q) - # - # return evaluated - - def is_question(self, context): + @staticmethod + def is_question(context): """ Returns True if a question is asked :return: diff --git a/src/core/sheerka/services/SheerkaResultManager.py b/src/core/sheerka/services/SheerkaResultManager.py index 0f0afaf..f285dfb 100644 --- a/src/core/sheerka/services/SheerkaResultManager.py +++ b/src/core/sheerka/services/SheerkaResultManager.py @@ -2,7 +2,7 @@ import ast from cache.Cache import Cache from core.builtin_concepts import BuiltinConcepts -from core.global_symbols import EVENT_USER_INPUT_EVALUATED, EVENT_CONCEPT_CREATED +from core.global_symbols import EVENT_USER_INPUT_EVALUATED, EVENT_CONCEPT_CREATED, NotFound from core.sheerka.services.sheerka_service import BaseService from core.utils import CONSOLE_COLORS_MAP as CCM from core.utils import as_bag @@ -13,6 +13,9 @@ MAX_EXECUTION_HISTORY = 100 class SheerkaResultConcept(BaseService): NAME = "Result" + # SheerkaResultConcept seems to be a concept that must not support multiple ontology layers + # We must have always access to everything that was done, whatever the ontology + def __init__(self, sheerka, page_size=30): super().__init__(sheerka) self.page_size = page_size @@ -20,6 +23,7 @@ class SheerkaResultConcept(BaseService): self.last_execution = None self.last_created_concept = None self.last_created_concept_id = None + self.state_vars = ["last_created_concept_id"] def initialize(self): self.sheerka.bind_service_method(self.get_results_by_digest, True) # digest is recorded @@ -27,20 +31,26 @@ class SheerkaResultConcept(BaseService): self.sheerka.bind_service_method(self.get_last_results, True) # digest is recorded self.sheerka.bind_service_method(self.get_results, False) self.sheerka.bind_service_method(self.get_execution_item, False) - self.sheerka.bind_service_method(self.get_last_ret, False, 
as_name="last_ret") + self.sheerka.bind_service_method(self.get_last_return_value, False, as_name="last_ret") self.sheerka.bind_service_method(self.get_last_created_concept, False, as_name="last_created_concept") def initialize_deferred(self, context, is_first_time): - self.restore_values("last_created_concept_id") + self.restore_values(*self.state_vars) self.sheerka.subscribe(EVENT_USER_INPUT_EVALUATED, self.user_input_evaluated) self.sheerka.subscribe(EVENT_CONCEPT_CREATED, self.new_concept_created) - def reset(self): + def test_only_reset(self): self.executions_contexts_cache.clear() self.last_execution = None self.last_created_concept = None self.last_created_concept_id = None + def save_state(self, context): + self.store_values(context, *self.state_vars) + + def restore_state(self): + self.restore_values(*self.state_vars) + @staticmethod def get_predicate(**kwargs): if len(kwargs) == 0: @@ -81,7 +91,7 @@ class SheerkaResultConcept(BaseService): :param record_digest: :return: """ - if digest is None: + if digest is NotFound: return None if filter is not None: @@ -92,8 +102,8 @@ class SheerkaResultConcept(BaseService): result = self.executions_contexts_cache.get(digest) event = result.event else: - result = self.sheerka.sdp.load_result(digest) - event = self.sheerka.sdp.load_event(digest) # there is no real need for a cache of the events + result = self.sheerka.om.current_sdp().load_result(digest) + event = self.sheerka.om.current_sdp().load_event(digest) # really needed ? 
if record_digest: context.log(f"Recording digest '{digest}'") @@ -141,7 +151,7 @@ class SheerkaResultConcept(BaseService): start = len(self.executions_contexts_cache) consumed = 0 while True: - for event in self.sheerka.sdp.load_events(self.page_size, start): + for event in self.sheerka.om.current_sdp().load_events(self.page_size, start): consumed += 1 if event.message.startswith(command): return self.get_results_by_digest(context, event.get_digest(), filter, record_digest, **kwargs) @@ -200,14 +210,14 @@ class SheerkaResultConcept(BaseService): :return: """ digest = self.sheerka.load_var(self.NAME, "digest") - if digest is None: + if digest is NotFound: return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body="no digest") try: if digest in self.executions_contexts_cache: result = self.executions_contexts_cache.get(digest) else: - result = self.sheerka.sdp.load_result(digest) + result = self.sheerka.om.current_sdp().load_result(digest) items = list(self.as_list(result, self.get_predicate(id=item_id))) if len(items) == 0: @@ -227,7 +237,7 @@ class SheerkaResultConcept(BaseService): """ if self.sheerka.save_execution_context: try: - self.sheerka.sdp.save_result(execution_context) + self.sheerka.om.current_sdp().save_result(execution_context) except Exception as ex: print(f"{CCM['red']}Failed to save execution context. 
Reason: {ex}{CCM['reset']}") pass @@ -236,7 +246,7 @@ class SheerkaResultConcept(BaseService): self.executions_contexts_cache.put(execution_context.event.get_digest(), execution_context) self.last_execution = execution_context - def get_last_ret(self, context): + def get_last_return_value(self, context): """ Return the last return value(s) :return: @@ -248,7 +258,7 @@ class SheerkaResultConcept(BaseService): if event_id is not None: try: - execution_result = self.sheerka.sdp.load_result(event_id) + execution_result = self.sheerka.om.current_sdp().load_result(event_id) return execution_result.values["return_values"] except FileNotFoundError as ex: @@ -278,9 +288,9 @@ class SheerkaResultConcept(BaseService): page_size = 2 consumed = 0 while True: - for event in self.sheerka.sdp.load_events(page_size, start): + for event in self.sheerka.om.current_sdp().load_events(page_size, start): consumed += 1 - if self.sheerka.sdp.has_result(event.get_digest()): + if self.sheerka.om.current_sdp().has_result(event.get_digest()): return event.get_digest() if consumed < page_size: diff --git a/src/core/sheerka/services/SheerkaRuleManager.py b/src/core/sheerka/services/SheerkaRuleManager.py index 1e2c67c..827a28b 100644 --- a/src/core/sheerka/services/SheerkaRuleManager.py +++ b/src/core/sheerka/services/SheerkaRuleManager.py @@ -4,10 +4,11 @@ from dataclasses import dataclass from typing import Union from cache.Cache import Cache +from cache.ListIfNeededCache import ListIfNeededCache from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept from core.builtin_helpers import parse_unrecognized, only_successful, ensure_rule from core.concept import Concept -from core.global_symbols import EVENT_RULE_PRECEDENCE_MODIFIED, RULE_COMPARISON_CONTEXT +from core.global_symbols import EVENT_RULE_PRECEDENCE_MODIFIED, RULE_COMPARISON_CONTEXT, NotFound from core.rule import Rule from core.sheerka.services.sheerka_service import BaseService from core.tokenizer import Keywords, 
TokenKind, Token, IterParser @@ -509,22 +510,25 @@ class SheerkaRuleManager(BaseService): RULE_IDS = "Rules_Ids" FORMAT_RULE_ENTRY = "RuleManager:FormatRules" EXEC_RULE_ENTRY = "RuleManager:ExecRules" + RULES_BY_NAME_ENTRY = "RuleManager:Rules_By_Name" def __init__(self, sheerka): super().__init__(sheerka) - self.format_rule_cache = Cache(default=lambda k: self.sheerka.sdp.get(self.FORMAT_RULE_ENTRY, k)) - self.exec_rule_cache = Cache(default=lambda k: self.sheerka.sdp.get(self.EXEC_RULE_ENTRY, k)) - self._format_rules = None # sorted by priority def initialize(self): self.sheerka.bind_service_method(self.create_new_rule, True, visible=False) self.sheerka.bind_service_method(self.get_rule_by_id, False) + self.sheerka.bind_service_method(self.get_rule_by_name, False) self.sheerka.bind_service_method(self.dump_desc_rule, False, as_name="desc_rule") self.sheerka.bind_service_method(self.get_format_rules, False, visible=False) - self.sheerka.cache_manager.register_cache(self.FORMAT_RULE_ENTRY, self.format_rule_cache, True, True) - self.sheerka.cache_manager.register_cache(self.EXEC_RULE_ENTRY, self.exec_rule_cache, True, True) + cache = Cache().auto_configure(self.FORMAT_RULE_ENTRY) + self.sheerka.om.register_cache(self.FORMAT_RULE_ENTRY, cache, True, True) + cache = Cache().auto_configure(self.EXEC_RULE_ENTRY) + self.sheerka.om.register_cache(self.EXEC_RULE_ENTRY, cache, True, True) + cache = ListIfNeededCache().auto_configure(self.RULES_BY_NAME_ENTRY) + self.sheerka.om.register_cache(self.RULES_BY_NAME_ENTRY, cache, True, True) def initialize_deferred(self, context, is_first_time): @@ -533,20 +537,29 @@ class SheerkaRuleManager(BaseService): self.init_builtin_rules(context) else: # adds the other rules (when it's not the first time) - self.format_rule_cache.populate(lambda: self.sheerka.sdp.list(self.FORMAT_RULE_ENTRY), lambda rule: rule.id) - self.exec_rule_cache.populate(lambda: self.sheerka.sdp.list(self.EXEC_RULE_ENTRY), lambda rule: rule.id) - 
self.format_rule_cache.reset_events() - self.exec_rule_cache.reset_events() + self.sheerka.om.populate(self.FORMAT_RULE_ENTRY, + lambda sdp: sdp.list(self.FORMAT_RULE_ENTRY), + lambda rule: rule.id, + reset_events=True, + all_ontologies=True) + self.sheerka.om.populate(self.EXEC_RULE_ENTRY, + lambda sdp: sdp.list(self.EXEC_RULE_ENTRY), + lambda rule: rule.id, + reset_events=True, + all_ontologies=True) - # compile all the rules - for rule_id in self.format_rule_cache: - rule = self.init_rule(context, self.format_rule_cache.get(rule_id)) + # compile all format the rules + for rule_id, rule_def in self.sheerka.om.get_all(self.FORMAT_RULE_ENTRY, cache_only=True).items(): + rule = self.init_rule(context, rule_def) # update rules priorities self.update_rules_priorities(context) self.sheerka.subscribe(EVENT_RULE_PRECEDENCE_MODIFIED, self.update_rules_priorities) + def reset_state(self): + self._format_rules = None + def update_rules_priorities(self, context): """ Ask the SheerkaComparisonManager for the priorities @@ -555,9 +568,8 @@ class SheerkaRuleManager(BaseService): # get the priorities rules_weights = self.sheerka.get_concepts_weights(BuiltinConcepts.PRECEDENCE, RULE_COMPARISON_CONTEXT) - # compile all the rules - for rule_id in self.format_rule_cache: - rule = self.format_rule_cache.get(rule_id) + # update the priorities + for rule in self.sheerka.om.list(self.FORMAT_RULE_ENTRY, cache_only=True): if rule.str_id in rules_weights: rule.priority = rules_weights[rule.str_id] @@ -623,7 +635,7 @@ class SheerkaRuleManager(BaseService): if rule.metadata.id is not None: return - rule.metadata.id = str(self.sheerka.cache_manager.get(self.sheerka.OBJECTS_IDS_ENTRY, self.RULE_IDS)) + rule.metadata.id = str(self.sheerka.om.get(self.sheerka.OBJECTS_IDS_ENTRY, self.RULE_IDS)) def create_new_rule(self, context, rule): """ @@ -642,10 +654,14 @@ class SheerkaRuleManager(BaseService): # save it if rule.metadata.action_type == "print": - 
self.sheerka.cache_manager.put(self.FORMAT_RULE_ENTRY, rule.metadata.id, rule) + self.sheerka.om.put(self.FORMAT_RULE_ENTRY, rule.metadata.id, rule) self._format_rules = None else: - self.sheerka.cache_manager.put(self.EXEC_RULE_ENTRY, rule.metadata.id, rule) + self.sheerka.om.put(self.EXEC_RULE_ENTRY, rule.metadata.id, rule) + + # save by name if needed + if rule.metadata.name: + self.sheerka.om.put(self.RULES_BY_NAME_ENTRY, rule.metadata.name, rule) # process the return if needed ret = sheerka.ret(self.NAME, True, sheerka.new(BuiltinConcepts.NEW_RULE, body=rule)) @@ -721,17 +737,28 @@ class SheerkaRuleManager(BaseService): if rule_id is None: return None - rule = self.format_rule_cache.get(rule_id) - if rule: + rule = self.sheerka.om.get(self.FORMAT_RULE_ENTRY, rule_id) + if rule is not NotFound: return rule - rule = self.exec_rule_cache.get(rule_id) - if rule: + rule = self.sheerka.om.get(self.EXEC_RULE_ENTRY, rule_id) + if rule is not NotFound: return rule metadata = [("id", rule_id)] return self.sheerka.new(BuiltinConcepts.UNKNOWN_RULE, body=metadata) + def get_rule_by_name(self, rule_name): + if rule_name is None: + return None + + rule = self.sheerka.om.get(self.RULES_BY_NAME_ENTRY, rule_name) + if rule is NotFound: + metadata = [("name", rule_name)] + return self.sheerka.new(BuiltinConcepts.UNKNOWN_RULE, body=metadata) + + return rule + def dump_desc_rule(self, rules): """ dumps the definition of a rule @@ -759,7 +786,9 @@ class SheerkaRuleManager(BaseService): if self._format_rules: return self._format_rules - self._format_rules = sorted(self.format_rule_cache.get_all(), key=operator.attrgetter('priority'), reverse=True) + self._format_rules = sorted(self.sheerka.om.list(self.FORMAT_RULE_ENTRY, cache_only=True), + key=operator.attrgetter('priority'), + reverse=True) return self._format_rules def add_evaluators(self, source, ret_vals): diff --git a/src/core/sheerka/services/SheerkaVariableManager.py b/src/core/sheerka/services/SheerkaVariableManager.py 
index 48a5d6c..32cb3b8 100644 --- a/src/core/sheerka/services/SheerkaVariableManager.py +++ b/src/core/sheerka/services/SheerkaVariableManager.py @@ -3,6 +3,7 @@ from typing import List from cache.Cache import Cache from core.builtin_concepts import BuiltinConcepts +from core.global_symbols import NotFound from core.sheerka.services.sheerka_service import ServiceObj, BaseService @@ -23,32 +24,52 @@ class Variable(ServiceObj): return f"({self.who}){self.key}={self.value}" +@dataclass +class InternalObj: + obj: object + + def __deepcopy__(self, memodict={}): + return self + + def __copy__(self): + return self + + class SheerkaVariableManager(BaseService): NAME = "VariableManager" - VARIABLES_ENTRY = "VariableManager:Variables" # entry for admin or internal variables + VARIABLES_ENTRY = "VariableManager:Variables" # entry for variables which will be copied in sdp + INTERNAL_VARIABLES_ENTRY = "VariableManager:InternalVariables" # internal to current process (can store lambda) def __init__(self, sheerka): super().__init__(sheerka) - self.bound = { - "sheerka.enable_process_return_values": "enable_process_return_values", - "sheerka.save_execution_context": "save_execution_context" + self.bound_variables = { + self.sheerka.name: {"enable_process_return_values", "save_execution_context"} } def initialize(self): self.sheerka.bind_service_method(self.record_var, True, visible=False) self.sheerka.bind_service_method(self.load_var, False, visible=False) + self.sheerka.bind_service_method(self.record_internal_var, True, visible=False) + self.sheerka.bind_service_method(self.load_internal_var, False, visible=False) self.sheerka.bind_service_method(self.delete_var, True, visible=False) self.sheerka.bind_service_method(self.set_var, True) self.sheerka.bind_service_method(self.get_var, False) self.sheerka.bind_service_method(self.list_vars, False) - cache = Cache() - cache.populate(lambda: self.sheerka.sdp.list(self.VARIABLES_ENTRY), lambda var: var.get_key()) - 
self.sheerka.cache_manager.register_cache(self.VARIABLES_ENTRY, cache, True, True) + cache = Cache().auto_configure(self.VARIABLES_ENTRY) + self.sheerka.om.register_cache(self.VARIABLES_ENTRY, cache, True, True) + cache.populate(lambda sdp: sdp.list(self.VARIABLES_ENTRY), lambda var: var.get_key()) - for variable in cache.get_all(): - if variable.key in self.bound: - setattr(self.sheerka, self.bound[variable.key], variable.value) + internal_vars = Cache().auto_configure(self.INTERNAL_VARIABLES_ENTRY) + self.sheerka.om.register_cache(self.INTERNAL_VARIABLES_ENTRY, internal_vars, False, False) + + def initialize_deferred(self, context, first_time): + # update bound variables + for who, keys in self.bound_variables.items(): + for key in keys: + if (variable := self.sheerka.om.get(self.VARIABLES_ENTRY, f"{who}|{key}")) is not NotFound: + service = self.sheerka if who == self.sheerka.name else self.sheerka.services[who] + setattr(service, key, variable.value) def record_var(self, context, who, key, value): """ @@ -61,34 +82,49 @@ class SheerkaVariableManager(BaseService): """ variable = Variable(context.event.get_digest(), who, key, value, None) - self.sheerka.cache_manager.put(self.VARIABLES_ENTRY, variable.get_key(), variable) + self.sheerka.om.put(self.VARIABLES_ENTRY, variable.get_key(), variable) # TODO: manage credentials - if key in self.bound: - setattr(self.sheerka, self.bound[key], value) - - return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS)) + if who in self.bound_variables and key in self.bound_variables[who]: + service = self.sheerka if who == self.sheerka.name else self.sheerka.services[who] + setattr(service, key, value) def load_var(self, who, key): - variable = self.sheerka.cache_manager.get(self.VARIABLES_ENTRY, who + "|" + key) - if variable is None: - return None + variable = self.sheerka.om.get(self.VARIABLES_ENTRY, who + "|" + key) + if variable is NotFound: + return NotFound return variable.value + def 
record_internal_var(self, context, who, key, value): + """ + Stores the value in the internal cache + This cache is not pushed to the remote repository + :param context: + :param who: entity that owns the key (acts as a namespace) + :param key: + :param value: + """ + self.sheerka.om.put(self.INTERNAL_VARIABLES_ENTRY, f"{who}|{key}", InternalObj(value)) + + def load_internal_var(self, who, key): + value = self.sheerka.om.get(self.INTERNAL_VARIABLES_ENTRY, f"{who}|{key}") + return NotFound if value is NotFound else value.obj + def delete_var(self, context, who, key): - self.sheerka.cache_manager.delete(self.VARIABLES_ENTRY, who + "|" + key) + self.sheerka.om.delete(self.VARIABLES_ENTRY, who + "|" + key) def set_var(self, context, key, value): - return self.record_var(context, context.event.user_id, key, value) + self.record_var(context, context.event.user_id, key, value) + return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS)) def get_var(self, context, key): return self.load_var(context.event.user_id, key) def list_vars(self, context, all_vars=False): if all_vars: - res = [str(v) for v in self.sheerka.cache_manager.copy(self.VARIABLES_ENTRY).values()] + res = [str(v) for v in self.sheerka.om.copy(self.VARIABLES_ENTRY).values()] else: - res = [str(v) for v in self.sheerka.cache_manager.copy(self.VARIABLES_ENTRY).values() if + res = [str(v) for v in self.sheerka.om.copy(self.VARIABLES_ENTRY).values() if v.who == context.event.user_id] return res diff --git a/src/core/sheerka/services/sheerka_service.py b/src/core/sheerka/services/sheerka_service.py index 89910d9..e4671e3 100644 --- a/src/core/sheerka/services/sheerka_service.py +++ b/src/core/sheerka/services/sheerka_service.py @@ -1,5 +1,8 @@ from dataclasses import dataclass +from core.global_symbols import NotFound +from core.utils import sheerka_deepcopy + @dataclass class ServiceObj: @@ -21,6 +24,13 @@ class BaseService: """ pass + def store_values(self, context, *args): + """ + Use 
variable Manager to store the state of the service + """ + for prop_name in args: + self.sheerka.record_var(context, self.NAME, prop_name, sheerka_deepcopy(getattr(self, prop_name))) + def restore_values(self, *args): """ Use Variable Manager to restore the state of a service @@ -28,5 +38,5 @@ class BaseService: :return: """ for prop_name in args: - if (value := self.sheerka.load_var(self.NAME, prop_name)) is not None: + if (value := self.sheerka.load_var(self.NAME, prop_name)) is not NotFound: setattr(self, prop_name, value) diff --git a/src/core/simple_debug.py b/src/core/simple_debug.py index 773c4cf..e2254ed 100644 --- a/src/core/simple_debug.py +++ b/src/core/simple_debug.py @@ -23,13 +23,13 @@ def my_debug(*args, check_started=None): if debug_name not in debug_activated: return - # with open("debug.txt", "a") as f: - # for arg in args: - # if isinstance(arg, list): - # for item in arg: - # f.write(f"{item}\n") - # else: - # f.write(f"{arg}\n") + with open("debug.txt", "a") as f: + for arg in args: + if isinstance(arg, list): + for item in arg: + f.write(f"{item}\n") + else: + f.write(f"{arg}\n") def start_debug(debug_name=default_debug_name, msg=None): diff --git a/src/core/utils.py b/src/core/utils.py index 3162a6f..8c46d20 100644 --- a/src/core/utils.py +++ b/src/core/utils.py @@ -5,8 +5,7 @@ import os import pkgutil from copy import deepcopy -from cache.Cache import Cache -from core.ast_helpers import ast_to_props +from core.global_symbols import CustomType from core.tokenizer import TokenKind, Tokenizer from pyparsing import * @@ -35,8 +34,6 @@ CONSOLE_COLORS_MAP = { PRIMITIVES_TYPES = (str, bool, type(None), int, float, list, dict, set, bytes, tuple, type) -expressions_cache = Cache() - ESC = Literal('\x1b') integer = Word(nums) escapeSeq = Combine(ESC + '[' + Optional(delimitedList(integer, ';')) + @@ -603,59 +600,6 @@ def flatten_all_children(item, get_children): return inner_get_all_children(item) -def evaluate_expression(expr, bag): - """ - Try to 
evaluate expr in context of bag - :param expr: - :param bag: - :return: - """ - - if expr is None or expr.strip() == "": - return None - - if expr in bag: - return bag[expr] - - props_definitions = expressions_cache.get(expr) - if props_definitions is None: - _ast = ast.parse(expr, mode="eval") - props_definitions = [] - ast_to_props(props_definitions, _ast.body, None) - props_definitions.reverse() - expressions_cache.put(expr, props_definitions) - - return evaluate_object(bag, props_definitions) - - -def evaluate_object(bag, properties): - """ - Evaluate the properties of an object - Works with evaluate_expression - :param bag: - :param properties: List of ast_helpers.PropDef - :return: - """ - for prop in properties: - try: - obj = bag[prop.prop] - except KeyError: - try: - obj = bag["self"][prop.prop] - except Exception: - raise NameError(prop.prop) - - if obj is None: - return None - - if prop.index is not None: - obj = obj[prop.index] - - bag = as_bag(obj) - - return obj - - def get_text_from_tokens(tokens, custom_switcher=None, tracker=None): """ Create the source code, from the list of token @@ -729,7 +673,9 @@ def sheerka_deepcopy(obj): return instance from core.concept import Concept - if isinstance(obj, dict): + if isinstance(obj, CustomType): + return obj + elif isinstance(obj, dict): res = {sheerka_deepcopy(k): sheerka_deepcopy(v) for k, v in obj.items()} return res elif isinstance(obj, list): @@ -759,6 +705,7 @@ class NextIdManager: """ solely return the next integer """ + def __init__(self): self.id = -1 diff --git a/src/evaluators/AddToMemoryEvaluator.py b/src/evaluators/AddToMemoryEvaluator.py index c9b23a0..57513f7 100644 --- a/src/evaluators/AddToMemoryEvaluator.py +++ b/src/evaluators/AddToMemoryEvaluator.py @@ -29,5 +29,5 @@ class AddToMemoryEvaluator(OneReturnValueEvaluator): service.registration.clear() return None - context.sheerka.add_registered_objects(context) + context.sheerka.commit_registered_objects(context) return None # no need to 
have a second pass diff --git a/src/evaluators/DefConceptEvaluator.py b/src/evaluators/DefConceptEvaluator.py index 16fca9d..3170f4c 100644 --- a/src/evaluators/DefConceptEvaluator.py +++ b/src/evaluators/DefConceptEvaluator.py @@ -137,7 +137,7 @@ class DefConceptEvaluator(OneReturnValueEvaluator): names = [str(t.value) for t in ret_value.tokens if t.type in ( TokenKind.IDENTIFIER, TokenKind.STRING, TokenKind.KEYWORD)] debugger.debug_var("names", names, hint="from NameNode") - return set(filter(lambda x: x in concept_name and context.sheerka.not_is_variable(x), names)) + return set(filter(lambda x: x in concept_name and context.sheerka.is_not_a_variable(x), names)) # # case of BNF @@ -156,7 +156,7 @@ class DefConceptEvaluator(OneReturnValueEvaluator): visitor = UnreferencedVariablesVisitor(context) names = visitor.get_names(python_node.ast_) debugger.debug_var("names", names, hint="from python node") - return set(filter(lambda x: x in concept_name and context.sheerka.not_is_variable(x), names)) + return set(filter(lambda x: x in concept_name and context.sheerka.is_not_a_variable(x), names)) else: return set() diff --git a/src/evaluators/PostExecutionEvaluator.py b/src/evaluators/PostExecutionEvaluator.py index c90cf60..fba29e9 100644 --- a/src/evaluators/PostExecutionEvaluator.py +++ b/src/evaluators/PostExecutionEvaluator.py @@ -1,5 +1,6 @@ from core.builtin_concepts import BuiltinConcepts -from core.concept import Concept, NotInit +from core.concept import Concept +from core.global_symbols import NotInit from evaluators.BaseEvaluator import OneReturnValueEvaluator @@ -8,6 +9,7 @@ class PostExecutionEvaluator(OneReturnValueEvaluator): Last chance to alter the return_value This evaluator is supposed to be a generic evaluator for all rules that must be executed just before the aggregations + As of now, the AUTO_EVAL rule implementation is simply hardcoded """ NAME = "PostExecution" @@ -20,12 +22,11 @@ class PostExecutionEvaluator(OneReturnValueEvaluator): if 
len(evaluation_parents) > 1: return False # It must be executed only when the top level context - # only support the rule for the COMMANDS value = return_value.body return isinstance(value, Concept) and context.sheerka.isa(value, context.sheerka.new(BuiltinConcepts.AUTO_EVAL)) def eval(self, context, return_value): - # only support the rule for the COMMANDS ?? + # only support the rule for the AUTO_EVAL return context.sheerka.ret( self.name, True, diff --git a/src/evaluators/PythonEvaluator.py b/src/evaluators/PythonEvaluator.py index 4b55680..c0f22b7 100644 --- a/src/evaluators/PythonEvaluator.py +++ b/src/evaluators/PythonEvaluator.py @@ -7,7 +7,8 @@ import core.builtin_helpers import core.utils from core.ast_helpers import UnreferencedNamesVisitor, NamesWithAttributesVisitor from core.builtin_concepts import BuiltinConcepts, ParserResultConcept -from core.concept import ConceptParts, Concept, NotInit +from core.concept import ConceptParts, Concept +from core.global_symbols import NotInit, NotFound from core.rule import Rule from core.sheerka.ExecutionContext import ExecutionContext from core.tokenizer import Token, TokenKind @@ -127,12 +128,16 @@ class PythonEvaluator(OneReturnValueEvaluator): for globals_ in all_possible_globals: try: # eval + my_locals = {} if isinstance(node.ast_, ast.Expression): context.log("Evaluating using 'eval'.", self.name) - evaluated = eval(node.get_compiled(), globals_, sheerka.locals) + evaluated = eval(node.get_compiled(), globals_, my_locals) else: context.log("Evaluating using 'exec'.", self.name) - evaluated = self.exec_with_return(node.ast_, globals_, sheerka.locals) + evaluated = self.exec_with_return(node.ast_, globals_, my_locals) + + # TODO find a better implementation using SheerkaMemory + sheerka.locals.update(my_locals) if not expect_success or evaluated: break # in this first version, we stop once a success is found @@ -140,8 +145,8 @@ class PythonEvaluator(OneReturnValueEvaluator): if concepts_entries is None: 
concepts_entries = self.get_concepts_entries_from_globals(my_globals) eval_error = PythonEvalError(ex, - traceback.format_exc() if get_trace_back else None, - self.get_concepts_values_from_globals(globals_, concepts_entries)) + traceback.format_exc() if get_trace_back else None, + self.get_concepts_values_from_globals(globals_, concepts_entries)) errors.append(eval_error) exception_debugger.debug_var("exception", eval_error.error, is_error=True) exception_debugger.debug_var("trace", eval_error.traceback, is_error=True) @@ -223,8 +228,13 @@ class PythonEvaluator(OneReturnValueEvaluator): my_globals["sheerka"] = Expando(bag) continue + # search in local variables. To remove when local variables will be merged with memory + if name in context.sheerka.locals: + my_globals[name] = context.sheerka.locals[name] + continue + # search in short term memory - if (obj := context.get_from_short_term_memory(name)) is not None: + if (obj := context.get_from_short_term_memory(name)) is not NotFound: context.log(f"Resolving '{name}'. 
Using value found in STM.", self.name) my_globals[name] = obj continue diff --git a/src/evaluators/ReturnBodyEvaluator.py b/src/evaluators/ReturnBodyEvaluator.py index a0da08e..3b06d2b 100644 --- a/src/evaluators/ReturnBodyEvaluator.py +++ b/src/evaluators/ReturnBodyEvaluator.py @@ -1,5 +1,6 @@ from core.builtin_concepts import BuiltinConcepts -from core.concept import Concept, NotInit +from core.concept import Concept +from core.global_symbols import NotInit from evaluators.BaseEvaluator import AllReturnValuesEvaluator diff --git a/src/evaluators/RuleEvaluator.py b/src/evaluators/RuleEvaluator.py index ed661c9..49c09a5 100644 --- a/src/evaluators/RuleEvaluator.py +++ b/src/evaluators/RuleEvaluator.py @@ -1,4 +1,5 @@ from core.builtin_concepts import BuiltinConcepts, ParserResultConcept +from core.global_symbols import NotFound from core.rule import Rule, ACTION_TYPE_DEFERRED from evaluators.BaseEvaluator import OneReturnValueEvaluator @@ -35,7 +36,7 @@ class RuleEvaluator(OneReturnValueEvaluator): # Browse the rules to find possible deferred rules if r.metadata.action_type == ACTION_TYPE_DEFERRED: rule_id = sheerka.get_from_short_term_memory(context, r.id) - rule = sheerka.get_rule_by_id(str(rule_id or r.id)) + rule = sheerka.get_rule_by_id(str(rule_id if rule_id is not NotFound else r.id)) resolved.append(rule) success &= isinstance(rule, Rule) else: diff --git a/src/out/DeveloperVisitor.py b/src/out/DeveloperVisitor.py index b3dadca..152167e 100644 --- a/src/out/DeveloperVisitor.py +++ b/src/out/DeveloperVisitor.py @@ -1,6 +1,7 @@ +from core.builtin_helpers import evaluate_expression from core.sheerka.services.SheerkaRuleManager import FormatAstVariable, FormatAstVariableNotFound, FormatAstColor, \ FormatAstList, FormatAstRawText, FormatAstDict -from core.utils import evaluate_expression, as_bag +from core.utils import as_bag fstring = compile('f"{value:{format}}"', "DeveloperVisitor.fstring", mode="eval") diff --git a/src/parsers/BaseNodeParser.py 
b/src/parsers/BaseNodeParser.py index 4ce6df4..3306dde 100644 --- a/src/parsers/BaseNodeParser.py +++ b/src/parsers/BaseNodeParser.py @@ -6,6 +6,7 @@ from typing import Set import core.utils from core.builtin_concepts import BuiltinConcepts from core.concept import VARIABLE_PREFIX, Concept, DEFINITION_TYPE_BNF, ConceptParts +from core.global_symbols import NotFound from core.rule import Rule from core.tokenizer import TokenKind, Token from parsers.BaseParser import Node, BaseParser, ParsingError @@ -817,12 +818,6 @@ class BaseNodeParser(BaseParser): def __init__(self, name, priority, **kwargs): super().__init__(name, priority, yield_eof=True) - if 'sheerka' in kwargs: - sheerka = kwargs.get("sheerka") - self.concepts_by_first_keyword = sheerka.resolved_concepts_by_first_keyword - - else: - self.concepts_by_first_keyword = None def init_from_concepts(self, context, concepts, **kwargs): """ @@ -832,8 +827,12 @@ class BaseNodeParser(BaseParser): :param concepts :return: """ - concepts_by_first_keyword = self.get_concepts_by_first_token(context, concepts).body - self.concepts_by_first_keyword = self.resolve_concepts_by_first_keyword(context, concepts_by_first_keyword).body + concepts_by_first_keyword = self.compute_concepts_by_first_token(context, concepts).body + resolved = self.resolve_concepts_by_first_keyword(context, concepts_by_first_keyword).body + + context.sheerka.om.put(context.sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, + False, + resolved) def get_concepts(self, token, to_keep, custom=None, to_map=None, strip_quotes=False): """ @@ -858,24 +857,25 @@ class BaseNodeParser(BaseParser): custom_concepts = custom(name) if custom else [] # to get extra concepts using an alternative method result = [] - if name in self.concepts_by_first_keyword: - for concept_id in self.concepts_by_first_keyword.get(name): + concepts_ids = self.sheerka.om.get(self.sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, name) + if concepts_ids is NotFound: + return custom_concepts 
if custom else None - concept = self.sheerka.get_by_id(concept_id) + for concept_id in concepts_ids: - if not to_keep(concept): - continue + concept = self.sheerka.get_by_id(concept_id) - concept = to_map(concept, self, self.sheerka) if to_map else concept - result.append(concept) + if not to_keep(concept): + continue - return core.utils.make_unique(result + custom_concepts, - lambda c: c.concept.id if hasattr(c, "concept") else c.id) + concept = to_map(concept, self, self.sheerka) if to_map else concept + result.append(concept) - return custom_concepts if custom else None + return core.utils.make_unique(result + custom_concepts, + lambda c: c.concept.id if hasattr(c, "concept") else c.id) @staticmethod - def get_concepts_by_first_token(context, concepts, use_sheerka=False, previous_entries=None): + def compute_concepts_by_first_token(context, concepts, use_sheerka=False, previous_entries=None): """ Create the map describing the first token expected by a concept :param context: @@ -885,7 +885,7 @@ class BaseNodeParser(BaseParser): :return: """ sheerka = context.sheerka - res = sheerka.cache_manager.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) if use_sheerka else (previous_entries or {}) + res = sheerka.om.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) if use_sheerka else (previous_entries or {}) for concept in concepts: keywords = BaseNodeParser.get_first_tokens(sheerka, concept) @@ -966,7 +966,7 @@ class BaseNodeParser(BaseParser): for concept_id in concepts_in_recursion: # make sure we keep the longest chain old = sheerka.chicken_and_eggs.get(concept_id) - if old is None or len(old) < len(ex.concepts): + if old is NotFound or len(old) < len(ex.concepts): sheerka.chicken_and_eggs.put(concept_id, concepts_in_recursion) else: res.setdefault(k, []).extend(v) diff --git a/src/parsers/ShortTermMemoryParser.py b/src/parsers/ShortTermMemoryParser.py index 1bcabc4..cd5615d 100644 --- a/src/parsers/ShortTermMemoryParser.py +++ b/src/parsers/ShortTermMemoryParser.py @@ -1,4 
+1,5 @@ from core.builtin_concepts import BuiltinConcepts +from core.global_symbols import NotFound from core.sheerka.services.SheerkaExecute import ParserInput from parsers.BaseParser import BaseParser @@ -34,11 +35,10 @@ class ShortTermMemoryParser(BaseParser): concept_name = parser_input.as_text() concept = sheerka.get_from_short_term_memory(context, concept_name) - if concept: + if concept is NotFound: + body = sheerka.new(BuiltinConcepts.NOT_FOUND, body=concept_name) + return sheerka.ret(self.name, False, body) + else: # Unlike what is usually done, we directly return the concept, not a ParsingResult of the concept # This is to save the evaluation time cost return sheerka.ret(self.name, True, concept) - - else: - body = sheerka.new(BuiltinConcepts.NOT_FOUND, body=concept_name) - return sheerka.ret(self.name, False, body) diff --git a/src/parsers/SyaNodeParser.py b/src/parsers/SyaNodeParser.py index c364e5a..aeb2173 100644 --- a/src/parsers/SyaNodeParser.py +++ b/src/parsers/SyaNodeParser.py @@ -126,15 +126,15 @@ class SyaConceptDef: # first, try to look in the parser # it is where to find the data during the unit tests - if parser and concept.id in parser.sya_definitions: + if parser and concept.id in parser.test_only_sya_definitions: # Manage when precedence and associativity are given in the unit tests - sya_def = parser.sya_definitions.get(concept.id) + sya_def = parser.test_only_sya_definitions.get(concept.id) if sya_def[0] is not None: sya_concept_def.precedence = sya_def[0] if sya_def[1] is not None: sya_concept_def.associativity = sya_def[1] - # otherwise, use sheerka + # otherwise, use sheerka # KSI 20210109 otherwise or override ?? 
if sheerka: concept_weight = parser.sheerka.get_concepts_weights(BuiltinConcepts.PRECEDENCE, CONCEPT_COMPARISON_CONTEXT) if concept.str_id in concept_weight: @@ -332,7 +332,7 @@ class InFixToPostFix: def _add_debug(self, debug_info: DebugInfo): if debug_info.level is None or (self.enabled_debug_levels and (f"#{self.id}.{debug_info.level}" in self.enabled_debug_levels or - "*" in self.enabled_debug_levels)): + "*" in self.enabled_debug_levels)): self.debug.append(debug_info) def _is_lpar(self, token): @@ -1134,20 +1134,14 @@ class SyaNodeParser(BaseNodeParser): def __init__(self, **kwargs): super().__init__(SyaNodeParser.NAME, 50, **kwargs) - if 'sheerka' in kwargs: - sheerka = kwargs.get("sheerka") - self.sya_definitions = sheerka.resolved_sya_def - - else: - self.concepts_by_first_keyword = {} - self.sya_definitions = {} + self.test_only_sya_definitions = {} def init_from_concepts(self, context, concepts, **kwargs): super().init_from_concepts(context, concepts) sya_definitions = kwargs.get("sya", None) if sya_definitions: - self.sya_definitions = sya_definitions + self.test_only_sya_definitions = sya_definitions @staticmethod def _is_eligible(concept): @@ -1431,10 +1425,3 @@ class SyaNodeParser(BaseNodeParser): result.append(infix_to_postfix) return result - - # @staticmethod - # def init_sheerka(self, sheerka): - # if hasattr(BaseNodeParser, "init_sheerka"): - # BaseNodeParser.init_sheerka(sheerka) - # - # # init syadefinitins diff --git a/src/printer/SheerkaPrinter.py b/src/printer/SheerkaPrinter.py index 2858e97..ac6f869 100644 --- a/src/printer/SheerkaPrinter.py +++ b/src/printer/SheerkaPrinter.py @@ -1,7 +1,8 @@ import types from core.builtin_concepts import BuiltinConcepts -from core.concept import Concept, NotInit +from core.concept import Concept +from core.global_symbols import NotInit from printer.FormatInstructions import FormatInstructions, FormatDetailType from printer.Formatter import Formatter diff --git a/src/sdp/readme.md b/src/sdp/readme.md index 
5a9469c..eaab097 100644 --- a/src/sdp/readme.md +++ b/src/sdp/readme.md @@ -16,6 +16,7 @@ - O : ServiceObj (from pickle) - M : MemoryObject (using SheerkaPickle) - X : Rule (from sheerkaPickle, 'X' stands for nothing, I am running out of meaningful letters) +- T : CustomType ## How concepts are serialized ? - get the id of the concept diff --git a/src/sdp/sheerkaDataProvider.py b/src/sdp/sheerkaDataProvider.py index b007188..2c19bd2 100644 --- a/src/sdp/sheerkaDataProvider.py +++ b/src/sdp/sheerkaDataProvider.py @@ -1,10 +1,13 @@ import hashlib import json +import shutil import time from dataclasses import dataclass from datetime import datetime, date from threading import RLock +from os import path +from core.global_symbols import NotFound from core.sheerka_logger import get_logger from sdp.sheerkaDataProviderIO import SheerkaDataProviderIO from sdp.sheerkaSerializer import Serializer, SerializerContext @@ -71,6 +74,22 @@ class Event(object): self.parents = as_dict["parents"] self._digest = as_dict["_digest"] # freeze the digest + def __eq__(self, other): + if id(self) == id(other): + return True + + if isinstance(other, Event): + return (self.version == other.version and + self.user_id == other.user_id and + self.date == other.date and + self.message == other.message and + self.parents == other.parents) + + return False + + def __hash__(self): + return hash(self.get_digest()) + class State: """ @@ -140,10 +159,10 @@ class SheerkaDataProviderTransaction: else: items = self.sdp.REF_PREFIX + self.sdp.save_obj(items) - if key: - self.state.data[entry][key] = items - else: + if key is None: self.state.data[entry] = items + else: + self.state.data[entry][key] = items def remove(self, entry, key): """ @@ -189,12 +208,14 @@ class SheerkaDataProvider: StateFolder = "state" ObjectsFolder = "objects" CacheFolder = "cache" + RefFolder = "refs" HeadFile = "HEAD" LastEventFile = "LAST_EVENT" KeysFile = "keys" + OntologiesFiles = "ontologies" REF_PREFIX = "##REF##:" - def 
__init__(self, root=None, sheerka=None): + def __init__(self, root=None, sheerka=None, name="__default__"): self.log = get_logger(__name__) self.init_log = get_logger("init." + __name__) self.init_log.debug("Initializing sdp.") @@ -202,6 +223,7 @@ class SheerkaDataProvider: self.sheerka = sheerka self.io = SheerkaDataProviderIO.get(root) self.first_time = self.io.first_time + self.name = name self.serializer = Serializer() self.lock = RLock() @@ -218,10 +240,10 @@ class SheerkaDataProvider: stream.seek(0) return sha256_hash.hexdigest() - def get_transaction(self, event): + def get_transaction(self, event) -> SheerkaDataProviderTransaction: return SheerkaDataProviderTransaction(self, event) - def get(self, entry, key=None, default=None, load_origin=True): + def get(self, entry, key=None, default=NotFound, load_origin=True): """ Get an element :param entry: @@ -307,7 +329,7 @@ class SheerkaDataProvider: :param event: :return: digest of the event """ - parent = self.get_snapshot(SheerkaDataProvider.LastEventFile) + parent = self.get_last_event() event.parents = [parent] if parent else None digest = event.get_digest() # must be call after setting the parents @@ -316,7 +338,7 @@ class SheerkaDataProvider: return digest self.io.write_binary(target_path, self.serializer.serialize(event, None).read()) - self.set_snapshot(SheerkaDataProvider.LastEventFile, digest) + self.set_last_event(digest) return digest @@ -326,7 +348,7 @@ class SheerkaDataProvider: :param digest: :return: """ - digest = digest or self.get_snapshot(SheerkaDataProvider.LastEventFile) + digest = digest or self.get_last_event() if digest is None: return None @@ -339,7 +361,7 @@ class SheerkaDataProvider: """ Load multiple events in the same command :param start: - :param page_size: + :param page_size: = -1 to load everything :return: """ @@ -365,16 +387,26 @@ class SheerkaDataProvider: digest = event.parents[0] count += 1 + def get_last_event(self): + last_event_file = self.io.path_join(self.LastEventFile) 
+ if not self.io.exists(last_event_file): + return None + return self.io.read_text(last_event_file) + + def set_last_event(self, digest): + last_event_file = self.io.path_join(self.LastEventFile) + return self.io.write_text(last_event_file, digest) + + def set_snapshot(self, file, digest): + head_file = self.io.path_join(self.RefFolder, self.name, file) + return self.io.write_text(head_file, digest) + def get_snapshot(self, file): - head_file = self.io.path_join(file) + head_file = self.io.path_join(self.RefFolder, self.name, file) if not self.io.exists(head_file): return None return self.io.read_text(head_file) - def set_snapshot(self, file, digest): - head_file = self.io.path_join(file) - return self.io.write_text(head_file, digest) - def load_state(self, digest): if digest is None: return State() @@ -516,3 +548,26 @@ class SheerkaDataProvider: elif not isinstance(obj, str): setattr(obj, Serializer.ORIGIN, digest) return obj + + def save_ontologies(self, ontologies_names): + """ + Keep track of the sequence of ontologies + This is a quick and dirty ontology management + I would like the ontologies to have a digest and to know what is their parent + """ + ontology_file = self.io.path_join(SheerkaDataProvider.OntologiesFiles) + text = "\n".join(ontologies_names) + self.io.write_text(ontology_file, text) + + def load_ontologies(self): + ontology_file = self.io.path_join(SheerkaDataProvider.OntologiesFiles) + if not self.io.exists(ontology_file): + return [] + + text = self.io.read_text(ontology_file) + return text.split("\n") + + def test_only_destroy_refs(self): + current_sdp_refs_folder = self.io.path_join(self.RefFolder, self.name) + if path.exists(current_sdp_refs_folder): + shutil.rmtree(current_sdp_refs_folder) diff --git a/src/sdp/sheerkaSerializer.py b/src/sdp/sheerkaSerializer.py index 9804ae0..407bf7f 100644 --- a/src/sdp/sheerkaSerializer.py +++ b/src/sdp/sheerkaSerializer.py @@ -8,6 +8,7 @@ from enum import Enum import sheerkapickle from core.concept 
import Concept +from core.global_symbols import CustomType, NotInit, NotFound, Removed from core.rule import Rule from core.sheerka_logger import get_logger from core.utils import get_full_qualified_name, get_class @@ -64,6 +65,7 @@ class Serializer: self.register(MemoryObjectSerializer()) # before ServiceObjSerializer self.register(ServiceObjSerializer()) self.register(RuleSerializer()) + self.register(CustomTypeSerializer()) def register(self, serializer): """ @@ -305,3 +307,26 @@ class MemoryObjectSerializer(SheerkaPickleSerializer): class RuleSerializer(SheerkaPickleSerializer): def __init__(self): super().__init__(lambda obj: isinstance(obj, Rule), "X", 1) + + +class CustomTypeSerializer(BaseSerializer): + def __init__(self): + BaseSerializer.__init__(self, "T", 1) + + def matches(self, obj): + return isinstance(obj, CustomType) + + def dump(self, stream, obj, context): + stream.write(obj.value.encode("utf-8")) + stream.seek(0) + return stream + + def load(self, stream, context): + value = stream.read().decode("utf-8") + if value == NotInit.value: + return NotInit + elif value == NotFound.value: + return NotFound + elif value == Removed.value: + return Removed + raise NotImplementedError(value) diff --git a/src/sheerkapickle/SheerkaPickler.py index 804ea63..b0de06e 100644 --- a/src/sheerkapickle/SheerkaPickler.py +++ b/src/sheerkapickle/SheerkaPickler.py @@ -2,16 +2,15 @@ import json from logging import Logger import core.utils -from core.concept import Concept, NotInitialized +from core.concept import Concept from core.sheerka.services.SheerkaExecute import ParserInput -from core.simple_debug import my_debug from sheerkapickle import utils, tags, handlers def encode(sheerka, obj): pickler = SheerkaPickler(sheerka) flatten = pickler.flatten(obj) - my_debug(f"{obj} ids={len(pickler.ids)}, objs={len(pickler.objs)}") + # my_debug(f"{obj} ids={len(pickler.ids)}, objs={len(pickler.objs)}") return json.dumps(flatten) @@ -38,9 +37,10
@@ class SheerkaPickler: self.to_reduce.append(ToReduce(lambda o: isinstance(o, Logger), lambda o: None)) from parsers.BaseParser import BaseParser from evaluators.BaseEvaluator import BaseEvaluator + from core.sheerka.SheerkaOntologyManager import Ontology self.to_reduce.append(ToReduce(lambda o: isinstance(o, (BaseParser, BaseEvaluator)), lambda o: o.name)) self.to_reduce.append(ToReduce(lambda o: isinstance(o, ParserInput), lambda o: o.as_text())) - self.to_reduce.append(ToReduce(lambda o: isinstance(o, NotInitialized), lambda o: None)) + self.to_reduce.append(ToReduce(lambda o: isinstance(o, Ontology), lambda o: o.name)) def flatten(self, obj): if utils.is_to_discard(obj): @@ -49,6 +49,9 @@ class SheerkaPickler: if utils.is_primitive(obj): return obj + if utils.is_custom_type(obj): + return self._flatten_custom_type(obj) + if utils.is_type(obj): return str(obj) @@ -133,6 +136,18 @@ class SheerkaPickler: return data + def _flatten_custom_type(self, obj): + # check if the object was already seen + exists, _id = self.exist(obj) + if exists: + return {tags.ID: _id} + else: + self.id_count = self.id_count + 1 + self.ids[id(obj)] = self.id_count + self.objs.append(obj) + + return {tags.CUSTOM: obj.value} + def exist(self, obj): try: v = self.ids[id(obj)] diff --git a/src/sheerkapickle/SheerkaUnpickler.py b/src/sheerkapickle/SheerkaUnpickler.py index ac3661e..cb150f4 100644 --- a/src/sheerkapickle/SheerkaUnpickler.py +++ b/src/sheerkapickle/SheerkaUnpickler.py @@ -1,6 +1,7 @@ import json import core.utils +from core.global_symbols import NotInit, NotFound, Removed from sheerkapickle import tags, utils, handlers @@ -20,6 +21,9 @@ class SheerkaUnpickler: if has_tag(obj, tags.TUPLE): return self._restore_tuple(obj) + if has_tag(obj, tags.CUSTOM): + return self._restore_custom(obj) + if has_tag(obj, tags.SET): return self._restore_set(obj) @@ -43,6 +47,19 @@ class SheerkaUnpickler: def _restore_tuple(self, obj): return tuple([self.restore(v) for v in obj[tags.TUPLE]]) + 
def _restore_custom(self, obj): + if obj[tags.CUSTOM] == NotInit.value: + instance = NotInit + elif obj[tags.CUSTOM] == NotFound.value: + instance = NotFound + elif obj[tags.CUSTOM] == Removed.value: + instance = Removed + else: + raise KeyError(f"unknown {obj[tags.CUSTOM]}") + + self.objs.append(instance) + return instance + def _restore_set(self, obj): return set([self.restore(v) for v in obj[tags.SET]]) diff --git a/src/sheerkapickle/sheerka_handlers.py b/src/sheerkapickle/sheerka_handlers.py index 3289df7..b09fc43 100644 --- a/src/sheerkapickle/sheerka_handlers.py +++ b/src/sheerkapickle/sheerka_handlers.py @@ -1,7 +1,7 @@ import core.utils from core.builtin_concepts import UserInputConcept, ReturnValueConcept, BuiltinConcepts -from core.concept import Concept, PROPERTIES_TO_SERIALIZE as CONCEPT_PROPERTIES_TO_SERIALIZE, ConceptParts, NotInit, \ - get_concept_attrs +from core.concept import Concept, PROPERTIES_TO_SERIALIZE as CONCEPT_PROPERTIES_TO_SERIALIZE +from core.global_symbols import NotInit from core.rule import Rule from core.sheerka.ExecutionContext import ExecutionContext, PROPERTIES_TO_SERIALIZE as CONTEXT_PROPERTIES_TO_SERIALIZE from core.sheerka.Sheerka import Sheerka @@ -221,4 +221,3 @@ def initialize_pickle_handlers(): registry.register(ExecutionContext, ExecutionContextHandler, True) registry.register(Rule, RuleContextHandler, True) registry.register(PythonNode, PythonNodeHandler, True) - diff --git a/src/sheerkapickle/tags.py b/src/sheerkapickle/tags.py index 59e0976..afd51b1 100644 --- a/src/sheerkapickle/tags.py +++ b/src/sheerkapickle/tags.py @@ -3,3 +3,4 @@ TUPLE = "_sheerka/tuple" SET = "_sheerka/set" OBJECT = "_sheerka/obj" ENUM = "_sheerka/enum" +CUSTOM = "_sheerka/custom" diff --git a/src/sheerkapickle/utils.py b/src/sheerkapickle/utils.py index 6e989bf..36dd66d 100644 --- a/src/sheerkapickle/utils.py +++ b/src/sheerkapickle/utils.py @@ -2,6 +2,8 @@ import base64 import types from enum import Enum +from core.global_symbols import 
CustomType + class_types = (type,) PRIMITIVES = (str, bool, type(None), int, float) @@ -17,6 +19,10 @@ def is_enum(obj): return isinstance(obj, Enum) +def is_custom_type(obj): + return isinstance(obj, CustomType) + + def is_object(obj): """Returns True is obj is a reference to an object instance.""" @@ -36,7 +42,7 @@ def is_primitive(obj): def is_dictionary(obj): - return isinstance(obj, dict) + return isinstance(obj, dict) def is_list(obj): diff --git a/tests/BaseTest.py b/tests/BaseTest.py index 8b99308..254b729 100644 --- a/tests/BaseTest.py +++ b/tests/BaseTest.py @@ -1,17 +1,101 @@ import ast +from dataclasses import dataclass, field from core.builtin_concepts import ReturnValueConcept, ParserResultConcept, BuiltinConcepts -from core.concept import Concept, DEFINITION_TYPE_BNF, DEFINITION_TYPE_DEF -from core.rule import Rule +from core.concept import Concept, DEFINITION_TYPE_BNF, DEFINITION_TYPE_DEF, freeze_concept_attrs +from core.rule import Rule, ACTION_TYPE_PRINT from core.sheerka.ExecutionContext import ExecutionContext +from core.sheerka.Sheerka import Sheerka from core.sheerka.services.SheerkaRuleManager import SheerkaRuleManager from parsers.BnfDefinitionParser import BnfDefinitionParser from parsers.BnfNodeParser import StrMatch from sdp.sheerkaDataProvider import Event +@dataclass +class InitTestHelper: + sheerka: Sheerka + context: ExecutionContext + items: list = field(default_factory=list) + + def push(self, *items): + self.items.extend(items) + + def unpack(self): + return self.sheerka, self.context, *self.items + + def with_concepts(self, *concepts, **kwargs): + create_new = kwargs.get("create_new", False) + + for c in concepts: + if isinstance(c, str): + c = Concept(c) + + if c.get_metadata().definition and c.get_metadata().definition_type != DEFINITION_TYPE_DEF: + desc = f"Resolving BNF {c.get_metadata().definition}" + with self.context.push(BuiltinConcepts.INIT_BNF, + c, + obj=c, + desc=desc) as sub_context: + + bnf_parser = 
BnfDefinitionParser() + res = bnf_parser.parse(sub_context, c.get_metadata().definition) + if res.status: + c.set_bnf(res.value.value) + c.get_metadata().definition_type = DEFINITION_TYPE_BNF + else: + raise Exception(f"Error in bnf definition '{c.get_metadata().definition}'", + self.sheerka.get_error(res)) + + if create_new: + self.sheerka.create_new_concept(self.context, c) + else: + c.init_key() + self.sheerka.set_id_if_needed(c, False) + self.sheerka.test_only_add_in_cache(c) + freeze_concept_attrs(c) + + self.items.append(c) + + return self + + def with_rules(self, *rules, **kwargs): + create_new = kwargs.get("create_new", True) + compile_rule = kwargs.get("compile_rule", True) + + for rule_template in rules: + if isinstance(rule_template, tuple): + if len(rule_template) == 3: + rule = Rule(ACTION_TYPE_PRINT, rule_template[0], rule_template[1], rule_template[2]) + else: + rule = Rule(ACTION_TYPE_PRINT, None, rule_template[0], rule_template[1]) + else: + rule = rule_template + + is_enabled = rule.metadata.is_enabled # remember the value... 
+ + if compile_rule: + self.sheerka.services[SheerkaRuleManager.NAME].init_rule(self.context, rule) + else: + rule.metadata.is_compiled = True + + if create_new: + res = self.sheerka.create_new_rule(self.context, rule) + if not res.status: + raise Exception(f"Error in rule definition '{res.body}'", + self.sheerka.get_error(res)) + self.items.append(res.body.body) + else: + self.items.append(rule) + + if is_enabled is not None: # ...and back the value if it was not None + rule.metadata.is_enabled = is_enabled + + return self + + class BaseTest: - def get_sheerka(self, **kwargs): + def get_sheerka(self, **kwargs) -> Sheerka: pass def get_context(self, sheerka=None, eval_body=False, eval_where=False): @@ -23,6 +107,20 @@ class BaseTest: return context + @staticmethod + def get_init_test_args(**kwargs): + return {k: v for k, v in kwargs.items() if k in ["cache_only", "ontology", "eval_body", "eval_where"]} + + @staticmethod + def get_with_concepts_args(**kwargs): + return {k: v for k, v in kwargs.items() if k in ["create_new"]} + + def init_test(self, cache_only=None, ontology=None, eval_body=False, eval_where=False): + sheerka = self.get_sheerka(cache_only=cache_only, ontology=ontology) + context = self.get_context(sheerka=sheerka, eval_body=eval_body, eval_where=eval_where) + + return InitTestHelper(sheerka, context) + def get_default_concept(self): concept = Concept( name="a + b", @@ -47,77 +145,12 @@ class BaseTest: return [t.repr_value for t in tokens] def init_concepts(self, *concepts, **kwargs): - sheerka = self.get_sheerka(**kwargs) + init_test_args = self.get_init_test_args(**kwargs) + with_concepts_args = self.get_with_concepts_args(**kwargs) + return self.init_test(**init_test_args).with_concepts(*concepts, **with_concepts_args).unpack() - context_args = dict([(k, v) for k, v in kwargs.items() if k in ["sheerka", "eval_body", "eval_where"]]) - context = self.get_context(sheerka, **context_args) - create_new = kwargs.get("create_new", None) - - result = [] - 
for c in concepts: - if isinstance(c, str): - c = Concept(c) - - if c.get_metadata().definition and c.get_metadata().definition_type != DEFINITION_TYPE_DEF: - desc = f"Resolving BNF {c.get_metadata().definition}" - with context.push(BuiltinConcepts.INIT_BNF, - c, - obj=c, - desc=desc) as sub_context: - - bnf_parser = BnfDefinitionParser() - res = bnf_parser.parse(sub_context, c.get_metadata().definition) - if res.status: - c.set_bnf(res.value.value) - c.get_metadata().definition_type = DEFINITION_TYPE_BNF - else: - raise Exception(f"Error in bnf definition '{c.get_metadata().definition}'", - sheerka.get_error(res)) - - if create_new: - sheerka.create_new_concept(context, c) - else: - c.init_key() - sheerka.set_id_if_needed(c, False) - sheerka.test_only_add_in_cache(c) - - result.append(c) - - return sheerka, context, *result - - def init_format_rules(self, *rules, create_new=True, compile_rule=True, concepts=None, **kwargs): - if concepts: - sheerka, context, *concepts = self.init_concepts(*concepts, **kwargs) - else: - sheerka, context, *concepts = self.init_concepts(**kwargs) - - if create_new: - sheerka.cache_manager.caches[SheerkaRuleManager.FORMAT_RULE_ENTRY].cache.clear() - sheerka.cache_manager.delete(sheerka.OBJECTS_IDS_ENTRY, SheerkaRuleManager.RULE_IDS) - with sheerka.sdp.get_transaction(context.event.get_digest()) as transaction: - transaction.clear(SheerkaRuleManager.FORMAT_RULE_ENTRY) - - initialized = [] - for rule_blue_print in rules: - if isinstance(rule_blue_print, tuple): - rule = Rule("print", None, rule_blue_print[0], rule_blue_print[1]) - if not compile_rule: - rule.metadata.is_compiled = True - else: - rule = rule_blue_print - - is_enabled = rule.metadata.is_enabled - sheerka.services[SheerkaRuleManager.NAME].init_rule(context, rule) - if create_new: - res = sheerka.create_new_rule(context, rule) - initialized.append(res.body.body) - else: - initialized.append(rule) - - if is_enabled is not None: - rule.metadata.is_enabled = is_enabled - - 
return sheerka, context, *initialized + def init_format_rules(self, *rules, **kwargs): + return self.init_test(**kwargs).with_rules(*rules, **kwargs).unpack() @staticmethod def get_concept_instance(sheerka, concept, **kwargs): @@ -179,6 +212,7 @@ class BaseTest: concept.init_key() sheerka.set_id_if_needed(concept, False) sheerka.test_only_add_in_cache(concept) + freeze_concept_attrs(concept) return concept @staticmethod diff --git a/tests/TestUsingFileBasedSheerka.py b/tests/TestUsingFileBasedSheerka.py index 8058d86..e7c514b 100644 --- a/tests/TestUsingFileBasedSheerka.py +++ b/tests/TestUsingFileBasedSheerka.py @@ -1,43 +1,37 @@ -import os -import shutil -from os import path - -import pytest -from core.concept import ALL_ATTRIBUTES +from conftest import SHEERKA_TEST_FOLDER from core.sheerka.Sheerka import Sheerka +from core.sheerka.SheerkaOntologyManager import SheerkaOntologyManager from tests.BaseTest import BaseTest class TestUsingFileBasedSheerka(BaseTest): - tests_root = path.abspath("../../build/tests") - root_folder = "init_folder" + sheerka = None + context = None + root_ontology_name = SheerkaOntologyManager.ROOT_ONTOLOGY_NAME - @pytest.fixture(autouse=True) - def init_test(self): - if path.exists(self.tests_root): - shutil.rmtree(self.tests_root) - - if not path.exists(self.tests_root): - os.makedirs(self.tests_root) - current_pwd = os.getcwd() - os.chdir(self.tests_root) - - yield None - - os.chdir(current_pwd) - - def get_sheerka(self, **kwargs): - reset_attrs = kwargs.get("reset_attrs", True) - if reset_attrs: - ALL_ATTRIBUTES.clear() - - use_dict = kwargs.get("use_dict", False) - # use dictionary based io instead of file - # If you do so, information between two different instances of sheerka - # won't be shared - root = "mem://" if use_dict else self.root_folder - sheerka = Sheerka() - sheerka.initialize(root, save_execution_context=False, enable_process_return_values=False) + def teardown_method(self, method): + # to do after the test + if 
TestUsingFileBasedSheerka.sheerka: + while TestUsingFileBasedSheerka.sheerka.om.current_ontology().name != self.root_ontology_name: + ontology = TestUsingFileBasedSheerka.sheerka.pop_ontology().body.body + ontology.cache_manager.sdp.test_only_destroy_refs() + @staticmethod + def new_sheerka_instance(cache_only): + sheerka = Sheerka(cache_only=cache_only) + sheerka.initialize(SHEERKA_TEST_FOLDER, + save_execution_context=False, + enable_process_return_values=False) return sheerka + + def get_sheerka(self, **kwargs) -> Sheerka: + cache_only = kwargs.get("cache_only", False) + ontology_name = kwargs.get("ontology", "#unit_test#") or "#unit_test#" + + if TestUsingFileBasedSheerka.sheerka is None: + TestUsingFileBasedSheerka.sheerka = self.new_sheerka_instance(False) + TestUsingFileBasedSheerka.context = self.get_context(TestUsingFileBasedSheerka.sheerka) + + self.sheerka.push_ontology(self.context, ontology_name, cache_only=cache_only) + return TestUsingFileBasedSheerka.sheerka diff --git a/tests/TestUsingMemoryBasedSheerka.py b/tests/TestUsingMemoryBasedSheerka.py index 5c42c86..f000f69 100644 --- a/tests/TestUsingMemoryBasedSheerka.py +++ b/tests/TestUsingMemoryBasedSheerka.py @@ -1,42 +1,33 @@ -from core.concept import ALL_ATTRIBUTES from core.sheerka.Sheerka import Sheerka +from core.sheerka.SheerkaOntologyManager import SheerkaOntologyManager from tests.BaseTest import BaseTest class TestUsingMemoryBasedSheerka(BaseTest): - singleton_instance = None - dump = None + sheerka = None + context = None + root_ontology_name = SheerkaOntologyManager.ROOT_ONTOLOGY_NAME + + def teardown_method(self, method): + # to do after the test + if TestUsingMemoryBasedSheerka.sheerka: + while TestUsingMemoryBasedSheerka.sheerka.om.current_ontology().name != self.root_ontology_name: + TestUsingMemoryBasedSheerka.sheerka.pop_ontology() @staticmethod - def _inner_get_sheerka(cache_only): - ALL_ATTRIBUTES.clear() + def new_sheerka_instance(cache_only): sheerka = 
Sheerka(cache_only=cache_only) sheerka.initialize("mem://", save_execution_context=False, enable_process_return_values=False) return sheerka - def get_sheerka(self, **kwargs): + def get_sheerka(self, **kwargs) -> Sheerka: cache_only = kwargs.get("cache_only", True) - use_singleton = kwargs.get("singleton", True) - reset_attrs = kwargs.get("reset_attrs", True) + ontology_name = kwargs.get("ontology", "#unit_test#") or "#unit_test#" - sheerka = kwargs.get("sheerka", None) - if sheerka: - return sheerka + if TestUsingMemoryBasedSheerka.sheerka is None: + TestUsingMemoryBasedSheerka.sheerka = self.new_sheerka_instance(False) + TestUsingMemoryBasedSheerka.context = self.get_context(TestUsingMemoryBasedSheerka.sheerka) - if reset_attrs: - ALL_ATTRIBUTES.clear() - - if use_singleton: - singleton_instance = TestUsingMemoryBasedSheerka.singleton_instance - if singleton_instance: - singleton_instance.reset(cache_only) - singleton_instance.cache_manager.init_from_dump(TestUsingMemoryBasedSheerka.dump) - return singleton_instance - else: - new_instance = self._inner_get_sheerka(cache_only) - TestUsingMemoryBasedSheerka.dump = new_instance.cache_manager.dump() - TestUsingMemoryBasedSheerka.singleton_instance = new_instance - return TestUsingMemoryBasedSheerka.singleton_instance - - return self._inner_get_sheerka(cache_only) + self.sheerka.push_ontology(self.context, ontology_name, cache_only=cache_only) + return self.sheerka diff --git a/tests/cache/__init__.py b/tests/cache/__init__.py index e69de29..8aba1e2 100644 --- a/tests/cache/__init__.py +++ b/tests/cache/__init__.py @@ -0,0 +1,18 @@ +class FakeSdp: + def __init__(self, /, get_value=None, extend_exists=None, get_alt_value=None, populate=None): + self.get_value = get_value + self.extend_exists = extend_exists + self.populate_function = populate + self.get_alt_value = get_alt_value + + def get(self, cache_name, key): + return self.get_value(cache_name, key) + + def exists(self, cache_name, key): + return 
self.extend_exists(cache_name, key) + + def alt_get(self, cache_name, key): + return self.get_alt_value(cache_name, key) + + def populate(self): + return self.populate_function() if callable(self.populate_function) else self.populate_function diff --git a/tests/cache/test_CacheManager.py b/tests/cache/test_CacheManager.py new file mode 100644 index 0000000..9da9a8f --- /dev/null +++ b/tests/cache/test_CacheManager.py @@ -0,0 +1,224 @@ +import pytest +from cache.Cache import Cache +from cache.CacheManager import CacheManager, ConceptNotFound +from cache.DictionaryCache import DictionaryCache +from cache.ListCache import ListCache +from cache.ListIfNeededCache import ListIfNeededCache +from core.concept import Concept +from core.global_symbols import NotFound + +from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka +from tests.cache import FakeSdp + + +class TestCacheManager(TestUsingMemoryBasedSheerka): + def test_i_do_not_push_into_sdp_when_cache_only_is_true(self): + sheerka, context = self.init_test().unpack() + + cache_manager = CacheManager(True) + cache_manager.register_cache("test", Cache(), persist=True) + cache_manager.put("test", "key", "value") + + cache_manager.commit(context) + assert not sheerka.om.current_cache_manager().sdp.exists("test", "key") + + def test_i_do_not_get_value_from_sdp_when_cache_only_is_true(self): + sheerka, context = self.init_test().unpack() + + sdp = sheerka.om.get_sdp() + with sdp.get_transaction(context.event) as transaction: + transaction.add("test", "key", "value") + + cache = Cache(default=lambda k: sdp.get("test", k)) + + cache_manager = CacheManager(True) + cache_manager.register_cache("test", cache, persist=True) + + assert cache_manager.get("test", "key") is NotFound + + def test_i_do_not_get_value_from_inner_sdp_when_cache_only_is_true(self): + sheerka, context = self.init_test().unpack() + + cache_manager = CacheManager(True, sdp=sheerka.om.get_sdp("test")) + cache = Cache(default=lambda _sdp, k: 
_sdp.get("test", k)) + cache_manager.register_cache("test", cache, persist=True) + + with cache_manager.sdp.get_transaction(context.event) as transaction: + transaction.add("test", "key", "value") + + assert cache_manager.get("test", "key") is NotFound + + def test_i_can_get_value_from_sdp_when_cache_only_is_false(self): + sheerka, context = self.init_test().unpack() + + sdp = sheerka.om.get_sdp() + with sdp.get_transaction(context.event) as transaction: + transaction.add("test", "key", "value") + + cache_manager = CacheManager(False) + cache = Cache(default=lambda k: sdp.get("test", k)) + cache_manager.register_cache("test", cache, persist=True) + + assert cache_manager.get("test", "key") == "value" + + def test_i_can_get_value_from_inner_sdp_when_cache_only_is_false(self): + sheerka, context = self.init_test().unpack() + + cache_manager = CacheManager(False, sdp=sheerka.om.get_sdp("test")) + cache = Cache(default=lambda _sdp, k: _sdp.get("test", k)) + cache_manager.register_cache("test", cache, persist=True) + + with cache_manager.sdp.get_transaction(context.event) as transaction: + transaction.add("test", "key", "value") + + assert cache_manager.get("test", "key") == "value" + + def test_i_can_get_value_from_alt_sdp_when_cache_only_is_true(self): + sheerka, context = self.init_test().unpack() + + cache_manager = CacheManager(cache_only=True, sdp=sheerka.om.get_sdp("test")) + cache_manager.register_cache("test", Cache().auto_configure("test"), persist=True) + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value found !") + assert cache_manager.get("test", "key", alt_sdp=alt_sdp) == "value found !"
+ + def test_i_can_commit_simple_cache(self): + sheerka, context = self.init_test().unpack() + + cache_manager = CacheManager(False, sheerka.om.get_sdp("test")) + cache_manager.register_cache("test", Cache(), persist=True) + cache = cache_manager.caches["test"].cache + + cache_manager.put("test", "key", "value") + + cache_manager.commit(context) + assert cache_manager.sdp.get("test", "key") == "value" + + cache.update("key", "value", "key", "another_value") + cache_manager.commit(context) + assert cache_manager.sdp.get("test", "key") == "another_value" + + cache.update("key", "another_value", "key2", "another_value") + cache_manager.commit(context) + assert cache_manager.sdp.get("test", "key") is NotFound + assert cache_manager.sdp.get("test", "key2") == "another_value" + + # sanity check + # sdp 'test' has value, but sdp '__default__' does not + assert cache_manager.sdp.name == "test" + assert cache_manager.sdp.state.data == {'test': {'key2': 'another_value'}} + + default_sdp = sheerka.om.ontologies[-1].cache_manager.sdp + assert default_sdp.name == "__default__" + assert "test" not in default_sdp.state.data + + def test_i_can_commit_list_cache(self): + sheerka, context = self.init_test().unpack() + + cache_manager = CacheManager(False, sheerka.om.get_sdp("test")) + cache_manager.register_cache("test", ListCache(), persist=True) + cache = cache_manager.caches["test"].cache + + cache.put("key", "value") + cache_manager.commit(context) + assert cache_manager.sdp.get("test", "key") == ["value"] + + cache.put("key", "value2") + cache_manager.commit(context) + assert cache_manager.sdp.get("test", "key") == ["value", "value2"] + + cache.update("key", "value2", "key2", "value2") + cache_manager.commit(context) + assert cache_manager.sdp.get("test", "key") == ["value"] + assert cache_manager.sdp.get("test", "key2") == ["value2"] + + cache.update("key2", "value2", "key3", "value2") + cache_manager.commit(context) + assert cache_manager.sdp.get("test", "key") == ["value"] + 
assert cache_manager.sdp.get("test", "key2") is NotFound + assert cache_manager.sdp.get("test", "key3") == ["value2"] + + def test_i_can_commit_dictionary_cache(self): + sheerka, context = self.init_test().unpack() + + cache_manager = CacheManager(False, sheerka.om.get_sdp("test")) + cache_manager.register_cache("test", DictionaryCache(), persist=True) + cache = cache_manager.caches["test"].cache + + cache.put(False, {"key": "value", "key2": "value2"}) + cache_manager.commit(context) + assert cache_manager.sdp.get("test") == {"key": "value", "key2": "value2"} + assert cache_manager.sdp.get("test", "key") == "value" + + cache.put(False, {"key": "value", "key2": "value2", "key3": "value3"}) + cache_manager.commit(context) + assert cache_manager.sdp.get("test") == {"key": "value", "key2": "value2", "key3": "value3"} + + def test_i_can_get_value_from_sdp_when_dictionary_cache(self): + sheerka, context = self.init_test().unpack() + + cache_manager = CacheManager(False, sheerka.om.get_sdp("cache_name")) + cache_manager.register_cache("cache_name", DictionaryCache().auto_configure("cache_name"), persist=True) + + with cache_manager.sdp.get_transaction(context.event) as transaction: + transaction.add("cache_name", None, {"key1": "value1", "key2": "value2"}) + + assert cache_manager.get("cache_name", "key3") is NotFound + + # make sure that the first call retrieves the whole remote repository + assert cache_manager.caches["cache_name"].cache.copy() == {"key1": "value1", "key2": "value2"} + + assert cache_manager.get("cache_name", "key1") == "value1" + assert cache_manager.get("cache_name", "key2") == "value2" + + def test_i_can_remove_a_concept_from_concepts_caches(self): + cache_manager = CacheManager(True) + cache_manager.register_concept_cache("id", Cache(), lambda c: c.id, True) + cache_manager.register_concept_cache("key", ListIfNeededCache(), lambda c: c.key, True) + + sheerka, context, one, two, three, two_bis = self.init_concepts("one", "two", "three", 
Concept("two", body="2")) + + for concept in [one, two, three, two_bis]: + cache_manager.add_concept(concept) + + # sanity check before removing + cache_def = cache_manager.caches["id"] + assert cache_def.cache.copy() == {one.id: one, two.id: two, three.id: three, two_bis.id: two_bis} + cache_def = cache_manager.caches["key"] + assert cache_def.cache.copy() == {one.key: one, two.key: [two, two_bis], three.key: three} + + for cache_name in cache_manager.concept_caches: + cache_manager.caches[cache_name].cache.reset_events() + + cache_manager.remove_concept(sheerka.new(("two", two_bis.id))) + + cache_def = cache_manager.caches["id"] + assert cache_def.cache.copy() == {one.id: one, two.id: two, three.id: three} + assert cache_def.cache.to_remove == {two_bis.id} + assert cache_def.cache.to_add == set() + assert len(cache_def.cache) == 3 + + cache_def = cache_manager.caches["key"] + assert cache_def.cache.copy() == {one.key: one, two.key: two, three.key: three} + assert cache_def.cache.to_remove == set() + assert cache_def.cache.to_add == {"two"} + assert len(cache_def.cache) == 3 + + def test_i_cannot_remove_a_concept_that_does_not_exists(self): + cache_manager = CacheManager(True) + cache_manager.register_concept_cache("id", Cache(), lambda c: c.id, True) + cache_manager.register_concept_cache("key", ListIfNeededCache(), lambda c: c.key, True) + + with pytest.raises(ConceptNotFound) as ex: + cache_manager.remove_concept(Concept("foo", id="1001")) + + assert ex.value.concept == Concept("foo", id="1001") + + def test_i_can_configure_a_cache_with_internal_sdp(self): + cache_manager = CacheManager(cache_only=False, + sdp=FakeSdp(get_value=lambda cache_name, key: key + "_not_found")) + cache = Cache(default=lambda sdp, key: sdp.get("cache_name", key)) + cache_manager.register_cache("test", cache) + + assert cache.get("key") == "key_not_found" + assert cache_manager.get("test", "key") == "key_not_found" diff --git a/tests/cache/test_DictionaryCache.py 
b/tests/cache/test_DictionaryCache.py new file mode 100644 index 0000000..34eb739 --- /dev/null +++ b/tests/cache/test_DictionaryCache.py @@ -0,0 +1,162 @@ +import pytest + +from cache.DictionaryCache import DictionaryCache +from core.global_symbols import NotFound +from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka +from tests.cache import FakeSdp + + +class TestDictionaryCache(TestUsingMemoryBasedSheerka): + def test_i_can_put_and_retrieve_value_from_dictionary_cache(self): + cache = DictionaryCache() + + # key must be None + with pytest.raises(KeyError): + cache.put("key", None) + + # value must be a dictionary + with pytest.raises(ValueError): + cache.put(True, "value") + + entry = {"key": "value", "key2": ["value21", "value22"]} + cache.put(False, entry) + assert len(cache) == 3 + assert id(cache._cache) == id(entry) + assert cache.get("key") == "value" + assert cache.get("key2") == ["value21", "value22"] + + # I can append values + cache.put(True, {"key": "another_value", "key3": "value3"}) + assert len(cache) == 4 + assert cache.get("key") == "another_value" + assert cache.get("key2") == ["value21", "value22"] + assert cache.get("key3") == "value3" + + # I can reset + entry = {"key": "value", "key2": ["value21", "value22"]} + cache.put(False, entry) + assert len(cache) == 3 + assert id(cache._cache) == id(entry) + assert cache.get("key") == "value" + assert cache.get("key2") == ["value21", "value22"] + + assert cache.copy() == {'key': 'value', 'key2': ['value21', 'value22']} + + @pytest.mark.parametrize("key", [ + None, + "something" + ]) + def test_keys_have_constraints_when_dictionary_cache(self, key): + cache = DictionaryCache() + with pytest.raises(KeyError): + cache.put(key, None) + + def test_i_can_sync_with_remote_repository(self): + cache = DictionaryCache() + + entry = {"key": "value", "key2": ["value21", "value22"]} + cache.put(False, entry) + + assert len(cache) == 3 + assert id(cache._cache) == id(entry) + assert 
cache.get("key") == "value" + assert cache.get("key2") == ["value21", "value22"] + + def test_i_can_get_a_value_that_does_not_exist_without_compromising_the_cache(self): + cache = DictionaryCache() + cache.put(False, {"key": "value"}) + + assert cache.get("key2") is NotFound + assert cache.copy() == {"key": "value"} + + @pytest.mark.parametrize("value", [ + None, + "something" + ]) + def test_values_have_constraints_when_dictionary_cache(self, value): + cache = DictionaryCache() + with pytest.raises(ValueError): + cache.put(True, value) + + def test_i_can_append_to_a_dictionary_cache_even_if_it_is_new(self): + cache = DictionaryCache() + + entry = {"key": "value", "key2": ["value21", "value22"]} + cache.put(True, entry) + assert len(cache) == 3 + assert id(cache._cache) != id(entry) + assert cache.get("key") == "value" + assert cache.get("key2") == ["value21", "value22"] + + def test_exists_in_dictionary_cache(self): + cache = DictionaryCache() + assert not cache.exists("key") + + cache.put(True, {"key": "value"}) + assert cache.exists("key") + + def test_default_for_dictionary_cache(self): + cache = DictionaryCache(default={"key": "value", "key2": "value2"}) + + # cache is fully set when the value is found + assert cache.get("key") == "value" + assert cache.copy() == {"key": "value", "key2": "value2"} + + # cache is fully set when the value is not found + cache.test_only_reset() + assert cache.get("key3") is NotFound + assert cache.copy() == {"key": "value", "key2": "value2"} + + # cache is not corrupted when value is found + cache.put(True, {"key3": "value3", "key4": "value4"}) + assert cache.get("key3") == "value3" + assert cache.copy() == {"key": "value", "key2": "value2", "key3": "value3", "key4": "value4"} + + # cache is not corrupted when value is not found + cache._cache["key"] = "another value" # operation that is normally not possible + assert cache.get("key5") is NotFound + assert cache.copy() == {"key": "value", "key2": "value2", "key3": "value3", 
"key4": "value4"} + + def test_default_callable_for_dictionary_cache(self): + cache = DictionaryCache(default=lambda k: {"key": "value", "key2": "value2"}) + + assert cache.get("key") == "value" + assert "key2" in cache + assert len(cache) == 2 + + cache.clear() + assert cache.get("key3") is NotFound + assert len(cache) == 2 + assert "key" in cache + assert "key2" in cache + + def test_default_callable_with_internal_sdp_for_dictionary_cache(self): + cache = DictionaryCache(default=lambda sdp, key: sdp.get("cache_name", key), + sdp=FakeSdp(lambda entry, k: {"key": "value", "key2": "value2"})) + + assert cache.get("key") == "value" + assert "key2" in cache + assert len(cache) == 2 + + cache.clear() + assert cache.get("key3") is NotFound + assert len(cache) == 2 + assert "key" in cache + assert "key2" in cache + + def test_dictionary_cache_cannot_be_null(self): + cache = DictionaryCache(default=lambda k: NotFound) + assert cache.get("key") is NotFound + assert cache._cache == {} + + cache = DictionaryCache(default=NotFound) + assert cache.get("key") is NotFound + assert cache._cache == {} + + cache = DictionaryCache(default=lambda k: None) + assert cache.get("key") is NotFound + assert cache._cache == {} + + cache = DictionaryCache(default=None) + assert cache.get("key") is NotFound + assert cache._cache == {} diff --git a/tests/cache/test_FastCache.py b/tests/cache/test_FastCache.py index 49ba177..e12eb69 100644 --- a/tests/cache/test_FastCache.py +++ b/tests/cache/test_FastCache.py @@ -1,4 +1,5 @@ from cache.FastCache import FastCache +from core.global_symbols import NotFound def test_i_can_put_an_retrieve_values(): @@ -47,7 +48,7 @@ def test_i_can_put_the_same_key_several_times(): def test_none_is_returned_when_not_found(): cache = FastCache() - assert cache.get("foo") is None + assert cache.get("foo") is NotFound def test_i_can_evict_by_key(): @@ -65,3 +66,14 @@ def test_i_can_evict_by_key(): "to_keep3": "to_keep_value3"} assert cache.lru == ["to_keep1", 
"to_keep2", "to_keep3"] + + +def test_i_can_get_default_value(): + cache = FastCache(max_size=3, default=lambda key: key + 1) + + assert cache.get(1) == 2 + assert cache.get(2) == 3 + assert cache.get(3) == 4 + assert cache.get(4) == 5 + + assert cache.cache == {2: 3, 3: 4, 4: 5} # only 3 values diff --git a/tests/cache/test_IncCache.py b/tests/cache/test_IncCache.py new file mode 100644 index 0000000..ec8b28f --- /dev/null +++ b/tests/cache/test_IncCache.py @@ -0,0 +1,84 @@ +from cache.IncCache import IncCache +from core.global_symbols import NotFound, Removed +from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka +from tests.cache import FakeSdp + + +class FakeIncSdp: + def __init__(self, init_value1, init_value2): + self.internals = [IncCache(), IncCache()] + if init_value1: + self.internals[0].put("key", init_value1) + + if init_value2: + self.internals[1].put("key", init_value2) + + def alt_get(self, cache_name, key): + for cache in self.internals: + value = cache.alt_get(key) + if value is not NotFound: + return value + + return NotFound + + +class TestIncCache(TestUsingMemoryBasedSheerka): + def test_i_can_put_and_retrieve_values_from_inc_cache(self): + cache = IncCache() + + assert cache.get("key") == 1 + assert cache.get("key") == 2 + assert cache.get("key") == 3 + assert cache.get("key2") == 1 + assert cache.get("key2") == 2 + + cache.put("key", 100) + assert cache.get("key") == 101 + + assert cache.copy() == {'key': 101, 'key2': 2} + + def test_i_can_alt_get(self): + cache = IncCache() + + assert cache.get("key") == 1 + assert cache.get("key") == 2 + assert cache.alt_get("key") == 2 + assert cache.alt_get("key") == 2 + assert cache.get("key") == 3 + + def test_current_cache_takes_precedence_over_alt_sdp(self): + cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + assert cache.get("key") == 1 + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: 10) + assert cache.get("key", 
alt_sdp=alt_sdp) == 2 + + def test_remote_repository_takes_precedence_over_alt_sdp(self): + cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: 5)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: 10) + assert cache.get("key", alt_sdp=alt_sdp) == 6 + assert cache.get("key", alt_sdp=alt_sdp) == 7 # then we use the value from the cache + + def test_i_can_take_value_from_alt_sdp(self): + cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeIncSdp(10, NotFound) + assert cache.get("key", alt_sdp=alt_sdp) == 11 + assert cache.get("key", alt_sdp=alt_sdp) == 12 # then we use the value from the cache + + def test_i_can_get_when_alt_sdp_and_cache_is_cleared(self): + cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + cache.clear() + + alt_sdp = FakeIncSdp(10, NotFound) + assert cache.get("key", alt_sdp=alt_sdp) == 1 + assert cache.get("key", alt_sdp=alt_sdp) == 2 # then we use the value from the cache + + def test_i_can_manage_when_the_value_from_alt_sdp_is_removed(self): + cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeIncSdp(Removed, 10) + assert cache.get("key", alt_sdp=alt_sdp) == 1 + assert cache.get("key", alt_sdp=alt_sdp) == 2 # then we use the value from the cache diff --git a/tests/cache/test_ListCache.py b/tests/cache/test_ListCache.py new file mode 100644 index 0000000..8f1c5ca --- /dev/null +++ b/tests/cache/test_ListCache.py @@ -0,0 +1,274 @@ +import pytest + +from cache.ListCache import ListCache +from core.global_symbols import NotFound, Removed +from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka +from tests.cache import FakeSdp + + +class TestListIfNeededCache(TestUsingMemoryBasedSheerka): + def test_i_can_put_and_retrieve_value_from_list_cache(self): + cache = ListCache() + + cache.put("key", 
"value") + assert cache.get("key") == ["value"] + assert len(cache) == 1 + + cache.put("key", "value2") # we can append to this list + assert cache.get("key") == ["value", "value2"] + assert len(cache) == 2 + + cache.put("key2", "value") + assert cache.get("key2") == ["value"] + assert len(cache) == 3 + + # duplicates are allowed + cache.put("key", "value") + assert cache.get("key") == ["value", "value2", "value"] + assert len(cache) == 4 + + assert cache.copy() == {'key': ['value', 'value2', 'value'], 'key2': ['value']} + + def test_i_can_put_in_list_cache_when_alt_sdp_returns_values(self): + cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: ["value1"])) + assert cache.get("key") == ["value1", "value2"] + + cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed)) + assert cache.get("key3") == ["value1"] + + def test_i_can_put_in_list_cache_when_alt_sdp_returns_values_and_cache_is_cleared(self): + cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + cache.clear() + + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: ["value1"])) + assert cache.get("key") == ["value2"] + + cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed)) + assert cache.get("key3") == ["value1"] + + def test_current_cache_take_precedence_over_alt_sdp_when_i_put_data_in_list_cache(self): + cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key", "value1") + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx")) + assert cache.get("key") == ["value1", "value2"] + + def test_current_sdp_take_precedence_over_alt_sdp_when_i_put_data_in_list_cache(self): + cache = ListCache(sdp=FakeSdp(get_value=lambda 
cache_name, key: ["value1"])).auto_configure("cache_name") + + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx")) + assert cache.get("key") == ["value1", "value2"] + + def test_i_can_update_from_list_cache(self): + cache = ListCache() + + cache.put("key", "value") + cache.put("key", "value2") + cache.put("key", "value") + cache.update("key", "value", "key", "another value") + + assert len(cache._cache) == 1 + assert len(cache) == 3 + assert cache.get("key") == ["another value", "value2", "value"] # only the first one is affected + + cache.update("key", "value2", "key2", "value2") + assert len(cache._cache) == 2 + assert len(cache) == 3 + assert cache.get("key") == ["another value", "value"] + assert cache.get("key2") == ["value2"] + + cache.update("key2", "value2", "key3", "value2") + assert len(cache._cache) == 2 + assert len(cache) == 3 + assert cache.get("key") == ["another value", "value"] + assert cache.get("key3") == ["value2"] + assert cache.get("key2") is NotFound + + with pytest.raises(KeyError): + cache.update("wrong key", "value", "key", "value") + + def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same(self): + cache = ListCache(default=lambda sdp, key: sdp.get("cache_name", key), + extend_exists=lambda sdp, key: sdp.exists("cache_name", key), + sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)) + + cache.put("key", "value") + cache.update("key", "value", "key", "new_value", FakeSdp(extend_exists=lambda cache_name, key: True)) + + assert cache.get("key") == ["new_value"] + + def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same_but_nothing_in_cache(self): + # There is nothing in cache or remote repository. 
+ # We must ust the value from alt_sdp + cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + previous_value = ["old_1", "old_2", "value"] + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: True, + get_alt_value=lambda cache_name, key: previous_value) + cache.update("key", "value", "key", "new_value", alt_sdp=alt_sdp) + assert cache.get("key") == ["old_1", "old_2", "new_value"] + assert previous_value == ["old_1", "old_2", "value"] + + def test_i_can_update_when_alt_sdp_from_cache_keys_are_different(self): + # keys are different + # make sure that current cache take precedence over alt_sdp + # In this test, the values from alt_sdp are never used + cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: ["xxx1"] if key == "key1" else NotFound) + + # one values in 'key1' + cache.put("key1", "old_1") + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == Removed + assert cache.get("key2") == ["new_value"] + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # Multiple values in 'key1' + cache.clear() + cache.put("key1", "old_1") + cache.put("key1", "old_2") + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == ["old_2"] + assert cache.get("key2") == ["new_value"] + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + def test_i_can_update_when_alt_sdp_from_repository_keys_are_different(self): + # keys are different + # make sure that current repo take precedence over alt_sdp + remote = FakeSdp(get_value=lambda cache_name, key: ["old_1"] if key == "key1" else NotFound) + cache = ListCache(sdp=remote).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + 
get_alt_value=lambda cache_name, key: ["xxx1"] if key == "key1" else NotFound) + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == Removed + assert cache.get("key2") == ["new_value"] + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # Multiple values in 'key1' + remote = FakeSdp(get_value=lambda cache_name, key: ["old_1", "old_2"] if key == "key1" else NotFound) + cache = ListCache(sdp=remote).auto_configure("cache_name") + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == ["old_2"] + assert cache.get("key2") == ["new_value"] + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + def test_i_can_update_when_alt_sdp_from_alt_sdp_keys_are_different_one_value(self): + # keys are different + # No value found in cache or remote repository, + # Will use values from alt_sdp + # The old key is the same, so it has to be marked as Removed + cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + # one values in 'key1' + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: ["old_1"] if key == "key1" else NotFound) + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == Removed + assert cache.get("key2") == ["new_value"] + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # Multiple values in 'key1' + cache.test_only_reset() + old_values = ["old_1", "old_2"] + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: old_values if key == "key1" else NotFound) + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == ["old_2"] + assert cache.get("key2") == ["new_value"] + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + assert 
old_values == ["old_1", "old_2"] # not modified + + def test_i_can_update_when_alt_sdp_cache_take_precedence_for_destination_key(self): + # If a value exists in destination key, either in local cache or remote repository + # It take precedence + # If no value is found, we must use the value from alt_sdp + cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: ["xxx2"] if key == "key2" else NotFound) + cache.put("key1", "source_value") + cache.put("key2", "old_value") + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == ['old_value', 'new_value'] + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + + def test_i_can_update_when_alt_sdp_repository_take_precedence_for_destination_key(self): + # If a value exists in destination key, either in local cache or remote repository + # It take precedence + # If no value is found, we must use the value from alt_sdp + remote_repo = FakeSdp(get_value=lambda cache_name, key: ["old_value"] if key == "key2" else NotFound) + cache = ListCache(sdp=remote_repo).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: ["xxx2"] if key == "key2" else NotFound) + cache.put("key1", "source_value") + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == ['old_value', 'new_value'] + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + + def test_i_can_update_when_alt_sdp_use_alt_sdp_when_no_destination_value_found(self): + # If a value exists in destination key, either in local cache or remote repository + # It take precedence + # If no value is found, we must use the value from 
alt_sdp + cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key1", "source_value") + previous_values = ["old_1", "old_2"] + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: previous_values if key == "key2" else NotFound) + + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == ["old_1", "old_2", 'new_value'] + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + assert previous_values == ["old_1", "old_2"] # not modified + + def test_i_can_update_when_alt_sdp_and_cache_is_cleared(self): + cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1"]) + cache.clear() + + with pytest.raises(KeyError): + cache.update("key", "value1", "key", "value2", alt_sdp=alt_sdp) + + with pytest.raises(KeyError): + cache.update("key", "value1", "key2", "value2", alt_sdp=alt_sdp) + + def test_default_is_called_before_updating_list_cache(self): + cache = ListCache(default=lambda k: NotFound) + with pytest.raises(KeyError): + cache.update("old_key", "old_value", "new_key", "new_value") + + cache = ListCache(default=lambda k: ["old_value", "other old value"]) + cache.update("old_key", "old_value", "old_key", "new_value") + assert cache.get("old_key") == ["new_value", "other old value"] + + cache = ListCache(default=lambda k: ["old_value", "other old value"] if k == "old_key" else NotFound) + cache.update("old_key", "old_value", "new_key", "new_value") + assert cache.get("old_key") == ["other old value"] + assert cache.get("new_key") == ["new_value"] + + cache = ListCache(default=lambda k: ["old_value", "other old value"] if k == "old_key" else ["other new"]) + cache.update("old_key", "old_value", "new_key", "new_value") + assert 
cache.get("old_key") == ["other old value"] + assert cache.get("new_key") == ["other new", "new_value"] diff --git a/tests/cache/test_ListIfNeededCache.py b/tests/cache/test_ListIfNeededCache.py new file mode 100644 index 0000000..57d7ecd --- /dev/null +++ b/tests/cache/test_ListIfNeededCache.py @@ -0,0 +1,569 @@ +import pytest + +from cache.ListIfNeededCache import ListIfNeededCache +from core.global_symbols import NotFound, Removed +from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka +from tests.cache import FakeSdp + + +class TestListIfNeededCache(TestUsingMemoryBasedSheerka): + def test_i_can_put_and_retrieve_value_from_list_if_needed_cache(self): + cache = ListIfNeededCache() + + cache.put("key", "value") + assert cache.get("key") == "value" + + # second time with the same key creates a list + cache.put("key", "value2") + assert cache.get("key") == ["value", "value2"] + assert len(cache) == 2 + + # third time, we now have a list + cache.put("key", "value3") + assert cache.get("key") == ["value", "value2", "value3"] + assert len(cache) == 3 + + # other keys are not affected + cache.put("key2", "value") + assert cache.get("key2") == "value" + assert len(cache) == 4 + + # duplicates are allowed + cache.put("key", "value") + assert cache.get("key") == ["value", "value2", "value3", "value"] + assert len(cache) == 5 + + def test_i_can_put_in_list_if_need_cache_when_alt_sdp_returns_values(self): + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "value1")) + assert cache.get("key") == ["value1", "value2"] + + cache.put("key2", "value3", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: ["value1", "value2"])) + assert cache.get("key2") == ["value1", "value2", "value3"] + + cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed)) + assert cache.get("key3") == 
"value1" + + def test_i_can_put_in_list_if_need__cache_when_alt_sdp_returns_values_and_cache_is_cleared(self): + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + cache.clear() + + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "value1")) + assert cache.get("key") == "value2" + + cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed)) + assert cache.get("key3") == "value1" + + def test_current_cache_take_precedence_over_alt_sdp_when_i_put_data_in_list_if_needed_cache(self): + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key", "value1") + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx")) + assert cache.get("key") == ["value1", "value2"] + + def test_current_sdp_take_precedence_over_alt_sdp_when_i_put_data_in_list_if_needed_cache(self): + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: "value1")).auto_configure("cache_name") + + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx")) + assert cache.get("key") == ["value1", "value2"] + + def test_i_can_update_from_list_if_needed_cache(self): + cache = ListIfNeededCache() + + cache.put("key", "value") + cache.put("key", "value2") + cache.put("key", "value") + + # only the first 'value' is affected + cache.update("key", "value", "key", "another value") + assert len(cache._cache) == 1 + assert len(cache) == 3 + assert cache.get("key") == ["another value", "value2", "value"] + + # change the key + cache.update("key", "value2", "key2", "value2") + assert len(cache._cache) == 2 + assert len(cache) == 3 + assert cache.get("key") == ["another value", "value"] + assert cache.get("key2") == "value2" + + # rename the newly created key + cache.update("key2", "value2", "key3", "value2") + assert len(cache._cache) == 2 + 
assert len(cache) == 3 + assert cache.get("key") == ["another value", "value"] + assert cache.get("key3") == "value2" + assert cache.get("key2") is NotFound + + # from list to single item and vice versa + cache.update("key", "value", "key3", "value") + assert len(cache._cache) == 2 + assert len(cache) == 3 + assert cache.get("key") == "another value" # 'key' is no longer a list + assert cache.get("key3") == ["value2", "value"] # 'key3' is now a list + assert cache.get("key2") is NotFound + + with pytest.raises(KeyError): + cache.update("wrong key", "value", "key", "value") + + def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same(self): + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key", "value") + cache.update("key", "value", "key", "new_value", FakeSdp(extend_exists=lambda cache_name, key: True)) + + assert cache.get("key") == "new_value" + + def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same_but_nothing_in_cache(self): + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + # one value in alt_sdp + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: True, + get_alt_value=lambda cache_name, key: "old_value") + cache.update("key", "value", "key", "new_value", alt_sdp=alt_sdp) + assert cache.get("key") == "new_value" + + # multiple values in alt_sdp + cache.test_only_reset() + previous_value = ["old_1", "old_2", "value"] + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: True, + get_alt_value=lambda cache_name, key: previous_value) + cache.update("key", "value", "key", "new_value", alt_sdp=alt_sdp) + assert cache.get("key") == ["old_1", "old_2", "new_value"] + assert previous_value == ["old_1", "old_2", "value"] + + def test_i_can_update_when_alt_sdp_from_cache_keys_are_different(self): + # keys are different + # make sure that current cache take precedence over alt_sdp + # In this test, 
the values from alt_sdp are never used + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: "xxx1" if key == "key1" else NotFound) + + # one values in 'key1' + cache.put("key1", "old_1") + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == Removed + assert cache.get("key2") == "new_value" + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # Multiple values in 'key1' + cache.clear() + cache.put("key1", "old_1") + cache.put("key1", "old_2") + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == "old_2" + assert cache.get("key2") == "new_value" + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # List of values in 'key1' + cache.clear() + cache.put("key1", "old_1") + cache.put("key1", "old_2") + cache.put("key1", "old_3") + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == ["old_2", "old_3"] + assert cache.get("key2") == "new_value" + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + def test_i_can_update_when_alt_sdp_from_repository_keys_are_different(self): + # keys are different + # make sure that current repo take precedence over alt_sdp + remote = FakeSdp(get_value=lambda cache_name, key: "old_1" if key == "key1" else NotFound) + cache = ListIfNeededCache(sdp=remote).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: "xxx1" if key == "key1" else NotFound) + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == Removed + assert cache.get("key2") == "new_value" + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove 
== set() + + # Multiple values in 'key1' + remote = FakeSdp(get_value=lambda cache_name, key: ["old_1", "old_2"] if key == "key1" else NotFound) + cache = ListIfNeededCache(sdp=remote).auto_configure("cache_name") + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == "old_2" + assert cache.get("key2") == "new_value" + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # List of values in 'key1' + remote = FakeSdp(get_value=lambda cache_name, key: ["old_1", "old_2", "old_3"] if key == "key1" else NotFound) + cache = ListIfNeededCache(sdp=remote).auto_configure("cache_name") + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == ["old_2", "old_3"] + assert cache.get("key2") == "new_value" + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + def test_i_can_update_when_alt_sdp_from_alt_sdp_keys_are_different_one_value(self): + # keys are different + # No value found in cache or remote repository, + # Will use values from alt_sdp + # The old key is the same, so it has to be marked as Removed + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + # one values in 'key1' + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: "old_1" if key == "key1" else NotFound) + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == Removed + assert cache.get("key2") == "new_value" + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # Multiple values in 'key1' + cache.test_only_reset() + old_values = ["old_1", "old_2"] + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: old_values if key == "key1" else NotFound) + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert 
cache.get("key1") == "old_2" + assert cache.get("key2") == "new_value" + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + assert old_values == ["old_1", "old_2"] # not modified + + # List of values in 'key1' + cache.test_only_reset() + old_values = ["old_1", "old_2", "old_3"] + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: old_values if key == "key1" else NotFound) + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == ["old_2", "old_3"] + assert cache.get("key2") == "new_value" + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + assert old_values == ["old_1", "old_2", "old_3"] # not modified + + def test_i_can_update_when_alt_sdp_cache_take_precedence_for_destination_key(self): + # If a value exists in destination key, either in local cache or remote repository + # It take precedence + # If no value is found, we must use the value from alt_sdp + + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: "xxx2" if key == "key2" else NotFound) + cache.put("key1", "source_value") + cache.put("key2", "old_value") + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == ['old_value', 'new_value'] + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + + def test_i_can_update_when_alt_sdp_repository_take_precedence_for_destination_key(self): + # If a value exists in destination key, either in local cache or remote repository + # It take precedence + # If no value is found, we must use the value from alt_sdp + remote_repo = FakeSdp(get_value=lambda cache_name, key: "old_value" if key == "key2" else NotFound) + cache = 
ListIfNeededCache(sdp=remote_repo).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: "xxx2" if key == "key2" else NotFound) + cache.put("key1", "source_value") + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == ['old_value', 'new_value'] + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + + def test_i_can_update_when_alt_sdp_use_alt_sdp_when_no_destination_value_found(self): + # If a value exists in destination key, either in local cache or remote repository + # It take precedence + # If no value is found, we must use the value from alt_sdp + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + # one value in 'key2' + cache.put("key1", "source_value") + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: "old_value" if key == "key2" else NotFound) + + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == ['old_value', 'new_value'] + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + + # Multiple values in 'key2' + cache.test_only_reset() + cache.put("key1", "source_value") + previous_values = ["old_1", "old_2"] + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: previous_values if key == "key2" else NotFound) + + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == ["old_1", "old_2", 'new_value'] + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + assert previous_values == ["old_1", "old_2"] # not modified + + def 
test_i_can_update_when_alt_sdp_and_cache_is_cleared(self): + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value1") + cache.clear() + + with pytest.raises(KeyError): + cache.update("key", "value1", "key", "value2", alt_sdp=alt_sdp) + + with pytest.raises(KeyError): + cache.update("key", "value1", "key2", "value2", alt_sdp=alt_sdp) + + def test_default_is_called_before_updating_list_if_needed_cache(self): + cache = ListIfNeededCache(default=lambda k: NotFound) + with pytest.raises(KeyError): + cache.update("old_key", "old_value", "new_key", "new_value") + + cache = ListIfNeededCache(default=lambda k: "old_value") + cache.update("old_key", "old_value", "old_key", "new_value") + assert cache.get("old_key") == "new_value" + + cache = ListIfNeededCache(default=lambda k: ["old_value", "other old value"]) + cache.update("old_key", "old_value", "old_key", "new_value") + assert cache.get("old_key") == ["new_value", "other old value"] + + cache = ListIfNeededCache(default=lambda k: ["old_value", "other old value"] if k == "old_key" else NotFound) + cache.update("old_key", "old_value", "new_key", "new_value") + assert cache.get("old_key") == "other old value" + assert cache.get("new_key") == "new_value" + + def test_i_can_delete_key_and_values(self): + cache = ListIfNeededCache() + cache.put("key", "value1") + cache.put("key", "value11") + cache.put("key2", "value2") + cache.put("key2", "value22") + cache.put("key2", "value222") + cache.put("key3", "value3") + cache.put("key3", "value33") + cache.put("key4", "value4") + cache.reset_events() + + assert len(cache) == 8 + + # I can remove a whole key + cache.delete("key") + assert cache.get("key") is NotFound + assert len(cache) == 6 + assert cache.to_remove == {"key"} + assert cache.to_add == set() + + # I can remove an element while a list is remaining + cache.reset_events() + cache.delete("key2", 
"value22") + assert cache.get("key2") == ["value2", "value222"] + assert len(cache) == 5 + assert cache.to_add == {"key2"} + assert cache.to_remove == set() + + # I can remove an element while a single element is remaining + cache.reset_events() + cache.delete("key3", "value33") + assert cache.get("key3") == "value3" + assert len(cache) == 4 + assert cache.to_add == {"key3"} + assert cache.to_remove == set() + + # I can remove an element while nothing remains + cache.reset_events() + cache.delete("key4", "value4") + assert cache.get("key4") is NotFound + assert len(cache) == 3 + assert cache.to_remove == {"key4"} + assert cache.to_add == set() + + # I do not remove when the value is not the same + cache.reset_events() + cache.delete("key3", "value33") # value33 was already remove + assert cache.get("key3") == "value3" + assert len(cache) == 3 + assert cache.to_add == set() + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_key_from_cache(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + cache.put("key", "value") + + cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + assert cache.copy() == {"key": Removed} + assert cache.to_add == {"key"} + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_value_from_cache(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + cache.put("key", "value") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True) + cache.delete("key", value="value", alt_sdp=alt_sdp) + assert cache.copy() == 
{"key": Removed} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_value_from_cache_remaining_values(self): + # There is a value in alt_cache_manager, + # But this, there are remaining values in current cache after deletion + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + cache.put("key", "value") + cache.put("key", "value2") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True) + cache.delete("key", value="value", alt_sdp=alt_sdp) + assert cache.copy() == {"key": "value2"} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_key_from_remote_repository(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: ["value1", "value2"])).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True) + cache.delete("key", value=None, alt_sdp=alt_sdp) + assert cache.copy() == {"key": Removed} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: "value")).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True) + cache.delete("key", value="value", alt_sdp=alt_sdp) + assert cache.copy() == {"key": Removed} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def 
test_i_can_delete_when_alt_sdp_a_value_from_remote_repository_remaining_values(self): + # There is a value in alt_cache_manager, + # But this, there are remaining values in current cache after deletion + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: ["value1", "value2"])).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True) + cache.delete("key", value="value1", alt_sdp=alt_sdp) + assert cache.copy() == {"key": "value2"} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_key_from_alt_sdp(self): + # alt_cache_manager is used because no value in cache or in remote repository + # After value deletion, the key is empty + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1, value2"], + extend_exists=lambda cache_name, key: True) + + cache.delete("key", value=None, alt_sdp=alt_sdp) + assert cache.copy() == {"key": Removed} + assert cache.to_add == {"key"} + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp(self): + # alt_cache_manager is used because no value in cache or in remote repository + # After value deletion, the key is empty + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value1", + extend_exists=lambda cache_name, key: True) + + cache.delete("key", value="value1", alt_sdp=alt_sdp) + assert cache.copy() == {"key": Removed} + assert cache.to_add == {"key"} + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_one_value_remaining(self): + # alt_cache_manager is used because no value in cache or in remote repository + # After value deletion, one value remains in the cache + cache = 
ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1", "value2"], + extend_exists=lambda cache_name, key: True) + + cache.delete("key", value="value1", alt_sdp=alt_sdp) + assert cache.copy() == {"key": "value2"} + assert cache.to_add == {"key"} + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_multiple_values_remaining(self): + # alt_cache_manager is used because no value in cache or in remote repository + # After value deletion, one value remains in the cache + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1", "value2", "value3"], + extend_exists=lambda cache_name, key: True) + + cache.delete("key", value="value1", alt_sdp=alt_sdp) + assert cache.copy() == {"key": ["value2", "value3"]} + assert cache.to_add == {"key"} + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_an_already_removed_value_from_alt_sdp(self): + # alt_cache_manager is used because no value in cache or in remote repository + # But the alternate sdp returns Removed, which means that previous value was deleted + # It's like there is nothing to delete + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: Removed, + extend_exists=lambda cache_name, key: False) + + cache.delete("key", value="value1", alt_sdp=alt_sdp) + assert cache.copy() == {} + assert cache.to_add == set() + assert cache.to_remove == set() + + def test_deleting_an_entry_that_does_not_exist_is_not_an_error(self): + cache = ListIfNeededCache() + cache.put("key", "value1") + + cache.reset_events() + cache.delete("key3") + assert len(cache) == 1 + assert cache.to_add == set() + assert cache.to_remove == set() + + 
cache.delete("key3", "value") + assert len(cache) == 1 + assert cache.to_add == set() + assert cache.to_remove == set() + + cache.delete("key", "value2") + assert len(cache) == 1 + assert cache.to_add == set() + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_and_cache_is_cleared(self): + cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value", + extend_exists=lambda cache_name, key: True) + + cache.clear() + cache.delete("key", value=None, alt_sdp=alt_sdp) + assert cache.copy() == {} + assert cache.to_add == set() + assert cache.to_remove == set() + + cache.delete("key", value="value", alt_sdp=alt_sdp) + assert cache.copy() == {} + assert cache.to_add == set() + assert cache.to_remove == set() diff --git a/tests/cache/test_SetCache.py b/tests/cache/test_SetCache.py new file mode 100644 index 0000000..15f3746 --- /dev/null +++ b/tests/cache/test_SetCache.py @@ -0,0 +1,477 @@ +import pytest + +from cache.SetCache import SetCache +from core.global_symbols import NotFound, Removed +from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka +from tests.cache import FakeSdp + + +class TestSetCache(TestUsingMemoryBasedSheerka): + + def test_i_can_put_and_retrieve_values_from_set_cache(self): + cache = SetCache() + + cache.put("key", "value") + assert cache.get("key") == {"value"} + assert len(cache) == 1 + + # we can add to this set + cache.put("key", "value2") + assert cache.get("key") == {"value", "value2"} + assert len(cache) == 2 + + # other keys are not affected + cache.put("key2", "value") + assert cache.get("key2") == {"value"} + assert len(cache) == 3 + + # duplicates are removed + cache.put("key", "value") + assert cache.get("key") == {"value", "value2"} + assert len(cache) == 3 + + assert cache.copy() == {'key': {'value', 'value2'}, 'key2': {'value'}} + + def 
test_i_can_put_in_set_cache_when_alt_sdp_returns_values(self): + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: {"value1"})) + assert cache.get("key") == {"value1", "value2"} + + cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed)) + assert cache.get("key3") == {"value1"} + + def test_i_can_put_in_set_cache_when_alt_sdp_returns_values_and_cache_is_cleared(self): + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + cache.clear() + + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: {"value1"})) + assert cache.get("key") == {"value2"} + + cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed)) + assert cache.get("key3") == {"value1"} + + def test_current_cache_take_precedence_over_alt_sdp_when_i_put_data_in_set_cache(self): + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key", "value1") + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx")) + assert cache.get("key") == {"value1", "value2"} + + def test_current_sdp_take_precedence_over_alt_sdp_when_i_put_data_in_set_cache(self): + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: {"value1"})).auto_configure("cache_name") + + cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx")) + assert cache.get("key") == {"value1", "value2"} + + def test_i_can_update_from_set_cache(self): + cache = SetCache() + + cache.put("key", "value") + cache.put("key", "value2") + cache.update("key", "value", "key", "another value") + + assert len(cache._cache) == 1 + assert len(cache) == 2 + assert cache.get("key") == {"another value", "value2"} + + cache.update("key", "value2", "key2", 
"value2") + assert len(cache._cache) == 2 + assert len(cache) == 2 + assert cache.get("key") == {"another value"} + assert cache.get("key2") == {"value2"} + + cache.update("key", "another value", "key3", "another value") + assert len(cache._cache) == 2 + assert len(cache) == 2 + assert cache.get("key") is NotFound + assert cache.get("key2") == {"value2"} + assert cache.get("key3") == {"another value"} + + with pytest.raises(KeyError): + cache.update("wrong key", "value", "key", "value") + + def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same(self): + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key", "value") + cache.update("key", "value", "key", "new_value", alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + + assert cache.get("key") == {"new_value"} + + def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same_but_nothing_in_cache(self): + # There is nothing in cache or remote repository. 
+ # We must ust the value from alt_sdp + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + previous_value = {"old_1", "old_2", "value"} + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: True, + get_alt_value=lambda cache_name, key: previous_value) + cache.update("key", "value", "key", "new_value", alt_sdp=alt_sdp) + assert cache.get("key") == {"old_1", "old_2", "new_value"} + assert previous_value == {"old_1", "old_2", "value"} + + def test_i_can_update_when_alt_sdp_from_cache_keys_are_different(self): + # keys are different + # make sure that current cache take precedence over alt_sdp + # In this test, the values from alt_sdp are never used + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: {"xxx1"} if key == "key1" else NotFound) + + # one values in 'key1' + cache.put("key1", "old_1") + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == Removed + assert cache.get("key2") == {"new_value"} + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # Multiple values in 'key1' + cache.clear() + cache.put("key1", "old_1") + cache.put("key1", "old_2") + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == {"old_2"} + assert cache.get("key2") == {"new_value"} + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + def test_i_can_update_when_alt_sdp_from_repository_keys_are_different(self): + # keys are different + # make sure that current repo take precedence over alt_sdp + remote = FakeSdp(get_value=lambda cache_name, key: {"old_1"} if key == "key1" else NotFound) + cache = SetCache(sdp=remote).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + 
get_alt_value=lambda cache_name, key: {"xxx1"} if key == "key1" else NotFound) + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == Removed + assert cache.get("key2") == {"new_value"} + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # Multiple values in 'key1' + remote = FakeSdp(get_value=lambda cache_name, key: {"old_1", "old_2"} if key == "key1" else NotFound) + cache = SetCache(sdp=remote).auto_configure("cache_name") + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == {"old_2"} + assert cache.get("key2") == {"new_value"} + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + def test_i_can_update_when_alt_sdp_from_alt_sdp_keys_are_different_one_value(self): + # keys are different + # No value found in cache or remote repository, + # Will use values from alt_sdp + # The old key is the same, so it has to be marked as Removed + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + # one values in 'key1' + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: {"old_1"} if key == "key1" else NotFound) + + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == Removed + assert cache.get("key2") == {"new_value"} + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + + # Multiple values in 'key1' + cache.test_only_reset() + old_values = {"old_1", "old_2"} + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1", + get_alt_value=lambda cache_name, key: old_values if key == "key1" else NotFound) + cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == {"old_2"} + assert cache.get("key2") == {"new_value"} + assert cache.to_add == {"key2", "key1"} + assert cache.to_remove == set() + assert 
old_values == {"old_1", "old_2"} # not modified + + def test_i_can_update_when_alt_sdp_cache_take_precedence_for_destination_key(self): + # If a value exists in destination key, either in local cache or remote repository + # It take precedence + # If no value is found, we must use the value from alt_sdp + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: {"xxx2"} if key == "key2" else NotFound) + cache.put("key1", "source_value") + cache.put("key2", "old_value") + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == {'old_value', 'new_value'} + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + + def test_i_can_update_when_alt_sdp_repository_take_precedence_for_destination_key(self): + # If a value exists in destination key, either in local cache or remote repository + # It take precedence + # If no value is found, we must use the value from alt_sdp + remote_repo = FakeSdp(get_value=lambda cache_name, key: {"old_value"} if key == "key2" else NotFound) + cache = SetCache(sdp=remote_repo).auto_configure("cache_name") + + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: "xxx2" if key == "key2" else NotFound) + cache.put("key1", "source_value") + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == {'old_value', 'new_value'} + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + + def test_i_can_update_when_alt_sdp_use_alt_sdp_when_no_destination_value_found(self): + # If a value exists in destination key, either in local cache or remote repository + # It take precedence + # If no value is found, we must use the value from 
alt_sdp + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + cache.put("key1", "source_value") + previous_values = {"old_1", "old_2"} + alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2", + get_alt_value=lambda cache_name, key: previous_values if key == "key2" else NotFound) + + cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp) + assert cache.get("key1") == NotFound + assert cache.get("key2") == {"old_1", "old_2", 'new_value'} + assert cache.to_add == {"key2"} + assert cache.to_remove == {"key1"} + assert previous_values == {"old_1", "old_2"} # not modified + + def test_i_can_update_when_alt_sdp_and_cache_is_cleared(self): + cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1"]) + cache.clear() + + with pytest.raises(KeyError): + cache.update("key", "value1", "key", "value2", alt_sdp=alt_sdp) + + with pytest.raises(KeyError): + cache.update("key", "value1", "key2", "value2", alt_sdp=alt_sdp) + + def test_default_is_called_before_updating_set_cache(self): + cache = SetCache(default=lambda k: NotFound) + with pytest.raises(KeyError): + cache.update("old_key", "old_value", "new_key", "new_value") + + cache = SetCache(default=lambda k: {"old_value", "other old value"}) + cache.update("old_key", "old_value", "old_key", "new_value") + assert cache.get("old_key") == {"new_value", "other old value"} + + cache = SetCache(default=lambda k: {"old_value", "other old value"} if k == "old_key" else NotFound) + cache.update("old_key", "old_value", "new_key", "new_value") + assert cache.get("old_key") == {"other old value"} + assert cache.get("new_key") == {"new_value"} + + cache = SetCache(default=lambda k: {"old_value", "other old value"} if k == "old_key" else {"other new"}) + cache.update("old_key", "old_value", "new_key", "new_value") + assert 
cache.get("old_key") == {"other old value"} + assert cache.get("new_key") == {"other new", "new_value"} + + def test_i_can_delete_values_from_set_cache(self): + cache = SetCache() + cache.put("key", "value1") + cache.put("key", "value2") + cache.reset_events() + + cache.delete("key", "fake_value") + assert cache.get("key") == {"value1", "value2"} + assert len(cache) == 2 + assert cache.to_add == set() + assert cache.to_remove == set() + + cache.delete("key", "value1") + assert cache.get("key") == {"value2"} + assert cache.to_add == {"key"} + assert len(cache) == 1 + + cache.delete("key", "value2") + assert cache.get("key") is NotFound + assert cache.to_remove == {"key"} + assert len(cache) == 0 + + def test_i_can_delete_key_from_set_cache(self): + cache = SetCache() + cache.put("key", "value1") + cache.put("key", "value2") + + cache.delete("key") + assert cache.get("key") is NotFound + assert cache.to_remove == {"key"} + assert len(cache) == 0 + + def test_i_can_delete_a_key_that_does_not_exists(self): + cache = SetCache() + cache.delete("key") + + assert cache.to_add == set() + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_key_from_cache(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + cache.put("key", "value") + + cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + assert cache.copy() == {"key": Removed} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_value_from_cache(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + cache.put("key", "value") + + alt_sdp 
= FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True) + cache.delete("key", value="value", alt_sdp=alt_sdp) + assert cache.copy() == {"key": Removed} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_value_from_cache_remaining_values(self): + # There is a value in alt_cache_manager, + # But there is a value in the current cache after deletion + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + cache.put("key", "value1") + cache.put("key", "value2") + + cache.delete("key", value="value1", alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + assert cache.copy() == {"key": {"value2"}} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_key_from_remote_repository(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: {"value1", "value2"})).auto_configure("cache_name") + + cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + assert cache.copy() == {"key": Removed} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: {"value"})).auto_configure("cache_name") + + cache.delete("key", value="value", alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + assert cache.copy() == {"key": Removed} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository_remaining_values(self): + # There is a value in 
alt_cache_manager, + # But there is a value in the current cache after deletion + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: {"value1", "value2"})).auto_configure("cache_name") + + cache.delete("key", value="value1", alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + assert cache.copy() == {"key": {"value2"}} + assert cache.to_remove == set() + assert cache.to_add == {"key"} + + def test_i_can_delete_when_alt_sdp_a_key_from_alt_sdp(self): + # alt_cache_manager is used because no value in cache or in remote repository + # After value deletion, the key is empty + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value1, value2"}, + extend_exists=lambda cache_name, key: True) + + cache.delete("key", value=None, alt_sdp=alt_sdp) + assert cache.copy() == {"key": Removed} + assert cache.to_add == {"key"} + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp(self): + # alt_cache_manager is used because no value in cache or in remote repository + # After value deletion, the key is empty + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value1"}, + extend_exists=lambda cache_name, key: True) + + cache.delete("key", value="value1", alt_sdp=alt_sdp) + assert cache.copy() == {"key": Removed} + assert cache.to_add == {"key"} + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_one_value_remaining(self): + # alt_cache_manager is used because no value in cache or in remote repository + # After value deletion, the key is empty + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value1", "value2"}, + extend_exists=lambda cache_name, key: True) + + 
cache.delete("key", value="value1", alt_sdp=alt_sdp) + assert cache.copy() == {"key": {"value2"}} + assert cache.to_add == {"key"} + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_key_that_does_not_exist_from_alt_sdp(self): + # alt_cache_manager is used because no value in cache or in remote repository + # After value deletion, the key is empty + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=(lambda cache_name, key: {"value1", "value2"} if key == "key" else NotFound), + extend_exists=lambda cache_name, key: key == "key") + + cache.delete("key2", value=None, alt_sdp=alt_sdp) + assert cache.copy() == {} + assert cache.to_add == set() + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_a_value_that_does_not_exist_from_alt_sdp(self): + # alt_cache_manager is used because no value in cache or in remote repository + # After value deletion, the key is empty + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value1", "value2"}, + extend_exists=lambda cache_name, key: True) + + cache.delete("key", value="value4", alt_sdp=alt_sdp) + assert cache.copy() == {} + assert cache.to_add == set() + assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_an_already_removed_value_from_alt_sdp(self): + # alt_cache_manager is used because no value in cache or in remote repository + # But the alternate sdp returns Removed, which means that previous value was deleted + # It's like there is nothing to delete + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: Removed, + extend_exists=lambda cache_name, key: False) + + cache.delete("key", value="value1", alt_sdp=alt_sdp) + assert cache.copy() == {} + assert cache.to_add == set() + 
assert cache.to_remove == set() + + def test_i_can_delete_when_alt_sdp_and_cache_is_cleared(self): + cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name") + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value"}, + extend_exists=lambda cache_name, key: True) + + cache.clear() + cache.delete("key", value=None, alt_sdp=alt_sdp) + assert cache.copy() == {} + assert cache.to_add == set() + assert cache.to_remove == set() + + cache.delete("key", value="value", alt_sdp=alt_sdp) + assert cache.copy() == {} + assert cache.to_add == set() + assert cache.to_remove == set() diff --git a/tests/cache/test_cache.py b/tests/cache/test_cache.py index 0d2f643..b42770e 100644 --- a/tests/cache/test_cache.py +++ b/tests/cache/test_cache.py @@ -8,11 +8,48 @@ from cache.ListCache import ListCache from cache.ListIfNeededCache import ListIfNeededCache from cache.SetCache import SetCache from core.concept import Concept +from core.global_symbols import NotFound, Removed from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka +from tests.cache import FakeSdp class TestCache(TestUsingMemoryBasedSheerka): + def test_i_can_configure(self): + cache = Cache() + cache.configure(max_size=256, + default="default_delegate", + extend_exists="extend_exists_delegate", + alt_sdp_get="alt_sdp_delegate", + sdp=FakeSdp()) + + # Caution, in this test, I initialize default, extend_exists and alt_get_delegate with string + # to simplify the test, but it real usage, they are lambda + # default = lambda sdp, key: sdp.get(cache_name, key) or lambda key: func(key) + # extend_exists = lambda sdp, key: sdp.exists(cache_name, key) or lambda key: func(key) + # alt_sdp_get = lambda sdp, key: sdp.alt_get(cache_name, key) + + assert cache._max_size == 256 + assert cache._default == "default_delegate" + assert cache._extend_exists == "extend_exists_delegate" + assert cache._alt_sdp_get == "alt_sdp_delegate" + assert cache._sdp is not None + + def 
test_i_can_auto_configure(self): + sdp = FakeSdp(get_value=lambda cache_name, key: key + 1 if cache_name == "cache_name" else NotFound, + extend_exists=lambda cache_name, key: True if cache_name == "cache_name" else False, + get_alt_value=lambda cache_name, key: key + 2 if cache_name == "cache_name" else NotFound) + + cache = Cache(sdp=sdp).auto_configure("cache_name") + assert cache._default(cache._sdp, 10) == 11 + assert cache._extend_exists(cache._sdp, 10) == True + assert cache._alt_sdp_get(cache._sdp, 10) == 12 + + cache = Cache(sdp=sdp).auto_configure("another_cache") + assert cache._default(cache._sdp, 10) == NotFound + assert cache._extend_exists(cache._sdp, 10) == False + assert cache._alt_sdp_get(cache._sdp, 10) == NotFound + def test_i_can_get_an_retrieve_value_from_cache(self): cache = Cache() cache.put("key", "value") @@ -44,9 +81,26 @@ class TestCache(TestUsingMemoryBasedSheerka): assert len(cache) == maxsize assert not cache.has(key - maxsize) + def test_i_can_get_a_value_from_alt_sdp(self): + cache = Cache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value found !") + assert cache.get("key", alt_sdp=alt_sdp) == "value found !" 
+ + # The value is now in cache + assert cache.copy() == {'key': 'value found !'} + + def test_i_cannot_get_a_value_from_alt_sdp_when_cache_is_cleared(self): + cache = Cache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name") + cache.clear() + + alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value found !") + assert cache.get("key", alt_sdp=alt_sdp) is NotFound + assert cache.copy() == {} + def test_i_can_get_default_value_from_simple_cache(self): cache = Cache() - assert cache.get("key") is None + assert cache.get("key") is NotFound cache = Cache(default=10) assert cache.get("key") == 10 @@ -56,7 +110,12 @@ class TestCache(TestUsingMemoryBasedSheerka): assert cache.get("key") == "key_not_found" assert "key" in cache # default callable are put in cache - def test_i_dont_ask_the_remote_repository_twice(self): + cache = Cache(default=lambda sdp, key: sdp.get("cache_name", key), + sdp=FakeSdp(get_value=lambda entry, key: key + "_not_found")) + assert cache.get("key") == "key_not_found" + assert "key" in cache # default callable are put in cache + + def test_i_do_not_ask_the_remote_repository_twice(self): nb_request = [] cache = Cache(default=lambda key: nb_request.append("requested")) @@ -64,155 +123,6 @@ class TestCache(TestUsingMemoryBasedSheerka): assert cache.get("key") is None assert len(nb_request) == 1 - def test_i_can_put_and_retrieve_value_from_list_cache(self): - cache = ListCache() - - cache.put("key", "value") - assert cache.get("key") == ["value"] - assert len(cache) == 1 - - cache.put("key", "value2") # we can append to this list - assert cache.get("key") == ["value", "value2"] - assert len(cache) == 2 - - cache.put("key2", "value") - assert cache.get("key2") == ["value"] - assert len(cache) == 3 - - # duplicates are allowed - cache.put("key", "value") - assert cache.get("key") == ["value", "value2", "value"] - assert len(cache) == 4 - - assert cache.copy() == {'key': ['value', 'value2', 'value'], 'key2': 
['value']} - - def test_i_can_put_and_retrieve_value_from_list_if_needed_cache(self): - cache = ListIfNeededCache() - - cache.put("key", "value") - assert cache.get("key") == "value" - - # second time with the same key creates a list - cache.put("key", "value2") - assert cache.get("key") == ["value", "value2"] - assert len(cache) == 2 - - # third time, we now have a list - cache.put("key", "value3") - assert cache.get("key") == ["value", "value2", "value3"] - assert len(cache) == 3 - - # other keys are not affected - cache.put("key2", "value") - assert cache.get("key2") == "value" - assert len(cache) == 4 - - # duplicates are allowed - cache.put("key", "value") - assert cache.get("key") == ["value", "value2", "value3", "value"] - assert len(cache) == 5 - - def test_i_can_put_and_retrieve_values_from_set_cache(self): - cache = SetCache() - - cache.put("key", "value") - assert cache.get("key") == {"value"} - assert len(cache) == 1 - - # we can add to this set - cache.put("key", "value2") - assert cache.get("key") == {"value", "value2"} - assert len(cache) == 2 - - # other keys are not affected - cache.put("key2", "value") - assert cache.get("key2") == {"value"} - assert len(cache) == 3 - - # duplicates are removed - cache.put("key", "value") - assert cache.get("key") == {"value", "value2"} - assert len(cache) == 3 - - assert cache.copy() == {'key': {'value', 'value2'}, 'key2': {'value'}} - - def test_i_can_put_and_retrieve_value_from_dictionary_cache(self): - cache = DictionaryCache() - - # # key must be None - # with pytest.raises(KeyError): - # cache.put("key", None) - # - # # value must be a dictionary - # with pytest.raises(ValueError): - # cache.put(True, "value") - - entry = {"key": "value", "key2": ["value21", "value22"]} - cache.put(False, entry) - assert len(cache) == 3 - assert id(cache._cache) == id(entry) - assert cache.get("key") == "value" - assert cache.get("key2") == ["value21", "value22"] - - # I can append values - cache.put(True, {"key": 
"another_value", "key3": "value3"}) - assert len(cache) == 4 - assert cache.get("key") == "another_value" - assert cache.get("key2") == ["value21", "value22"] - assert cache.get("key3") == "value3" - - # I can reset - entry = {"key": "value", "key2": ["value21", "value22"]} - cache.put(False, entry) - assert len(cache) == 3 - assert id(cache._cache) == id(entry) - assert cache.get("key") == "value" - assert cache.get("key2") == ["value21", "value22"] - - assert cache.copy() == {'key': 'value', 'key2': ['value21', 'value22']} - - def test_i_can_put_and_retrieve_values_from_inc_cache(self): - cache = IncCache() - - assert cache.get("key") == 1 - assert cache.get("key") == 2 - assert cache.get("key") == 3 - assert cache.get("key2") == 1 - assert cache.get("key2") == 2 - - cache.put("key", 100) - assert cache.get("key") == 101 - - assert cache.copy() == {'key': 101, 'key2': 2} - - @pytest.mark.parametrize("key", [ - None, - "something" - ]) - def test_keys_have_constraints_when_dictionary_cache(self, key): - cache = DictionaryCache() - with pytest.raises(KeyError): - cache.put(key, None) - - @pytest.mark.parametrize("value", [ - None, - "something" - ]) - def test_values_have_constraints_when_dictionary_cache(self, value): - cache = DictionaryCache() - with pytest.raises(ValueError): - cache.put(True, value) - - def test_i_can_append_to_a_dictionary_cache_even_if_it_s_new(self): - cache = DictionaryCache() - - entry = {"key": "value", "key2": ["value21", "value22"]} - cache.put(True, entry) - assert len(cache) == 3 - assert id(cache._cache) != id(entry) - assert cache.get("key") == "value" - assert cache.get("key2") == ["value21", "value22"] - def test_i_can_update_from_simple_cache(self): cache = Cache() cache.put("key", "value") @@ -221,98 +131,43 @@ class TestCache(TestUsingMemoryBasedSheerka): assert len(cache._cache) == 1 assert len(cache) == 1 assert cache.get("key") == "new_value" + assert cache.to_add == {"key"} + assert cache.to_remove == set() + 
cache.reset_events() cache.update("key", "new_value", "another_key", "another_value") assert len(cache._cache) == 1 assert len(cache) == 1 - assert cache.get("key") is None + assert cache.get("key") is NotFound assert cache.get("another_key") == "another_value" + assert cache.to_add == {"another_key"} + assert cache.to_remove == {"key"} with pytest.raises(KeyError): cache.update("wrong key", "value", "key", "value") - def test_i_can_update_from_list_cache(self): - cache = ListCache() + def test_i_can_update_when_alt_sdp_same_keys(self): + cache = Cache(default=lambda sdp, key: sdp.get("cache_name", key), + extend_exists=lambda sdp, key: sdp.exists("cache_name", key), + sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)) cache.put("key", "value") - cache.put("key", "value2") - cache.put("key", "value") - cache.update("key", "value", "key", "another value") + cache.update("key", "value", "key", "new_value", FakeSdp(extend_exists=lambda cache_name, key: True)) - assert len(cache._cache) == 1 - assert len(cache) == 3 - assert cache.get("key") == ["another value", "value2", "value"] # only the first one is affected + assert cache.get("key") == "new_value" - cache.update("key", "value2", "key2", "value2") - assert len(cache._cache) == 2 - assert len(cache) == 3 - assert cache.get("key") == ["another value", "value"] - assert cache.get("key2") == ["value2"] - - cache.update("key2", "value2", "key3", "value2") - assert len(cache._cache) == 2 - assert len(cache) == 3 - assert cache.get("key") == ["another value", "value"] - assert cache.get("key3") == ["value2"] - assert cache.get("key2") is None - - with pytest.raises(KeyError): - cache.update("wrong key", "value", "key", "value") - - def test_i_can_update_from_list_if_needed_cache(self): - cache = ListIfNeededCache() + def test_i_can_update_when_alt_sdp_different_keys(self): + cache = Cache(default=lambda sdp, key: sdp.get("cache_name", key), + extend_exists=lambda sdp, key: sdp.exists("cache_name", key), + 
sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)) cache.put("key", "value") - cache.put("key", "value2") - cache.put("key", "value") - cache.update("key", "value", "key", "another value") + cache.update("key", "value", "key2", "value2", FakeSdp(extend_exists=lambda cache_name, key: True)) - assert len(cache._cache) == 1 - assert len(cache) == 3 - assert cache.get("key") == ["another value", "value2", "value"] # only the first one is affected - - cache.update("key", "value2", "key2", "value2") - assert len(cache._cache) == 2 - assert len(cache) == 3 - assert cache.get("key") == ["another value", "value"] assert cache.get("key2") == "value2" - - cache.update("key2", "value2", "key3", "value2") - assert len(cache._cache) == 2 - assert len(cache) == 3 - assert cache.get("key") == ["another value", "value"] - assert cache.get("key3") == "value2" - assert cache.get("key2") is None - - with pytest.raises(KeyError): - cache.update("wrong key", "value", "key", "value") - - def test_i_can_update_from_set_cache(self): - cache = SetCache() - - cache.put("key", "value") - cache.put("key", "value2") - cache.update("key", "value", "key", "another value") - - assert len(cache._cache) == 1 - assert len(cache) == 2 - assert cache.get("key") == {"another value", "value2"} - - cache.update("key", "value2", "key2", "value2") - assert len(cache._cache) == 2 - assert len(cache) == 2 - assert cache.get("key") == {"another value"} - assert cache.get("key2") == {"value2"} - - cache.update("key", "another value", "key3", "another value") - assert len(cache._cache) == 2 - assert len(cache) == 2 - assert cache.get("key") is None - assert cache.get("key2") == {"value2"} - assert cache.get("key3") == {"another value"} - - with pytest.raises(KeyError): - cache.update("wrong key", "value", "key", "value") + assert cache.get("key") == Removed + assert cache.to_add == {"key", "key2"} + assert cache.to_remove == set() @pytest.mark.parametrize("cache", [ Cache(), ListCache(), 
ListIfNeededCache(), SetCache(), IncCache() @@ -365,21 +220,19 @@ class TestCache(TestUsingMemoryBasedSheerka): cache.put("key", "value") assert cache.exists("key") - def test_exists_in_dictionary_cache(self): - cache = DictionaryCache() - assert not cache.exists("key") - - cache.put(True, {"key": "value"}) - assert cache.exists("key") - def test_exists_extend(self): cache = Cache(extend_exists=lambda k: True if k == "special_key" else False) assert not cache.exists("key") assert cache.exists("special_key") + def test_i_can_extend_exists_when_internal_sdp(self): + cache = Cache(extend_exists=lambda sdp, k: True if k == "special_key" else False, sdp=FakeSdp) + assert not cache.exists("key") + assert cache.exists("special_key") + def test_add_concept_fills_all_dependent_caches(self): sheerka, context, one, two, two_2, three = self.init_concepts("one", "two", Concept("two"), "three") - cache_manager = CacheManager(None) + cache_manager = CacheManager(cache_only=True, sdp=None) cache_manager.register_concept_cache("by_id", Cache(), lambda obj: obj.id, True) cache_manager.register_concept_cache("by_name", ListCache(), lambda obj: obj.name, True) @@ -412,53 +265,13 @@ class TestCache(TestUsingMemoryBasedSheerka): assert cache_manager.get("by_name", "two") == [two, two_2] assert cache_manager.get("by_name2", "two") == [two, two_2] - def test_default_for_dictionary_cache(self): - cache = DictionaryCache(default={"key": "value", "key2": "value2"}) - - assert cache.get("key") == "value" - assert "key2" in cache - assert len(cache) == 2 - - cache.clear() - assert cache.get("key3") is None - assert len(cache) == 2 - assert "key" in cache - assert "key2" in cache - - # default is not modified - cache._cache["key"] = "another value" # operation that is normally not possible - cache.clear() - assert cache.get("key") == "value" - - def test_default_callable_for_dictionary_cache(self): - cache = DictionaryCache(default=lambda k: {"key": "value", "key2": "value2"}) - - assert 
cache.get("key") == "value" - assert "key2" in cache - assert len(cache) == 2 - - cache.clear() - assert cache.get("key3") is None - assert len(cache) == 2 - assert "key" in cache - assert "key2" in cache - - def test_dictionary_cache_cannot_be_null(self): - cache = DictionaryCache(default=lambda k: None) - assert cache.get("key") is None - assert cache._cache == {} - - cache = DictionaryCache(default=None) - assert cache.get("key") is None - assert cache._cache == {} - @pytest.mark.parametrize("cache, default, new_value, expected", [ - (ListCache(), lambda k: None, "value", ["value"]), + (ListCache(), lambda k: NotFound, "value", ["value"]), (ListCache(), lambda k: ["value"], "value", ["value", "value"]), - (ListIfNeededCache(), lambda k: None, "value", "value"), + (ListIfNeededCache(), lambda k: NotFound, "value", "value"), (ListIfNeededCache(), lambda k: "value", "value1", ["value", "value1"]), (ListIfNeededCache(), lambda k: ["value1", "value2"], "value1", ["value1", "value2", "value1"]), - (SetCache(), lambda k: None, "value", {"value"}), + (SetCache(), lambda k: NotFound, "value", {"value"}), (SetCache(), lambda k: {"value"}, "value", {"value"}), (SetCache(), lambda k: {"value1"}, "value2", {"value1", "value2"}), ]) @@ -469,7 +282,7 @@ class TestCache(TestUsingMemoryBasedSheerka): assert cache.get("key") == expected def test_default_is_called_before_updating_simple_cache(self): - cache = Cache(default=lambda k: None) + cache = Cache(default=lambda k: NotFound) with pytest.raises(KeyError): cache.update("old_key", "old_value", "new_key", "new_value") @@ -477,180 +290,70 @@ class TestCache(TestUsingMemoryBasedSheerka): cache.update("old_key", "old_value", "new_key", "new_value") assert cache.get("new_key") == "new_value" - def test_default_is_called_before_updating_list_cache(self): - cache = ListCache(default=lambda k: None) - with pytest.raises(KeyError): - cache.update("old_key", "old_value", "new_key", "new_value") - - cache = ListCache(default=lambda k: 
["old_value", "other old value"]) - cache.update("old_key", "old_value", "old_key", "new_value") - assert cache.get("old_key") == ["new_value", "other old value"] - - cache = ListCache(default=lambda k: ["old_value", "other old value"] if k == "old_key" else None) - cache.update("old_key", "old_value", "new_key", "new_value") - assert cache.get("old_key") == ["other old value"] - assert cache.get("new_key") == ["new_value"] - - cache = ListCache(default=lambda k: ["old_value", "other old value"] if k == "old_key" else ["other new"]) - cache.update("old_key", "old_value", "new_key", "new_value") - assert cache.get("old_key") == ["other old value"] - assert cache.get("new_key") == ["other new", "new_value"] - - def test_default_is_called_before_updating_list_if_needed_cache(self): - cache = ListIfNeededCache(default=lambda k: None) - with pytest.raises(KeyError): - cache.update("old_key", "old_value", "new_key", "new_value") - - cache = ListIfNeededCache(default=lambda k: "old_value") - cache.update("old_key", "old_value", "old_key", "new_value") - assert cache.get("old_key") == "new_value" - - cache = ListIfNeededCache(default=lambda k: ["old_value", "other old value"]) - cache.update("old_key", "old_value", "old_key", "new_value") - assert cache.get("old_key") == ["new_value", "other old value"] - - cache = ListIfNeededCache(default=lambda k: ["old_value", "other old value"] if k == "old_key" else None) - cache.update("old_key", "old_value", "new_key", "new_value") - assert cache.get("old_key") == ["other old value"] - assert cache.get("new_key") == "new_value" - - def test_default_is_called_before_updating_set_cache(self): - cache = SetCache(default=lambda k: None) - with pytest.raises(KeyError): - cache.update("old_key", "old_value", "new_key", "new_value") - - cache = SetCache(default=lambda k: {"old_value", "other old value"}) - cache.update("old_key", "old_value", "old_key", "new_value") - assert cache.get("old_key") == {"new_value", "other old value"} - - 
cache = SetCache(default=lambda k: {"old_value", "other old value"} if k == "old_key" else None) - cache.update("old_key", "old_value", "new_key", "new_value") - assert cache.get("old_key") == {"other old value"} - assert cache.get("new_key") == {"new_value"} - - cache = SetCache(default=lambda k: {"old_value", "other old value"} if k == "old_key" else {"other new"}) - cache.update("old_key", "old_value", "new_key", "new_value") - assert cache.get("old_key") == {"other old value"} - assert cache.get("new_key") == {"other new", "new_value"} - def test_i_can_delete_an_entry_from_cache(self): cache = Cache() cache.put("key", "value") assert cache.get("key") == "value" cache.delete("key") - assert cache.get("key") is None + assert cache.get("key") is NotFound assert cache.to_remove == {"key"} - def test_i_can_delete_values_from_set_cache(self): - cache = SetCache() - cache.put("key", "value1") - cache.put("key", "value2") - cache.reset_events() + def test_i_can_delete_when_entry_is_only_in_db(self): + cache = Cache(default=lambda k: "value" if k == 'key' else NotFound) - cache.delete("key", "fake_value") - assert cache.get("key") == {"value1", "value2"} - assert len(cache) == 2 + cache.delete("another_key") + assert cache.copy() == {} assert cache.to_add == set() assert cache.to_remove == set() - cache.delete("key", "value1") - assert cache.get("key") == {"value2"} + cache.delete("key") + assert cache.copy() == {} + assert cache.to_add == set() + assert cache.to_remove == {"key"} + + def test_i_can_delete_an_entry_from_cache_when_alt_sdp_and_value_in_cache(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = Cache(extend_exists=lambda sdp, k: sdp.exists("cache_name", k)) + cache.put("key", "value") + + cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + assert cache.copy() == {"key": Removed} assert cache.to_add == {"key"} - 
assert len(cache) == 1 - - cache.delete("key", "value2") - assert cache.get("key") is None - assert cache.to_remove == {"key"} - assert len(cache) == 0 - - def test_i_can_delete_key_from_set_cache(self): - cache = SetCache() - cache.put("key", "value1") - cache.put("key", "value2") - - cache.delete("key") - assert cache.get("key") is None - assert cache.to_remove == {"key"} - assert len(cache) == 0 - - def test_i_can_delete_a_key_that_does_not_exists(self): - cache = SetCache() - cache.delete("key") - - assert cache.to_add == set() assert cache.to_remove == set() - def test_i_can_delete_from_a_key_from_list_id_needed(self): - cache = ListIfNeededCache() - cache.put("key", "value1") - cache.put("key", "value11") - cache.put("key2", "value2") - cache.put("key2", "value22") - cache.put("key2", "value222") - cache.put("key3", "value3") - cache.put("key3", "value33") - cache.put("key4", "value4") - cache.reset_events() + def test_i_can_delete_an_entry_from_cache_when_alt_sdp_when_in_remote_repository(self): + # There is a value in alt_cache_manager, + # No remaining value in current cache after deletion + # The key must be flagged as Removed + cache = Cache(default=lambda k: "value", extend_exists=lambda sdp, k: sdp.exists("cache_name", k)) - assert len(cache) == 8 - - # I can remove a whole key - cache.delete("key") - assert cache.get("key") is None - assert len(cache) == 6 - assert cache.to_remove == {"key"} - assert cache.to_add == set() - - # I can remove an element while a list is remaining - cache.reset_events() - cache.delete("key2", "value22") - assert cache.get("key2") == ["value2", "value222"] - assert len(cache) == 5 - assert cache.to_add == {"key2"} + cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + assert cache.copy() == {"key": Removed} + assert cache.to_add == {"key"} assert cache.to_remove == set() - # I can remove an element while a single element is remaining - cache.reset_events() - cache.delete("key3", 
"value33") - assert cache.get("key3") == "value3" - assert len(cache) == 4 - assert cache.to_add == {"key3"} + def test_i_can_delete_an_entry_from_cache_when_alt_sdp_and_no_value_in_cache_or_remote_repository(self): + # alt_cache_manager is used when no value found + cache = Cache(default=lambda sdp, k: sdp.get("cache_name", k), + extend_exists=lambda sdp, k: sdp.exists("cache_name", k), + sdp=FakeSdp(get_value=lambda entry, k: NotFound)) + + cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True)) + assert cache.copy() == {"key": Removed} + assert cache.to_add == {"key"} assert cache.to_remove == set() - # I can remove an element while nothing remains - cache.reset_events() - cache.delete("key4", "value4") - assert cache.get("key4") is None - assert len(cache) == 3 - assert cache.to_remove == {"key4"} - assert cache.to_add == set() + def test_no_error_when_deleting_a_key_that_does_not_exists_when_alt_sdp(self): + # alt_cache_manager is used when no value found + cache = Cache(default=lambda sdp, k: sdp.get("cache_name", k), + extend_exists=lambda sdp, k: sdp.exists("cache_name", k), + sdp=FakeSdp(get_value=lambda entry, k: NotFound)) - # I do not remove when the value is not the same - cache.reset_events() - cache.delete("key3", "value33") # value33 was already remove - assert cache.get("key3") == "value3" - assert len(cache) == 3 - assert cache.to_add == set() - assert cache.to_remove == set() - - def test_deleting_a_list_if_need_entry_that_does_not_exist_is_not_an_error(self): - cache = ListIfNeededCache() - cache.put("key", "value1") - - cache.reset_events() - cache.delete("key3") - assert len(cache) == 1 - assert cache.to_add == set() - assert cache.to_remove == set() - - cache.delete("key3", "value") - assert len(cache) == 1 - assert cache.to_add == set() - assert cache.to_remove == set() - - cache.delete("key", "value2") - assert len(cache) == 1 + cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda 
cache_name, key: False)) + assert cache.copy() == {} assert cache.to_add == set() assert cache.to_remove == set() @@ -689,6 +392,38 @@ class TestCache(TestUsingMemoryBasedSheerka): assert cache.get("2") == ("2", "2") assert cache.get("3") == ("3", "3") + assert cache.to_add == {"1", "2", "3"} + assert cache.to_remove == set() + + def test_i_can_populate_using_internal_sdp(self): + items = [("1", "1"), ("2", "2"), ("3", "3")] + cache = Cache(sdp=FakeSdp(populate=items)) + + cache.populate(lambda sdp: sdp.populate(), lambda item: item[0]) + + assert len(cache) == 3 + assert cache.get("1") == ("1", "1") + assert cache.get("2") == ("2", "2") + assert cache.get("3") == ("3", "3") + + assert cache.to_add == {"1", "2", "3"} + assert cache.to_remove == set() + + def test_i_can_reset_the_event_after_populate(self): + items = [("1", "1"), ("2", "2"), ("3", "3")] + cache = Cache() + cache.to_add = {"some_value"} + cache.to_remove = {"some_other_value"} + + cache.populate(lambda: items, lambda item: item[0], reset_events=True) + + assert len(cache) == 3 + assert cache.copy() == {"1": ("1", "1"), + "2": ("2", "2"), + "3": ("3", "3")} + assert cache.to_add == {"some_value"} + assert cache.to_remove == {"some_other_value"} + def test_max_size_is_respected_when_populate(self): items = [("1", "1"), ("2", "2"), ("3", "3"), ("4", "4"), ("5", "5")] cache = Cache(max_size=3) @@ -709,3 +444,61 @@ class TestCache(TestUsingMemoryBasedSheerka): res = cache.get_all() assert len(res) == 3 assert list(res) == [('1', '1'), ('2', '2'), ('3', '3')] + + def test_i_can_clone_cache(self): + cache = Cache(max_size=256, + default=lambda sdp, key: sdp.get("cache_name", key), + extend_exists=False, + alt_sdp_get=lambda sdp, key: sdp.alt_get("cache_name", key), + sdp=FakeSdp(get_value=lambda entry, key: key + "_not_found")) + cache.put("key1", "value1") + cache.put("key2", "value2") + + clone = cache.clone() + assert type(cache) == type(clone) + assert clone._max_size == cache._max_size + assert 
clone._default == cache._default + assert clone._extend_exists == cache._extend_exists + assert clone._alt_sdp_get == cache._alt_sdp_get + assert clone._sdp == cache._sdp + assert clone._cache == {} # value are not copied + assert clone._initialized_keys == set() + assert clone._current_size == 0 + assert clone.to_add == set() + assert clone.to_remove == set() + + clone.configure(sdp=FakeSdp(lambda entry, key: key + " found !")) + + assert cache.get("key3") == "key3_not_found" + assert clone.get("key3") == "key3 found !" + + @pytest.mark.parametrize("cache", [ + Cache(), + DictionaryCache(), + IncCache(), + ListCache(), + ListIfNeededCache() + ]) + def test_i_can_clone_all_caches(self, cache): + clone = cache.clone() + assert type(clone) == type(cache) + + def test_sanity_check_on_list_if_needed_cache(self): + cache = ListIfNeededCache() + clone = cache.clone() + + clone.put("key", "value1") + clone.put("key", "value2") + + assert clone.get("key") == ["value1", "value2"] + + def test_i_can_clear_when_alt_sdp(self): + cache = Cache().auto_configure("cache_name") + + cache.put("key1", "value1") + cache.put("key2", "value2") + + cache.clear() + + assert cache.copy() == {} + assert cache._is_cleared diff --git a/tests/cache/test_cache_manager.py b/tests/cache/test_cache_manager.py deleted file mode 100644 index 82d7224..0000000 --- a/tests/cache/test_cache_manager.py +++ /dev/null @@ -1,157 +0,0 @@ -import pytest -from cache.Cache import Cache -from cache.CacheManager import CacheManager, ConceptNotFound -from cache.DictionaryCache import DictionaryCache -from cache.ListCache import ListCache -from cache.ListIfNeededCache import ListIfNeededCache -from core.concept import Concept - -from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka - - -class TestCacheManager(TestUsingMemoryBasedSheerka): - def test_i_do_not_push_into_sdp_when_cache_only(self): - sheerka = self.get_sheerka() - context = self.get_context(sheerka) - - cache_manager = 
CacheManager(True) - cache_manager.register_cache("test", Cache(), persist=True) - cache_manager.put("test", "key", "value") - - cache_manager.commit(context) - assert not sheerka.sdp.exists("test", "key") - - def test_i_do_not_get_value_from_sdp_when_cache_only_is_true(self): - sheerka = self.get_sheerka() - context = self.get_context(sheerka) - with sheerka.sdp.get_transaction(context.event) as transaction: - transaction.add("test", "key", "value") - - cache = Cache(default=lambda k: sheerka.sdp.get("test", k)) - - cache_manager = CacheManager(True) - cache_manager.register_cache("test", cache, persist=True) - - assert cache_manager.get("test", "key") is None - - def test_i_can_get_value_from_sdp_when_cache_only_is_false(self): - sheerka = self.get_sheerka() - context = self.get_context(sheerka) - with sheerka.sdp.get_transaction(context.event) as transaction: - transaction.add("test", "key", "value") - - cache = Cache(default=lambda k: sheerka.sdp.get("test", k)) - - cache_manager = CacheManager(False) - cache_manager.register_cache("test", cache, persist=True) - - assert cache_manager.get("test", "key") == "value" - - def test_i_can_commit_simple_cache(self): - sheerka = self.get_sheerka() - context = self.get_context(sheerka) - - cache_manager = CacheManager(False) - cache_manager.register_cache("test", Cache(), persist=True) - cache = cache_manager.caches["test"].cache - - cache_manager.put("test", "key", "value") - - cache_manager.commit(context) - assert sheerka.sdp.get("test", "key") == "value" - - cache.update("key", "value", "key", "another_value") - cache_manager.commit(context) - assert sheerka.sdp.get("test", "key") == "another_value" - - cache.update("key", "another_value", "key2", "another_value") - cache_manager.commit(context) - assert sheerka.sdp.get("test", "key") is None - assert sheerka.sdp.get("test", "key2") == "another_value" - - def test_i_can_commit_list_cache(self): - sheerka = self.get_sheerka() - context = self.get_context(sheerka) - - 
cache_manager = CacheManager(False) - cache_manager.register_cache("test", ListCache(), persist=True) - cache = cache_manager.caches["test"].cache - - cache.put("key", "value") - cache_manager.commit(context) - assert sheerka.sdp.get("test", "key") == ["value"] - - cache.put("key", "value2") - cache_manager.commit(context) - assert sheerka.sdp.get("test", "key") == ["value", "value2"] - - cache.update("key", "value2", "key2", "value2") - cache_manager.commit(context) - assert sheerka.sdp.get("test", "key") == ["value"] - assert sheerka.sdp.get("test", "key2") == ["value2"] - - cache.update("key2", "value2", "key3", "value2") - cache_manager.commit(context) - assert sheerka.sdp.get("test", "key") == ["value"] - assert sheerka.sdp.get("test", "key2") is None - assert sheerka.sdp.get("test", "key3") == ["value2"] - - def test_i_can_commit_dictionary_cache(self): - sheerka = self.get_sheerka() - context = self.get_context(sheerka) - - cache_manager = CacheManager(False) - cache_manager.register_cache("test", DictionaryCache(), persist=True) - cache = cache_manager.caches["test"].cache - - cache.put(False, {"key": "value", "key2": "value2"}) - cache_manager.commit(context) - assert sheerka.sdp.get("test") == {"key": "value", "key2": "value2"} - assert sheerka.sdp.get("test", "key") == "value" - - cache.put(False, {"key": "value", "key2": "value2", "key3": "value3"}) - cache_manager.commit(context) - assert sheerka.sdp.get("test") == {"key": "value", "key2": "value2", "key3": "value3"} - - def test_i_can_remove_a_concept_from_concepts_caches(self): - cache_manager = CacheManager(True) - cache_manager.register_concept_cache("id", Cache(), lambda c: c.id, True) - cache_manager.register_concept_cache("key", ListIfNeededCache(), lambda c: c.key, True) - - sheerka, context, one, two, three, two_bis = self.init_concepts("one", "two", "three", Concept("two", body="2")) - - for concept in [one, two, three, two_bis]: - cache_manager.add_concept(concept) - - # sanity check - 
cache_def = cache_manager.caches["id"] - assert cache_def.cache.copy() == {one.id: one, two.id: two, three.id: three, two_bis.id: two_bis} - cache_def = cache_manager.caches["key"] - assert cache_def.cache.copy() == {one.key: one, two.key: [two, two_bis], three.key: three} - - for cache_name in cache_manager.concept_caches: - cache_manager.caches[cache_name].cache.reset_events() - - cache_manager.remove_concept(sheerka.new(("two", two_bis.id))) - - cache_def = cache_manager.caches["id"] - assert cache_def.cache.copy() == {one.id: one, two.id: two, three.id: three} - assert cache_def.cache.to_remove == {two_bis.id} - assert cache_def.cache.to_add == set() - assert len(cache_def.cache) == 3 - - cache_def = cache_manager.caches["key"] - assert cache_def.cache.copy() == {one.key: one, two.key: two, three.key: three} - assert cache_def.cache.to_remove == set() - assert cache_def.cache.to_add == {"two"} - assert len(cache_def.cache) == 3 - - def test_i_cannot_remove_a_concept_that_does_not_exists(self): - cache_manager = CacheManager(True) - cache_manager.register_concept_cache("id", Cache(), lambda c: c.id, True) - cache_manager.register_concept_cache("key", ListIfNeededCache(), lambda c: c.key, True) - - with pytest.raises(ConceptNotFound) as ex: - cache_manager.remove_concept(Concept("foo", id="1001")) - - assert ex.value.concept == Concept("foo", id="1001") diff --git a/tests/core/test_SheerkaAdmin.py b/tests/core/test_SheerkaAdmin.py index 60cf184..11b3cfa 100644 --- a/tests/core/test_SheerkaAdmin.py +++ b/tests/core/test_SheerkaAdmin.py @@ -1,9 +1,17 @@ +from core.builtin_concepts_ids import BuiltinConcepts from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka class TestSheerkaAdmin(TestUsingMemoryBasedSheerka): - def test_i_can_get_last_ret(self): - pass + def test_i_can_get_concepts(self): + sheerka = self.get_sheerka(cache_only=False, singleton=False) + + res = sheerka.concepts() + concepts = list(res.body) + + assert sheerka.isinstance(res, 
BuiltinConcepts.TO_LIST) + assert concepts[0] == sheerka + assert concepts[1].id == "2" # def test_i_can_get_last_error_ret(self): # sheerka, context = self.init_concepts() diff --git a/tests/core/test_SheerkaComparisonManager.py b/tests/core/test_SheerkaComparisonManager.py index 9e6aa98..f68ebe7 100644 --- a/tests/core/test_SheerkaComparisonManager.py +++ b/tests/core/test_SheerkaComparisonManager.py @@ -1,7 +1,8 @@ import pytest from core.builtin_concepts import BuiltinConcepts from core.concept import Concept -from core.global_symbols import EVENT_CONCEPT_PRECEDENCE_MODIFIED, CONCEPT_COMPARISON_CONTEXT, EVENT_RULE_PRECEDENCE_MODIFIED, \ +from core.global_symbols import EVENT_CONCEPT_PRECEDENCE_MODIFIED, CONCEPT_COMPARISON_CONTEXT, \ + EVENT_RULE_PRECEDENCE_MODIFIED, \ RULE_COMPARISON_CONTEXT from core.sheerka.services.SheerkaComparisonManager import SheerkaComparisonManager, ComparisonObj @@ -33,36 +34,37 @@ class TestSheerkaGreaterThanManager(TestUsingMemoryBasedSheerka): assert res.status assert sheerka.isinstance(res.body, BuiltinConcepts.SUCCESS) - in_cache = sheerka.cache_manager.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + in_cache = sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_cache == [ComparisonObj(context.event.get_digest(), "prop_name", two.str_id, one.str_id, ">", "#")] - weighted = sheerka.cache_manager.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") + weighted = sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") assert weighted == {"c:one|1001:": 1, "c:two|1002:": 2} # I can commit - sheerka.cache_manager.commit(context) - in_db = sheerka.sdp.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + sheerka.om.commit(context) + in_db = sheerka.om.current_sdp().get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_db == [ComparisonObj(context.event.get_digest(), "prop_name", two.str_id, one.str_id, ">", "#")] def 
test_i_can_add_is_greater_than_for_rules(self): - sheerka, context, r1, r2 = self.init_format_rules(("True", "true"), ("False", "false"), - cache_only=False, - compile_rule=False) + sheerka, context, r1, r2 = self.init_test(cache_only=False).with_rules(("True", "true"), + ("False", "false"), + compile_rule=False).unpack() + service = sheerka.services[SheerkaComparisonManager.NAME] res = service.set_is_greater_than(context, "prop_name", r2, r1) assert res.status assert sheerka.isinstance(res.body, BuiltinConcepts.SUCCESS) - in_cache = sheerka.cache_manager.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + in_cache = sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_cache == [ComparisonObj(context.event.get_digest(), "prop_name", r2.str_id, r1.str_id, ">", "#")] - weighted = sheerka.cache_manager.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") - assert weighted == {"r:|1:": 1, "r:|2:": 2} + weighted = sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") + assert weighted == {r1.str_id: 1, r2.str_id: 2} # I can commit - sheerka.cache_manager.commit(context) - in_db = sheerka.sdp.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + sheerka.om.commit(context) + in_db = sheerka.om.current_sdp().get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_db == [ComparisonObj(context.event.get_digest(), "prop_name", r2.str_id, r1.str_id, ">", "#")] def test_i_can_add_a_is_less_than(self): @@ -73,36 +75,38 @@ class TestSheerkaGreaterThanManager(TestUsingMemoryBasedSheerka): assert res.status assert sheerka.isinstance(res.body, BuiltinConcepts.SUCCESS) - in_cache = sheerka.cache_manager.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + in_cache = sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_cache == [ComparisonObj(context.event.get_digest(), "prop_name", one.str_id, two.str_id, "<", "#")] - weighted = 
sheerka.cache_manager.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") + weighted = sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") assert weighted == {"c:one|1001:": 1, "c:two|1002:": 2} # I can commit - sheerka.cache_manager.commit(context) - in_db = sheerka.sdp.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + sheerka.om.commit(context) + in_db = sheerka.om.current_sdp().get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_db == [ComparisonObj(context.event.get_digest(), "prop_name", one.str_id, two.str_id, "<", "#")] def test_i_can_add_is_less_than_for_rules(self): - sheerka, context, r1, r2 = self.init_format_rules(("True", "true"), ("False", "false"), - cache_only=False, - compile_rule=False) + sheerka, context, r1, r2 = self.init_test(cache_only=False).with_rules(("True", "true"), + ("False", "false"), + compile_rule=False, + create_new=True).unpack() + service = sheerka.services[SheerkaComparisonManager.NAME] res = service.set_is_less_than(context, "prop_name", r1, r2) assert res.status assert sheerka.isinstance(res.body, BuiltinConcepts.SUCCESS) - in_cache = sheerka.cache_manager.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + in_cache = sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_cache == [ComparisonObj(context.event.get_digest(), "prop_name", r1.str_id, r2.str_id, "<", "#")] - weighted = sheerka.cache_manager.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") - assert weighted == {"r:|1:": 1, "r:|2:": 2} + weighted = sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") + assert weighted == {r1.str_id: 1, r2.str_id: 2} # I can commit - sheerka.cache_manager.commit(context) - in_db = sheerka.sdp.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + sheerka.om.commit(context) + in_db = sheerka.om.current_sdp().get(SheerkaComparisonManager.COMPARISON_ENTRY, 
"prop_name|#") assert in_db == [ComparisonObj(context.event.get_digest(), "prop_name", r1.str_id, r2.str_id, "<", "#")] def test_i_can_add_multiples_constraints(self): @@ -112,31 +116,31 @@ class TestSheerkaGreaterThanManager(TestUsingMemoryBasedSheerka): service.set_is_greater_than(context, "prop_name", two, one) service.set_is_greater_than(context, "prop_name", three, two) - in_cache = sheerka.cache_manager.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + in_cache = sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_cache == [ ComparisonObj(context.event.get_digest(), "prop_name", two.str_id, one.str_id, ">", "#"), ComparisonObj(context.event.get_digest(), "prop_name", three.str_id, two.str_id, ">", "#") ] # I can commit - sheerka.cache_manager.commit(context) - in_db = sheerka.sdp.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + sheerka.om.commit(context) + in_db = sheerka.om.current_sdp().get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_db == [ ComparisonObj(context.event.get_digest(), "prop_name", two.str_id, one.str_id, ">", "#"), ComparisonObj(context.event.get_digest(), "prop_name", three.str_id, two.str_id, ">", "#") ] - sheerka.cache_manager.clear(SheerkaComparisonManager.COMPARISON_ENTRY) # reset the cache + sheerka.om.clear(SheerkaComparisonManager.COMPARISON_ENTRY) # reset the cache service.set_is_greater_than(context, "prop_name", four, three) - in_cache = sheerka.cache_manager.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + in_cache = sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_cache == [ ComparisonObj(context.event.get_digest(), "prop_name", two.str_id, one.str_id, ">", "#"), ComparisonObj(context.event.get_digest(), "prop_name", three.str_id, two.str_id, ">", "#"), ComparisonObj(context.event.get_digest(), "prop_name", four.str_id, three.str_id, ">", "#"), ] - weighted = 
sheerka.cache_manager.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") + weighted = sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") assert weighted == {"c:one|1001:": 1, "c:two|1002:": 2, "c:three|1003:": 3, "c:four|1004:": 4} def test_i_can_add_multiple_constraints_2(self): @@ -146,23 +150,54 @@ class TestSheerkaGreaterThanManager(TestUsingMemoryBasedSheerka): service.set_is_greater_than(context, "prop_name", two, one) service.set_is_greater_than(context, "prop_name", three, two) - weighted = sheerka.cache_manager.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") + weighted = sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") assert weighted == {"c:one|1001:": 1, "c:two|1002:": 2, "c:three|1003:": 3} service.set_is_greater_than(context, "prop_name", three, one) # should not change order - weighted = sheerka.cache_manager.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") + weighted = sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") assert weighted == {"c:one|1001:": 1, "c:two|1002:": 2, "c:three|1003:": 3} - def test_i_lesser_than_and_opposite_greater_than(self): + def test_i_can_add_lesser_than_and_opposite_greater_than(self): sheerka, context, one, two = self.init_concepts("one", "two") service = sheerka.services[SheerkaComparisonManager.NAME] service.set_is_greater_than(context, "prop_name", two, one) service.set_is_less_than(context, "prop_name", one, two) - weighted = sheerka.cache_manager.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") + weighted = sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") assert weighted == {"c:one|1001:": 1, "c:two|1002:": 2} + def test_i_can_support_multiple_ontology_layers(self): + sheerka, context, one, two, three = self.init_concepts("one", "two", "three", cache_only=False) + service = 
sheerka.services[SheerkaComparisonManager.NAME] + + service.set_is_greater_than(context, "prop_name", two, one) + + # sanity check + expected_in_cache = [ComparisonObj(context.event.get_digest(), "prop_name", two.str_id, one.str_id, ">", "#")] + assert sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") == expected_in_cache + expected_weights = {"c:one|1001:": 1, "c:two|1002:": 2} + assert sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") == expected_weights + + # I still can access to the previous values + sheerka.push_ontology(context, "new ontology") + assert sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") == expected_in_cache + assert sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") == expected_weights + + # I can modify + service.set_is_greater_than(context, "prop_name", three, two) + expected_in_cache2 = [ + ComparisonObj(context.event.get_digest(), "prop_name", two.str_id, one.str_id, ">", "#"), + ComparisonObj(context.event.get_digest(), "prop_name", three.str_id, two.str_id, ">", "#")] + assert sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") == expected_in_cache2 + expected_weights2 = {"c:one|1001:": 1, "c:two|1002:": 2, "c:three|1003:": 3} + assert sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") == expected_weights2 + + # I can retrieve the previous values + sheerka.pop_ontology() + assert sheerka.om.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") == expected_in_cache + assert sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") == expected_weights + @pytest.mark.parametrize("entries, expected", [ (["two > one"], {'c:one|1001:': 1, 'c:two|1002:': 2}), (["one < two"], {'c:one|1001:': 1, 'c:two|1002:': 2}), @@ -192,7 +227,7 @@ class TestSheerkaGreaterThanManager(TestUsingMemoryBasedSheerka): service = sheerka.services[SheerkaComparisonManager.NAME] 
service.set_is_lesser(context, "prop_name", one) - sheerka.cache_manager.clear(service.RESOLVED_COMPARISON_ENTRY) + sheerka.om.clear(service.RESOLVED_COMPARISON_ENTRY) assert service.get_concepts_weights("prop_name") == {"c:one|1001:": 0} @@ -244,7 +279,7 @@ class TestSheerkaGreaterThanManager(TestUsingMemoryBasedSheerka): service.set_is_greater_than(context, "prop_name", two, one) service.set_is_less_than(context, "prop_name", one, two) - weighted = sheerka.cache_manager.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") + weighted = sheerka.om.get(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY, "prop_name|#") assert weighted == {"c:one|1001:": 1, "c:two|1002:": 2} def test_methods_are_correctly_bound(self): @@ -263,8 +298,8 @@ class TestSheerkaGreaterThanManager(TestUsingMemoryBasedSheerka): assert service.get_concepts_weights("prop_name") == {"c:one|1001:": 0, "c:two|1002:": 1, "c:three|1003:": 2} # I can commit - sheerka.cache_manager.commit(context) - in_db = sheerka.sdp.get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") + sheerka.om.commit(context) + in_db = sheerka.om.current_sdp().get(SheerkaComparisonManager.COMPARISON_ENTRY, "prop_name|#") assert in_db == [ ComparisonObj(context.event.get_digest(), "prop_name", one.str_id, None, "<<", "#"), ComparisonObj(context.event.get_digest(), "prop_name", three.str_id, two.str_id, ">", "#") @@ -458,14 +493,12 @@ class TestSheerkaGreaterThanManager(TestUsingMemoryBasedSheerka): assert event_received def test_an_event_is_fired_when_modifying_rule_precedence(self): - sheerka, context, r1, r2 = self.init_format_rules( - ("True", "True"), - ("False", "False"), - compile_rule=False, - ) + sheerka, context, r1, r2 = self.init_test(cache_only=False).with_rules(("True", "true"), + ("False", "false"), + compile_rule=False).unpack() foo = Concept("foo") event_received = False - sheerka.cache_manager.clear(SheerkaComparisonManager.COMPARISON_ENTRY) + 
sheerka.om.clear(SheerkaComparisonManager.COMPARISON_ENTRY) def receive_event(c): nonlocal event_received diff --git a/tests/core/test_SheerkaConceptAlgebra.py b/tests/core/test_SheerkaConceptAlgebra.py index b5e3595..68b482e 100644 --- a/tests/core/test_SheerkaConceptAlgebra.py +++ b/tests/core/test_SheerkaConceptAlgebra.py @@ -61,15 +61,15 @@ class TestSheerkaConceptsAlgebra(TestUsingMemoryBasedSheerka): BuiltinConcepts.HASA: {hasa2}, } def test_i_can_recognize_myself_when_using_sdp_repository(self): - sheerka, context, foo, isa1, hasa1, = self.init_concepts("foo", "isa1", "has1", - cache_only=False, - create_new=True) - sheerka.cache_manager.commit(context) + sheerka, context, foo, isa1, hasa1, = self.init_test(cache_only=False). \ + with_concepts("foo", "isa1", "has1", create_new=True). \ + unpack() + sheerka.om.commit(context) new_foo = sheerka.new("foo") sheerka.set_isa(context, new_foo, isa1) sheerka.set_hasa(context, new_foo, hasa1) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) assert sheerka.recognize(new_foo, all_scores=True) == [ConceptScore(1, new_foo, new_foo)] diff --git a/tests/core/test_SheerkaConceptManager.py b/tests/core/test_SheerkaConceptManager.py index 17abbf3..e5feafb 100644 --- a/tests/core/test_SheerkaConceptManager.py +++ b/tests/core/test_SheerkaConceptManager.py @@ -1,8 +1,9 @@ import pytest from cache.CacheManager import ConceptNotFound from core.builtin_concepts import BuiltinConcepts -from core.concept import PROPERTIES_TO_SERIALIZE, Concept, DEFINITION_TYPE_DEF, get_concept_attrs, NotInit, \ +from core.concept import PROPERTIES_TO_SERIALIZE, Concept, DEFINITION_TYPE_DEF, get_concept_attrs, \ DEFINITION_TYPE_BNF +from core.global_symbols import NotInit, NotFound from core.sheerka.Sheerka import Sheerka from core.sheerka.services.SheerkaConceptManager import SheerkaConceptManager, NoModificationFound, ForbiddenAttribute, \ UnknownAttribute, CannotRemoveMeta, ValueNotFound, ConceptIsReferenced @@ -21,7 +22,7 @@ 
class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): service = sheerka.services[SheerkaConceptManager.NAME] res = sheerka.create_new_concept(context, concept) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) assert res.status assert sheerka.isinstance(res.value, BuiltinConcepts.NEW_CONCEPT) @@ -47,15 +48,15 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert sheerka.get_by_hash(concept.get_definition_hash()) == concept # I can get by the first entry - assert sheerka.cache_manager.get(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "+") == [concept.id] - assert sheerka.cache_manager.get(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "+") == [concept.id] + assert sheerka.om.get(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "+") == [concept.id] + assert sheerka.om.get(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "+") == [concept.id] # saved in sdp - assert sheerka.sdp.exists(service.CONCEPTS_BY_ID_ENTRY, concept.id) - assert sheerka.sdp.exists(service.CONCEPTS_BY_KEY_ENTRY, concept.key) - assert sheerka.sdp.exists(service.CONCEPTS_BY_NAME_ENTRY, concept.name) - assert sheerka.sdp.exists(service.CONCEPTS_BY_HASH_ENTRY, concept.get_definition_hash()) - assert sheerka.sdp.exists(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "+") + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_ID_ENTRY, concept.id) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_KEY_ENTRY, concept.key) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_NAME_ENTRY, concept.name) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_HASH_ENTRY, concept.get_definition_hash()) + assert sheerka.om.current_sdp().exists(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "+") def test_i_cannot_create_a_bnf_concept_that_references_a_concept_that_cannot_be_resolved(self): sheerka, context, one_1, one_1_0 = self.init_concepts(Concept("one", body="1"), Concept("one", body="1.0")) @@ -74,7 +75,7 @@ class 
TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): service = sheerka.services[SheerkaConceptManager.NAME] res = sheerka.create_new_concept(self.get_context(sheerka), concept) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) assert res.status assert sheerka.isinstance(res.value, BuiltinConcepts.NEW_CONCEPT) @@ -99,11 +100,11 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert sheerka.get_by_hash(concept.get_definition_hash()) == concept # saved in sdp - assert sheerka.sdp.exists(service.CONCEPTS_BY_ID_ENTRY, concept.id) - assert sheerka.sdp.exists(service.CONCEPTS_BY_KEY_ENTRY, concept.key) - assert sheerka.sdp.exists(service.CONCEPTS_BY_NAME_ENTRY, concept.name) - assert sheerka.sdp.exists(service.CONCEPTS_BY_HASH_ENTRY, concept.get_definition_hash()) - assert sheerka.sdp.exists(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "hello") + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_ID_ENTRY, concept.id) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_KEY_ENTRY, concept.key) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_NAME_ENTRY, concept.name) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_HASH_ENTRY, concept.get_definition_hash()) + assert sheerka.om.current_sdp().exists(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "hello") def test_i_cannot_add_the_same_concept_twice(self): """ @@ -184,55 +185,55 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert res.status # I can get by the first entry - assert sheerka.cache_manager.get(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [concept.id] - assert sheerka.cache_manager.get(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [concept.id] + assert sheerka.om.get(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [concept.id] + assert sheerka.om.get(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [concept.id] @pytest.mark.parametrize("expression", [ "--'filter' ('one' | 'two') ", "'--filter' ('one' 
| 'two') ", ]) def test_i_can_get_first_token_when_bnf_concept_and_not_a_letter(self, expression): - sheerka, context, bnf_concept = self.init_concepts( + sheerka, context, bnf_concept = self.init_test().with_concepts( Concept("foo", definition=expression), - create_new=True) + create_new=True).unpack() # I can get by the first entry - assert sheerka.cache_manager.get(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [bnf_concept.id] - assert sheerka.cache_manager.get(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [bnf_concept.id] + assert sheerka.om.get(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [bnf_concept.id] + assert sheerka.om.get(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [bnf_concept.id] def test_concept_references_are_updated_1(self): - sheerka, context, one, two, number, twenty, twenties = self.init_concepts( + sheerka, context, one, two, number, twenty, twenties = self.init_test().with_concepts( "one", "two", "number", "twenty", Concept("twenties", definition="twenty one | two 'hundred'"), create_new=True - ) + ).unpack() service = sheerka.services[SheerkaConceptManager.NAME] - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, one.id) == {twenties.id} - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, two.id) == {twenties.id} - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, number.id) is None - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, twenty.id) == {twenties.id} - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, twenties.id) is None + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, one.id) == {twenties.id} + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, two.id) == {twenties.id} + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, number.id) is NotFound + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, twenty.id) == {twenties.id} + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, 
twenties.id) is NotFound def test_concept_references_are_updated_2(self): - sheerka, context, one, two, number, twenty, twenties = self.init_concepts( + sheerka, context, one, two, number, twenty, twenties = self.init_test().with_concepts( "one", "two", "number", "twenty", Concept("twenties", definition="twenty number"), create_new=True - ) + ).unpack() service = sheerka.services[SheerkaConceptManager.NAME] - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, one.id) is None - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, two.id) is None - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, number.id) == {twenties.id} - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, twenty.id) == {twenties.id} - assert sheerka.cache_manager.get(service.CONCEPTS_REFERENCES_ENTRY, twenties.id) is None + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, one.id) is NotFound + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, two.id) is NotFound + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, number.id) == {twenties.id} + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, twenty.id) == {twenties.id} + assert sheerka.om.get(service.CONCEPTS_REFERENCES_ENTRY, twenties.id) is NotFound @pytest.mark.parametrize("attr", [ "name", @@ -357,16 +358,18 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert sheerka.get_by_hash(new_concept.get_definition_hash()).get_metadata().body == "metadata value" # sdp is updated - sheerka.cache_manager.commit(context) - from_sdp = sheerka.sdp.get(service.CONCEPTS_BY_ID_ENTRY, new_concept.id) + sheerka.om.commit(context) + from_sdp = sheerka.om.current_sdp().get(service.CONCEPTS_BY_ID_ENTRY, new_concept.id) assert from_sdp.get_metadata().body == "metadata value" assert from_sdp.get_metadata().variables == [("var_name", "default value")] assert from_sdp.get_prop(BuiltinConcepts.ISA) == {bar} - assert 
sheerka.sdp.get(service.CONCEPTS_BY_NAME_ENTRY, new_concept.name).get_metadata().body == "metadata value" - assert sheerka.sdp.get(service.CONCEPTS_BY_KEY_ENTRY, new_concept.key).get_metadata().body == "metadata value" - assert sheerka.sdp.get(service.CONCEPTS_BY_HASH_ENTRY, - new_concept.get_definition_hash()).get_metadata().body == "metadata value" + assert sheerka.om.current_sdp().get(service.CONCEPTS_BY_NAME_ENTRY, + new_concept.name).get_metadata().body == "metadata value" + assert sheerka.om.current_sdp().get(service.CONCEPTS_BY_KEY_ENTRY, + new_concept.key).get_metadata().body == "metadata value" + assert sheerka.om.current_sdp().get(service.CONCEPTS_BY_HASH_ENTRY, + new_concept.get_definition_hash()).get_metadata().body == "metadata value" def test_caches_are_update_when_i_modify_the_name(self): sheerka, context, foo = self.init_concepts("foo", cache_only=False) @@ -391,14 +394,15 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert not sheerka.is_known(sheerka.get_by_key(foo.key)) assert not sheerka.is_known(sheerka.get_by_hash(foo.get_definition_hash())) - sheerka.cache_manager.commit(context) - assert sheerka.sdp.get(service.CONCEPTS_BY_ID_ENTRY, new_concept.id).name == "bar" - assert sheerka.sdp.get(service.CONCEPTS_BY_KEY_ENTRY, new_concept.key).name == "bar" - assert sheerka.sdp.get(service.CONCEPTS_BY_NAME_ENTRY, new_concept.name).name == "bar" - assert sheerka.sdp.get(service.CONCEPTS_BY_HASH_ENTRY, new_concept.get_definition_hash()).name == "bar" - assert sheerka.sdp.get(service.CONCEPTS_BY_KEY_ENTRY, foo.key) is None - assert sheerka.sdp.get(service.CONCEPTS_BY_NAME_ENTRY, foo.name) is None - assert sheerka.sdp.get(service.CONCEPTS_BY_HASH_ENTRY, foo.get_definition_hash()) is None + sheerka.om.commit(context) + assert sheerka.om.current_sdp().get(service.CONCEPTS_BY_ID_ENTRY, new_concept.id).name == "bar" + assert sheerka.om.current_sdp().get(service.CONCEPTS_BY_KEY_ENTRY, new_concept.key).name == "bar" + assert 
sheerka.om.current_sdp().get(service.CONCEPTS_BY_NAME_ENTRY, new_concept.name).name == "bar" + assert sheerka.om.current_sdp().get(service.CONCEPTS_BY_HASH_ENTRY, + new_concept.get_definition_hash()).name == "bar" + assert sheerka.om.current_sdp().get(service.CONCEPTS_BY_KEY_ENTRY, foo.key) is NotFound + assert sheerka.om.current_sdp().get(service.CONCEPTS_BY_NAME_ENTRY, foo.name) is NotFound + assert sheerka.om.current_sdp().get(service.CONCEPTS_BY_HASH_ENTRY, foo.get_definition_hash()) is NotFound def test_i_can_modify_a_concept_from_a_list_of_concepts(self): sheerka, context, foo1, foo2 = self.init_concepts( @@ -467,12 +471,12 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert new_concept.key == "__var__0 bar __var__1 __var__2" def test_bnf_is_modified_when_modifying_the_definition(self): - sheerka, context, one, two, foo = self.init_concepts( + sheerka, context, one, two, foo = self.init_test().with_concepts( "one", "two", Concept(name="foo", definition="'twenty' one"), create_new=True - ) + ).unpack() to_add = {"meta": {"definition": "'twenty' two"}} @@ -484,17 +488,17 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert new_concept.get_bnf() == Sequence(StrMatch('twenty'), ConceptExpression(two, rule_name='two')) def test_concept_by_first_keyword_is_updated_after_concept_modification(self): - sheerka, context, foo, bar, baz = self.init_concepts( + sheerka, context, foo, bar, baz = self.init_test().with_concepts( Concept("foo"), Concept("bar"), Concept("baz", definition="foo"), - create_new=True) + create_new=True).unpack() - assert sheerka.cache_manager.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { + assert sheerka.om.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { "foo": ["1001"], "bar": ["1002"], 'c:|1001:': ['1003']} - assert sheerka.cache_manager.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { + assert sheerka.om.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { 'foo': ['1001', '1003'], 
'bar': ['1002']} @@ -502,18 +506,18 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): res = sheerka.modify_concept(context, foo, to_add) assert res.status - assert sheerka.cache_manager.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { + assert sheerka.om.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { "bar": ["1002", "1001"], 'c:|1001:': ['1003']} - assert sheerka.cache_manager.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { + assert sheerka.om.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { 'bar': ['1002', '1001', '1003']} def test_references_are_updated_after_concept_modification(self): - sheerka, context, one, twenty_one = self.init_concepts( + sheerka, context, one, twenty_one = self.init_test().with_concepts( "onz", Concept("twenty one", definition="'twenty' onz"), create_new=True - ) + ).unpack() assert twenty_one.get_bnf() == Sequence(StrMatch('twenty'), ConceptExpression(one, rule_name='onz')) @@ -530,6 +534,35 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): BaseNodeParser.ensure_bnf(context, twenty_one) assert twenty_one.get_bnf() == Sequence(StrMatch('twenty'), ConceptExpression(modified, rule_name='one')) + def test_i_can_modify_on_top_of_a_new_ontology_layer(self): + sheerka, context, foo = self.init_concepts(Concept("foo").def_var("a").def_var("b"), cache_only=False) + + sheerka.push_ontology(context, "new ontology") + + to_add = { + "meta": {"body": "a body"}, + "props": {BuiltinConcepts.ISA: "bar"}, + "variables": {"c": "value"} + } + to_remove = { + "variables": ["b"] + } + + res = sheerka.modify_concept(context, foo, to_add=to_add, to_remove=to_remove) + assert res.status + assert sheerka.isinstance(res.body, BuiltinConcepts.NEW_CONCEPT) + assert res.body.body.get_metadata().body == "a body" + assert res.body.body.get_metadata().variables == [("a", None), ("c", "value")] + assert res.body.body.get_metadata().props == {BuiltinConcepts.ISA: {"bar"}} + + # and correctly set in cache + 
updated = sheerka.get_by_id(foo.id) + assert updated.get_metadata().body == "a body" + assert updated.get_metadata().variables == [("a", None), ("c", "value")] + assert updated.get_metadata().props == {BuiltinConcepts.ISA: {"bar"}} + + sheerka.pop_ontology() + def test_i_cannot_modify_without_any_modification(self): sheerka, context, foo = self.init_concepts("foo") service = sheerka.services[SheerkaConceptManager.NAME] @@ -627,10 +660,10 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert sheerka.get_attr(foo, prop) == bar def test_i_cannot_remove_a_concept_which_has_reference(self): - sheerka, context, one, twenty_one = self.init_concepts( + sheerka, context, one, twenty_one = self.init_test().with_concepts( Concept("one"), Concept("twenty one", definition="'twenty' one"), - create_new=True) + create_new=True).unpack() res = sheerka.remove_concept(context, one) @@ -639,9 +672,9 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert res.body.body == ConceptIsReferenced([twenty_one]) def test_i_can_remove_a_concept(self): - sheerka, context, one = self.init_concepts( + sheerka, context, one = self.init_test().with_concepts( Concept("one"), - create_new=True) + create_new=True).unpack() # sanity check assert sheerka.get_by_id(one.id) == one @@ -669,6 +702,34 @@ class TestSheerkaConceptManager(TestUsingMemoryBasedSheerka): assert sheerka.isinstance(res.body, BuiltinConcepts.ERROR) assert res.body.body == ConceptNotFound(one) + def test_i_can_create_concepts_in_multiple_ontology_layers(self): + sheerka, context = self.init_concepts(cache_only=False) + + res = sheerka.create_new_concept(context, Concept("foo")) + assert res.status + assert sheerka.isinstance(res.body, BuiltinConcepts.NEW_CONCEPT) + + sheerka.push_ontology(context, "new ontology") + res = sheerka.create_new_concept(context, Concept("bar")) + assert res.status + assert sheerka.isinstance(res.body, BuiltinConcepts.NEW_CONCEPT) + + # I cannot defined foo again, even if 
it's not the same layer + res = sheerka.create_new_concept(context, Concept("foo")) + assert not res.status + assert sheerka.isinstance(res.body, BuiltinConcepts.CONCEPT_ALREADY_DEFINED) + + # I cannot define bar again in this layer + res = sheerka.create_new_concept(context, Concept("bar")) + assert not res.status + assert sheerka.isinstance(res.body, BuiltinConcepts.CONCEPT_ALREADY_DEFINED) + + sheerka.pop_ontology() + # But I can if I remove the layer + res = sheerka.create_new_concept(context, Concept("bar")) + assert res.status + assert sheerka.isinstance(res.body, BuiltinConcepts.NEW_CONCEPT) + class TestSheerkaConceptManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka): def test_i_can_add_several_concepts(self): @@ -678,42 +739,40 @@ class TestSheerkaConceptManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka): hello = Concept("Hello world a").def_var("a") res = sheerka.create_new_concept(context, hello) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) assert res.status sheerka = self.get_sheerka() # another instance context = self.get_context(sheerka) greeting = Concept("Greeting a").def_var("a") res = sheerka.create_new_concept(context, greeting) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) assert res.status sheerka = self.get_sheerka() # another instance again - assert sheerka.sdp.exists(service.CONCEPTS_BY_KEY_ENTRY, hello.key) - assert sheerka.sdp.exists(service.CONCEPTS_BY_KEY_ENTRY, greeting.key) - assert sheerka.sdp.exists(service.CONCEPTS_BY_ID_ENTRY, hello.id) - assert sheerka.sdp.exists(service.CONCEPTS_BY_ID_ENTRY, greeting.id) - assert sheerka.sdp.exists(service.CONCEPTS_BY_NAME_ENTRY, "Hello world a") - assert sheerka.sdp.exists(service.CONCEPTS_BY_NAME_ENTRY, "Greeting a") - assert sheerka.sdp.exists(service.CONCEPTS_BY_HASH_ENTRY, hello.get_definition_hash()) - assert sheerka.sdp.exists(service.CONCEPTS_BY_HASH_ENTRY, greeting.get_definition_hash()) + assert 
sheerka.om.current_sdp().exists(service.CONCEPTS_BY_KEY_ENTRY, hello.key) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_KEY_ENTRY, greeting.key) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_ID_ENTRY, hello.id) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_ID_ENTRY, greeting.id) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_NAME_ENTRY, "Hello world a") + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_NAME_ENTRY, "Greeting a") + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_HASH_ENTRY, hello.get_definition_hash()) + assert sheerka.om.current_sdp().exists(service.CONCEPTS_BY_HASH_ENTRY, greeting.get_definition_hash()) - assert sheerka.sdp.exists(Sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "Hello") - assert sheerka.sdp.exists(Sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "Greeting") + assert sheerka.om.current_sdp().exists(Sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "Hello") + assert sheerka.om.current_sdp().exists(Sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "Greeting") def test_i_cannot_add_the_same_concept_twice_using_sdp(self): """ Checks that duplicated concepts are managed by sheerka, not by sheerka.sdp :return: """ - sheerka = self.get_sheerka(cache_only=False) - context = self.get_context(sheerka) - concept = self.get_default_concept() + sheerka, context, concept = self.init_concepts("foo") sheerka.create_new_concept(context, concept) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) - sheerka.cache_manager.clear() + sheerka.om.current_cache_manager().clear(set_is_cleared=False) res = sheerka.create_new_concept(context, concept) assert not res.status @@ -727,13 +786,13 @@ class TestSheerkaConceptManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka): sheerka.create_new_concept(context, Concept("foo", body="1")) sheerka.create_new_concept(context, Concept("foo", body="2")) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) - assert 
len(sheerka.sdp.get(service.CONCEPTS_BY_KEY_ENTRY, "foo")) == 2 + assert len(sheerka.om.current_sdp().get(service.CONCEPTS_BY_KEY_ENTRY, "foo")) == 2 sheerka = self.get_sheerka() # new instance context = self.get_context(sheerka) sheerka.create_new_concept(context, Concept("foo", body="3")) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) - assert len(sheerka.sdp.get(service.CONCEPTS_BY_KEY_ENTRY, "foo")) == 3 + assert len(sheerka.om.current_sdp().get(service.CONCEPTS_BY_KEY_ENTRY, "foo")) == 3 diff --git a/tests/core/test_SheerkaDebugManager.py b/tests/core/test_SheerkaDebugManager.py index 5a373cc..59f616a 100644 --- a/tests/core/test_SheerkaDebugManager.py +++ b/tests/core/test_SheerkaDebugManager.py @@ -1,6 +1,7 @@ import pytest from core.builtin_concepts import BuiltinConcepts -from core.concept import Concept, NotInit +from core.concept import Concept +from core.global_symbols import NotInit, NotFound from core.sheerka.ExecutionContext import ExecutionContext from core.sheerka.services.SheerkaDebugManager import SheerkaDebugManager, DebugItem, ConceptDebugObj from parsers.PythonParser import PythonNode @@ -830,7 +831,7 @@ class TestSheerkaDebugManager(TestUsingMemoryBasedSheerka): dummy = DummyObj(foo, "value") res = sheerka.inspect(context, dummy, "#type#", "fake", "a", "b") assert res.body == {'#type#': 'DummyObj', - 'fake': "** Not Found **", + 'fake': NotFound, 'a': foo, 'b': 'value'} @@ -850,19 +851,19 @@ class TestSheerkaDebugManager(TestUsingMemoryBasedSheerka): res = sheerka.inspect(context, 0) - assert res.body == {'#type#': 'NotFound', + assert res.body == {'#type#': 'NotFoundConcept', 'id': sheerka.concepts_ids[BuiltinConcepts.NOT_FOUND], 'key': '__NOT_FOUND', 'name': '__NOT_FOUND', 'body': 'no digest'} def test_i_can_inspect_values(self): - sheerka, context, table, how, little = self.init_concepts( + sheerka, context, table, how, little = self.init_test().with_concepts( "table", Concept("how is x").def_var("x"), Concept("little 
x").def_var("x"), create_new=True - ) + ).unpack() return_values = sheerka.evaluate_user_input("how is little table") @@ -911,3 +912,64 @@ class TestSheerkaDebugManager(TestUsingMemoryBasedSheerka): assert str(ConceptDebugObj(foo)) == \ "(:foo|1001:meta.x='x_meta', meta.y='y_meta', compiled.x=(:bar|1002:meta.a='a_meta', value.a='a_value'), value.x='x_value', value.z='extra_value')" + + def test_i_can_save_and_restore_state_to_default_state(self): + sheerka, context = self.init_concepts() + service = sheerka.services[SheerkaDebugManager.NAME] + + sheerka.push_ontology(context, "new ontology") + + service.set_debug(context) + service.set_explicit(context) + service.debug_var(context, "var_service.var_method.var_name", "1+", 1) + service.debug_rule(context, "rule_service.rule_method.rule_name", "2+", 2) + service.debug_concept(context, "concept_service.concept_method.concept_name", "3+", 3) + + # sanity check + assert service.activated + assert service.explicit + assert service.debug_vars_settings != [] + assert service.debug_rules_settings != [] + assert service.debug_concepts_settings != [] + + sheerka.pop_ontology() + assert not service.activated + assert not service.explicit + assert service.context_cache == set() + assert service.variable_cache == set() + assert service.debug_vars_settings == [] + assert service.debug_rules_settings == [] + assert service.debug_concepts_settings == [] + + def test_i_can_save_and_restore_state_to_specific_state(self): + sheerka, context = self.init_concepts() + service = sheerka.services[SheerkaDebugManager.NAME] + + service.set_debug(context) + service.set_explicit(context) + service.debug_var(context, "v_service.v_method.v_name", "1+", 1) + service.debug_rule(context, "r_service.r_method.r_name", "2+", 2) + service.debug_concept(context, "c_serv.c_method.c_name", "3+", 3) + + sheerka.push_ontology(context, "new ontology") + + # modify the state + service.set_debug(context, False) + service.set_explicit(context, False) + 
service.debug_var(context, "var_service2.var_method2.var_name2", "11+", 11) + service.debug_rule(context, "rule_service2.rule_method2.rule_name2", "22+", 22) + service.debug_concept(context, "concept_service2.concept_method2.concept_name2", "33+", 33) + + # sanity + assert not service.activated + assert not service.explicit + assert len(service.debug_vars_settings) == 2 + assert len(service.debug_rules_settings) == 2 + assert len(service.debug_concepts_settings) == 2 + + sheerka.pop_ontology() + assert service.activated + assert service.explicit + assert service.debug_vars_settings == [DebugItem("v_name", "v_service", "v_method", 1, True, 1, False, True)] + assert service.debug_rules_settings == [DebugItem("r_name", "r_service", "r_method", 2, True, 2, False, True)] + assert service.debug_concepts_settings == [DebugItem("c_name", "c_serv", "c_method", 3, True, 3, False, True)] diff --git a/tests/core/test_SheerkaEvaluateConcept.py b/tests/core/test_SheerkaEvaluateConcept.py index 457331c..5d61418 100644 --- a/tests/core/test_SheerkaEvaluateConcept.py +++ b/tests/core/test_SheerkaEvaluateConcept.py @@ -2,8 +2,9 @@ from dataclasses import dataclass import pytest from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept, ParserResultConcept -from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved, CB, NotInit, \ +from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved, CB, \ concept_part_value, DEFINITION_TYPE_DEF +from core.global_symbols import NotInit from core.sheerka.services.SheerkaEvaluateConcept import SheerkaEvaluateConcept from core.sheerka.services.SheerkaMemory import SheerkaMemory from parsers.BaseParser import BaseParser @@ -13,7 +14,6 @@ from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka from tests.evaluators.EvaluatorTestsUtils import pr_ret_val, python_ret_val - class TestSheerkaEvaluateConcept(TestUsingMemoryBasedSheerka): 
@pytest.mark.parametrize("body, expected", [ @@ -27,7 +27,7 @@ class TestSheerkaEvaluateConcept(TestUsingMemoryBasedSheerka): ("1 > 2", False), ]) def test_i_can_evaluate_a_concept_with_simple_body(self, body, expected): - sheerka, context, concept = self.init_concepts(Concept("foo", body=body), eval_body=True) + sheerka, context, concept = self.init_test(eval_body=True).with_concepts(Concept("foo", body=body)).unpack() evaluated = sheerka.evaluate_concept(context, concept) @@ -99,7 +99,9 @@ class TestSheerkaEvaluateConcept(TestUsingMemoryBasedSheerka): def test_i_can_evaluate_when_the_body_is_the_name_of_the_concept(self): # to prove that I can distinguish from a string - sheerka, context, concept = self.init_concepts(Concept("foo", body="'foo'"), eval_body=True, create_new=True) + sheerka, context, concept = self.init_test(eval_body=True).with_concepts( + Concept("foo", body="'foo'"), + create_new=True).unpack() evaluated = sheerka.evaluate_concept(context, concept) diff --git a/tests/core/test_SheerkaEvaluateRules.py b/tests/core/test_SheerkaEvaluateRules.py index 4af82de..2864baf 100644 --- a/tests/core/test_SheerkaEvaluateRules.py +++ b/tests/core/test_SheerkaEvaluateRules.py @@ -9,7 +9,7 @@ from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka class TestSheerkaEvaluateRules(TestUsingMemoryBasedSheerka): def test_i_can_evaluate_python_rules(self): - sheerka, context, r1, r2, r3, r4, r5, r6, r7, r8, r9 = self.init_format_rules( + sheerka, context, r1, r2, r3, r4, r5, r6, r7, r8, r9 = self.init_test().with_rules( Rule(predicate="a == 1", action="", priority=1), # r1 Rule(predicate="a == 2", action="", priority=1), # r2 Rule(predicate="a == 3", action="", priority=0), # r3 @@ -19,7 +19,7 @@ class TestSheerkaEvaluateRules(TestUsingMemoryBasedSheerka): Rule(predicate="a == 7", action="", priority=1, is_enabled=False), # r7 Rule(predicate="a == 8", action="", priority=1), # r8 Rule(predicate="a == 9", action="", priority=2), # r9 - ) + ).unpack() 
service = sheerka.services[SheerkaEvaluateRules.NAME] rules = sorted([r1, r2, r3, r4, r5, r6, r7, r8, r9], key=operator.attrgetter('priority'), reverse=True) @@ -33,7 +33,8 @@ class TestSheerkaEvaluateRules(TestUsingMemoryBasedSheerka): } def test_i_can_evaluate_concept_rules(self): - sheerka, context, r1, r2, r3, r4, r5, r6, r7, r8, r9 = self.init_format_rules( + sheerka, context, concept, r1, r2, r3, r4, r5, r6, r7, r8, r9 = self.init_test().with_concepts( + Concept("x equals y", body="x == y").def_var("x").def_var("y"), create_new=True).with_rules( Rule(predicate="a equals 1", action="", priority=1), # r1 Rule(predicate="a equals 2", action="", priority=1), # r2 Rule(predicate="a equals 3", action="", priority=0), # r3 @@ -43,8 +44,7 @@ class TestSheerkaEvaluateRules(TestUsingMemoryBasedSheerka): Rule(predicate="a equals 7", action="", priority=1, is_enabled=False), # r7 Rule(predicate="a equals 8", action="", priority=1), # r8 Rule(predicate="a equals 9", action="", priority=2), # r9 - concepts=[Concept("x equals y", body="x == y").def_var("x").def_var("y")], - ) + ).unpack() service = sheerka.services[SheerkaEvaluateRules.NAME] rules = sorted([r1, r2, r3, r4, r5, r6, r7, r8, r9], key=operator.attrgetter('priority'), reverse=True) diff --git a/tests/core/test_SheerkaEventManager.py b/tests/core/test_SheerkaEventManager.py index ca670f0..ff4496c 100644 --- a/tests/core/test_SheerkaEventManager.py +++ b/tests/core/test_SheerkaEventManager.py @@ -1,3 +1,5 @@ +import pytest + from core.sheerka.services.SheerkaEventManager import SheerkaEventManager from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka @@ -44,7 +46,7 @@ example_of_class_method. event=xxx """ service = sheerka.services[SheerkaEventManager.NAME] - service.reset_topic(topic) + service.test_only_reset_topic(topic) def test_i_can_subscribe_and_publish_with_data(self, capsys): sheerka, context = self.init_concepts() @@ -63,4 +65,21 @@ example_of_class_method. 
event=xxx, data='42' """ service = sheerka.services[SheerkaEventManager.NAME] - service.reset_topic(topic) + service.test_only_reset_topic(topic) + + def test_i_can_save_and_reset_state(self): + sheerka, context = self.init_concepts() + service = sheerka.services[SheerkaEventManager.NAME] + + sheerka.subscribe("my first topic", self.example_of_class_method_with_data) + + sheerka.push_ontology(context, "new ontology") + sheerka.subscribe("my second topic", self.example_of_class_method_with_data) + + # I can access to the topic + assert "my first topic" in service.subscribers + assert "my second topic" in service.subscribers + + sheerka.pop_ontology() + assert "my first topic" in service.subscribers + assert "my second topic" not in service.subscribers diff --git a/tests/core/test_SheerkaFunctionsParametersHistory.py b/tests/core/test_SheerkaFunctionsParametersHistory.py index 3cf4e2b..ff18697 100644 --- a/tests/core/test_SheerkaFunctionsParametersHistory.py +++ b/tests/core/test_SheerkaFunctionsParametersHistory.py @@ -1,5 +1,8 @@ +import pytest + from core.sheerka.services.SheerkaFunctionsParametersHistory import SheerkaFunctionsParametersHistory, \ FunctionParametersObj +from core.utils import sheerka_deepcopy from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka @@ -7,13 +10,15 @@ from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka class TestSheerkaFunctionsParametersHistory(TestUsingMemoryBasedSheerka): def test_i_can_add_a_parameter_value(self): sheerka, context = self.init_concepts(cache_only=False) - service = SheerkaFunctionsParametersHistory(sheerka).initialize() + sheerka.om.test_only_unfreeze() + service = SheerkaFunctionsParametersHistory(sheerka).initialize() # since service is no longer auto init'ed + sheerka.om.freeze() service.record_function_parameter(context, "function", 1, "10") service.record_function_parameter(context, "function", 2, "True") service.record_function_parameter(context, "function", 3, 
"'string value'") - assert service.cache.copy() == {"function": FunctionParametersObj( + assert sheerka.om.copy(service.FUNCTIONS_PARAMETERS_ENTRY) == {"function": FunctionParametersObj( context.event.get_digest(), "function", { @@ -23,8 +28,8 @@ class TestSheerkaFunctionsParametersHistory(TestUsingMemoryBasedSheerka): })} # and i can serialize - sheerka.cache_manager.commit(context) - from_db = sheerka.sdp.get(SheerkaFunctionsParametersHistory.FUNCTIONS_PARAMETERS_ENTRY, "function") + sheerka.om.commit(context) + from_db = sheerka.om.current_sdp().get(service.FUNCTIONS_PARAMETERS_ENTRY, "function") assert from_db.event_id == context.event.get_digest() assert from_db.name == "function" assert from_db.params == { @@ -35,14 +40,16 @@ class TestSheerkaFunctionsParametersHistory(TestUsingMemoryBasedSheerka): def test_i_can_add_the_same_value_multiple_times(self): sheerka, context = self.init_concepts(cache_only=True) - service = SheerkaFunctionsParametersHistory(sheerka) + sheerka.om.test_only_unfreeze() + service = SheerkaFunctionsParametersHistory(sheerka).initialize() # since service is no longer auto init'ed + sheerka.om.freeze() service.record_function_parameter(context, "function", 1, "10") service.record_function_parameter(context, "function", 1, "20") service.record_function_parameter(context, "function", 2, "True") service.record_function_parameter(context, "function", 1, "20") - assert service.cache.copy() == {"function": FunctionParametersObj( + assert sheerka.om.copy(service.FUNCTIONS_PARAMETERS_ENTRY) == {"function": FunctionParametersObj( context.event.get_digest(), "function", { @@ -52,12 +59,14 @@ class TestSheerkaFunctionsParametersHistory(TestUsingMemoryBasedSheerka): def test_i_can_specify_parameter_in_any_order(self): sheerka, context = self.init_concepts() - service = SheerkaFunctionsParametersHistory(sheerka) + sheerka.om.test_only_unfreeze() + service = SheerkaFunctionsParametersHistory(sheerka).initialize() # since service is no longer auto 
init'ed + sheerka.om.freeze() service.record_function_parameter(context, "function", 3, "'string value'") service.record_function_parameter(context, "function", 2, "True") - assert service.cache.copy() == {"function": FunctionParametersObj( + assert sheerka.om.copy(service.FUNCTIONS_PARAMETERS_ENTRY) == {"function": FunctionParametersObj( context.event.get_digest(), "function", { @@ -67,7 +76,9 @@ class TestSheerkaFunctionsParametersHistory(TestUsingMemoryBasedSheerka): def test_no_value_is_managed(self): sheerka, context = self.init_concepts() - service = SheerkaFunctionsParametersHistory(sheerka) + sheerka.om.test_only_unfreeze() + service = SheerkaFunctionsParametersHistory(sheerka).initialize() # since service is no longer auto init'ed + sheerka.om.freeze() # no entry for the function assert service.get_function_parameters("function", 2) == [] @@ -78,10 +89,45 @@ class TestSheerkaFunctionsParametersHistory(TestUsingMemoryBasedSheerka): def test_i_can_get_sorted_parameters(self): sheerka, context = self.init_concepts() - service = SheerkaFunctionsParametersHistory(sheerka) + sheerka.om.test_only_unfreeze() + service = SheerkaFunctionsParametersHistory(sheerka).initialize() # since service is no longer auto init'ed + sheerka.om.freeze() service.record_function_parameter(context, "function", 2, "'string value'") service.record_function_parameter(context, "function", 2, "True") service.record_function_parameter(context, "function", 2, "True") assert service.get_function_parameters("function", 2) == ["True", "'string value'"] + + def test_i_can_add_and_retrieve_parameters_when_multiple_ontology_layers(self): + sheerka, context = self.init_concepts(cache_only=False) + sheerka.om.test_only_unfreeze() + service = SheerkaFunctionsParametersHistory(sheerka).initialize() # since service is no longer auto init'ed + sheerka.om.freeze() + + service.record_function_parameter(context, "function", 1, "10") + service.record_function_parameter(context, "function", 2, "True") + 
service.record_function_parameter(context, "function", 3, "'string value'") + + sheerka.push_ontology(context, "new ontology") + service.record_function_parameter(context, "function", 1, "20") + service.record_function_parameter(context, "function", 2, "True") + + assert sheerka.om.copy(service.FUNCTIONS_PARAMETERS_ENTRY) == {"function": FunctionParametersObj( + context.event.get_digest(), + "function", + { + 1: [('10', 1), ("20", 1)], + 2: [('True', 2)], + 3: [("'string value'", 1)] + })} + + sheerka.pop_ontology() + assert sheerka.om.copy(service.FUNCTIONS_PARAMETERS_ENTRY) == {"function": FunctionParametersObj( + context.event.get_digest(), + "function", + { + 1: [('10', 1)], + 2: [('True', 1)], + 3: [("'string value'", 1)] + })} diff --git a/tests/core/test_SheerkaHistoryManager.py b/tests/core/test_SheerkaHistoryManager.py index feea883..8dd85c3 100644 --- a/tests/core/test_SheerkaHistoryManager.py +++ b/tests/core/test_SheerkaHistoryManager.py @@ -1,10 +1,10 @@ from core.sheerka.services.SheerkaHistoryManager import hist -from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka +from tests.TestUsingFileBasedSheerka import TestUsingFileBasedSheerka -class TestSheerkaHistoryManager(TestUsingMemoryBasedSheerka): +class TestSheerkaHistoryManager(TestUsingFileBasedSheerka): def test_i_can_retrieve_history(self): - sheerka = self.get_sheerka(singleton=False) + sheerka = self.get_sheerka() sheerka.save_execution_context = True sheerka.evaluate_user_input("def concept one as 1") diff --git a/tests/core/test_SheerkaSetsManager.py b/tests/core/test_SheerkaIsAManager.py similarity index 74% rename from tests/core/test_SheerkaSetsManager.py rename to tests/core/test_SheerkaIsAManager.py index 46940b0..baac637 100644 --- a/tests/core/test_SheerkaSetsManager.py +++ b/tests/core/test_SheerkaIsAManager.py @@ -1,12 +1,14 @@ +import pytest + from core.builtin_concepts import BuiltinConcepts from core.concept import Concept -from 
core.sheerka.services.SheerkaSetsManager import SheerkaSetsManager +from core.sheerka.services.SheerkaIsAManager import SheerkaIsAManager from tests.TestUsingFileBasedSheerka import TestUsingFileBasedSheerka from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka -class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): +class TestSheerkaIsAManager(TestUsingMemoryBasedSheerka): def test_i_can_add_a_concept_to_a_set(self): sheerka, context, foo, group = self.init_concepts( @@ -14,14 +16,18 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): Concept("group"), cache_only=False ) - assert sheerka.add_concept_to_set(context, foo, group).status - group_elements = sheerka.cache_manager.get(SheerkaSetsManager.CONCEPTS_GROUPS_ENTRY, group.id) + res = sheerka.add_concept_to_set(context, foo, group) + + assert res.status + assert sheerka.isinstance(res.body, BuiltinConcepts.SUCCESS) + + group_elements = sheerka.om.get(SheerkaIsAManager.CONCEPTS_GROUPS_ENTRY, group.id) assert group_elements == {foo.id} # it can be persisted - sheerka.cache_manager.commit(context) - assert sheerka.sdp.get(SheerkaSetsManager.CONCEPTS_GROUPS_ENTRY, group.id) == {foo.id} + sheerka.om.commit(context) + assert sheerka.om.current_sdp().get(SheerkaIsAManager.CONCEPTS_GROUPS_ENTRY, group.id) == {foo.id} def test_i_cannot_add_the_same_concept_twice_in_a_set(self): sheerka, context, foo, group = self.init_concepts(Concept("foo"), Concept("group")) @@ -35,7 +41,7 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): assert res.body.body == foo assert res.body.concept_set == group - all_entries = sheerka.cache_manager.get(SheerkaSetsManager.CONCEPTS_GROUPS_ENTRY, group.id) + all_entries = sheerka.om.get(SheerkaIsAManager.CONCEPTS_GROUPS_ENTRY, group.id) assert all_entries == {foo.id} def test_i_can_have_multiple_groups(self): @@ -53,12 +59,12 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): assert sheerka.add_concept_to_set(context, bar, group2).status 
assert sheerka.add_concept_to_set(context, baz, group2).status - assert sheerka.cache_manager.get(SheerkaSetsManager.CONCEPTS_GROUPS_ENTRY, group1.id) == {foo.id, bar.id} - assert sheerka.cache_manager.get(SheerkaSetsManager.CONCEPTS_GROUPS_ENTRY, group2.id) == {baz.id, bar.id} + assert sheerka.om.get(SheerkaIsAManager.CONCEPTS_GROUPS_ENTRY, group1.id) == {foo.id, bar.id} + assert sheerka.om.get(SheerkaIsAManager.CONCEPTS_GROUPS_ENTRY, group2.id) == {baz.id, bar.id} # I can save in db - sheerka.cache_manager.commit(context) - assert sheerka.sdp.get(SheerkaSetsManager.CONCEPTS_GROUPS_ENTRY) == { + sheerka.om.commit(context) + assert sheerka.om.current_sdp().get(SheerkaIsAManager.CONCEPTS_GROUPS_ENTRY) == { '1004': {'1001', '1002'}, '1005': {'1002', '1003'} } @@ -96,7 +102,7 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): sheerka, context, foo, bar, group1, group2 = self.init_concepts( "foo", "bar", "group1", Concept("group2", body="group1")) - service = sheerka.services[SheerkaSetsManager.NAME] + service = sheerka.services[SheerkaIsAManager.NAME] service.add_concepts_to_set(context, [foo, bar], group1) assert sheerka.isaset(context, group2) @@ -112,7 +118,7 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): Concept("number"), Concept("sub_number", body="number", where="number < 4") ) - service = sheerka.services[SheerkaSetsManager.NAME] + service = sheerka.services[SheerkaIsAManager.NAME] service.add_concepts_to_set(context, [one, two, three, four, five], number) assert sheerka.isaset(context, sub_number) @@ -129,7 +135,7 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): Concept("sub_number", body="number", where="number < 4"), Concept("sub_sub_number", body="sub_number", where="sub_number > 2") ) - service = sheerka.services[SheerkaSetsManager.NAME] + service = sheerka.services[SheerkaIsAManager.NAME] service.add_concepts_to_set(context, [one, two, three, four, five], number) assert sheerka.isaset(context, sub_sub_number) @@ 
-153,7 +159,7 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): Concept("number"), Concept("sub_number", body="number", where="number < 4") ) - service = sheerka.services[SheerkaSetsManager.NAME] + service = sheerka.services[SheerkaIsAManager.NAME] service.add_concepts_to_set(context, [one, two, three, four, five], number) assert sheerka.isaset(context, sub_number) @@ -175,7 +181,7 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): Concept("number"), Concept("sub_number", body="number", where="number >= 20") ) - service = sheerka.services[SheerkaSetsManager.NAME] + service = sheerka.services[SheerkaIsAManager.NAME] service.add_concepts_to_set(context, [one, two, twenty, twenties], number) assert sheerka.isaset(context, sub_number) @@ -192,15 +198,14 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): :return: """ - sheerka, context, one, two, twenty, twenties, number = self.init_concepts( + sheerka, context, one, two, twenty, twenties, number = self.init_test().with_concepts( Concept("one", body="1"), Concept("two", body="2"), Concept("twenty", body="20"), Concept("twenties", definition="twenty (one|two)=unit", body="twenty + unit").def_var("unit"), Concept("number"), - create_new=True - ) - service = sheerka.services[SheerkaSetsManager.NAME] + create_new=True).unpack() + service = sheerka.services[SheerkaIsAManager.NAME] service.add_concepts_to_set(context, [one, two, twenty, twenties], number) assert sheerka.isinset(twenties, number) @@ -210,12 +215,11 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): assert sheerka.isinset(res[0].body, number) def test_a_concept_can_be_in_multiple_sets(self): - sheerka, context, foo, all_foo, all_bar = self.init_concepts( + sheerka, context, foo, all_foo, all_bar = self.init_test().with_concepts( Concept("foo"), Concept("all_foo"), Concept("all_bar"), - create_new=True - ) + create_new=True).unpack() foo = sheerka.new(foo.key) # new instance sheerka.set_isa(context, foo, all_foo) @@ 
-270,7 +274,7 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): elements = sheerka.get_set_elements(context, number) assert [c.id for c in elements] == [one.id] - concepts_in_cache = sheerka.cache_manager.get(SheerkaSetsManager.CONCEPTS_IN_GROUPS_ENTRY, number.id) + concepts_in_cache = sheerka.om.get(SheerkaIsAManager.CONCEPTS_IN_GROUPS_ENTRY, number.id) assert [c.id for c in concepts_in_cache] == [one.id] # pretend that number has been updated in sheerka.concepts_grammar @@ -281,23 +285,63 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka): elements = sheerka.get_set_elements(context, number) assert {c.id for c in elements} == {one.id, two.id} - concepts_in_cache = sheerka.cache_manager.get(SheerkaSetsManager.CONCEPTS_IN_GROUPS_ENTRY, number.id) + concepts_in_cache = sheerka.om.get(SheerkaIsAManager.CONCEPTS_IN_GROUPS_ENTRY, number.id) assert {c.id for c in concepts_in_cache} == {one.id, two.id} # make sure the bnf definition is also updated assert number.id not in sheerka.concepts_grammars + def test_i_can_get_and_set_isa_when_multiple_ontology_layers(self): + sheerka, context, foo, group1, group2 = self.init_concepts( + Concept("foo"), + Concept("group1"), + Concept("group2"), + cache_only=False + ) + + sheerka.set_isa(context, foo, group1) + + assert sheerka.isaset(context, group1) + assert sheerka.isinset(foo, group1) + assert sheerka.isa(foo, group1) + + sheerka.push_ontology(context, "new ontology") + assert sheerka.isaset(context, group1) + assert sheerka.isinset(foo, group1) + assert sheerka.isa(foo, group1) + assert not sheerka.isaset(context, group2) + assert not sheerka.isinset(foo, group2) + assert not sheerka.isa(foo, group2) + + sheerka.set_isa(context, foo, group2) + assert sheerka.isaset(context, group1) + assert sheerka.isinset(foo, group1) + assert sheerka.isa(foo, group1) + assert sheerka.isaset(context, group2) + assert sheerka.isinset(foo, group2) + assert sheerka.isa(foo, group2) + + # I can revert back + 
sheerka.pop_ontology() + assert sheerka.isaset(context, group1) + assert sheerka.isinset(foo, group1) + assert sheerka.isa(foo, group1) + assert not sheerka.isaset(context, group2) + assert not sheerka.isinset(foo, group2) + + foo = sheerka.get_by_id(foo.id) + assert not sheerka.isa(foo, group2) + class TestSheerkaSetsManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka): def test_i_can_add_concept_to_set_and_retrieve_it_in_another_session(self): - sheerka, context, foo, bar, group = self.init_concepts( - Concept("foo"), - Concept("bar"), - Concept("group"), - create_new=True) + sheerka, context, foo, bar, group = self.init_test().with_concepts(Concept("foo"), + Concept("bar"), + Concept("group"), + create_new=True).unpack() assert sheerka.add_concept_to_set(context, foo, group).status - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) sheerka = self.get_sheerka(reset_attrs=False) # another session context = self.get_context(sheerka) @@ -306,8 +350,8 @@ class TestSheerkaSetsManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka): # I can get the elements assert set(sheerka.get_set_elements(context, group)) == {foo, bar} - sheerka.cache_manager.commit(context) # save in db - all_entries = sheerka.sdp.get(SheerkaSetsManager.CONCEPTS_GROUPS_ENTRY) # check the db + sheerka.om.commit(context) # save in db + all_entries = sheerka.om.current_sdp().get(SheerkaIsAManager.CONCEPTS_GROUPS_ENTRY) # check the db assert all_entries == { group.id: {foo.id, bar.id} } @@ -320,33 +364,31 @@ class TestSheerkaSetsManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka): for c in [foo3, foo4]: sheerka.create_new_concept(context, c) - sets_handler = sheerka.services[SheerkaSetsManager.NAME] + sets_handler = sheerka.services[SheerkaIsAManager.NAME] res = sets_handler.add_concepts_to_set(context, (foo3, foo4), group) assert res.status # I can get the elements assert set(sheerka.get_set_elements(context, group)) == {foo, bar, foo3, foo4} - 
sheerka.cache_manager.commit(context) # save in db - all_entries = sheerka.sdp.get(SheerkaSetsManager.CONCEPTS_GROUPS_ENTRY) # check the db + sheerka.om.commit(context) # save in db + all_entries = sheerka.om.current_sdp().get(SheerkaIsAManager.CONCEPTS_GROUPS_ENTRY) # check the db assert all_entries == { group.id: {foo.id, bar.id, foo3.id, foo4.id} } def test_i_can_set_isa(self): - sheerka, context, foo, bar, group = self.init_concepts( - "foo", - "bar", - "group", - create_new=True, # needed by modify - ) + sheerka, context, foo, bar, group = self.init_test().with_concepts("foo", + "bar", + "group", + ).unpack() # nothing was previously in ISA foo = sheerka.new(foo.key) assert BuiltinConcepts.ISA not in foo.get_metadata().props res = sheerka.set_isa(context, foo, group) assert res.status - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) sheerka = self.get_sheerka(reset_attrs=False) assert foo.get_prop(BuiltinConcepts.ISA) == {group} @@ -363,11 +405,11 @@ class TestSheerkaSetsManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka): assert sheerka.isinset(bar, group) assert sheerka.isaset(context, group) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) # they are both in the same group sheerka = self.get_sheerka(reset_attrs=False) - all_entries = sheerka.sdp.get(SheerkaSetsManager.CONCEPTS_GROUPS_ENTRY) + all_entries = sheerka.om.current_sdp().get(SheerkaIsAManager.CONCEPTS_GROUPS_ENTRY) assert all_entries == { group.id: {foo.id, bar.id} } diff --git a/tests/core/test_SheerkaMemory.py b/tests/core/test_SheerkaMemory.py index 15a6f5a..0b7400a 100644 --- a/tests/core/test_SheerkaMemory.py +++ b/tests/core/test_SheerkaMemory.py @@ -1,5 +1,6 @@ from core.builtin_concepts import BuiltinConcepts from core.concept import Concept +from core.global_symbols import NotFound from core.sheerka.ExecutionContext import ExecutionContext from core.sheerka.services.SheerkaMemory import SheerkaMemory, MemoryObject @@ -19,7 +20,7 @@ class 
TestSheerkaMemory(TestUsingMemoryBasedSheerka): assert id(sheerka.get_from_short_term_memory(None, "a")) == id(foo) def test_i_can_add_context_short_term_memory(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() service = sheerka.services[SheerkaMemory.NAME] foo = Concept("foo") @@ -28,10 +29,10 @@ class TestSheerkaMemory(TestUsingMemoryBasedSheerka): context_id = ExecutionContext.ids[context.event.get_digest()] assert service.short_term_objects.copy() == {context_id: {'a': foo}} assert id(sheerka.get_from_short_term_memory(context, "a")) == id(foo) - assert sheerka.get_from_short_term_memory(None, "a") is None + assert sheerka.get_from_short_term_memory(None, "a") is NotFound def test_i_can_add_many(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() bag = {"a": "foo", "b": "bar", } context_id = ExecutionContext.ids[context.event.get_digest()] service = sheerka.services[SheerkaMemory.NAME] @@ -40,7 +41,7 @@ class TestSheerkaMemory(TestUsingMemoryBasedSheerka): assert service.short_term_objects.copy() == {context_id: bag} def test_i_can_get_obj_from_parents(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() foo = Concept("foo") sheerka.add_to_short_term_memory(None, "a", foo) @@ -53,54 +54,54 @@ class TestSheerkaMemory(TestUsingMemoryBasedSheerka): assert id(sheerka.get_from_short_term_memory(sub_context, "b")) == id(foo) assert id(sheerka.get_from_short_term_memory(context, "b")) == id(foo) - assert sheerka.get_from_short_term_memory(None, "b") is None + assert sheerka.get_from_short_term_memory(None, "b") is NotFound def test_short_term_memory_entries_are_removed_on_context_exit(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() with context.push(BuiltinConcepts.TESTING, None) as sub_context: foo = Concept("foo") sheerka.add_to_short_term_memory(sub_context, "a", foo) assert 
id(sheerka.get_from_short_term_memory(sub_context, "a")) == id(foo) - assert sheerka.get_from_short_term_memory(sub_context, "a") is None + assert sheerka.get_from_short_term_memory(sub_context, "a") is NotFound def test_short_term_memory_entries_are_removed_on_context_exit_2(self): # this time we test the bulk insert - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() with context.push(BuiltinConcepts.TESTING, None) as sub_context: foo = Concept("foo") sheerka.add_many_to_short_term_memory(sub_context, {"a": foo}) assert id(sheerka.get_from_short_term_memory(sub_context, "a")) == id(foo) - assert sheerka.get_from_short_term_memory(sub_context, "a") is None + assert sheerka.get_from_short_term_memory(sub_context, "a") is NotFound def test_i_can_add_and_retrieve_from_memory(self): - sheerka, context = self.init_concepts() - service = sheerka.services[SheerkaMemory.NAME] + sheerka, context = self.init_test().unpack() - assert sheerka.get_from_memory(context, "a") is None + assert sheerka.get_from_memory(context, "a") is NotFound foo = Concept("foo") sheerka.add_to_memory(context, "a", foo) - assert service.memory_objects.copy() == {"a": MemoryObject(context.event.get_digest(), foo)} + assert sheerka.om.copy(SheerkaMemory.OBJECTS_ENTRY) == {"a": MemoryObject(context.event.get_digest(), foo)} assert id(sheerka.get_from_memory(context, "a").obj) == id(foo) def test_i_can_use_memory_to_get_the_list_of_all_objects(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test(cache_only=False).unpack() foo = Concept("foo") bar = Concept("bar") sheerka.add_to_memory(context, "foo", 'value that will not appear') sheerka.add_to_memory(context, "foo", foo) sheerka.add_to_memory(context, "bar", bar) + sheerka.om.commit(context) assert sheerka.memory(context) == {"foo": foo, "bar": bar} def test_i_can_use_memory_with_a_string(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() 
foo = Concept("foo") sheerka.add_to_memory(context, "foo", foo) @@ -108,7 +109,7 @@ class TestSheerkaMemory(TestUsingMemoryBasedSheerka): assert sheerka.memory(context, "foo") == foo def test_i_can_use_memory_with_a_concept(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() foo = Concept("foo") sheerka.add_to_memory(context, "foo", foo) @@ -116,7 +117,7 @@ class TestSheerkaMemory(TestUsingMemoryBasedSheerka): assert sheerka.memory(context, Concept("foo")) == foo def test_concept_not_found_is_return_when_not_found(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() res = sheerka.memory(context, "foo") @@ -124,7 +125,7 @@ class TestSheerkaMemory(TestUsingMemoryBasedSheerka): assert res.body == {"#name": "foo"} def test_memory_only_returns_the_last_object(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() foo = Concept("foo") bar = Concept("bar") @@ -135,17 +136,17 @@ class TestSheerkaMemory(TestUsingMemoryBasedSheerka): assert sheerka.memory(context, "item") == bar def test_object_are_not_added_in_memory_during_the_initialisation(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() assert len(sheerka.memory(context)) == 0 class TestSheerkaMemoryUsingFileBase(TestUsingFileBasedSheerka): def test_i_can_record_memory_objects(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() sheerka.add_to_memory(context, "item", Concept("foo")) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) sheerka = self.get_sheerka() context = self.get_context(sheerka) diff --git a/tests/core/test_SheerkaRuleManager.py b/tests/core/test_SheerkaRuleManager.py index 931bb6a..a4197db 100644 --- a/tests/core/test_SheerkaRuleManager.py +++ b/tests/core/test_SheerkaRuleManager.py @@ -3,8 +3,8 @@ import ast import pytest from core.builtin_concepts import 
BuiltinConcepts from core.concept import Concept, CMV -from core.global_symbols import RULE_COMPARISON_CONTEXT -from core.rule import Rule +from core.global_symbols import RULE_COMPARISON_CONTEXT, NotFound +from core.rule import Rule, ACTION_TYPE_PRINT, ACTION_TYPE_EXEC from core.sheerka.services.SheerkaRuleManager import SheerkaRuleManager, FormatRuleParser, \ FormatAstRawText, FormatAstVariable, FormatAstSequence, FormatAstFunction, \ FormatRuleSyntaxError, FormatAstList, UnexpectedEof, FormatAstColor, RulePredicate, FormatAstDict, FormatAstMulti @@ -28,13 +28,12 @@ CONCEPT_EVALUATOR_NAME = "Concept" class TestSheerkaRuleManager(TestUsingMemoryBasedSheerka): @pytest.mark.parametrize("action_type, cache_entry", [ - ("print", SheerkaRuleManager.FORMAT_RULE_ENTRY), - ("exec", SheerkaRuleManager.EXEC_RULE_ENTRY), + (ACTION_TYPE_PRINT, SheerkaRuleManager.FORMAT_RULE_ENTRY), + (ACTION_TYPE_EXEC, SheerkaRuleManager.EXEC_RULE_ENTRY), ]) def test_i_can_create_a_new_rule(self, action_type, cache_entry): sheerka, context = self.init_concepts(cache_only=False) - previous_rules_number = sheerka.cache_manager.caches[sheerka.OBJECTS_IDS_ENTRY].cache.copy()[ - SheerkaRuleManager.RULE_IDS] + previous_rules_number = sheerka.om.get_all(sheerka.OBJECTS_IDS_ENTRY)[SheerkaRuleManager.RULE_IDS] rule = Rule(action_type, "name", "True", "Hello world") @@ -52,33 +51,50 @@ class TestSheerkaRuleManager(TestUsingMemoryBasedSheerka): assert created_rule.metadata.action == "Hello world" # saved in cache - assert len(sheerka.cache_manager.caches[cache_entry].cache) > 0 - from_cache = sheerka.cache_manager.get(cache_entry, expected_id) + assert len(sheerka.om.current_cache_manager().caches[cache_entry].cache) > 0 + from_cache = sheerka.om.get(cache_entry, expected_id) assert from_cache.metadata.id == expected_id assert from_cache.metadata.name == "name" assert from_cache.metadata.predicate == "True" assert from_cache.metadata.action_type == action_type assert from_cache.metadata.action == "Hello 
world" - sheerka.cache_manager.commit(context) + # the rule is also saved by name + by_name = sheerka.get_rule_by_name("name") + assert by_name.metadata.id == expected_id + assert by_name.metadata.name == "name" + assert by_name.metadata.predicate == "True" + assert by_name.metadata.action_type == action_type + assert by_name.metadata.action == "Hello world" + + sheerka.om.commit(context) # saved in sdp - from_sdp = sheerka.sdp.get(cache_entry, expected_id) + from_sdp = sheerka.om.current_sdp().get(cache_entry, expected_id) assert from_sdp.metadata.id == expected_id assert from_sdp.metadata.name == "name" assert from_sdp.metadata.predicate == "True" assert from_sdp.metadata.action_type == action_type assert from_sdp.metadata.action == "Hello world" + by_name = sheerka.om.current_sdp().get(SheerkaRuleManager.RULES_BY_NAME_ENTRY, "name") + assert by_name.metadata.id == expected_id + assert by_name.metadata.name == "name" + assert by_name.metadata.predicate == "True" + assert by_name.metadata.action_type == action_type + assert by_name.metadata.action == "Hello world" + def test_i_can_create_multiple_rules(self): sheerka, context = self.init_concepts(cache_only=False) - previous_rules_number = len(sheerka.cache_manager.caches[SheerkaRuleManager.FORMAT_RULE_ENTRY].cache) + previous_rules_number = len( + sheerka.om.current_cache_manager().caches[SheerkaRuleManager.FORMAT_RULE_ENTRY].cache) sheerka.create_new_rule(context, Rule("print", "name1", "True", "Hello world")) sheerka.create_new_rule(context, Rule("print", "name2", "value() is __EXPLANATION", "list(value())")) assert len( - sheerka.cache_manager.caches[SheerkaRuleManager.FORMAT_RULE_ENTRY].cache) == 2 + previous_rules_number + sheerka.om.current_cache_manager().caches[ + SheerkaRuleManager.FORMAT_RULE_ENTRY].cache) == 2 + previous_rules_number @pytest.mark.parametrize("text, expected", [ ("", FormatAstRawText("")), @@ -186,7 +202,7 @@ class TestSheerkaRuleManager(TestUsingMemoryBasedSheerka): assert 
res[0].concept is None def test_i_can_compile_predicate_when_python_and_concept(self): - sheerka, context, *concepts = self.init_concepts(Concept("foo bar"), create_new=True) + sheerka, context, *concepts = self.init_test().with_concepts(Concept("foo bar"), create_new=True).unpack() service = sheerka.services[SheerkaRuleManager.NAME] text = "foo bar == 5" ast_ = ast.parse("__C__foo0bar__1001__C__ == 5", "", 'eval') @@ -207,12 +223,12 @@ class TestSheerkaRuleManager(TestUsingMemoryBasedSheerka): ("cat is an b", ["cat", "b"]), ]) def test_i_can_compile_predicate_when_exact_concept(self, text, expected_variables): - sheerka, context, *concepts = self.init_concepts( + sheerka, context, *concepts = self.init_test().with_concepts( Concept("x is an y", pre="is_question()", body="isinstance(x, y)").def_var("x").def_var("y"), Concept("cat"), Concept("animal"), create_new=True - ) + ).unpack() service = sheerka.services[SheerkaRuleManager.NAME] expected = concepts[0] @@ -232,12 +248,12 @@ class TestSheerkaRuleManager(TestUsingMemoryBasedSheerka): ("a cat is an b", ["a cat", "b"]), ]) def test_i_can_compile_predicate_when_sya_node_parser(self, text, expected_variables): - sheerka, context, *concepts = self.init_concepts( + sheerka, context, *concepts = self.init_test().with_concepts( Concept("x is an y", pre="is_question()", body="isinstance(x, y)").def_var("x").def_var("y"), Concept("a cat"), Concept("animal"), create_new=True - ) + ).unpack() service = sheerka.services[SheerkaRuleManager.NAME] expected = CMV(concepts[0], x=expected_variables[0], y=expected_variables[1]) @@ -251,11 +267,11 @@ class TestSheerkaRuleManager(TestUsingMemoryBasedSheerka): assert res[0].concept == expected def test_i_can_compile_predicate_when_bnf_node_parser(self): - sheerka, context, *concepts = self.init_concepts( + sheerka, context, *concepts = self.init_test().with_concepts( Concept("animal"), Concept("x is an y", pre="is_question()", definition="('cat'|'bird')=x 'is an' 
animal").def_var("x"), create_new=True - ) + ).unpack() service = sheerka.services[SheerkaRuleManager.NAME] expected = concepts[1] @@ -269,11 +285,11 @@ class TestSheerkaRuleManager(TestUsingMemoryBasedSheerka): assert res[0].concept == expected def test_i_can_compile_predicate_when_multiple_choices(self): - sheerka, context, *concepts = self.init_concepts( + sheerka, context, *concepts = self.init_test().with_concepts( Concept("x is a y", pre="is_question()", body="isinstance(x, y)").def_var("x").def_var("y"), Concept("x is a y", pre="is_question()", body="isa(x, y)").def_var("x").def_var("y"), create_new=True - ) + ).unpack() service = sheerka.services[SheerkaRuleManager.NAME] res = service.compile_when(context, "test", "a is a b") @@ -291,6 +307,34 @@ class TestSheerkaRuleManager(TestUsingMemoryBasedSheerka): assert sheerka.objvalue(res[1].predicate)[0].concept == CMV(concepts[1], x="a", y="b") assert res[1].concept == CMV(concepts[1], x="a", y="b") + def test_i_can_get_rule_priorities(self): + sheerka, context, rule_true, rule_false = self.init_test().with_rules(("True", "True"), + ("False", "False")).unpack() + + sheerka.set_is_greater_than(context, BuiltinConcepts.PRECEDENCE, + rule_true, + rule_false, + RULE_COMPARISON_CONTEXT) + + rules_from_cache = sheerka.om.get_all(SheerkaRuleManager.FORMAT_RULE_ENTRY) + + assert rules_from_cache[rule_true.id].priority == 2 + assert rules_from_cache[rule_false.id].priority == 1 + + def test_i_can_get_and_retrieve_rules_when_multiple_ontology_layers(self): + sheerka, context, rule_true = self.init_test().with_rules(("true", "True", "True")).unpack() + + sheerka.push_ontology(context, "new ontology") + rule_false = sheerka.create_new_rule(context, Rule(ACTION_TYPE_EXEC, "false", "False", "False")).body.body + + # All rules are visible + assert sheerka.get_rule_by_id(rule_true.id) == rule_true + assert sheerka.get_rule_by_id(rule_false.id) == rule_false + + sheerka.pop_ontology() + assert 
sheerka.get_rule_by_id(rule_true.id) == rule_true + assert not sheerka.is_known(sheerka.get_rule_by_id(rule_false.id)) + # @pytest.mark.skip # @pytest.mark.parametrize("text, expected", [ # ("cat is an animal", set()), @@ -337,24 +381,58 @@ class TestSheerkaRuleManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka): rules[1], RULE_COMPARISON_CONTEXT) - sheerka.cache_manager.commit(context) - assert len(sheerka.cache_manager.copy(SheerkaRuleManager.FORMAT_RULE_ENTRY)) == len(rules) + sheerka.om.commit(context) + expected_rules_by_id = sheerka.om.get_all(SheerkaRuleManager.FORMAT_RULE_ENTRY) - sheerka = self.get_sheerka() # new instance - assert len(sheerka.cache_manager.copy(SheerkaRuleManager.FORMAT_RULE_ENTRY)) == len(rules) + sheerka = self.new_sheerka_instance(False) # new instance + rules_by_id = sheerka.om.get_all(SheerkaRuleManager.FORMAT_RULE_ENTRY) - # manually update the rules (I need their new priorities) - service = sheerka.services[SheerkaRuleManager.NAME] - rules = [service.format_rule_cache.get(rule_id) for rule_id in service.format_rule_cache] + assert len(rules_by_id) == len(expected_rules_by_id) - # check if the rules are correctly initialized - rules_as_map = {rule.id: rule for rule in rules} - for rule_id in service.format_rule_cache: - actual = service.format_rule_cache.get(rule_id) - expected = rules_as_map[rule_id] - assert actual.metadata.is_compiled == expected.metadata.is_compiled - assert actual.metadata.is_enabled == expected.metadata.is_enabled - assert actual.compiled_action == expected.compiled_action - assert actual.compiled_predicate == expected.compiled_predicate - assert actual.priority is not None - assert actual.priority == expected.priority + for rule_id, rule in rules_by_id.items(): + expected = expected_rules_by_id[rule_id] + + assert rule.metadata.action_type == expected.metadata.action_type + assert rule.metadata.name == expected.metadata.name + assert rule.metadata.predicate == expected.metadata.predicate + assert 
rule.metadata.action == expected.metadata.action + assert rule.metadata.id == expected.metadata.id + assert rule.metadata.is_compiled == expected.metadata.is_compiled + assert rule.metadata.is_enabled == expected.metadata.is_enabled + assert rule.compiled_action == expected.compiled_action + assert rule.compiled_predicate == expected.compiled_predicate + assert rule.priority is not None + assert rule.priority == expected.priority + + def test_rules_are_still_accessible_after_a_new_ontology_layer(self): + sheerka, context, *rules = self.init_format_rules( + Rule("print", "name1", "True", "Hello world"), + Rule("print", "name2", "value() is __EXPLANATION", "list(value())") + ) + sheerka.set_is_greater_than(context, BuiltinConcepts.PRECEDENCE, + rules[0], + rules[1], + RULE_COMPARISON_CONTEXT) + + sheerka.om.commit(context) + expected_rules_by_id = sheerka.om.get_all(SheerkaRuleManager.FORMAT_RULE_ENTRY) + + sheerka.push_ontology(context, "new ontology") + rules_by_id = sheerka.om.get_all(SheerkaRuleManager.FORMAT_RULE_ENTRY) + + assert len(rules_by_id) == len(expected_rules_by_id) + + for rule_id, rule in rules_by_id.items(): + expected = expected_rules_by_id[rule_id] + + assert rule.metadata.action_type == expected.metadata.action_type + assert rule.metadata.name == expected.metadata.name + assert rule.metadata.predicate == expected.metadata.predicate + assert rule.metadata.action == expected.metadata.action + assert rule.metadata.id == expected.metadata.id + assert rule.metadata.is_compiled == expected.metadata.is_compiled + assert rule.metadata.is_enabled == expected.metadata.is_enabled + assert rule.compiled_action == expected.compiled_action + assert rule.compiled_predicate == expected.compiled_predicate + assert rule.priority is not None + assert rule.priority == expected.priority diff --git a/tests/core/test_SheerkaVariableManager.py b/tests/core/test_SheerkaVariableManager.py index d92b3ae..576013e 100644 --- a/tests/core/test_SheerkaVariableManager.py +++ 
b/tests/core/test_SheerkaVariableManager.py @@ -1,4 +1,7 @@ +import pytest + from core.concept import Concept, ConceptParts +from core.global_symbols import NotFound from core.sheerka.services.SheerkaVariableManager import SheerkaVariableManager from tests.TestUsingFileBasedSheerka import TestUsingFileBasedSheerka @@ -15,10 +18,11 @@ class TestSheerkaVariable(TestUsingMemoryBasedSheerka): assert res == 1 # I can persist in db - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) - assert sheerka.sdp.exists(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable|my_variable") - loaded = sheerka.sdp.get(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable|my_variable") + assert sheerka.om.current_sdp().exists(SheerkaVariableManager.VARIABLES_ENTRY, + "TestSheerkaVariable|my_variable") + loaded = sheerka.om.current_sdp().get(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable|my_variable") assert loaded.event_id == context.event.get_digest() assert loaded.key == "my_variable" assert loaded.value == 1 @@ -45,10 +49,10 @@ class TestSheerkaVariable(TestUsingMemoryBasedSheerka): concept = Concept("foo") sheerka.record_var(context, "TestSheerkaVariable", "my_variable", concept) - assert sheerka.load_var("TestSheerkaVariable", "my_variable") is not None + assert sheerka.load_var("TestSheerkaVariable", "my_variable") == concept sheerka.delete_var(context, "TestSheerkaVariable", "my_variable") - assert sheerka.load_var("TestSheerkaVariable", "my_variable") is None + assert sheerka.load_var("TestSheerkaVariable", "my_variable") is NotFound def test_i_can_set_and_get_a_value(self): sheerka = self.get_sheerka(cache_only=False) @@ -60,35 +64,101 @@ class TestSheerkaVariable(TestUsingMemoryBasedSheerka): assert res == "my value" # I can persist in db - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) - assert sheerka.sdp.exists(SheerkaVariableManager.VARIABLES_ENTRY, "Test_user|my_variable") - loaded = 
sheerka.sdp.get(SheerkaVariableManager.VARIABLES_ENTRY, "Test_user|my_variable") + assert sheerka.om.current_sdp().exists(SheerkaVariableManager.VARIABLES_ENTRY, "Test_user|my_variable") + loaded = sheerka.om.current_sdp().get(SheerkaVariableManager.VARIABLES_ENTRY, "Test_user|my_variable") assert loaded.event_id == context.event.get_digest() assert loaded.key == "my_variable" assert loaded.value == "my value" assert loaded.who == "Test_user" assert loaded.parents is None + def test_i_can_record_and_load_internal_variables(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + sheerka.record_internal_var(context, "TestSheerkaVariable", "my_variable", 1) + res = sheerka.load_internal_var("TestSheerkaVariable", "my_variable") + assert res == 1 + + sheerka.record_internal_var(context, "TestSheerkaVariable", "lambda", lambda x: x + 1) + res = sheerka.load_internal_var("TestSheerkaVariable", "lambda") + assert res(10) == 11 + + def test_i_can_record_and_load_in_multiple_ontology_layers(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + sheerka.record_var(context, "TestSheerkaVariable", "my_variable", 1) + sheerka.record_internal_var(context, "TestSheerkaVariable", "lambda", lambda x: x + 1) + sheerka.om.commit(context) + + sheerka.push_ontology(context, "new ontology") + sheerka.push_ontology(context, "another ontology") + + # I can still access to the previous values + assert sheerka.load_var("TestSheerkaVariable", "my_variable") == 1 + assert sheerka.load_internal_var("TestSheerkaVariable", "lambda")(10) == 11 + + # I can modify the current values + sheerka.record_var(context, "TestSheerkaVariable", "my_variable", 2) + sheerka.record_internal_var(context, "TestSheerkaVariable", "lambda", lambda x: x + 2) + sheerka.om.commit(context) + + assert sheerka.load_var("TestSheerkaVariable", "my_variable") == 2 + assert sheerka.load_internal_var("TestSheerkaVariable", "lambda")(10) == 12 + + 
# I can revert back + sheerka.pop_ontology() + assert sheerka.load_var("TestSheerkaVariable", "my_variable") == 1 + assert sheerka.load_internal_var("TestSheerkaVariable", "lambda")(10) == 11 + + sheerka.pop_ontology() + assert sheerka.load_var("TestSheerkaVariable", "my_variable") == 1 + assert sheerka.load_internal_var("TestSheerkaVariable", "lambda")(10) == 11 + class TestSheerkaVariableUsingFileBasedSdp(TestUsingFileBasedSheerka): def test_i_can_bound_variables_to_sheerka(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() old_value = sheerka.enable_process_return_values new_value = not old_value - sheerka.record_var(context, "TestSheerkaVariable", "sheerka.enable_process_return_values", new_value) - sheerka.cache_manager.commit(context) + sheerka.record_var(context, sheerka.name, "enable_process_return_values", new_value) + sheerka.om.commit(context) assert sheerka.enable_process_return_values == new_value # the modification is persisted upon new Sheerka creation - sheerka = self.get_sheerka() + sheerka = self.new_sheerka_instance(False) + assert sheerka.load_var(sheerka.name, "enable_process_return_values") == new_value assert sheerka.enable_process_return_values == new_value # reset old value - sheerka.enable_process_return_values = old_value + sheerka.record_var(context, sheerka.name, "enable_process_return_values", old_value) + + def test_i_can_bound_variables_to_sheerka_when_ontology_layer(self): + sheerka, context = self.init_test().unpack() + + old_value = sheerka.enable_process_return_values + new_value = not old_value + sheerka.record_var(context, sheerka.name, "enable_process_return_values", new_value) + sheerka.om.commit(context) + + # sanity + assert sheerka.enable_process_return_values == new_value + assert sheerka.load_var(sheerka.name, "enable_process_return_values") == new_value + + # the modification is seen even when there is an ontology layer + sheerka.push_ontology(context, "new ontology") + 
sheerka = self.new_sheerka_instance(False) + assert sheerka.load_var(sheerka.name, "enable_process_return_values") == new_value + assert sheerka.enable_process_return_values == new_value + + # reset old value + sheerka.record_var(context, sheerka.name, "enable_process_return_values", old_value) # def test_i_can_get_the_parent_when_modified(self): # sheerka = self.get_sheerka() @@ -99,14 +169,14 @@ class TestSheerkaVariableUsingFileBasedSdp(TestUsingFileBasedSheerka): # res = sheerka.load_var("TestSheerkaVariable", "my_variable") # assert res == 2 # - # loaded = sheerka.sdp.get(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable.my_variable") + # loaded = sheerka.om.current_sdp().get(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable.my_variable") # assert loaded.event_id == context.event.get_digest() # assert loaded.key == "my_variable" # assert loaded.value == 2 # assert loaded.who == "TestSheerkaVariable" # assert loaded.parents == ['8c9ada7bf488d84229f6539f76042431638f16d600fe3b7ec7e7161043a40d59'] # - # parent = sheerka.sdp.load_obj(loaded.parents[0]) + # parent = sheerka.om.current_sdp().load_obj(loaded.parents[0]) # assert parent.event_id == context.event.get_digest() # assert parent.key == "my_variable" # assert parent.value == 1 @@ -120,7 +190,7 @@ class TestSheerkaVariableUsingFileBasedSdp(TestUsingFileBasedSheerka): # sheerka.record_var(context, "TestSheerkaVariable", "my_variable", 1) # sheerka.record_var(context, "TestSheerkaVariable", "my_variable", 1) # - # loaded = sheerka.sdp.get(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable.my_variable") + # loaded = sheerka.om.current_sdp().get(SheerkaVariableManager.VARIABLES_ENTRY, "TestSheerkaVariable.my_variable") # assert loaded.event_id == context.event.get_digest() # assert loaded.key == "my_variable" # assert loaded.value == 1 diff --git a/tests/core/test_sheerka.py b/tests/core/test_sheerka.py index 968dc2e..f4af575 100644 --- a/tests/core/test_sheerka.py +++ 
b/tests/core/test_sheerka.py @@ -1,10 +1,14 @@ import os import pytest + +from conftest import SHEERKA_TEST_FOLDER from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept, UserInputConcept from core.builtin_concepts_ids import AllBuiltinConcepts -from core.concept import Concept, PROPERTIES_TO_SERIALIZE, ConceptParts, NotInit +from core.concept import Concept, PROPERTIES_TO_SERIALIZE, ConceptParts, get_concept_attrs +from core.global_symbols import NotInit from core.sheerka.Sheerka import Sheerka, BASE_NODE_PARSER_CLASS +from core.sheerka.SheerkaOntologyManager import OntologyAlreadyExists from core.sheerka.services.SheerkaConceptManager import SheerkaConceptManager from core.tokenizer import Token, TokenKind @@ -107,7 +111,7 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert ret.body == "fake_concept" def test_i_can_instantiate_a_concept(self): - sheerka, context, concept = self.init_concepts(self.get_default_concept(), create_new=True) + sheerka, context, concept = self.init_test().with_concepts(self.get_default_concept(), create_new=True).unpack() new = sheerka.new(concept.key, a=10, b="value") @@ -119,10 +123,10 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert new.get_value("b") == "value" def test_i_can_instantiate_multiple_when_same_key(self): - sheerka, context, *concepts = self.init_concepts( + sheerka, context, *concepts = self.init_test().with_concepts( Concept("foo", body="foo1"), Concept("foo", body="foo2"), - create_new=True) + create_new=True).unpack() # when no id, i get two instances concepts = sheerka.new("foo") @@ -141,7 +145,7 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert foo2.get_metadata().body == "foo2" def test_instances_are_different_when_asking_for_new(self): - sheerka, context, concept = self.init_concepts(self.get_default_concept(), create_new=True) + sheerka, context, concept = 
self.init_test().with_concepts(self.get_default_concept(), create_new=True).unpack() new1 = sheerka.new(concept.key, a=10, b="value") new2 = sheerka.new(concept.key, a=10, b="value") @@ -150,7 +154,7 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert id(new1) != id(new2) def test_new_instance_does_not_impact_each_others(self): - sheerka, context, foo, bar = self.init_concepts("foo", "bar", create_new=True) + sheerka, context, foo, bar = self.init_test().with_concepts("foo", "bar", create_new=True).unpack() new_foo = sheerka.new("foo") new_foo.get_metadata().body = "metadata value" # modify metadata @@ -167,7 +171,8 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert new_foo.get_compiled() != foo.get_compiled() def test_i_get_the_same_instance_when_is_unique_is_true(self): - sheerka, context, concept = self.init_concepts(Concept(name="unique", is_unique=True), create_new=True) + sheerka, context, concept = self.init_test(). \ + with_concepts(Concept(name="unique", is_unique=True), create_new=True).unpack() new1 = sheerka.new(concept.key) new2 = sheerka.new(concept.key, a=10, b="value") # not that variables are simply discareded @@ -176,10 +181,8 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert id(new1) == id(new2) def test_values_are_reset_when_asking_for_a_new_instance(self): - sheerka, context, template = self.init_concepts( - Concept("foo", body="'foo body'"), - create_new=True, - eval_body=True) + sheerka, context, template = self.init_test(eval_body=True).with_concepts(Concept("foo", body="'foo body'"), + create_new=True).unpack() sheerka.evaluate_concept(context, sheerka.get_by_id(template.id)) assert template.get_metadata().is_evaluated @@ -202,10 +205,9 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert new.body == ('key', 'fake_concept') def test_i_cannot_instantiate_with_invalid_id(self): - sheerka, context, *concepts = 
self.init_concepts( - Concept("foo", body="foo1"), - Concept("foo", body="foo2"), - create_new=True) + sheerka, context, *concepts = self.init_test().with_concepts(Concept("foo", body="foo1"), + Concept("foo", body="foo2"), + create_new=True).unpack() new = sheerka.new(("foo", "invalid_id")) @@ -213,10 +215,9 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert new.body == [('key', 'foo'), ('id', 'invalid_id')] def test_i_cannot_instantiate_with_invalid_key(self): - sheerka, context, *concepts = self.init_concepts( - Concept("foo", body="foo1"), - Concept("foo", body="foo2"), - create_new=True) + sheerka, context, *concepts = self.init_test().with_concepts(Concept("foo", body="foo1"), + Concept("foo", body="foo2"), + create_new=True).unpack() new = sheerka.new(("invalid_key", "1001")) @@ -224,9 +225,8 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert new.body == [('key', 'invalid_key'), ('id', '1001')] def test_concept_id_is_irrelevant_when_only_one_concept(self): - sheerka, context, *concepts = self.init_concepts( - Concept("foo", body="foo1"), - create_new=True) + sheerka, context, *concepts = self.init_test().with_concepts(Concept("foo", body="foo1"), + create_new=True).unpack() new = sheerka.new(("foo", "invalid_id")) @@ -234,7 +234,8 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert new.get_metadata().body == "foo1" def test_i_cannot_instantiate_when_properties_are_not_recognized(self): - sheerka, context, concept = self.init_concepts(self.get_default_concept(), create_new=True) + sheerka, context, concept = self.init_test().with_concepts(self.get_default_concept(), + create_new=True).unpack() new = sheerka.new(concept.key, a=10, c="value") @@ -326,46 +327,138 @@ class TestSheerkaUsingMemoryBasedSheerka(TestUsingMemoryBasedSheerka): assert sheerka.resolve(concept) == real_expected def test_i_can_resolve_when_searching_by_definition(self): - sheerka, context, plus = 
self.init_concepts( - self.from_def_concept("plus", "a plus b", ["a", "b"]), - create_new=True - ) + sheerka, context, plus = self.init_test().with_concepts(self.from_def_concept("plus", "a plus b", ["a", "b"]), + create_new=True).unpack() assert sheerka.resolve("a plus b") == plus + def test_i_can_reset_global_concept_definition_when_adding_and_removing_layers(self): + sheerka, context, foo = self.init_concepts(Concept("foo").def_var("a").def_var("b")) + assert get_concept_attrs(foo) == ["a", "b"] + + sheerka.push_ontology(context, "another ontology") + sheerka.modify_concept(context, foo, to_add={"variables": {"c": None}}, to_remove={"variables": ["b"]}) + assert get_concept_attrs(foo) == ["a", "c"] + + sheerka.pop_ontology() + assert get_concept_attrs(foo) == ["a", "b"] + + def test_i_can_manage_concepts_ids_on_multiple_ontology_layers(self): + sheerka, context = self.init_concepts() + + res = sheerka.create_new_concept(context, Concept("foo")) + assert res.body.body.id == "1001" + + sheerka.push_ontology(context, "new ontology") + res = sheerka.create_new_concept(context, Concept("bar")) + assert res.body.body.id == "1002" + + res = sheerka.create_new_concept(context, Concept("baz")) + assert res.body.body.id == "1003" + + sheerka.pop_ontology() + res = sheerka.create_new_concept(context, Concept("baz")) + assert res.body.body.id == "1002" + + def test_i_can_add_ontology(self): + sheerka, context = self.init_test().unpack() + + # Create an ontology an set some values + res = sheerka.push_ontology(context, "new ontology") + + assert res.status + assert sheerka.isinstance(res.body, BuiltinConcepts.SUCCESS) + + foo = sheerka.create_new_concept(context, Concept("foo").def_var("a").def_var("b")).body.body + sheerka.locals = {"key1": "value1"} + + # sanity check + assert sheerka.get_by_name("foo") == foo + assert not sheerka.is_known(sheerka.get_by_name("bar")) + assert sheerka.locals == {"key1": "value1"} + assert get_concept_attrs(foo) == ["a", "b"] + + # record 
the ontology + ontology = sheerka.get_ontology(context) + sheerka.pop_ontology() + + # Create another ontology with some other values + sheerka.push_ontology(context, "another ontology") + foo2 = sheerka.create_new_concept(context, Concept("foo").def_var("a").def_var("b").def_var("c")).body.body + bar = sheerka.create_new_concept(context, Concept("bar")).body.body + sheerka.locals = {"key2": "value2"} + + # sanity check + assert sheerka.get_by_name("foo") == foo2 + assert sheerka.get_by_name("bar") == bar + assert sheerka.locals == {"key2": "value2"} + assert get_concept_attrs(foo) == ["a", "b", "c"] + + # put pack the previous ontology + sheerka.add_ontology(context, ontology) + + assert sheerka.get_by_name("foo") == foo # not [foo, foo2], foo2 is not seen !!! + assert sheerka.get_by_name("bar") == bar + assert sheerka.locals == {"key1": "value1"} + assert get_concept_attrs(foo) == ["a", "b"] + + # sanity check + sheerka.pop_ontology() + assert sheerka.get_by_name("foo") == foo2 + assert sheerka.get_by_name("bar") == bar + assert sheerka.locals == {"key2": "value2"} + assert get_concept_attrs(foo) == ["a", "b", "c"] + + def test_adding_the_same_ontology_twice_has_no_effect(self): + sheerka, context = self.init_test().unpack() + + sheerka.push_ontology(context, "#unit_test#") + + assert len(sheerka.om.ontologies) == 2 + + def test_i_cannot_add_an_ontology_that_already_exists(self): + sheerka, context = self.init_test().unpack() + + sheerka.push_ontology(context, "new ontology") + + res = sheerka.push_ontology(context, "#unit_test#") + assert not res.status + assert sheerka.isinstance(res.body, BuiltinConcepts.ONTOLOGY_ALREADY_DEFINED) + class TestSheerkaUsingFileBasedSheerka(TestUsingFileBasedSheerka): def test_root_folder_is_created_after_initialization(self): - return_value = Sheerka().initialize(self.root_folder) + return_value = Sheerka().initialize(SHEERKA_TEST_FOLDER) assert return_value.status, "initialisation should be successful" - assert 
os.path.exists(self.root_folder), "init folder should be created" + assert os.path.exists(SHEERKA_TEST_FOLDER), "init folder should be created" def test_builtin_concepts_are_initialized(self): - sheerka = self.get_sheerka() + sheerka, context = self.init_test().unpack() service = sheerka.services[SheerkaConceptManager.NAME] for concept_name in AllBuiltinConcepts: - assert service.has_key(str(concept_name)) - assert sheerka.sdp.get(service.CONCEPTS_BY_KEY_ENTRY, str(concept_name)) is not None + # check that the concept is already in cache + assert sheerka.om.ontologies[-1].cache_manager.has(service.CONCEPTS_BY_KEY_ENTRY, concept_name) + + # check that we can access it + assert sheerka.om.get(service.CONCEPTS_BY_KEY_ENTRY, concept_name) is not None # I can get back data from the sdp when the cache is empty - sheerka.cache_manager.clear() + sheerka.push_ontology(context, 'new ontology') # caches are empty assert not service.has_id("1") - assert not service.has_key(str(BuiltinConcepts.SHEERKA)) + assert not service.has_key(BuiltinConcepts.SHEERKA) assert sheerka.get_by_id("1") == sheerka # use sdp - # assert sheerka.has_key(str(BuiltinConcepts.SHEERKA)) # auto update the other caches - def test_builtin_concepts_can_be_updated(self): sheerka = self.get_sheerka() service = sheerka.services[SheerkaConceptManager.NAME] before_parsing = sheerka.get_by_key(BuiltinConcepts.BEFORE_PARSING) before_parsing.get_metadata().desc = "I have a description" before_parsing.get_metadata().full_serialization = True - with sheerka.sdp.get_transaction("Test") as transac: + with sheerka.om.current_sdp().get_transaction("Test") as transac: transac.add(service.CONCEPTS_BY_KEY_ENTRY, before_parsing.key, before_parsing, use_ref=True) sheerka = self.get_sheerka() # another fresh new instance @@ -374,8 +467,8 @@ class TestSheerkaUsingFileBasedSheerka(TestUsingFileBasedSheerka): assert before_parsing.get_metadata().desc == "I have a description" def test_i_first_look_in_local_cache(self): - 
sheerka, context, concept = self.init_concepts("foo", create_new=True) - sheerka.cache_manager.commit(context) + sheerka, context, concept = self.init_test().with_concepts("foo", create_new=True).unpack() + sheerka.om.commit(context) sheerka.get_by_key(concept.key).new_property = "I have modified the concept in cache" @@ -385,7 +478,7 @@ class TestSheerkaUsingFileBasedSheerka(TestUsingFileBasedSheerka): assert from_cache.new_property == "I have modified the concept in cache" # sdp instance is not modified - sheerka.cache_manager.clear() + sheerka.om.clear() from_sdp = sheerka.get_by_key(concept.key) assert from_sdp is not None assert from_sdp.key == concept.key @@ -394,42 +487,42 @@ class TestSheerkaUsingFileBasedSheerka(TestUsingFileBasedSheerka): def test_i_can_retrieve_from_sdp_when_cache_is_reset(self): sheerka, context, concept = self.init_concepts(Concept("foo", body="1")) service = sheerka.services[SheerkaConceptManager.NAME] - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) - sheerka.cache_manager.clear() + sheerka.om.clear() sheerka.get_by_key("foo") assert service.has_key("foo") # It's also updated when sdp returns more than one element concept2 = Concept("foo", body="2") sheerka.create_new_concept(context, concept2) - sheerka.cache_manager.commit(context) + sheerka.om.commit(context) - sheerka.cache_manager.clear() + sheerka.om.clear() assert len(sheerka.get_by_key("foo")) == 2 assert service.has_key("foo") # updated when by_id - sheerka.cache_manager.clear() + sheerka.om.clear() assert sheerka.get_by_id("1001") == concept assert service.has_id("1001") - sheerka.cache_manager.clear() + sheerka.om.clear() assert sheerka.get_by_name("foo") == [concept, concept2] assert service.has_name("foo") - sheerka.cache_manager.clear() + sheerka.om.clear() assert sheerka.get_by_hash(concept.get_definition_hash()) == concept assert service.has_hash(concept.get_definition_hash()) def test_get_by_key_retrieve_all_elements(self): - sheerka, context, 
*concepts = self.init_concepts( + sheerka, context, *concepts = self.init_test().with_concepts( Concept("foo", body="1"), Concept("foo", body="2"), - create_new=True) - sheerka.cache_manager.commit(context) + create_new=True).unpack() + sheerka.om.commit(context) - sheerka.cache_manager.clear() + sheerka.om.clear() sheerka.get_by_key("foo", "1001") # I ask only for the one with id = "1001" # but the two keys are returned @@ -439,28 +532,55 @@ class TestSheerkaUsingFileBasedSheerka(TestUsingFileBasedSheerka): assert concepts[1].id == "1002" def test_concept_node_parsing_is_initialized_at_startup(self): - sheerka, context, foo, bar, baz = self.init_concepts( + sheerka, context, foo, bar, baz = self.init_test().with_concepts( "foo", "bar", Concept("baz", definition="foo"), - create_new=True) - sheerka.cache_manager.commit(context) + create_new=True).unpack() + sheerka.om.commit(context) - assert sheerka.cache_manager.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { + assert sheerka.om.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { 'bar': ['1002'], 'c:|1001:': ['1003'], 'foo': ['1001']} - assert sheerka.cache_manager.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { + assert sheerka.om.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { 'bar': ['1002'], 'foo': ['1001', '1003'] } sheerka = self.get_sheerka() # another instance - assert sheerka.cache_manager.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { + assert sheerka.om.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { 'bar': ['1002'], 'c:|1001:': ['1003'], 'foo': ['1001']} - assert sheerka.cache_manager.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { + assert sheerka.om.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == { 'bar': ['1002'], 'foo': ['1001', '1003'] } + + def test_i_can_remember_ontologies_when_sheerka_is_recreated(self): + sheerka, context = self.init_test().unpack() + + sheerka.push_ontology(context, "to remove") + sheerka.pop_ontology() + 
sheerka.push_ontology(context, "new ontology") + sheerka.push_ontology(context, "another ontology") + + # sanity check + ontologies_names = [o.name for o in sheerka.om.ontologies] + assert ontologies_names == ['another ontology', 'new ontology', '#unit_test#', '__default__'] + + # get new instance + sheerka = self.new_sheerka_instance(False) + + # make sure that ontologies are recreated + ontologies_names = [o.name for o in sheerka.om.ontologies] + assert ontologies_names == ['another ontology', 'new ontology', '#unit_test#', '__default__'] + + def test_adding_the_same_ontology_twice_has_no_effect(self): + sheerka, context = self.init_test().unpack() + + sheerka = self.new_sheerka_instance(False) # new instance that remembers the top layer ontology + sheerka.push_ontology(context, "#unit_test#") + + assert len(sheerka.om.ontologies) == 2 diff --git a/tests/core/test_sheerkaResultManager.py b/tests/core/test_sheerkaResultManager.py index a4ceaca..447523b 100644 --- a/tests/core/test_sheerkaResultManager.py +++ b/tests/core/test_sheerkaResultManager.py @@ -3,6 +3,7 @@ import types import pytest from core.builtin_concepts import BuiltinConcepts from core.sheerka.ExecutionContext import ExecutionContext +from core.sheerka.SheerkaOntologyManager import SheerkaOntologyManager from core.sheerka.services.SheerkaResultManager import SheerkaResultConcept from sdp.sheerkaDataProvider import Event @@ -10,26 +11,27 @@ from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): - io_cache = None @classmethod def setup_class(cls): - sheerka = cls().get_sheerka(cache_only=False) + sheerka = cls().get_sheerka(cache_only=False, ontology="#TestSheerkaResultManager#") sheerka.save_execution_context = True + cls.root_ontology_name = "#TestSheerkaResultManager#" @classmethod def teardown_class(cls): - sheerka = cls().get_sheerka() - sheerka.save_execution_context = False + cls.sheerka.pop_ontology() + 
cls.root_ontology_name = SheerkaOntologyManager.ROOT_ONTOLOGY_NAME - def init_test(self): - sheerka, context = self.init_concepts() + def init_service(self): + sheerka, context = self.init_test().unpack() service = sheerka.services[SheerkaResultConcept.NAME] return sheerka, context, service def test_i_can_record_execution_contexts(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() + service.test_only_reset() sheerka.evaluate_user_input("foo") @@ -40,7 +42,7 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): assert execution_context.desc == "Evaluating 'foo'" - executions_contexts_in_db = sheerka.sdp.load_result(event_id) + executions_contexts_in_db = sheerka.om.current_sdp().load_result(event_id) assert executions_contexts_in_db is not None assert executions_contexts_in_db.desc == "Evaluating 'foo'" @@ -48,7 +50,7 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): assert service.last_execution.desc == "Evaluating 'foo'" def test_i_can_get_the_result_by_digest_using_cache(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") digest = service.last_execution.event.get_digest() @@ -77,10 +79,10 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): assert list(res.body) == previous_results def test_i_can_get_result_by_digest_using_db(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") digest = service.last_execution.event.get_digest() - service.reset() + service.test_only_reset() res = sheerka.get_results_by_digest(context, digest) @@ -106,18 +108,18 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): assert list(res.body) == previous_results def test_i_cannot_get_result_by_digest_if_the_digest_does_not_exist(self): - sheerka, context = self.init_concepts() + sheerka, 
context = self.init_test().unpack() res = sheerka.get_results_by_digest(context, "fake digest") assert sheerka.isinstance(res, BuiltinConcepts.NOT_FOUND) assert res.body == {'digest': 'fake digest'} def test_i_cannot_get_results_if_no_previous_digest(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() assert sheerka.get_results(context) is None def test_i_can_get_the_result_by_command_name(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") digest = service.last_execution.event.get_digest() @@ -130,16 +132,16 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): assert isinstance(res.body, types.GeneratorType) def test_i_can_get_the_result_by_command_name_using_db(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") sheerka.evaluate_user_input("one") # another command - service.reset() + service.test_only_reset() res = sheerka.get_results_by_command(context, "def concept") assert res.command == "def concept one as 1" def test_i_can_get_the_result_by_command_when_not_in_the_same_page_size(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") sheerka.evaluate_user_input("one") @@ -152,14 +154,17 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): assert res.command == "def concept one as 1" def test_i_cannot_get_results_from_command_if_the_command_does_not_exist(self): - sheerka, context = self.init_concepts() + sheerka, context, service = self.init_service() + service.test_only_reset() res = sheerka.get_results_by_command(context, "def concept") assert sheerka.isinstance(res, BuiltinConcepts.NOT_FOUND) assert res.body == {'command': 'def concept'} def 
test_i_cannot_get_result_from_command_if_the_command_does_not_exists_multiple_pages(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() + service.test_only_reset() + sheerka.evaluate_user_input("def concept one as 1") sheerka.evaluate_user_input("one") @@ -172,7 +177,7 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): assert res.body == {'command': 'fake command'} def test_i_can_get_last_results(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") sheerka.evaluate_user_input("one") @@ -182,21 +187,21 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): assert res.command == "one" def test_i_can_get_last_results_using_db(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") sheerka.evaluate_user_input("one") - service.reset() + service.test_only_reset() res = sheerka.get_last_results(context) assert sheerka.isinstance(res, BuiltinConcepts.EXPLANATION) assert res.command == "one" def test_i_can_get_last_results_when_event_with_no_result(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() - sheerka.sdp.save_event(Event("event 1")) - sheerka.sdp.save_event(Event("event 2")) - sheerka.sdp.save_event(Event("event 3")) + sheerka.om.save_event(Event("event 1")) + sheerka.om.save_event(Event("event 2")) + sheerka.om.save_event(Event("event 3")) sheerka.evaluate_user_input("def concept one as 1") res = sheerka.get_last_results(context) @@ -204,18 +209,20 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): assert res.command == "def concept one as 1" def test_i_cannot_get_last_results_when_no_result(self): - sheerka, context = self.init_concepts() + sheerka, context, service = self.init_service() + service.test_only_reset() res = 
sheerka.get_last_results(context) assert sheerka.isinstance(res, BuiltinConcepts.NOT_FOUND) assert res.body == {'query': 'last'} def test_i_cannot_get_last_results_when_only_events(self): - sheerka, context = self.init_concepts() + sheerka, context, service = self.init_service() + service.test_only_reset() - sheerka.sdp.save_event(Event("event 1")) - sheerka.sdp.save_event(Event("event 2")) - sheerka.sdp.save_event(Event("event 3")) + sheerka.om.save_event(Event("event 1")) + sheerka.om.save_event(Event("event 2")) + sheerka.om.save_event(Event("event 3")) res = sheerka.get_last_results(context) assert sheerka.isinstance(res, BuiltinConcepts.NOT_FOUND) @@ -227,7 +234,7 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): {"desc": "Evaluating 'def concept one as 1'", "id": 0} ]) def test_i_can_get_last_results_using_kwarg(self, kwargs): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") ExecutionContext.ids.clear() @@ -244,7 +251,7 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): ("'def concept one as 1' in desc and id == 0", {"desc": "Evaluating 'def concept one as 1'", "id": 0}) ]) def test_i_can_get_last_results_using_filter(self, predicate, expected): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") ExecutionContext.ids.clear() @@ -261,7 +268,7 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): ("'def concept one as 1' in desc and id == 0", {"desc": "Evaluating 'def concept one as 1'", "id": 0}) ]) def test_i_can_get_last_results_using_the_first_argument_to_filter(self, predicate, expected): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") ExecutionContext.ids.clear() @@ -278,7 +285,7 @@ class 
TestSheerkaResultManager(TestUsingMemoryBasedSheerka): {"desc": "Evaluating 'def concept one as 1'", "id": 0} ]) def test_i_can_get_results_using_kwarg(self, kwargs): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") ExecutionContext.ids.clear() sheerka.get_last_results(context) @@ -296,7 +303,7 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): ("'def concept one as 1' in desc and id == 0", {"desc": "Evaluating 'def concept one as 1'", "id": 0}) ]) def test_i_can_get_results_using_filter(self, predicate, expected): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") ExecutionContext.ids.clear() sheerka.get_last_results(context) @@ -314,7 +321,7 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): ("'def concept one as 1' in desc and id == 0", {"desc": "Evaluating 'def concept one as 1'", "id": 0}) ]) def test_i_can_get_results_using_the_first_argument_to_filter(self, predicate, expected): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") ExecutionContext.ids.clear() sheerka.get_last_results(context) @@ -332,33 +339,32 @@ class TestSheerkaResultManager(TestUsingMemoryBasedSheerka): SheerkaResultConcept.get_predicate(**predicate) def test_i_can_get_last_return_value(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() sheerka.evaluate_user_input("def concept one as 1") - ret = sheerka.last_ret(context) + ret = sheerka.get_last_return_value(context) assert sheerka.isinstance(ret[0].body, BuiltinConcepts.NEW_CONCEPT) sheerka.evaluate_user_input("eval one") - ret = sheerka.last_ret(context) + ret = sheerka.get_last_return_value(context) assert ret[0].body == 1 def test_i_can_track_new_concept(self): - 
sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() res = sheerka.evaluate_user_input("def concept one as 1") new_concept = res[0].body.body - assert sheerka.last_created_concept(context) == new_concept + assert sheerka.get_last_created_concept(context) == new_concept assert service.last_created_concept_id == new_concept.id def test_last_created_concept_is_recorded(self): - sheerka, context, service = self.init_test() + sheerka, context, service = self.init_service() res = sheerka.evaluate_user_input("def concept one as 1") new_concept = res[0].body.body - service.reset() + service.test_only_reset() service.initialize_deferred(context, False) assert service.last_created_concept_id == new_concept.id - assert sheerka.last_created_concept(context) == new_concept - + assert sheerka.get_last_created_concept(context) == new_concept diff --git a/tests/core/test_sheerka_call_evaluators.py b/tests/core/test_sheerka_call_evaluators.py index 0e2e712..2f4dab3 100644 --- a/tests/core/test_sheerka_call_evaluators.py +++ b/tests/core/test_sheerka_call_evaluators.py @@ -252,7 +252,7 @@ class TestSheerkaExecuteEvaluators(TestUsingMemoryBasedSheerka): def teardown_class(cls): # At the end of the tests, sheerka singleton instance will be corrupted # Ask for a new one - TestUsingMemoryBasedSheerka.singleton_instance = None + TestUsingMemoryBasedSheerka.sheerka = None def test_i_can_get_evaluators_when_context_is_not_altered(self): sheerka, context = self.init_concepts() diff --git a/tests/core/test_sheerka_call_parsers.py b/tests/core/test_sheerka_call_parsers.py index 0030fac..ca11c11 100644 --- a/tests/core/test_sheerka_call_parsers.py +++ b/tests/core/test_sheerka_call_parsers.py @@ -140,7 +140,7 @@ class TestSheerkaExecuteParsers(TestUsingMemoryBasedSheerka): def teardown_class(cls): # At the end of the tests, sheerka singleton instance will be corrupted # Ask for a new one - TestUsingMemoryBasedSheerka.singleton_instance = None + 
TestUsingMemoryBasedSheerka.sheerka = None def test_i_can_get_parser_when_context_is_not_altered(self): sheerka, context = self.init_concepts() diff --git a/tests/core/test_sheerka_ontology.py b/tests/core/test_sheerka_ontology.py new file mode 100644 index 0000000..2f84481 --- /dev/null +++ b/tests/core/test_sheerka_ontology.py @@ -0,0 +1,1460 @@ +from dataclasses import dataclass + +import pytest + +from cache.Cache import Cache +from cache.DictionaryCache import DictionaryCache +from cache.IncCache import IncCache +from cache.ListCache import ListCache +from cache.ListIfNeededCache import ListIfNeededCache +from core.concept import Concept +from core.global_symbols import NotFound, Removed +from core.sheerka.SheerkaOntologyManager import SheerkaOntologyManager, OntologyManagerFrozen, OntologyManagerNotFrozen, \ + OntologyManagerCannotPopLatest, OntologyAlreadyExists +from tests.TestUsingFileBasedSheerka import TestUsingFileBasedSheerka +from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka + + +@dataclass +class DummyObj: + key: str + value: object + + +class TestSheerkaOntology(TestUsingMemoryBasedSheerka): + def test_i_can_create_ontology_manager(self): + sheerka = self.get_sheerka() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + + assert len(manager.ontologies) == 1 + assert id(manager.current_cache_manager()) == id(manager.ontologies[0].cache_manager) + assert id(manager.current_sdp()) == id(manager.ontologies[0].cache_manager.sdp) + + def test_i_can_get_value_from_the_current_cache_manager(self): + sheerka, context = self.init_concepts(cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + + cache = Cache() + manager.register_cache("test", cache) + + assert cache._sdp == manager.current_sdp() + + manager.put("test", "key", "value") + assert manager.get("test", "key") == "value" + assert manager.current_sdp().get("test", "key") == NotFound + + 
manager.commit(context) + assert manager.get("test", "key") == "value" + assert manager.current_sdp().get("test", "key") == "value" + + def test_i_cannot_register_cache_once_ontology_is_frozen(self): + sheerka = self.get_sheerka() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.freeze() + + with pytest.raises(OntologyManagerFrozen): + manager.register_cache("test", Cache()) + + with pytest.raises(OntologyManagerFrozen): + manager.register_concept_cache("test", Cache(), lambda obj: obj.key, True) + + def test_i_cannot_push_ontology_if_not_frozen(self): + sheerka = self.get_sheerka() + + ontology = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + + with pytest.raises(OntologyManagerNotFrozen): + ontology.push_ontology("new_ontology") + + def test_i_can_push_ontology_from_simple_caches(self): + sheerka = self.get_sheerka() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("Cache", Cache(), persist=True, use_ref=True) + manager.register_cache("DictionaryCache", DictionaryCache(), persist=True, use_ref=False) + manager.register_cache("ListIfNeededCache", ListIfNeededCache(), persist=False, use_ref=False) + manager.register_cache("ListCache", ListCache(), persist=False, use_ref=True) + manager.register_cache("IncCache", IncCache(), False) + manager.freeze() + + manager.push_ontology("new_ontology") + + assert len(manager.ontologies) == 2 + + cache_manager_0 = manager.ontologies[0].cache_manager + assert len(cache_manager_0.caches) == 5 + assert cache_manager_0.concept_caches == [] + + assert isinstance(cache_manager_0.get_cache("Cache"), Cache) + assert cache_manager_0.caches["Cache"].persist == True + assert cache_manager_0.caches["Cache"].use_ref == True + assert cache_manager_0.get_cache("Cache")._sdp.name == "new_ontology" + + assert isinstance(cache_manager_0.get_cache("DictionaryCache"), DictionaryCache) + assert 
cache_manager_0.caches["DictionaryCache"].persist == True + assert cache_manager_0.caches["DictionaryCache"].use_ref == False + assert cache_manager_0.get_cache("DictionaryCache")._sdp.name == "new_ontology" + + assert isinstance(cache_manager_0.get_cache("ListIfNeededCache"), ListIfNeededCache) + assert cache_manager_0.caches["ListIfNeededCache"].persist == False + assert cache_manager_0.caches["ListIfNeededCache"].use_ref == False + assert cache_manager_0.get_cache("ListIfNeededCache")._sdp.name == "new_ontology" + + assert isinstance(cache_manager_0.get_cache("ListCache"), ListCache) + assert cache_manager_0.caches["ListCache"].persist == False + assert cache_manager_0.caches["ListCache"].use_ref == True + assert cache_manager_0.get_cache("ListCache")._sdp.name == "new_ontology" + + assert isinstance(cache_manager_0.get_cache("IncCache"), IncCache) + assert cache_manager_0.sdp.name == "new_ontology" + assert cache_manager_0.get_cache("IncCache")._sdp.name == "new_ontology" + + # old ontology is still there + cache_manager_1 = manager.ontologies[1].cache_manager + assert len(cache_manager_1.caches) == 5 + assert isinstance(cache_manager_1.get_cache("Cache"), Cache) + assert isinstance(cache_manager_1.get_cache("DictionaryCache"), DictionaryCache) + assert isinstance(cache_manager_1.get_cache("ListIfNeededCache"), ListIfNeededCache) + assert isinstance(cache_manager_1.get_cache("ListCache"), ListCache) + assert isinstance(cache_manager_1.get_cache("IncCache"), IncCache) + assert cache_manager_1.sdp.name == "__default__" + + def test_i_can_push_ontology_from_concept_caches(self): + sheerka = self.get_sheerka() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_concept_cache("ByKey", Cache(), get_key=lambda obj: obj.key, use_ref=True) + manager.register_concept_cache("ById", ListIfNeededCache(), get_key=lambda obj: obj.id, use_ref=False) + manager.freeze() + + manager.push_ontology("new_ontology") + assert 
len(manager.ontologies) == 2 + + cache_manager_0 = manager.ontologies[0].cache_manager + cache_manager_1 = manager.ontologies[1].cache_manager + + assert len(cache_manager_0.caches) == 2 + + assert isinstance(cache_manager_0.get_cache("ByKey"), Cache) + assert cache_manager_0.caches["ByKey"].persist == cache_manager_1.caches["ByKey"].persist + assert cache_manager_0.caches["ByKey"].get_key == cache_manager_1.caches["ByKey"].get_key + assert cache_manager_0.caches["ByKey"].use_ref == cache_manager_1.caches["ByKey"].use_ref + + assert isinstance(cache_manager_0.get_cache("ById"), ListIfNeededCache) + assert cache_manager_0.caches["ById"].persist == cache_manager_1.caches["ById"].persist + assert cache_manager_0.caches["ById"].get_key == cache_manager_1.caches["ById"].get_key + assert cache_manager_0.caches["ById"].use_ref == cache_manager_1.caches["ById"].use_ref + + assert cache_manager_0.concept_caches == cache_manager_1.concept_caches + + assert cache_manager_0.sdp.name == "new_ontology" + assert cache_manager_1.sdp.name == "__default__" + + def test_i_can_get_database_value(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache(default=lambda sdp, key: sdp.get("cache_name", key))) + + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value") + + assert not manager.current_cache_manager().has("cache_name", "key") + assert manager.get("cache_name", "key") == "value" + assert manager.current_cache_manager().has("cache_name", "key") + + def test_i_cannot_pop_ontology_when_not_frozen(self): + sheerka = self.get_sheerka() + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + + with pytest.raises(OntologyManagerNotFrozen): + manager.pop_ontology() + + def test_i_cannot_pop_the_latest_cache_manager(self): + 
sheerka = self.get_sheerka() + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.freeze() + + with pytest.raises(OntologyManagerCannotPopLatest): + manager.pop_ontology() + + def test_i_can_pop_ontology(self): + sheerka = self.get_sheerka() + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.freeze() + + manager.push_ontology("ontology1") + manager.push_ontology("ontology2") + manager.push_ontology("ontology3") + + manager.pop_ontology() + assert len(manager.ontologies) == 3 + + manager.pop_ontology() + manager.pop_ontology() + with pytest.raises(OntologyManagerCannotPopLatest): + manager.pop_ontology() + + def test_i_can_add_ontology(self): + sheerka, context = self.init_test().unpack() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + # init the ontology that will be put back + manager.push_ontology("new ontology") + manager.put("cache_name", "key1", "value1") + manager.commit(context) + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key2", "value2") + + # call key3 to check. 
This time there is no value, but there will be later + assert manager.get("cache_name", "key3") is NotFound + + new_ontology = manager.get_ontology() + manager.pop_ontology() + + # add another ontology, with its own values + manager.push_ontology("another ontology") + manager.put("cache_name", "key1", "value1_from_another") + manager.put("cache_name", "key2", "value2_from_another") + manager.put("cache_name", "key3", "value3_from_another") + manager.commit(context) + + # put back the ontology + manager.add_ontology(new_ontology) + + assert manager.get("cache_name", "key1") == "value1" + assert manager.get("cache_name", "key2") == "value2" + assert manager.get("cache_name", "key3") == "value3_from_another" + + def test_i_can_get_ontology(self): + sheerka, context = self.init_test().unpack() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.freeze() + + manager.push_ontology("name1") + manager.push_ontology("name2") + manager.push_ontology("name3") + + assert manager.get_ontology("name2").name == "name2" + assert manager.get_ontology().name == "name3" + + with pytest.raises(KeyError): + assert manager.get_ontology("name4") + + def test_i_can_access_values_after_push_and_pop_cache_only_true(self): + sheerka = self.get_sheerka(cache_only=True) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache()) + manager.freeze() + + manager.put("cache_name", "key", "value1") + assert manager.get("cache_name", "key") == "value1" + + manager.push_ontology("new ontology") + manager.put("cache_name", "key", "value2") + assert manager.get("cache_name", "key") == "value2" + + manager.pop_ontology() + assert manager.get("cache_name", "key") == "value1" + + def test_i_can_access_values_after_push_and_pop_cache_only_false(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, 
sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache(default=lambda sdp, key: sdp.get("cache_name", key))) + manager.freeze() + + # put value in DB + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value1") + assert not manager.current_cache_manager().has("cache_name", "key") # value not in cache + assert manager.current_sdp().exists("cache_name", "key") # but value is in DB + + # add an ontology layer + manager.push_ontology("new ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value2") + + # At this point, the value is in DB, but not in cache + assert not manager.current_cache_manager().has("cache_name", "key") + assert manager.get("cache_name", "key") == "value2" + assert manager.current_cache_manager().has("cache_name", "key") + + # sanity check + # Let's check sdp values + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {'key': 'value2'}} + assert manager.ontologies[1].cache_manager.sdp.state.data == {'cache_name': {'key': 'value1'}} + + # remove a layer + manager.pop_ontology() + assert not manager.current_cache_manager().has("cache_name", "key") # value is no longer in cache + assert manager.get("cache_name", "key") == "value1" + + # sanity check + # Let's check sdp values + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {'key': 'value1'}} + + def test_i_can_manage_multiple_ontology_layers(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache(default=lambda sdp, key: sdp.get("cache_name", key))) + manager.freeze() + + # default layer + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value1") + + # 
add an ontology layer + manager.push_ontology("new ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value2") + + # add an ontology layer + manager.push_ontology("another ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value3") + + # add an ontology layer + manager.push_ontology("fourth ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value4") + + assert manager.get("cache_name", "key") == "value4" + + manager.pop_ontology() + assert manager.get("cache_name", "key") == "value3" + + manager.pop_ontology() + assert manager.get("cache_name", "key") == "value2" + + manager.pop_ontology() + assert manager.get("cache_name", "key") == "value1" + + def test_i_have_access_to_sub_layers_values_cache_only_false(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + # default layer + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value") + + # add ontology layers + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + + # I can get the low level value + assert manager.get("cache_name", "key") == "value" + + # check that the value is copied on the top level cache + assert manager.current_cache_manager().has("cache_name", "key") + assert not manager.ontologies[1].cache_manager.has("cache_name", "key") # not the top level + assert manager.ontologies[2].cache_manager.has("cache_name", "key") # the data comes from it + + def test_i_can_get_value_from_all_layers(self): + sheerka = self.get_sheerka(cache_only=False) + + manager = 
SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key", "value") + + # add ontology layers + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + + assert manager.get("cache_name", "key") == "value" + + manager.pop_ontology() + assert manager.get("cache_name", "key") == "value" + + manager.pop_ontology() + assert manager.get("cache_name", "key") == "value" + + def test_i_can_only_get_top_layer_values_when_dictionary_cache(self): + sheerka = self.get_sheerka() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", DictionaryCache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", False, {"key": "value"}) # add some values in default layer + + # add some other values in another layer + manager.push_ontology("new ontology") + manager.put("cache_name", False, {"key1": "value1"}) + + assert manager.get("cache_name", "key") is NotFound # other layer are not visible + assert manager.get("cache_name", "key1") == "value1" # I an only see the current layer + + # I still can use get all + assert manager.get_all("cache_name") == {"key": "value", "key1": "value1"} + + # I can get back my values after pop + manager.pop_ontology() + assert manager.copy("cache_name") == {"key": "value"} + + def test_dictionary_caches_values_are_copied_when_a_new_ontology_is_pushed(self): + sheerka = self.get_sheerka() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", DictionaryCache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", False, {"key": "value"}) # add some values in default layer + manager.push_ontology("new ontology") + + assert manager.copy("cache_name") == {"key": "value"} + assert 
manager.current_cache_manager().get_cache("cache_name").to_add == set() + assert manager.current_cache_manager().get_cache("cache_name").to_remove == set() + + def test_initialized_key_are_correctly_managed_when_multiple_layers(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("c_name", Cache().auto_configure("c_name")) + manager.freeze() + + manager.put("c_name", "key", "value") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("c_name", "key2", "value2") # not in cache + + # add ontology layers + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + manager.push_ontology("last ontology") + + manager.get("c_name", "key") # == "value" but we don't care + assert manager.ontologies[0].cache_manager.caches["c_name"].cache._initialized_keys == {"key"} + assert manager.ontologies[1].cache_manager.caches["c_name"].cache._initialized_keys == set() + assert manager.ontologies[2].cache_manager.caches["c_name"].cache._initialized_keys == set() + assert manager.ontologies[3].cache_manager.caches["c_name"].cache._initialized_keys == set() + + manager.get("c_name", "key2") # == "value2" but we don't care + assert manager.ontologies[0].cache_manager.caches["c_name"].cache._initialized_keys == {"key", "key2"} + assert manager.ontologies[1].cache_manager.caches["c_name"].cache._initialized_keys == set() + assert manager.ontologies[2].cache_manager.caches["c_name"].cache._initialized_keys == set() + assert manager.ontologies[3].cache_manager.caches["c_name"].cache._initialized_keys == {"key2"} + + manager.get("c_name", "no_key") # is NotFound but we don't care + assert manager.ontologies[0].cache_manager.caches["c_name"].cache._initialized_keys == {"key", "key2", "no_key"} + assert manager.ontologies[1].cache_manager.caches["c_name"].cache._initialized_keys 
== set() + assert manager.ontologies[2].cache_manager.caches["c_name"].cache._initialized_keys == set() + assert manager.ontologies[3].cache_manager.caches["c_name"].cache._initialized_keys == {"key2", "no_key"} + + def test_i_cannot_get_a_value_that_does_not_exists(self): + sheerka = self.get_sheerka(cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + # add ontology layers + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + + assert manager.get("cache_name", "key") is NotFound + + def test_i_cannot_get_a_value_that_is_removed(self): + sheerka = self.get_sheerka() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache()) + manager.freeze() + + manager.put("cache_name", "key", "value") + + # add ontology layers + manager.push_ontology("new ontology") + manager.put("cache_name", "key", Removed) + + assert manager.get("cache_name", "key") is NotFound + + def test_i_cannot_get_value_that_is_removed_in_sub_level(self): + sheerka = self.get_sheerka(cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key", "value") # value exists + + # add ontology layer + manager.push_ontology("new ontology") + manager.put("cache_name", "key", Removed) # value is removed + + # add another layer + manager.push_ontology("another ontology") # no indication + + assert manager.get("cache_name", "key") is NotFound + + # check that the cache of the top level ontology is updated + assert manager.current_cache_manager().caches["cache_name"].cache.copy() == {"key": Removed} + + def test_i_can_test_if_a_value_exists(self): + sheerka = 
self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache(extend_exists=lambda sdp, key: sdp.exists("cache_name", key))) + manager.freeze() + + # default layer + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value") + + # add ontology layers + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + + # I can get the low level value + assert manager.exists("cache_name", "key") + + # check that the value is not in cache (only in the low level database) + assert not manager.current_cache_manager().has("cache_name", "key") + assert not manager.ontologies[1].cache_manager.has("cache_name", "key") + assert not manager.ontologies[2].cache_manager.has("cache_name", "key") + + def test_i_can_check_that_a_value_does_not_exist(self): + sheerka = self.get_sheerka(cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache(extend_exists=lambda sdp, key: sdp.exists("cache_name", key))) + manager.freeze() + + # add ontology layers + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + + assert not manager.exists("cache_name", "key") + + def test_i_can_list_from_multiple_ontologies(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache()) + manager.freeze() + + # default layer + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key1", DummyObj("key1", "value1")) + + manager.push_ontology("new ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + 
transaction.add("cache_name", "key2", DummyObj("key2", "value2")) + transaction.add("cache_name", "key1", DummyObj("key1", "value11")) # key1 is modified + + manager.push_ontology("another ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key3", DummyObj("key3", "value3")) + + assert manager.list("cache_name") == [DummyObj("key1", "value11"), + DummyObj("key2", "value2"), + DummyObj("key3", "value3")] + + def test_i_can_list_from_multiple_ontologies_even_if_they_are_not_all_filled(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache()) + manager.freeze() + + # default layer + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key1", DummyObj("key1", "value1")) + + manager.push_ontology("new ontology") + # nothing in this ontology + + manager.push_ontology("another ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key3", DummyObj("key3", "value3")) + + assert manager.list("cache_name") == [DummyObj("key1", "value1"), + DummyObj("key3", "value3")] + + def test_i_can_list_when_no_items(self): + sheerka = self.get_sheerka(cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache()) + manager.freeze() + + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + + assert manager.list("cache_name") == [] + + def test_i_can_put_an_entry_cache_only_true(self): + sheerka = self.get_sheerka(cache_only=True) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache()) + manager.freeze() + + manager.put("cache_name", "key", 
"value") + assert manager.get("cache_name", "key") == "value" + + manager.push_ontology("new ontology") + manager.put("cache_name", "key", "value2") + assert manager.get("cache_name", "key") == "value2" + + def test_i_can_put_entry_cache_only_false(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache()) + manager.freeze() + + # default ontology + manager.put("cache_name", "key", "value") + manager.commit(context) + + assert manager.get("cache_name", "key") == "value" + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {'key': 'value'}} + + # add an ontology layer + manager.push_ontology("new ontology") + manager.put("cache_name", "key", "value2") + manager.commit(context) + assert manager.get("cache_name", "key") == "value2" + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {'key': 'value2'}} + assert manager.ontologies[1].cache_manager.sdp.state.data == {'cache_name': {'key': 'value'}} + + def test_i_can_put_in_a_list_cache(self): + # in this test, sub layers have values. 
+ # We need to check that those values are not lost when adding a new element + + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", ListCache().auto_configure("cache_name")) + manager.freeze() + + # default ontology + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", ["val1"]) + + # add an ontology layer + manager.push_ontology("new ontology") + manager.put("cache_name", "key", "val2") + manager.commit(context) + + assert manager.get("cache_name", "key") == ["val1", "val2"] + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {'key': ['val1', 'val2']}} + assert manager.ontologies[1].cache_manager.sdp.state.data == {'cache_name': {'key': ['val1']}} + + # and I can keep adding in another layer + manager.push_ontology("another ontology") + manager.put("cache_name", "key", "val3") + manager.commit(context) + + assert manager.get("cache_name", "key") == ["val1", "val2", "val3"] + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {'key': ['val1', 'val2', 'val3']}} + assert manager.ontologies[1].cache_manager.sdp.state.data == {'cache_name': {'key': ['val1', 'val2']}} + assert manager.ontologies[2].cache_manager.sdp.state.data == {'cache_name': {'key': ['val1']}} + + def test_i_can_remove_an_entry_that_is_only_in_db(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + # default ontology + # value in DB but not in cache + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value") + + manager.delete("cache_name", "key") + 
manager.commit(context) + + assert manager.get("cache_name", "key") is NotFound + + # sanity check, the entry is removed + assert manager.ontologies[0].cache_manager.caches["cache_name"].cache.copy() == {} + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {}} + + def test_i_can_remove_when_value_is_in_low_level(self): + # In this test, there is a value in a lower level ontology + # After calling delete(), the value is no longer accessible, but not deleted + + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache(default=lambda sdp, key: sdp.get("cache_name", key), + extend_exists=lambda sdp, key: sdp.exists("cache_name", key))) + manager.freeze() + + # default ontology + # value in DB but not in cache + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value") + + # add an ontology layer + manager.push_ontology("new ontology") + manager.delete("cache_name", "key", "value") + manager.commit(context) + + assert manager.get("cache_name", "key") is NotFound + # sanity check, the entry is removed + assert manager.ontologies[0].cache_manager.caches["cache_name"].cache.copy() == {"key": Removed} + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {"key": Removed}} + + assert manager.ontologies[1].cache_manager.caches["cache_name"].cache.copy() == {} + assert manager.ontologies[1].cache_manager.sdp.state.data == {'cache_name': {"key": "value"}} + + # The entry still exists in lower ontology + manager.pop_ontology() + assert manager.get("cache_name", "key") == "value" + + def test_i_can_remove_when_value_is_in_both_low_and_current_level(self): + # In this test, there is a value is in a lower level ontology and in the current ontology + # After calling delete(), the value is no longer accessible, but 
not deleted + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache(default=lambda sdp, key: sdp.get("cache_name", key), + extend_exists=lambda sdp, key: sdp.exists("cache_name", key))) + manager.freeze() + + # default ontology + # value in DB but not in cache + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value") + + # add an ontology layer + manager.push_ontology("new ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value2") + + manager.delete("cache_name", "key", "value") + manager.commit(context) + + assert manager.get("cache_name", "key") is NotFound + # sanity check, the entry is removed + assert manager.ontologies[0].cache_manager.caches["cache_name"].cache.copy() == {"key": Removed} + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {"key": Removed}} + + assert manager.ontologies[1].cache_manager.caches["cache_name"].cache.copy() == {} + assert manager.ontologies[1].cache_manager.sdp.state.data == {'cache_name': {"key": "value"}} + + # The entry still exists in lower ontology + manager.pop_ontology() + assert manager.get("cache_name", "key") == "value" + + def test_i_can_remove_when_value_is_not_low_level(self): + # In this test, there is a value is only in the current level + # The value is deleted + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + # add an ontology layer + # so that the value does not exists in the lower level ontology + manager.push_ontology("new ontology") + + # value in DB but not in cache 
+ with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value") + + manager.delete("cache_name", "key") + manager.commit(context) + + assert manager.get("cache_name", "key") is NotFound + + # sanity check, the entry is removed + assert manager.ontologies[0].cache_manager.caches["cache_name"].cache.copy() == {} + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {}} + + def test_i_can_remove_list_if_needed_when_value_is_in_low_level(self): + # In this test, there are multiple values in a low level ontology + # We remove only one, + # So the top level ontology must be a copy minus the removed value + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + cache = ListIfNeededCache().auto_configure("cache_name") + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", cache) + manager.freeze() + + # default ontology + # value in DB but not in cache + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", ["value", "value2"]) + + # add ontology layers + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + manager.delete("cache_name", "key", "value") + manager.commit(context) + + assert manager.get("cache_name", "key") == "value2" + # sanity check, the entry is removed + assert manager.ontologies[0].cache_manager.caches["cache_name"].cache.copy() == {"key": "value2"} + assert manager.ontologies[0].cache_manager.sdp.state.data == {'cache_name': {"key": "value2"}} + + assert manager.ontologies[1].cache_manager.caches["cache_name"].cache.copy() == {} + assert manager.ontologies[1].cache_manager.sdp.state.data == {} + + assert manager.ontologies[2].cache_manager.caches["cache_name"].cache.copy() == {"key": ["value", "value2"]} + assert manager.ontologies[2].cache_manager.sdp.state.data == {'cache_name': 
{"key": ["value", "value2"]}} + + # The entry still exists in lower ontology + manager.pop_ontology() + manager.pop_ontology() + assert manager.get("cache_name", "key") == ["value", "value2"] + + def test_i_can_add_concept_default_layer(self): + sheerka, context, foo = self.init_concepts("foo", cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + cache = Cache(default=lambda sdp, key: sdp.get("by_id", key), + extend_exists=lambda sdp, key: sdp.get("by_id", key)) + manager.register_concept_cache("by_id", cache, lambda obj: obj.id, use_ref=True) + cache = ListIfNeededCache(default=lambda sdp, key: sdp.get("by_key", key), + extend_exists=lambda sdp, key: sdp.get("by_key", key)) + manager.register_concept_cache("by_key", cache, lambda obj: obj.key, use_ref=True) + manager.freeze() + + manager.add_concept(foo) + manager.commit(context) + + assert manager.get("by_key", foo.key) == foo + assert manager.get("by_id", foo.id) == foo + + assert manager.current_sdp().get("by_key", foo.key) == foo + assert manager.current_sdp().get("by_id", foo.id) == foo + + def test_i_can_add_concept_in_top_layer(self): + sheerka, context, foo = self.init_concepts("foo", cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + cache = Cache(default=lambda sdp, key: sdp.get("by_id", key), + extend_exists=lambda sdp, key: sdp.get("by_id", key)) + manager.register_concept_cache("by_id", cache, lambda obj: obj.id, use_ref=True) + cache = ListIfNeededCache(default=lambda sdp, key: sdp.get("by_key", key), + extend_exists=lambda sdp, key: sdp.get("by_key", key)) + manager.register_concept_cache("by_key", cache, lambda obj: obj.key, use_ref=True) + manager.freeze() + + # add an ontology layer + manager.push_ontology("new ontology") + + manager.add_concept(foo) + manager.commit(context) + + assert manager.get("by_key", foo.key) == foo + assert manager.get("by_id", foo.id) == foo + + assert 
manager.current_sdp().get("by_key", foo.key) == foo + assert manager.current_sdp().get("by_id", foo.id) == foo + + # sanity check + assert list(manager.ontologies[0].cache_manager.sdp.state.data.keys()) == ['by_id', 'by_key'] + assert manager.ontologies[1].cache_manager.sdp.state.data == {} + + def test_i_can_update_concept_in_default_layer(self): + sheerka, context, foo = self.init_concepts("foo", cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + cache = Cache(default=lambda sdp, key: sdp.get("by_id", key), + extend_exists=lambda sdp, key: sdp.get("by_id", key)) + manager.register_concept_cache("by_id", cache, lambda obj: obj.id, use_ref=True) + cache = ListIfNeededCache(default=lambda sdp, key: sdp.get("by_key", key), + extend_exists=lambda sdp, key: sdp.get("by_key", key)) + manager.register_concept_cache("by_key", cache, lambda obj: obj.key, use_ref=True) + manager.freeze() + + manager.add_concept(foo) + + modified = Concept().update_from(foo) + modified.get_metadata().body = "new body" + assert foo != modified + + manager.update_concept(foo, modified) + manager.commit(context) + + assert manager.get("by_key", foo.key) == modified + assert manager.get("by_id", foo.id) == modified + + def test_i_can_update_concept_in_top_layer(self): + sheerka, context, foo = self.init_concepts("foo", cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + cache = Cache().auto_configure("by_id") + manager.register_concept_cache("by_id", cache, lambda obj: obj.id, use_ref=True) + cache = ListIfNeededCache().auto_configure("by_key") + manager.register_concept_cache("by_key", cache, lambda obj: obj.key, use_ref=True) + manager.freeze() + + manager.add_concept(foo) + manager.commit(context) + + # add an ontology layer + manager.push_ontology("new ontology") + + modified = Concept().update_from(foo) + modified.get_metadata().body = "new body" + assert foo != modified + + 
manager.update_concept(foo, modified) + manager.commit(context) + + assert manager.get("by_key", foo.key) == modified + assert manager.get("by_id", foo.id) == modified + + # sanity check. + # make sure that the previous values are kept + assert manager.ontologies[0].cache_manager.sdp.get('by_key', foo.key) == modified + assert manager.ontologies[0].cache_manager.sdp.get('by_id', foo.id) == modified + assert manager.ontologies[1].cache_manager.sdp.get('by_key', foo.key) == foo + assert manager.ontologies[1].cache_manager.sdp.get('by_id', foo.id) == foo + + # so I can get the old values when I pop ontology + manager.pop_ontology() + assert manager.get("by_key", foo.key) == foo + assert manager.get("by_id", foo.id) == foo + + def test_i_can_update_when_concept_in_both_top_and_bottom_layers(self): + sheerka, context, foo = self.init_concepts("foo", cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + cache = Cache(default=lambda sdp, key: sdp.get("by_id", key), + extend_exists=lambda sdp, key: sdp.get("by_id", key)) + manager.register_concept_cache("by_id", cache, lambda obj: obj.id, use_ref=True) + cache = ListIfNeededCache(default=lambda sdp, key: sdp.get("by_key", key), + extend_exists=lambda sdp, key: sdp.get("by_key", key)) + manager.register_concept_cache("by_key", cache, lambda obj: obj.key, use_ref=True) + manager.freeze() + + manager.add_concept(foo) + manager.commit(context) + + # add an ontology layer + # and modify the concept + # The database is updated, but not the internal cache + manager.push_ontology("new ontology") + modified1 = Concept().update_from(foo) + modified1.get_metadata().body = "new body" + assert foo != modified1 + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("by_key", foo.key, modified1) + transaction.add("by_id", foo.id, modified1) + + # modify the top layer a second time + modified2 = Concept().update_from(foo) + 
modified2.get_metadata().pre = "True" + + manager.update_concept(foo, modified2) + manager.commit(context) + + assert manager.get("by_key", foo.key) == modified2 + assert manager.get("by_id", foo.id) == modified2 + + # sanity check. + # make sure that the previous values are kept + # sanity check + assert manager.ontologies[0].cache_manager.sdp.get('by_key', foo.key) == modified2 + assert manager.ontologies[0].cache_manager.sdp.get('by_id', foo.id) == modified2 + assert manager.ontologies[1].cache_manager.sdp.get('by_key', foo.key) == foo + assert manager.ontologies[1].cache_manager.sdp.get('by_id', foo.id) == foo + + # so I can get the old values when I pop ontology + manager.pop_ontology() + assert manager.get("by_key", foo.key) == foo + assert manager.get("by_id", foo.id) == foo + + def test_i_can_update_when_the_key_changes(self): + sheerka, context, foo = self.init_concepts("foo", cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + cache = Cache().auto_configure("by_id") + manager.register_concept_cache("by_id", cache, lambda obj: obj.id, use_ref=True) + cache = ListIfNeededCache().auto_configure("by_key") + manager.register_concept_cache("by_key", cache, lambda obj: obj.key, use_ref=True) + manager.freeze() + + # create an entry + manager.add_concept(foo) + manager.commit(context) + + # add a new layer, and modify the concept + manager.push_ontology("new ontology") + modified = Concept().update_from(foo) + modified.get_metadata().key = "another key" + manager.update_concept(foo, modified) + manager.commit(context) + + assert manager.get("by_id", modified.id) == modified + assert manager.get("by_key", modified.key) == modified + assert manager.get("by_key", foo.key) == NotFound + + # sanity + assert manager.ontologies[0].cache_manager.sdp.get('by_key', foo.key) == Removed + assert manager.ontologies[0].cache_manager.sdp.get('by_key', modified.key) == modified + assert 
manager.ontologies[0].cache_manager.sdp.get('by_id', foo.id) == modified + assert manager.ontologies[1].cache_manager.sdp.get('by_key', foo.key) == foo + assert manager.ontologies[1].cache_manager.sdp.get('by_key', modified.key) == NotFound + assert manager.ontologies[1].cache_manager.sdp.get('by_id', foo.id) == foo + + def test_i_can_update_when_key_changes_and_there_are_lists(self): + sheerka, context, foo, foo2, bar = self.init_concepts("foo", Concept("foo", body="x"), "bar", cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + cache = Cache().auto_configure("by_id") + manager.register_concept_cache("by_id", cache, lambda obj: obj.id, use_ref=True) + cache = ListIfNeededCache().auto_configure("by_key") + manager.register_concept_cache("by_key", cache, lambda obj: obj.key, use_ref=True) + manager.freeze() + + # create entries + manager.add_concept(foo) + manager.add_concept(foo2) + manager.add_concept(bar) + manager.commit(context) + + # add a new layer, and modify the concept + manager.push_ontology("new ontology") + modified = Concept().update_from(foo) + modified.get_metadata().key = "bar" + manager.update_concept(foo, modified) + manager.commit(context) + + assert manager.get("by_id", modified.id) == modified + assert manager.get("by_key", modified.key) == [bar, modified] + assert manager.get("by_key", foo.key) == foo2 + + # sanity check + assert manager.ontologies[0].cache_manager.sdp.get('by_key', foo.key) == foo2 + assert manager.ontologies[0].cache_manager.sdp.get('by_key', modified.key) == [bar, modified] + assert manager.ontologies[0].cache_manager.sdp.get('by_id', foo.id) == modified + assert manager.ontologies[1].cache_manager.sdp.get('by_key', foo.key) == [foo, foo2] + assert manager.ontologies[1].cache_manager.sdp.get('by_key', modified.key) == bar + assert manager.ontologies[1].cache_manager.sdp.get('by_id', foo.id) == foo + + def test_i_can_remove_concept_from_default_layer(self): + sheerka, 
context, foo = self.init_concepts("foo", cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + cache = Cache(default=lambda sdp, key: sdp.get("by_id", key), + extend_exists=lambda sdp, key: sdp.get("by_id", key)) + manager.register_concept_cache("by_id", cache, lambda obj: obj.id, use_ref=True) + cache = ListIfNeededCache(default=lambda sdp, key: sdp.get("by_key", key), + extend_exists=lambda sdp, key: sdp.get("by_key", key)) + manager.register_concept_cache("by_key", cache, lambda obj: obj.key, use_ref=True) + manager.freeze() + + manager.add_concept(foo) + manager.commit(context) + + manager.remove_concept(foo) + manager.commit(context) + + assert manager.get("by_id", foo.id) == NotFound + assert manager.get("by_key", foo.key) == NotFound + + # sanity check + assert manager.current_sdp().get("by_key") == {} + assert manager.current_sdp().get("by_id") == {} + + def test_i_can_remove_concept_from_top_layer(self): + sheerka, context, foo = self.init_concepts("foo", cache_only=False) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + cache = Cache().auto_configure("by_id") + manager.register_concept_cache("by_id", cache, lambda obj: obj.id, use_ref=True) + cache = ListIfNeededCache().auto_configure("by_key") + manager.register_concept_cache("by_key", cache, lambda obj: obj.key, use_ref=True) + manager.freeze() + + manager.add_concept(foo) + manager.commit(context) + + # add a new layer, and remove the concept + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + + manager.remove_concept(foo) + manager.commit(context) + + assert manager.get("by_id", foo.id) == NotFound + assert manager.get("by_key", foo.key) == NotFound + + # sanity check + assert manager.current_sdp().get("by_id") == {foo.id: Removed} + assert manager.current_sdp().get("by_key") == {foo.key: Removed} + assert manager.ontologies[1].cache_manager.sdp.get("by_id") == NotFound + 
assert manager.ontologies[1].cache_manager.sdp.get("by_key") == NotFound + assert manager.ontologies[2].cache_manager.sdp.get("by_id") == {foo.id: foo} + assert manager.ontologies[2].cache_manager.sdp.get("by_key") == {foo.key: foo} + + # So I can pop + manager.pop_ontology() + assert manager.get("by_id", foo.id) == foo + assert manager.get("by_key", foo.key) == foo + + # and pop again + manager.pop_ontology() + assert manager.get("by_id", foo.id) == foo + assert manager.get("by_key", foo.key) == foo + + def test_i_can_get_all(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key_to_remove1", "value1") + manager.put("cache_name", "key_to_remove2", "value1") + manager.put("cache_name", "key1", "value1") + manager.put("cache_name", "key2", "value2_in_sdp") + manager.put("cache_name", "key3", "value3") + manager.commit(context) + manager.put("cache_name", "key2", "value2_in_cache") # in cache, but not in remote sdp + + # add ontology layers + manager.push_ontology("new ontology") + manager.put("cache_name", "key1", "value1_from_new_ontology") + manager.put("cache_name", "key2", "value2_from_new_ontology") + manager.put("cache_name", "key4", "value4_in_sdp") + manager.commit(context) + manager.put("cache_name", "key4", "value4_in_cache") + manager.put("cache_name", "key_to_remove1", Removed) + + manager.push_ontology("another ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + # so that value is only in sdp, not in cache + transaction.add("cache_name", "key5", "value5") + transaction.add("cache_name", "key_to_remove2", Removed) + + assert manager.get_all("cache_name") == { + "key1": "value1_from_new_ontology", + "key2": "value2_from_new_ontology", + "key3": "value3", + "key4": 
"value4_in_cache", + "key5": "value5" + } + + def test_i_can_list_by_key_when_dictionaries(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key1", {"a": "value1", "b": "value2", "c": "value3"}) + manager.commit(context) + + manager.push_ontology("new ontology") + manager.put("cache_name", "key1", {"a": "new value1", "d": "value4"}) # only in cache + + manager.push_ontology("another ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key1", {"b": "new value2", "e": "value5"}) + + assert manager.list_by_key("cache_name", "key1") == { + "a": "new value1", + "b": "new value2", + "c": "value3", + "d": "value4", + "e": "value5", + } + + def test_i_can_list_by_key_when_lists(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key1", ["a", "b", "c"]) + manager.commit(context) + + manager.push_ontology("new ontology") + manager.put("cache_name", "key1", ["a", "d"]) # only in cache + + manager.push_ontology("another ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key1", ["b", "e"]) + + assert manager.list_by_key("cache_name", "key1") == ["a", "b", "c", "a", "d", "b", "e"] + + def test_i_can_list_by_key_when_dictionaries_and_entries_are_removed(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + 
manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key1", {"a": "value1", "b": "value2", "c": "value3"}) + manager.put("cache_name", "key2", {"a": "value1", "b": "value2", "c": "value3"}) + manager.put("cache_name", "key3", {"a": "value1", "b": "value2", "c": "value3"}) + manager.commit(context) + + manager.push_ontology("new ontology") + manager.put("cache_name", "key1", Removed) # removed in cache + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key2", Removed) # removed in sdp + + manager.push_ontology("another ontology") + manager.put("cache_name", "key1", {"e": "value1", "f": "value2", "g": "value3"}) + manager.put("cache_name", "key2", {"e": "value1", "f": "value2", "g": "value3"}) + manager.put("cache_name", "key3", {"e": "value1", "f": "value2", "g": "value3"}) + + assert manager.list_by_key("cache_name", "key1") == {"e": "value1", "f": "value2", "g": "value3"} + assert manager.list_by_key("cache_name", "key2") == {"e": "value1", "f": "value2", "g": "value3"} + assert manager.list_by_key("cache_name", "key3") == {"a": "value1", "b": "value2", "c": "value3", + "e": "value1", "f": "value2", "g": "value3"} + + def test_i_can_list_by_key_when_lists_and_entries_are_removed(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key1", ["a", "b", "c"]) + manager.put("cache_name", "key2", ["a", "b", "c"]) + manager.put("cache_name", "key3", ["a", "b", "c"]) + manager.commit(context) + + manager.push_ontology("new ontology") + manager.put("cache_name", "key1", Removed) # removed in cache + with manager.current_sdp().get_transaction(context.event) as transaction: + 
transaction.add("cache_name", "key2", Removed) # removed in sdp + + manager.push_ontology("another ontology") + manager.put("cache_name", "key1", ["e", "f", "g"]) + manager.put("cache_name", "key2", ["e", "f", "g"]) + manager.put("cache_name", "key3", ["e", "f", "g"]) + + assert manager.list_by_key("cache_name", "key1") == ["e", "f", "g"] + assert manager.list_by_key("cache_name", "key2") == ["e", "f", "g"] + assert manager.list_by_key("cache_name", "key3") == ["a", "b", "c", "e", "f", "g"] + + def test_i_can_get_call_when_a_cache_is_cleared(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key1", "value1") + manager.put("cache_name", "key2", "value2") + manager.put("cache_name", "key3", "value3") + + # add ontology layers + manager.push_ontology("new ontology") + manager.clear("cache_name") + manager.put("cache_name", "key1", "new value1") + manager.put("cache_name", "key4", "value4") + + manager.push_ontology("another ontology") + manager.put("cache_name", "key5", "value5") + + assert manager.get_all("cache_name") == { + "key1": "new value1", + "key4": "value4", + "key5": "value5" + } + + def test_i_can_get_all_when_inc_cache(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", IncCache().auto_configure("cache_name")) + manager.freeze() + + assert manager.get("cache_name", "key1") == 1 + assert manager.get("cache_name", "key1") == 2 + + manager.push_ontology("new ontology") + assert manager.get("cache_name", "key1") == 3 + assert manager.get("cache_name", "key2") == 1 + assert manager.get("cache_name", "key2") == 2 + + assert 
manager.get_all("cache_name") == { + "key1": 3, + "key2": 2, + } + + # a second time, to make sure that nothing was incremented + assert manager.get_all("cache_name") == { + "key1": 3, + "key2": 2, + } + + @pytest.mark.parametrize("all_ontologies, expected_in_layer_1", [ + (False, {}), + (True, {'key1': DummyObj(key='key1', value='value1'), + 'key2': DummyObj(key='key2', value='value2')}), + ]) + def test_i_can_populate(self, all_ontologies, expected_in_layer_1): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key1", DummyObj("key1", "value1")) + manager.put("cache_name", "key2", DummyObj("key2", "value2")) + manager.commit(context) + manager.clear("cache_name") + + manager.push_ontology("new ontology") + manager.put("cache_name", "key2", DummyObj("key2", "value22")) + manager.put("cache_name", "key3", DummyObj("key3", "value3")) + manager.commit(context) + manager.clear("cache_name") + + # sanity check + assert manager.ontologies[0].cache_manager.get_cache("cache_name").copy() == {} + assert manager.ontologies[1].cache_manager.get_cache("cache_name").copy() == {} + + manager.populate("cache_name", + lambda sdp: sdp.list("cache_name"), + lambda obj: obj.key, + all_ontologies=all_ontologies) + + assert manager.ontologies[0].cache_manager.get_cache("cache_name").copy() == { + 'key2': DummyObj(key='key2', value='value22'), + 'key3': DummyObj(key='key3', value='value3')} + assert manager.ontologies[1].cache_manager.get_cache("cache_name").copy() == expected_in_layer_1 + + def test_i_can_clear_when_multiple_ontology_layers(self): + sheerka = self.get_sheerka(cache_only=False) + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + 
manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + manager.put("cache_name", "key1", "value1") + manager.put("cache_name", "key2", "value2") + assert manager.ontologies[0].cache_manager.get_cache("cache_name").copy() == {'key1': 'value1', + 'key2': 'value2'} + + # I can clear in another layer + manager.push_ontology("new ontology") + manager.clear("cache_name") + assert manager.get("cache_name", "key1") is NotFound + assert manager.get("cache_name", "key2") is NotFound + assert manager.ontologies[0].cache_manager.get_cache("cache_name").copy() == {} + + manager.put("cache_name", "key1", "new value1") + assert manager.get("cache_name", "key1") == "new value1" + assert manager.get("cache_name", "key2") is NotFound + assert manager.ontologies[0].cache_manager.get_cache("cache_name").copy() == {'key1': "new value1"} + + manager.push_ontology("another ontology") + manager.put("cache_name", "key2", "new value2") + assert manager.get("cache_name", "key1") == "new value1" + assert manager.get("cache_name", "key2") == "new value2" + assert manager.ontologies[0].cache_manager.get_cache("cache_name").copy() == {'key1': "new value1", + 'key2': "new value2"} + + manager.clear("cache_name") + assert manager.get("cache_name", "key1") is NotFound + assert manager.get("cache_name", "key2") is NotFound + assert manager.ontologies[0].cache_manager.get_cache("cache_name").copy() == {} + assert manager.ontologies[1].cache_manager.get_cache("cache_name").copy() == {'key1': "new value1"} + assert manager.ontologies[2].cache_manager.get_cache("cache_name").copy() == {'key1': 'value1', + 'key2': 'value2'} + + manager.pop_ontology() + assert manager.get("cache_name", "key1") == "new value1" + assert manager.get("cache_name", "key2") is NotFound + + manager.pop_ontology() + assert manager.get("cache_name", "key1") == "value1" + assert manager.get("cache_name", "key2") == "value2" + + def test_already_on_the_top(self): + sheerka = 
self.get_sheerka() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.freeze() + + manager.push_ontology("new ontology") + + assert manager.already_on_top("new ontology") + assert not manager.already_on_top("another ontology") + + def test_already_on_the_top_when_the_ontology_already_exists(self): + sheerka = self.get_sheerka() + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.freeze() + + manager.push_ontology("new ontology") + manager.push_ontology("another ontology") + + with pytest.raises(OntologyAlreadyExists): + assert manager.already_on_top("new ontology") + + +class TestSheerkaOntologyWithFileBasedSheerka(TestUsingFileBasedSheerka): + def test_i_can_put_back_ontology(self): + sheerka = self.get_sheerka() + context = self.get_context(sheerka) + + manager = SheerkaOntologyManager(sheerka, sheerka.root_folder, sheerka.cache_only) + manager.register_cache("cache_name", Cache().auto_configure("cache_name")) + manager.freeze() + + # default layer + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value1") + + # add a layer + manager.push_ontology("new ontology") + with manager.current_sdp().get_transaction(context.event) as transaction: + transaction.add("cache_name", "key", "value2") + + assert manager.get("cache_name", "key") == "value2" + + manager.pop_ontology() + assert manager.get("cache_name", "key") == "value1" + + # put back the previous ontology + manager.push_ontology("new ontology") + assert manager.get("cache_name", "key") == "value2" diff --git a/tests/core/test_sheerka_printer.py b/tests/core/test_sheerka_printer.py index 394f25a..cc32bc5 100644 --- a/tests/core/test_sheerka_printer.py +++ b/tests/core/test_sheerka_printer.py @@ -2,7 +2,8 @@ from dataclasses import dataclass import pytest from core.builtin_concepts import BuiltinConcepts -from core.concept import Concept, ConceptParts +from 
core.concept import Concept, ConceptParts, freeze_concept_attrs +from core.simple_debug import my_debug from printer.Formatter import Formatter, BraceToken from printer.SheerkaPrinter import FormatInstructions @@ -154,10 +155,13 @@ class TestSheerkaPrinter(TestUsingMemoryBasedSheerka): # for all obj of a given type sheerka = self.get_sheerka() + sheerka.printer_handler.reset() + foo = Concept("foo a b").def_var("a").def_var("b").init_key() foo.set_value("a", "value a").set_value("b", "value b") foo.set_value(ConceptParts.BODY, "body") sheerka.set_id_if_needed(foo, False) + freeze_concept_attrs(foo) sheerka.printer_handler.register_format_l(foo, "DEFAULT:{id}-{name}-{key}-{body}-{a}-{b}") @@ -170,10 +174,13 @@ class TestSheerkaPrinter(TestUsingMemoryBasedSheerka): # for all obj of a given type in the current print call sheerka = self.get_sheerka() + sheerka.printer_handler.reset() + foo = Concept("foo a b").def_var("a").def_var("b").init_key() foo.set_value("a", "value a").set_value("b", "value b") foo.set_value(ConceptParts.BODY, "body") sheerka.set_id_if_needed(foo, False) + freeze_concept_attrs(foo) sheerka.printer_handler.register_format_l(foo, "DEFAULT:{id}-{name}-{key}-{body}-{a}-{b}") context_instructions = FormatInstructions().set_format_l(foo, "CONTEXT:{id}-{name}-{key}-{body}-{a}-{b}") @@ -187,10 +194,13 @@ class TestSheerkaPrinter(TestUsingMemoryBasedSheerka): # for the item only sheerka = self.get_sheerka() + sheerka.printer_handler.reset() + foo = Concept("foo a b").def_var("a").def_var("b").init_key() foo.set_value("a", "value a").set_value("b", "value b") foo.set_value(ConceptParts.BODY, "body") sheerka.set_id_if_needed(foo, False) + freeze_concept_attrs(foo) sheerka.printer_handler.register_format_l(foo, "{id}-{name}-{key}-{body}-{a}-{b}") context_instructions = FormatInstructions().set_format_l(foo, "CONTEXT:{id}-{name}-{key}-{body}-{a}-{b}") @@ -242,6 +252,8 @@ class TestSheerkaPrinter(TestUsingMemoryBasedSheerka): def 
test_i_can_format_d_concepts_using_default_definition(self, capsys): sheerka, context, foo = self.init_concepts(Concept("foo a b").def_var("a").def_var("b")) + sheerka.printer_handler.reset() + foo_1 = sheerka.new(foo.key, a="value a", b="value b") foo_2 = sheerka.new(foo.key, a="value c", b="value d") lst = [foo_1, foo_2] @@ -259,6 +271,8 @@ b: DEFAULT:'value d' def test_i_can_format_d_concepts_using_context_definition(self, capsys): sheerka, context, foo = self.init_concepts(Concept("foo a b").def_var("a").def_var("b")) + sheerka.printer_handler.reset() + foo_1 = sheerka.new(foo.key, a="value a", b="value b") foo_2 = sheerka.new(foo.key, a="value c", b="value d") lst = [foo_1, foo_2] @@ -277,6 +291,8 @@ b: CONTEXT:'value d' def test_i_can_format_d_concepts_using_item_definition(self, capsys): sheerka, context, foo = self.init_concepts(Concept("foo a b").def_var("a").def_var("b")) + sheerka.printer_handler.reset() + item_instructions = FormatInstructions().set_format_d(foo, {"a": "ITEM:{a}", "b": "ITEM:{b}"}) foo.set_format_instructions(item_instructions) foo_1 = sheerka.new(foo.key, a="value a", b="value b") @@ -371,6 +387,8 @@ self: (1001)foo a b def test_i_can_format_d_when_dictionary(self, capsys): sheerka, context, foo = self.init_concepts(Concept("foo a b").def_var("a").def_var("b")) + sheerka.printer_handler.reset() + dict_value = { "a": "value a", "beta": {"b1": 10, "b2": Obj("10", 15), "b3": ["items", "in", "a", "list"]}, @@ -436,6 +454,8 @@ bar: *name 'bar' is not defined* ]) def test_i_can_concat_print_instruction_and_register_format(self, capsys, template, expected): sheerka = self.get_sheerka() + sheerka.printer_handler.reset() + foo = Obj("value a", "value b") sheerka.printer_handler.register_format_l("tests.core.test_sheerka_printer.Obj", "{a}") diff --git a/tests/core/test_utils.py b/tests/core/test_utils.py index f46c494..f59f318 100644 --- a/tests/core/test_utils.py +++ b/tests/core/test_utils.py @@ -3,7 +3,9 @@ from dataclasses import dataclass 
import core.utils import pytest from core.builtin_concepts import BuiltinConcepts +from core.builtin_helpers import evaluate_expression from core.concept import Concept +from core.global_symbols import NotFound, NotInit, Removed from core.tokenizer import Token, TokenKind, Tokenizer, Keywords @@ -306,7 +308,7 @@ def test_i_can_make_unique(): ("a.prop2.bar", {"a": Obj2(None, {'foo': 'dict-first', 'bar': 'dict-second'})}, 'dict-second'), ]) def test_i_can_evaluate_expression(expression, bag, expected): - assert core.utils.evaluate_expression(expression, bag) == expected + assert evaluate_expression(expression, bag) == expected @pytest.mark.parametrize("expression, bag, expected_error, prop_name", [ @@ -315,7 +317,7 @@ def test_i_can_evaluate_expression(expression, bag, expected): ]) def test_i_cannot_evaluate_expression(expression, bag, expected_error, prop_name): with pytest.raises(expected_error) as e: - core.utils.evaluate_expression(expression, bag) + evaluate_expression(expression, bag) assert e.value.args == (prop_name,) @@ -417,3 +419,9 @@ def test_i_can_deep_copy_a_concept(): concept_props = sorted(list(concept.get_prop(BuiltinConcepts.ISA)), key=lambda o: o.id) for copied_prop, concept_prop in zip(copied_props, concept_props): check_are_the_same(copied_prop, concept_prop) + + +def test_i_can_deep_copy_a_custom_type(): + assert core.utils.sheerka_deepcopy(NotInit) is NotInit + assert core.utils.sheerka_deepcopy(NotFound) is NotFound + assert core.utils.sheerka_deepcopy(Removed) is Removed diff --git a/tests/evaluators/test_AddConceptInSetEvaluator.py b/tests/evaluators/test_AddConceptInSetEvaluator.py index 33742fb..7ae6640 100644 --- a/tests/evaluators/test_AddConceptInSetEvaluator.py +++ b/tests/evaluators/test_AddConceptInSetEvaluator.py @@ -51,7 +51,7 @@ class TestAddConceptInSetEvaluator(TestUsingMemoryBasedSheerka): assert res.value.body == "bar" def test_i_can_add_concept_to_a_set_of_concept(self): - sheerka, context, foo, bar = 
self.init_concepts("foo", "bar", create_new=True) + sheerka, context, foo, bar = self.init_test().with_concepts("foo", "bar", create_new=True).unpack() ret_val = get_isa_ret_val("foo", "bar") res = AddConceptInSetEvaluator().eval(context, ret_val) @@ -72,12 +72,12 @@ class TestAddConceptInSetEvaluator(TestUsingMemoryBasedSheerka): So 'foo' cannot be put is set :return: """ - sheerka, context, one, two, foo, bar = self.init_concepts( + sheerka, context, one, two, foo, bar = self.init_test().with_concepts( "one", "two", Concept("foo", definition="(one|two)=a 'plus' (one|two)=b", body="a + b").def_var("a").def_var("b"), "bar", - create_new=True) + create_new=True).unpack() ret_val = get_isa_ret_val("foo", "bar") res = AddConceptInSetEvaluator().eval(context, ret_val) @@ -107,7 +107,7 @@ class TestAddConceptInSetEvaluator(TestUsingMemoryBasedSheerka): assert context.sheerka.isinstance(res.value, BuiltinConcepts.SUCCESS) def test_i_cannot_add_the_same_concept_twice(self): - sheerka, context, foo, bar = self.init_concepts("foo", "bar", create_new=True) + sheerka, context, foo, bar = self.init_test().with_concepts("foo", "bar", create_new=True).unpack() ret_val = get_isa_ret_val("foo", "bar") AddConceptInSetEvaluator().eval(context, ret_val) diff --git a/tests/evaluators/test_DefConceptEvaluator.py b/tests/evaluators/test_DefConceptEvaluator.py index 1dbd3c9..3c65a63 100644 --- a/tests/evaluators/test_DefConceptEvaluator.py +++ b/tests/evaluators/test_DefConceptEvaluator.py @@ -158,7 +158,7 @@ class TestDefConceptEvaluator(TestUsingMemoryBasedSheerka): assert created_concept.get_metadata().variables == [("x", None), ("y", None)] def test_other_concepts_are_not_variables(self): - sheerka, context, *concepts = self.init_concepts("little", "size", create_new=True) + sheerka, context, *concepts = self.init_test().with_concepts("little", "size", create_new=True).unpack() def_concept_node = self.get_def_concept_node_from_name_only("little x") name_to_use = 
DefConceptEvaluator.get_name_to_use(def_concept_node) diff --git a/tests/evaluators/test_EvalEvaluator.py b/tests/evaluators/test_EvalEvaluator.py index 4e21cb3..a4bda21 100644 --- a/tests/evaluators/test_EvalEvaluator.py +++ b/tests/evaluators/test_EvalEvaluator.py @@ -1,7 +1,7 @@ import pytest from core.builtin_concepts import ReturnValueConcept, BuiltinConcepts from core.concept import Concept -from core.sheerka.services.SheerkaSetsManager import SheerkaSetsManager +from core.sheerka.services.SheerkaIsAManager import SheerkaIsAManager from evaluators.ReturnBodyEvaluator import ReturnBodyEvaluator from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka @@ -89,7 +89,7 @@ class TestEvalEvaluator(TestUsingMemoryBasedSheerka): Concept("bar"), Concept("baz"), Concept("number")) - sets_handler = sheerka.services[SheerkaSetsManager.NAME] + sets_handler = sheerka.services[SheerkaIsAManager.NAME] sets_handler.add_concepts_to_set(context, [foo, bar, baz], number) evaluated = ReturnBodyEvaluator().eval(context, [retval(number)]) diff --git a/tests/evaluators/test_LexerNodeEvaluator.py b/tests/evaluators/test_LexerNodeEvaluator.py index a7d9187..b26be69 100644 --- a/tests/evaluators/test_LexerNodeEvaluator.py +++ b/tests/evaluators/test_LexerNodeEvaluator.py @@ -55,7 +55,7 @@ class TestLexerNodeEvaluator(TestUsingMemoryBasedSheerka): assert LexerNodeEvaluator().matches(context, ret_val) == expected def test_concept_is_returned_when_only_one_in_the_list(self): - sheerka, context, foo = self.init_concepts(self.bnf_concept("foo"), singleton=True) + sheerka, context, foo = self.init_concepts(self.bnf_concept("foo")) ret_val = self.init_from_concepts(context, [foo], "foo") evaluator = LexerNodeEvaluator() diff --git a/tests/evaluators/test_PythonEvaluator.py b/tests/evaluators/test_PythonEvaluator.py index 646e1af..2b97922 100644 --- a/tests/evaluators/test_PythonEvaluator.py +++ b/tests/evaluators/test_PythonEvaluator.py @@ -68,8 +68,8 @@ class 
TestPythonEvaluator(TestUsingMemoryBasedSheerka): assert PythonEvaluator().matches(context, ret_val) == expected @pytest.mark.parametrize("text, expected", [ - # ("1 + 1", 2), - # ("test()", "I have access to Sheerka !"), + ("1 + 1", 2), + ("test()", "I have access to Sheerka !"), ("sheerka.test()", "I have access to Sheerka !"), ("a=10\na", 10), ("Concept('foo')", Concept('foo')), @@ -131,7 +131,7 @@ class TestPythonEvaluator(TestUsingMemoryBasedSheerka): assert evaluated.value == "I have access to Sheerka ! param='value for param', event='xxx'." def test_i_can_eval_using_context_when_self_is_not_sheerka(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() parsed = PythonParser().parse(context, ParserInput("create_new_concept(Concept('foo'))")) evaluated = PythonEvaluator().eval(context, parsed) @@ -230,7 +230,7 @@ class TestPythonEvaluator(TestUsingMemoryBasedSheerka): 'c:__var__0 mult __var__1|1002:': 2} def test_i_can_define_variables(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() parsed = PythonParser().parse(context, ParserInput("a=10")) python_evaluator = PythonEvaluator() @@ -293,7 +293,7 @@ class TestPythonEvaluator(TestUsingMemoryBasedSheerka): assert error1.concepts == {'foo': 'string'} def test_i_can_use_sheerka_locals(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() def func(i): return i + 1 @@ -318,7 +318,7 @@ class TestPythonEvaluator(TestUsingMemoryBasedSheerka): assert evaluated.value == "I have access to Sheerka ! param=(1001)one, event='xxx'." 
def test_i_can_eval_rules_from_python_parser(self): - sheerka, context = self.init_concepts() + sheerka, context = self.init_test().unpack() parsed_ret_val = PythonParser().parse(context, ParserInput("r:|1:.id")) assert parsed_ret_val.status diff --git a/tests/non_reg/test_sheerka_display.py b/tests/non_reg/test_sheerka_display.py index 789060f..819fb24 100644 --- a/tests/non_reg/test_sheerka_display.py +++ b/tests/non_reg/test_sheerka_display.py @@ -2,11 +2,6 @@ from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka class TestSheerkaNonRegDisplay(TestUsingMemoryBasedSheerka): - @classmethod - def teardown_class(cls): - # At the end of the tests, sheerka singleton instance will be corrupted - # Ask for a new one - TestSheerkaNonRegDisplay.singleton_instance = None def test_i_can_display_results_when_return_values_processing_is_on(self, capsys): init = [ @@ -27,7 +22,7 @@ class TestSheerkaNonRegDisplay(TestUsingMemoryBasedSheerka): ] sheerka = self.init_scenario(init) capsys.readouterr() - + sheerka.enable_process_return_values = True sheerka.evaluate_user_input("desc(foo)") @@ -72,4 +67,24 @@ props : {} captured = capsys.readouterr() assert captured.out == """ReturnValue(who=evaluators.Concept, status=True, value=(1001)foo) ReturnValue(who=evaluators.Concept, status=True, value=(1002)foo) +""" + + def test_i_can_list_ontologies(self, capsys): + init = [ + "push_ontology('test 1')", + "push_ontology('test 2')", + "pop_ontology()", + "push_ontology('test 3')", + ] + sheerka = self.init_scenario(init) + capsys.readouterr() + + sheerka.enable_process_return_values = True + sheerka.evaluate_user_input("ontologies()") + + captured = capsys.readouterr() + assert captured.out == """test 3 +test 1 +#unit_test# +__default__ """ diff --git a/tests/non_reg/test_sheerka_non_reg.py b/tests/non_reg/test_sheerka_non_reg.py index 51d1bee..112bc87 100644 --- a/tests/non_reg/test_sheerka_non_reg.py +++ b/tests/non_reg/test_sheerka_non_reg.py @@ -1,6 +1,7 @@ import 
pytest from core.builtin_concepts import BuiltinConcepts -from core.concept import Concept, PROPERTIES_TO_SERIALIZE, simplec, CMV, NotInit, CC +from core.concept import Concept, PROPERTIES_TO_SERIALIZE, simplec, CMV, CC +from core.global_symbols import NotInit from core.sheerka.services.SheerkaConceptManager import SheerkaConceptManager from evaluators.MutipleSameSuccessEvaluator import MultipleSameSuccessEvaluator from evaluators.OneSuccessEvaluator import OneSuccessEvaluator @@ -115,10 +116,10 @@ as: assert service.has_id(concept_saved.id) assert service.has_name(concept_saved.name) assert service.has_hash(concept_saved.get_definition_hash()) - assert sheerka.cache_manager.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == {'+': ['1001']} + assert sheerka.om.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == {'+': ['1001']} # sdp is up to date - assert sheerka.sdp.exists(SheerkaConceptManager.CONCEPTS_BY_KEY_ENTRY, expected.key) + assert sheerka.om.current_sdp().exists(SheerkaConceptManager.CONCEPTS_BY_KEY_ENTRY, expected.key) def test_i_can_evaluate_def_concept_part_when_one_part_is_a_ref_of_another_concept(self): """ @@ -182,10 +183,10 @@ as: assert sheerka.isinstance(res[0].value, BuiltinConcepts.NOP) def test_i_can_recognize_concept_with_variable(self): - sheerka, context, concept_foo, concept_hello = self.init_concepts( + sheerka, context, concept_foo, concept_hello = self.init_test().with_concepts( "foo", Concept(name="hello a").def_var("a"), - create_new=True) + create_new=True).unpack() res = sheerka.evaluate_user_input("hello foo") return_value = res[0].value @@ -787,14 +788,10 @@ as: "def concept plus from a plus b as a + b", "def concept mult from a mult b as a * b", "def concept twenties from bnf 'twenty' (one|two)=unit as 20 + unit", + "set_is_greater_than(BuiltinConcepts.PRECEDENCE, mult, plus, 'Sya')" ] sheerka = self.init_scenario(definitions) - context = self.get_context(sheerka) - sheerka.test_only_force_sya_def(context, [ - 
(sheerka.get_by_name("mult").id, 20, SyaAssociativity.Right), - (sheerka.get_by_name("plus").id, 10, SyaAssociativity.Right), - ]) res = sheerka.evaluate_user_input("eval one plus two mult three") assert len(res) == 1 @@ -952,8 +949,8 @@ as: sheerka = self.init_scenario(init) # simulate that sheerka was stopped and restarted - sheerka.cache_manager.clear(sheerka.CONCEPTS_GRAMMARS_ENTRY) - sheerka.cache_manager.get(SheerkaConceptManager.CONCEPTS_BY_KEY_ENTRY, "twenties").set_compiled({}) + sheerka.om.clear(sheerka.CONCEPTS_GRAMMARS_ENTRY) + sheerka.om.get(SheerkaConceptManager.CONCEPTS_BY_KEY_ENTRY, "twenties").set_compiled({}) res = sheerka.evaluate_user_input("eval twenty one") assert res[0].status @@ -1109,12 +1106,10 @@ as: assert len(res) == 1 assert res[0].status - sheerka = self.init_scenario(init) res = sheerka.evaluate_user_input("desc(bar)") assert len(res) == 1 assert res[0].status - sheerka = self.init_scenario(init) res = sheerka.evaluate_user_input("desc(baz)") assert len(res) == 1 assert res[0].status @@ -1208,6 +1203,32 @@ as: assert res[0].status assert res[0].body == 2 + def test_i_can_parse_when_multiple_ontology_layers(self): + init = [ + "def concept one as 1", + "def concept two as 2", + "def concept a plus b as a + b", # sya node + "def concept twenties from bnf 'twenty' (one | two)=unit as 20 + unit", # bnf node + ] + sheerka = self.init_scenario(init) + + sheerka.push_ontology(self.get_context(sheerka), "new ontology") + + res = sheerka.evaluate_user_input("eval one") + assert len(res) == 1 + assert res[0].status + assert res[0].body == 1 + + res = sheerka.evaluate_user_input("eval one plus two") + assert len(res) == 1 + assert res[0].status + assert res[0].body == 3 + + res = sheerka.evaluate_user_input("eval twenty one") + assert len(res) == 1 + assert res[0].status + assert res[0].body == 21 + class TestSheerkaNonRegFile(TestUsingFileBasedSheerka): def test_i_can_def_several_concepts(self): @@ -1231,7 +1252,7 @@ class 
TestSheerkaNonRegFile(TestUsingFileBasedSheerka): assert res[0].status assert sheerka.isinstance(res[0].value, BuiltinConcepts.NEW_CONCEPT) - saved_concept = sheerka.sdp.get(SheerkaConceptManager.CONCEPTS_BY_KEY_ENTRY, "plus") + saved_concept = sheerka.om.current_sdp().get(SheerkaConceptManager.CONCEPTS_BY_KEY_ENTRY, "plus") assert saved_concept.key == "plus" assert saved_concept.get_metadata().definition == "a ('plus' plus)?" assert "a" in saved_concept.values() @@ -1295,3 +1316,15 @@ class TestSheerkaNonRegFile(TestUsingFileBasedSheerka): assert sheerka.evaluate_user_input("eval twenty one")[0].body == 21 assert sheerka.evaluate_user_input("eval thirty one")[0].body == 31 + + def test_i_can_pop_ontology_after_restart(self): + sheerka = self.get_sheerka() + sheerka.evaluate_user_input("push_ontology('test')") + + sheerka = self.new_sheerka_instance(False) + + res = sheerka.evaluate_user_input("pop_ontology()") + + assert len(res) == 1 + assert res[0].status + assert sheerka.isinstance(res[0].body, BuiltinConcepts.ONTOLOGY_REMOVED) diff --git a/tests/out/test_SheerkaOut.py b/tests/out/test_SheerkaOut.py index 62ed84e..c8276a7 100644 --- a/tests/out/test_SheerkaOut.py +++ b/tests/out/test_SheerkaOut.py @@ -4,16 +4,19 @@ import pytest from core.builtin_concepts import ReturnValueConcept, BuiltinConcepts from core.concept import Concept from core.rule import Rule +from core.sheerka.Sheerka import Sheerka +from core.sheerka.SheerkaOntologyManager import SheerkaOntologyManager +from core.sheerka.services.SheerkaComparisonManager import SheerkaComparisonManager from core.sheerka.services.SheerkaOut import SheerkaOut from core.sheerka.services.SheerkaRuleManager import FormatAstRawText, FormatAstVariable, FormatAstSequence, \ - FormatAstColor, FormatAstVariableNotFound, FormatAstList, FormatAstDict + FormatAstColor, FormatAstVariableNotFound, FormatAstList, FormatAstDict, SheerkaRuleManager from core.utils import flatten_all_children from tests.TestUsingMemoryBasedSheerka 
import TestUsingMemoryBasedSheerka -seq = FormatAstSequence -raw = FormatAstRawText -var = FormatAstVariable + +def seq(*args, **kwargs): + return FormatAstSequence(*args, **kwargs) @dataclass @@ -24,6 +27,20 @@ class DummyObj: class TestSheerkaOut(TestUsingMemoryBasedSheerka): + @classmethod + def setup_class(cls): + sheerka = cls().get_sheerka(cache_only=False, ontology="#TestSheerkaOut#") + sheerka.om.clear(SheerkaRuleManager.FORMAT_RULE_ENTRY) + sheerka.om.clear(SheerkaComparisonManager.COMPARISON_ENTRY) + sheerka.om.clear(SheerkaComparisonManager.RESOLVED_COMPARISON_ENTRY) + sheerka.om.delete(Sheerka.OBJECTS_IDS_ENTRY, SheerkaRuleManager.RULE_IDS) + cls.root_ontology_name = "#TestSheerkaOut#" + + @classmethod + def teardown_class(cls): + cls.sheerka.pop_ontology() + cls.root_ontology_name = SheerkaOntologyManager.ROOT_ONTOLOGY_NAME + def init_service_with_rules(self, *rules, **kwargs): sheerka, context, *rules = self.init_format_rules(*rules, **kwargs) service = sheerka.services[SheerkaOut.NAME] diff --git a/tests/parsers/test_BaseNodeParser.py b/tests/parsers/test_BaseNodeParser.py index 90cf8fe..83f8585 100644 --- a/tests/parsers/test_BaseNodeParser.py +++ b/tests/parsers/test_BaseNodeParser.py @@ -24,7 +24,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): sheerka, context, *updated = self.init_concepts(concept) - res = BaseNodeParser.get_concepts_by_first_token(context, updated) + res = BaseNodeParser.compute_concepts_by_first_token(context, updated) assert res.status assert res.body == expected @@ -54,7 +54,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): concept.set_bnf(bnf) sheerka.set_id_if_needed(concept, False) - res = BaseNodeParser.get_concepts_by_first_token(context, [concept]) + res = BaseNodeParser.compute_concepts_by_first_token(context, [concept]) assert res.status assert res.body == expected @@ -75,7 +75,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): foo.set_bnf(OrderedChoice(ConceptExpression("bar"), 
ConceptExpression("baz"), StrMatch("qux"))) sheerka.set_id_if_needed(foo, False) - res = BaseNodeParser.get_concepts_by_first_token(context, [bar, baz, foo]) + res = BaseNodeParser.compute_concepts_by_first_token(context, [bar, baz, foo]) assert res.status assert res.body == { @@ -87,12 +87,12 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): } def test_i_can_get_concepts_by_first_keyword_using_sheerka(self): - sheerka, context, *updated = self.init_concepts( + sheerka, context, *updated = self.init_test().with_concepts( "one", "two", Concept("twenty", definition="'twenty' (one|two)"), create_new=True - ) + ).unpack() bar = Concept("bar").init_key() sheerka.set_id_if_needed(bar, False) @@ -102,7 +102,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): foo.set_bnf(OrderedChoice(ConceptExpression("one"), ConceptExpression("bar"), StrMatch("qux"))) sheerka.set_id_if_needed(foo, False) - res = BaseNodeParser.get_concepts_by_first_token(context, [bar, foo], use_sheerka=True) + res = BaseNodeParser.compute_concepts_by_first_token(context, [bar, foo], use_sheerka=True) assert res.status assert res.body == { @@ -117,7 +117,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): def test_i_cannot_get_concept_by_first_keyword_when_no_first_keyword(self): sheerka, context, foo = self.init_concepts(Concept("x y", body="x y").def_var("x").def_var("y")) - res = BaseNodeParser.get_concepts_by_first_token(context, [foo]) + res = BaseNodeParser.compute_concepts_by_first_token(context, [foo]) assert not res.status assert res.body == NoFirstTokenError(foo, foo.key) @@ -126,9 +126,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): sheerka, context, *updated = self.init_concepts( "one", Concept("two", definition="one"), - Concept("three", definition="two"), - create_new=False - ) + Concept("three", definition="two")) concepts_by_first_keywords = { "one": ["1001"], @@ -152,7 +150,6 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): "hundred", 
Concept("twenties", definition="twenty number"), Concept("hundreds", definition="number hundred"), - create_new=True # mandatory because set_isa() needs it ) sheerka.set_isa(context, sheerka.new("one"), number) @@ -166,7 +163,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): sheerka.concepts_grammars.clear() # reset all the grammar to simulate Sheerka restart # cbft : concept_by_first_token (I usually don't use abbreviation) - cbft = BaseNodeParser.get_concepts_by_first_token(context, [number] + concepts).body + cbft = BaseNodeParser.compute_concepts_by_first_token(context, [number] + concepts).body resolved_ret_val = BaseNodeParser.resolve_concepts_by_first_keyword(context, cbft) assert resolved_ret_val.status @@ -188,7 +185,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): ConceptExpression("foo"), ConceptExpression("bar"))) - concepts_by_first_keywords = BaseNodeParser.get_concepts_by_first_token( + concepts_by_first_keywords = BaseNodeParser.compute_concepts_by_first_token( context, [good, foo, bar, baz]).body resolved_ret_val = BaseNodeParser.resolve_concepts_by_first_keyword(context, concepts_by_first_keywords) @@ -204,7 +201,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): a = self.create_and_add_in_cache_concept(sheerka, "a", bnf=Sequence("one", "two")) b = self.create_and_add_in_cache_concept(sheerka, "b", bnf=Sequence(ConceptExpression("a"), "two")) - concepts_by_first_keywords = BaseNodeParser.get_concepts_by_first_token( + concepts_by_first_keywords = BaseNodeParser.compute_concepts_by_first_token( context, [a, b]).body resolved_ret_val = BaseNodeParser.resolve_concepts_by_first_keyword(context, concepts_by_first_keywords) @@ -220,7 +217,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): self.bnf_concept("bar", ConceptExpression("foo")), ) - concepts_by_first_keywords = BaseNodeParser.get_concepts_by_first_token(context, [good, foo, bar]).body + concepts_by_first_keywords = 
BaseNodeParser.compute_concepts_by_first_token(context, [good, foo, bar]).body resolved_ret_val = BaseNodeParser.resolve_concepts_by_first_keyword(context, concepts_by_first_keywords) assert resolved_ret_val.status assert resolved_ret_val.body == { @@ -237,7 +234,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): self.bnf_concept("three", ConceptExpression("two")), ) - concepts_by_first_keywords = BaseNodeParser.get_concepts_by_first_token(context, [good, one, two, three]).body + concepts_by_first_keywords = BaseNodeParser.compute_concepts_by_first_token(context, [good, one, two, three]).body resolved_ret_val = BaseNodeParser.resolve_concepts_by_first_keyword(context, concepts_by_first_keywords) assert resolved_ret_val.status assert resolved_ret_val.body == { @@ -255,7 +252,7 @@ class TestBaseNodeParser(TestUsingMemoryBasedSheerka): self.bnf_concept("one", ConceptExpression("three")), ) - concepts_by_first_keywords = BaseNodeParser.get_concepts_by_first_token(context, [good, one, two, three]).body + concepts_by_first_keywords = BaseNodeParser.compute_concepts_by_first_token(context, [good, one, two, three]).body resolved_ret_val = BaseNodeParser.resolve_concepts_by_first_keyword(context, concepts_by_first_keywords) assert resolved_ret_val.status assert resolved_ret_val.body == { diff --git a/tests/parsers/test_BnfNodeParser.py b/tests/parsers/test_BnfNodeParser.py index 0ff135a..af2799d 100644 --- a/tests/parsers/test_BnfNodeParser.py +++ b/tests/parsers/test_BnfNodeParser.py @@ -1,13 +1,14 @@ import pytest + +import tests.parsers.parsers_utils from core.builtin_concepts import BuiltinConcepts -from core.concept import Concept, ConceptParts, DoNotResolve, CC, DEFINITION_TYPE_BNF, NotInit +from core.concept import Concept, ConceptParts, DoNotResolve, CC, DEFINITION_TYPE_BNF +from core.global_symbols import NotInit from core.sheerka.services.SheerkaExecute import ParserInput from parsers.BaseNodeParser import CNC, UTN, CN from parsers.BnfDefinitionParser 
import BnfDefinitionParser from parsers.BnfNodeParser import StrMatch, TerminalNode, NonTerminalNode, Sequence, OrderedChoice, \ Optional, ZeroOrMore, OneOrMore, ConceptExpression, UnOrderedChoice, BnfNodeParser - -import tests.parsers.parsers_utils from tests.BaseTest import BaseTest from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka @@ -82,28 +83,26 @@ def compute_expected_array(my_concepts_map, expression, expected, exclude_body=F class TestBnfNodeParser(TestUsingMemoryBasedSheerka): - sheerka = None + shared_ontology = None @classmethod def setup_class(cls): - t = cls() - TestBnfNodeParser.sheerka, context, _ = t.init_parser( - cmap, - singleton=False, - create_new=True, - init_from_sheerka=True) + init_test_helper = cls().init_test(cache_only=False, ontology="#TestBnfNodeParser#") + sheerka, context, *updated = init_test_helper.with_concepts(*cmap.values(), create_new=True).unpack() + for i, concept_name in enumerate(cmap): + cmap[concept_name] = updated[i] # end of initialisation sheerka = TestBnfNodeParser.sheerka - sheerka.set_isa(context, sheerka.new("one"), sheerka.new("number")) - sheerka.set_isa(context, sheerka.new("two"), sheerka.new("number")) - sheerka.set_isa(context, sheerka.new("three"), sheerka.new("number")) - sheerka.set_isa(context, sheerka.new("four"), sheerka.new("number")) - sheerka.set_isa(context, sheerka.new("thirty"), sheerka.new("number")) - sheerka.set_isa(context, sheerka.new("forty"), sheerka.new("number")) - sheerka.set_isa(context, sheerka.new("fifty"), sheerka.new("number")) - sheerka.set_isa(context, sheerka.new("one hundred"), sheerka.new("number")) - sheerka.set_isa(context, sheerka.new("hundreds"), sheerka.new("number")) + sheerka.set_isa(context, cmap["one"], cmap["number"]) + sheerka.set_isa(context, cmap["two"], cmap["number"]) + sheerka.set_isa(context, cmap["three"], cmap["number"]) + sheerka.set_isa(context, cmap["four"], cmap["number"]) + sheerka.set_isa(context, cmap["thirty"], cmap["number"]) 
+ sheerka.set_isa(context, cmap["forty"], cmap["number"]) + sheerka.set_isa(context, cmap["fifty"], cmap["number"]) + sheerka.set_isa(context, cmap["one hundred"], cmap["number"]) + sheerka.set_isa(context, cmap["hundreds"], cmap["number"]) # Pay attention. 'twenties (t1 and t2) are not set as number @@ -135,6 +134,9 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): cmap["thousands"] = sheerka.create_new_concept(context, thousands).body.body sheerka.set_isa(context, sheerka.new("thousands"), sheerka.new("number")) + cls.shared_ontology = sheerka.get_ontology(context) + sheerka.pop_ontology() + @staticmethod def update_bnf(context, concept): bnf_parser = BnfDefinitionParser() @@ -147,19 +149,38 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): return concept def init_parser(self, my_concepts_map=None, init_from_sheerka=False, **kwargs): - if my_concepts_map is not None: - sheerka, context, *updated = self.init_concepts(*my_concepts_map.values(), **kwargs) + if my_concepts_map is None: + sheerka, context = self.init_test().unpack() + sheerka.add_ontology(context, self.shared_ontology) + else: + sheerka, context, *updated = self.init_test().with_concepts(*my_concepts_map.values(), **kwargs).unpack() for i, pair in enumerate(my_concepts_map): my_concepts_map[pair] = updated[i] - else: - sheerka = TestBnfNodeParser.sheerka - context = self.get_context(sheerka) parser = BnfNodeParser(sheerka=sheerka) if init_from_sheerka else BnfNodeParser() return sheerka, context, parser - def exec_get_concepts_sequences(self, my_map, text, expected, multiple_result=False, post_init_concepts=None): - sheerka, context, *updated = self.init_concepts(*my_map.values(), create_new=False, singleton=True) + def validate_get_concepts_sequences(self, my_map, text, expected, multiple_result=False, post_init_concepts=None): + sheerka, context, *updated = self.init_test().with_concepts(*my_map.values(), create_new=False).unpack() + sequences = 
self.exec_get_concepts_sequences(context, + my_map, + text, + expected, + multiple_result, + post_init_concepts, + *updated) + return sequences + + @staticmethod + def exec_get_concepts_sequences(context, + my_map, + text, + expected, + multiple_result=False, + post_init_concepts=None, + *concepts): + sheerka = context.sheerka + if not multiple_result: expected_array = [compute_expected_array(my_map, text, expected)] else: @@ -169,7 +190,7 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): post_init_concepts(sheerka, context) parser = BnfNodeParser() - parser.init_from_concepts(context, updated) + parser.init_from_concepts(context, concepts) parser.reset_parser(context, ParserInput(text)) bnf_parsers_helpers = parser.get_concepts_sequences(context) @@ -179,15 +200,9 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): assert parser_helper.sequence == expected_sequence if len(bnf_parsers_helpers) == 1: - return sheerka, context, bnf_parsers_helpers[0].sequence + return bnf_parsers_helpers[0].sequence else: - return sheerka, context, [pe.sequence for pe in bnf_parsers_helpers] - - def validate_get_concepts_sequences(self, my_map, text, expected, multiple_result=False, post_init_concepts=None): - sheerka, context, sequences = self.exec_get_concepts_sequences( - my_map, text, expected, multiple_result, post_init_concepts - ) - return sequences + return [pe.sequence for pe in bnf_parsers_helpers] def test_i_cannot_parse_empty_strings(self): sheerka, context, parser = self.init_parser({}, singleton=True) @@ -706,10 +721,11 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): ConceptExpression("foo"), OrderedChoice(StrMatch("one"), StrMatch("two")))), } + sheerka, context, *concepts = self.init_test().with_concepts(*my_map.values(), create_new=False).unpack() text = "twenty two" expected = [CN("bar", source="twenty two")] - sheerka, context, sequences = self.exec_get_concepts_sequences(my_map, text, expected) + sequences = 
self.exec_get_concepts_sequences(context, my_map, text, expected, False, None, *concepts) concept_bar = sequences[0].concept assert concept_bar.get_compiled() == { @@ -720,7 +736,7 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): text = "thirty one" expected = [CN("bar", source="thirty one")] - sequences = self.validate_get_concepts_sequences(my_map, text, expected) + sequences = self.exec_get_concepts_sequences(context, my_map, text, expected, False, None, *concepts) concept_bar = sequences[0].concept assert concept_bar.get_compiled() == { ConceptParts.BODY: DoNotResolve("thirty one"), @@ -817,7 +833,7 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): # every obvious cyclic recursion are removed from concept_by_first_keyword dict parser.init_from_concepts(context, my_map.values()) - assert parser.concepts_by_first_keyword == expected + assert sheerka.om.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == expected # get_parsing_expression() also returns CHICKEN_AND_EGG parsing_expression = parser.get_parsing_expression(context, my_map["foo"]) @@ -842,7 +858,7 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): # every obvious cyclic recursion are removed from concept_by_first_keyword dict parser.init_from_concepts(context, my_map.values()) - assert parser.concepts_by_first_keyword == {} + assert sheerka.om.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == {} parsing_expression = parser.get_parsing_expression(context, my_map["foo"]) assert sheerka.isinstance(parsing_expression, BuiltinConcepts.CHICKEN_AND_EGG) @@ -868,7 +884,7 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): # every obvious cyclic recursion are removed from concept_by_first_keyword dict parser.init_from_concepts(context, my_map.values()) - assert parser.concepts_by_first_keyword == {} + assert sheerka.om.copy(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY) == {} parsing_expression = parser.get_parsing_expression(context, my_map["foo"]) assert 
sheerka.isinstance(parsing_expression, BuiltinConcepts.CHICKEN_AND_EGG) @@ -1008,8 +1024,8 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): ConceptExpression(my_map["one"], rule_name="one")) @pytest.mark.parametrize("expr, text, expected", [ - (ZeroOrMore(StrMatch("one"), sep=","), "one,", [CNC("foo", source="one"), UTN(",")]), - (StrMatch("one"), "one two", [CNC("foo", source="one"), UTN(" two")]), + # (ZeroOrMore(StrMatch("one"), sep=","), "one,", [CNC("foo", source="one"), UTN(",")]), + # (StrMatch("one"), "one two", [CNC("foo", source="one"), UTN(" two")]), (StrMatch("one"), "two one", [UTN("two "), CNC("foo", source="one")]), ]) def test_i_can_recognize_unknown_concepts(self, expr, text, expected): @@ -1442,6 +1458,22 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka): def test_i_can_get_expression_from_concept_name(self, name, expected): assert BnfNodeParser.get_expression_from_concept_name(name) == expected + def test_i_can_parse_when_multiple_layers(self): + sheerka, context, parser = self.init_parser(init_from_sheerka=True) + + # sanity + text = "thirty one" + res = parser.parse(context, ParserInput(text)) + assert res.status + assert res.value.value == compute_expected_array(cmap, text, [CN("thirties", source=text)]) + + # add a layer, I still can parse the text + sheerka.push_ontology(context, "new layer") + parser = BnfNodeParser(sheerka=sheerka) + res = parser.parse(context, ParserInput(text)) + assert res.status + assert res.value.value == compute_expected_array(cmap, text, [CN("thirties", source=text)]) + # @pytest.mark.parametrize("parser_input, expected", [ # ("one", [ # (True, [CNC("bnf_one", source="one", one="one", body="one")]), diff --git a/tests/parsers/test_DefFormatRuleParser.py b/tests/parsers/test_DefFormatRuleParser.py index 64ecde9..913fc2f 100644 --- a/tests/parsers/test_DefFormatRuleParser.py +++ b/tests/parsers/test_DefFormatRuleParser.py @@ -18,19 +18,26 @@ cmap = { class 
TestDefFormatRuleParser(TestUsingMemoryBasedSheerka): - sheerka = None + shared_ontology = None @classmethod def setup_class(cls): - t = cls() - cls.sheerka, context, _ = t.init_parser(cmap) + init_test_helper = cls().init_test(cache_only=False, ontology="#TestDefFormatRuleParser#") + sheerka, context, *updated = init_test_helper.with_concepts(*cmap.values(), create_new=True).unpack() + for i, concept_name in enumerate(cmap): + cmap[concept_name] = updated[i] - def init_parser(self, concepts_map=None): - if concepts_map is not None: - sheerka, context, *concepts = self.init_concepts(*concepts_map.values(), create_new=True) + cls.shared_ontology = sheerka.get_ontology(context) + sheerka.pop_ontology() + + def init_parser(self, my_concepts_map=None, **kwargs): + if my_concepts_map is None: + sheerka, context = self.init_test().unpack() + sheerka.add_ontology(context, self.shared_ontology) else: - sheerka = TestDefFormatRuleParser.sheerka - context = self.get_context(sheerka) + sheerka, context, *updated = self.init_test().with_concepts(*my_concepts_map.values(), **kwargs).unpack() + for i, pair in enumerate(my_concepts_map): + my_concepts_map[pair] = updated[i] parser = DefFormatRuleParser() return sheerka, context, parser @@ -134,4 +141,3 @@ class TestDefFormatRuleParser(TestUsingMemoryBasedSheerka): assert res.status assert format_ast == expected - diff --git a/tests/parsers/test_FunctionParser.py b/tests/parsers/test_FunctionParser.py index d0bd00c..c074026 100644 --- a/tests/parsers/test_FunctionParser.py +++ b/tests/parsers/test_FunctionParser.py @@ -18,19 +18,26 @@ cmap = { class TestFunctionParser(TestUsingMemoryBasedSheerka): - sheerka = None + shared_ontology = None @classmethod def setup_class(cls): - t = cls() - cls.sheerka, context, _ = t.init_parser(cmap) + init_test_helper = cls().init_test(cache_only=False, ontology="#TestFunctionParser#") + sheerka, context, *updated = init_test_helper.with_concepts(*cmap.values(), create_new=True).unpack() + for i, 
concept_name in enumerate(cmap): + cmap[concept_name] = updated[i] - def init_parser(self, concepts_map=None): - if concepts_map is not None: - sheerka, context, *concepts = self.init_concepts(*concepts_map.values(), create_new=True) + cls.shared_ontology = sheerka.get_ontology(context) + sheerka.pop_ontology() + + def init_parser(self, my_concepts_map=None, **kwargs): + if my_concepts_map is None: + sheerka, context = self.init_test().unpack() + sheerka.add_ontology(context, self.shared_ontology) else: - sheerka = TestFunctionParser.sheerka - context = self.get_context(sheerka) + sheerka, context, *updated = self.init_test().with_concepts(*my_concepts_map.values(), **kwargs).unpack() + for i, pair in enumerate(my_concepts_map): + my_concepts_map[pair] = updated[i] parser = FunctionParser() return sheerka, context, parser @@ -244,4 +251,3 @@ class TestFunctionParser(TestUsingMemoryBasedSheerka): assert isinstance(concept.get_compiled()["b"], list) for item in concept.get_compiled()["b"]: assert sheerka.isinstance(item, BuiltinConcepts.RETURN_VALUE) - diff --git a/tests/parsers/test_RuleParser.py b/tests/parsers/test_RuleParser.py index 2d0f1b6..223cb67 100644 --- a/tests/parsers/test_RuleParser.py +++ b/tests/parsers/test_RuleParser.py @@ -5,23 +5,26 @@ from parsers.RuleParser import RuleParser, RuleNotFoundError from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka -my_rules = {("__rets", "list(__rets)")} +my_rules = [("__rets", "list(__rets)")] class TestRuleParser(TestUsingMemoryBasedSheerka): - sheerka = None + shared_ontology = None @classmethod def setup_class(cls): - t = cls() - cls.sheerka, context, _ = t.init_parser(my_rules) + init_test_helper = cls().init_test(cache_only=False, ontology="#TestRuleParser#") + sheerka, context, *updated = init_test_helper.with_rules(*my_rules).unpack() - def init_parser(self, rules=None): - if rules is not None: - sheerka, context, *concepts = self.init_format_rules(*rules) + cls.shared_ontology = 
sheerka.get_ontology(context) + sheerka.pop_ontology() + + def init_parser(self, rules=None, **kwargs): + if rules is None: + sheerka, context = self.init_test().unpack() + sheerka.add_ontology(context, self.shared_ontology) else: - sheerka = TestRuleParser.sheerka - context = self.get_context(sheerka) + sheerka, context, *updated = self.init_test().with_rules(*rules, **kwargs).unpack() parser = RuleParser() return sheerka, context, parser diff --git a/tests/parsers/test_AtomsParser.py b/tests/parsers/test_SequenceNodeParser.py similarity index 99% rename from tests/parsers/test_AtomsParser.py rename to tests/parsers/test_SequenceNodeParser.py index 900fd90..d0b96ce 100644 --- a/tests/parsers/test_AtomsParser.py +++ b/tests/parsers/test_SequenceNodeParser.py @@ -11,10 +11,9 @@ from tests.parsers.parsers_utils import compute_expected_array class TestAtomsParser(TestUsingMemoryBasedSheerka): def init_parser(self, my_map, create_new=False, singleton=True, use_sheerka=False): - sheerka, context, *updated_concepts = self.init_concepts( + sheerka, context, *updated_concepts = self.init_test().with_concepts( *my_map.values(), - create_new=create_new, - singleton=singleton) + create_new=create_new).unpack() if use_sheerka: parser = SequenceNodeParser(sheerka=sheerka) diff --git a/tests/parsers/test_SyaNodeParser.py b/tests/parsers/test_SyaNodeParser.py index e904e5f..3cba7fc 100644 --- a/tests/parsers/test_SyaNodeParser.py +++ b/tests/parsers/test_SyaNodeParser.py @@ -41,16 +41,14 @@ cmap = { class TestSyaNodeParser(TestUsingMemoryBasedSheerka): - sheerka = None + shared_ontology = None @classmethod def setup_class(cls): - t = TestSyaNodeParser() - TestSyaNodeParser.sheerka, context, _ = t.init_parser( - cmap, - singleton=False, - create_new=True, - init_from_sheerka=True) + init_test_helper = cls().init_test(cache_only=False, ontology="#TestSyaNodeParser#") + sheerka, context, *updated = init_test_helper.with_concepts(*cmap.values(), create_new=True).unpack() + for i, 
concept_name in enumerate(cmap): + cmap[concept_name] = updated[i] cmap["plus"].set_prop(BuiltinConcepts.ASSOCIATIVITY, "right") cmap["mult"].set_prop(BuiltinConcepts.ASSOCIATIVITY, "right") @@ -66,35 +64,24 @@ class TestSyaNodeParser(TestUsingMemoryBasedSheerka): cmap["minus"], CONCEPT_COMPARISON_CONTEXT) - # TestSyaNodeParser.sheerka.test_only_force_sya_def(context, [ - # (cmap["plus"].id, 5, SyaAssociativity.Right), - # (cmap["mult"].id, 10, SyaAssociativity.Right), - # (cmap["minus"].id, 5, SyaAssociativity.Right)]) + cls.shared_ontology = sheerka.get_ontology(context) + sheerka.pop_ontology() def init_parser(self, my_concepts_map=None, sya_def=None, post_init_concepts=None, **kwargs): - - if my_concepts_map is not None: - # a new concept map is given - # use it but - # do not instantiate a new sheerka - # do not update / init from sheerka - if 'singleton' not in kwargs: - kwargs["singleton"] = True - init_from_sheerka = kwargs.get("init_from_sheerka", False) - sheerka, context, *concepts = self.init_concepts(*my_concepts_map.values(), **kwargs) - else: - # No custom concept map is given -> Use the global cmap - # Sheerka is already initialized (the class instance) - # Use it to initialize the parser - init_from_sheerka = kwargs.get("init_from_sheerka", True) - sheerka = TestSyaNodeParser.sheerka - context = self.get_context(sheerka) + if my_concepts_map is None: + sheerka, context = self.init_test().unpack() + sheerka.add_ontology(context, self.shared_ontology) concepts = cmap.values() - ALL_ATTRIBUTES.clear() + init_from_sheerka = kwargs.get("init_from_sheerka", True) + else: + sheerka, context, *concepts = self.init_test().with_concepts(*my_concepts_map.values(), **kwargs).unpack() + for i, pair in enumerate(my_concepts_map): + my_concepts_map[pair] = concepts[i] + init_from_sheerka = kwargs.get("init_from_sheerka", False) if post_init_concepts: post_init_concepts(sheerka, context) @@ -112,7 +99,6 @@ class TestSyaNodeParser(TestUsingMemoryBasedSheerka): 
parser = SyaNodeParser() if my_concepts_map: parser.init_from_concepts(context, concepts, sya=sya_def_to_use) - return sheerka, context, parser @pytest.mark.parametrize("expression, expected_sequences", [ @@ -1346,3 +1332,27 @@ class TestSyaNodeParser(TestUsingMemoryBasedSheerka): assert actual.function == resolved_function_name[0] else: assert actual.function is None + + def test_i_can_parse_when_multiple_ontologies(self): + sheerka, context, parser = self.init_parser() + + text = "suffixed 1 + 1" + res = parser.parse(context, ParserInput(text)) + wrapper = res.body + lexer_nodes = res.body.body + + assert res.status + assert context.sheerka.isinstance(wrapper, BuiltinConcepts.PARSER_RESULT) + assert lexer_nodes == [CN(cmap["suffixed"], 0, 6, source=text)] + + # add an ontology layer and make sure will still can parse + sheerka.push_ontology(context, "new ontology") + parser = SyaNodeParser(sheerka=sheerka) + + res = parser.parse(context, ParserInput(text)) + wrapper = res.body + lexer_nodes = res.body.body + + assert res.status + assert context.sheerka.isinstance(wrapper, BuiltinConcepts.PARSER_RESULT) + assert lexer_nodes == [CN(cmap["suffixed"], 0, 6, source=text)] diff --git a/tests/parsers/test_UnrecognizedNodeParser.py b/tests/parsers/test_UnrecognizedNodeParser.py index 1ad9def..79b494e 100644 --- a/tests/parsers/test_UnrecognizedNodeParser.py +++ b/tests/parsers/test_UnrecognizedNodeParser.py @@ -70,25 +70,31 @@ concepts_map = { class TestUnrecognizedNodeParser(TestUsingMemoryBasedSheerka): - sheerka = None + shared_ontology = None @classmethod def setup_class(cls): - t = TestUnrecognizedNodeParser() - TestUnrecognizedNodeParser.sheerka, context, _ = t.init_parser(concepts_map, create_new=True) - TestUnrecognizedNodeParser.sheerka.test_only_force_sya_def(context, [ - (concepts_map["mult"].id, 20, SyaAssociativity.Right), - (concepts_map["plus"].id, 10, SyaAssociativity.Right), - ]) + init_test_helper = cls().init_test(cache_only=False, 
ontology="#TestUnrecognizedNodeParser#") + sheerka, context, *updated = init_test_helper.with_concepts(*concepts_map.values(), create_new=True).unpack() + for i, concept_name in enumerate(concepts_map): + concepts_map[concept_name] = updated[i] + + sheerka.set_is_greater_than(context, + BuiltinConcepts.PRECEDENCE, + concepts_map["mult"], + concepts_map["plus"], 'Sya') + + cls.shared_ontology = sheerka.get_ontology(context) + sheerka.pop_ontology() def init_parser(self, my_concepts_map=None, **kwargs): - if my_concepts_map: - sheerka, context, *updated_concepts = self.init_concepts(*my_concepts_map.values(), **kwargs) - for i, pair in enumerate(my_concepts_map): - my_concepts_map[pair] = updated_concepts[i] + if my_concepts_map is None: + sheerka, context = self.init_test().unpack() + sheerka.add_ontology(context, self.shared_ontology) else: - sheerka = TestUnrecognizedNodeParser.sheerka - context = self.get_context(sheerka) + sheerka, context, *updated = self.init_test().with_concepts(*my_concepts_map.values(), **kwargs).unpack() + for i, pair in enumerate(my_concepts_map): + my_concepts_map[pair] = updated[i] parser = UnrecognizedNodeParser() return sheerka, context, parser @@ -186,12 +192,14 @@ class TestUnrecognizedNodeParser(TestUsingMemoryBasedSheerka): assert res.body.concept.get_compiled()["a"][0].body.source == "1 " assert res.body.concept.get_compiled()["b"] == concepts_map["mult"] - assert sheerka.isinstance(res.body.concept.get_compiled()["b"].get_compiled()["a"][0], BuiltinConcepts.RETURN_VALUE) + assert sheerka.isinstance(res.body.concept.get_compiled()["b"].get_compiled()["a"][0], + BuiltinConcepts.RETURN_VALUE) assert res.body.concept.get_compiled()["b"].get_compiled()["a"][0].status assert res.body.concept.get_compiled()["b"].get_compiled()["a"][0].who == "parsers.Python" assert res.body.concept.get_compiled()["b"].get_compiled()["a"][0].body.source == " 2 " - assert sheerka.isinstance(res.body.concept.get_compiled()["b"].get_compiled()["b"][0], 
BuiltinConcepts.RETURN_VALUE) + assert sheerka.isinstance(res.body.concept.get_compiled()["b"].get_compiled()["b"][0], + BuiltinConcepts.RETURN_VALUE) assert res.body.concept.get_compiled()["b"].get_compiled()["b"][0].status assert res.body.concept.get_compiled()["b"].get_compiled()["b"][0].who == "parsers.Bnf" expected_nodes = compute_expected_array( @@ -339,7 +347,8 @@ class TestUnrecognizedNodeParser(TestUsingMemoryBasedSheerka): assert sheerka.isinstance(parser_result, BuiltinConcepts.PARSER_RESULT) assert parser_result.source == expression assert len(actual_nodes) == 1 - assert actual_nodes[0].nodes[0].concept.get_metadata().is_evaluated # 'a plus b' is recognized as concept definition + assert actual_nodes[0].nodes[ + 0].concept.get_metadata().is_evaluated # 'a plus b' is recognized as concept definition def test_i_can_parse_unrecognized_source_code_with_concept_node_when_var_in_short_term_memory(self): sheerka, context, parser = self.init_parser() diff --git a/tests/sdp/test_sheerkaDataProvider.py b/tests/sdp/test_sheerkaDataProvider.py index 4fc218e..8687df1 100644 --- a/tests/sdp/test_sheerkaDataProvider.py +++ b/tests/sdp/test_sheerkaDataProvider.py @@ -5,6 +5,8 @@ from datetime import date, datetime from os import path import pytest + +from core.global_symbols import NotFound from sdp.sheerkaDataProvider import SheerkaDataProvider, Event from sdp.sheerkaSerializer import PickleSerializer @@ -116,6 +118,28 @@ def test_i_can_save_and_load_an_event(root): evt = sdp.load_event() assert evt.message == "hello world" + # check that the last event is updated + last_event_file = path.join(sdp.io.root, SheerkaDataProvider.LastEventFile) + assert sdp.io.exists(last_event_file) + assert sdp.io.read_text(last_event_file) == evt_digest + + +def test_i_can_save_and_load_events_with_multiple_sdp(): + root = ".sheerka" + sdp1 = SheerkaDataProvider(root) + sdp1.save_event(Event("event 1", date=date(year=2007, month=9, day=10), user_id="kodjo")) + 
sdp1.save_event(Event("event 2", date=date(year=2007, month=9, day=10), user_id="kodjo")) + + sdp2 = SheerkaDataProvider(root, "Another sdp") + sdp2.save_event(Event("event 3", date=date(year=2007, month=9, day=10), user_id="kodjo")) + sdp2.save_event(Event("event 4", date=date(year=2007, month=9, day=10), user_id="kodjo")) + + events_from_1 = list(sdp1.load_events(-1)) + events_from_2 = list(sdp2.load_events(-1)) + + assert [e.message for e in events_from_1] == ['event 4', 'event 3', 'event 2', 'event 1'] + assert [e.message for e in events_from_2] == ['event 4', 'event 3', 'event 2', 'event 1'] + @pytest.mark.parametrize("root", [ ".sheerka", @@ -219,7 +243,7 @@ def test_i_can_add_and_reload_one_item(root): } assert sdp.io.exists(path.join(sdp.io.root, SheerkaDataProvider.StateFolder, last_commit[0:24], last_commit)) - assert sdp.io.exists(path.join(sdp.io.root, SheerkaDataProvider.HeadFile)) + assert sdp.io.exists(path.join(sdp.io.root, SheerkaDataProvider.RefFolder, sdp.name, SheerkaDataProvider.HeadFile)) assert state.date is not None assert state.parents == [] @@ -227,7 +251,8 @@ def test_i_can_add_and_reload_one_item(root): assert state.data == {"entry": {'key': 'foo => bar', 'key2': ObjNoKey("a", "b")}, 'entry2': {'key': 'value2'}} - assert sdp.io.read_text(path.join(sdp.io.root, SheerkaDataProvider.HeadFile)) == last_commit + assert sdp.io.read_text( + path.join(sdp.io.root, SheerkaDataProvider.RefFolder, sdp.name, SheerkaDataProvider.HeadFile)) == last_commit @pytest.mark.parametrize("root", [ @@ -419,7 +444,7 @@ def test_i_can_remove_elements(root): with sdp.get_transaction(evt_digest) as transaction: transaction.remove("entry", "key") - assert sdp.get("entry", "key") is None + assert sdp.get("entry", "key") is NotFound state = sdp.load_state(sdp.get_snapshot(SheerkaDataProvider.HeadFile)) assert state.data == { @@ -456,6 +481,25 @@ def test_i_can_keep_state_history(root): assert state.parents == [] +@pytest.mark.parametrize("root", [ + ".sheerka", + 
"mem://" +]) +def test_i_can_save_and_load_ontologies_names(root): + sdp = SheerkaDataProvider(root) + + ontologies = ['new ontology', '#unit_test#', '__default__'] + sdp.save_ontologies(ontologies) + assert sdp.load_ontologies() == ontologies + + # extra + ontologies_files = path.join(sdp.io.root, SheerkaDataProvider.OntologiesFiles) + assert sdp.io.exists(ontologies_files) + assert sdp.io.read_text(ontologies_files) == """new ontology +#unit_test# +__default__""" + + def test_i_can_remove_even_if_not_exist(): sdp = SheerkaDataProvider("mem://") with sdp.get_transaction(evt_digest) as transaction: @@ -478,3 +522,14 @@ def test_exists(): assert not sdp.exists("entry2") assert not sdp.exists("entry", "key2") assert sdp.exists("entry", "key") + + +def test_not_found_is_returned_when_an_entry_is_not_found(): + sdp = SheerkaDataProvider("mem://") + + with sdp.get_transaction(evt_digest) as transaction: + transaction.add("entry", "key", "value") + + assert sdp.get("entry", "key") == "value" + assert sdp.get("entry", "key2") == NotFound + assert sdp.get("entry2") == NotFound diff --git a/tests/sdp/test_sheerkaSerializer.py b/tests/sdp/test_sheerkaSerializer.py index a96616f..32d3e48 100644 --- a/tests/sdp/test_sheerkaSerializer.py +++ b/tests/sdp/test_sheerkaSerializer.py @@ -1,8 +1,9 @@ import pytest from dataclasses import dataclass +from core.global_symbols import NotInit, NotFound, Removed from sdp.sheerkaDataProvider import Event -from sdp.sheerkaSerializer import Serializer, JsonSerializer, SerializerContext +from sdp.sheerkaSerializer import Serializer, JsonSerializer, SerializerContext, CustomTypeSerializer from datetime import datetime import core.utils @@ -50,6 +51,20 @@ def test_i_can_serialize_an_object(): assert loaded.prop1 == "value" +@pytest.mark.parametrize("custom_type", [ + NotInit, NotFound, Removed +]) +def test_i_can_serialize_custom_type(custom_type): + serializer = Serializer() + serializer.register(CustomTypeSerializer()) + context = 
SerializerContext("kodjo", "6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b") + + stream = serializer.serialize(custom_type, context) + loaded = serializer.deserialize(stream, context) + + assert loaded == custom_type + + @pytest.mark.parametrize("obj, expected", [ (Obj("10", "value"), "tests.sdp.test_sheerkaSerializer.Obj") ]) diff --git a/tests/sheerkapickle/test_SheerkaPickler.py b/tests/sheerkapickle/test_SheerkaPickler.py index 7ed2847..9958ffb 100644 --- a/tests/sheerkapickle/test_SheerkaPickler.py +++ b/tests/sheerkapickle/test_SheerkaPickler.py @@ -2,6 +2,7 @@ import logging import pytest from core.concept import Concept +from core.global_symbols import NotInit, NotFound, Removed from core.tokenizer import Keywords from sheerkapickle import tags from sheerkapickle.SheerkaPickler import SheerkaPickler @@ -55,6 +56,19 @@ class TestSheerkaPickler(TestUsingMemoryBasedSheerka): decoded = SheerkaUnpickler(sheerka).restore(flatten) assert decoded == obj + @pytest.mark.parametrize("obj, expected", [ + (NotInit, {tags.CUSTOM: NotInit.value}), + (NotFound, {tags.CUSTOM: NotFound.value}), + (Removed, {tags.CUSTOM: Removed.value}), + ]) + def test_i_can_flatten_and_restore_custom_types(self, obj, expected): + sheerka = self.get_sheerka() + flatten = SheerkaPickler(sheerka).flatten(obj) + assert flatten == expected + + decoded = SheerkaUnpickler(sheerka).restore(flatten) + assert decoded == obj + def test_i_can_flatten_and_restore_instances(self): sheerka = self.get_sheerka() @@ -175,3 +189,18 @@ class TestSheerkaPickler(TestUsingMemoryBasedSheerka): flatten = SheerkaPickler(sheerka).flatten(obj) decoded = SheerkaUnpickler(sheerka).restore(flatten) assert decoded == Obj("foo", None, {"a": None, "b": None}) + + def test_ontology_are_not_serialized(self): + sheerka, context = self.init_test().unpack() + + sheerka.push_ontology(context, "new ontology") + ontology = sheerka.pop_ontology().body.body + obj = sheerka.ret(sheerka.name, True, ontology) + + 
flatten = SheerkaPickler(sheerka).flatten(obj) + assert flatten == { + '_sheerka/obj': 'core.builtin_concepts.ReturnValueConcept', + 'concept/id': ('__RETURN_VALUE', '43'), + 'status': True, + 'value': 'new ontology', + 'who': '__SHEERKA'} diff --git a/tests/sheerkapickle/test_sheerka_handlers.py b/tests/sheerkapickle/test_sheerka_handlers.py index 275fa0e..f747826 100644 --- a/tests/sheerkapickle/test_sheerka_handlers.py +++ b/tests/sheerkapickle/test_sheerka_handlers.py @@ -69,7 +69,7 @@ class TestSheerkaPickleHandler(TestUsingMemoryBasedSheerka): to_string = sheerkapickle.encode(sheerka, concept) decoded = sheerkapickle.decode(sheerka, to_string) assert decoded == concept - assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.variables": [["a", "value_a"], ["b", "value_b"]], "values": [["a", null], ["b", null]]}' + assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.variables": [["a", "value_a"], ["b", "value_b"]], "values": [["a", {"_sheerka/custom": "**NotInit**"}], ["b", {"_sheerka/id": 1}]]}' concept = Concept("foo").init_key() sheerka.create_new_concept(self.get_context(sheerka), concept) diff --git a/utils/sheerka.rebuild.sh b/utils/sheerka.rebuild.sh index 54cf5a8..e50f621 100755 --- a/utils/sheerka.rebuild.sh +++ b/utils/sheerka.rebuild.sh @@ -1,7 +1,7 @@ #!/bin/sh set -e -BASEDIR=$(dirname $0) +BASEDIR=$(dirname "$0") list_available() { available=$(ls "$BASEDIR"/../_concepts_*.txt | awk -F_ '{ print " "$3}' ) 2> /dev/null @@ -37,6 +37,6 @@ if [ -e ~/.sheerka ]; then mv ~/.sheerka ~/.sheerka.bak fi -python $BASEDIR/../main.py "sheerka.restore('$1')" +python "$BASEDIR"/../main.py "sheerka.restore('$1')" rm -rf "$env_folder" cp -R ~/.sheerka "$env_folder"