@@ -0,0 +1,35 @@
|
||||
import os
|
||||
import shutil
|
||||
from os import path
|
||||
|
||||
import pytest
|
||||
|
||||
from core.Sheerka import Sheerka
|
||||
from sdp.sheerkaDataProvider import SheerkaDataProvider
|
||||
|
||||
|
||||
class BaseTest:
    """Shared base class for the test suites below; provides the `sdp` fixture."""

    @pytest.fixture()
    def sdp(self) -> SheerkaDataProvider:
        """Return a fresh in-memory SheerkaDataProvider named 'test'."""
        return SheerkaDataProvider("mem://", name="test")
|
||||
|
||||
|
||||
class UsingFileBasedSheerka(BaseTest):
    """Base class for tests that need a file-based (on-disk) Sheerka instance."""

    # all file-based test artifacts live under the build tree
    TESTS_ROOT_DIRECTORY = path.abspath("../build/tests")
    SHEERKA_ROOT_DIR = os.path.join(TESTS_ROOT_DIRECTORY, ".sheerka")

    @pytest.fixture(scope="class")
    def sheerka_fb(self):
        """
        Build a file-based Sheerka instance rooted in a clean test directory.

        Any directory left over from a previous run is removed first, so each
        test class starts from a fresh on-disk state.  The fixture is
        class-scoped: the same instance is shared by all tests of a class.
        :return: an initialized file-based Sheerka instance
        :rtype: Sheerka
        """
        # first, make sure to create a fresh environment
        if path.exists(self.SHEERKA_ROOT_DIR):
            shutil.rmtree(self.SHEERKA_ROOT_DIR)

        # create the new Sheerka instance
        sheerka = Sheerka()
        sheerka.initialize(root_folder=self.SHEERKA_ROOT_DIR)
        return sheerka
|
||||
@@ -0,0 +1,18 @@
|
||||
class FakeSdp:
    """
    Minimal test double for a data provider.

    Every behaviour is injected at construction time as a callable (or, for
    ``populate``, optionally a plain value); each method simply delegates to
    the corresponding injected object.  A method whose callable was not
    supplied will raise ``TypeError`` when invoked — acceptable for a double.
    """

    def __init__(self, /, get_value=None, extend_exists=None, get_alt_value=None, populate=None):
        self.get_value = get_value
        self.get_alt_value = get_alt_value
        self.extend_exists = extend_exists
        self.populate_function = populate

    def get(self, cache_name, key):
        """Delegate to the injected ``get_value`` callable."""
        return self.get_value(cache_name, key)

    def exists(self, cache_name, key):
        """Delegate to the injected ``extend_exists`` callable."""
        return self.extend_exists(cache_name, key)

    def alt_get(self, cache_name, key):
        """Delegate to the injected ``get_alt_value`` callable."""
        return self.get_alt_value(cache_name, key)

    def populate(self):
        """Return the injected populate result, invoking it first when callable."""
        if callable(self.populate_function):
            return self.populate_function()
        return self.populate_function
|
||||
@@ -0,0 +1,288 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from caching.Cache import Cache
|
||||
from caching.CacheManager import CacheManager, ConceptNotFound
|
||||
from caching.ListIfNeededCache import ListIfNeededCache
|
||||
from common.global_symbols import NotFound
|
||||
from helpers import get_metadata
|
||||
from tests.caching import FakeSdp
|
||||
|
||||
|
||||
@dataclass
class Obj:
    """Simple key/value record used as a cache payload in the tests below."""
    key: str
    value: str
|
||||
|
||||
|
||||
class TestCacheManager(BaseTest):
    """
    Tests for CacheManager: cache registration, get/put/delete, commits to
    the sdp, concept caches, and the alt_sdp fall-back path.

    NOTE(review): the ``context`` fixture is not defined in this file —
    presumably supplied by a conftest; it carries the ``event`` used to open
    sdp transactions.  Confirm against the test configuration.
    """

    def test_i_can_push_into_sdp(self, context, sdp):
        """A value put into a persisted cache reaches the sdp on commit."""
        cache_manager = CacheManager(sdp)
        cache_manager.register_cache("test", Cache(), persist=True)
        cache_manager.put("test", "key", "value")

        cache_manager.commit(context)
        assert sdp.exists("test", "key")

    def test_sdp_given_to_the_cache_manager_can_be_used_by_the_cache(self, context, sdp):
        """
        When the sdp is given to the cache_manager, the 'Cache' object can use
        a two-parameter ``default`` lambda receiving the sdp and the key.
        """
        cache_manager = CacheManager(sdp)
        cache = Cache(default=lambda _sdp, k: _sdp.get("test", k))
        cache_manager.register_cache("test", cache)

        with cache_manager.sdp.get_transaction(context.event) as transaction:
            transaction.add("test", "key", "value")

        assert cache_manager.get("test", "key") == "value"

    def test_cache_can_use_auto_configure_when_sdp_is_given_to_the_cache_manager(self, context, sdp):
        """
        When the sdp is given to the cache manager, a Cache can be configured
        with ``auto_configure`` since the sdp will be provided by the
        cache_manager itself.
        """
        with sdp.get_transaction(context.event) as transaction:
            transaction.add("test", "key", "value")

        cache_manager = CacheManager(sdp)
        cache = Cache().auto_configure("test")
        cache_manager.register_cache("test", cache)

        assert cache_manager.get("test", "key") == "value"

    def test_i_can_get_value_when_the_sdp_is_given_to_the_cache(self, context, sdp):
        """
        If the sdp is not given to the cache manager, the cache must capture
        the sdp explicitly in its ``default`` callable during configuration.
        """
        with sdp.get_transaction(context.event) as transaction:
            transaction.add("test", "key", "value")

        cache_manager = CacheManager()
        cache = Cache(default=lambda k: sdp.get("test", k))
        cache_manager.register_cache("test", cache)

        assert cache_manager.get("test", "key") == "value"

    def test_i_can_get_value_from_alt_sdp(self, sdp):
        """When nothing is found in the cache or in the sdp, the alternate sdp is used."""
        cache_manager = CacheManager(sdp)
        cache_manager.register_cache("test", Cache().auto_configure("test"))

        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value found !")
        assert cache_manager.get("test", "key", alt_sdp=alt_sdp) == "value found !"

    def test_i_can_commit_simple_cache(self, context, sdp):
        """Commits propagate puts, updates and key moves from the cache to the sdp."""
        cache_manager = CacheManager(sdp)
        cache_manager.register_cache("test", Cache().auto_configure("test"))
        cache = cache_manager.caches["test"].cache

        cache_manager.put("test", "key", "value")

        cache_manager.commit(context)

        # clearing the in-memory cache forces a re-read from the sdp
        cache.clear()
        assert cache_manager.sdp.get("test", "key") == "value"
        assert cache.get("key") == "value"

        cache.update("key", "value", "key", "another_value")
        cache_manager.commit(context)
        assert cache_manager.sdp.get("test", "key") == "another_value"

        # moving the value to a new key removes the old key from the sdp
        cache.update("key", "another_value", "key2", "another_value")
        cache_manager.commit(context)
        assert cache_manager.sdp.get("test", "key") is NotFound
        assert cache_manager.sdp.get("test", "key2") == "another_value"

        # sanity check
        # sdp 'test' has the value, but sdp '__default__' does not
        assert cache_manager.sdp.name == "test"
        assert cache_manager.sdp.state.data == {'test': {'key2': 'another_value'}}

    def test_i_can_use_concept_cache(self):
        """Concept caches stay in sync through add/update/remove of a concept."""
        cache_manager = CacheManager()
        cache_manager.register_concept_cache("id", Cache(), lambda c: c.id, True)
        cache_manager.register_concept_cache("name", ListIfNeededCache(), lambda c: c.name, True)

        # caches are correctly created
        assert cache_manager.concept_caches == ["id", "name"]
        assert "id" in cache_manager.caches
        assert "name" in cache_manager.caches

        # caches are correctly updated on insertion
        meta1 = get_metadata(id="1", name="foo")
        meta2 = get_metadata(id="2", name="bar")
        meta3 = get_metadata(id="3", name="foo")

        for metadata in meta1, meta2, meta3:
            cache_manager.add_concept(metadata)

        assert cache_manager.get_inner_cache("id").copy() == {"1": meta1, "2": meta2, "3": meta3}
        assert cache_manager.get_inner_cache("name").copy() == {"foo": [meta1, meta3], "bar": meta2}

        # caches are correctly updated on modification
        meta3prime = get_metadata(id="3", name="bar")
        cache_manager.update_concept(meta3, meta3prime)

        assert cache_manager.get_inner_cache("id").copy() == {"1": meta1, "2": meta2, "3": meta3prime}
        assert cache_manager.get_inner_cache("name").copy() == {"foo": meta1, "bar": [meta2, meta3prime]}

        # caches are correctly updated on removal
        cache_manager.remove_concept(meta3prime)
        assert cache_manager.get_inner_cache("id").copy() == {"1": meta1, "2": meta2}
        assert cache_manager.get_inner_cache("name").copy() == {"foo": meta1, "bar": meta2}

    def test_i_cannot_remove_a_concept_that_does_not_exists(self):
        """Removing an unknown concept raises ConceptNotFound carrying the concept."""
        cache_manager = CacheManager()
        cache_manager.register_concept_cache("id", Cache(), lambda c: c.id, True)
        cache_manager.register_concept_cache("key", ListIfNeededCache(), lambda c: c.key, True)

        meta1 = get_metadata(id="1", name="foo")

        with pytest.raises(ConceptNotFound) as ex:
            cache_manager.remove_concept(meta1)

        assert ex.value.concept == meta1

    def test_nothing_is_sent_to_sdp_if_persist_is_false(self, context, sdp):
        """A cache registered with persist=False never writes to the sdp on commit."""
        cache_manager = CacheManager(sdp)
        cache_manager.register_cache("test", Cache(), persist=False)

        cache_manager.put("test", "key", "value")
        cache_manager.commit(context)

        value = sdp.get("test", "key")
        assert value is NotFound

    def test_i_can_delete_from_cache_manager(self, context, sdp):
        """Deletion is local to the cache until a commit also removes it from the sdp."""
        cache_manager = CacheManager(sdp)
        cache_manager.register_cache("test", Cache(), persist=True)
        cache_manager.put("test", "key", "value")
        cache_manager.commit(context)

        # sanity check
        assert cache_manager.get("test", "key") == "value"
        assert sdp.get("test", "key") == "value"

        # I remove but I don't commit
        cache_manager.delete("test", "key")
        assert cache_manager.get("test", "key") is NotFound
        assert sdp.get("test", "key") == "value"

        # commit
        cache_manager.commit(context)
        assert cache_manager.get("test", "key") is NotFound
        assert sdp.get("test", "key") is NotFound

    def test_i_can_get_the_inner_cache(self):
        """get_inner_cache returns the very instance that was registered, not a copy."""
        cache_manager = CacheManager()
        cache = Cache()
        cache_manager.register_cache("test", cache)

        inner_cache = cache_manager.get_inner_cache("test")
        assert id(inner_cache) == id(cache)

    def test_i_can_get_a_copy_of_a_cache(self):
        """copy() returns a plain dict snapshot of a registered cache."""
        cache_manager = CacheManager()
        cache = Cache()
        cache.put("key1", "value1")
        cache.put("key2", "value2")
        cache_manager.register_cache("test", cache)

        copy = cache_manager.copy("test")

        assert isinstance(copy, dict)
        assert copy == {"key1": "value1", "key2": "value2"}

    def test_i_can_populate(self):
        """populate() fills a cache from a producer and a key-extraction function."""
        cache_manager = CacheManager()
        cache = Cache()
        cache_manager.register_cache("test", cache)

        obj1, obj2 = Obj("key1", "value1"), Obj("key2", "value2")
        cache_manager.populate("test", lambda: [obj1, obj2], lambda o: o.key)

        assert cache_manager.copy("test") == {"key1": obj1, "key2": obj2}

    def test_has(self, context, sdp):
        """has() only checks the in-memory cache, never the sdp."""
        cache_manager = CacheManager(sdp)
        cache_manager.register_cache("test", Cache().auto_configure("test"))

        with cache_manager.sdp.get_transaction(context.event) as transaction:
            transaction.add("test", "key_from_sdp", "value")

        cache_manager.put("test", "key_in_cache", "value")

        assert cache_manager.has("test", "key_in_cache")
        assert not cache_manager.has("test", "key_from_sdp")

    def test_exist(self, context, sdp):
        """exists() checks both the in-memory cache and the sdp."""
        cache_manager = CacheManager(sdp)
        cache_manager.register_cache("test", Cache().auto_configure("test"))

        with cache_manager.sdp.get_transaction(context.event) as transaction:
            transaction.add("test", "key_from_sdp", "value")

        cache_manager.put("test", "key_in_cache", "value")

        assert cache_manager.exists("test", "key_in_cache")
        assert cache_manager.exists("test", "key_from_sdp")

    def test_i_can_clear_a_single_cache(self, context, sdp):
        """clear() empties the cache immediately; the sdp is emptied on the next commit."""
        cache_manager = CacheManager(sdp)
        cache_manager.register_cache("test", Cache().auto_configure("test"))
        cache_manager.put("test", "key1", "value1")
        cache_manager.put("test", "key2", "value2")

        cache_manager.commit(context)

        cache_manager.clear("test")
        assert cache_manager.copy("test") == {}
        assert sdp.exists("test", "key1")
        assert sdp.exists("test", "key2")

        cache_manager.commit(context)
        assert cache_manager.copy("test") == {}
        assert not sdp.exists("test", "key1")
        assert not sdp.exists("test", "key2")

    def test_i_can_clear_all_caches(self, sdp):
        """clear() with no argument empties every registered cache."""
        cache_manager = CacheManager(sdp)
        cache_manager.register_cache("test1", Cache())
        cache_manager.register_cache("test2", Cache())
        cache_manager.put("test1", "key", "value")
        cache_manager.put("test2", "key", "value")

        cache_manager.clear()
        assert cache_manager.copy("test1") == {}
        assert cache_manager.copy("test2") == {}

    def test_i_cannot_add_null_keys_into_concept_cache(self):
        """Adding a concept whose key function yields no value raises KeyError."""
        cache_manager = CacheManager()
        cache_manager.register_concept_cache("id", Cache(), lambda c: c.id, True)

        with pytest.raises(KeyError):
            # metadata built without an 'id' — the key extractor has nothing to use
            meta1 = get_metadata(name="foo")
            cache_manager.add_concept(meta1)
|
||||
@@ -0,0 +1,165 @@
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from caching.DictionaryCache import DictionaryCache
|
||||
from common.global_symbols import NotFound
|
||||
from tests.caching import FakeSdp
|
||||
|
||||
|
||||
class TestDictionaryCache(BaseTest):
    """
    Tests for DictionaryCache.

    ``put`` takes a boolean first argument: False replaces the whole internal
    dict with the given one, True merges the given dict into the current one.
    """

    @pytest.mark.parametrize('key', [None, "str_value", 0, 1.0])
    def test_key_must_be_true_or_false(self, key):
        """put() rejects any first argument that is not exactly True or False."""
        cache = DictionaryCache()

        # the key must be exactly True or False — 0/1.0 and other
        # truthy/falsy values are rejected
        with pytest.raises(KeyError):
            cache.put("key", key)

    def test_value_must_be_a_dictionary(self):
        """put() rejects non-dict values regardless of the boolean key."""
        cache = DictionaryCache()

        with pytest.raises(ValueError):
            cache.put(True, "value")

        with pytest.raises(ValueError):
            cache.put(False, "value")

    def test_i_can_put_and_retrieve_value_from_dictionary_cache(self):
        """put(False, d) adopts d as the backing dict; put(True, d) merges into it."""
        cache = DictionaryCache()

        entry = {"key": "value", "key2": ["value21", "value22"]}
        cache.put(False, entry)
        assert len(cache) == 3
        # put(False, ...) adopts the dict itself, no copy is made
        assert id(cache._cache) == id(entry)
        assert cache.get("key") == "value"
        assert cache.get("key2") == ["value21", "value22"]

        # I can append values
        cache.put(True, {"key": "another_value", "key3": "value3"})
        assert len(cache) == 4
        assert cache.get("key") == "another_value"
        assert cache.get("key2") == ["value21", "value22"]
        assert cache.get("key3") == "value3"

        # I can reset
        entry = {"key": "value", "key2": ["value21", "value22"]}
        cache.put(False, entry)
        assert len(cache) == 3
        assert id(cache._cache) == id(entry)
        assert cache.get("key") == "value"
        assert cache.get("key2") == ["value21", "value22"]
        assert cache.get("key3") is NotFound

        assert cache.copy() == {'key': 'value', 'key2': ['value21', 'value22']}

    def test_i_can_get_a_value_that_does_not_exist_without_compromising_the_cache(self):
        """A miss returns NotFound and does not alter the stored entries."""
        cache = DictionaryCache()
        cache.put(False, {"key": "value"})

        assert cache.get("key2") is NotFound
        assert cache.copy() == {"key": "value"}

    def test_i_can_append_to_a_dictionary_cache_even_if_it_is_new(self):
        """put(True, ...) on an empty cache copies the entries instead of adopting the dict."""
        cache = DictionaryCache()

        entry = {"key": "value", "key2": ["value21", "value22"]}
        cache.put(True, entry)
        assert len(cache) == 3
        # unlike put(False, ...), merging never adopts the caller's dict
        assert id(cache._cache) != id(entry)
        assert cache.get("key") == "value"
        assert cache.get("key2") == ["value21", "value22"]

    def test_exists_in_dictionary_cache(self):
        """exists() reflects whether the key is currently stored."""
        cache = DictionaryCache()
        assert not cache.exists("key")

        cache.put(True, {"key": "value"})
        assert cache.exists("key")

    def test_default_for_dictionary_cache(self):
        """A dict default fills the whole cache on first access, hit or miss."""
        cache = DictionaryCache(default={"key": "value", "key2": "value2"})

        # cache is fully set when the value is found
        assert cache.get("key") == "value"
        assert cache.copy() == {"key": "value", "key2": "value2"}

        # cache is fully set when the value is not found
        cache.test_only_reset()
        assert cache.get("key3") is NotFound
        assert cache.copy() == {"key": "value", "key2": "value2"}

        # cache is not corrupted when the value is found
        cache.put(True, {"key3": "value3", "key4": "value4"})
        assert cache.get("key3") == "value3"
        assert cache.copy() == {"key": "value", "key2": "value2", "key3": "value3", "key4": "value4"}

        # cache is not corrupted when the value is not found
        cache._cache["key"] = "another value"  # operation that is normally not possible
        assert cache.get("key5") is NotFound
        assert cache.copy() == {"key": "value", "key2": "value2", "key3": "value3", "key4": "value4"}

    def test_default_callable_for_dictionary_cache(self):
        """A callable default is re-invoked to refill the cache after clear()."""
        cache = DictionaryCache(default=lambda k: {"key": "value", "key2": "value2"})

        assert cache.get("key") == "value"
        assert "key2" in cache
        assert len(cache) == 2

        cache.clear()
        assert cache.get("key3") is NotFound
        assert len(cache) == 2
        assert "key" in cache
        assert "key2" in cache

    def test_default_callable_with_internal_sdp_for_dictionary_cache(self):
        """A two-parameter default receives the cache's own sdp plus the key."""
        cache = DictionaryCache(default=lambda sdp, key: sdp.get("cache_name", key),
                                sdp=FakeSdp(lambda entry, k: {"key": "value", "key2": "value2"}))

        assert cache.get("key") == "value"
        assert "key2" in cache
        assert len(cache) == 2

        cache.clear()
        assert cache.get("key3") is NotFound
        assert len(cache) == 2
        assert "key" in cache
        assert "key2" in cache

    def test_dictionary_cache_cannot_be_null(self):
        """Defaults of NotFound/None (plain or callable) leave the cache empty."""
        cache = DictionaryCache(default=lambda k: NotFound)
        assert cache.get("key") is NotFound
        assert cache._cache == {}

        cache = DictionaryCache(default=NotFound)
        assert cache.get("key") is NotFound
        assert cache._cache == {}

        cache = DictionaryCache(default=lambda k: None)
        assert cache.get("key") is NotFound
        assert cache._cache == {}

        cache = DictionaryCache(default=None)
        assert cache.get("key") is NotFound
        assert cache._cache == {}

    def test_auto_configure_retrieves_the_whole_remote_repository(self, sdp, context):
        """auto_configure sets a default that loads the entire remote repository."""
        cache = DictionaryCache(sdp=sdp).auto_configure("test")
        with sdp.get_transaction(context.event) as transaction:
            transaction.add("test", "key1", "value1")
            transaction.add("test", "key2", "value2")

        # when called for a value that is not in the cache, DictionaryCache is
        # configured to retrieve the whole repository
        cache.get("value")

        assert cache.copy() == {'key1': 'value1', 'key2': 'value2'}

    def test_we_do_no_go_twice_in_repo_when_not_found(self, sdp, context):
        """A miss is remembered: the remote repository is not queried again for the same key."""
        cache = DictionaryCache(sdp=sdp).auto_configure("test")

        assert cache.get("key") is NotFound

        # now add a value in the remote repo
        with sdp.get_transaction(context.event) as transaction:
            transaction.add("test", "key", "value")

        assert cache.get("key") is NotFound  # the key was previously requested
|
||||
@@ -0,0 +1,111 @@
|
||||
from caching.FastCache import FastCache
|
||||
from common.global_symbols import NotFound
|
||||
|
||||
|
||||
def test_i_can_put_an_retrieve_values():
    """A single stored entry is retrievable and tracked by the LRU list."""
    fast_cache = FastCache()
    fast_cache.put("key", "value")

    assert fast_cache.get("key") == "value"
    assert fast_cache.cache == {"key": "value"}
    assert fast_cache.lru == ["key"]
|
||||
|
||||
|
||||
def test_i_can_put_and_retrieve_multiple_items():
    """Entries accumulate and the LRU list preserves insertion order."""
    fast_cache = FastCache()
    for index in (1, 2, 3):
        fast_cache.put(f"key{index}", f"value{index}")

    assert fast_cache.cache == {"key1": "value1", "key2": "value2", "key3": "value3"}
    assert fast_cache.lru == ["key1", "key2", "key3"]
|
||||
|
||||
|
||||
def test_i_the_least_used_is_remove_first():
    """When the cache is full, the least recently used key is evicted first."""
    fast_cache = FastCache(3)
    for index in (1, 2, 3):
        fast_cache.put(f"key{index}", f"value{index}")

    # inserting a 4th entry evicts 'key1', the oldest
    fast_cache.put("key4", "value4")
    assert fast_cache.cache == {"key2": "value2", "key3": "value3", "key4": "value4"}
    assert fast_cache.lru == ["key2", "key3", "key4"]

    # and a 5th entry evicts 'key2'
    fast_cache.put("key5", "value5")
    assert fast_cache.cache == {"key3": "value3", "key4": "value4", "key5": "value5"}
    assert fast_cache.lru == ["key3", "key4", "key5"]
|
||||
|
||||
|
||||
def test_i_can_put_the_same_key_several_times():
    """Re-putting an existing key overwrites its value and refreshes its LRU position."""
    fast_cache = FastCache()
    fast_cache.put("key1", "value1")
    fast_cache.put("key2", "value2")
    fast_cache.put("key1", "value3")

    assert fast_cache.cache == {"key1": "value3", "key2": "value2"}
    # 'key1' moved to the most-recently-used end
    assert fast_cache.lru == ["key2", "key1"]
|
||||
|
||||
|
||||
def test_none_is_returned_when_not_found():
    """A miss returns the NotFound sentinel (despite this test's name, not None)."""
    fast_cache = FastCache()
    assert fast_cache.get("foo") is NotFound
|
||||
|
||||
|
||||
def test_i_can_evict_by_key():
    """evict_by_key removes every entry whose key matches the predicate."""
    fast_cache = FastCache()
    for index in (1, 2, 3):
        fast_cache.put(f"key{index}", f"value{index}")
        fast_cache.put(f"to_keep{index}", f"to_keep_value{index}")

    fast_cache.evict_by_key(lambda key: key.startswith("key"))

    assert fast_cache.cache == {
        "to_keep1": "to_keep_value1",
        "to_keep2": "to_keep_value2",
        "to_keep3": "to_keep_value3",
    }
    # surviving entries keep their relative LRU order
    assert fast_cache.lru == ["to_keep1", "to_keep2", "to_keep3"]
|
||||
|
||||
|
||||
def test_i_can_get_default_value():
    """Misses are filled by the default callable and count toward max_size."""
    fast_cache = FastCache(max_size=3, default=lambda key: key + 1)

    for number in (1, 2, 3, 4):
        assert fast_cache.get(number) == number + 1

    # the first computed entry (key 1) has been evicted: only 3 values remain
    assert fast_cache.cache == {2: 3, 3: 4, 4: 5}
|
||||
|
||||
|
||||
def test_i_can_iter_on_entries():
    """Iterating yields keys in insertion order; __contains__ agrees with __iter__."""
    fast_cache = FastCache()
    for index in (1, 2, 3):
        fast_cache.put(f"key{index}", f"value{index}")

    seen = []
    for key in fast_cache:
        assert key in fast_cache
        seen.append(key)

    assert seen == ["key1", "key2", "key3"]
|
||||
|
||||
|
||||
def test_i_can_count():
    """len() reports the number of stored entries."""
    fast_cache = FastCache()
    for index in (1, 2, 3):
        fast_cache.put(f"key{index}", f"value{index}")

    assert len(fast_cache) == 3
|
||||
|
||||
|
||||
def test_i_can_copy():
    """copy() returns a dict snapshot of the cache contents."""
    fast_cache = FastCache()
    for index in (1, 2, 3):
        fast_cache.put(f"key{index}", f"value{index}")

    assert fast_cache.copy() == {"key1": "value1", "key2": "value2", "key3": "value3"}
|
||||
@@ -0,0 +1,87 @@
|
||||
from base import BaseTest
|
||||
from caching.IncCache import IncCache
|
||||
from common.global_symbols import NotFound, Removed
|
||||
from tests.caching import FakeSdp
|
||||
|
||||
|
||||
class FakeIncSdp:
    """
    Test double exposing only ``alt_get``, backed by two IncCache "levels".

    ``alt_get`` searches level1 first, then level2, and returns the first
    value that is not NotFound (NotFound when both levels miss).

    NOTE(review): the original docstring was truncated ("if a value of ...");
    the two-level "ontology" it referred to could not be reconstructed from
    this file — confirm against the IncCache documentation.
    """
    def __init__(self, init_value1, init_value2):
        # each level is seeded with a single entry under the fixed key "key"
        self.level1 = IncCache()
        self.level1.put("key", init_value1)

        self.level2 = IncCache()
        self.level2.put("key", init_value2)

    def alt_get(self, cache_name, key):
        # cache_name is accepted for interface compatibility but unused here
        for cache in [self.level1, self.level2]:
            value = cache.alt_get(key)
            if value is not NotFound:
                return value

        return NotFound
|
||||
|
||||
|
||||
class TestIncCache(BaseTest):
    """
    Tests for IncCache: an incrementing cache where get() returns the stored
    counter plus one (and advances it), with fall-backs to a remote sdp and
    an alternate sdp.
    """

    def test_i_can_put_and_retrieve_values_from_inc_cache(self):
        """Each get() increments the per-key counter; put() resets its base value."""
        cache = IncCache()

        assert cache.get("key") == 1
        assert cache.get("key") == 2
        assert cache.get("key") == 3
        assert cache.get("key2") == 1
        assert cache.get("key2") == 2

        cache.put("key", 100)
        assert cache.get("key") == 101

        assert cache.copy() == {'key': 101, 'key2': 2}

    def test_i_can_alt_get(self):
        """alt_get() peeks at the current counter without incrementing it."""
        cache = IncCache()

        assert cache.get("key") == 1
        assert cache.get("key") == 2
        assert cache.alt_get("key") == 2
        assert cache.alt_get("key") == 2
        assert cache.get("key") == 3

    def test_current_cache_takes_precedence_over_alt_sdp(self):
        """A key already in the cache is incremented locally, ignoring alt_sdp."""
        cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        assert cache.get("key") == 1

        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: 10)
        assert cache.get("key", alt_sdp=alt_sdp) == 2

    def test_remote_repository_takes_precedence_over_alt_sdp(self):
        """On a cache miss, the remote sdp value seeds the counter before alt_sdp is tried."""
        cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: 5)).auto_configure("cache_name")

        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: 10)
        assert cache.get("key", alt_sdp=alt_sdp) == 6
        assert cache.get("key", alt_sdp=alt_sdp) == 7  # then we use the value from the cache

    def test_i_can_take_value_from_alt_sdp(self):
        """When both the cache and the sdp miss, the alt_sdp value seeds the counter."""
        cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        alt_sdp = FakeIncSdp(10, NotFound)
        assert cache.get("key", alt_sdp=alt_sdp) == 11
        assert cache.get("key", alt_sdp=alt_sdp) == 12  # then we use the value from the cache

    def test_i_can_get_when_alt_sdp_and_cache_is_cleared(self):
        """After clear(), a miss everywhere restarts the counter at 1 despite alt_sdp."""
        cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
        cache.clear()

        alt_sdp = FakeIncSdp(10, NotFound)
        assert cache.get("key", alt_sdp=alt_sdp) == 1
        assert cache.get("key", alt_sdp=alt_sdp) == 2  # then we use the value from the cache

    def test_i_can_manage_when_the_value_from_alt_sdp_is_removed(self):
        """A Removed sentinel coming from alt_sdp is treated as a miss: counting restarts at 1."""
        cache = IncCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        alt_sdp = FakeIncSdp(Removed, 10)
        assert cache.get("key", alt_sdp=alt_sdp) == 1
        assert cache.get("key", alt_sdp=alt_sdp) == 2  # then we use the value from the cache
|
||||
@@ -0,0 +1,281 @@
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from caching.ListCache import ListCache
|
||||
from common.global_symbols import NotFound, Removed
|
||||
from tests.caching import FakeSdp
|
||||
|
||||
|
||||
class TestListCache(BaseTest):
|
||||
|
||||
def test_i_can_put_and_retrieve_value_from_list_cache(self):
|
||||
cache = ListCache()
|
||||
|
||||
cache.put("key", "value")
|
||||
assert cache.get("key") == ["value"]
|
||||
assert len(cache) == 1
|
||||
|
||||
cache.put("key", "value2") # we can append to this list
|
||||
assert cache.get("key") == ["value", "value2"]
|
||||
assert len(cache) == 2
|
||||
|
||||
cache.put("key2", "value")
|
||||
assert cache.get("key2") == ["value"]
|
||||
assert len(cache) == 3
|
||||
|
||||
# duplicates are allowed
|
||||
cache.put("key", "value")
|
||||
assert cache.get("key") == ["value", "value2", "value"]
|
||||
assert len(cache) == 4
|
||||
|
||||
assert cache.copy() == {'key': ['value', 'value2', 'value'], 'key2': ['value']}
|
||||
|
||||
def test_i_can_put_in_list_cache_when_alt_sdp_returns_values(self):
|
||||
cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: ["value1"]))
|
||||
assert cache.get("key") == ["value1", "value2"]
|
||||
|
||||
cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed))
|
||||
assert cache.get("key3") == ["value1"]
|
||||
|
||||
def test_i_can_put_in_list_cache_when_alt_sdp_returns_values_and_cache_is_cleared(self):
|
||||
cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
cache.clear()
|
||||
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: ["value1"]))
|
||||
assert cache.get("key") == ["value2"]
|
||||
|
||||
cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed))
|
||||
assert cache.get("key3") == ["value1"]
|
||||
|
||||
def test_current_cache_take_precedence_over_alt_sdp_when_i_put_data_in_list_cache(self):
|
||||
cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value1")
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx"))
|
||||
assert cache.get("key") == ["value1", "value2"]
|
||||
|
||||
def test_current_sdp_take_precedence_over_alt_sdp_when_i_put_data_in_list_cache(self):
|
||||
cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: ["value1"])).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx"))
|
||||
assert cache.get("key") == ["value1", "value2"]
|
||||
|
||||
def test_i_can_get_when_alt_sdp(self):
|
||||
cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
cache.get("key", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: ["value1"]))
|
||||
assert cache.get("key") == ["value1"]
|
||||
|
||||
def test_i_can_update_from_list_cache(self):
|
||||
cache = ListCache()
|
||||
|
||||
cache.put("key", "value")
|
||||
cache.put("key", "value2")
|
||||
cache.put("key", "value")
|
||||
cache.update("key", "value", "key", "another value")
|
||||
|
||||
assert len(cache._cache) == 1
|
||||
assert len(cache) == 3
|
||||
assert cache.get("key") == ["another value", "value2", "value"] # only the first one is affected
|
||||
|
||||
cache.update("key", "value2", "key2", "value2")
|
||||
assert len(cache._cache) == 2
|
||||
assert len(cache) == 3
|
||||
assert cache.get("key") == ["another value", "value"]
|
||||
assert cache.get("key2") == ["value2"]
|
||||
|
||||
cache.update("key2", "value2", "key3", "value2")
|
||||
assert len(cache._cache) == 2
|
||||
assert len(cache) == 3
|
||||
assert cache.get("key") == ["another value", "value"]
|
||||
assert cache.get("key3") == ["value2"]
|
||||
assert cache.get("key2") is NotFound
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
cache.update("wrong key", "value", "key", "value")
|
||||
|
||||
def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same(self):
|
||||
cache = ListCache(default=lambda sdp, key: sdp.get("cache_name", key),
|
||||
extend_exists=lambda sdp, key: sdp.exists("cache_name", key),
|
||||
sdp=FakeSdp(get_value=lambda cache_name, key: NotFound))
|
||||
|
||||
cache.put("key", "value")
|
||||
cache.update("key", "value", "key", "new_value", FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
|
||||
assert cache.get("key") == ["new_value"]
|
||||
|
||||
def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same_but_nothing_in_cache(self):
|
||||
# There is nothing in cache or remote repository.
|
||||
# We must ust the value from alt_sdp
|
||||
cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
previous_value = ["old_1", "old_2", "value"]
|
||||
alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: True,
|
||||
get_alt_value=lambda cache_name, key: previous_value)
|
||||
cache.update("key", "value", "key", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key") == ["old_1", "old_2", "new_value"]
|
||||
assert previous_value == ["old_1", "old_2", "value"]
|
||||
|
||||
    def test_i_can_update_when_alt_sdp_from_cache_keys_are_different(self):
        """When the key changes, the local cache takes precedence over alt_sdp."""
        # keys are different
        # make sure that the current cache takes precedence over alt_sdp
        # In this test, the values from alt_sdp are never used
        cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
                          get_alt_value=lambda cache_name, key: ["xxx1"] if key == "key1" else NotFound)

        # one value in 'key1'
        cache.put("key1", "old_1")

        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        # the only value moved away, so 'key1' is flagged as Removed
        assert cache.get("key1") == Removed
        assert cache.get("key2") == ["new_value"]
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()

        # Multiple values in 'key1'
        cache.clear()
        cache.put("key1", "old_1")
        cache.put("key1", "old_2")

        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == ["old_2"]
        assert cache.get("key2") == ["new_value"]
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()
    def test_i_can_update_when_alt_sdp_from_repository_keys_are_different(self):
        """When the key changes, the remote repository takes precedence over alt_sdp."""
        # keys are different
        # make sure that the current repo takes precedence over alt_sdp
        remote = FakeSdp(get_value=lambda cache_name, key: ["old_1"] if key == "key1" else NotFound)
        cache = ListCache(sdp=remote).auto_configure("cache_name")

        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
                          get_alt_value=lambda cache_name, key: ["xxx1"] if key == "key1" else NotFound)

        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == Removed
        assert cache.get("key2") == ["new_value"]
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()

        # Multiple values in 'key1'
        remote = FakeSdp(get_value=lambda cache_name, key: ["old_1", "old_2"] if key == "key1" else NotFound)
        cache = ListCache(sdp=remote).auto_configure("cache_name")
        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == ["old_2"]
        assert cache.get("key2") == ["new_value"]
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()
    def test_i_can_update_when_alt_sdp_from_alt_sdp_keys_are_different_one_value(self):
        """With nothing local or remote, alt_sdp supplies the previous values."""
        # keys are different
        # No value found in cache or remote repository,
        # will use values from alt_sdp.
        # The old key is the same, so it has to be marked as Removed
        cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        # one value in 'key1'
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
                          get_alt_value=lambda cache_name, key: ["old_1"] if key == "key1" else NotFound)

        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == Removed
        assert cache.get("key2") == ["new_value"]
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()

        # Multiple values in 'key1'
        cache.test_only_reset()
        old_values = ["old_1", "old_2"]
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
                          get_alt_value=lambda cache_name, key: old_values if key == "key1" else NotFound)
        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == ["old_2"]
        assert cache.get("key2") == ["new_value"]
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()
        assert old_values == ["old_1", "old_2"]  # not modified
    def test_i_can_update_when_alt_sdp_cache_take_precedence_for_destination_key(self):
        """A destination value already present in the local cache wins over alt_sdp."""
        # If a value exists in the destination key, either in local cache or remote repository,
        # it takes precedence.
        # If no value is found, we must use the value from alt_sdp.
        cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
                          get_alt_value=lambda cache_name, key: ["xxx2"] if key == "key2" else NotFound)
        cache.put("key1", "source_value")
        cache.put("key2", "old_value")
        cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == NotFound
        assert cache.get("key2") == ['old_value', 'new_value']
        assert cache.to_add == {"key2"}
        assert cache.to_remove == {"key1"}
    def test_i_can_update_when_alt_sdp_repository_take_precedence_for_destination_key(self):
        """A destination value already present in the remote repository wins over alt_sdp."""
        # If a value exists in the destination key, either in local cache or remote repository,
        # it takes precedence.
        # If no value is found, we must use the value from alt_sdp.
        remote_repo = FakeSdp(get_value=lambda cache_name, key: ["old_value"] if key == "key2" else NotFound)
        cache = ListCache(sdp=remote_repo).auto_configure("cache_name")

        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
                          get_alt_value=lambda cache_name, key: ["xxx2"] if key == "key2" else NotFound)
        cache.put("key1", "source_value")
        cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == NotFound
        assert cache.get("key2") == ['old_value', 'new_value']
        assert cache.to_add == {"key2"}
        assert cache.to_remove == {"key1"}
    def test_i_can_update_when_alt_sdp_use_alt_sdp_when_no_destination_value_found(self):
        """With no destination value in cache or repository, alt_sdp values are merged in."""
        # If a value exists in the destination key, either in local cache or remote repository,
        # it takes precedence.
        # If no value is found, we must use the value from alt_sdp.
        cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        cache.put("key1", "source_value")
        previous_values = ["old_1", "old_2"]
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
                          get_alt_value=lambda cache_name, key: previous_values if key == "key2" else NotFound)

        cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == NotFound
        assert cache.get("key2") == ["old_1", "old_2", 'new_value']
        assert cache.to_add == {"key2"}
        assert cache.to_remove == {"key1"}
        assert previous_values == ["old_1", "old_2"]  # not modified
    def test_i_can_update_when_alt_sdp_and_cache_is_cleared(self):
        """After clear(), update() raises KeyError even if alt_sdp could supply values."""
        cache = ListCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1"])
        cache.clear()

        # same key
        with pytest.raises(KeyError):
            cache.update("key", "value1", "key", "value2", alt_sdp=alt_sdp)

        # different key
        with pytest.raises(KeyError):
            cache.update("key", "value1", "key2", "value2", alt_sdp=alt_sdp)
    def test_default_is_called_before_updating_list_cache(self):
        """The 'default' callable seeds the list cache before an update is applied."""
        # default yields nothing: updating an unknown key must fail
        cache = ListCache(default=lambda k: NotFound)
        with pytest.raises(KeyError):
            cache.update("old_key", "old_value", "new_key", "new_value")

        # same key: the matching seeded value is replaced in place
        cache = ListCache(default=lambda k: ["old_value", "other old value"])
        cache.update("old_key", "old_value", "old_key", "new_value")
        assert cache.get("old_key") == ["new_value", "other old value"]

        # different key, destination has no default
        cache = ListCache(default=lambda k: ["old_value", "other old value"] if k == "old_key" else NotFound)
        cache.update("old_key", "old_value", "new_key", "new_value")
        assert cache.get("old_key") == ["other old value"]
        assert cache.get("new_key") == ["new_value"]

        # different key, destination also has a default: values are merged
        cache = ListCache(default=lambda k: ["old_value", "other old value"] if k == "old_key" else ["other new"])
        cache.update("old_key", "old_value", "new_key", "new_value")
        assert cache.get("old_key") == ["other old value"]
        assert cache.get("new_key") == ["other new", "new_value"]
@@ -0,0 +1,648 @@
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from caching.ListIfNeededCache import ListIfNeededCache
|
||||
from common.global_symbols import NotFound, Removed
|
||||
from tests.caching import FakeSdp
|
||||
|
||||
|
||||
class TestListIfNeededCache(BaseTest):
|
||||
    def test_i_can_put_and_retrieve_value_from_list_if_needed_cache(self):
        """A key stores a scalar until a second put() promotes it to a list."""
        cache = ListIfNeededCache()

        cache.put("key", "value")
        assert cache.get("key") == "value"

        # second time with the same key creates a list
        cache.put("key", "value2")
        assert cache.get("key") == ["value", "value2"]
        assert len(cache) == 2

        # third time, we now have a list
        cache.put("key", "value3")
        assert cache.get("key") == ["value", "value2", "value3"]
        assert len(cache) == 3

        # other keys are not affected
        cache.put("key2", "value")
        assert cache.get("key2") == "value"
        assert len(cache) == 4

        # duplicates are allowed
        cache.put("key", "value")
        assert cache.get("key") == ["value", "value2", "value3", "value"]
        assert len(cache) == 5
    def test_i_can_put_in_list_if_need_cache_when_alt_sdp_returns_values(self):
        """put() merges values coming from alt_sdp with the new value."""
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        # a scalar alt value is promoted to a list together with the new value
        cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "value1"))
        assert cache.get("key") == ["value1", "value2"]

        # a list alt value is extended
        cache.put("key2", "value3", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: ["value1", "value2"]))
        assert cache.get("key2") == ["value1", "value2", "value3"]

        # a Removed alt value is ignored
        cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed))
        assert cache.get("key3") == "value1"
    def test_i_can_put_in_list_if_need__cache_when_alt_sdp_returns_values_and_cache_is_cleared(self):
        """After clear(), alt_sdp values are ignored on put()."""
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
        cache.clear()

        # the alt value is NOT merged into a cleared cache
        cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "value1"))
        assert cache.get("key") == "value2"

        cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed))
        assert cache.get("key3") == "value1"
def test_current_cache_take_precedence_over_alt_sdp_when_i_put_data_in_list_if_needed_cache(self):
|
||||
cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value1")
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx"))
|
||||
assert cache.get("key") == ["value1", "value2"]
|
||||
|
||||
def test_current_sdp_take_precedence_over_alt_sdp_when_i_put_data_in_list_if_needed_cache(self):
|
||||
cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: "value1")).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx"))
|
||||
assert cache.get("key") == ["value1", "value2"]
|
||||
|
||||
    def test_i_can_update_from_list_if_needed_cache(self):
        """update() replaces a single occurrence and collapses/promotes lists as needed."""
        cache = ListIfNeededCache()

        cache.put("key", "value")
        cache.put("key", "value2")
        cache.put("key", "value")

        # only the first 'value' is affected
        cache.update("key", "value", "key", "another value")
        assert len(cache._cache) == 1
        assert len(cache) == 3
        assert cache.get("key") == ["another value", "value2", "value"]

        # change the key
        cache.update("key", "value2", "key2", "value2")
        assert len(cache._cache) == 2
        assert len(cache) == 3
        assert cache.get("key") == ["another value", "value"]
        assert cache.get("key2") == "value2"

        # rename the newly created key
        cache.update("key2", "value2", "key3", "value2")
        assert len(cache._cache) == 2
        assert len(cache) == 3
        assert cache.get("key") == ["another value", "value"]
        assert cache.get("key3") == "value2"
        assert cache.get("key2") is NotFound

        # from list to single item and vice versa
        cache.update("key", "value", "key3", "value")
        assert len(cache._cache) == 2
        assert len(cache) == 3
        assert cache.get("key") == "another value"  # 'key' is no longer a list
        assert cache.get("key3") == ["value2", "value"]  # 'key3' is now a list
        assert cache.get("key2") is NotFound

        # updating an unknown key must fail
        with pytest.raises(KeyError):
            cache.update("wrong key", "value", "key", "value")
def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same(self):
|
||||
cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value")
|
||||
cache.update("key", "value", "key", "new_value", FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
|
||||
assert cache.get("key") == "new_value"
|
||||
|
||||
    def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same_but_nothing_in_cache(self):
        """With nothing in the cache or the remote repository, the alt_sdp value is used."""
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        # one value in alt_sdp
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: True,
                          get_alt_value=lambda cache_name, key: "old_value")
        cache.update("key", "value", "key", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key") == "new_value"

        # multiple values in alt_sdp
        cache.test_only_reset()
        previous_value = ["old_1", "old_2", "value"]
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: True,
                          get_alt_value=lambda cache_name, key: previous_value)
        cache.update("key", "value", "key", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key") == ["old_1", "old_2", "new_value"]
        assert previous_value == ["old_1", "old_2", "value"]  # not modified
    def test_i_can_update_when_alt_sdp_from_cache_keys_are_different(self):
        """When the key changes, the local cache takes precedence over alt_sdp."""
        # keys are different
        # make sure that the current cache takes precedence over alt_sdp
        # In this test, the values from alt_sdp are never used
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
                          get_alt_value=lambda cache_name, key: "xxx1" if key == "key1" else NotFound)

        # one value in 'key1'
        cache.put("key1", "old_1")

        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == Removed
        assert cache.get("key2") == "new_value"
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()

        # Multiple values in 'key1'
        cache.clear()
        cache.put("key1", "old_1")
        cache.put("key1", "old_2")

        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == "old_2"
        assert cache.get("key2") == "new_value"
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()

        # List of values in 'key1'
        cache.clear()
        cache.put("key1", "old_1")
        cache.put("key1", "old_2")
        cache.put("key1", "old_3")

        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == ["old_2", "old_3"]
        assert cache.get("key2") == "new_value"
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()
    def test_i_can_update_when_alt_sdp_from_repository_keys_are_different(self):
        """When the key changes, the remote repository takes precedence over alt_sdp."""
        # keys are different
        # make sure that the current repo takes precedence over alt_sdp
        remote = FakeSdp(get_value=lambda cache_name, key: "old_1" if key == "key1" else NotFound)
        cache = ListIfNeededCache(sdp=remote).auto_configure("cache_name")

        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
                          get_alt_value=lambda cache_name, key: "xxx1" if key == "key1" else NotFound)

        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == Removed
        assert cache.get("key2") == "new_value"
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()

        # Multiple values in 'key1'
        remote = FakeSdp(get_value=lambda cache_name, key: ["old_1", "old_2"] if key == "key1" else NotFound)
        cache = ListIfNeededCache(sdp=remote).auto_configure("cache_name")
        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == "old_2"
        assert cache.get("key2") == "new_value"
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()

        # List of values in 'key1'
        remote = FakeSdp(get_value=lambda cache_name, key: ["old_1", "old_2", "old_3"] if key == "key1" else NotFound)
        cache = ListIfNeededCache(sdp=remote).auto_configure("cache_name")
        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == ["old_2", "old_3"]
        assert cache.get("key2") == "new_value"
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()
    def test_i_can_update_when_alt_sdp_from_alt_sdp_keys_are_different_one_value(self):
        """With nothing local or remote, alt_sdp supplies the previous values."""
        # keys are different
        # No value found in cache or remote repository,
        # will use values from alt_sdp.
        # The old key is the same, so it has to be marked as Removed
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        # one value in 'key1'
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
                          get_alt_value=lambda cache_name, key: "old_1" if key == "key1" else NotFound)

        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == Removed
        assert cache.get("key2") == "new_value"
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()

        # Multiple values in 'key1'
        cache.test_only_reset()
        old_values = ["old_1", "old_2"]
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
                          get_alt_value=lambda cache_name, key: old_values if key == "key1" else NotFound)
        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == "old_2"
        assert cache.get("key2") == "new_value"
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()
        assert old_values == ["old_1", "old_2"]  # not modified

        # List of values in 'key1'
        cache.test_only_reset()
        old_values = ["old_1", "old_2", "old_3"]
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
                          get_alt_value=lambda cache_name, key: old_values if key == "key1" else NotFound)
        cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == ["old_2", "old_3"]
        assert cache.get("key2") == "new_value"
        assert cache.to_add == {"key2", "key1"}
        assert cache.to_remove == set()
        assert old_values == ["old_1", "old_2", "old_3"]  # not modified
    def test_i_can_update_when_alt_sdp_cache_take_precedence_for_destination_key(self):
        """A destination value already present in the local cache wins over alt_sdp."""
        # If a value exists in the destination key, either in local cache or remote repository,
        # it takes precedence.
        # If no value is found, we must use the value from alt_sdp.

        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")

        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
                          get_alt_value=lambda cache_name, key: "xxx2" if key == "key2" else NotFound)
        cache.put("key1", "source_value")
        cache.put("key2", "old_value")
        cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == NotFound
        assert cache.get("key2") == ['old_value', 'new_value']
        assert cache.to_add == {"key2"}
        assert cache.to_remove == {"key1"}
    def test_i_can_update_when_alt_sdp_repository_take_precedence_for_destination_key(self):
        """A destination value already present in the remote repository wins over alt_sdp."""
        # If a value exists in the destination key, either in local cache or remote repository,
        # it takes precedence.
        # If no value is found, we must use the value from alt_sdp.
        remote_repo = FakeSdp(get_value=lambda cache_name, key: "old_value" if key == "key2" else NotFound)
        cache = ListIfNeededCache(sdp=remote_repo).auto_configure("cache_name")

        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
                          get_alt_value=lambda cache_name, key: "xxx2" if key == "key2" else NotFound)
        cache.put("key1", "source_value")
        cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == NotFound
        assert cache.get("key2") == ['old_value', 'new_value']
        assert cache.to_add == {"key2"}
        assert cache.to_remove == {"key1"}
    def test_i_can_update_when_alt_sdp_use_alt_sdp_when_no_destination_value_found(self):
        """With no destination value in cache or repository, alt_sdp values are merged in."""
        # If a value exists in the destination key, either in local cache or remote repository,
        # it takes precedence.
        # If no value is found, we must use the value from alt_sdp.
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
        # one value in 'key2'
        cache.put("key1", "source_value")
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
                          get_alt_value=lambda cache_name, key: "old_value" if key == "key2" else NotFound)

        cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == NotFound
        assert cache.get("key2") == ['old_value', 'new_value']
        assert cache.to_add == {"key2"}
        assert cache.to_remove == {"key1"}

        # Multiple values in 'key2'
        cache.test_only_reset()
        cache.put("key1", "source_value")
        previous_values = ["old_1", "old_2"]
        alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
                          get_alt_value=lambda cache_name, key: previous_values if key == "key2" else NotFound)

        cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
        assert cache.get("key1") == NotFound
        assert cache.get("key2") == ["old_1", "old_2", 'new_value']
        assert cache.to_add == {"key2"}
        assert cache.to_remove == {"key1"}
        assert previous_values == ["old_1", "old_2"]  # not modified
    def test_i_can_update_when_alt_sdp_and_cache_is_cleared(self):
        """After clear(), update() raises KeyError even if alt_sdp could supply values."""
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value1")
        cache.clear()

        # same key
        with pytest.raises(KeyError):
            cache.update("key", "value1", "key", "value2", alt_sdp=alt_sdp)

        # different key
        with pytest.raises(KeyError):
            cache.update("key", "value1", "key2", "value2", alt_sdp=alt_sdp)
    def test_default_is_called_before_updating_list_if_needed_cache(self):
        """The 'default' callable seeds the cache before an update is applied."""
        # default yields nothing: updating an unknown key must fail
        cache = ListIfNeededCache(default=lambda k: NotFound)
        with pytest.raises(KeyError):
            cache.update("old_key", "old_value", "new_key", "new_value")

        # scalar default, same key: replaced in place
        cache = ListIfNeededCache(default=lambda k: "old_value")
        cache.update("old_key", "old_value", "old_key", "new_value")
        assert cache.get("old_key") == "new_value"

        # list default, same key
        cache = ListIfNeededCache(default=lambda k: ["old_value", "other old value"])
        cache.update("old_key", "old_value", "old_key", "new_value")
        assert cache.get("old_key") == ["new_value", "other old value"]

        # different key, destination has no default: single leftovers collapse to scalars
        cache = ListIfNeededCache(default=lambda k: ["old_value", "other old value"] if k == "old_key" else NotFound)
        cache.update("old_key", "old_value", "new_key", "new_value")
        assert cache.get("old_key") == "other old value"
        assert cache.get("new_key") == "new_value"
    def test_i_can_delete_key_and_values(self):
        """delete() removes a whole key or a single value, collapsing lists as needed."""
        cache = ListIfNeededCache()
        cache.put("key", "value1")
        cache.put("key", "value11")
        cache.put("key2", "value2")
        cache.put("key2", "value22")
        cache.put("key2", "value222")
        cache.put("key3", "value3")
        cache.put("key3", "value33")
        cache.put("key4", "value4")
        cache.reset_events()

        assert len(cache) == 8

        # I can remove a whole key
        cache.delete("key")
        assert cache.get("key") is NotFound
        assert len(cache) == 6
        assert cache.to_remove == {"key"}
        assert cache.to_add == set()

        # I can remove an element while a list is remaining
        cache.reset_events()
        cache.delete("key2", "value22")
        assert cache.get("key2") == ["value2", "value222"]
        assert len(cache) == 5
        assert cache.to_add == {"key2"}
        assert cache.to_remove == set()

        # I can remove an element while a single element is remaining
        cache.reset_events()
        cache.delete("key3", "value33")
        assert cache.get("key3") == "value3"
        assert len(cache) == 4
        assert cache.to_add == {"key3"}
        assert cache.to_remove == set()

        # I can remove an element while nothing remains
        cache.reset_events()
        cache.delete("key4", "value4")
        assert cache.get("key4") is NotFound
        assert len(cache) == 3
        assert cache.to_remove == {"key4"}
        assert cache.to_add == set()

        # I do not remove when the value is not the same
        cache.reset_events()
        cache.delete("key3", "value33")  # value33 was already removed
        assert cache.get("key3") == "value3"
        assert len(cache) == 3
        assert cache.to_add == set()
        assert cache.to_remove == set()
    def test_i_can_delete_when_alt_sdp_a_key_from_cache(self):
        """Deleting a whole key flags it as Removed when alt_sdp knows the key."""
        # There is a value in alt_cache_manager,
        # no remaining value in current cache after deletion:
        # the key must be flagged as Removed.
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
        cache.put("key", "value")

        cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
        assert cache.copy() == {"key": Removed}
        assert cache.to_add == {"key"}
        assert cache.to_remove == set()
    def test_i_can_delete_when_alt_sdp_a_value_from_cache(self):
        """Deleting the last cached value flags the key as Removed when alt_sdp knows it."""
        # There is a value in alt_cache_manager,
        # no remaining value in current cache after deletion:
        # the key must be flagged as Removed.
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
        cache.put("key", "value")

        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
        cache.delete("key", value="value", alt_sdp=alt_sdp)
        assert cache.copy() == {"key": Removed}
        assert cache.to_remove == set()
        assert cache.to_add == {"key"}
    def test_i_can_delete_when_alt_sdp_a_value_from_cache_and_then_put_back(self):
        """A key flagged Removed by delete() can be re-populated with put()."""
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
        cache.put("key", "value")

        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
        cache.delete("key", value="value", alt_sdp=alt_sdp)  # remove all values
        cache.put("key", "value")

        # the Removed flag is replaced by the new value
        assert cache.copy() == {"key": "value"}
        assert cache.to_remove == set()
        assert cache.to_add == {"key"}
    def test_i_can_delete_when_alt_sdp_a_value_from_cache_remaining_one_value(self):
        """Deleting one of two cached values leaves the survivor as a scalar."""
        # There is a value in alt_cache_manager,
        # but there are remaining values in current cache after deletion.
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
        cache.put("key", "value")
        cache.put("key", "value2")

        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
        cache.delete("key", value="value", alt_sdp=alt_sdp)
        assert cache.copy() == {"key": "value2"}
        assert cache.to_remove == set()
        assert cache.to_add == {"key"}
    def test_i_can_delete_when_alt_sdp_a_value_from_cache_remaining_values(self):
        """Deleting one of three cached values leaves the survivors as a list."""
        # There is a value in alt_cache_manager,
        # but there are remaining values in current cache after deletion.
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
        cache.put("key", "value")
        cache.put("key", "value2")
        cache.put("key", "value3")

        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
        cache.delete("key", value="value", alt_sdp=alt_sdp)
        assert cache.copy() == {"key": ['value2', 'value3']}
        assert cache.to_remove == set()
        assert cache.to_add == {"key"}
    def test_i_can_delete_when_alt_sdp_a_key_from_remote_repository(self):
        """Deleting a key whose values live in the remote repository flags it as Removed."""
        # There is a value in alt_cache_manager,
        # no remaining value in current cache after deletion:
        # the key must be flagged as Removed.
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: ["value1", "value2"])).auto_configure(
            "cache_name")

        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
        cache.delete("key", value=None, alt_sdp=alt_sdp)
        assert cache.copy() == {"key": Removed}
        assert cache.to_remove == set()
        assert cache.to_add == {"key"}
    def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository(self):
        """Deleting the only remote value flags the key as Removed."""
        # There is a value in alt_cache_manager,
        # no remaining value in current cache after deletion:
        # the key must be flagged as Removed.
        cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: "value")).auto_configure("cache_name")

        alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
        cache.delete("key", value="value", alt_sdp=alt_sdp)
        assert cache.copy() == {"key": Removed}
        assert cache.to_remove == set()
        assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_key_from_remote_repository_and_then_put_back(self):
    # Delete the whole key (backed by the remote repository), then put a fresh
    # value back: the put must override the Removed flag.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: ["value1", "value2"])).auto_configure(
        "cache_name")

    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
    cache.delete("key", value=None, alt_sdp=alt_sdp)  # remove all values
    cache.put("key", "value")

    # a single value is stored bare (not wrapped in a list) by ListIfNeededCache
    assert cache.copy() == {"key": "value"}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository_remaining_one_value(self):
    # There is a value in alt_cache_manager,
    # but there is a remaining value in the current cache after the deletion,
    # so the key collapses to that single bare value.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: ["value1", "value2"])).auto_configure(
        "cache_name")

    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    assert cache.copy() == {"key": "value2"}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_delete_when_alt_sdp_a_key_from_alt_sdp(self):
    # alt_cache_manager is used because there is no value in the cache or in
    # the remote repository.
    # After deleting the whole key (value=None), the key must be flagged as Removed.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")

    # Fixed typo: the original ["value1, value2"] was a one-element list whose
    # string contains a comma — clearly meant to be two values, as in the
    # parallel *_one_value_remaining test below.
    # Behavior is unchanged: value=None removes the key whatever it holds.
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1", "value2"],
                      extend_exists=lambda cache_name, key: True)

    cache.delete("key", value=None, alt_sdp=alt_sdp)
    assert cache.copy() == {"key": Removed}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp(self):
    # alt_cache_manager is used because there is no value in the cache or in
    # the remote repository.
    # After the value deletion, the key is empty and must be flagged as Removed.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")

    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value1",
                      extend_exists=lambda cache_name, key: True)

    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    assert cache.copy() == {"key": Removed}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_and_then_put_back(self):
    # alt_cache_manager is used because there is no value in the cache or in
    # the remote repository.
    # After the value deletion, the key is empty; a subsequent put must
    # override the Removed flag with the fresh value.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")

    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value1",
                      extend_exists=lambda cache_name, key: True)

    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    cache.put("key", "value")

    assert cache.copy() == {"key": "value"}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_one_value_remaining(self):
    # alt_cache_manager is used because there is no value in the cache or in
    # the remote repository.
    # After the value deletion, one value remains in the cache, stored bare.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")

    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1", "value2"],
                      extend_exists=lambda cache_name, key: True)

    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    assert cache.copy() == {"key": "value2"}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_multiple_values_remaining(self):
    # alt_cache_manager is used because there is no value in the cache or in
    # the remote repository.
    # After the value deletion, multiple values remain, so they stay as a list.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")

    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1", "value2", "value3"],
                      extend_exists=lambda cache_name, key: True)

    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    assert cache.copy() == {"key": ["value2", "value3"]}
    assert cache.to_add == {"key"}
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_an_already_removed_value_from_alt_sdp(self):
    # alt_cache_manager is used because there is no value in the cache or in
    # the remote repository.
    # But the alternate sdp returns Removed, which means the previous value
    # was already deleted — it's like there is nothing to delete, so no event
    # is recorded at all.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")

    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: Removed,
                      extend_exists=lambda cache_name, key: False)

    cache.delete("key", value="value1", alt_sdp=alt_sdp)
    assert cache.copy() == {}
    assert cache.to_add == set()
    assert cache.to_remove == set()
def test_deleting_an_entry_that_does_not_exist_is_not_an_error(self):
    # Deleting a missing key, a value under a missing key, or a missing value
    # under an existing key must all be silent no-ops: no size change, no events.
    cache = ListIfNeededCache()
    cache.put("key", "value1")

    cache.reset_events()
    # missing key, whole-key delete
    cache.delete("key3")
    assert len(cache) == 1
    assert cache.to_add == set()
    assert cache.to_remove == set()

    # missing key, value delete
    cache.delete("key3", "value")
    assert len(cache) == 1
    assert cache.to_add == set()
    assert cache.to_remove == set()

    # existing key, missing value
    cache.delete("key", "value2")
    assert len(cache) == 1
    assert cache.to_add == set()
    assert cache.to_remove == set()
def test_i_can_delete_when_alt_sdp_and_cache_is_cleared(self):
    # Once the cache has been cleared, deletes must not consult alt_sdp:
    # nothing is stored and no events are recorded.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value",
                      extend_exists=lambda cache_name, key: True)

    cache.clear()
    # whole-key delete after clear is a no-op
    cache.delete("key", value=None, alt_sdp=alt_sdp)
    assert cache.copy() == {}
    assert cache.to_add == set()
    assert cache.to_remove == set()

    # value delete after clear is a no-op too
    cache.delete("key", value="value", alt_sdp=alt_sdp)
    assert cache.copy() == {}
    assert cache.to_add == set()
    assert cache.to_remove == set()
def test_i_can_add_when_alt_sdp_from_a_removed_remote_repository(self):
    # The remote repository reports the key as Removed; putting a new value
    # must succeed and record an add event.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: Removed)).auto_configure("cache_name")
    cache.put("key", "value")

    assert cache.copy() == {"key": "value"}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
def test_i_can_add_when_alt_sdp_from_a_removed_remote_repository_from_alt_sdp(self):
    # The key is removed in the sub layers (alt_sdp reports Removed);
    # we can put it back.
    cache = ListIfNeededCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
    alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: Removed,
                      extend_exists=lambda cache_name, key: False)

    cache.put("key", "value", alt_sdp=alt_sdp)

    assert cache.copy() == {"key": "value"}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
@@ -0,0 +1,540 @@
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from caching.SetCache import SetCache
|
||||
from common.global_symbols import NotFound, Removed
|
||||
from tests.caching import FakeSdp
|
||||
|
||||
|
||||
class TestSetCache(BaseTest):
|
||||
|
||||
def test_i_can_put_and_retrieve_values_from_set_cache(self):
|
||||
cache = SetCache()
|
||||
|
||||
cache.put("key", "value")
|
||||
assert cache.get("key") == {"value"}
|
||||
assert len(cache) == 1
|
||||
|
||||
# we can add to this set
|
||||
cache.put("key", "value2")
|
||||
assert cache.get("key") == {"value", "value2"}
|
||||
assert len(cache) == 2
|
||||
|
||||
# other keys are not affected
|
||||
cache.put("key2", "value")
|
||||
assert cache.get("key2") == {"value"}
|
||||
assert len(cache) == 3
|
||||
|
||||
# duplicates are removed
|
||||
cache.put("key", "value")
|
||||
assert cache.get("key") == {"value", "value2"}
|
||||
assert len(cache) == 3
|
||||
|
||||
assert cache.copy() == {'key': {'value', 'value2'}, 'key2': {'value'}}
|
||||
|
||||
def test_i_can_put_in_set_cache_when_alt_sdp_returns_values(self):
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: {"value1"}))
|
||||
assert cache.get("key") == {"value1", "value2"}
|
||||
|
||||
cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed))
|
||||
assert cache.get("key3") == {"value1"}
|
||||
|
||||
def test_i_can_put_in_set_cache_when_alt_sdp_returns_values_and_cache_is_cleared(self):
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
cache.clear()
|
||||
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: {"value1"}))
|
||||
assert cache.get("key") == {"value2"}
|
||||
|
||||
cache.put("key3", "value1", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: Removed))
|
||||
assert cache.get("key3") == {"value1"}
|
||||
|
||||
def test_current_cache_take_precedence_over_alt_sdp_when_i_put_data_in_set_cache(self):
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value1")
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx"))
|
||||
assert cache.get("key") == {"value1", "value2"}
|
||||
|
||||
def test_current_sdp_take_precedence_over_alt_sdp_when_i_put_data_in_set_cache(self):
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: {"value1"})).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value2", alt_sdp=FakeSdp(get_alt_value=lambda cache_name, key: "xxx"))
|
||||
assert cache.get("key") == {"value1", "value2"}
|
||||
|
||||
def test_i_can_update_from_set_cache(self):
|
||||
cache = SetCache()
|
||||
|
||||
cache.put("key", "value")
|
||||
cache.put("key", "value2")
|
||||
cache.update("key", "value", "key", "another value")
|
||||
|
||||
assert len(cache._cache) == 1
|
||||
assert len(cache) == 2
|
||||
assert cache.get("key") == {"another value", "value2"}
|
||||
|
||||
cache.update("key", "value2", "key2", "value2")
|
||||
assert len(cache._cache) == 2
|
||||
assert len(cache) == 2
|
||||
assert cache.get("key") == {"another value"}
|
||||
assert cache.get("key2") == {"value2"}
|
||||
|
||||
cache.update("key", "another value", "key3", "another value")
|
||||
assert len(cache._cache) == 2
|
||||
assert len(cache) == 2
|
||||
assert cache.get("key") is NotFound
|
||||
assert cache.get("key2") == {"value2"}
|
||||
assert cache.get("key3") == {"another value"}
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
cache.update("wrong key", "value", "key", "value")
|
||||
|
||||
def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same(self):
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
cache.put("key", "value")
|
||||
cache.update("key", "value", "key", "new_value", alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
|
||||
assert cache.get("key") == {"new_value"}
|
||||
|
||||
def test_i_can_update_when_alt_sdp_from_cache_keys_are_the_same_but_nothing_in_cache(self):
|
||||
# There is nothing in cache or remote repository.
|
||||
# We must ust the value from alt_sdp
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
previous_value = {"old_1", "old_2", "value"}
|
||||
alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: True,
|
||||
get_alt_value=lambda cache_name, key: previous_value)
|
||||
cache.update("key", "value", "key", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key") == {"old_1", "old_2", "new_value"}
|
||||
assert previous_value == {"old_1", "old_2", "value"}
|
||||
|
||||
def test_i_can_update_when_alt_sdp_from_cache_keys_are_different(self):
|
||||
# keys are different
|
||||
# make sure that current cache take precedence over alt_sdp
|
||||
# In this test, the values from alt_sdp are never used
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
|
||||
get_alt_value=lambda cache_name, key: {"xxx1"} if key == "key1" else NotFound)
|
||||
|
||||
# one values in 'key1'
|
||||
cache.put("key1", "old_1")
|
||||
|
||||
cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key1") == Removed
|
||||
assert cache.get("key2") == {"new_value"}
|
||||
assert cache.to_add == {"key2", "key1"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
# Multiple values in 'key1'
|
||||
cache.clear()
|
||||
cache.put("key1", "old_1")
|
||||
cache.put("key1", "old_2")
|
||||
|
||||
cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key1") == {"old_2"}
|
||||
assert cache.get("key2") == {"new_value"}
|
||||
assert cache.to_add == {"key2", "key1"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_update_when_alt_sdp_from_repository_keys_are_different(self):
|
||||
# keys are different
|
||||
# make sure that current repo take precedence over alt_sdp
|
||||
remote = FakeSdp(get_value=lambda cache_name, key: {"old_1"} if key == "key1" else NotFound)
|
||||
cache = SetCache(sdp=remote).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
|
||||
get_alt_value=lambda cache_name, key: {"xxx1"} if key == "key1" else NotFound)
|
||||
|
||||
cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key1") == Removed
|
||||
assert cache.get("key2") == {"new_value"}
|
||||
assert cache.to_add == {"key2", "key1"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
# Multiple values in 'key1'
|
||||
remote = FakeSdp(get_value=lambda cache_name, key: {"old_1", "old_2"} if key == "key1" else NotFound)
|
||||
cache = SetCache(sdp=remote).auto_configure("cache_name")
|
||||
cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key1") == {"old_2"}
|
||||
assert cache.get("key2") == {"new_value"}
|
||||
assert cache.to_add == {"key2", "key1"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_update_when_alt_sdp_from_alt_sdp_keys_are_different_one_value(self):
|
||||
# keys are different
|
||||
# No value found in cache or remote repository,
|
||||
# Will use values from alt_sdp
|
||||
# The old key is the same, so it has to be marked as Removed
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
# one values in 'key1'
|
||||
alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
|
||||
get_alt_value=lambda cache_name, key: {"old_1"} if key == "key1" else NotFound)
|
||||
|
||||
cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key1") == Removed
|
||||
assert cache.get("key2") == {"new_value"}
|
||||
assert cache.to_add == {"key2", "key1"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
# Multiple values in 'key1'
|
||||
cache.test_only_reset()
|
||||
old_values = {"old_1", "old_2"}
|
||||
alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key1",
|
||||
get_alt_value=lambda cache_name, key: old_values if key == "key1" else NotFound)
|
||||
cache.update("key1", "old_1", "key2", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key1") == {"old_2"}
|
||||
assert cache.get("key2") == {"new_value"}
|
||||
assert cache.to_add == {"key2", "key1"}
|
||||
assert cache.to_remove == set()
|
||||
assert old_values == {"old_1", "old_2"} # not modified
|
||||
|
||||
def test_i_can_update_when_alt_sdp_cache_take_precedence_for_destination_key(self):
|
||||
# If a value exists in destination key, either in local cache or remote repository
|
||||
# It take precedence
|
||||
# If no value is found, we must use the value from alt_sdp
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
|
||||
get_alt_value=lambda cache_name, key: {"xxx2"} if key == "key2" else NotFound)
|
||||
cache.put("key1", "source_value")
|
||||
cache.put("key2", "old_value")
|
||||
cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key1") == NotFound
|
||||
assert cache.get("key2") == {'old_value', 'new_value'}
|
||||
assert cache.to_add == {"key2"}
|
||||
assert cache.to_remove == {"key1"}
|
||||
|
||||
def test_i_can_update_when_alt_sdp_repository_take_precedence_for_destination_key(self):
|
||||
# If a value exists in destination key, either in local cache or remote repository
|
||||
# It take precedence
|
||||
# If no value is found, we must use the value from alt_sdp
|
||||
remote_repo = FakeSdp(get_value=lambda cache_name, key: {"old_value"} if key == "key2" else NotFound)
|
||||
cache = SetCache(sdp=remote_repo).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
|
||||
get_alt_value=lambda cache_name, key: "xxx2" if key == "key2" else NotFound)
|
||||
cache.put("key1", "source_value")
|
||||
cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key1") == NotFound
|
||||
assert cache.get("key2") == {'old_value', 'new_value'}
|
||||
assert cache.to_add == {"key2"}
|
||||
assert cache.to_remove == {"key1"}
|
||||
|
||||
def test_i_can_update_when_alt_sdp_use_alt_sdp_when_no_destination_value_found(self):
|
||||
# If a value exists in destination key, either in local cache or remote repository
|
||||
# It take precedence
|
||||
# If no value is found, we must use the value from alt_sdp
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
cache.put("key1", "source_value")
|
||||
previous_values = {"old_1", "old_2"}
|
||||
alt_sdp = FakeSdp(extend_exists=lambda cache_name, key: key == "key2",
|
||||
get_alt_value=lambda cache_name, key: previous_values if key == "key2" else NotFound)
|
||||
|
||||
cache.update("key1", "source_value", "key2", "new_value", alt_sdp=alt_sdp)
|
||||
assert cache.get("key1") == NotFound
|
||||
assert cache.get("key2") == {"old_1", "old_2", 'new_value'}
|
||||
assert cache.to_add == {"key2"}
|
||||
assert cache.to_remove == {"key1"}
|
||||
assert previous_values == {"old_1", "old_2"} # not modified
|
||||
|
||||
def test_i_can_update_when_alt_sdp_and_cache_is_cleared(self):
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: ["value1"])
|
||||
cache.clear()
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
cache.update("key", "value1", "key", "value2", alt_sdp=alt_sdp)
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
cache.update("key", "value1", "key2", "value2", alt_sdp=alt_sdp)
|
||||
|
||||
def test_default_is_called_before_updating_set_cache(self):
|
||||
cache = SetCache(default=lambda k: NotFound)
|
||||
with pytest.raises(KeyError):
|
||||
cache.update("old_key", "old_value", "new_key", "new_value")
|
||||
|
||||
cache = SetCache(default=lambda k: {"old_value", "other old value"})
|
||||
cache.update("old_key", "old_value", "old_key", "new_value")
|
||||
assert cache.get("old_key") == {"new_value", "other old value"}
|
||||
|
||||
cache = SetCache(default=lambda k: {"old_value", "other old value"} if k == "old_key" else NotFound)
|
||||
cache.update("old_key", "old_value", "new_key", "new_value")
|
||||
assert cache.get("old_key") == {"other old value"}
|
||||
assert cache.get("new_key") == {"new_value"}
|
||||
|
||||
cache = SetCache(default=lambda k: {"old_value", "other old value"} if k == "old_key" else {"other new"})
|
||||
cache.update("old_key", "old_value", "new_key", "new_value")
|
||||
assert cache.get("old_key") == {"other old value"}
|
||||
assert cache.get("new_key") == {"other new", "new_value"}
|
||||
|
||||
def test_i_can_delete_values_from_set_cache(self):
|
||||
cache = SetCache()
|
||||
cache.put("key", "value1")
|
||||
cache.put("key", "value2")
|
||||
cache.reset_events()
|
||||
|
||||
cache.delete("key", "fake_value")
|
||||
assert cache.get("key") == {"value1", "value2"}
|
||||
assert len(cache) == 2
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == set()
|
||||
|
||||
cache.delete("key", "value1")
|
||||
assert cache.get("key") == {"value2"}
|
||||
assert cache.to_add == {"key"}
|
||||
assert len(cache) == 1
|
||||
|
||||
cache.delete("key", "value2")
|
||||
assert cache.get("key") is NotFound
|
||||
assert cache.to_remove == {"key"}
|
||||
assert len(cache) == 0
|
||||
|
||||
def test_i_can_delete_key_from_set_cache(self):
|
||||
cache = SetCache()
|
||||
cache.put("key", "value1")
|
||||
cache.put("key", "value2")
|
||||
|
||||
cache.delete("key")
|
||||
assert cache.get("key") is NotFound
|
||||
assert cache.to_remove == {"key"}
|
||||
assert len(cache) == 0
|
||||
|
||||
def test_i_can_delete_a_key_that_does_not_exists(self):
|
||||
cache = SetCache()
|
||||
cache.delete("key")
|
||||
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_key_from_cache(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# No remaining value in current cache after deletion
|
||||
# The key must be flagged as Removed
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
cache.put("key", "value")
|
||||
|
||||
cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
assert cache.copy() == {"key": Removed}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_from_cache(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# No remaining value in current cache after deletion
|
||||
# The key must be flagged as Removed
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
cache.put("key", "value")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
|
||||
cache.delete("key", value="value", alt_sdp=alt_sdp)
|
||||
assert cache.copy() == {"key": Removed}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_from_cache_and_then_put_back(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# No remaining value in current cache after deletion
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
cache.put("key", "value")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "xxx", extend_exists=lambda cache_name, key: True)
|
||||
cache.delete("key", value="value", alt_sdp=alt_sdp)
|
||||
cache.put("key", "value")
|
||||
|
||||
assert cache.copy() == {"key": {"value"}}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_from_cache_remaining_values(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# But there is a value in the current cache after deletion
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
cache.put("key", "value1")
|
||||
cache.put("key", "value2")
|
||||
|
||||
cache.delete("key", value="value1", alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
assert cache.copy() == {"key": {"value2"}}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_key_from_remote_repository(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# No remaining value in current cache after deletion
|
||||
# The key must be flagged as Removed
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: {"value1", "value2"})).auto_configure("cache_name")
|
||||
|
||||
cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
assert cache.copy() == {"key": Removed}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# No remaining value in current cache after deletion
|
||||
# The key must be flagged as Removed
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: {"value"})).auto_configure("cache_name")
|
||||
|
||||
cache.delete("key", value="value", alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
assert cache.copy() == {"key": Removed}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository_and_then_put_back(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# No remaining value in current cache after deletion
|
||||
# The key must be flagged as Removed
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: {"value1", "value2"})).auto_configure("cache_name")
|
||||
|
||||
cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
cache.put("key", "value")
|
||||
|
||||
assert cache.copy() == {"key": {"value"}}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_from_remote_repository_remaining_values(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# But there is a value in the current cache after deletion
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: {"value1", "value2"})).auto_configure("cache_name")
|
||||
|
||||
cache.delete("key", value="value1", alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
assert cache.copy() == {"key": {"value2"}}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_key_from_alt_sdp(self):
|
||||
# alt_cache_manager is used because no value in cache or in remote repository
|
||||
# After value deletion, the key is empty
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value1, value2"},
|
||||
extend_exists=lambda cache_name, key: True)
|
||||
|
||||
cache.delete("key", value=None, alt_sdp=alt_sdp)
|
||||
assert cache.copy() == {"key": Removed}
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp(self):
|
||||
# alt_cache_manager is used because no value in cache or in remote repository
|
||||
# After value deletion, the key is empty
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value1"},
|
||||
extend_exists=lambda cache_name, key: True)
|
||||
|
||||
cache.delete("key", value="value1", alt_sdp=alt_sdp)
|
||||
assert cache.copy() == {"key": Removed}
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_and_then_put_back(self):
|
||||
# alt_cache_manager is used because no value in cache or in remote repository
|
||||
# After value deletion, the key is empty
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value1"},
|
||||
extend_exists=lambda cache_name, key: True)
|
||||
|
||||
cache.delete("key", value="value1", alt_sdp=alt_sdp)
|
||||
cache.put("key", "value")
|
||||
|
||||
assert cache.copy() == {"key": {"value"}}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_from_alt_sdp_one_value_remaining(self):
|
||||
# alt_cache_manager is used because no value in cache or in remote repository
|
||||
# After value deletion, the key is empty
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value1", "value2"},
|
||||
extend_exists=lambda cache_name, key: True)
|
||||
|
||||
cache.delete("key", value="value1", alt_sdp=alt_sdp)
|
||||
assert cache.copy() == {"key": {"value2"}}
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_key_that_does_not_exist_from_alt_sdp(self):
|
||||
# alt_cache_manager is used because no value in cache or in remote repository
|
||||
# After value deletion, the key is empty
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=(lambda cache_name, key: {"value1", "value2"} if key == "key" else NotFound),
|
||||
extend_exists=lambda cache_name, key: key == "key")
|
||||
|
||||
cache.delete("key2", value=None, alt_sdp=alt_sdp)
|
||||
assert cache.copy() == {}
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_a_value_that_does_not_exist_from_alt_sdp(self):
|
||||
# alt_cache_manager is used because no value in cache or in remote repository
|
||||
# After value deletion, the key is empty
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value1", "value2"},
|
||||
extend_exists=lambda cache_name, key: True)
|
||||
|
||||
cache.delete("key", value="value4", alt_sdp=alt_sdp)
|
||||
assert cache.copy() == {}
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_an_already_removed_value_from_alt_sdp(self):
|
||||
# alt_cache_manager is used because no value in cache or in remote repository
|
||||
# But the alternate sdp returns Removed, which means that previous value was deleted
|
||||
# It's like there is nothing to delete
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: Removed,
|
||||
extend_exists=lambda cache_name, key: False)
|
||||
|
||||
cache.delete("key", value="value1", alt_sdp=alt_sdp)
|
||||
assert cache.copy() == {}
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_delete_when_alt_sdp_and_cache_is_cleared(self):
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: {"value"},
|
||||
extend_exists=lambda cache_name, key: True)
|
||||
|
||||
cache.clear()
|
||||
cache.delete("key", value=None, alt_sdp=alt_sdp)
|
||||
assert cache.copy() == {}
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == set()
|
||||
|
||||
cache.delete("key", value="value", alt_sdp=alt_sdp)
|
||||
assert cache.copy() == {}
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_add_when_alt_sdp_from_a_removed_remote_repository(self):
    """A key flagged Removed in the remote repository can be put back."""
    removed_sdp = FakeSdp(get_value=lambda entry, k: Removed)
    cache = SetCache(sdp=removed_sdp).auto_configure("cache_name")

    cache.put("key", "value")

    # the put resurrects the key locally and schedules it for addition
    assert cache.copy() == {"key": {"value"}}
    assert cache.to_remove == set()
    assert cache.to_add == {"key"}
|
||||
|
||||
def test_i_can_add_when_alt_sdp_from_a_removed_remote_repository_from_alt_sdp(self):
|
||||
# The key is removed in the sub layers
|
||||
# We can put it back
|
||||
cache = SetCache(sdp=FakeSdp(get_value=lambda entry, k: NotFound)).auto_configure("cache_name")
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: Removed,
|
||||
extend_exists=lambda cache_name, key: False)
|
||||
|
||||
cache.put("key", "value", alt_sdp=alt_sdp)
|
||||
|
||||
assert cache.copy() == {"key": {"value"}}
|
||||
assert cache.to_remove == set()
|
||||
assert cache.to_add == {"key"}
|
||||
@@ -0,0 +1,512 @@
|
||||
import pytest
|
||||
|
||||
from caching.BaseCache import BaseCache, MAX_INITIALIZED_KEY
|
||||
from caching.Cache import Cache
|
||||
from caching.DictionaryCache import DictionaryCache
|
||||
from caching.IncCache import IncCache
|
||||
from caching.ListCache import ListCache
|
||||
from caching.ListIfNeededCache import ListIfNeededCache
|
||||
from caching.SetCache import SetCache
|
||||
from common.global_symbols import NotFound, Removed
|
||||
from tests.caching import FakeSdp
|
||||
|
||||
|
||||
class TestCache:
|
||||
def test_i_can_configure(self):
|
||||
cache = Cache()
|
||||
cache.configure(max_size=256,
|
||||
default="default_delegate",
|
||||
extend_exists="extend_exists_delegate",
|
||||
alt_sdp_get="alt_sdp_delegate",
|
||||
sdp=FakeSdp())
|
||||
|
||||
# Caution, in this test, I initialize default, extend_exists and alt_get_delegate with string
|
||||
# to simplify the test, but in real usage, they are lambda
|
||||
# default = lambda sdp, key: sdp.get(cache_name, key) or lambda key: func(key)
|
||||
# extend_exists = lambda sdp, key: sdp.exists(cache_name, key) or lambda key: func(key)
|
||||
# alt_sdp_get = lambda sdp, key: sdp.alt_get(cache_name, key)
|
||||
|
||||
assert cache._max_size == 256
|
||||
assert cache._default == "default_delegate"
|
||||
assert cache._extend_exists == "extend_exists_delegate"
|
||||
assert cache._alt_sdp_get == "alt_sdp_delegate"
|
||||
assert cache._sdp is not None
|
||||
|
||||
def test_i_can_auto_configure(self):
|
||||
sdp = FakeSdp(get_value=lambda cache_name, key: key + 1 if cache_name == "cache_name" else NotFound,
|
||||
extend_exists=lambda cache_name, key: True if cache_name == "cache_name" else False,
|
||||
get_alt_value=lambda cache_name, key: key + 2 if cache_name == "cache_name" else NotFound)
|
||||
|
||||
cache = Cache(sdp=sdp).auto_configure("cache_name")
|
||||
assert cache._default(cache._sdp, 10) == 11
|
||||
assert cache._extend_exists(cache._sdp, 10) is True
|
||||
assert cache._alt_sdp_get(cache._sdp, 10) == 12
|
||||
|
||||
cache = Cache(sdp=sdp).auto_configure("another_cache")
|
||||
assert cache._default(cache._sdp, 10) == NotFound
|
||||
assert cache._extend_exists(cache._sdp, 10) is False
|
||||
assert cache._alt_sdp_get(cache._sdp, 10) == NotFound
|
||||
|
||||
def test_i_can_get_an_retrieve_value_from_cache(self):
|
||||
cache = Cache()
|
||||
cache.put("key", "value")
|
||||
assert cache.get("key") == "value"
|
||||
assert len(cache) == 1
|
||||
|
||||
cache.put("key", "another value") # another value in the cache replace the previous one
|
||||
assert cache.get("key") == "another value"
|
||||
assert len(cache) == 1
|
||||
|
||||
cache.put("key2", "value2") # another key
|
||||
assert cache.get("key2") == "value2"
|
||||
assert len(cache) == 2
|
||||
assert cache.copy() == {"key": "another value", "key2": "value2"}
|
||||
|
||||
def test_i_do_not_evict_when_put(self):
    """
    put() never evicts, even past max_size: eviction only happens on get().
    """
    # CONSISTENCY FIX: the constructor previously hard-coded max_size=5
    # while the rest of the test used the `maxsize` variable; the expected
    # dict was also a duplicated literal of the same range.
    maxsize = 5
    cache = Cache(max_size=maxsize)

    for key in range(maxsize + 2):
        cache.put(str(key), key)

    # every entry survives the puts, two beyond the nominal max size
    assert len(cache) == maxsize + 2
    assert cache.copy() == {str(k): k for k in range(maxsize + 2)}
|
||||
|
||||
def test_i_can_evict_when_get(self):
    """get() evicts the oldest committed entries once max_size is exceeded."""
    # CONSISTENCY FIX: use the `maxsize` variable instead of a second
    # hard-coded 5 in the constructor (and derive the expected dict from it).
    maxsize = 5
    cache = Cache(max_size=maxsize, default=lambda k: int(k))

    for key in range(maxsize + 2):
        cache.get(str(key))

    # the two oldest keys ("0" and "1") have been evicted
    assert len(cache) == maxsize
    assert cache.copy() == {str(k): k for k in range(2, maxsize + 2)}
|
||||
|
||||
def test_i_do_not_evict_when_items_are_not_committed(self):
|
||||
maxsize = 5
|
||||
cache = Cache(max_size=5, default=lambda k: k)
|
||||
|
||||
for key in range(maxsize + 2):
|
||||
cache.put(str(key), key)
|
||||
|
||||
assert len(cache) == maxsize + 2
|
||||
|
||||
cache.get("-1")
|
||||
assert len(cache) == maxsize + 2
|
||||
assert cache.copy() == {
|
||||
"0": 0,
|
||||
"1": 1,
|
||||
"2": 2,
|
||||
"3": 3,
|
||||
"4": 4,
|
||||
"5": 5,
|
||||
"6": 6,
|
||||
}
|
||||
|
||||
def test_i_can_get_a_value_from_alt_sdp(self):
|
||||
cache = Cache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value found !")
|
||||
assert cache.get("key", alt_sdp=alt_sdp) == "value found !"
|
||||
|
||||
# The value is now in cache
|
||||
assert cache.copy() == {'key': 'value found !'}
|
||||
|
||||
def test_i_cannot_get_a_value_from_alt_sdp_when_cache_is_cleared(self):
|
||||
cache = Cache(sdp=FakeSdp(get_value=lambda cache_name, key: NotFound)).auto_configure("cache_name")
|
||||
cache.clear()
|
||||
|
||||
alt_sdp = FakeSdp(get_alt_value=lambda cache_name, key: "value found !")
|
||||
assert cache.get("key", alt_sdp=alt_sdp) is NotFound
|
||||
assert cache.copy() == {}
|
||||
|
||||
def test_i_can_get_default_value_from_simple_cache(self):
|
||||
cache = Cache()
|
||||
assert cache.get("key") is NotFound
|
||||
|
||||
cache = Cache(default=10)
|
||||
assert cache.get("key") == 10
|
||||
assert "key" not in cache # default value are not put in cache
|
||||
|
||||
cache = Cache(default=lambda key: key + "_not_found")
|
||||
assert cache.get("key") == "key_not_found"
|
||||
assert "key" in cache # default callable are put in cache
|
||||
|
||||
cache = Cache(default=lambda sdp, key: sdp.get("cache_name", key),
|
||||
sdp=FakeSdp(get_value=lambda entry, key: key + "_not_found"))
|
||||
assert cache.get("key") == "key_not_found"
|
||||
assert "key" in cache # default callable are put in cache
|
||||
|
||||
def test_i_do_not_ask_the_remote_repository_twice(self):
|
||||
nb_request = []
|
||||
|
||||
cache = Cache(default=lambda key: nb_request.append("requested"))
|
||||
assert cache.get("key") is None
|
||||
assert cache.get("key") is None
|
||||
assert len(nb_request) == 1
|
||||
|
||||
def test_i_can_update_from_simple_cache(self):
|
||||
cache = Cache()
|
||||
cache.put("key", "value")
|
||||
cache.update("key", "value", "key", "new_value")
|
||||
|
||||
assert len(cache._cache) == 1
|
||||
assert len(cache) == 1
|
||||
assert cache.get("key") == "new_value"
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
cache.reset_events()
|
||||
cache.update("key", "new_value", "another_key", "another_value")
|
||||
assert len(cache._cache) == 1
|
||||
assert len(cache) == 1
|
||||
assert cache.get("key") is NotFound
|
||||
assert cache.get("another_key") == "another_value"
|
||||
assert cache.to_add == {"another_key"}
|
||||
assert cache.to_remove == {"key"}
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
cache.update("wrong key", "value", "key", "value")
|
||||
|
||||
def test_i_can_update_when_alt_sdp_same_keys(self):
|
||||
cache = Cache(default=lambda sdp, key: sdp.get("cache_name", key),
|
||||
extend_exists=lambda sdp, key: sdp.exists("cache_name", key),
|
||||
sdp=FakeSdp(get_value=lambda cache_name, key: NotFound))
|
||||
|
||||
cache.put("key", "value")
|
||||
cache.update("key", "value", "key", "new_value", FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
|
||||
assert cache.get("key") == "new_value"
|
||||
|
||||
def test_i_can_update_when_alt_sdp_different_keys(self):
|
||||
cache = Cache(default=lambda sdp, key: sdp.get("cache_name", key),
|
||||
extend_exists=lambda sdp, key: sdp.exists("cache_name", key),
|
||||
sdp=FakeSdp(get_value=lambda cache_name, key: NotFound))
|
||||
|
||||
cache.put("key", "value")
|
||||
cache.update("key", "value", "key2", "value2", FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
|
||||
assert cache.get("key2") == "value2"
|
||||
assert cache.get("key") == Removed
|
||||
assert cache.to_add == {"key", "key2"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
@pytest.mark.parametrize("cache", [
|
||||
Cache(), ListCache(), ListIfNeededCache(), SetCache(), IncCache()
|
||||
])
|
||||
def test_i_can_manage_cache_events(self, cache: BaseCache):
|
||||
cache.put("key", "value")
|
||||
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
cache.update("key", "value", "key", "another value")
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
cache.update("key", "another value", "key2", "value2")
|
||||
assert cache.to_add == {"key2"}
|
||||
assert cache.to_remove == {"key"}
|
||||
|
||||
cache.update("key2", "value2", "key", "value")
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == {"key2"}
|
||||
|
||||
@pytest.mark.parametrize("cache", [
|
||||
ListCache(), SetCache(), ListIfNeededCache()
|
||||
])
|
||||
def test_i_can_manage_list_and_set_cache_events(self, cache):
|
||||
cache.put("key", "value")
|
||||
cache.put("key", "value2")
|
||||
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
cache.update("key", "value", "key", "another value")
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
cache.update("key", "value2", "key2", "value2")
|
||||
assert cache.to_add == {"key", "key2"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
cache.update("key", "another value", "key3", "another value")
|
||||
assert cache.to_add == {"key2", "key3"}
|
||||
assert cache.to_remove == {"key"}
|
||||
|
||||
@pytest.mark.parametrize("cache", [
|
||||
Cache(), ListCache(), SetCache(), ListIfNeededCache(), IncCache()
|
||||
])
|
||||
def test_exists(self, cache):
|
||||
assert not cache.exists("key")
|
||||
cache.put("key", "value")
|
||||
assert cache.exists("key")
|
||||
|
||||
def test_exists_extend(self):
    """exists() falls back to the extend_exists delegate for unknown keys."""
    # IDIOM: `k == "special_key"` instead of `True if ... else False`
    cache = Cache(extend_exists=lambda k: k == "special_key")
    assert not cache.exists("key")
    assert cache.exists("special_key")
|
||||
|
||||
def test_i_can_extend_exists_when_internal_sdp(self):
    """The extend_exists delegate receives the cache's internal sdp."""
    # BUG FIX: FakeSdp was passed as a class (missing parentheses), not an
    # instance.  The assertions passed anyway because the delegate ignores
    # its sdp argument, but the cache should hold a real sdp instance.
    cache = Cache(extend_exists=lambda sdp, k: k == "special_key", sdp=FakeSdp())
    assert not cache.exists("key")
    assert cache.exists("special_key")
|
||||
|
||||
@pytest.mark.parametrize("cache, default, new_value, expected", [
|
||||
(ListCache(), lambda k: NotFound, "value", ["value"]),
|
||||
(ListCache(), lambda k: ["value"], "value", ["value", "value"]),
|
||||
(ListIfNeededCache(), lambda k: NotFound, "value", "value"),
|
||||
(ListIfNeededCache(), lambda k: "value", "value1", ["value", "value1"]),
|
||||
(ListIfNeededCache(), lambda k: ["value1", "value2"], "value1", ["value1", "value2", "value1"]),
|
||||
(SetCache(), lambda k: NotFound, "value", {"value"}),
|
||||
(SetCache(), lambda k: {"value"}, "value", {"value"}),
|
||||
(SetCache(), lambda k: {"value1"}, "value2", {"value1", "value2"}),
|
||||
])
|
||||
def test_default_is_called_before_put_to_keep_in_sync(self, cache, default, new_value, expected):
|
||||
cache.configure(default=default)
|
||||
cache.put("key", new_value)
|
||||
|
||||
assert cache.get("key") == expected
|
||||
|
||||
def test_default_is_called_before_updating_simple_cache(self):
|
||||
cache = Cache(default=lambda k: NotFound)
|
||||
with pytest.raises(KeyError):
|
||||
cache.update("old_key", "old_value", "new_key", "new_value")
|
||||
|
||||
cache = Cache(default=lambda k: "old_value")
|
||||
cache.update("old_key", "old_value", "new_key", "new_value")
|
||||
assert cache.get("new_key") == "new_value"
|
||||
|
||||
def test_i_can_delete_an_entry_from_cache(self):
|
||||
cache = Cache()
|
||||
cache.put("key", "value")
|
||||
|
||||
assert cache.get("key") == "value"
|
||||
cache.delete("key")
|
||||
assert cache.get("key") is NotFound
|
||||
assert cache.to_remove == {"key"}
|
||||
|
||||
def test_i_can_delete_when_entry_is_only_in_db(self):
|
||||
cache = Cache(default=lambda k: "value" if k == 'key' else NotFound)
|
||||
|
||||
cache.delete("another_key")
|
||||
assert cache.copy() == {}
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == set()
|
||||
|
||||
cache.delete("key")
|
||||
assert cache.copy() == {}
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == {"key"}
|
||||
|
||||
def test_i_can_delete_an_entry_from_cache_when_alt_sdp_and_value_in_cache(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# No remaining value in current cache after deletion
|
||||
# The key must be flagged as Removed
|
||||
cache = Cache(extend_exists=lambda sdp, k: sdp.exists("cache_name", k))
|
||||
cache.put("key", "value")
|
||||
|
||||
cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
assert cache.copy() == {"key": Removed}
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_delete_an_entry_from_cache_when_alt_sdp_when_in_remote_repository(self):
|
||||
# There is a value in alt_cache_manager,
|
||||
# No remaining value in current cache after deletion
|
||||
# The key must be flagged as Removed
|
||||
cache = Cache(default=lambda k: "value", extend_exists=lambda sdp, k: sdp.exists("cache_name", k))
|
||||
|
||||
cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
assert cache.copy() == {"key": Removed}
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_delete_an_entry_from_cache_when_alt_sdp_and_no_value_in_cache_or_remote_repository(self):
|
||||
# alt_cache_manager is used when no value found
|
||||
cache = Cache(default=lambda sdp, k: sdp.get("cache_name", k),
|
||||
extend_exists=lambda sdp, k: sdp.exists("cache_name", k),
|
||||
sdp=FakeSdp(get_value=lambda entry, k: NotFound))
|
||||
|
||||
cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: True))
|
||||
assert cache.copy() == {"key": Removed}
|
||||
assert cache.to_add == {"key"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_no_error_when_deleting_a_key_that_does_not_exists_when_alt_sdp(self):
|
||||
# alt_cache_manager is used when no value found
|
||||
cache = Cache(default=lambda sdp, k: sdp.get("cache_name", k),
|
||||
extend_exists=lambda sdp, k: sdp.exists("cache_name", k),
|
||||
sdp=FakeSdp(get_value=lambda entry, k: NotFound))
|
||||
|
||||
cache.delete("key", value=None, alt_sdp=FakeSdp(extend_exists=lambda cache_name, key: False))
|
||||
assert cache.copy() == {}
|
||||
assert cache.to_add == set()
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_initialized_key_is_removed_when_the_entry_is_found(self):
|
||||
caches = [Cache(), ListCache(), ListIfNeededCache(), SetCache()]
|
||||
|
||||
for cache in caches:
|
||||
cache.put("key", "value")
|
||||
cache.get("key")
|
||||
|
||||
assert len(cache._initialized_keys) == 0
|
||||
|
||||
cache = IncCache()
|
||||
cache.put("key", 10)
|
||||
cache.get("key")
|
||||
assert len(cache._initialized_keys) == 0
|
||||
|
||||
def test_initialized_keys_are_reset_when_max_length_is_reached(self):
|
||||
cache = Cache()
|
||||
for i in range(MAX_INITIALIZED_KEY):
|
||||
cache.get(str(i))
|
||||
|
||||
assert len(cache._initialized_keys) == MAX_INITIALIZED_KEY
|
||||
|
||||
cache.get(str(MAX_INITIALIZED_KEY + 1))
|
||||
assert len(cache._initialized_keys) == 1
|
||||
|
||||
def test_i_can_populate(self):
|
||||
items = [("1", "1"), ("2", "2"), ("3", "3")]
|
||||
cache = Cache()
|
||||
|
||||
cache.populate(lambda: items, lambda item: item[0])
|
||||
|
||||
assert len(cache) == 3
|
||||
assert cache.get("1") == ("1", "1")
|
||||
assert cache.get("2") == ("2", "2")
|
||||
assert cache.get("3") == ("3", "3")
|
||||
|
||||
assert cache.to_add == {"1", "2", "3"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_populate_using_internal_sdp(self):
|
||||
items = [("1", "1"), ("2", "2"), ("3", "3")]
|
||||
cache = Cache(sdp=FakeSdp(populate=items))
|
||||
|
||||
cache.populate(lambda sdp: sdp.populate(), lambda item: item[0])
|
||||
|
||||
assert len(cache) == 3
|
||||
assert cache.get("1") == ("1", "1")
|
||||
assert cache.get("2") == ("2", "2")
|
||||
assert cache.get("3") == ("3", "3")
|
||||
|
||||
assert cache.to_add == {"1", "2", "3"}
|
||||
assert cache.to_remove == set()
|
||||
|
||||
def test_i_can_reset_the_event_after_populate(self):
|
||||
items = [("1", "1"), ("2", "2"), ("3", "3")]
|
||||
cache = Cache()
|
||||
cache.to_add = {"some_value"}
|
||||
cache.to_remove = {"some_other_value"}
|
||||
|
||||
cache.populate(lambda: items, lambda item: item[0], reset_events=True)
|
||||
|
||||
assert len(cache) == 3
|
||||
assert cache.copy() == {"1": ("1", "1"),
|
||||
"2": ("2", "2"),
|
||||
"3": ("3", "3")}
|
||||
assert cache.to_add == {"some_value"}
|
||||
assert cache.to_remove == {"some_other_value"}
|
||||
|
||||
def test_i_can_get_all(self):
|
||||
items = [("1", "1"), ("2", "2"), ("3", "3")]
|
||||
cache = Cache()
|
||||
|
||||
cache.populate(lambda: items, lambda item: item[0])
|
||||
|
||||
res = cache.get_all()
|
||||
assert len(res) == 3
|
||||
assert list(res) == [('1', '1'), ('2', '2'), ('3', '3')]
|
||||
|
||||
def test_i_can_clone_cache(self):
|
||||
cache = Cache(max_size=256,
|
||||
default=lambda sdp, key: sdp.get("cache_name", key),
|
||||
extend_exists=False,
|
||||
alt_sdp_get=lambda sdp, key: sdp.alt_get("cache_name", key),
|
||||
sdp=FakeSdp(get_value=lambda entry, key: key + "_not_found"))
|
||||
cache.put("key1", "value1")
|
||||
cache.put("key2", "value2")
|
||||
|
||||
clone = cache.clone()
|
||||
assert type(cache) == type(clone)
|
||||
assert clone._max_size == cache._max_size
|
||||
assert clone._default == cache._default
|
||||
assert clone._extend_exists == cache._extend_exists
|
||||
assert clone._alt_sdp_get == cache._alt_sdp_get
|
||||
assert clone._sdp == cache._sdp
|
||||
assert clone._cache == {} # value are not copied
|
||||
assert clone._initialized_keys == set()
|
||||
assert clone._current_size == 0
|
||||
assert clone.to_add == set()
|
||||
assert clone.to_remove == set()
|
||||
|
||||
clone.configure(sdp=FakeSdp(lambda entry, key: key + " found !"))
|
||||
|
||||
assert cache.get("key3") == "key3_not_found"
|
||||
assert clone.get("key3") == "key3 found !"
|
||||
|
||||
@pytest.mark.parametrize("cache", [
|
||||
Cache(),
|
||||
DictionaryCache(),
|
||||
IncCache(),
|
||||
ListCache(),
|
||||
ListIfNeededCache()
|
||||
])
|
||||
def test_i_can_clone_all_caches(self, cache):
|
||||
clone = cache.clone()
|
||||
assert type(clone) == type(cache)
|
||||
|
||||
def test_sanity_check_on_list_if_needed_cache(self):
|
||||
cache = ListIfNeededCache()
|
||||
clone = cache.clone()
|
||||
|
||||
clone.put("key", "value1")
|
||||
clone.put("key", "value2")
|
||||
|
||||
assert clone.get("key") == ["value1", "value2"]
|
||||
|
||||
def test_i_can_clear_when_alt_sdp(self):
|
||||
cache = Cache().auto_configure("cache_name")
|
||||
|
||||
cache.put("key1", "value1")
|
||||
cache.put("key2", "value2")
|
||||
|
||||
cache.clear()
|
||||
|
||||
assert cache.copy() == {}
|
||||
assert cache._is_cleared
|
||||
|
||||
def test_i_can_iter_on_the_content(self):
|
||||
cache = Cache()
|
||||
cache.put("key1", "value1")
|
||||
cache.put("key2", "value2")
|
||||
cache.put("key3", "value3")
|
||||
|
||||
res = []
|
||||
for k in cache:
|
||||
assert k in cache
|
||||
res.append(k)
|
||||
|
||||
assert res == ["key1", "key2", "key3"]
|
||||
@@ -0,0 +1,122 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
import pytest
|
||||
|
||||
from common.utils import decode_enum, get_class, to_dict, str_concept, unstr_concept
|
||||
from helpers import get_concept
|
||||
from parsers.tokenizer import Keywords, Token, TokenKind
|
||||
|
||||
|
||||
@dataclass
class Obj:
    """Two-field value object used as test fixture data.

    Equality compares both fields; the hash is consistent with equality
    (equal objects hash equally).
    """
    prop1: str
    prop2: str

    def __hash__(self):
        # BUG FIX: previously hashed (prop1, prop1) — a typo that ignored
        # prop2 entirely.  Hash both fields so the hash mirrors __eq__.
        return hash((self.prop1, self.prop2))

    def __eq__(self, other):
        """Two Obj instances are equal iff both properties match."""
        if not isinstance(other, Obj):
            return False

        return self.prop1 == other.prop1 and self.prop2 == other.prop2
|
||||
|
||||
|
||||
@dataclass
class Obj2:
    """Loosely-typed two-field record used as test fixture data."""
    prop1: object
    prop2: object
|
||||
|
||||
|
||||
def get_tokens(lst):
    """Build a list of Token objects from a list of raw strings.

    Mapping: " " -> WHITESPACE, "\\n" -> NEWLINE, "<EOF>" -> EOF,
    anything else -> IDENTIFIER.  Line/column/offset positions are all
    zeroed; the tests only compare kind and text.
    """
    res = []
    for e in lst:
        if e == " ":
            res.append(Token(TokenKind.WHITESPACE, " ", 0, 0, 0))
        elif e == "\n":
            res.append(Token(TokenKind.NEWLINE, "\n", 0, 0, 0))
        elif e == "<EOF>":
            # NOTE(review): the EOF token carries "\n" as its text — looks
            # copy-pasted from the NEWLINE branch; confirm this is intended.
            res.append(Token(TokenKind.EOF, "\n", 0, 0, 0))
        else:
            res.append(Token(TokenKind.IDENTIFIER, e, 0, 0, 0))

    return res
|
||||
|
||||
|
||||
def test_i_can_get_class():
|
||||
# example of classes that should be in the result
|
||||
create_parser_input = get_class("evaluators.CreateParserInput.CreateParserInput")
|
||||
|
||||
assert isinstance(create_parser_input, type)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected_key, expected_id", [
|
||||
(None, None, None),
|
||||
(10, None, None),
|
||||
("", None, None),
|
||||
("xxx", None, None),
|
||||
("c:", None, None),
|
||||
("c:key", None, None),
|
||||
("c:key:", "key", None),
|
||||
("c:key#id", None, None),
|
||||
("c:key#id:", "key", "id"),
|
||||
("c:#id:", None, "id"),
|
||||
("c:key#:", "key", None),
|
||||
("c:key#id:x", None, None),
|
||||
("c:one: plus c:two:", None, None),
|
||||
("c:one#id: plus c:two:", None, None),
|
||||
])
|
||||
def test_i_can_unstr_concept(text, expected_key, expected_id):
|
||||
k, i = unstr_concept(text)
|
||||
assert k == expected_key
|
||||
assert i == expected_id
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected_key, expected_id", [
|
||||
("r:key:", "key", None),
|
||||
("r:key#id:", "key", "id"),
|
||||
])
|
||||
def test_i_can_unstr_concept_rules(text, expected_key, expected_id):
|
||||
k, i = unstr_concept(text, prefix="r:")
|
||||
assert k == expected_key
|
||||
assert i == expected_id
|
||||
|
||||
|
||||
def test_i_can_str_concept():
    """str_concept renders a (name, id) pair — or a concept — as 'c:name#id:'."""
    assert str_concept(("key", "id")) == "c:key#id:"
    assert str_concept((None, "id")) == "c:#id:"
    assert str_concept(("key", None)) == "c:key:"
    assert str_concept((None, None)) == ""  # nothing to render
    assert str_concept(("key", "id"), drop_name=True) == "c:#id:"

    # a real concept object is accepted too
    concept = get_concept("foo")
    assert str_concept(concept) == "c:foo:"

    # once the concept has an id, it is included after '#'
    concept.get_metadata().id = "1001"
    assert str_concept(concept) == "c:foo#1001:"
    assert str_concept(concept, drop_name=True) == "c:#1001:"

    # the prefix is configurable (rules use 'r:')
    assert str_concept(("key", "id"), prefix='r:') == "r:key#id:"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected", [
|
||||
(None, None),
|
||||
(10, None),
|
||||
("", None),
|
||||
("xxx", None),
|
||||
("xxx.", None),
|
||||
("xxx.yyy", None),
|
||||
("parsers.tokenizer.Keywords.CONCEPT", Keywords.CONCEPT),
|
||||
])
|
||||
def test_i_can_decode_enum(text, expected):
|
||||
actual = decode_enum(text)
|
||||
assert actual == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("items, expected", [
|
||||
([], {}),
|
||||
([Obj("a", "1"), Obj("a", "2"), Obj("b", "3")], {"a": [Obj("a", "1"), Obj("a", "2")],
|
||||
"b": [Obj("b", "3")]}),
|
||||
])
|
||||
def test_i_can_to_dict(items, expected):
|
||||
assert to_dict(items, lambda obj: obj.prop1) == expected
|
||||
@@ -0,0 +1,84 @@
|
||||
import pytest
|
||||
|
||||
from helpers import GetNextId
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def sheerka():
|
||||
from core.Sheerka import Sheerka
|
||||
|
||||
sheerka = Sheerka()
|
||||
sheerka.initialize("mem://")
|
||||
return sheerka
|
||||
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def on_new_module(sheerka, request):
|
||||
"""
|
||||
For each new module, make sure to create a new ontology
|
||||
Remove it at the end of the module
|
||||
:param sheerka:
|
||||
:type sheerka:
|
||||
:param request:
|
||||
:type request:
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
from core.Event import Event
|
||||
from core.ExecutionContext import ExecutionContext, ExecutionContextActions
|
||||
module_name = request.module.__name__.split(".")[-1]
|
||||
context = ExecutionContext("test",
|
||||
Event(message=f"Executing module {module_name}"),
|
||||
sheerka,
|
||||
ExecutionContextActions.TESTING,
|
||||
None)
|
||||
|
||||
ontology = sheerka.om.push_ontology(module_name)
|
||||
yield
|
||||
sheerka.om.revert_ontology(context, ontology)
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def context(sheerka):
|
||||
from core.Event import Event
|
||||
from core.ExecutionContext import ExecutionContext, ExecutionContextActions
|
||||
|
||||
return ExecutionContext("test",
|
||||
Event(message=""),
|
||||
sheerka,
|
||||
ExecutionContextActions.TESTING,
|
||||
None)
|
||||
|
||||
|
||||
@pytest.fixture()
def next_id():
    """A fresh GetNextId sequence generator per test."""
    return GetNextId()
|
||||
|
||||
|
||||
class TestUsingFileBasedSheerka:
    @pytest.fixture(scope="class")
    def sheerka(self):
        """Class-scoped Sheerka instance using the default (file-based) storage.

        Overrides the session-scoped in-memory ``sheerka`` fixture.
        """
        # BUG FIX: Sheerka was referenced without being imported anywhere in
        # this module; import it locally, like the session-scoped fixture does.
        from core.Sheerka import Sheerka

        sheerka = Sheerka()
        sheerka.initialize()
        return sheerka
|
||||
|
||||
|
||||
class NewOntology:
    """
    Context manager that pushes a throw-away ontology for the duration of a
    test and reverts it on exit, for tests that need to re-declare the same
    concepts across test functions.
    """
    # NOTE(review): imported at class level rather than module level —
    # presumably to defer the import; confirm before moving it to the top.
    from core.ExecutionContext import ExecutionContext

    def __init__(self, context: ExecutionContext, name="current_test"):
        # the Sheerka instance is taken from the execution context
        self.sheerka = context.sheerka
        self.context = context
        self.name = name
        self.ontology = None  # assigned on __enter__

    def __enter__(self):
        # push a fresh ontology under the configured name
        self.ontology = self.sheerka.om.push_ontology(self.name)
        return self.ontology

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.sheerka.om.revert_ontology(self.context, self.ontology)
        # False: do not swallow exceptions raised inside the with-block
        return False
|
||||
@@ -0,0 +1,103 @@
|
||||
from common.global_symbols import NotFound, NotInit
|
||||
from core.concept import ConceptDefaultProps
|
||||
from helpers import GetNextId, get_concept
|
||||
|
||||
|
||||
def test_i_can_retrieve_concept_properties():
|
||||
foo = get_concept("a plus b", "a + b", variables=("a", "b"), id="1001")
|
||||
|
||||
assert foo.name == "a plus b"
|
||||
assert foo.id == "1001"
|
||||
assert foo.str_id == "c:#1001:"
|
||||
assert foo.all_attrs() == ('#where#', '#pre#', '#post#', '#body#', '#ret#', 'a', 'b')
|
||||
assert foo.get_definition_digest() == "3a2cfcda8ffd0d99a7f8c7d2f1ffc4a99fc96162f3be7b9875f30751d3691af6"
|
||||
|
||||
# sanity check to make sure that 'get_concept' works as expected
|
||||
assert foo.get_metadata().variables == (("a", NotInit), ("b", NotInit))
|
||||
|
||||
|
||||
def test_i_can_set_and_get_value():
|
||||
foo = get_concept("foo", variables=["a"])
|
||||
foo.set_value("a", "some value")
|
||||
assert foo.get_value("a") == "some value"
|
||||
|
||||
|
||||
def test_i_can_set_and_get_value_from_bound_attr():
|
||||
foo = get_concept("foo", variables=["a"], bound_body="a")
|
||||
|
||||
foo.set_value("a", "some value")
|
||||
assert foo.get_value(ConceptDefaultProps.BODY) == "some value"
|
||||
|
||||
foo.set_value(ConceptDefaultProps.BODY, "another value")
|
||||
assert foo.get_value("a") == "another value"
|
||||
|
||||
|
||||
def test_i_can_test_concept_equality():
|
||||
foo1 = get_concept("foo", "a + b", variables=["a", "b"], id=5)
|
||||
foo2 = get_concept("foo", "a + b", variables=["a", "b"], id=6)
|
||||
foo1.set_value("a", 10).set_value("b", 20)
|
||||
foo2.set_value("a", 10).set_value("b", 20)
|
||||
|
||||
assert foo1 == foo2
|
||||
|
||||
|
||||
def test_i_can_detect_when_concepts_are_not_equal():
|
||||
foo1 = get_concept("foo", "a + b", variables=["a", "b"], id=5)
|
||||
foo2 = get_concept("foo", "a + b", variables=["a", "b"], id=6)
|
||||
foo1.set_value("a", 10).set_value("b", 20)
|
||||
foo2.set_value("a", 10).set_value("b", 25)
|
||||
|
||||
assert foo1 != foo2
|
||||
|
||||
|
||||
def test_i_can_test_concept_equality_in_case_of_infinite_recursion():
|
||||
foo1 = get_concept("foo", "a + b", variables=["a"], id=5)
|
||||
foo2 = get_concept("foo", "a + b", variables=["a"], id=6)
|
||||
|
||||
# foo1 and foo2 are equals
|
||||
assert foo1 == foo2
|
||||
|
||||
foo1.set_value("a", foo1)
|
||||
foo2.set_value("a", foo2)
|
||||
assert foo1 == foo2
|
||||
|
||||
foo1.set_value("a", foo2)
|
||||
foo2.set_value("a", foo1)
|
||||
assert foo1 == foo2
|
||||
|
||||
|
||||
def test_i_can_test_concept_equality_in_case_of_infinite_recursion_with_more_than_two_concepts():
|
||||
foo1 = get_concept("foo", "a + b", variables=["a"], id=5)
|
||||
foo2 = get_concept("foo", "a + b", variables=["a"], id=6)
|
||||
foo3 = get_concept("foo", "a + b", variables=["a"], id=7)
|
||||
|
||||
foo1.set_value("a", foo2)
|
||||
foo2.set_value("a", foo3)
|
||||
foo3.set_value("a", foo1)
|
||||
assert foo1 == foo2
|
||||
|
||||
foo1.set_value("a", foo2)
|
||||
foo2.set_value("a", foo3)
|
||||
foo3.set_value("a", foo3)
|
||||
assert foo1 == foo2
|
||||
|
||||
|
||||
def test_i_cannot_get_an_attribute_which_is_not_defined():
|
||||
next_id = GetNextId()
|
||||
foo = get_concept("add a b", definition="add", variables=["a", "b"], sequence=next_id)
|
||||
|
||||
assert foo.get_value("a") is NotInit
|
||||
assert foo.get_value("b") is NotInit
|
||||
assert foo.get_value("c") is NotFound
|
||||
|
||||
|
||||
def test_i_can_repr_a_concept():
    """repr shows '(sequence_id)name' plus any non-default attributes."""
    sequence = GetNextId()

    # bare concept: just "(id)name"
    assert repr(get_concept("foo", sequence=sequence)) == "(1001)foo"

    # a #pre attribute is appended after the name
    bar = get_concept("bar", pre="is an int", sequence=sequence)
    assert repr(bar) == "(1002)bar, #pre=is an int"

    # uninitialized variables are rendered as **NotInit**
    baz = get_concept("baz", definition="add a b", variables=["a", "b"], sequence=sequence)
    assert repr(baz) == "(1003)baz, a=**NotInit**, b=**NotInit**"
|
||||
@@ -0,0 +1,151 @@
|
||||
from core.Event import Event
|
||||
from core.ExecutionContext import ExecutionContext, ExecutionContextActions
|
||||
|
||||
|
||||
def test_i_can_create_execution_context(sheerka):
|
||||
event = Event("myEvent", "fake_userid")
|
||||
context1 = ExecutionContext("who", event, sheerka, ExecutionContextActions.TESTING, "value1", "my desc")
|
||||
|
||||
assert context1.who == "who"
|
||||
assert context1.event == event
|
||||
assert context1.sheerka == sheerka
|
||||
assert context1.action == ExecutionContextActions.TESTING
|
||||
assert context1.action_context == "value1"
|
||||
assert context1.desc == "my desc"
|
||||
assert context1.id == 0
|
||||
assert context1.long_id == f"{event.get_digest()}:{context1.id}"
|
||||
|
||||
|
||||
def test_i_can_push(sheerka):
    """push() yields a child context carrying the pushed arguments and the next id."""
    event = Event("test")
    parent = ExecutionContext("who", event, sheerka, ExecutionContextActions.TESTING, "value")
    with parent.push("pusher", ExecutionContextActions.PARSING, "action_context", "my desc") as child:
        assert child.who == "pusher"
        # Event and Sheerka are inherited from the parent context.
        assert child.event == event
        assert child.sheerka == sheerka
        assert child.action == ExecutionContextActions.PARSING
        assert child.action_context == "action_context"
        assert child.desc == "my desc"
        assert child.id == parent.id + 1
def test_i_can_increment_ids(sheerka):
    """Context ids are scoped to the event: same event increments, a new event restarts at 0."""
    event = Event("TEST::myEvent", "fake_userid")
    root = ExecutionContext("who", event, sheerka, ExecutionContextActions.TESTING, "value")
    child = root.push("who1", ExecutionContextActions.TESTING, "value1")
    grandchild = child.push("who2", ExecutionContextActions.TESTING, "value2")
    sibling = root.push("who1", ExecutionContextActions.TESTING, "value3")
    standalone = ExecutionContext("who", event, sheerka, ExecutionContextActions.TESTING, "value4")

    # Creation order drives the id, regardless of tree position.
    for expected_id, ctx in enumerate((root, child, grandchild, sibling, standalone)):
        assert ctx.id == expected_id

    # A different event gets its own counter, starting back at zero.
    other_event = Event("TEST::myEvent2", "fake_userid")
    fresh = ExecutionContext("who", other_event, sheerka, ExecutionContextActions.TESTING, "value")
    assert fresh.id == 0
def test_i_can_manage_global_hints(context):
    """Global hints are shared by every context of the tree, wherever they are added."""
    ctx2 = context.push("pusher", ExecutionContextActions.TESTING, None)
    ctx3 = ctx2.push("pusher", ExecutionContextActions.TESTING, None)
    ctx4 = ctx3.push("pusher", ExecutionContextActions.TESTING, None)
    ctx5 = context.push("pusher", ExecutionContextActions.TESTING, None)
    whole_tree = (context, ctx2, ctx3, ctx4, ctx5)

    # Added at the root: visible everywhere.
    context.global_hints.add("new_hint")
    for ctx in whole_tree:
        assert ctx.global_hints == {"new_hint"}

    # Added from a deep child: equally visible everywhere.
    ctx4.global_hints.add("another_hint")
    for ctx in whole_tree:
        assert ctx.global_hints == {"new_hint", "another_hint"}
||||
def test_i_can_manage_protected_hint(context):
    """Protected hints flow only to children created AFTER the hint was added, never upward."""
    # Note that protected hint only works if the hint is added BEFORE the creation of the child
    context.protected_hints.add("new_hint")
    ctx2 = context.push("pusher", ExecutionContextActions.TESTING, None)
    ctx3 = ctx2.push("pusher", ExecutionContextActions.TESTING, None)
    ctx3.protected_hints.add("another_hint")
    ctx4 = ctx3.push("pusher", ExecutionContextActions.TESTING, None)
    ctx5 = context.push("pusher", ExecutionContextActions.TESTING, None)

    expectations = (
        (context, {"new_hint"}),
        (ctx2, {"new_hint"}),
        (ctx3, {"new_hint", "another_hint"}),  # added locally
        (ctx4, {"new_hint", "another_hint"}),  # created after the local add
        (ctx5, {"new_hint"}),                  # sibling branch never sees "another_hint"
    )
    for ctx, expected in expectations:
        assert ctx.protected_hints == expected
def test_i_can_manage_private_hints(context):
    """Private hints stay strictly on the context where they were added."""
    context.private_hints.add("new_hint")
    ctx2 = context.push("pusher", ExecutionContextActions.TESTING, None)
    ctx3 = ctx2.push("pusher", ExecutionContextActions.TESTING, None)
    ctx3.private_hints.add("another_hint")
    ctx4 = ctx3.push("pusher", ExecutionContextActions.TESTING, None)
    ctx5 = context.push("pusher", ExecutionContextActions.TESTING, None)

    expectations = (
        (context, {"new_hint"}),
        (ctx2, set()),
        (ctx3, {"another_hint"}),
        (ctx4, set()),
        (ctx5, set()),
    )
    for ctx, expected in expectations:
        assert ctx.private_hints == expected
def test_i_can_keep_track_of_children(context):
    """Each context records the sub-contexts pushed directly from it."""
    ctx2 = context.push("pusher", ExecutionContextActions.TESTING, None)
    ctx3 = context.push("pusher", ExecutionContextActions.TESTING, None)
    ctx4 = ctx2.push("pusher2", ExecutionContextActions.TESTING, None)

    # Only DIRECT children are counted, not the whole sub-tree.
    expected_counts = ((context, 2), (ctx2, 1), (ctx3, 0), (ctx4, 0))
    for ctx, count in expected_counts:
        assert len(ctx._children) == count
def test_i_can_get_children(context):
    """get_children walks the sub-tree depth-first; `level` bounds the traversal depth."""
    child1 = context.push("child 1", ExecutionContextActions.TESTING, None)
    child2 = context.push("child 2", ExecutionContextActions.TESTING, None)
    child3 = context.push("child 3", ExecutionContextActions.TESTING, None)
    child21 = child2.push("child 21", ExecutionContextActions.TESTING, None)
    child22 = child2.push("child 22", ExecutionContextActions.TESTING, None)
    child211 = child21.push("child 211", ExecutionContextActions.TESTING, None)
    child31 = child3.push("child 31", ExecutionContextActions.TESTING, None)

    # A leaf has no children.
    assert list(child1.get_children()) == []

    # Unbounded traversal is depth-first, in creation order per level.
    assert list(context.get_children()) == [
        child1,
        child2,
        child21,
        child211,
        child22,
        child3,
        child31,
    ]

    # level=1 stops at direct children.
    assert list(context.get_children(level=1)) == [child1, child2, child3]

    # level=2 includes grandchildren but not deeper nodes.
    assert list(context.get_children(level=2)) == [
        child1,
        child2,
        child21,
        child22,
        child3,
        child31,
    ]
@@ -0,0 +1,36 @@
|
||||
from os import path
|
||||
|
||||
from base import UsingFileBasedSheerka
|
||||
from helpers import get_concept, get_concepts, get_file_content
|
||||
|
||||
|
||||
class TestSheerka(UsingFileBasedSheerka):
    """Integration tests that run against a file-backed Sheerka instance."""

    def test_i_can_initialize_sheerka(self, sheerka_fb):
        """Initialization creates the on-disk layout (root dir, LAST_EVENT file, event
        folders) and registers at least one service and one evaluator."""
        sheerka = sheerka_fb
        assert path.exists(self.SHEERKA_ROOT_DIR)

        # LAST_EVENT holds the digest of the most recently created event.
        last_event_path = path.join(self.SHEERKA_ROOT_DIR, "LAST_EVENT")
        assert path.exists(last_event_path)

        # Event data is sharded under events/<first-24-chars-of-digest>/<digest>,
        # with a companion "<digest>_admin_context" folder next to it.
        last_event_digest = get_file_content(last_event_path)
        last_event_folder = path.join(self.SHEERKA_ROOT_DIR, "events", last_event_digest[:24], last_event_digest)
        assert path.exists(last_event_folder)
        assert path.exists(last_event_folder + "_admin_context")

        assert len(sheerka.services) > 0
        assert len(sheerka.evaluators) > 0

        # TODO: add test to validate that we can access bind methods

    def test_i_can_use_isinstance(self, sheerka, context):
        """Sheerka.isinstance accepts a key, a str_id, a Concept, or a ConceptMetadata
        as its second argument, and rejects all four forms for a different concept."""
        foo, bar = get_concepts(context, "foo", "bar", use_sheerka=True)
        assert sheerka.isinstance(foo, foo.key)
        assert sheerka.isinstance(foo, foo.str_id)
        assert sheerka.isinstance(foo, foo)
        assert sheerka.isinstance(foo, foo.get_metadata())

        assert not sheerka.isinstance(foo, bar.key)
        assert not sheerka.isinstance(foo, bar.str_id)
        assert not sheerka.isinstance(foo, bar)
        assert not sheerka.isinstance(foo, bar.get_metadata())
@@ -0,0 +1,23 @@
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from core.BuiltinConcepts import BuiltinConcepts
|
||||
from evaluators.CreateParserInput import CreateParserInput
|
||||
from helpers import _rv, _rvf
|
||||
|
||||
|
||||
class TestCreateParserInput(BaseTest):
    """Tests for the CreateParserInput evaluator's matching rules."""

    @pytest.fixture()
    def evaluator(self, sheerka):
        # Pull the registered evaluator instance out of the live Sheerka.
        return sheerka.evaluators[CreateParserInput.NAME]

    def test_i_can_match(self, sheerka, context, evaluator):
        """The evaluator matches only SUCCESSFUL ReturnValues wrapping a USER_INPUT concept."""
        ret_val = _rv(sheerka.newn(BuiltinConcepts.USER_INPUT, command="hello sheerka"))
        assert evaluator.matches(context, ret_val).status is True

        ret_val = _rv(sheerka.newn(BuiltinConcepts.UNKNOWN_CONCEPT))  # it responds to USER_INPUT only
        assert evaluator.matches(context, ret_val).status is False

        # Right concept but failed ReturnValue: must not match either.
        ret_val = _rvf(sheerka.newn(BuiltinConcepts.USER_INPUT, command="hello sheerka"))  # status should be true
        assert evaluator.matches(context, ret_val).status is False
@@ -0,0 +1,377 @@
|
||||
from common.global_symbols import NotInit
|
||||
from core.ExecutionContext import ExecutionContext
|
||||
from core.ReturnValue import ReturnValue
|
||||
from core.concept import Concept, ConceptMetadata, DefinitionType
|
||||
from core.services.SheerkaConceptManager import ConceptManager
|
||||
|
||||
|
||||
class GetNextId:
    """Monotonic id generator for tests; the first call to next() returns 1001."""

    def __init__(self):
        # Last value handed out; starts just below the first id.
        self.seq = 1000

    def next(self):
        """Advance the counter and return the new id."""
        value = self.seq + 1
        self.seq = value
        return value
def get_concept(name=None, body=None,
                id=None,
                key=None,
                where=None,
                pre=None,
                post=None,
                ret=None,
                definition=None,
                definition_type=None,
                desc=None,
                props=None,
                variables=None,
                parameters=None,
                bound_body=None,
                is_builtin=False,
                is_unique=False,
                autouse=False,
                sequence=None) -> Concept:
    """
    Test helper: create a Concept object around a freshly built ConceptMetadata.

    All keyword arguments except `sequence` are forwarded to `get_metadata`.

    When `sequence` (an id generator exposing `next()`, e.g. GetNextId) is
    given, the metadata is fully initialized via `auto_init`: id, key, digest
    and all_attrs are all filled in.

    Caution: without `sequence`, only `digest` and `all_attrs` are computed;
    `id` and `key` are left exactly as passed (possibly None).

    :param sequence: optional id generator used to auto-initialize the metadata
    :return: a Concept wrapping the new metadata
    """
    metadata = get_metadata(
        name, body,
        id,
        key,
        where,
        pre,
        post,
        ret,
        definition,
        definition_type,
        desc,
        props,
        variables,
        parameters,
        bound_body,
        is_builtin,
        is_unique,
        autouse
    )
    if sequence:
        metadata.auto_init(sequence)
    else:
        metadata.digest = ConceptManager.compute_metadata_digest(metadata)
        metadata.all_attrs = ConceptManager.compute_all_attrs(metadata.variables)
    return Concept(metadata)
||||
def get_metadata(name=None, body=None,
                 id=None,
                 key=None,
                 where=None,
                 pre=None,
                 post=None,
                 ret=None,
                 definition=None,
                 definition_type=DefinitionType.DEFAULT,
                 desc=None,
                 props=None,
                 variables=None,
                 parameters=None,
                 bound_body=None,
                 is_builtin=False,
                 is_unique=False,
                 autouse=False,
                 digest=None,
                 all_attrs=None):
    """
    Test helper: build a ConceptMetadata from loose keyword arguments.

    Every entry of `variables` is normalized to a (name, initial_value) pair:
    a bare name gets NotInit as its initial value, a tuple is kept as-is.
    """
    normalized_variables = tuple(
        v if isinstance(v, tuple) else (v, NotInit)
        for v in (variables or ())
    )

    # Positional order mirrors the ConceptMetadata constructor.
    return ConceptMetadata(
        id,
        name,
        key,
        is_builtin,
        is_unique,
        body,
        where,
        pre,
        post,
        ret,
        definition,
        definition_type,
        desc,
        autouse,
        bound_body,
        props or {},
        normalized_variables,
        parameters or [],
        digest,
        all_attrs,
    )
||||
def metadata_auto_init(self: ConceptMetadata, sequence) -> ConceptMetadata:
    """
    Helper function for the unit tests, installed on ConceptMetadata as `auto_init`.

    It properly initializes the ConceptMetadata, filling in only the attributes
    that are still unset (falsy):

    * id              -- next value from `sequence`, stringified
    * key             -- derived from name/definition/variables
    * is_unique / is_builtin -- normalized to False when falsy (e.g. None)
    * definition_type -- defaults to DefinitionType.DEFAULT
    * all_attrs / digest     -- computed once the rest is settled

    :param self: the metadata to initialize in place
    :param sequence: id generator exposing `next()`
    :return: self, for chaining
    """
    if not self.id:
        self.id = str(sequence.next())
    if not self.key:
        self.key = ConceptManager.create_concept_key(self.name, self.definition, self.variables)
    if not self.is_unique:
        self.is_unique = False
    if not self.is_builtin:
        self.is_builtin = False
    if not self.definition_type:
        self.definition_type = DefinitionType.DEFAULT
    # Derived attributes are computed last, over the otherwise-complete metadata.
    if not self.all_attrs:
        self.all_attrs = ConceptManager.compute_all_attrs(self.variables)
    if not self.digest:
        self.digest = ConceptManager.compute_metadata_digest(self)

    # Note that I do not automatically update the digest as I don't want to make unnecessary computations

    return self
||||
def metadata_clone(self: ConceptMetadata, name=None, body=None,
                   key=None,
                   where=None,
                   pre=None,
                   post=None,
                   ret=None,
                   definition=None,
                   definition_type=None,
                   desc=None,
                   props=None,
                   variables=None,
                   parameters=None,
                   bound_body=None,
                   is_builtin=None,
                   is_unique=None,
                   autouse=None,
                   digest=None,
                   all_attrs=None) -> ConceptMetadata:
    """
    Helper function for the unit tests, installed on ConceptMetadata as `clone`.

    It clones a ConceptMetadata; any keyword passed with a non-None value
    overrides the corresponding attribute in the copy.  The `id` is always
    carried over unchanged.

    NOTE: since None means "keep the original value", an attribute cannot be
    overridden back to None through this helper.

    :return: a new ConceptMetadata with the merged attributes
    """
    return ConceptMetadata(
        id=self.id,
        name=self.name if name is None else name,
        body=self.body if body is None else body,
        key=self.key if key is None else key,
        where=self.where if where is None else where,
        pre=self.pre if pre is None else pre,
        post=self.post if post is None else post,
        ret=self.ret if ret is None else ret,
        definition=self.definition if definition is None else definition,
        definition_type=self.definition_type if definition_type is None else definition_type,
        desc=self.desc if desc is None else desc,
        props=self.props if props is None else props,
        variables=self.variables if variables is None else variables,
        parameters=self.parameters if parameters is None else parameters,
        bound_body=self.bound_body if bound_body is None else bound_body,
        is_builtin=self.is_builtin if is_builtin is None else is_builtin,
        is_unique=self.is_unique if is_unique is None else is_unique,
        autouse=self.autouse if autouse is None else autouse,
        digest=self.digest if digest is None else digest,
        all_attrs=self.all_attrs if all_attrs is None else all_attrs,
    )
|
||||
# Test-only helpers: graft the unit-test conveniences onto ConceptMetadata.
ConceptMetadata.auto_init = metadata_auto_init
ConceptMetadata.clone = metadata_clone
||||
def get_metadatas(*args, **kwargs):
    """Coerce each argument to a ConceptMetadata; auto-init them all when `next_id` is supplied."""
    metadatas = []
    for arg in args:
        # Names are turned into fresh metadata; existing metadata passes through.
        metadatas.append(arg if isinstance(arg, ConceptMetadata) else get_metadata(arg))

    sequence = kwargs.get("next_id", None)
    if sequence:
        for metadata in metadatas:
            metadata_auto_init(metadata, sequence)

    return metadatas
||||
def get_concepts(context: ExecutionContext, *concepts, **kwargs) -> list[Concept]:
    """
    Simple and quick way to initialize concepts for a test.

    Each entry of `concepts` may be a name (str) or an already-built Concept.

    :param context: execution context whose Sheerka is used when registering
    :param concepts: names and/or Concept instances to materialize
    :keyword use_sheerka: when True, register each concept through
        `define_new_concept` instead of building a detached one
    :keyword sequence: optional id generator; when given, each concept's
        metadata is auto-initialized with it
    :return: the list of Concepts, in input order
    """
    res = []
    use_sheerka = kwargs.pop("use_sheerka", False)
    sequence = kwargs.pop("sequence", None)
    for c in concepts:
        if use_sheerka:
            c = define_new_concept(context, c)
        elif isinstance(c, str):
            c = get_concept(c)

        if sequence:
            c.get_metadata().auto_init(sequence)

        res.append(c)

    return res
|
||||
def define_new_concept(context: ExecutionContext, c: str | Concept) -> Concept:
    """
    Register a concept in the context's Sheerka and return a fresh instance of it.

    A string is forwarded as-is; a Concept is unpacked into the full positional
    argument list of `Sheerka.define_new_concept`.

    :param context: execution context carrying the Sheerka instance
    :param c: concept name, or a template Concept whose metadata is replayed
    :return: a new instance obtained with `sheerka.newn`
    """
    sheerka = context.sheerka
    if isinstance(c, str):
        retval = sheerka.define_new_concept(context, c)
    else:
        metadata = c.get_metadata()
        # CAUTION: positional passthrough -- this order must track the
        # signature of Sheerka.define_new_concept.
        retval = sheerka.define_new_concept(context,
                                            metadata.name,
                                            metadata.is_builtin,
                                            metadata.is_unique,
                                            metadata.body,
                                            metadata.where,
                                            metadata.pre,
                                            metadata.post,
                                            metadata.ret,
                                            metadata.definition,
                                            metadata.definition_type,
                                            metadata.autouse,
                                            metadata.bound_body,
                                            metadata.desc,
                                            metadata.props,
                                            metadata.variables,
                                            metadata.parameters)

    # Fail the test early if the definition was rejected.
    assert retval.status
    concept = sheerka.newn(retval.value.metadata.name)
    return concept
||||
def get_file_content(file_name):
    """
    Return the entire text content of *file_name*.

    The file is decoded as UTF-8 explicitly so the result does not depend on
    the platform's locale default encoding (the previous implicit default).

    :param file_name: path of the file to read
    :return: the file's content as a single string
    """
    with open(file_name, encoding="utf-8") as f:
        return f.read()
||||
def _rv(value, who="Test"):
    """Wrap *value* in a successful (status=True) ReturnValue attributed to *who*."""
    return ReturnValue(who=who, status=True, value=value)
||||
def _rvc(concept_name, who="Test"):
    """Build a fresh auto-initialized concept named *concept_name* and wrap it
    in a successful ReturnValue attributed to *who*."""
    next_id = GetNextId()
    concept = get_concept(concept_name, sequence=next_id)
    return ReturnValue(who=who, status=True, value=concept)
||||
def _rvf(value, who="Test"):
    """
    Return Value False: wrap *value* in a FAILED (status=False) ReturnValue.

    :param value: payload carried by the failed result
    :param who: originator label recorded on the ReturnValue
    :return: a ReturnValue with status False
    """
    return ReturnValue(who=who, status=False, value=value)
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,14 @@
|
||||
from parsers.ParserInput import ParserInput
|
||||
from parsers.tokenizer import LexerError
|
||||
|
||||
|
||||
def test_i_can_parser_input():
    """A syntactically valid input initializes without recording an exception."""
    source = ParserInput("def concept a")

    assert source.init() is True
    assert source.exception is None
def test_i_can_detect_errors():
    """An unterminated string makes init() fail and stores the LexerError."""
    source = ParserInput('def concept "a')

    assert source.init() is False
    assert isinstance(source.exception, LexerError)
@@ -0,0 +1,211 @@
|
||||
import pytest
|
||||
|
||||
from parsers.tokenizer import LexerError, Token, TokenKind, Tokenizer
|
||||
|
||||
|
||||
def test_i_can_tokenize():
    """
    End-to-end lexing of one source string exercising every TokenKind.

    Each expected Token is (kind, value, index, line, column): the index is a
    0-based offset into the source string, line and column are 1-based.

    NOTE(review): several expected indexes imply multi-character whitespace
    runs (e.g. 4 columns between index 10 and 14, and between 37 and 41) that
    do not match the visible source literals -- the runs look collapsed by a
    file export.  Verify the literals below against the original file.
    """
    source = "+*-/{}[]() ,;:.?\n\n\r\r\r\nidentifier_0\t \t10.15 10 'string\n' \"another string\"=|&<>c:name:"
    source += "$£€!_identifier°~_^\\`==#__var__10r/regex\nregex/r:xxx#1:**//%that's"
    tokens = list(Tokenizer(source))
    # Single-character punctuation, in source order.
    assert tokens[0] == Token(TokenKind.PLUS, "+", 0, 1, 1)
    assert tokens[1] == Token(TokenKind.STAR, "*", 1, 1, 2)
    assert tokens[2] == Token(TokenKind.MINUS, "-", 2, 1, 3)
    assert tokens[3] == Token(TokenKind.SLASH, "/", 3, 1, 4)
    assert tokens[4] == Token(TokenKind.LBRACE, "{", 4, 1, 5)
    assert tokens[5] == Token(TokenKind.RBRACE, "}", 5, 1, 6)
    assert tokens[6] == Token(TokenKind.LBRACKET, "[", 6, 1, 7)
    assert tokens[7] == Token(TokenKind.RBRACKET, "]", 7, 1, 8)
    assert tokens[8] == Token(TokenKind.LPAR, "(", 8, 1, 9)
    assert tokens[9] == Token(TokenKind.RPAR, ")", 9, 1, 10)
    assert tokens[10] == Token(TokenKind.WHITESPACE, " ", 10, 1, 11)
    assert tokens[11] == Token(TokenKind.COMMA, ",", 14, 1, 15)
    assert tokens[12] == Token(TokenKind.SEMICOLON, ";", 15, 1, 16)
    assert tokens[13] == Token(TokenKind.COLON, ":", 16, 1, 17)
    assert tokens[14] == Token(TokenKind.DOT, ".", 17, 1, 18)
    assert tokens[15] == Token(TokenKind.QMARK, "?", 18, 1, 19)
    # Newline handling: \n, \n\r, \r and \r\n each count as one NEWLINE token.
    assert tokens[16] == Token(TokenKind.NEWLINE, "\n", 19, 1, 20)
    assert tokens[17] == Token(TokenKind.NEWLINE, "\n\r", 20, 2, 1)
    assert tokens[18] == Token(TokenKind.NEWLINE, "\r", 22, 3, 1)
    assert tokens[19] == Token(TokenKind.NEWLINE, "\r\n", 23, 4, 1)
    assert tokens[20] == Token(TokenKind.IDENTIFIER, "identifier_0", 25, 5, 1)
    assert tokens[21] == Token(TokenKind.WHITESPACE, "\t \t", 37, 5, 13)
    assert tokens[22] == Token(TokenKind.NUMBER, "10.15", 41, 5, 17)
    assert tokens[23] == Token(TokenKind.WHITESPACE, " ", 46, 5, 22)
    assert tokens[24] == Token(TokenKind.NUMBER, "10", 47, 5, 23)
    assert tokens[25] == Token(TokenKind.WHITESPACE, " ", 49, 5, 25)
    # Strings may span lines; the embedded newline advances the line counter.
    assert tokens[26] == Token(TokenKind.STRING, "'string\n'", 50, 5, 26)
    assert tokens[27] == Token(TokenKind.WHITESPACE, " ", 59, 6, 2)
    assert tokens[28] == Token(TokenKind.STRING, '"another string"', 60, 6, 3)
    assert tokens[29] == Token(TokenKind.EQUALS, '=', 76, 6, 19)
    assert tokens[30] == Token(TokenKind.VBAR, '|', 77, 6, 20)
    assert tokens[31] == Token(TokenKind.AMPER, '&', 78, 6, 21)
    assert tokens[32] == Token(TokenKind.LESS, '<', 79, 6, 22)
    assert tokens[33] == Token(TokenKind.GREATER, '>', 80, 6, 23)
    # c:name: lexes to a CONCEPT token valued (key, id).
    assert tokens[34] == Token(TokenKind.CONCEPT, ('name', None), 81, 6, 24)
    assert tokens[35] == Token(TokenKind.DOLLAR, '$', 88, 6, 31)
    assert tokens[36] == Token(TokenKind.STERLING, '£', 89, 6, 32)
    assert tokens[37] == Token(TokenKind.EURO, '€', 90, 6, 33)
    assert tokens[38] == Token(TokenKind.EMARK, '!', 91, 6, 34)
    assert tokens[39] == Token(TokenKind.IDENTIFIER, '_identifier', 92, 6, 35)
    assert tokens[40] == Token(TokenKind.DEGREE, '°', 103, 6, 46)
    assert tokens[41] == Token(TokenKind.TILDE, '~', 104, 6, 47)
    assert tokens[42] == Token(TokenKind.UNDERSCORE, '_', 105, 6, 48)
    assert tokens[43] == Token(TokenKind.CARAT, '^', 106, 6, 49)
    assert tokens[44] == Token(TokenKind.BACK_SLASH, '\\', 107, 6, 50)
    assert tokens[45] == Token(TokenKind.BACK_QUOTE, '`', 108, 6, 51)
    assert tokens[46] == Token(TokenKind.EQUALSEQUALS, '==', 109, 6, 52)
    assert tokens[47] == Token(TokenKind.HASH, '#', 111, 6, 54)
    assert tokens[48] == Token(TokenKind.VAR_DEF, '__var__10', 112, 6, 55)
    # A regex literal may contain newlines.
    assert tokens[49] == Token(TokenKind.REGEX, '/regex\nregex/', 121, 6, 64)
    # r:xxx#1: lexes to a RULE token valued (key, id).
    assert tokens[50] == Token(TokenKind.RULE, ("xxx", "1"), 135, 7, 7)
    assert tokens[51] == Token(TokenKind.STARSTAR, "**", 143, 7, 15)
    assert tokens[52] == Token(TokenKind.SLASHSLASH, "//", 145, 7, 17)
    assert tokens[53] == Token(TokenKind.PERCENT, "%", 147, 7, 19)
    # "that's" breaks apart: identifier, quote, identifier.
    assert tokens[54] == Token(TokenKind.IDENTIFIER, "that", 148, 7, 20)
    assert tokens[55] == Token(TokenKind.QUOTE, "'", 152, 7, 24)
    assert tokens[56] == Token(TokenKind.IDENTIFIER, "s", 153, 7, 25)

    assert tokens[57] == Token(TokenKind.EOF, '', 154, 7, 26)
||||
@pytest.mark.parametrize("text, expected", [
    ("_ident", True),
    ("__ident", True),
    ("___ident", True),
    ("ident", True),
    ("ident123", True),
    ("ident_123", True),
    ("ident-like-this", True),
    ("àèùéû", True),
    ("011254", False),
    ("0abcd", False),
    ("-abcd", False)
])
def test_i_can_tokenize_identifiers(text, expected):
    """Identifiers may not start with a digit or dash; underscores, digits, dashes and accents are fine inside."""
    first_token = list(Tokenizer(text))[0]
    is_identifier = first_token.type == TokenKind.IDENTIFIER
    assert is_identifier == expected
||||
@pytest.mark.parametrize("text", [
    "123abc",
    "123",
    "abc",
    "abc123"
])
def test_i_can_parse_word(text):
    """With parse_word=True the whole run is lexed as a single WORD token."""
    word, end_marker = list(Tokenizer(text, parse_word=True))[:2]

    assert word.type == TokenKind.WORD
    assert word.value == text
    # The token after the word sits right past the end of the input.
    assert end_marker.index == len(text)
||||
@pytest.mark.parametrize("text", [
    "__var__0",
    "__var__1",
    "__var__10",
    "__var__999",
])
def test_i_can_parse_var_def(text):
    """A `__var__<n>` marker lexes to exactly one VAR_DEF token (plus the end token)."""
    tokens = list(Tokenizer(text))

    assert len(tokens) == 2
    first = tokens[0]
    assert first.type == TokenKind.VAR_DEF
    assert first.value == text
||||
@pytest.mark.parametrize("text, message, error_text, index, line, column", [
    ("'string", "Missing Trailing quote", "'string", 7, 1, 8),
    ('"string', "Missing Trailing quote", '"string', 7, 1, 8),
    ('"a" + "string', "Missing Trailing quote", '"string', 13, 1, 14),
    ('"a"\n\n"string', "Missing Trailing quote", '"string', 12, 3, 8),
    ('"', "Missing Trailing quote", '"', 1, 1, 2),
    ("c::", "Concept identifiers not found", "", 2, 1, 3),
    ("c:foo\nbar:", "New line in concept name", "foo", 5, 1, 6),
    ("c:foo", "Missing ending colon", "foo", 5, 1, 6)
])
def test_i_can_detect_tokenizer_errors(text, message, error_text, index, line, column):
    """Lexing errors carry the message plus the exact text/index/line/column of the failure."""
    with pytest.raises(LexerError) as excinfo:
        list(Tokenizer(text))

    error = excinfo.value
    actual = (error.message, error.text, error.index, error.line, error.column)
    assert actual == (message, error_text, index, line, column)
||||
@pytest.mark.parametrize("text, expected_text, expected_newlines, expected_column", [
    ("'foo'", "'foo'", 0, 6),
    ('"foo"', '"foo"', 0, 6),
    ("'foo\nbar'", "'foo\nbar'", 1, 5),
    ("'foo\rbar'", "'foo\rbar'", 0, 10),
    ("'foo\n\rbar'", "'foo\n\rbar'", 1, 6),
    ("'foo\r\nbar'", "'foo\r\nbar'", 1, 5),
    ("'foo\n\nbar'", "'foo\n\nbar'", 2, 5),
    ("'foo\r\n\n\rbar'", "'foo\r\n\n\rbar'", 2, 6),
    ("'\nfoo\nbar\n'", "'\nfoo\nbar\n'", 3, 2),
    ("'\n\rfoo\r\n'", "'\n\rfoo\r\n'", 2, 2),
    (r"'foo\'bar'", r"'foo\'bar'", 0, 11),
    (r'"foo\"bar"', r'"foo\"bar"', 0, 11),
    ('"foo"bar"', '"foo"', 0, 6),
    ("'foo'bar'", "'foo'", 0, 6),
])
def test_i_can_parse_strings(text, expected_text, expected_newlines, expected_column):
    """eat_string returns the quoted text, how many newlines it crossed, and the ending column."""
    found, newlines, column = Tokenizer(text).eat_string(0, 1, 1)

    assert found == expected_text
    assert newlines == expected_newlines
    assert column == expected_column
||||
@pytest.mark.parametrize("text", [
    "1", "3.1415", "0.5", "01", "-5", "-5.10"
])
def test_i_can_parse_numbers(text):
    """Integers, decimals, leading zeroes and negative literals all lex as one NUMBER."""
    first = list(Tokenizer(text))[0]

    assert first.type == TokenKind.NUMBER
    assert first.value == text
||||
@pytest.mark.parametrize("text, expected", [
    ("c:key:", ("key", None)),
    ("c:key|id:", ("key", "id")),
    ("c:key|:", ("key", None)),
    ("c:|id:", (None, "id")),
    ("c:125:", ("125", None)),
])
def test_i_can_parse_concept_token(text, expected):
    """A `c:key|id:` reference lexes to one CONCEPT token valued (key, id); either part may be omitted."""
    first = list(Tokenizer(text))[0]

    assert first.type == TokenKind.CONCEPT
    assert first.value == expected
||||
@pytest.mark.parametrize("text, expected", [
    ("r:key:", ("key", None)),
    ("r:key#id:", ("key", "id")),
    ("r:key#:", ("key", None)),
    ("r:#id:", (None, "id")),
    ("r:125:", ("125", None)),
])
def test_i_can_parse_rule_token(text, expected):
    """A `r:key#id:` reference lexes to one RULE token valued (key, id).

    Renamed from `test_i_can_parse_concept_token`: that name collided with the
    CONCEPT-token test defined just above, so the duplicate definition shadowed
    it and pytest only ever ran one of the two tests.
    """
    tokens = list(Tokenizer(text))

    assert tokens[0].type == TokenKind.RULE
    assert tokens[0].value == expected
||||
@pytest.mark.parametrize("text, expected", [
    ("r|regex|", "|regex|"),
    ("r/regex/", "/regex/"),
    ("r'regex'", "'regex'"),
    ('r"regex"', '"regex"'),
])
def test_i_can_parse_regex_token(text, expected):
    """An `r<delim>...<delim>` literal lexes to a REGEX token; str/repr forms keep the leading r."""
    token = list(Tokenizer(text))[0]

    assert token.type == TokenKind.REGEX
    assert token.value == expected
    assert token.str_value == "r" + expected
    assert token.repr_value == "r" + expected
    # strip_quote drops the surrounding delimiters.
    assert token.strip_quote == expected[1:-1]
||||
@@ -6,11 +6,11 @@ from os import path
|
||||
|
||||
import pytest
|
||||
|
||||
from core.global_symbols import NotFound
|
||||
from common.global_symbols import NotFound
|
||||
from sdp.sheerkaDataProvider import Event, SheerkaDataProvider
|
||||
from sdp.sheerkaSerializer import JsonSerializer, PickleSerializer
|
||||
|
||||
tests_root = path.abspath("../../build/tests")
|
||||
tests_root = path.abspath("../build/tests")
|
||||
evt_digest = "3a571cb6034ef6fc8d7fe91948d0d29728eed74de02bac7968b0e9facca2c2d7"
|
||||
|
||||
|
||||
@@ -71,7 +71,7 @@ class ObjWithDigestWithKey:
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def init_test():
|
||||
def init_current_dir():
|
||||
"""
|
||||
I test both SheerkaDataProviderFileIO and SheerkaDataProviderDictionaryIO
|
||||
So it's important to reset the folders between two tests
|
||||
@@ -407,11 +407,11 @@ def test_i_can_add_an_object_and_save_it_as_a_reference(root):
|
||||
|
||||
state = sdp.load_state(sdp.get_snapshot(SheerkaDataProvider.HeadFile))
|
||||
assert state.data == {
|
||||
"entry": {'key1': '##REF##:fbc2b1c60ed753b49217cae851e342371ee39ebabc9778105f450812e615a513',
|
||||
'key2': ['##REF##:fbc2b1c60ed753b49217cae851e342371ee39ebabc9778105f450812e615a513',
|
||||
'##REF##:448420dbc57d61401d10a98759fccdabbe50e2e825b6da3bd018c190926bcda4'],
|
||||
'key3': {'##REF##:448420dbc57d61401d10a98759fccdabbe50e2e825b6da3bd018c190926bcda4',
|
||||
'##REF##:fbc2b1c60ed753b49217cae851e342371ee39ebabc9778105f450812e615a513'}}
|
||||
"entry": {'key1': '##REF##:4d20621e3c45e8977504016caa2539c0d518850d3a8f92eb20f3e9e5192c41cf',
|
||||
'key2': ['##REF##:4d20621e3c45e8977504016caa2539c0d518850d3a8f92eb20f3e9e5192c41cf',
|
||||
'##REF##:c142f0a14ae4b52afa7cdcbb88dc16563468ca3fa99584323083968099cbaf6b'],
|
||||
'key3': {'##REF##:4d20621e3c45e8977504016caa2539c0d518850d3a8f92eb20f3e9e5192c41cf',
|
||||
'##REF##:c142f0a14ae4b52afa7cdcbb88dc16563468ca3fa99584323083968099cbaf6b'}}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@ class ObjNoKey:
|
||||
|
||||
|
||||
def test_i_can_json_serialize():
|
||||
json_serializer = JsonSerializer(lambda obj: True)
|
||||
json_serializer = JsonSerializer(lambda o: True)
|
||||
obj = ObjNoKey("a", "b")
|
||||
stream = io.BytesIO()
|
||||
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
from starlette.testclient import TestClient
|
||||
|
||||
from server.main import app
|
||||
|
||||
# In-process HTTP client bound to the application under test (no real network).
client = TestClient(app)


def test_i_can_authenticate():
    """POSTing valid credentials to /token returns 200 with the full token payload."""
    # NOTE(review): credentials assume a "kodjo" user seeded in the test app -- verify the fixture data.
    data = {
        "username": "kodjo",
        "password": "kodjo"
    }
    response = client.post("/token", data=data)
    assert response.status_code == 200
    as_json = response.json()
    # The payload bundles the bearer token with display info for the UI.
    assert 'access_token' in as_json
    assert 'first_name' in as_json
    assert 'last_name' in as_json
    assert 'token_type' in as_json
@@ -0,0 +1,186 @@
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from common.global_symbols import NotFound, NotInit
|
||||
from conftest import NewOntology
|
||||
from core.BuiltinConcepts import BuiltinConcepts
|
||||
from core.ErrorContext import ErrorContext
|
||||
from core.concept import ConceptMetadata
|
||||
from core.services.SheerkaConceptManager import ConceptAlreadyDefined, ConceptManager
|
||||
from helpers import get_metadata
|
||||
|
||||
|
||||
class TestConceptManager(BaseTest):
    """Tests for the ConceptManager service: metadata digests, concept keys,
    and the define/get/instantiate lifecycle of concepts within ontologies."""

    @pytest.fixture()
    def service(self, sheerka):
        # Resolve the ConceptManager service registered on the Sheerka instance.
        return sheerka.services[ConceptManager.NAME]

    def test_i_can_compute_concept_digest(self, service):
        """
        Two concepts with the same definition share the same digest:
        the digest is a pure function of the metadata content.
        """
        metadata = get_metadata("foo", "body")
        digest = service.compute_metadata_digest(metadata)
        assert digest == "21a1c2f420da62f4dc60f600c95b19dd9527b19dd28fd38e17f5c0e28963d176"

        another_metadata = get_metadata("foo", "body")
        other_digest = service.compute_metadata_digest(another_metadata)

        assert digest == other_digest

    def test_id_is_not_part_of_the_digest(self, service):
        """Concepts differing only by id must hash to the same digest."""
        metadata1 = get_metadata("foo", "body", id=1)
        metadata2 = get_metadata("foo", "body", id=2)

        assert service.compute_metadata_digest(metadata1) == service.compute_metadata_digest(metadata2)

    def test_i_can_compute_concept_attributes_based_on_the_metadata(self, service):
        """The attribute tuple is the builtin attrs followed by declared variables."""
        compute_all_attrs = service.compute_all_attrs

        # No variables: only the builtin attributes are present.
        m1 = get_metadata("foo")
        assert compute_all_attrs(m1.variables) == ('#where#', '#pre#', '#post#', '#body#', '#ret#')

        # Declared variables are appended after the builtin attributes.
        m2 = get_metadata("bar", variables=[("var1", None), ("var2", None)])
        assert compute_all_attrs(m2.variables) == ('#where#', '#pre#', '#post#', '#body#', '#ret#', 'var1', 'var2')

    @pytest.mark.parametrize("definition, variables, expected", [
        ("foo", [], "foo"),
        ("foo(bar)", [], "foo ( bar )"),
        ("foo a", ["a"], "foo __var__0"),
        ("a foo b", ["a", "b"], "__var__0 foo __var__1"),
        ("a foo b", ["b", "a"], "__var__1 foo __var__0"),
        ("foo", ["foo"], "foo"),
        ("foo a", ["foo"], "__var__0 a"),
        ("foo a b", ["a"], "foo __var__0 b"),
        ("'foo'", [], "'foo'"),
        ("my name is a", ["a"], "my name is __var__0"),
        ("a b c d", ["b", "c"], "a __var__0 __var__1 d"),
        ("a 'b c' d", ["b", "c"], "a 'b c' d"),
        ("a | b", ["a", "b"], "__var__0 | __var__1"),
        ("a b a c", ["a", "b"], "__var__0 __var__1 __var__0 c"),
        ("a b a c", ["b", "a"], "__var__1 __var__0 __var__1 c"),
        ("def concept a", ["a"], "def concept __var__0"),
    ])
    def test_i_can_create_concept_key(self, service, definition, variables, expected):
        """Variable tokens in the definition are replaced by their positional
        __var__N placeholder (index taken from the variables list order);
        quoted spans are left untouched."""
        expanded_variables = tuple((v, NotInit) for v in variables)
        key = service.create_concept_key(definition, None, expanded_variables)

        assert key == expected

    def test_the_key_is_created_from_the_definition_if_it_is_set(self, service):
        # An explicit definition wins over the name; otherwise the name is used.
        assert service.create_concept_key("from name", "from definition", None) == "from definition"
        assert service.create_concept_key("from name", None, None) == "from name"

    def test_i_can_define_a_new_concept(self, context, service):
        """Defining a concept returns its metadata and indexes it by id, name,
        key and digest in the object manager."""
        with NewOntology(context, "test_i_can_define_a_new_concept"):
            res = service.define_new_concept(context, "name", body="body")

            assert res.status is True

            metadata = res.value.metadata
            assert isinstance(metadata, ConceptMetadata)
            # ids are allocated starting at 1001 in a fresh ontology
            assert metadata.id == "1001"
            assert metadata.name == "name"
            assert metadata.key == "name"
            assert metadata.body == "body"
            assert metadata.digest == "eb0620bd4a317af8a403c0ae1e185a528f9b58f8b0878d990e62278f89cf10d5"
            assert metadata.all_attrs == ('#where#', '#pre#', '#post#', '#body#', '#ret#')

            # is sorted in db: the same metadata is reachable through every index
            om = context.sheerka.om
            assert om.get(ConceptManager.CONCEPTS_BY_ID_ENTRY, metadata.id) == metadata
            assert om.get(ConceptManager.CONCEPTS_BY_NAME_ENTRY, metadata.name) == metadata
            assert om.get(ConceptManager.CONCEPTS_BY_KEY_ENTRY, metadata.key) == metadata
            assert om.get(ConceptManager.CONCEPTS_BY_HASH_ENTRY, metadata.digest) == metadata

    def test_i_cannot_create_the_same_concept_twice(self, context, service):
        """A duplicate definition in the same ontology fails with
        ConceptAlreadyDefined wrapped in an ErrorContext."""
        with NewOntology(context, "test_i_cannot_create_the_same_concept_twice"):
            res = service.define_new_concept(context, "name", body="body")
            assert res.status

            res = service.define_new_concept(context, "name", body="body")
            assert not res.status
            assert isinstance(res.value, ErrorContext)
            assert isinstance(res.value.value, ConceptAlreadyDefined)

    def test_i_can_add_the_same_concept_on_different_ontologies(self, context, service):
        """The duplicate check is scoped per ontology, not global."""
        with NewOntology(context, "test_i_can_add_the_same_concept_on_different_ontologies"):
            res = service.define_new_concept(context, "name", body="body")
            assert res.status

            # a second ontology accepts the same definition again
            sheerka = context.sheerka
            om = sheerka.om
            om.push_ontology("my_new_ontology")
            res = service.define_new_concept(context, "name", body="body")
            assert res.status is True

    def test_i_can_get_a_newly_created_concept(self, context, service):
        """A defined concept is retrievable by id, by name and by key."""
        with NewOntology(context, "test_i_can_get_a_newly_created_concept"):
            res = service.define_new_concept(context, "name", body="body")
            assert res.status
            metadata = res.value.metadata

            assert service.get_by_id(metadata.id).id == metadata.id
            assert service.get_by_name(metadata.name).name == metadata.name
            assert service.get_by_key(metadata.key).key == metadata.key

    def test_i_can_instantiate_a_new_concept_by_its_name(self, context, service):
        """newn() instantiates a concept by name and binds keyword variables."""
        with NewOntology(context, "test_i_can_instantiate_a_new_concept_by_its_name"):
            res = service.define_new_concept(context, "foo", variables=[("var1", None), ("var2", None)])
            assert res.status

            foo = service.newn("foo", var1="value1", var2="value2")

            assert foo.id == "1001"
            assert foo.key == "foo"
            assert foo.name == "foo"
            assert foo.str_id == "c:#1001:"
            assert foo.var1 == "value1"
            assert foo.var2 == "value2"

    def test_i_can_instantiate_a_new_concept_by_its_id(self, context, service):
        """newi() instantiates a concept by id and binds keyword variables."""
        with NewOntology(context, "test_i_can_instantiate_a_new_concept_by_its_id"):
            res = service.define_new_concept(context, "foo", variables=[("var1", None), ("var2", None)])
            assert res.status

            foo = service.newi("1001", var1="value1", var2="value2")

            assert foo.id == "1001"
            assert foo.key == "foo"
            assert foo.name == "foo"
            assert foo.str_id == "c:#1001:"
            assert foo.var1 == "value1"
            assert foo.var2 == "value2"

    def test_i_cannot_instantiate_a_concept_which_does_not_exist(self, context, service):
        """Instantiating an undefined concept yields the UNKNOWN_CONCEPT
        placeholder, with the requested name/id recorded for diagnostics."""
        foo = service.newn("foo", var1="value1", var2="value2")
        assert foo.key == BuiltinConcepts.UNKNOWN_CONCEPT
        assert foo.requested_name == "foo"

        foo = service.newi("1001", var1="value1", var2="value2")
        assert foo.key == BuiltinConcepts.UNKNOWN_CONCEPT
        assert foo.requested_id == "1001"

    def test_i_can_instantiate_by_name_when_multiple_results(self, context, service):
        """When several concepts share a name, newn() returns all of them
        in definition (id) order."""
        with NewOntology(context, "test_i_can_instantiate_by_name_when_multiple_results"):
            service.define_new_concept(context, "foo", body="body1")
            service.define_new_concept(context, "foo", body="body2")

            concepts = service.newn("foo")

            assert len(concepts) == 2
            assert concepts[0].id == "1001"
            assert concepts[0].get_metadata().body == "body1"
            assert concepts[1].id == "1002"
            assert concepts[1].get_metadata().body == "body2"

    def test_concepts_are_removed_when_ontology_is_popped(self, context, service):
        """Popping an ontology removes the concepts it defined."""
        context.sheerka.om.push_ontology("new ontology")
        res = service.define_new_concept(context, "foo", body="body")
        assert service.get_by_id(res.value.metadata.id) is not NotFound

        context.sheerka.om.pop_ontology(context)
        assert service.get_by_id(res.value.metadata.id) is NotFound
|
||||
@@ -0,0 +1,379 @@
|
||||
from typing import Callable
|
||||
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from core.BuiltinConcepts import BuiltinConcepts
|
||||
from core.ExecutionContext import ExecutionContext, ExecutionContextActions
|
||||
from core.ReturnValue import ReturnValue
|
||||
from core.services.SheerkaEngine import SheerkaEngine
|
||||
from evaluators.CreateParserInput import CreateParserInput
|
||||
from evaluators.base_evaluator import AllReturnValuesEvaluator, BaseEvaluator, EvaluatorEvalResult, \
|
||||
EvaluatorMatchResult, \
|
||||
OneReturnValueEvaluator
|
||||
from helpers import _rvc
|
||||
|
||||
ALL_STEPS = [
|
||||
ExecutionContextActions.BEFORE_PARSING,
|
||||
ExecutionContextActions.PARSING,
|
||||
ExecutionContextActions.AFTER_PARSING,
|
||||
ExecutionContextActions.BEFORE_EVALUATION,
|
||||
ExecutionContextActions.EVALUATION,
|
||||
ExecutionContextActions.AFTER_EVALUATION
|
||||
]
|
||||
|
||||
|
||||
class OneReturnValueEvaluatorForTesting(OneReturnValueEvaluator):
    """Scriptable per-return-value evaluator stub for engine tests.

    The match decision and the evaluation outcome are injected through the
    constructor, so each test can dictate exactly what the evaluator reports.
    """

    def __init__(self, name,
                 step: ExecutionContextActions,
                 priority: int,
                 enabled=True,
                 match: bool | Callable = True,
                 match_context=None,
                 eval_result: list[ReturnValue] = None,
                 eval_eaten: list[ReturnValue] = None):
        super().__init__(name, step, priority, enabled)
        # `match` is either a fixed boolean or a predicate over the return value.
        self.matches_delegate = match
        self.matches_context = match_context
        self.eval_result = eval_result
        self.eval_eaten = eval_eaten

    def matches(self, context: ExecutionContext, return_value: ReturnValue) -> EvaluatorMatchResult:
        """Report the scripted match decision for a single return value."""
        delegate = self.matches_delegate
        if isinstance(delegate, bool):
            # A plain boolean is used verbatim.
            status = delegate
        else:
            # Otherwise it is a predicate: apply it to the return value.
            status = delegate(return_value)
        return EvaluatorMatchResult(status, self.matches_context)

    def eval(self, context: ExecutionContext,
             evaluation_context: object,
             return_value: ReturnValue) -> EvaluatorEvalResult:
        """Return the scripted evaluation result, wiring parentage.

        Every produced value that differs from the input gets the input as its
        parent, so provenance stays correct when the value is replaced.
        """
        for produced in (self.eval_result or []):
            if produced != return_value:
                produced.parents = [return_value]

        # By default the evaluator "eats" the value it was given.
        return EvaluatorEvalResult(self.eval_result, self.eval_eaten or [return_value])
|
||||
|
||||
|
||||
class AllReturnValuesEvaluatorForTesting(AllReturnValuesEvaluator):
    """Scriptable whole-list evaluator stub for engine tests.

    Mirrors OneReturnValueEvaluatorForTesting but matches/evaluates the full
    list of return values at once.
    """

    def __init__(self, name,
                 step: ExecutionContextActions,
                 priority: int,
                 enabled=True,
                 match: bool | Callable = True,
                 match_context=None,
                 eval_result: list[ReturnValue] = None,
                 eval_eaten: list[ReturnValue] = None):
        super().__init__(name, step, priority, enabled)
        # `match` is either a fixed boolean or a predicate over the value list.
        self.matches_delegate = match
        self.matches_context = match_context
        self.eval_result = eval_result
        self.eval_eaten = eval_eaten

    def matches(self, context: ExecutionContext, return_values: list[ReturnValue]) -> EvaluatorMatchResult:
        """Report the scripted match decision for the whole list."""
        delegate = self.matches_delegate
        if isinstance(delegate, bool):
            # A plain boolean is used verbatim.
            status = delegate
        else:
            # Otherwise it is a predicate: apply it to the list of return values.
            status = delegate(return_values)
        return EvaluatorMatchResult(status, self.matches_context)

    def eval(self, context: ExecutionContext,
             evaluation_context: object,
             return_values: list[ReturnValue]) -> EvaluatorEvalResult:
        """Return the scripted evaluation result, wiring parentage.

        All produced values get the full input list as their parents.
        """
        for produced in (self.eval_result or []):
            produced.parents = return_values

        # By default the evaluator "eats" everything it was given.
        return EvaluatorEvalResult(self.eval_result, self.eval_eaten or return_values)
|
||||
|
||||
|
||||
class TestSheerkaEngine(BaseTest):
    """Tests for SheerkaEngine: execution-plan computation, step ordering,
    priority handling, and the one-return / all-returns evaluation loops.

    Fix vs. original: removed an unused local
    (`children = list(context.get_children())`) in
    test_eval_is_called_if_match_succeed_for_all_return.
    """

    @pytest.fixture()
    def service(self, sheerka):
        return SheerkaEngine(sheerka)

    def test_i_can_compute_execution_plan(self, service):
        """The plan groups enabled evaluators by step, then by priority."""
        assert service.compute_execution_plan([]) == {}

        e1 = BaseEvaluator("eval1", ExecutionContextActions.BEFORE_EVALUATION, 5)
        e2 = BaseEvaluator("eval2", ExecutionContextActions.BEFORE_EVALUATION, 5)
        e3 = BaseEvaluator("eval3", ExecutionContextActions.BEFORE_EVALUATION, 10)
        e4 = BaseEvaluator("eval4", ExecutionContextActions.EVALUATION, 10)
        # e5 is disabled and must not appear in the plan
        e5 = BaseEvaluator("eval5", ExecutionContextActions.AFTER_EVALUATION, 10, enabled=False)
        res = service.compute_execution_plan([e1, e2, e3, e4, e5])
        assert res == {ExecutionContextActions.BEFORE_EVALUATION: {5: [e1, e2], 10: [e3]},
                       ExecutionContextActions.EVALUATION: {10: [e4]}}

    def test_i_can_call_execute(self, sheerka, context, service):
        """End-to-end execute() through a real evaluator (CreateParserInput)."""
        service.execution_plan = {ExecutionContextActions.BEFORE_EVALUATION: {50: [CreateParserInput()]}}
        start = [ReturnValue("TestSheerkaEngine", True, sheerka.newn(BuiltinConcepts.USER_INPUT, command="1 + 1"))]

        ret = service.execute(context, start, [ExecutionContextActions.BEFORE_EVALUATION])
        assert len(ret) == 1
        ret = ret[0]
        assert isinstance(ret, ReturnValue)
        assert ret.who == CreateParserInput.NAME
        assert ret.status is True
        # the produced value keeps the input as its parent
        assert ret.parents == start

    def test_that_return_values_is_unchanged_when_no_evaluator(self, context, service):
        """With an empty plan, execute() passes values through untouched."""
        service.execution_plan = {}
        start = [_rvc("foo")]

        ret = service.execute(context, start, [ExecutionContextActions.EVALUATION])

        assert ret == start

    def test_steps_are_executed_in_correct_order(self, context, service):
        """Steps run in the canonical order regardless of evaluator order."""
        # properly init the service; evaluators deliberately never match
        _ = OneReturnValueEvaluatorForTesting
        evaluators = [
            _("eval1", ExecutionContextActions.AFTER_PARSING, 21, match=False),
            _("eval2", ExecutionContextActions.BEFORE_EVALUATION, 5, match=False),
            _("eval3", ExecutionContextActions.AFTER_EVALUATION, 12, match=False),
            _("eval4", ExecutionContextActions.EVALUATION, 99, match=False),
            _("eval5", ExecutionContextActions.BEFORE_PARSING, 5, match=False),
            _("eval6", ExecutionContextActions.PARSING, 25, match=False),
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        # init test variables
        start = [_rvc("foo")]
        service.execute(context, start, ALL_STEPS)
        # to check what happened, look at the execution context children
        executed_steps = [ec.action_context["step"] for ec in context.get_children(level=1)]
        assert executed_steps == ALL_STEPS

    def test_higher_priority_evaluators_are_executed_first(self, context, service):
        """Within one step, evaluators run by descending priority (insertion
        order breaks ties)."""
        # properly init the service
        _ = OneReturnValueEvaluatorForTesting
        evaluators = [
            _("eval1", ExecutionContextActions.EVALUATION, 20, match=False),
            _("eval2", ExecutionContextActions.EVALUATION, 5, match=False),
            _("eval3", ExecutionContextActions.EVALUATION, 20, match=False),
            _("eval4", ExecutionContextActions.EVALUATION, 99, match=False),
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [_rvc("foo")]
        service.execute(context, start, [ExecutionContextActions.EVALUATION])

        # to check what happened, look at the execution context children
        evaluators_executed = [ec.action_context["evaluator"] for ec in context.get_children() if
                               "evaluator" in ec.action_context]
        assert evaluators_executed == ["eval4", "eval1", "eval3", "eval2"]

    def test_evaluation_loop_stops_when_no_modification(self, context, service):
        """The evaluation loop re-runs only while something changed: one
        productive pass plus one no-op pass = exactly two iterations."""
        rv_foo, rv_bar = _rvc("foo"), _rvc("bar")  # rv => ReturnValue
        # properly init the service
        _ = OneReturnValueEvaluatorForTesting
        evaluators = [
            _("eval1",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda r: context.sheerka.isinstance(r.value, "foo"),
              eval_result=[rv_bar])
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [rv_foo]
        service.execute(context, start, [ExecutionContextActions.EVALUATION])
        children = [ec for ec in context.get_children() if ec.action == ExecutionContextActions.EVALUATING_ITERATION]
        assert len(children) == 2

    def test_eval_is_not_called_if_match_fails_for_one_return(self, context, service):
        """A failed match leaves the value untouched and is traced as such."""
        # properly init the service
        _ = OneReturnValueEvaluatorForTesting
        evaluators = [
            _("eval1",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda r: context.sheerka.isinstance(r.value, "foo"),
              eval_result=[_rvc("bar")])
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [_rvc("baz")]
        res = service.execute(context, start, [ExecutionContextActions.EVALUATION])
        assert res == start

        # check what happen in details
        exec_context = next(filter(lambda ec: "evaluator" in ec.action_context, context.get_children()))
        evaluation_trace = exec_context.values["evaluation"]
        assert evaluation_trace == [{"item": start[0], "match": False}]

    def test_eval_is_called_if_match_succeed_for_one_return(self, context, service):
        """A successful match replaces the value and records new/eaten items."""
        # properly init the service
        _ = OneReturnValueEvaluatorForTesting
        evaluators = [
            _("eval1",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda r: context.sheerka.isinstance(r.value, "foo"),
              eval_result=[_rvc("bar")])
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [_rvc("foo")]
        res = service.execute(context, start, [ExecutionContextActions.EVALUATION])
        assert res == [_rvc("bar")]
        assert res[0].parents == start

        # check what happen in details
        exec_context = next(filter(lambda ec: "evaluator" in ec.action_context, context.get_children()))
        evaluation_trace = exec_context.values["evaluation"]
        assert evaluation_trace == [{"item": start[0], "match": True, "new": res, "eaten": start}]

    def test_all_item_are_processed_during_one_return(self, context, service):
        """Every item is offered to the evaluator; replacements keep position."""
        rv_foo, rv_bar, rv_baz, rv_qux = _rvc("foo"), _rvc("bar"), _rvc("baz"), _rvc("qux")  # rv => ReturnValue

        # properly init the service
        _ = OneReturnValueEvaluatorForTesting
        evaluators = [
            _("eval1",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda r: context.sheerka.isinstance(r.value, "foo"),
              eval_result=[rv_qux])
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [rv_bar, rv_foo, rv_baz]
        res = service.execute(context, start, [ExecutionContextActions.EVALUATION])
        assert res == [rv_bar, rv_qux, rv_baz]  # We must keep the order ! rv_qux replaces rv_foo
        assert res[0].parents is None
        assert res[1].parents == [rv_foo]
        assert res[2].parents is None

        # check what happen in details
        exec_context = next(filter(lambda ec: "evaluator" in ec.action_context, context.get_children()))
        evaluation_trace = exec_context.values["evaluation"]
        assert evaluation_trace == [{"item": rv_bar, "match": False},
                                    {"item": rv_foo, "match": True, "new": [rv_qux], "eaten": [rv_foo]},
                                    {"item": rv_baz, "match": False}]

    def test_evaluators_with_the_same_priority_do_not_compete_with_each_other_one_return(self, context, service):
        """Equal-priority evaluators both get the original item: both outputs
        are kept in evaluator order at the item's position."""
        rv_foo, rv_bar, rv_baz, rv_qux = _rvc("foo"), _rvc("bar"), _rvc("baz"), _rvc("qux")  # rv => ReturnValue

        # properly init the service
        # both evaluator want to eat 'foo'
        _ = OneReturnValueEvaluatorForTesting
        evaluators = [
            _("eval1",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda r: context.sheerka.isinstance(r.value, "foo"),
              eval_result=[rv_bar]),
            _("eval2",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda r: context.sheerka.isinstance(r.value, "foo"),
              eval_result=[rv_baz])
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [rv_qux, rv_foo, rv_qux]
        res = service.execute(context, start, [ExecutionContextActions.EVALUATION])
        assert res == [rv_qux, rv_bar, rv_baz, rv_qux]  # they both eat it !
        assert res[1].parents == [rv_foo]
        assert res[2].parents == [rv_foo]

    def test_evaluators_with_higher_priority_take_precedence_one_return(self, context, service):
        """When priorities differ, the higher-priority evaluator consumes the
        item before the lower one ever sees it."""
        rv_foo, rv_bar, rv_baz = _rvc("foo"), _rvc("bar"), _rvc("baz")  # rv => ReturnValue

        # properly init the service
        # both evaluator want to eat 'foo'
        _ = OneReturnValueEvaluatorForTesting
        evaluators = [
            _("eval1",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda r: context.sheerka.isinstance(r.value, "foo"),
              eval_result=[rv_bar]),
            _("eval2",
              ExecutionContextActions.EVALUATION,
              30,
              match=lambda r: context.sheerka.isinstance(r.value, "foo"),
              eval_result=[rv_baz])
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [rv_foo]
        res = service.execute(context, start, [ExecutionContextActions.EVALUATION])
        assert res == [rv_baz]
        assert res[0].parents == start

    def test_evaluator_matches_is_called_before_eval_for_all_return(self, context, service):
        """All-returns evaluators: eval runs only when the list-level match
        succeeds."""
        # properly init the service
        _ = AllReturnValuesEvaluatorForTesting
        evaluators = [
            _("eval1",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda r: context.sheerka.isinstance(r[0].value, "foo"),
              eval_result=[_rvc("bar")])
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [_rvc("baz")]
        res = service.execute(context, start, [ExecutionContextActions.EVALUATION])
        assert res == start

        start = [_rvc("foo")]
        res = service.execute(context, start, [ExecutionContextActions.EVALUATION])
        assert res == [_rvc("bar")]
        assert res[0].parents == start

    def test_eval_is_not_call_if_match_fails_for_all_return(self, context, service):
        """A failed list-level match leaves everything untouched and is traced."""
        rv_foo, rv_bar, rv_baz = _rvc("foo"), _rvc("bar"), _rvc("baz")  # rv => ReturnValue

        # properly init the service
        _ = AllReturnValuesEvaluatorForTesting
        evaluators = [
            _("eval1",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda lst: context.sheerka.isinstance(lst[0].value, "foo"),
              eval_result=[rv_bar])
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [rv_baz, rv_foo]  # foo is not the first in the list
        res = service.execute(context, start, [ExecutionContextActions.EVALUATION])
        assert res == start

        # check what happen in details
        exec_context = next(filter(lambda ec: "evaluator" in ec.action_context, context.get_children()))
        evaluation_trace = exec_context.values["evaluation"]
        assert evaluation_trace == {"match": False}

    def test_eval_is_called_if_match_succeed_for_all_return(self, context, service):
        """A successful list-level match replaces the whole list and records
        parentage and trace accordingly."""
        rv_foo, rv_bar, rv_baz = _rvc("foo"), _rvc("bar"), _rvc("baz")  # rv => ReturnValue
        # properly init the service
        _ = AllReturnValuesEvaluatorForTesting
        evaluators = [
            _("eval1",
              ExecutionContextActions.EVALUATION,
              20,
              match=lambda lst: context.sheerka.isinstance(lst[0].value, "foo"),
              eval_result=[rv_bar])
        ]
        service.execution_plan = service.compute_execution_plan(evaluators)

        start = [rv_foo, rv_baz]
        res = service.execute(context, start, [ExecutionContextActions.EVALUATION])
        assert res == [rv_bar]
        assert res[0].parents == start

        # check what happen in details
        exec_context = next(filter(lambda ec: "evaluator" in ec.action_context, context.get_children()))
        evaluation_trace = exec_context.values["evaluation"]
        assert evaluation_trace == {"match": True, "new": res, "eaten": start}
|
||||
@@ -0,0 +1,183 @@
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from common.global_symbols import NoFirstToken, NotFound, NotInit, Removed
|
||||
from helpers import get_concept, get_concepts
|
||||
from ontologies.SheerkaOntologyManager import SheerkaOntologyManager
|
||||
from parsers.tokenizer import Keywords
|
||||
from sheerkapickle import tags
|
||||
from sheerkapickle.sheerkaplicker import SheerkaPickler
|
||||
from sheerkapickle.sheerkaunpickler import SheerkaUnpickler
|
||||
|
||||
|
||||
class Obj:
    """Simple three-attribute value object used as a pickling fixture.

    Equality is structural (all three attributes must match) and the hash is
    consistent with equality, so instances can live in sets and dict keys.
    """

    def __init__(self, a, b, c):
        self.a = a
        self.b = b
        self.c = c

    def __eq__(self, other):
        # Identity fast path — also keeps comparison of circular structures
        # (obj.c is obj) from recursing.
        if self is other:
            return True

        if not isinstance(other, Obj):
            # Idiom fix: return NotImplemented (instead of False) so Python
            # can try the other operand's reflected __eq__; `==` still ends
            # up False when neither side knows how to compare.
            return NotImplemented

        return self.a == other.a and self.b == other.b and self.c == other.c

    def __hash__(self):
        # Hash must agree with __eq__: equal objects share a hash.
        return hash((self.a, self.b, self.c))
|
||||
|
||||
|
||||
class TestSheerkaPickler(BaseTest):
    """Tests for SheerkaPickler/SheerkaUnpickler: flattening Python values to a
    JSON-friendly form (with `_sheerka/...` tags for non-JSON types) and
    restoring them back."""

    @pytest.mark.parametrize("obj, expected", [
        (1, 1),
        (3.14, 3.14),
        ("a string", "a string"),
        (True, True),
        (None, None),
        ([1, 3.14, "a string"], [1, 3.14, "a string"]),
        # non-JSON containers are wrapped in a tagged dict
        ((1, 3.14, "a string"), {tags.TUPLE: [1, 3.14, "a string"]}),
        ({1}, {tags.SET: [1]}),
        ({"a": "a", "b": 3.14, "c": True}, {"a": "a", "b": 3.14, "c": True}),
        ({1: "a", 2: 3.14, 3: True}, {1: "a", 2: 3.14, 3: True}),
        ([1, [3.14, "a string"]], [1, [3.14, "a string"]]),
        ([1, (3.14, "a string")], [1, {tags.TUPLE: [3.14, "a string"]}]),
        ([], []),
        # enums are flattened to their dotted import path
        (Keywords.DEF, {tags.ENUM: 'parsers.tokenizer.Keywords.DEF'}),
    ])
    def test_i_can_flatten_and_restore_primitives(self, sheerka, obj, expected):
        """flatten() produces the expected encoding and restore() round-trips it."""
        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == expected

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    @pytest.mark.parametrize("obj, expected", [
        (NotInit, {tags.CUSTOM: NotInit.value}),
        (NotFound, {tags.CUSTOM: NotFound.value}),
        (Removed, {tags.CUSTOM: Removed.value}),
        (NoFirstToken, {tags.CUSTOM: NoFirstToken.value}),
    ])
    def test_i_can_flatten_and_restore_custom_types(self, sheerka, obj, expected):
        """Sentinel singletons are flattened under the CUSTOM tag and restored
        back to the same singleton."""
        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == expected

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    def test_i_can_flatten_and_restore_instances(self, sheerka):
        """Arbitrary instances become tagged dicts (class path + attributes),
        recursively, and round-trip by structural equality."""
        obj1 = Obj(1, "b", True)
        obj2 = Obj(3.14, ("a", "b"), obj1)

        flatten = SheerkaPickler(sheerka).flatten(obj2)
        assert flatten == {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                           'a': 3.14,
                           'b': {'_sheerka/tuple': ['a', 'b']},
                           'c': {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                                 'a': 1,
                                 'b': 'b',
                                 'c': True}}

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj2

    def test_i_can_manage_circular_reference(self, sheerka):
        """A self-reference is flattened as an `_sheerka/id` back-reference and
        restored as a true cycle."""
        obj1 = Obj(1, "b", True)
        obj1.c = obj1

        flatten = SheerkaPickler(sheerka).flatten(obj1)
        assert flatten == {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                           'a': 1,
                           'b': 'b',
                           'c': {'_sheerka/id': 0}}

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded.a == obj1.a
        assert decoded.b == obj1.b
        # the restored object points back at itself, like the original
        assert decoded.c == decoded

    def test_i_can_flatten_obj_with_new_props(self, sheerka):
        """Attributes added after construction are serialized too."""
        # property 'z' is not part of the `Obj` definition
        obj = Obj(1, "b", True)
        obj.z = "new prop"

        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                           'a': 1,
                           'b': 'b',
                           'c': True,
                           'z': "new prop"}

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    def test_i_cannot_correctly_flatten_compiled_and_generator(self, sheerka):
        """Generators and code objects cannot be serialized: they degrade to
        their repr() strings (a known, accepted limitation)."""
        obj = Obj((i for i in range(3)), compile("a + b", "<src>", mode="eval"), None)

        flatten = SheerkaPickler(sheerka).flatten(obj)

        assert isinstance(flatten["a"], str)
        assert flatten["a"].startswith("<generator object")
        assert isinstance(flatten["b"], str)
        assert flatten["b"].startswith("<code object")
        assert flatten["c"] is None

    @pytest.mark.parametrize("obj, expected", [
        # None keys become the JSON 'null' string
        ({None: "a"}, {'null': "a"}),
        # enum keys become their dotted path string
        ({Keywords.DEF: "a"}, {'parsers.tokenizer.Keywords.DEF': 'a'}),
        # tuple keys are kept as-is
        ({(1, 2): "a"}, {(1, 2): "a"}),
    ])
    def test_i_can_manage_specific_keys_in_dictionaries(self, sheerka, obj, expected):
        """Non-string dictionary keys get dedicated encodings and round-trip."""
        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == expected

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    @pytest.mark.skip("Concepts are not fully working")
    def test_i_can_use_concept_as_dictionary_key(self, sheerka, context):
        """A concept instance used as a dict key is encoded via its str id."""
        concept = get_concepts(context, "foo", use_sheerka=True)[0]

        obj = {concept: "a"}
        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == {'c:foo|1001:': 'a'}

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    def test_i_can_manage_references(self, sheerka):
        """Repeated occurrences of the same object (enum or instance) are
        flattened once, then referenced by `_sheerka/id`."""
        foo = Obj("foo", "bar", "baz")
        obj = [Keywords.DEF, foo, Keywords.WHERE, Keywords.DEF, foo]
        flatten = SheerkaPickler(sheerka).flatten(obj)

        assert flatten == [{'_sheerka/enum': 'parsers.tokenizer.Keywords.DEF'},
                           {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                            'a': 'foo',
                            'b': 'bar',
                            'c': 'baz'},
                           {'_sheerka/enum': 'parsers.tokenizer.Keywords.WHERE'},
                           {'_sheerka/id': 0},
                           {'_sheerka/id': 1}]

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    def test_i_do_not_encode_logger(self, sheerka):
        """Logger instances are dropped (replaced by None) wherever they appear."""
        logger = logging.getLogger("log_name")
        logger2 = logging.getLogger("log_name2")
        obj = Obj("foo", logger, {"a": logger, "b": logger2})

        flatten = SheerkaPickler(sheerka).flatten(obj)
        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == Obj("foo", None, {"a": None, "b": None})

    def test_ontology_are_not_serialized(self, sheerka, context):
        """Ontologies are flattened as an opaque name marker, not serialized."""
        om = SheerkaOntologyManager(sheerka, "mem://").freeze()
        ontology = om.push_ontology("new ontology")

        flatten = SheerkaPickler(sheerka).flatten(ontology)
        assert flatten == "__ONTOLOGY:new ontology__"
|
||||
@@ -0,0 +1,325 @@
|
||||
import pytest
|
||||
|
||||
import sheerkapickle
|
||||
from base import BaseTest
|
||||
from core.concept import Concept
|
||||
|
||||
|
||||
def set_full_serialization(concept):
|
||||
concept.get_metadata().full_serialization = True
|
||||
return concept
|
||||
|
||||
|
||||
@pytest.mark.skip("Handler are not implemented")
|
||||
class TestSheerkaPickleHandler(BaseTest):
|
||||
|
||||
def test_i_can_encode_decode_unknown_concept_metadata(self, sheerka):
|
||||
concept = set_full_serialization(Concept(name="foo", key="my_key"))
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.key": "my_key"}'
|
||||
assert decoded == concept
|
||||
|
||||
concept = set_full_serialization(Concept("foo", is_builtin=True, is_unique=True))
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.is_builtin": true, "meta.is_unique": true}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo", body="my_body"))
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.body": "my_body"}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo", pre="my_pre"))
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.pre": "my_pre"}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo", post="my_post"))
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.post": "my_post"}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo", where="my_where"))
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.where": "my_where"}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo").def_var("a", "value_a").def_var("b", "value_b"))
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.variables": [["a", "value_a"], ["b", "value_b"]], "values": [["a", {"_sheerka/custom": "**NotInit**"}], ["b", {"_sheerka/id": 1}]]}'
|
||||
|
||||
concept = Concept("foo").init_key()
|
||||
sheerka.define_new_concept(self.get_context(sheerka), concept)
|
||||
concept.get_metadata().full_serialization = True
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.key": "foo", "meta.id": "1001"}'
|
||||
|
||||
def test_i_can_encode_decode_unknown_concept_values(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
concept = set_full_serialization(Concept("foo"))
|
||||
concept.set_value(ConceptParts.PRE, 10) # an int
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "values": [["#pre#", 10]]}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo"))
|
||||
concept.set_value(ConceptParts.POST, 'a string') # an string
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "values": [["#post#", "a string"]]}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo"))
|
||||
concept.set_value(ConceptParts.WHERE, ['a string', 3.14]) # a list
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "values": [["#where#", ["a string", 3.14]]]}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo"))
|
||||
concept.set_value(ConceptParts.WHERE, ('a string', 3.14)) # a tuple
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "values": [["#where#", {"_sheerka/tuple": ["a string", 3.14]}]]}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo"))
|
||||
concept.set_value(ConceptParts.BODY, set_full_serialization(Concept("foo", body="foo_body")))
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "values": [["#body#", {"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "meta.body": "foo_body"}]]}'
|
||||
|
||||
def test_i_can_encode_decode_unknown_concept_variables(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
concept = set_full_serialization(Concept("foo"))
|
||||
concept.set_value("a", "value_a") # string
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "values": [["a", "value_a"]]}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo"))
|
||||
concept.set_value("a", 10) # int
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "values": [["a", 10]]}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo"))
|
||||
concept.set_value("a", set_full_serialization(Concept("bar"))) # another concept
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "values": [["a", {"_sheerka/obj": "core.concept.Concept", "meta.name": "bar"}]]}'
|
||||
|
||||
concept = set_full_serialization(Concept("foo"))
|
||||
concept.set_value("a", "a").set_value("b", "b") # at least two variables
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "foo", "values": [["a", "a"], ["b", "b"]]}'
|
||||
|
||||
def test_i_can_encode_decode_known_concepts(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
ref_concept = Concept("my_name", True, True, "my_key", "my_body", "my_where", "my_pre", "my_post", "my_def")
|
||||
ref_concept.def_var("a", "value_a").def_var("b", "value_b")
|
||||
|
||||
sheerka.define_new_concept(self.get_context(sheerka), ref_concept)
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, ref_concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == ref_concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "concept/id": "1001"}'
|
||||
|
||||
# same test, modify a value and check if this modification is correctly saved
|
||||
concept = Concept().update_from(sheerka.get_by_id(ref_concept.id))
|
||||
concept.set_value(ConceptParts.BODY, set_full_serialization(Concept("bar")))
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "concept/id": "1001", "values": [["#body#", {"_sheerka/obj": "core.concept.Concept", "meta.name": "bar"}]]}'
|
||||
|
||||
def test_i_can_manage_reference_of_the_same_object(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
concept_ref = set_full_serialization(Concept("foo"))
|
||||
|
||||
concept = set_full_serialization(Concept("bar"))
|
||||
concept.set_value(ConceptParts.PRE, concept_ref)
|
||||
concept.set_value(ConceptParts.BODY, concept_ref)
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, concept)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == concept
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "meta.name": "bar", "values": [["#pre#", {"_sheerka/obj": "core.concept.Concept", "meta.name": "foo"}], ["#body#", {"_sheerka/id": 1}]]}'
|
||||
|
||||
def test_i_can_encode_decode_user_input(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
user_input = sheerka.new(BuiltinConcepts.USER_INPUT, body="my_text", user_name="my_user_name")
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, user_input)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == user_input
|
||||
assert to_string == f'{{"_sheerka/obj": "core.builtin_concepts.UserInputConcept", "concept/id": ["__USER_INPUT", "{self.user_input_id}"], "user_name": "my_user_name", "text": "my_text"}}'
|
||||
|
||||
def test_i_can_encode_decode_user_input_when_tokens(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
text = "I have 'a complicated' 10 text"
|
||||
tokens = list(Tokenizer(text))
|
||||
user_input = sheerka.new(BuiltinConcepts.USER_INPUT, body=tokens, user_name="my_user_name")
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, user_input)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == sheerka.new(BuiltinConcepts.USER_INPUT, body=text, user_name="my_user_name")
|
||||
assert to_string == f'{{"_sheerka/obj": "core.builtin_concepts.UserInputConcept", "concept/id": ["__USER_INPUT", "{self.user_input_id}"], "user_name": "my_user_name", "text": "{text}"}}'
|
||||
|
||||
def test_i_can_encode_decode_return_value(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
ret_val = sheerka.ret("who", True, 10)
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, ret_val)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == ret_val
|
||||
assert to_string == f'{{"_sheerka/obj": "core.builtin_concepts.ReturnValueConcept", "concept/id": ["__RETURN_VALUE", "{self.return_value_id}"], "who": "who", "status": true, "value": 10}}'
|
||||
|
||||
def test_i_can_encode_decode_return_value_with_parent(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
ret_val = sheerka.ret("who", True, 10)
|
||||
ret_val_parent = sheerka.ret("parent_who", True, "10")
|
||||
ret_val.parents = [ret_val_parent, ret_val_parent]
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, ret_val)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == ret_val
|
||||
assert decoded.parents == ret_val.parents
|
||||
id_str = f', "concept/id": ["__RETURN_VALUE", "{self.return_value_id}"]'
|
||||
parents_str = '[{"_sheerka/obj": "core.builtin_concepts.ReturnValueConcept"' + id_str + ', "who": "parent_who", "status": true, "value": "10"}, {"_sheerka/id": 1}]'
|
||||
assert to_string == '{"_sheerka/obj": "core.builtin_concepts.ReturnValueConcept"' + id_str + ', "who": "who", "status": true, "value": 10, "parents": ' + parents_str + '}'
|
||||
|
||||
def test_i_can_encode_decode_return_values_with_complex_body(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
ret_val = sheerka.ret("who", True, set_full_serialization(Concept("foo", body="bar")))
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, ret_val)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == ret_val
|
||||
|
||||
def test_i_can_encode_decode_return_values_from_concepts_parsers_or_evaluators(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
foo = Concept("foo")
|
||||
sheerka.set_id_if_needed(foo, False)
|
||||
ret_val = sheerka.ret(foo, True, 10)
|
||||
to_string = sheerkapickle.encode(sheerka, ret_val)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == sheerka.ret("c:1001:", True, 10)
|
||||
|
||||
ret_val = sheerka.ret(DefConceptParser(), True, 10)
|
||||
to_string = sheerkapickle.encode(sheerka, ret_val)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == sheerka.ret("parsers.DefConcept", True, 10)
|
||||
|
||||
ret_val = sheerka.ret(ConceptEvaluator(), True, 10)
|
||||
to_string = sheerkapickle.encode(sheerka, ret_val)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == sheerka.ret("evaluators.Concept", True, 10)
|
||||
|
||||
def test_i_can_encode_decode_execution_context(self):
|
||||
sheerka = self.get_sheerka()
|
||||
c = Concept("foo").def_var("a")
|
||||
context = ExecutionContext("who", Event("xxx"), sheerka, BuiltinConcepts.EVALUATE_CONCEPT, c, "my desc")
|
||||
input_list = [ReturnValueConcept("who", True, 10), ReturnValueConcept("who2", False, 20)]
|
||||
context.inputs = {"a": input_list, "b": set_full_serialization(Concept("foo"))}
|
||||
context.values = {"c": input_list, "d": set_full_serialization(Concept("bar"))}
|
||||
context.obj = set_full_serialization(Concept("baz"))
|
||||
context.push("who3", BuiltinConcepts.EVALUATING_CONCEPT, c, desc="sub_child1")
|
||||
context.push("who4", BuiltinConcepts.EVALUATING_ATTRIBUTE, "a", desc="sub_child2")
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, context)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
assert decoded == context
|
||||
|
||||
def test_complicated_execution_context(self):
|
||||
sheerka = self.get_sheerka(skip_builtins_in_db=False)
|
||||
|
||||
text = "def concept one as 1"
|
||||
execution_context = ExecutionContext("s", Event(), sheerka, BuiltinConcepts.NOP, None, f"Evaluating '{text}'")
|
||||
user_input = sheerka.ret("s", True, sheerka.new(BuiltinConcepts.USER_INPUT, body=text, user_name="n"))
|
||||
reduce_requested = sheerka.ret("s", True, sheerka.new(BuiltinConcepts.REDUCE_REQUESTED))
|
||||
|
||||
steps = [
|
||||
BuiltinConcepts.BEFORE_PARSING,
|
||||
BuiltinConcepts.PARSING,
|
||||
BuiltinConcepts.AFTER_PARSING,
|
||||
BuiltinConcepts.BEFORE_EVALUATION,
|
||||
BuiltinConcepts.EVALUATION,
|
||||
BuiltinConcepts.AFTER_EVALUATION
|
||||
]
|
||||
|
||||
ret = sheerka.execute(execution_context, [user_input, reduce_requested], steps)
|
||||
execution_context.add_values(return_values=ret)
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, execution_context)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
|
||||
return_value = decoded.values["return_values"][0].value
|
||||
assert sheerka.isinstance(return_value, BuiltinConcepts.NEW_CONCEPT)
|
||||
|
||||
def test_encode_simple_concept(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
foo = set_full_serialization(Concept("foo"))
|
||||
to_string = sheerkapickle.encode(sheerka, foo)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
|
||||
assert decoded == foo
|
||||
|
||||
def test_i_can_encode_decode_rule(self):
|
||||
sheerka = self.get_sheerka()
|
||||
|
||||
rule = Rule("print", "my rule", "True", "Hello world")
|
||||
rule.metadata.id = "1"
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, rule)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
|
||||
assert to_string == '{"_sheerka/obj": "core.rule.Rule", "rule/id": "1", "name": "my rule", "predicate": "True", "action_type": "print", "action": "Hello world"}'
|
||||
assert decoded == rule
|
||||
|
||||
def test_i_can_encode_decode_dynamic_concept(self):
|
||||
sheerka, context, foo = self.init_concepts("foo", global_truth=True, create_new=True)
|
||||
sheerka.set_attr(context, foo, "attr", "attr_value")
|
||||
sheerka.set_property(context, foo, "prop", "prop_value", all_concepts=True)
|
||||
|
||||
foo_instance = sheerka.new(foo)
|
||||
dynamic_foo = sheerka.new_dynamic(foo_instance,
|
||||
"SUFFIX",
|
||||
"new_name",
|
||||
props={"new_prop": "value"},
|
||||
attrs={"new_attr": "value"})
|
||||
|
||||
to_string = sheerkapickle.encode(sheerka, dynamic_foo)
|
||||
decoded = sheerkapickle.decode(sheerka, to_string)
|
||||
|
||||
assert decoded == dynamic_foo
|
||||
assert to_string == '{"_sheerka/obj": "core.concept.Concept", "concept/id": "1001-SUFFIX", "meta.name": "new_name", "meta.key": "new_name", "meta.props": {"prop": "prop_value", "new_prop": "value"}, "meta.id": "1001-SUFFIX", "values": [["new_attr", "value"]]}'
|
||||
+56
-77
@@ -1,5 +1,3 @@
|
||||
import json
|
||||
|
||||
from fastapi import HTTPException
|
||||
from starlette import status
|
||||
|
||||
@@ -7,92 +5,73 @@ from client import SheerkaClient, parse_arguments
|
||||
from mockserver import MockServer
|
||||
|
||||
|
||||
def test_i_can_start_with_a_default_hostname():
|
||||
parsed = parse_arguments([])
|
||||
# @pytest.mark.skip("too long")
|
||||
class TestSheerkaClient:
|
||||
def test_i_can_start_with_a_default_hostname(self):
|
||||
parsed = parse_arguments([])
|
||||
|
||||
assert parsed.hostname == "http://localhost"
|
||||
assert parsed.port == 56356
|
||||
assert parsed.hostname == "http://localhost"
|
||||
assert parsed.port == 56356
|
||||
|
||||
def test_i_can_override_hostname_and_port(self):
|
||||
parsed = parse_arguments(["new_host", "--port", "1515"])
|
||||
|
||||
def test_i_can_override_hostname_and_port():
|
||||
parsed = parse_arguments(["new_host", "--port", "1515"])
|
||||
assert parsed.hostname == "new_host"
|
||||
assert parsed.port == 1515
|
||||
|
||||
assert parsed.hostname == "new_host"
|
||||
assert parsed.port == 1515
|
||||
parsed = parse_arguments(["new_host", "-p", "1515"])
|
||||
|
||||
parsed = parse_arguments(["new_host", "-p", "1515"])
|
||||
assert parsed.hostname == "new_host"
|
||||
assert parsed.port == 1515
|
||||
|
||||
assert parsed.hostname == "new_host"
|
||||
assert parsed.port == 1515
|
||||
def test_i_can_provide_user_and_password(self):
|
||||
parsed = parse_arguments(["--username", "my_user", "--password", "my_password"])
|
||||
assert parsed.username == "my_user"
|
||||
assert parsed.password == "my_password"
|
||||
|
||||
parsed = parse_arguments(["-u", "my_user", "-P", "my_password"])
|
||||
assert parsed.username == "my_user"
|
||||
assert parsed.password == "my_password"
|
||||
|
||||
def test_i_can_provide_user_and_password():
|
||||
parsed = parse_arguments(["--username", "my_user", "--password", "my_password"])
|
||||
assert parsed.username == "my_user"
|
||||
assert parsed.password == "my_password"
|
||||
|
||||
parsed = parse_arguments(["-u", "my_user", "-P", "my_password"])
|
||||
assert parsed.username == "my_user"
|
||||
assert parsed.password == "my_password"
|
||||
|
||||
|
||||
def test_i_can_manage_when_no_server():
|
||||
client = SheerkaClient("http://localhost", 80)
|
||||
res = client.check_url()
|
||||
|
||||
assert res.status is False
|
||||
assert res.message == "Connection refused."
|
||||
|
||||
|
||||
def test_i_can_manage_when_resource_is_not_found():
|
||||
with MockServer([]):
|
||||
client = SheerkaClient("http://localhost", 5000)
|
||||
def test_i_can_manage_when_no_server(self):
|
||||
client = SheerkaClient("http://localhost", 80)
|
||||
res = client.check_url()
|
||||
|
||||
assert not res.status
|
||||
assert res.message == '{"detail":"Not Found"}'
|
||||
assert res.status is False
|
||||
assert res.message == "Connection refused."
|
||||
|
||||
def test_i_can_manage_when_resource_is_not_found(self):
|
||||
with MockServer([]):
|
||||
client = SheerkaClient("http://localhost", 5000)
|
||||
res = client.check_url()
|
||||
|
||||
def test_i_can_connect_to_a_server():
|
||||
with MockServer([{
|
||||
"path": "/",
|
||||
"response": "Hello world"
|
||||
}]):
|
||||
client = SheerkaClient("http://localhost", 5000)
|
||||
res = client.check_url()
|
||||
assert res.status
|
||||
assert res.message == '"Hello world"'
|
||||
assert not res.status
|
||||
assert res.message == '{"detail":"Not Found"}'
|
||||
|
||||
def test_i_can_connect_to_a_server(self):
|
||||
with MockServer([{
|
||||
"path": "/",
|
||||
"response": "Hello world"
|
||||
}]):
|
||||
client = SheerkaClient("http://localhost", 5000)
|
||||
res = client.check_url()
|
||||
assert res.status
|
||||
assert res.message == '"Hello world"'
|
||||
|
||||
def test_i_can_authenticate_with_valid_credentials():
|
||||
with MockServer([{
|
||||
"path": "/",
|
||||
"response": "Hello world"
|
||||
}, {
|
||||
"method": "post",
|
||||
"path": "/token",
|
||||
"response": {"access_token": "xxxx", "token_type": "bearer"}
|
||||
}]):
|
||||
client = SheerkaClient("http://localhost", 5000)
|
||||
res = client.connect("valid_username", "valid_password")
|
||||
assert res.status
|
||||
assert res.message == "Connected as valid_username"
|
||||
|
||||
|
||||
def test_i_can_manage_when_authentication_fails():
|
||||
with MockServer([{
|
||||
"path": "/",
|
||||
"response": "Hello world"
|
||||
}, {
|
||||
"method": "post",
|
||||
"path": "/token",
|
||||
"exception": HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Incorrect username or password",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
}]):
|
||||
client = SheerkaClient("http://localhost", 5000)
|
||||
res = client.connect("username", "wrong_password")
|
||||
assert not res.status
|
||||
assert res.message == 'Incorrect username or password'
|
||||
def test_i_can_manage_when_authentication_fails(self):
|
||||
with MockServer([{
|
||||
"path": "/",
|
||||
"response": "Hello world"
|
||||
}, {
|
||||
"method": "post",
|
||||
"path": "/token",
|
||||
"exception": HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Incorrect username or password",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
}]):
|
||||
client = SheerkaClient("http://localhost", 5000)
|
||||
res = client.connect("username", "wrong_password")
|
||||
assert not res.status
|
||||
assert res.message == 'Incorrect username or password'
|
||||
|
||||
@@ -0,0 +1,223 @@
|
||||
import pytest
|
||||
|
||||
from common.global_symbols import NotInit
|
||||
from core.concept import Concept, ConceptMetadata, DefinitionType
|
||||
from helpers import GetNextId, get_concept, get_concepts, get_metadata, get_metadatas
|
||||
|
||||
|
||||
def test_i_can_get_default_value_when_get_metadata():
|
||||
metadata = get_metadata()
|
||||
assert metadata.id is None
|
||||
assert metadata.name is None
|
||||
assert metadata.name is None
|
||||
assert metadata.body is None
|
||||
assert metadata.id is None
|
||||
assert metadata.key is None
|
||||
assert metadata.where is None
|
||||
assert metadata.pre is None
|
||||
assert metadata.post is None
|
||||
assert metadata.ret is None
|
||||
assert metadata.definition is None
|
||||
assert metadata.definition_type == DefinitionType.DEFAULT
|
||||
assert metadata.desc is None
|
||||
assert metadata.props == {}
|
||||
assert metadata.variables == tuple()
|
||||
assert metadata.parameters == []
|
||||
assert metadata.bound_body is None
|
||||
assert metadata.is_builtin is False
|
||||
assert metadata.is_unique is False
|
||||
assert metadata.autouse is False
|
||||
|
||||
|
||||
def test_i_can_use_shortcut_to_declare_variables():
|
||||
metadata = get_metadata(variables=(("var1", NotInit), ("var2", "value")))
|
||||
assert metadata.variables == (("var1", NotInit), ("var2", "value")) # default behaviour
|
||||
|
||||
metadata = get_metadata(variables=[("var1", NotInit), ("var2", "value")])
|
||||
assert metadata.variables == (("var1", NotInit), ("var2", "value")) # lists are transformed into tuples
|
||||
|
||||
metadata = get_metadata(variables=["var1", "var2"])
|
||||
assert metadata.variables == (("var1", NotInit), ("var2", NotInit)) # expanded
|
||||
|
||||
|
||||
def test_i_can_clone():
|
||||
metadata = ConceptMetadata(
|
||||
"id",
|
||||
"name",
|
||||
"key",
|
||||
True,
|
||||
True,
|
||||
"body",
|
||||
"where",
|
||||
"pre",
|
||||
"post",
|
||||
"ret",
|
||||
"definition",
|
||||
DefinitionType.BNF,
|
||||
"desc",
|
||||
True,
|
||||
"bound_body",
|
||||
{"prop": "value"},
|
||||
(("variable", "value"),),
|
||||
("p1",),
|
||||
"digest",
|
||||
("all_attr",),
|
||||
)
|
||||
|
||||
clone = metadata.clone()
|
||||
for attr, value in vars(metadata).items():
|
||||
clone_value = getattr(clone, attr)
|
||||
assert clone_value == value
|
||||
|
||||
|
||||
def test_i_can_override_values_when_i_clone_metadata():
|
||||
metadata = get_metadata()
|
||||
assert metadata.clone(name="new_name").name == "new_name"
|
||||
assert metadata.clone(body="new_body").body == "new_body"
|
||||
assert metadata.clone(key="new_key").key == "new_key"
|
||||
assert metadata.clone(where="new_where").where == "new_where"
|
||||
assert metadata.clone(pre="new_pre").pre == "new_pre"
|
||||
assert metadata.clone(post="new_post").post == "new_post"
|
||||
assert metadata.clone(ret="new_ret").ret == "new_ret"
|
||||
assert metadata.clone(definition="new_definition").definition == "new_definition"
|
||||
assert metadata.clone(definition_type="new_definition_type").definition_type == "new_definition_type"
|
||||
assert metadata.clone(desc="new_desc").desc == "new_desc"
|
||||
assert metadata.clone(props="new_props").props == "new_props"
|
||||
assert metadata.clone(variables="new_variables").variables == "new_variables"
|
||||
assert metadata.clone(parameters="new_parameters").parameters == "new_parameters"
|
||||
assert metadata.clone(bound_body="new_bound_body").bound_body == "new_bound_body"
|
||||
assert metadata.clone(is_builtin="new_is_builtin").is_builtin == "new_is_builtin"
|
||||
assert metadata.clone(is_unique="new_is_unique").is_unique == "new_is_unique"
|
||||
assert metadata.clone(autouse="new_autouse").autouse == "new_autouse"
|
||||
assert metadata.clone(digest="new_digest").digest == "new_digest"
|
||||
assert metadata.clone(all_attrs="new_all_attrs").all_attrs == "new_all_attrs"
|
||||
|
||||
|
||||
def test_i_cannot_change_the_id_when_cloning():
|
||||
with pytest.raises(TypeError):
|
||||
metadata = get_metadata()
|
||||
metadata.clone(id="new_id")
|
||||
|
||||
|
||||
def test_i_can_auto_init():
|
||||
next_id = GetNextId()
|
||||
metadata = get_metadata("a plus b", body="a + b", variables=["a", "b"]).auto_init(next_id)
|
||||
|
||||
assert metadata.name == "a plus b"
|
||||
assert metadata.id == "1001"
|
||||
assert metadata.key == "__var__0 plus __var__1"
|
||||
assert metadata.all_attrs == ('#where#', '#pre#', '#post#', '#body#', '#ret#', 'a', 'b')
|
||||
assert metadata.is_unique is False
|
||||
assert metadata.is_builtin is False
|
||||
assert metadata.definition_type is DefinitionType.DEFAULT
|
||||
assert metadata.digest == '426d88b1b928a421366c12fb283267b89610cbfb9efb470813ea8b5ba37a2013'
|
||||
|
||||
|
||||
def test_sequences_are_incremented_when_multiples_call():
|
||||
next_id = GetNextId()
|
||||
assert get_metadata("foo").auto_init(next_id).id == "1001"
|
||||
assert get_metadata("bar").auto_init(next_id).id == "1002"
|
||||
|
||||
|
||||
def test_i_can_get_multiple_metadatas():
|
||||
res = get_metadatas("foo", get_metadata("bar", body="body"))
|
||||
|
||||
assert len(res) == 2
|
||||
|
||||
metadata = res[0]
|
||||
assert isinstance(metadata, ConceptMetadata)
|
||||
assert metadata.name == "foo"
|
||||
assert metadata.body is None
|
||||
assert metadata.key is None
|
||||
assert metadata.id is None
|
||||
|
||||
metadata = res[1]
|
||||
assert isinstance(metadata, ConceptMetadata)
|
||||
assert metadata.name == "bar"
|
||||
assert metadata.body == "body"
|
||||
assert metadata.key is None
|
||||
assert metadata.id is None
|
||||
|
||||
|
||||
def test_i_can_get_multiple_already_initialized_metadatas():
|
||||
res = get_metadatas("foo", get_metadata("bar", body="body"), next_id=GetNextId())
|
||||
|
||||
assert len(res) == 2
|
||||
|
||||
metadata = res[0]
|
||||
assert isinstance(metadata, ConceptMetadata)
|
||||
assert metadata.name == "foo"
|
||||
assert metadata.body is None
|
||||
assert metadata.key == "foo"
|
||||
assert metadata.id == "1001"
|
||||
|
||||
metadata = res[1]
|
||||
assert isinstance(metadata, ConceptMetadata)
|
||||
assert metadata.name == "bar"
|
||||
assert metadata.body == "body"
|
||||
assert metadata.key == "bar"
|
||||
assert metadata.id == "1002"
|
||||
|
||||
|
||||
def test_i_can_get_a_concept():
|
||||
foo = get_concept("foo", variables=("var1",))
|
||||
|
||||
assert isinstance(foo, Concept)
|
||||
assert foo.name == "foo"
|
||||
assert foo.key is None
|
||||
assert foo.id is None
|
||||
assert foo.all_attrs() == ('#where#', '#pre#', '#post#', '#body#', '#ret#', 'var1')
|
||||
|
||||
|
||||
def test_i_can_request_basic_initialization_when_getting_a_concept():
|
||||
next_id = GetNextId()
|
||||
foo = get_concept("foo", variables=("var1",), sequence=next_id)
|
||||
|
||||
assert foo.name == "foo"
|
||||
assert foo.key == "foo"
|
||||
assert foo.id == "1001"
|
||||
assert foo.all_attrs() == ('#where#', '#pre#', '#post#', '#body#', '#ret#', 'var1')
|
||||
|
||||
|
||||
def test_i_can_get_multiple_concepts(context):
|
||||
next_id = GetNextId()
|
||||
|
||||
foo, bar, baz = get_concepts(context,
|
||||
"foo",
|
||||
"bar",
|
||||
get_concept("baz", definition="baz var1", variables=("var1",)),
|
||||
sequence=next_id)
|
||||
assert foo.name == "foo"
|
||||
assert foo.id == "1001"
|
||||
assert foo.key == "foo"
|
||||
assert bar.name == "bar"
|
||||
assert bar.id == "1002"
|
||||
assert bar.key == "bar"
|
||||
assert baz.name == "baz"
|
||||
assert baz.id == "1003"
|
||||
assert baz.key == "baz __var__0"
|
||||
|
||||
|
||||
def test_i_can_get_multiple_concepts_using_sheerka(sheerka, context):
|
||||
foo, bar, baz = get_concepts(context,
|
||||
"foo",
|
||||
"bar",
|
||||
get_concept("baz", definition="baz var1", variables=("var1",)),
|
||||
use_sheerka=True)
|
||||
assert foo.name == "foo"
|
||||
assert foo.id == "1001"
|
||||
assert foo.key == "foo"
|
||||
assert bar.name == "bar"
|
||||
assert bar.id == "1002"
|
||||
assert bar.key == "bar"
|
||||
assert baz.name == "baz"
|
||||
assert baz.id == "1003"
|
||||
assert baz.key == "baz __var__0"
|
||||
assert baz.get_value("var1") is NotInit
|
||||
|
||||
# the concepts are defined in Sheerka, so we can instantiate them
|
||||
baz2 = sheerka.newn("baz", var1="value for var1")
|
||||
assert baz2.name == "baz"
|
||||
assert baz2.id == "1003"
|
||||
assert baz2.key == "baz __var__0"
|
||||
assert baz2.get_value("var1") == "value for var1"
|
||||
Reference in New Issue
Block a user