294 lines
9.1 KiB
Python
294 lines
9.1 KiB
Python
from dataclasses import dataclass, field
from threading import RLock
from typing import Callable, Optional

from cache.BaseCache import BaseCache
from core.concept import Concept
|
|
|
|
|
|
class MultipleEntryError(Exception):
    """
    Exception raised when trying to alter an entry with multiple elements
    without giving the origin of the element.
    """

    def __init__(self, key):
        """
        :param key: the cache key that holds multiple elements
        """
        # BUGFIX: forward a message to Exception so str(exc) and tracebacks
        # are informative instead of empty
        super().__init__(f"multiple elements under key {key!r}: the origin of the element is required")
        self.key = key
|
|
|
|
|
|
@dataclass
class CacheDefinition:
    """
    Registration record binding a cache to the way the manager uses it.
    """

    cache: BaseCache  # the underlying cache instance
    use_ref: bool  # True if persisted entries are stored by reference
    # Key computation for concept caches; None for plain caches.
    # BUGFIX: the annotation previously claimed a required Callable, but
    # register_cache passes None explicitly — make it Optional with a
    # backward-compatible default.
    get_key: Optional[Callable[[Concept], str]] = field(default=None, repr=False)
    persist: bool = True  # False to skip this cache on commit
|
|
|
|
|
|
class CacheManager:
    """
    Single class to manage all the caches.

    Every public operation is serialized through a single reentrant lock,
    so one manager instance can be shared between threads.
    """

    def __init__(self, cache_only):
        """
        :param cache_only: if True, disable all remote access when a key is
            not found in a cache
        """
        self.cache_only = cache_only  # if true disable all remote access when key not found
        self.caches = {}  # name -> CacheDefinition
        self.concept_caches = []  # names of the caches that index concepts
        self.is_dirty = False  # to indicate that the value of a cache has changed

        self._lock = RLock()

    def register_concept_cache(self, name, cache, get_key, use_ref):
        """
        Define which type of cache along with how to compute the key.

        The cache is also recorded as a concept cache, so concepts added
        through add_concept/update_concept are dispatched to it.

        :param name: unique name of the cache
        :param cache: the cache instance
        :param get_key: callable returning the cache key for a concept
        :param use_ref: True if persisted entries are stored by reference
        :return:
        """
        with self._lock:
            if self.cache_only:
                cache.disable_default()
            self.caches[name] = CacheDefinition(cache, use_ref, get_key)
            self.concept_caches.append(name)

    def register_cache(self, name, cache, persist=True, use_ref=False):
        """
        Define a generic (non-concept) cache.

        :param name: unique name of the cache
        :param cache: the cache instance
        :param persist: False to skip this cache on commit
        :param use_ref: True if persisted entries are stored by reference
        :return:
        """
        with self._lock:
            if self.cache_only:
                cache.disable_default()
            self.caches[name] = CacheDefinition(cache, use_ref, None, persist)

    def add_concept(self, concept):
        """
        We need multiple indexes to retrieve a concept,
        so the new concept is dispatched into every registered concept cache.

        :param concept:
        :return:
        """
        with self._lock:
            for name in self.concept_caches:
                cache_def = self.caches[name]
                key = cache_def.get_key(concept)
                cache_def.cache.put(key, concept)

            self.is_dirty = True

    def update_concept(self, old, new):
        """
        Update a concept in every concept cache.

        :param old: old version of the concept
        :param new: new version of the concept
        :return:
        """
        with self._lock:
            for cache_name in self.concept_caches:
                cache_def = self.caches[cache_name]

                old_key = cache_def.get_key(old)
                new_key = cache_def.get_key(new)

                cache_def.cache.update(old_key, old, new_key, new)

            self.is_dirty = True

    # NOTE: updating an entry whose key may have changed requires an
    # invariant: by convention the keys in the first cache cannot change.

    def get(self, cache_name, key):
        """
        From a cache, get an entry.

        :param cache_name:
        :param key:
        :return: the cached value (the cache itself may look it up remotely)
        """
        with self._lock:
            return self.caches[cache_name].cache.get(key)

    def get_cache(self, cache_name):
        """
        Return the underlying BaseCache object.

        :param cache_name:
        :return:
        """
        with self._lock:
            return self.caches[cache_name].cache

    def copy(self, cache_name):
        """
        Get a copy of the content of the whole cache as a dictionary.

        :param cache_name:
        :return:
        """
        # BUGFIX: take the lock like every other accessor, so the copy cannot
        # observe the cache while another thread is mutating it
        with self._lock:
            return self.caches[cache_name].cache.copy()

    def put(self, cache_name, key, value):
        """
        Add an entry to a cache.

        :param cache_name:
        :param key:
        :param value:
        :return:
        """
        with self._lock:
            self.caches[cache_name].cache.put(key, value)
            self.is_dirty = True

    def delete(self, cache_name, key, value=None):
        """
        Delete an entry from the cache.

        :param cache_name:
        :param key:
        :param value: the specific element to remove when the entry holds
            several, or None
        :return:
        """
        with self._lock:
            self.caches[cache_name].cache.delete(key, value)
            self.is_dirty = True

    def populate(self, cache_name, populate_function, get_key_function):
        """
        Populate a specific cache with a bunch of items.

        :param cache_name:
        :param populate_function: how to get the items
        :param get_key_function: how to get the key, out of an item
        :return:
        """
        with self._lock:
            self.caches[cache_name].cache.init(populate_function, get_key_function)

    def has(self, cache_name, key):
        """
        True if the value is in cache only. Never try to look in a remote repository.

        :param cache_name:
        :param key:
        :return:
        """
        with self._lock:
            return self.caches[cache_name].cache.has(key)

    def exists(self, cache_name, key):
        """
        True if the value is in cache.

        If not found, may search in a remote repository (unless cache_only).

        :param cache_name:
        :param key:
        :return:
        """
        with self._lock:
            # read cache_only under the lock for a consistent view
            if self.cache_only:
                return self.has(cache_name, key)
            return self.caches[cache_name].cache.exists(key)

    def commit(self, context):
        """
        Persist all the caches into a physical persistence storage.

        :param context: execution context providing the transaction factory
        :return:
        """

        def update_full_serialisation(items, value):
            # Recursively flag every Concept found in items for full
            # serialization. Take care, infinite recursion is not handled !!
            if isinstance(items, (list, set, tuple)):
                for item in items:
                    update_full_serialisation(item, value)
            elif isinstance(items, dict):
                for values in items.values():
                    update_full_serialisation(values, value)
            elif isinstance(items, Concept):
                items.get_metadata().full_serialization = value

        if self.cache_only:
            # nothing to persist when running from cache only
            return

        with self._lock:
            with context.sheerka.sdp.get_transaction(context.event.get_digest()) as transaction:
                for cache_name, cache_def in self.caches.items():
                    if not cache_def.persist:
                        continue

                    for key in cache_def.cache.to_remove:
                        transaction.remove(cache_name, key)

                    for key in cache_def.cache.to_add:
                        if key == "*self*":
                            # special key: the cache content is saved as a whole
                            transaction.add(cache_name, None, cache_def.cache.dump()["cache"])
                        else:
                            to_save = cache_def.cache.inner_get(key)
                            # temporarily switch to full serialization while persisting
                            update_full_serialisation(to_save, True)
                            transaction.add(cache_name, key, to_save, cache_def.use_ref)
                            update_full_serialisation(to_save, False)

                    cache_def.cache.reset_events()
                self.is_dirty = False

    def clear(self, cache_name=None):
        """
        Clear one cache (by name) or every cache.

        :param cache_name: name of the cache to clear, or None for all
        :return:
        """
        with self._lock:
            if cache_name:
                self.caches[cache_name].cache.clear()
            else:
                for cache_def in self.caches.values():
                    cache_def.cache.clear()

    def dump(self):
        """
        For test purpose, dumps the whole content of the cache manager.

        :return: dict mapping cache name to the cache dump
        """
        with self._lock:
            res = {}
            for cache_name, cache_def in self.caches.items():
                res[cache_name] = cache_def.cache.dump()

            return res

    def init_from_dump(self, dump):
        """
        Reload registered caches from a dump produced by dump().

        Cache names not registered on this manager are ignored.

        :param dump: dict mapping cache name to cache content
        :return: self, for chaining
        """
        with self._lock:
            for cache_name, content in dump.items():
                if cache_name in self.caches:
                    self.caches[cache_name].cache.init_from_dump(content)

            return self

    def reset(self, cache_only):
        """For unit test speed enhancement"""
        # BUGFIX: hold the lock for the whole reset, like every other mutator
        with self._lock:
            self.clear()
            self.cache_only = cache_only
            self.caches.clear()
            self.concept_caches.clear()
            self.is_dirty = False