Files
Sheerka-Old/src/cache/CacheManager.py
T
kodjo e69745adc8 Fixed #100 : SheerkaAdmin: Add builtins() command
Fixed #99 : SheerkaQueryManager: I can manage contains predicate when filtering objects
Fixed #97 : ERROR: list indices must be integers or slices, not Concept
Fixed #96 : SequenceNodeParser: SequenceNodeParser must correctly handle concept definition
Fixed #95 : ResolveAmbiguity must not remove concepts that do not require evaluation
Fixed #94 : Concepts with the same key are lost when new ontology
Fixed #93 : Introduce BuiltinConcepts.EVAL_GLOBAL_TRUTH_REQUESTED
Fixed #92 : ExpressionParser: Implement compile_disjunctions()
Fixed #91 : Implement get_concepts_complexity(context, concepts, concept_parts)
Fixed #90 : ResolveAmbiguity : where predicate is not used to resolve ambiguity
Fixed #89 : ResolveAmbiguityEvaluator: Concepts embedded in ConceptNode are not resolved
Fixed #88: SyaNodeParser: Parse multiple parameters when some of them are not recognized
Fixed #87: SyaNodeParser : Parse the multiple parameters
2021-07-31 08:52:00 +02:00

340 lines
11 KiB
Python

from dataclasses import dataclass, field
from threading import RLock
from typing import Callable
from cache.BaseCache import BaseCache
from core.concept import Concept
from core.global_symbols import NotFound
@dataclass
class MultipleEntryError(Exception):
    """
    Raised when trying to alter a cache entry that holds multiple elements
    without specifying which element (the origin) is targeted.
    """
    # Cache key of the multi-element entry that could not be altered.
    key: str
@dataclass
class ConceptNotFound(Exception):
    """
    Raised when trying to remove a concept that is not found in the
    reference concept cache nor in the alternate repositories.
    """
    # The concept whose removal was requested.
    concept: object
@dataclass
class CacheDefinition:
    """
    Describes one registered cache: the cache object itself plus the
    options supplied at registration time.
    """
    # The underlying cache instance.
    cache: BaseCache
    # Forwarded to the persistence transaction on commit; presumably controls
    # storing entries by reference rather than by value — TODO confirm in sdp.
    use_ref: bool
    # Computes the cache key for a given Concept.
    # NOTE(review): register_cache() passes None here for non-concept caches.
    get_key: Callable[[Concept], str] = field(repr=False)
    # When False, commit() skips this cache entirely.
    persist: bool = True
class CacheManager:
    """
    Single class to manage all the caches.

    All public operations are guarded by a reentrant lock so a manager can be
    shared between threads; ``is_dirty`` records whether any cache content has
    changed since the last successful commit().
    """

    def __init__(self, cache_only, sdp=None):
        """
        :param cache_only: if True, disable all remote access when a key is not found
        :param sdp: persistence provider used to configure caches and to commit
        """
        self.cache_only = cache_only  # if true disable all remote access when key not found
        self.sdp = sdp
        self.caches = {}  # name -> CacheDefinition
        self.concept_caches = []  # names of the caches that index concepts
        self.is_dirty = False  # to indicate that the value of a cache has changed
        self._lock = RLock()  # reentrant: public methods may call each other under the lock

    def register_concept_cache(self, name, cache, get_key, use_ref):
        """
        Register a concept cache along with how to compute its key.
        :param name: unique cache name
        :param cache: the cache instance
        :param get_key: callable computing the key from a Concept
        :param use_ref: forwarded to the persistence transaction on commit
        :return:
        """
        with self._lock:
            if self.cache_only:
                cache.disable_default()
            if self.sdp:
                cache.configure(sdp=self.sdp)
            self.caches[name] = CacheDefinition(cache, use_ref, get_key)
            self.concept_caches.append(name)

    def register_cache(self, name, cache, persist=True, use_ref=False):
        """
        Register a non-concept cache (no key function).
        :param name: unique cache name
        :param cache: the cache instance
        :param persist: when False, commit() skips this cache
        :param use_ref: forwarded to the persistence transaction on commit
        :return:
        """
        with self._lock:
            if self.sdp:
                cache.configure(sdp=self.sdp)
            if self.cache_only:
                cache.disable_default()
                # nothing can be persisted when remote access is disabled
                persist = False
            self.caches[name] = CacheDefinition(cache, use_ref, None, persist)

    def add_concept(self, concept, alt_sdp=None):
        """
        We need multiple indexes to retrieve a concept,
        so the new concept is dispatched into every registered concept cache.
        :param concept:
        :param alt_sdp: if not found in self.sdp, look in other repositories
        :return:
        """
        with self._lock:
            for name in self.concept_caches:
                cache_def = self.caches[name]
                key = cache_def.get_key(concept)
                cache_def.cache.put(key, concept, alt_sdp)
            self.is_dirty = True

    def update_concept(self, old, new, alt_sdp=None):
        """
        Update a concept in every concept cache (the key may change between versions).
        :param old: old version of the concept
        :param new: new version of the concept
        :param alt_sdp: if not found in self.sdp, look in other repositories
        :return:
        """
        with self._lock:
            for cache_name in self.concept_caches:
                cache_def = self.caches[cache_name]
                old_key = cache_def.get_key(old)
                new_key = cache_def.get_key(new)
                cache_def.cache.update(old_key, old, new_key, new, alt_sdp=alt_sdp)
            self.is_dirty = True

    def remove_concept(self, concept, alt_sdp=None):
        """
        Remove a concept from all concept caches.
        :param concept:
        :param alt_sdp: if not found in self.sdp, look in other repositories
        :return:
        :raises ConceptNotFound: when the concept is in neither the reference
            cache nor the alternate repositories
        """
        with self._lock:
            # the first concept cache must be the one where all concepts are unique
            # eg it has to be the concept by id
            ref_cache_def = self.caches[self.concept_caches[0]]
            concept_id = ref_cache_def.get_key(concept)
            ref_concept = ref_cache_def.cache.get(concept_id)
            if ref_concept is NotFound and alt_sdp:
                ref_concept = alt_sdp.get(self.concept_caches[0], concept_id)
            if ref_concept is NotFound:
                raise ConceptNotFound(concept)
            # delete using the reference version so every cache computes
            # its key from the same object
            for cache_name in self.concept_caches:
                cache_def = self.caches[cache_name]
                key = cache_def.get_key(ref_concept)
                cache_def.cache.delete(key, ref_concept, alt_sdp=alt_sdp)
            self.is_dirty = True

    def get(self, cache_name, key, alt_sdp=None):
        """
        From a named cache, get an entry.
        :param cache_name:
        :param key:
        :param alt_sdp: if not found in self.sdp, look in other repositories
        :return:
        """
        with self._lock:
            return self.caches[cache_name].cache.get(key, alt_sdp)

    def alt_get(self, cache_name, key):
        """
        Alternate way to get an entry from a named cache.
        This is mainly used for IncCache, in order to get the value without increasing it.
        :param cache_name:
        :param key:
        :return:
        """
        with self._lock:
            return self.caches[cache_name].cache.alt_get(key)

    def put(self, cache_name, key, value, alt_sdp=None):
        """
        Add an entry to a named cache and mark the manager dirty.
        :param cache_name:
        :param key:
        :param value:
        :param alt_sdp: if not found in self.sdp, look in other repositories
        :return:
        """
        with self._lock:
            self.caches[cache_name].cache.put(key, value, alt_sdp)
            self.is_dirty = True

    def delete(self, cache_name, key, value=None, alt_sdp=None):
        """
        Delete an entry from a named cache; marks the manager dirty only when
        the underlying cache reports an actual deletion.
        :param cache_name:
        :param key:
        :param value:
        :param alt_sdp: if not found in self.sdp, look in other repositories
        :return:
        """
        with self._lock:
            if self.caches[cache_name].cache.delete(key, value, alt_sdp):
                self.is_dirty = True

    def get_cache(self, cache_name):
        """
        Return the BaseCache object registered under cache_name.
        :param cache_name:
        :return:
        """
        with self._lock:
            return self.caches[cache_name].cache

    def copy(self, cache_name):
        """
        Get a copy of the content of the whole cache as a dictionary.
        :param cache_name:
        :return:
        """
        # take the lock like every other accessor: copying while another
        # thread mutates the cache would yield an inconsistent snapshot
        with self._lock:
            return self.caches[cache_name].cache.copy()

    def populate(self, cache_name, populate_function, get_key_function, reset_events=False):
        """
        Populate a specific cache with a bunch of items.
        :param cache_name:
        :param populate_function: how to get the items
        :param get_key_function: how to get the key, out of an item
        :param reset_events: reset to_add and to_remove events after populate
        :return:
        """
        with self._lock:
            self.caches[cache_name].cache.populate(populate_function, get_key_function, reset_events)

    def force_value(self, cache_name, key, value):
        """
        Update the content of the cache, but does not raise any event.
        """
        with self._lock:
            self.caches[cache_name].cache.force_value(key, value)

    def remove_initialized_key(self, cache_name, key):
        """
        Forward remove_initialized_key to the named cache.
        """
        with self._lock:
            self.caches[cache_name].cache.remove_initialized_key(key)

    def has(self, cache_name, key):
        """
        True if the value is in cache only. Never try to look in a remote repository.
        :param cache_name:
        :param key:
        :return:
        """
        with self._lock:
            return self.caches[cache_name].cache.has(key)

    def exists(self, cache_name, key):
        """
        True if the value is in cache.
        If not found, may search in a remote repository.
        :param cache_name:
        :param key:
        :return:
        """
        if self.cache_only:
            # remote access disabled: a pure local lookup is authoritative
            return self.has(cache_name, key)
        with self._lock:
            return self.caches[cache_name].cache.exists(key)

    def commit(self, context):
        """
        Persist all the caches into a physical persistence storage.
        Does nothing in cache_only mode. Clears is_dirty on success.
        :param context:
        :return:
        """
        def update_full_serialisation(items, value):
            # Recursively flag every Concept reachable from items so it is
            # fully serialized during the transaction, then restored after.
            # Take care, infinite recursion is not handled !!
            if isinstance(items, (list, set, tuple)):
                for item in items:
                    update_full_serialisation(item, value)
            elif isinstance(items, dict):
                for values in items.values():
                    update_full_serialisation(values, value)
            elif isinstance(items, Concept):
                items.get_metadata().full_serialization = value

        if self.cache_only:
            return
        with self._lock:
            with self.sdp.get_transaction(context.event.get_digest()) as transaction:
                for cache_name, cache_def in self.caches.items():
                    if not cache_def.persist:
                        continue
                    for key in cache_def.cache.to_remove:
                        transaction.remove(cache_name, key)
                    for key in cache_def.cache.to_add:
                        if key == "*self*":
                            # sentinel: persist the whole cache content at once
                            transaction.add(cache_name, None, cache_def.cache.dump()["cache"])
                        else:
                            to_save = cache_def.cache.inner_get(key)
                            update_full_serialisation(to_save, True)
                            transaction.add(cache_name, key, to_save, cache_def.use_ref)
                            update_full_serialisation(to_save, False)
                    cache_def.cache.reset_events()
            self.is_dirty = False

    def clear(self, cache_name=None, set_is_cleared=True):
        """
        Clear one cache (when cache_name is given) or all of them.
        """
        with self._lock:
            if cache_name:
                self.caches[cache_name].cache.clear(set_is_cleared)
            else:
                for cache_def in self.caches.values():
                    cache_def.cache.clear(set_is_cleared)

    def dump(self):
        """
        For test purpose, dumps the whole content of the cache manager.
        :return:
        """
        with self._lock:
            res = {}
            for cache_name, cache_def in self.caches.items():
                res[cache_name] = cache_def.cache.dump()
            return res

    def init_from_dump(self, dump):
        """
        Restore previously dumped content into the already-registered caches;
        unknown cache names in the dump are ignored.
        :return: self, for chaining
        """
        with self._lock:
            for cache_name, content in dump.items():
                if cache_name in self.caches:
                    self.caches[cache_name].cache.init_from_dump(content)
            return self

    def reset(self, cache_only):
        """For unit test speed enhancement"""
        # hold the lock for the whole reset so no reader observes a
        # half-cleared manager (RLock makes the nested clear() safe)
        with self._lock:
            self.clear()
            self.cache_only = cache_only
            self.caches.clear()
            self.concept_caches.clear()
            self.is_dirty = False