Added first version of DebugManager. Implemented draft of the rule engine

This commit is contained in:
2020-11-20 13:41:45 +01:00
parent cd066881b4
commit 315f8ea09b
156 changed files with 8388 additions and 2852 deletions
+21 -16
View File
@@ -86,6 +86,15 @@ class BaseCache:
with self._lock:
return self._get(key)
def get_all(self):
    """
    Return all values currently held in the cache.

    Only looks at the in-memory store; this method never fetches from the
    remote repository.

    :return: a snapshot list of the cached values, taken while holding the
        lock so the result cannot change after the lock is released
    """
    with self._lock:
        # list() snapshots the values. Returning the live dict view would
        # let it mutate (or raise during iteration) once the lock is gone.
        return list(self._cache.values())
def inner_get(self, key):
    """
    Look up *key* directly in the underlying store, without taking the
    lock.

    :param key: cache key to look up
    :return: the stored value
    :raises KeyError: when the key is absent
    """
    store = self._cache
    return store[key]
@@ -108,6 +117,17 @@ class BaseCache:
except KeyError:
pass
def populate(self, populate_function, get_key_function):
    """
    Fill the cache from a bulk data source.

    :param populate_function: zero-argument callable producing the items
    :param get_key_function: callable deriving the cache key from an item
    :return: None
    """
    with self._lock:
        for entry in populate_function():
            key = get_key_function(entry)
            self.put(key, entry)
def has(self, key):
"""
Return True if the key is in the cache
@@ -194,7 +214,7 @@ class BaseCache:
with self._lock:
return self._cache.copy()
def init_from(self, dump):
def init_from_dump(self, dump):
    """
    Restore the cache state from a previously produced dump.

    :param dump: mapping with a "current_size" entry and a "cache" dict
    :return: None
    """
    with self._lock:
        self._current_size = dump["current_size"]
        # Copy so later mutation of the dump does not leak into the cache.
        self._cache = dict(dump["cache"])
@@ -259,18 +279,3 @@ class BaseCache:
def _delete(self, key, value):
    # Backend hook: concrete cache subclasses must override this to remove
    # the key/value pair from their underlying storage.
    raise NotImplementedError()
# def _put(self, key, value):
# self._cache[key] = value
# self._add_to_add(key)
# return True
#
#
# def _update(self, old_key, old_value, new_key, new_value):
# self._cache[new_key] = new_value
# self._add_to_add(new_key)
#
# if new_key != old_key:
# del (self._cache[old_key])
# self._add_to_remove(old_key)
+29 -6
View File
@@ -2,7 +2,7 @@ from dataclasses import dataclass, field
from threading import RLock
from typing import Callable
from cache.Cache import Cache
from cache.BaseCache import BaseCache
from core.concept import Concept
@@ -18,7 +18,7 @@ class MultipleEntryError(Exception):
@dataclass
class CacheDefinition:
cache: Cache
cache: BaseCache
use_ref: bool
get_key: Callable[[Concept], str] = field(repr=False)
persist: bool = True
@@ -137,6 +137,15 @@ class CacheManager:
with self._lock:
return self.caches[cache_name].cache.get(key)
def get_cache(self, cache_name):
    """
    Return the BaseCache instance registered under *cache_name*.

    :param cache_name: name the cache was registered with
    :return: the cache object itself (not a copy)
    :raises KeyError: when no cache is registered under that name
    """
    with self._lock:
        definition = self.caches[cache_name]
        return definition.cache
def copy(self, cache_name):
"""
Get a copy of the content of the whole cache as a dictionary
@@ -170,6 +179,17 @@ class CacheManager:
self.caches[cache_name].cache.delete(key, value)
self.is_dirty = True
def populate(self, cache_name, populate_function, get_key_function):
    """
    Populate a specific cache with a bulk set of items.

    :param cache_name: name of the target cache
    :param populate_function: zero-argument callable producing the items
    :param get_key_function: callable deriving the cache key from an item
    :return: None
    """
    with self._lock:
        # BUG FIX: this used to call cache.init(...), but the cache API
        # is populate(populate_function, get_key_function); "init" does
        # not exist on BaseCache, so the old call raised AttributeError.
        self.caches[cache_name].cache.populate(populate_function, get_key_function)
def has(self, cache_name, key):
"""
True if the value is in the cache only. This never tries to look in a remote repository
@@ -210,7 +230,7 @@ class CacheManager:
for values in items.values():
update_full_serialisation(values, value)
elif isinstance(items, Concept):
items.metadata.full_serialization = value
items.get_metadata().full_serialization = value
if self.cache_only:
return
@@ -245,6 +265,10 @@ class CacheManager:
cache_def.cache.clear()
def dump(self):
"""
For test purpose, dumps the whole content of the cache manager
:return:
"""
with self._lock:
res = {}
for cache_name, cache_def in self.caches.items():
@@ -252,11 +276,11 @@ class CacheManager:
return res
def init_from(self, dump):
def init_from_dump(self, dump):
    """
    Load every registered cache from *dump* and return self for chaining.

    Dump entries whose name is not a registered cache are ignored.

    :param dump: mapping of cache name -> per-cache dump content
    :return: self
    """
    with self._lock:
        registered = self.caches
        for name, content in dump.items():
            if name in registered:
                registered[name].cache.init_from_dump(content)
    return self
@@ -267,4 +291,3 @@ class CacheManager:
self.caches.clear()
self.concept_caches.clear()
self.is_dirty = False
+43
View File
@@ -0,0 +1,43 @@
class FastCache:
    """
    Small bounded in-memory key/value cache.

    Eviction is by least-recently-*written* key: put() refreshes a key's
    position, but get() does not, so a key that is only read can still be
    evicted. When the cache is full, inserting a NEW key drops the oldest
    written one; overwriting an existing key never evicts anything.
    """

    def __init__(self, max_size=256):
        # Hard cap on the number of distinct keys held.
        self.max_size = max_size
        # key -> value storage.
        self.cache = {}
        # Keys in write order, oldest first; lru[0] is the next eviction
        # candidate.
        self.lru = []

    def put(self, key, value):
        """
        Store *value* under *key*, evicting the oldest-written entry only
        when a NEW key would exceed max_size.
        """
        if key in self.cache:
            # Overwrite: refresh the key's recency; nothing to evict.
            self.lru.remove(key)
        elif len(self.cache) >= self.max_size:
            # BUG FIX: eviction used to run before the membership test, so
            # overwriting an existing key at capacity wrongly evicted an
            # unrelated entry. Evict only when actually inserting a new key.
            oldest = self.lru.pop(0)
            del self.cache[oldest]
        self.cache[key] = value
        self.lru.append(key)

    def get(self, key):
        """
        Return the value stored under *key*, or None when absent.

        NOTE: reads do not refresh recency; only put() does.
        """
        try:
            return self.cache[key]
        except KeyError:
            return None

    def evict_by_key(self, predicate):
        """
        Remove every entry whose *key* satisfies *predicate*.

        :param predicate: callable taking a key, returning True to evict
        """
        # Collect first: cannot delete from the dict while iterating it.
        doomed = [k for k in self.cache if predicate(k)]
        for k in doomed:
            del self.cache[k]
            self.lru.remove(k)

    def copy(self):
        """Return a shallow copy of the cached key/value mapping."""
        return self.cache.copy()

    def clear(self):
        """Drop every entry and reset the eviction order."""
        self.cache.clear()
        self.lru.clear()
+6
View File
@@ -5,8 +5,14 @@ class SetCache(BaseCache):
"""
An in memory FIFO cache object
When the max_size is reached, the first element that was put is removed
You can use the same key multiple times, but the elements under this key will be unique
When there are multiple elements, a python set is used
>> self.put('key', 'value1')
>> assert {'value1'} == self.get('key')
>> self.put('key', 'value2')
>> assert {'value1', 'value2'} == self.get('key')
"""
def _put(self, key, value):