Implemented SheerkaOntology
This commit is contained in:
Vendored
+132
-27
@@ -1,5 +1,8 @@
|
||||
from threading import RLock
|
||||
|
||||
from core.global_symbols import NotFound, Removed
|
||||
from core.utils import sheerka_deepcopy
|
||||
|
||||
MAX_INITIALIZED_KEY = 100
|
||||
|
||||
|
||||
@@ -10,14 +13,17 @@ class BaseCache:
|
||||
When you put the same key twice, the previous element is overridden
|
||||
"""
|
||||
|
||||
def __init__(self, max_size=None, default=None, extend_exists=None):
|
||||
def __init__(self, max_size=None, default=NotFound, extend_exists=None, alt_sdp_get=None, sdp=None):
|
||||
self._cache = {}
|
||||
self._max_size = max_size
|
||||
self._default = default # default value to return when key is not found. It can be a callable of key
|
||||
self._extend_exists = extend_exists # search in remote
|
||||
self._alt_sdp_get = alt_sdp_get # How to get the value when called by alt_sdp
|
||||
self._sdp = sdp # current instance of SheerkaDataProvider
|
||||
self._lock = RLock()
|
||||
self._current_size = 0
|
||||
self._initialized_keys = set() # to keep the list of the keys already requested (using get())
|
||||
self._is_cleared = False # indicate that clear() was called
|
||||
|
||||
self.to_add = set()
|
||||
self.to_remove = set()
|
||||
@@ -49,42 +55,75 @@ class BaseCache:
|
||||
def __repr__(self):
|
||||
return f"{self.__class__.__name__}(size={self._current_size}, #keys={len(self._cache)})"
|
||||
|
||||
def configure(self, max_size=None, default=None, extend_exists=None):
|
||||
def configure(self, max_size=None, default=NotFound, extend_exists=None, alt_sdp_get=None, sdp=None):
|
||||
if max_size is not None:
|
||||
self._max_size = max_size
|
||||
|
||||
if default is not None:
|
||||
if default is not NotFound:
|
||||
self._default = default
|
||||
|
||||
if extend_exists is not None:
|
||||
self._extend_exists = extend_exists
|
||||
|
||||
def disable_default(self):
|
||||
self._default = None
|
||||
if alt_sdp_get is not None:
|
||||
self._alt_sdp_get = alt_sdp_get
|
||||
|
||||
def put(self, key, value):
|
||||
if sdp is not None:
|
||||
self._sdp = sdp
|
||||
|
||||
return self
|
||||
|
||||
def auto_configure(self, cache_name):
|
||||
"""
|
||||
Convenient way to configure the cache
|
||||
:param cache_name:
|
||||
:return:
|
||||
"""
|
||||
self._default = lambda sdp, key: sdp.get(cache_name, key)
|
||||
self._extend_exists = lambda sdp, key: sdp.exists(cache_name, key)
|
||||
self._alt_sdp_get = lambda sdp, key: sdp.alt_get(cache_name, key) # by default, same than get
|
||||
|
||||
return self
|
||||
|
||||
def disable_default(self):
|
||||
self._default = (lambda sdp, key: NotFound) if self._sdp else (lambda key: NotFound)
|
||||
|
||||
def put(self, key, value, alt_sdp=None):
|
||||
"""
|
||||
Add a new entry in cache
|
||||
:param key:
|
||||
:param value:
|
||||
:param alt_sdp:
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
if self._max_size and self._current_size >= self._max_size:
|
||||
self.evict(self._max_size - self._current_size + 1)
|
||||
|
||||
if self._put(key, value):
|
||||
if self._put(key, value, alt_sdp):
|
||||
self._current_size += 1
|
||||
|
||||
def get(self, key):
|
||||
def get(self, key, alt_sdp=None):
|
||||
"""
|
||||
Retrieve an entry from the cache
|
||||
If the entry does not exist, will use the 'default' value or delegate
|
||||
:param key:
|
||||
:param alt_sdp: if not found in cache._sdp, look in other repositories
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
return self._get(key)
|
||||
return self._get(key, alt_sdp)
|
||||
|
||||
def alt_get(self, key):
|
||||
"""
|
||||
Alternate way to get an entry, from concept cache
|
||||
This is mainly used for IncCache, in order to get the value without increasing it
|
||||
It used for another cache, it must return the value from key WITHOUT modifying the state of the cache
|
||||
:param key:
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
return self._alt_get(key)
|
||||
|
||||
def get_all(self):
|
||||
"""
|
||||
@@ -98,36 +137,63 @@ class BaseCache:
|
||||
def inner_get(self, key):
|
||||
return self._cache[key]
|
||||
|
||||
def update(self, old_key, old_value, new_key, new_value):
|
||||
def update(self, old_key, old_value, new_key, new_value, alt_sdp=None):
|
||||
"""
|
||||
Update an entry in the cache
|
||||
:param old_key: key of the previous version of the entry
|
||||
:param old_value: previous version of the entry
|
||||
:param new_key: key of the entry
|
||||
:param new_value: new value
|
||||
:param alt_sdp: new value
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
self._update(old_key, old_value, new_key, new_value)
|
||||
self._update(old_key, old_value, new_key, new_value, alt_sdp)
|
||||
|
||||
def delete(self, key, value=None):
|
||||
def delete(self, key, value=None, alt_sdp=None):
|
||||
with self._lock:
|
||||
try:
|
||||
self._delete(key, value)
|
||||
self._sync(key)
|
||||
self._delete(key, value, alt_sdp)
|
||||
return True
|
||||
except KeyError:
|
||||
pass
|
||||
return False
|
||||
|
||||
def populate(self, populate_function, get_key_function):
|
||||
def populate(self, populate_function, get_key_function, reset_events=False):
|
||||
"""
|
||||
Initialise the cache with a bunch of data
|
||||
:param populate_function:
|
||||
:param get_key_function:
|
||||
:param reset_events:
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
for item in populate_function():
|
||||
if reset_events:
|
||||
to_add_copy = self.to_add.copy()
|
||||
to_remove_copy = self.to_remove.copy()
|
||||
|
||||
for item in (populate_function(self._sdp) if self._sdp else populate_function()):
|
||||
self.put(get_key_function(item), item)
|
||||
|
||||
if reset_events:
|
||||
self.to_add = to_add_copy
|
||||
self.to_remove = to_remove_copy
|
||||
|
||||
def force_value(self, key, value):
|
||||
"""
|
||||
Force a value into a key without raising any event
|
||||
"""
|
||||
with self._lock:
|
||||
self._cache[key] = value
|
||||
|
||||
def remove_initialized_key(self, key):
|
||||
"""
|
||||
When a value is requested by alt_sdp, we should not keep track of the request
|
||||
As the outcome is not known
|
||||
"""
|
||||
with self._lock:
|
||||
self._initialized_keys.remove(key)
|
||||
|
||||
def has(self, key):
|
||||
"""
|
||||
Return True if the key is in the cache
|
||||
@@ -149,7 +215,10 @@ class BaseCache:
|
||||
if key in self._cache:
|
||||
return True
|
||||
|
||||
return self._extend_exists(key) if self._extend_exists else False
|
||||
if self._extend_exists:
|
||||
return self._extend_exists(self._sdp, key) if self._sdp else self._extend_exists(key)
|
||||
else:
|
||||
return False
|
||||
|
||||
def evict(self, nb_items):
|
||||
"""
|
||||
@@ -195,13 +264,16 @@ class BaseCache:
|
||||
|
||||
return len(to_delete)
|
||||
|
||||
def clear(self):
|
||||
def clear(self, set_is_cleared=True):
|
||||
with self._lock:
|
||||
# Seems that remote sdp is not correctly updated
|
||||
self._cache.clear()
|
||||
self._current_size = 0
|
||||
self._initialized_keys.clear()
|
||||
self.to_add.clear()
|
||||
self.to_remove.clear()
|
||||
if set_is_cleared:
|
||||
self._is_cleared = True
|
||||
|
||||
def dump(self):
|
||||
with self._lock:
|
||||
@@ -225,9 +297,32 @@ class BaseCache:
|
||||
self.to_add.clear()
|
||||
self.to_remove.clear()
|
||||
|
||||
def reset_initialized_keys(self):
|
||||
"""
|
||||
Use when an ontology is put back. Reset all the previous requests as alt_sdp is a new one
|
||||
"""
|
||||
with self._lock:
|
||||
self._initialized_keys.clear()
|
||||
|
||||
def is_cleared(self):
|
||||
with self._lock:
|
||||
return self._is_cleared
|
||||
|
||||
def clone(self):
|
||||
return type(self)(self._max_size, self._default, self._extend_exists, self._alt_sdp_get, self._sdp)
|
||||
|
||||
def test_only_reset(self):
|
||||
"""
|
||||
Clears the cache, but does not set is_cleared to True
|
||||
It's a convenient way to clear the cache without altering alt_sdp behaviour
|
||||
"""
|
||||
self.clear(set_is_cleared=False)
|
||||
|
||||
def _sync(self, *keys):
|
||||
# KSI 2020-12-29. DO not try to use alt_sdp here
|
||||
# Sync must only sync with the current sdp
|
||||
for key in keys:
|
||||
if key not in self._initialized_keys and self._default:
|
||||
if key not in self._initialized_keys and callable(self._default):
|
||||
# to keep sync with the remote repo is needed
|
||||
# first check self._initialized_keys to prevent infinite loop
|
||||
self.get(key)
|
||||
@@ -246,7 +341,7 @@ class BaseCache:
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def _get(self, key):
|
||||
def _get(self, key, alt_sdp=None):
|
||||
try:
|
||||
value = self._cache[key]
|
||||
except KeyError:
|
||||
@@ -254,11 +349,18 @@ class BaseCache:
|
||||
self._initialized_keys.clear()
|
||||
if callable(self._default):
|
||||
if key in self._initialized_keys:
|
||||
return None
|
||||
# it means that we have already asked the repository
|
||||
return NotFound
|
||||
|
||||
value = self._default(key)
|
||||
if value is not None:
|
||||
self._cache[key] = value
|
||||
simple_copy = True
|
||||
value = self._default(self._sdp, key) if self._sdp else self._default(key)
|
||||
if value is NotFound and alt_sdp and not self._is_cleared:
|
||||
value = self._alt_sdp_get(alt_sdp, key)
|
||||
simple_copy = False
|
||||
|
||||
if value is not NotFound:
|
||||
self._cache[key] = value if simple_copy else sheerka_deepcopy(value)
|
||||
value = self._cache[key]
|
||||
|
||||
# update _current_size
|
||||
if isinstance(value, (list, set)):
|
||||
@@ -271,11 +373,14 @@ class BaseCache:
|
||||
|
||||
return value
|
||||
|
||||
def _put(self, key, value):
|
||||
def _alt_get(self, key):
|
||||
return self._get(key) # by default, point to _get
|
||||
|
||||
def _put(self, key, value, alt_sdp):
|
||||
pass
|
||||
|
||||
def _update(self, old_key, old_value, new_key, new_value):
|
||||
def _update(self, old_key, old_value, new_key, new_value, alt_sdp):
|
||||
pass
|
||||
|
||||
def _delete(self, key, value):
|
||||
def _delete(self, key, value, alt_sdp):
|
||||
raise NotImplementedError()
|
||||
|
||||
Vendored
+19
-9
@@ -1,6 +1,7 @@
|
||||
from threading import RLock
|
||||
|
||||
from cache.BaseCache import BaseCache
|
||||
from core.global_symbols import Removed
|
||||
|
||||
|
||||
class Cache(BaseCache):
|
||||
@@ -10,23 +11,32 @@ class Cache(BaseCache):
|
||||
When you put the same key twice, the previous element is overridden
|
||||
"""
|
||||
|
||||
def _put(self, key, value):
|
||||
def _put(self, key, value, alt_sdp):
|
||||
res = key not in self._cache
|
||||
self._cache[key] = value
|
||||
self._add_to_add(key)
|
||||
return res
|
||||
|
||||
def _update(self, old_key, old_value, new_key, new_value):
|
||||
def _update(self, old_key, old_value, new_key, new_value, alt_sdp):
|
||||
self._cache[new_key] = new_value
|
||||
self._add_to_add(new_key)
|
||||
|
||||
if new_key != old_key:
|
||||
self._sync(old_key)
|
||||
del (self._cache[old_key])
|
||||
self._add_to_remove(old_key)
|
||||
|
||||
def _delete(self, key, value):
|
||||
del(self._cache[key])
|
||||
self._current_size -= 1
|
||||
self._add_to_remove(key)
|
||||
if not self._is_cleared and alt_sdp and self._extend_exists and self._extend_exists(alt_sdp, old_key):
|
||||
self._cache[old_key] = Removed
|
||||
self._add_to_add(old_key)
|
||||
self._current_size += 1
|
||||
else:
|
||||
del (self._cache[old_key])
|
||||
self._add_to_remove(old_key)
|
||||
|
||||
def _delete(self, key, value, alt_sdp):
|
||||
if not self._is_cleared and alt_sdp and self._extend_exists and self._extend_exists(alt_sdp, key):
|
||||
self._cache[key] = Removed
|
||||
self._add_to_add(key)
|
||||
# do not decrease self._current_size as 'Removed' takes on slot
|
||||
else:
|
||||
del (self._cache[key])
|
||||
self._add_to_remove(key)
|
||||
self._current_size -= 1
|
||||
|
||||
Vendored
+81
-66
@@ -4,6 +4,7 @@ from typing import Callable
|
||||
|
||||
from cache.BaseCache import BaseCache
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import NotFound
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -37,8 +38,9 @@ class CacheManager:
|
||||
Single class to manage all the caches
|
||||
"""
|
||||
|
||||
def __init__(self, cache_only):
|
||||
def __init__(self, cache_only, sdp=None):
|
||||
self.cache_only = cache_only # if true disable all remote access when key not found
|
||||
self.sdp = sdp
|
||||
self.caches = {}
|
||||
self.concept_caches = []
|
||||
self.is_dirty = False # to indicate that the value of a cache has changed
|
||||
@@ -57,6 +59,8 @@ class CacheManager:
|
||||
with self._lock:
|
||||
if self.cache_only:
|
||||
cache.disable_default()
|
||||
if self.sdp:
|
||||
cache.configure(sdp=self.sdp)
|
||||
self.caches[name] = CacheDefinition(cache, use_ref, get_key)
|
||||
self.concept_caches.append(name)
|
||||
|
||||
@@ -70,8 +74,13 @@ class CacheManager:
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
if self.sdp:
|
||||
cache.configure(sdp=self.sdp)
|
||||
|
||||
if self.cache_only:
|
||||
cache.disable_default()
|
||||
persist = False
|
||||
|
||||
self.caches[name] = CacheDefinition(cache, use_ref, None, persist)
|
||||
|
||||
def add_concept(self, concept):
|
||||
@@ -89,11 +98,12 @@ class CacheManager:
|
||||
|
||||
self.is_dirty = True
|
||||
|
||||
def update_concept(self, old, new):
|
||||
def update_concept(self, old, new, alt_sdp=None):
|
||||
"""
|
||||
Update a concept.
|
||||
:param old: old version of the concept
|
||||
:param new: new version of the concept
|
||||
:param alt_sdp: if not found in self.sdp, look in other repositories
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
@@ -103,42 +113,15 @@ class CacheManager:
|
||||
old_key = cache_def.get_key(old)
|
||||
new_key = cache_def.get_key(new)
|
||||
|
||||
cache_def.cache.update(old_key, old, new_key, new)
|
||||
cache_def.cache.update(old_key, old, new_key, new, alt_sdp=alt_sdp)
|
||||
|
||||
self.is_dirty = True
|
||||
|
||||
# how can you update an entry it the key may have changed ?
|
||||
# You need to have an invariant. By convention the keys in the first cache cannot change
|
||||
# with self._lock:
|
||||
# iter_cache_def = iter(self.caches)
|
||||
#
|
||||
# cache_def = next(iter_cache_def)
|
||||
# old_key = cache_def.get_key(concept)
|
||||
#
|
||||
# try:
|
||||
# while True:
|
||||
# items = cache_def.cache[old_key]
|
||||
# if isinstance(items, (list, set)):
|
||||
# for item in items:
|
||||
# if item.id == concept.id:
|
||||
# break
|
||||
# else:
|
||||
# raise IndexError(f"{old_key=}, id={concept.id}")
|
||||
#
|
||||
# cache_def.cache.update(old_key, item, cache_def.get_key(concept), concept)
|
||||
#
|
||||
# else:
|
||||
# cache_def.cache.update(old_key, items, cache_def.get_key(concept), concept)
|
||||
#
|
||||
# cache_def = next(iter_cache_def)
|
||||
# except StopIteration:
|
||||
# pass
|
||||
# self.is_dirty = True
|
||||
|
||||
def remove_concept(self, concept):
|
||||
def remove_concept(self, concept, alt_sdp=None):
|
||||
"""
|
||||
Remove a concept from all caches
|
||||
:param concept:
|
||||
:param alt_sdp: if not found in self.sdp, look in other repositories
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
@@ -148,25 +131,66 @@ class CacheManager:
|
||||
concept_id = ref_cache_def.get_key(concept)
|
||||
ref_concept = ref_cache_def.cache.get(concept_id)
|
||||
|
||||
if ref_concept is None:
|
||||
if ref_concept is NotFound and alt_sdp:
|
||||
ref_concept = alt_sdp.get(self.concept_caches[0], concept_id)
|
||||
|
||||
if ref_concept is NotFound:
|
||||
raise ConceptNotFound(concept)
|
||||
|
||||
for cache_name in self.concept_caches:
|
||||
cache_def = self.caches[cache_name]
|
||||
key = cache_def.get_key(ref_concept)
|
||||
cache_def.cache.delete(key, ref_concept)
|
||||
cache_def.cache.delete(key, ref_concept, alt_sdp=alt_sdp)
|
||||
|
||||
self.is_dirty = True
|
||||
|
||||
def get(self, cache_name, key):
|
||||
def get(self, cache_name, key, alt_sdp=None):
|
||||
"""
|
||||
From concept cache, get an entry
|
||||
:param cache_name:
|
||||
:param key:
|
||||
:param alt_sdp: if not found in self.sdp, look in other repositories
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
return self.caches[cache_name].cache.get(key, alt_sdp)
|
||||
|
||||
def alt_get(self, cache_name, key):
|
||||
"""
|
||||
Alternate way to get an entry, from concept cache
|
||||
This is mainly used for IncCache, in order to get the value without increasing it
|
||||
:param cache_name:
|
||||
:param key:
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
return self.caches[cache_name].cache.get(key)
|
||||
return self.caches[cache_name].cache.alt_get(key)
|
||||
|
||||
def put(self, cache_name, key, value, alt_sdp=None):
|
||||
"""
|
||||
Add to a cache
|
||||
:param cache_name:
|
||||
:param key:
|
||||
:param value:
|
||||
:param alt_sdp: if not found in self.sdp, look in other repositories
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
self.caches[cache_name].cache.put(key, value, alt_sdp)
|
||||
self.is_dirty = True
|
||||
|
||||
def delete(self, cache_name, key, value=None, alt_sdp=None):
|
||||
"""
|
||||
Delete an entry from the cache
|
||||
:param cache_name:
|
||||
:param key:
|
||||
:param value:
|
||||
:param alt_sdp: if not found in self.sdp, look in other repositories
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
if self.caches[cache_name].cache.delete(key, value, alt_sdp):
|
||||
self.is_dirty = True
|
||||
|
||||
def get_cache(self, cache_name):
|
||||
"""
|
||||
@@ -186,40 +210,31 @@ class CacheManager:
|
||||
"""
|
||||
return self.caches[cache_name].cache.copy()
|
||||
|
||||
def put(self, cache_name, key, value):
|
||||
"""
|
||||
Add to a cache
|
||||
:param cache_name:
|
||||
:param key:
|
||||
:param value:
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
self.caches[cache_name].cache.put(key, value)
|
||||
self.is_dirty = True
|
||||
|
||||
def delete(self, cache_name, key, value=None):
|
||||
"""
|
||||
Delete an entry from the cache
|
||||
:param cache_name:
|
||||
:param key:
|
||||
:param value:
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
self.caches[cache_name].cache.delete(key, value)
|
||||
self.is_dirty = True
|
||||
|
||||
def populate(self, cache_name, populate_function, get_key_function):
|
||||
def populate(self, cache_name, populate_function, get_key_function, reset_events=False):
|
||||
"""
|
||||
Populate a specific cache with a bunch of items
|
||||
:param cache_name:
|
||||
:param populate_function: how to get the items
|
||||
:param get_key_function: how to get the key, out of an item
|
||||
:param reset_events: reset to_add and to_remove events after populate
|
||||
:return:
|
||||
"""
|
||||
with self._lock:
|
||||
self.caches[cache_name].cache.init(populate_function, get_key_function)
|
||||
self.caches[cache_name].cache.populate(populate_function, get_key_function, reset_events)
|
||||
|
||||
def force_value(self, cache_name, key, value):
|
||||
"""
|
||||
Update the content of the cache, but does not raise any event
|
||||
"""
|
||||
with self._lock:
|
||||
self.caches[cache_name].cache.force_value(key, value)
|
||||
|
||||
def remove_initialized_key(self, cache_name, key):
|
||||
"""
|
||||
|
||||
"""
|
||||
with self._lock:
|
||||
self.caches[cache_name].cache.remove_initialized_key(key)
|
||||
|
||||
def has(self, cache_name, key):
|
||||
"""
|
||||
@@ -267,7 +282,7 @@ class CacheManager:
|
||||
return
|
||||
|
||||
with self._lock:
|
||||
with context.sheerka.sdp.get_transaction(context.event.get_digest()) as transaction:
|
||||
with self.sdp.get_transaction(context.event.get_digest()) as transaction:
|
||||
for cache_name, cache_def in self.caches.items():
|
||||
if not cache_def.persist:
|
||||
continue
|
||||
@@ -287,13 +302,13 @@ class CacheManager:
|
||||
cache_def.cache.reset_events()
|
||||
self.is_dirty = False
|
||||
|
||||
def clear(self, cache_name=None):
|
||||
def clear(self, cache_name=None, set_is_cleared=True):
|
||||
with self._lock:
|
||||
if cache_name:
|
||||
self.caches[cache_name].cache.clear()
|
||||
self.caches[cache_name].cache.clear(set_is_cleared)
|
||||
else:
|
||||
for cache_def in self.caches.values():
|
||||
cache_def.cache.clear()
|
||||
cache_def.cache.clear(set_is_cleared)
|
||||
|
||||
def dump(self):
|
||||
"""
|
||||
|
||||
Vendored
+45
-10
@@ -1,30 +1,62 @@
|
||||
from cache.BaseCache import BaseCache
|
||||
from cache.BaseCache import BaseCache, MAX_INITIALIZED_KEY
|
||||
from core.global_symbols import NotFound, Removed
|
||||
from core.utils import sheerka_deepcopy
|
||||
|
||||
|
||||
class DictionaryCache(BaseCache):
|
||||
def _get(self, key):
|
||||
"""
|
||||
Kind of all or nothing dictionary database
|
||||
You can get the values key by by
|
||||
But when you want to put, you must put the whole database
|
||||
For this reason, alt_sdp is not supported. The top ontology layer contains the whole database
|
||||
"""
|
||||
|
||||
def auto_configure(self, cache_name):
|
||||
"""
|
||||
Convenient way to configure the cache
|
||||
:param cache_name:
|
||||
:return:
|
||||
"""
|
||||
self._default = lambda sdp, key: sdp.get(cache_name) # retrieve the whole entry
|
||||
self._extend_exists = None # not used
|
||||
self._alt_sdp_get = None # not used
|
||||
|
||||
return self
|
||||
|
||||
def _get(self, key, alt_sdp=None):
|
||||
"""
|
||||
Management of the default is different
|
||||
:param key:
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
value = self._cache[key]
|
||||
return value
|
||||
return self._cache[key]
|
||||
except KeyError:
|
||||
if key in self._initialized_keys:
|
||||
return NotFound
|
||||
|
||||
if len(self._initialized_keys) == MAX_INITIALIZED_KEY:
|
||||
self._initialized_keys.clear()
|
||||
|
||||
self._initialized_keys.add(key)
|
||||
|
||||
if callable(self._default):
|
||||
self._cache = self._default(key) or {}
|
||||
default_values = self._default(self._sdp, key) if self._sdp else self._default(key)
|
||||
else:
|
||||
self._cache = self._default.copy() if self._default else {}
|
||||
default_values = self._default
|
||||
|
||||
if isinstance(default_values, dict):
|
||||
self._cache.update(default_values) # update the whole cache dictionary to resync with remote sdp
|
||||
|
||||
self._count_items()
|
||||
return self._cache[key] if key in self._cache else None
|
||||
return self._cache[key] if key in self._cache else NotFound
|
||||
|
||||
def _put(self, key, value):
|
||||
def _put(self, key, value, alt_sdp):
|
||||
"""
|
||||
Adds a whole dictionary
|
||||
:param key: True to append, false to reset
|
||||
:param value: dictionary
|
||||
:param alt_sdp: NOT SUPPORTED as the values from alt_sdp must be retrieved and computed BEFORE the put
|
||||
:return:
|
||||
"""
|
||||
if not isinstance(key, bool):
|
||||
@@ -33,12 +65,12 @@ class DictionaryCache(BaseCache):
|
||||
if not isinstance(value, dict):
|
||||
raise ValueError
|
||||
|
||||
if key:
|
||||
if key: # update the current cache
|
||||
if self._cache is None:
|
||||
self._cache = value.copy()
|
||||
else:
|
||||
self._cache.update(value)
|
||||
else:
|
||||
else: # reset the current cache
|
||||
self._cache = value
|
||||
|
||||
self._count_items()
|
||||
@@ -47,6 +79,9 @@ class DictionaryCache(BaseCache):
|
||||
self._add_to_add("*self*")
|
||||
return False
|
||||
|
||||
def _delete(self, key, value, alt_sdp):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _count_items(self):
|
||||
self._current_size = 0
|
||||
for v in self._cache.values():
|
||||
|
||||
Vendored
+14
-2
@@ -1,12 +1,16 @@
|
||||
from core.global_symbols import NotFound
|
||||
|
||||
|
||||
class FastCache:
|
||||
"""
|
||||
Simplest LRU cache
|
||||
"""
|
||||
|
||||
def __init__(self, max_size=256):
|
||||
def __init__(self, max_size=256, default=None):
|
||||
self.max_size = max_size
|
||||
self.cache = {}
|
||||
self.lru = []
|
||||
self.default = default
|
||||
|
||||
def put(self, key, value):
|
||||
if len(self.cache) == self.max_size:
|
||||
@@ -18,11 +22,19 @@ class FastCache:
|
||||
self.cache[key] = value
|
||||
self.lru.append(key)
|
||||
|
||||
def has(self, key):
|
||||
return key in self.cache
|
||||
|
||||
def get(self, key):
|
||||
try:
|
||||
return self.cache[key]
|
||||
except KeyError:
|
||||
return None
|
||||
if self.default:
|
||||
value = self.default(key)
|
||||
self.put(key, value)
|
||||
return value
|
||||
|
||||
return NotFound
|
||||
|
||||
def evict_by_key(self, predicate):
|
||||
to_remove = []
|
||||
|
||||
Vendored
+10
-4
@@ -1,4 +1,5 @@
|
||||
from cache.Cache import Cache
|
||||
from core.global_symbols import NotFound, Removed
|
||||
|
||||
|
||||
class IncCache(Cache):
|
||||
@@ -6,13 +7,18 @@ class IncCache(Cache):
|
||||
Increment the value of the key every time it's accessed
|
||||
"""
|
||||
|
||||
def _get(self, key):
|
||||
value = super()._get(key) or 0
|
||||
def _get(self, key, alt_sdp=None):
|
||||
value = super()._get(key, alt_sdp=alt_sdp)
|
||||
if value in (NotFound, Removed):
|
||||
value = 0
|
||||
value += 1
|
||||
self._put(key, value)
|
||||
self._put(key, value, alt_sdp)
|
||||
return value
|
||||
|
||||
def _put(self, key, value):
|
||||
def _put(self, key, value, alt_sdp):
|
||||
self._cache[key] = value
|
||||
self._add_to_add(key)
|
||||
return True
|
||||
|
||||
def _alt_get(self, key):
|
||||
return super()._get(key) # point to parent, not to self
|
||||
|
||||
Vendored
+27
-5
@@ -1,4 +1,6 @@
|
||||
from cache.Cache import BaseCache
|
||||
from core.global_symbols import Removed, NotFound
|
||||
from core.utils import sheerka_deepcopy
|
||||
|
||||
|
||||
class ListCache(BaseCache):
|
||||
@@ -8,12 +10,17 @@ class ListCache(BaseCache):
|
||||
Items of this cache are list
|
||||
"""
|
||||
|
||||
def _put(self, key, value):
|
||||
def _put(self, key, value, alt_sdp):
|
||||
if key in self._cache:
|
||||
self._cache[key].append(value)
|
||||
else:
|
||||
self._sync(key)
|
||||
|
||||
if key not in self._cache and alt_sdp and not self._is_cleared:
|
||||
previous = self._alt_sdp_get(alt_sdp, key)
|
||||
if previous not in (NotFound, Removed):
|
||||
self._cache[key] = sheerka_deepcopy(previous)
|
||||
|
||||
if key in self._cache:
|
||||
self._cache[key].append(value)
|
||||
else:
|
||||
@@ -22,18 +29,33 @@ class ListCache(BaseCache):
|
||||
self._add_to_add(key)
|
||||
return True
|
||||
|
||||
def _update(self, old_key, old_value, new_key, new_value):
|
||||
def _update(self, old_key, old_value, new_key, new_value, alt_sdp):
|
||||
self._sync(old_key, new_key)
|
||||
|
||||
if old_key not in self._cache and alt_sdp and not self._is_cleared:
|
||||
# no value found in local cache or remote repository
|
||||
# Use the values from alt_sdp
|
||||
previous = self._alt_sdp_get(alt_sdp, old_key)
|
||||
if previous in (NotFound, Removed):
|
||||
raise KeyError(old_key)
|
||||
|
||||
self._cache[old_key] = sheerka_deepcopy(previous)
|
||||
self._current_size += len(previous)
|
||||
|
||||
if old_key != new_key:
|
||||
self._cache[old_key].remove(old_value)
|
||||
if len(self._cache[old_key]) == 0:
|
||||
del (self._cache[old_key])
|
||||
self._add_to_remove(old_key)
|
||||
if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, old_key):
|
||||
self._cache[old_key] = Removed
|
||||
self._add_to_add(old_key)
|
||||
self._current_size += 1
|
||||
else:
|
||||
del (self._cache[old_key])
|
||||
self._add_to_remove(old_key)
|
||||
else:
|
||||
self._add_to_add(old_key)
|
||||
|
||||
self._put(new_key, new_value)
|
||||
self._put(new_key, new_value, alt_sdp)
|
||||
self._add_to_add(new_key)
|
||||
else:
|
||||
for i in range(len(self._cache[new_key])):
|
||||
|
||||
Vendored
+84
-26
@@ -1,4 +1,6 @@
|
||||
from cache.Cache import BaseCache
|
||||
from core.global_symbols import Removed, NotFound
|
||||
from core.utils import sheerka_deepcopy
|
||||
|
||||
|
||||
class ListIfNeededCache(BaseCache):
|
||||
@@ -8,7 +10,7 @@ class ListIfNeededCache(BaseCache):
|
||||
When you put the same key twice, you now have a list of two elements
|
||||
"""
|
||||
|
||||
def _put(self, key, value):
|
||||
def _put(self, key, value, alt_sdp):
|
||||
if key in self._cache:
|
||||
if isinstance(self._cache[key], list):
|
||||
self._cache[key].append(value)
|
||||
@@ -17,6 +19,11 @@ class ListIfNeededCache(BaseCache):
|
||||
else:
|
||||
self._sync(key)
|
||||
|
||||
if key not in self._cache and alt_sdp and not self._is_cleared:
|
||||
previous = self._alt_sdp_get(alt_sdp, key)
|
||||
if previous not in (NotFound, Removed):
|
||||
self._cache[key] = sheerka_deepcopy(previous)
|
||||
|
||||
if key in self._cache:
|
||||
if isinstance(self._cache[key], list):
|
||||
self._cache[key].append(value)
|
||||
@@ -27,23 +34,36 @@ class ListIfNeededCache(BaseCache):
|
||||
self._add_to_add(key)
|
||||
return True
|
||||
|
||||
def _update(self, old_key, old_value, new_key, new_value):
|
||||
def _update(self, old_key, old_value, new_key, new_value, alt_sdp):
|
||||
|
||||
self._sync(old_key, new_key)
|
||||
|
||||
if old_key not in self._cache and alt_sdp and not self._is_cleared:
|
||||
# no value found in local cache or remote repository
|
||||
# Use the values from alt_sdp
|
||||
previous = self._alt_sdp_get(alt_sdp, old_key)
|
||||
if previous in (NotFound, Removed):
|
||||
raise KeyError(old_key)
|
||||
|
||||
self._cache[old_key] = sheerka_deepcopy(previous)
|
||||
self._current_size += len(previous) if isinstance(previous, list) else 1
|
||||
|
||||
if old_key != new_key:
|
||||
if isinstance(self._cache[old_key], list):
|
||||
self._cache[old_key].remove(old_value)
|
||||
if len(self._cache[old_key]) == 0:
|
||||
if len(self._cache[old_key]) == 1:
|
||||
self._cache[old_key] = self._cache[old_key][0]
|
||||
self._add_to_add(old_key)
|
||||
else:
|
||||
if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, old_key):
|
||||
self._cache[old_key] = Removed
|
||||
self._add_to_add(old_key)
|
||||
self._current_size += 1
|
||||
else:
|
||||
del (self._cache[old_key])
|
||||
self._add_to_remove(old_key)
|
||||
else:
|
||||
self._add_to_add(old_key)
|
||||
else:
|
||||
del (self._cache[old_key])
|
||||
self._add_to_remove(old_key)
|
||||
|
||||
self._put(new_key, new_value)
|
||||
self._put(new_key, new_value, alt_sdp)
|
||||
self._add_to_add(new_key)
|
||||
else:
|
||||
if isinstance(self._cache[new_key], list):
|
||||
@@ -55,22 +75,60 @@ class ListIfNeededCache(BaseCache):
|
||||
self._cache[new_key] = new_value
|
||||
self._add_to_add(new_key)
|
||||
|
||||
def _delete(self, key, value):
|
||||
def _delete(self, key, value, alt_sdp):
|
||||
if value is None:
|
||||
self._current_size -= len(self._cache[key])
|
||||
del self._cache[key]
|
||||
self._add_to_remove(key)
|
||||
else:
|
||||
previous = self._cache[key]
|
||||
if isinstance(previous, list):
|
||||
previous.remove(value)
|
||||
if len(previous) == 1:
|
||||
self._cache[key] = previous[0]
|
||||
self._current_size -= 1
|
||||
self.to_add.add(key)
|
||||
else:
|
||||
if previous == value:
|
||||
del self._cache[key]
|
||||
self._current_size -= 1
|
||||
self.to_remove.add(key)
|
||||
# Remove the whole key
|
||||
if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key):
|
||||
if key in self._cache:
|
||||
previous = self._cache[key]
|
||||
if isinstance(previous, list):
|
||||
self._current_size -= len(previous) + 1
|
||||
else:
|
||||
self._current_size += 1
|
||||
|
||||
self._cache[key] = Removed
|
||||
self._add_to_add(key)
|
||||
else:
|
||||
previous = self._cache[key]
|
||||
self._current_size -= len(previous) if isinstance(previous, list) else 1
|
||||
del self._cache[key]
|
||||
self._add_to_remove(key)
|
||||
|
||||
else:
|
||||
# Remove a single value
|
||||
try:
|
||||
previous = self._cache[key]
|
||||
if isinstance(previous, list):
|
||||
previous.remove(value)
|
||||
self._cache[key] = previous[0] if len(previous) == 1 else previous
|
||||
self._current_size -= 1
|
||||
self.to_add.add(key)
|
||||
else:
|
||||
if previous == value:
|
||||
# I am about to delete the entry
|
||||
if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key):
|
||||
self._cache[key] = Removed
|
||||
self.to_add.add(key)
|
||||
# self._current_size -= 1 # Do not decrease size, as it's replaced by 'Removed'
|
||||
else:
|
||||
del self._cache[key]
|
||||
self._current_size -= 1
|
||||
self.to_remove.add(key)
|
||||
except KeyError as ex:
|
||||
previous = self._alt_sdp_get(alt_sdp, key) if not self._is_cleared and alt_sdp else NotFound
|
||||
if previous in (NotFound, Removed):
|
||||
raise ex
|
||||
|
||||
if isinstance(previous, list):
|
||||
previous = sheerka_deepcopy(previous)
|
||||
previous.remove(value) # raise an exception if value in not in the list
|
||||
self._cache[key] = previous[0] if len(previous) == 1 else previous
|
||||
self._current_size -= 1
|
||||
self.to_add.add(key)
|
||||
else:
|
||||
if previous == value:
|
||||
self._cache[key] = Removed
|
||||
self.to_add.add(key)
|
||||
self._current_size -= 1
|
||||
|
||||
return True
|
||||
|
||||
Vendored
+66
-15
@@ -1,4 +1,6 @@
|
||||
from cache.Cache import BaseCache
|
||||
from core.global_symbols import NotFound, Removed
|
||||
from core.utils import sheerka_deepcopy
|
||||
|
||||
|
||||
class SetCache(BaseCache):
|
||||
@@ -15,7 +17,7 @@ class SetCache(BaseCache):
|
||||
>> assert {'value1', 'value2'} == self.get('key')
|
||||
"""
|
||||
|
||||
def _put(self, key, value):
|
||||
def _put(self, key, value, alt_sdp):
|
||||
if key in self._cache:
|
||||
if value in self._cache[key]:
|
||||
return False
|
||||
@@ -23,6 +25,11 @@ class SetCache(BaseCache):
|
||||
else:
|
||||
self._sync(key)
|
||||
|
||||
if key not in self._cache and alt_sdp and not self._is_cleared:
|
||||
previous = self._alt_sdp_get(alt_sdp, key)
|
||||
if previous not in (NotFound, Removed):
|
||||
self._cache[key] = sheerka_deepcopy(previous)
|
||||
|
||||
if key in self._cache:
|
||||
self._cache[key].add(value)
|
||||
else:
|
||||
@@ -31,35 +38,79 @@ class SetCache(BaseCache):
|
||||
self._add_to_add(key)
|
||||
return True
|
||||
|
||||
def _update(self, old_key, old_value, new_key, new_value):
|
||||
def _update(self, old_key, old_value, new_key, new_value, alt_sdp):
|
||||
self._sync(old_key, new_key)
|
||||
|
||||
if old_key not in self._cache and alt_sdp and not self._is_cleared:
|
||||
# no value found in local cache or remote repository
|
||||
# Use the values from alt_sdp
|
||||
previous = self._alt_sdp_get(alt_sdp, old_key)
|
||||
if previous in (NotFound, Removed):
|
||||
raise KeyError(old_key)
|
||||
|
||||
self._cache[old_key] = sheerka_deepcopy(previous)
|
||||
self._current_size += len(previous)
|
||||
|
||||
if old_key != new_key:
|
||||
if isinstance(self._cache[old_key], set):
|
||||
self._cache[old_key].remove(old_value)
|
||||
if len(self._cache[old_key]) == 0:
|
||||
del (self._cache[old_key])
|
||||
self._add_to_remove(old_key)
|
||||
if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, old_key):
|
||||
self._cache[old_key] = Removed
|
||||
self._add_to_add(old_key)
|
||||
self._current_size += 1
|
||||
else:
|
||||
del (self._cache[old_key])
|
||||
self._add_to_remove(old_key)
|
||||
else:
|
||||
self._add_to_add(old_key)
|
||||
|
||||
self._put(new_key, new_value)
|
||||
self._put(new_key, new_value, alt_sdp)
|
||||
self._add_to_add(new_key)
|
||||
else:
|
||||
self._cache[new_key].remove(old_value)
|
||||
self._put(new_key, new_value)
|
||||
self._put(new_key, new_value, alt_sdp)
|
||||
self._add_to_add(new_key)
|
||||
|
||||
def _delete(self, key, value):
|
||||
def _delete(self, key, value, alt_sdp):
|
||||
if value is None:
|
||||
self._current_size -= len(self._cache[key])
|
||||
del self._cache[key]
|
||||
self._add_to_remove(key)
|
||||
else:
|
||||
self._cache[key].remove(value)
|
||||
self._current_size -= 1
|
||||
if len(self._cache[key]) == 0:
|
||||
if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key):
|
||||
self._current_size += 1 - len(self._cache[key]) if key in self._cache else 1
|
||||
self._cache[key] = Removed
|
||||
self._add_to_add(key)
|
||||
else:
|
||||
self._current_size -= len(self._cache[key])
|
||||
del self._cache[key]
|
||||
self._add_to_remove(key)
|
||||
else:
|
||||
|
||||
else:
|
||||
try:
|
||||
self._cache[key].remove(value)
|
||||
if len(self._cache[key]) == 0:
|
||||
if not self._is_cleared and alt_sdp and self._extend_exists(alt_sdp, key):
|
||||
self._cache[key] = Removed
|
||||
self._add_to_add(key)
|
||||
# self._current_size -= 1 # Do not decrease size, as it's replaced by 'Removed'
|
||||
else:
|
||||
del self._cache[key]
|
||||
self._add_to_remove(key)
|
||||
self._current_size -= 1
|
||||
else:
|
||||
self._add_to_add(key)
|
||||
self._current_size -= 1
|
||||
except KeyError as ex:
|
||||
previous = self._alt_sdp_get(alt_sdp, key) if not self._is_cleared and alt_sdp else NotFound
|
||||
if previous in (NotFound, Removed):
|
||||
raise ex
|
||||
|
||||
previous = sheerka_deepcopy(previous)
|
||||
previous.remove(value) # will raise a KeyError if value is not in the set
|
||||
if len(previous) == 0:
|
||||
self._cache[key] = Removed
|
||||
self._current_size += 1
|
||||
else:
|
||||
self._cache[key] = previous
|
||||
self._current_size += len(previous)
|
||||
self._add_to_add(key)
|
||||
|
||||
return True
|
||||
|
||||
@@ -2,6 +2,7 @@ import ast
|
||||
from dataclasses import dataclass
|
||||
|
||||
from cache.FastCache import FastCache
|
||||
from core.global_symbols import NotFound
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -56,12 +57,12 @@ class UnreferencedNamesVisitor(ast.NodeVisitor):
|
||||
|
||||
def get_names(self, node):
|
||||
names = UnreferencedNamesVisitor.cache.get(node)
|
||||
if names is not None:
|
||||
return names
|
||||
if names is NotFound:
|
||||
self.visit(node)
|
||||
UnreferencedNamesVisitor.cache.put(node, self.names)
|
||||
return self.names
|
||||
|
||||
self.visit(node)
|
||||
UnreferencedNamesVisitor.cache.put(node, self.names)
|
||||
return self.names
|
||||
return names
|
||||
|
||||
def visit_Name(self, node):
|
||||
self.names.add(node.id)
|
||||
|
||||
@@ -351,7 +351,7 @@ class PythonSecurityError(Concept, ErrorObj):
|
||||
self._metadata.is_evaluated = True
|
||||
|
||||
|
||||
class NotFound(Concept, ErrorObj):
|
||||
class NotFoundConcept(Concept, ErrorObj):
|
||||
ALL_ATTRIBUTES = []
|
||||
|
||||
def __init__(self, body=None):
|
||||
|
||||
@@ -95,6 +95,8 @@ class BuiltinConcepts:
|
||||
INVALID_GREATEST_OPERATION = "__INVALID_GREATEST_OPERATION"
|
||||
NEW_RULE = "__NEW_RULE"
|
||||
UNKNOWN_RULE = "__UNKNOWN_RULE"
|
||||
ONTOLOGY_ALREADY_DEFINED = "__ONTOLOGY_ALREADY_DEFINED"
|
||||
ONTOLOGY_REMOVED = "__ONTOLOGY_REMOVED"
|
||||
|
||||
NODE = "__NODE"
|
||||
GENERIC_NODE = "__GENERIC_NODE"
|
||||
@@ -167,6 +169,7 @@ BuiltinErrors = [
|
||||
BuiltinConcepts.NOT_FOUND,
|
||||
BuiltinConcepts.INVALID_LESSER_OPERATION,
|
||||
BuiltinConcepts.INVALID_GREATEST_OPERATION,
|
||||
BuiltinConcepts.ONTOLOGY_ALREADY_DEFINED
|
||||
]
|
||||
|
||||
BuiltinContainers = [
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
import ast
|
||||
import logging
|
||||
|
||||
from cache.Cache import Cache
|
||||
from core.ast_helpers import ast_to_props
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, NotInit, ConceptParts, DEFINITION_TYPE_BNF, concept_part_value
|
||||
from core.concept import Concept, ConceptParts, DEFINITION_TYPE_BNF, concept_part_value
|
||||
from core.global_symbols import NotInit, NotFound
|
||||
from core.rule import Rule
|
||||
from core.sheerka.services.SheerkaExecute import SheerkaExecute
|
||||
from core.tokenizer import Keywords
|
||||
from core.utils import as_bag
|
||||
from parsers.BaseNodeParser import SourceCodeNode, ConceptNode, UnrecognizedTokensNode, SourceCodeWithConceptNode, \
|
||||
RuleNode
|
||||
from parsers.BaseParser import BaseParser, ParsingError
|
||||
@@ -510,11 +515,11 @@ def get_lexer_nodes_from_unrecognized(context, unrecognized_tokens_node, parsers
|
||||
|
||||
def update_compiled(context, concept, errors, parsers=None):
|
||||
"""
|
||||
recursively iterate thru concept.get_compiled() to replace LexerNode into concepts or list of ReturnValueConcept
|
||||
recursively iterate over concept.get_compiled() to replace LexerNode into concepts or list of ReturnValueConcept
|
||||
When parsing using a LexerNodeParser (SyaNodeParser, BnfNodeParser...)
|
||||
the result will be a LexerNode.
|
||||
In the specific case of a ConceptNode, the compiled variables will also be LexerNode (UnrecognizedTokensNode...)
|
||||
This function iterate thru the compile to transform these nodes into concept of compiled AST
|
||||
This function iterate over the compile to transform these nodes into concept of compiled AST
|
||||
:param context:
|
||||
:param concept:
|
||||
:param errors: a list the must be initialized by the caller
|
||||
@@ -648,3 +653,59 @@ def ensure_concept_or_rule(*items):
|
||||
else:
|
||||
if not isinstance(items, (Concept, Rule)):
|
||||
raise TypeError(f"'{items}' must be a concept or rule")
|
||||
|
||||
|
||||
expressions_cache = Cache()
|
||||
|
||||
|
||||
def evaluate_expression(expr, bag):
|
||||
"""
|
||||
Try to evaluate expr in context of bag
|
||||
:param expr:
|
||||
:param bag:
|
||||
:return:
|
||||
"""
|
||||
|
||||
if expr is None or expr.strip() == "":
|
||||
return None
|
||||
|
||||
if expr in bag:
|
||||
return bag[expr]
|
||||
|
||||
props_definitions = expressions_cache.get(expr)
|
||||
if props_definitions is NotFound:
|
||||
_ast = ast.parse(expr, mode="eval")
|
||||
props_definitions = []
|
||||
ast_to_props(props_definitions, _ast.body, None)
|
||||
props_definitions.reverse()
|
||||
expressions_cache.put(expr, props_definitions)
|
||||
|
||||
return evaluate_object(bag, props_definitions)
|
||||
|
||||
|
||||
def evaluate_object(bag, properties):
|
||||
"""
|
||||
Evaluate the properties of an object
|
||||
Works with evaluate_expression
|
||||
:param bag:
|
||||
:param properties: List of ast_helpers.PropDef
|
||||
:return:
|
||||
"""
|
||||
for prop in properties:
|
||||
try:
|
||||
obj = bag[prop.prop]
|
||||
except KeyError:
|
||||
try:
|
||||
obj = bag["self"][prop.prop]
|
||||
except Exception:
|
||||
raise NameError(prop.prop)
|
||||
|
||||
if obj is None:
|
||||
return None
|
||||
|
||||
if prop.index is not None:
|
||||
obj = obj[prop.index]
|
||||
|
||||
bag = as_bag(obj)
|
||||
|
||||
return obj
|
||||
|
||||
+22
-19
@@ -2,10 +2,12 @@ import hashlib
|
||||
from collections import namedtuple
|
||||
from copy import deepcopy
|
||||
from dataclasses import dataclass
|
||||
from threading import RLock
|
||||
from typing import Union
|
||||
|
||||
import core.utils
|
||||
from core.builtin_concepts_ids import BuiltinDynamicAttrs
|
||||
from core.global_symbols import NotInit
|
||||
from core.tokenizer import Tokenizer, TokenKind
|
||||
|
||||
PROPERTIES_FOR_DIGEST = ("name", "key",
|
||||
@@ -21,19 +23,6 @@ DEFINITION_TYPE_BNF = "bnf"
|
||||
DEFINITION_TYPE_DEF = "def"
|
||||
|
||||
|
||||
class NotInitialized:
|
||||
value = "**NotInit**"
|
||||
|
||||
def __repr__(self):
|
||||
return self.value
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, NotInitialized)
|
||||
|
||||
|
||||
NotInit = NotInitialized()
|
||||
|
||||
|
||||
class ConceptParts:
|
||||
"""
|
||||
Lists metadata that can contains some code
|
||||
@@ -75,6 +64,7 @@ class ConceptMetadata:
|
||||
|
||||
|
||||
ALL_ATTRIBUTES = {}
|
||||
all_attributes_lock = RLock()
|
||||
|
||||
|
||||
def get_concept_attrs(concept):
|
||||
@@ -86,15 +76,28 @@ def get_concept_attrs(concept):
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
all_attributes = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"]
|
||||
if concept.id and concept.key not in BuiltinDynamicAttrs:
|
||||
ALL_ATTRIBUTES[concept.id] = all_attributes
|
||||
return all_attributes
|
||||
with all_attributes_lock:
|
||||
all_attributes = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"]
|
||||
if concept.id and concept.key not in BuiltinDynamicAttrs:
|
||||
ALL_ATTRIBUTES[concept.id] = all_attributes
|
||||
return all_attributes
|
||||
|
||||
|
||||
def freeze_concept_attrs(concept):
|
||||
if concept.key not in BuiltinDynamicAttrs:
|
||||
ALL_ATTRIBUTES[concept.id] = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"]
|
||||
with all_attributes_lock:
|
||||
if concept.key not in BuiltinDynamicAttrs:
|
||||
ALL_ATTRIBUTES[concept.id] = [k for k in concept.__dict__ if k[0] != "_" and k[0] != "#"]
|
||||
|
||||
|
||||
def copy_concepts_attrs():
|
||||
with all_attributes_lock:
|
||||
return ALL_ATTRIBUTES.copy()
|
||||
|
||||
|
||||
def load_concepts_attrs(attrs):
|
||||
global ALL_ATTRIBUTES
|
||||
with all_attributes_lock:
|
||||
ALL_ATTRIBUTES = attrs
|
||||
|
||||
|
||||
class Concept:
|
||||
|
||||
@@ -8,3 +8,35 @@ EVENT_CONCEPT_CREATED = "evt_cc"
|
||||
# comparison context
|
||||
RULE_COMPARISON_CONTEXT = "Rule"
|
||||
CONCEPT_COMPARISON_CONTEXT = "Sya"
|
||||
|
||||
|
||||
class CustomType:
|
||||
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __repr__(self):
|
||||
return self.value
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, CustomType) and self.value == other.value
|
||||
|
||||
|
||||
class NotInitType(CustomType):
|
||||
def __init__(self):
|
||||
super(NotInitType, self).__init__("**NotInit**")
|
||||
|
||||
|
||||
class NotFoundType(CustomType):
|
||||
def __init__(self):
|
||||
super(NotFoundType, self).__init__("**NotFound**")
|
||||
|
||||
|
||||
class RemovedType(CustomType):
|
||||
def __init__(self):
|
||||
super(RemovedType, self).__init__("**Removed**")
|
||||
|
||||
|
||||
NotInit = NotInitType()
|
||||
NotFound = NotFoundType()
|
||||
Removed = RemovedType()
|
||||
|
||||
+23
-3
@@ -5,7 +5,7 @@ import core.utils
|
||||
|
||||
ACTION_TYPE_PRINT = "print"
|
||||
ACTION_TYPE_EXEC = "exec"
|
||||
ACTION_TYPE_DEFERRED = "deferred"
|
||||
ACTION_TYPE_DEFERRED = "deferred" # KSI 2021-04-01 What is it for ? I definitely need some proper documentation
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -27,8 +27,9 @@ class Rule:
|
||||
predicate=None,
|
||||
action=None,
|
||||
priority=None,
|
||||
rule_id=None,
|
||||
is_enabled=None):
|
||||
self.metadata = RuleMetadata(action_type, name, predicate, action, is_enabled=is_enabled)
|
||||
self.metadata = RuleMetadata(action_type, name, predicate, action, id=rule_id, is_enabled=is_enabled)
|
||||
self.compiled_predicate = None
|
||||
self.compiled_action = None
|
||||
from core.sheerka.services.SheerkaComparisonManager import SheerkaComparisonManager
|
||||
@@ -36,7 +37,10 @@ class Rule:
|
||||
self.error_sink = None
|
||||
|
||||
def __repr__(self):
|
||||
return f"Rule(#{self.metadata.id}, when '{self.metadata.predicate}' {self.metadata.action_type} '{self.metadata.action}', priority={self.priority})"
|
||||
rule_id = f"#{self.metadata.id}"
|
||||
if self.name:
|
||||
rule_id += f" ({self.metadata.name})"
|
||||
return f"Rule({rule_id}, when '{self.metadata.predicate}' {self.metadata.action_type} '{self.metadata.action}', priority={self.priority})"
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(other) == id(self):
|
||||
@@ -57,6 +61,22 @@ class Rule:
|
||||
self.metadata.action_type,
|
||||
self.metadata.action))
|
||||
|
||||
def __deepcopy__(self, memodict={}):
|
||||
copy = Rule(self.metadata.action_type,
|
||||
self.name,
|
||||
self.metadata.predicate,
|
||||
self.metadata.action,
|
||||
self.priority,
|
||||
self.id,
|
||||
self.metadata.is_enabled)
|
||||
copy.compiled_predicate = self.compiled_predicate
|
||||
copy.compiled_action = self.compiled_action
|
||||
|
||||
return copy
|
||||
|
||||
def __copy__(self):
|
||||
return self.__deepcopy__()
|
||||
|
||||
def set_id(self, rule_id):
|
||||
self.metadata.id = rule_id
|
||||
return self
|
||||
|
||||
+138
-116
@@ -5,21 +5,20 @@ from dataclasses import dataclass
|
||||
import core.builtin_helpers
|
||||
import core.utils
|
||||
from cache.Cache import Cache
|
||||
from cache.CacheManager import CacheManager
|
||||
from cache.DictionaryCache import DictionaryCache
|
||||
from cache.IncCache import IncCache
|
||||
from core.builtin_concepts import ErrorConcept, ReturnValueConcept, UnknownConcept
|
||||
from core.builtin_concepts_ids import BuiltinErrors, BuiltinConcepts
|
||||
from core.concept import Concept, ConceptParts, NotInit, get_concept_attrs
|
||||
from core.concept import Concept, ConceptParts, get_concept_attrs
|
||||
from core.error import ErrorObj
|
||||
from core.global_symbols import EVENT_USER_INPUT_EVALUATED
|
||||
from core.global_symbols import EVENT_USER_INPUT_EVALUATED, NotInit, NotFound
|
||||
from core.profiling import profile
|
||||
from core.sheerka.ExecutionContext import ExecutionContext
|
||||
from core.sheerka.SheerkaOntologyManager import SheerkaOntologyManager, OntologyAlreadyExists
|
||||
from core.sheerka_logger import console_handler
|
||||
from core.simple_debug import my_debug
|
||||
from core.tokenizer import Token, TokenKind
|
||||
from printer.SheerkaPrinter import SheerkaPrinter
|
||||
from sdp.sheerkaDataProvider import SheerkaDataProvider, Event
|
||||
from sdp.sheerkaDataProvider import Event
|
||||
|
||||
BASE_NODE_PARSER_CLASS = "parsers.BaseNodeParser.BaseNodeParser"
|
||||
EXIT_COMMANDS = ("quit", "exit", "bye")
|
||||
@@ -64,18 +63,19 @@ class Sheerka(Concept):
|
||||
ALL_ATTRIBUTES = []
|
||||
|
||||
def __init__(self, cache_only=False, debug=False, loggers=None):
|
||||
super().__init__(BuiltinConcepts.SHEERKA, True, True, BuiltinConcepts.SHEERKA)
|
||||
|
||||
self.init_logging(debug, loggers)
|
||||
self.loggers = loggers
|
||||
self.cache_only = cache_only
|
||||
|
||||
super().__init__(BuiltinConcepts.SHEERKA, True, True, BuiltinConcepts.SHEERKA)
|
||||
# self.log.debug("Starting Sheerka.")
|
||||
|
||||
self.bnp = None # reference to the BaseNodeParser class (to compute first keyword token)
|
||||
self.return_value_concept_id = None
|
||||
self.error_concept_id = None
|
||||
|
||||
self.sdp: SheerkaDataProvider = None
|
||||
self.cache_manager = CacheManager(cache_only)
|
||||
self.om: SheerkaOntologyManager = None
|
||||
|
||||
self.services = {} # sheerka plugins
|
||||
|
||||
@@ -105,29 +105,20 @@ class Sheerka(Concept):
|
||||
self.locals = {}
|
||||
self.concepts_ids = None
|
||||
|
||||
@property
|
||||
def resolved_concepts_by_first_keyword(self):
|
||||
"""
|
||||
We return the cache as we will be interested by statistics
|
||||
:return:
|
||||
"""
|
||||
return self.cache_manager.caches[self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY].cache
|
||||
|
||||
@property
|
||||
def resolved_sya_def(self):
|
||||
"""
|
||||
|
||||
:return:
|
||||
"""
|
||||
return self.cache_manager.caches[self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY].cache
|
||||
|
||||
@property
|
||||
def concepts_grammars(self):
|
||||
return self.cache_manager.caches[self.CONCEPTS_GRAMMARS_ENTRY].cache
|
||||
"""
|
||||
Quick access to BNF grammars
|
||||
"""
|
||||
return self.om.current_cache_manager().caches[self.CONCEPTS_GRAMMARS_ENTRY].cache
|
||||
|
||||
@property
|
||||
def chicken_and_eggs(self):
|
||||
return self.cache_manager.caches[self.CHICKEN_AND_EGG_CONCEPTS_ENTRY].cache
|
||||
return self.om.current_cache_manager().caches[self.CHICKEN_AND_EGG_CONCEPTS_ENTRY].cache
|
||||
|
||||
@property
|
||||
def root_folder(self):
|
||||
return self.om.root_folder
|
||||
|
||||
def bind_service_method(self, bound_method, has_side_effect, as_name=None, visible=True):
|
||||
"""
|
||||
@@ -147,7 +138,7 @@ class Sheerka(Concept):
|
||||
self.methods_with_context.add(as_name)
|
||||
self.sheerka_methods[as_name] = SheerkaMethod(bound_method, has_side_effect)
|
||||
|
||||
setattr(self, as_name, bound_method)
|
||||
setattr(self, bound_method.__name__, bound_method)
|
||||
|
||||
def initialize(self, root_folder: str = None, save_execution_context=None, enable_process_return_values=None):
|
||||
"""
|
||||
@@ -171,7 +162,7 @@ class Sheerka(Concept):
|
||||
from sheerkapickle.sheerka_handlers import initialize_pickle_handlers
|
||||
initialize_pickle_handlers()
|
||||
|
||||
self.sdp = SheerkaDataProvider(root_folder, self)
|
||||
self.om = SheerkaOntologyManager(self, root_folder, self.cache_only)
|
||||
self.builtin_cache = self.get_builtins_classes_as_dict()
|
||||
|
||||
self.initialize_caching()
|
||||
@@ -181,33 +172,38 @@ class Sheerka(Concept):
|
||||
self.initialize_builtin_evaluators()
|
||||
|
||||
event = Event("Initializing Sheerka.", user_id=self.name)
|
||||
self.sdp.save_event(event)
|
||||
self.om.save_event(event)
|
||||
with ExecutionContext(self.key,
|
||||
event,
|
||||
self,
|
||||
BuiltinConcepts.INIT_SHEERKA,
|
||||
None,
|
||||
desc="Initializing Sheerka.") as exec_context:
|
||||
if self.sdp.first_time:
|
||||
if self.om.current_sdp().first_time:
|
||||
self.first_time_initialisation(exec_context)
|
||||
|
||||
self.initialize_builtin_concepts()
|
||||
self.initialize_concept_node_parsing(exec_context)
|
||||
|
||||
self.initialize_services_deferred(exec_context, self.sdp.first_time)
|
||||
self.initialize_services_deferred(exec_context, self.om.current_sdp().first_time)
|
||||
|
||||
res = ReturnValueConcept(self, True, self)
|
||||
exec_context.add_values(return_values=res)
|
||||
|
||||
if self.cache_manager.is_dirty:
|
||||
self.cache_manager.commit(exec_context)
|
||||
if self.om.is_dirty():
|
||||
self.om.commit(exec_context)
|
||||
|
||||
if self.save_execution_context:
|
||||
self.sdp.save_result(exec_context, is_admin=True)
|
||||
self.om.save_result(exec_context, is_admin=True)
|
||||
|
||||
# append the other ontologies if needed
|
||||
self.om.freeze()
|
||||
self.initialize_ontologies(exec_context)
|
||||
|
||||
# self.init_log.debug(f"Sheerka successfully initialized")
|
||||
|
||||
except IOError as e:
|
||||
res = ReturnValueConcept(self, False, self.get(BuiltinConcepts.ERROR), e)
|
||||
res = ReturnValueConcept(self.name, False, self.new(BuiltinConcepts.ERROR, body=e))
|
||||
|
||||
finally:
|
||||
self.during_initialisation = False
|
||||
@@ -216,28 +212,28 @@ class Sheerka(Concept):
|
||||
|
||||
def initialize_caching(self):
|
||||
|
||||
cache = IncCache(default=lambda k: self.sdp.get(self.OBJECTS_IDS_ENTRY, k))
|
||||
self.cache_manager.register_cache(self.OBJECTS_IDS_ENTRY, cache)
|
||||
cache = IncCache().auto_configure(self.OBJECTS_IDS_ENTRY)
|
||||
self.om.register_cache(self.OBJECTS_IDS_ENTRY, cache)
|
||||
|
||||
cache = DictionaryCache(default=lambda k: self.sdp.get(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, k))
|
||||
self.cache_manager.register_cache(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache)
|
||||
self.cache_manager.get(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, None) # to init from sdp
|
||||
cache = DictionaryCache().auto_configure(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY)
|
||||
self.om.register_cache(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache)
|
||||
self.om.get(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, None) # to init from sdp
|
||||
|
||||
cache = DictionaryCache(default=lambda k: self.sdp.get(self.CONCEPTS_SYA_DEFINITION_ENTRY, k))
|
||||
self.cache_manager.register_cache(self.CONCEPTS_SYA_DEFINITION_ENTRY, cache)
|
||||
self.cache_manager.get(self.CONCEPTS_SYA_DEFINITION_ENTRY, None) # to init from sdp
|
||||
cache = DictionaryCache().auto_configure(self.CONCEPTS_SYA_DEFINITION_ENTRY)
|
||||
self.om.register_cache(self.CONCEPTS_SYA_DEFINITION_ENTRY, cache)
|
||||
self.om.get(self.CONCEPTS_SYA_DEFINITION_ENTRY, None) # to init from sdp
|
||||
|
||||
cache = DictionaryCache()
|
||||
self.cache_manager.register_cache(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache, persist=False)
|
||||
cache = DictionaryCache().auto_configure(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY)
|
||||
self.om.register_cache(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache, persist=False)
|
||||
|
||||
cache = DictionaryCache()
|
||||
self.cache_manager.register_cache(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY, cache, persist=False)
|
||||
cache = DictionaryCache().auto_configure(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY)
|
||||
self.om.register_cache(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY, cache, persist=False)
|
||||
|
||||
cache = Cache()
|
||||
self.cache_manager.register_cache(self.CONCEPTS_GRAMMARS_ENTRY, cache, persist=False)
|
||||
cache = Cache().auto_configure(self.CONCEPTS_GRAMMARS_ENTRY)
|
||||
self.om.register_cache(self.CONCEPTS_GRAMMARS_ENTRY, cache, persist=False)
|
||||
|
||||
cache = Cache()
|
||||
self.cache_manager.register_cache(self.CHICKEN_AND_EGG_CONCEPTS_ENTRY, cache, persist=False)
|
||||
cache = Cache().auto_configure(self.CHICKEN_AND_EGG_CONCEPTS_ENTRY)
|
||||
self.om.register_cache(self.CHICKEN_AND_EGG_CONCEPTS_ENTRY, cache, persist=False)
|
||||
|
||||
def initialize_services(self):
|
||||
"""
|
||||
@@ -261,13 +257,12 @@ class Sheerka(Concept):
|
||||
:return:
|
||||
"""
|
||||
# self.init_log.debug("Initializing services (deferred)")
|
||||
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "initialize_deferred"):
|
||||
service.initialize_deferred(context, is_first_time)
|
||||
|
||||
def first_time_initialisation(self, context):
|
||||
self.record_var(context, self.name, "save_execution_context", True)
|
||||
self.record_var(context, self.name, "save_execution_context", self.save_execution_context)
|
||||
|
||||
def initialize_builtin_concepts(self):
|
||||
"""
|
||||
@@ -339,27 +334,18 @@ class Sheerka(Concept):
|
||||
def initialize_concept_node_parsing(self, context):
|
||||
# self.init_log.debug("Initializing concepts by first keyword.")
|
||||
|
||||
concepts_by_first_keyword = self.cache_manager.copy(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY)
|
||||
concepts_by_first_keyword = self.om.current_cache_manager().copy(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY)
|
||||
res = self.bnp.resolve_concepts_by_first_keyword(context, concepts_by_first_keyword)
|
||||
self.cache_manager.put(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, res.body)
|
||||
self.om.put(self.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, res.body)
|
||||
|
||||
def reset(self, cache_only=False):
|
||||
if self.cache_manager.cache_only != cache_only:
|
||||
self.cache_manager.reset(cache_only)
|
||||
self.initialize_caching()
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "initialize"):
|
||||
service.initialize()
|
||||
else:
|
||||
self.cache_manager.clear()
|
||||
def initialize_ontologies(self, context):
|
||||
ontologies = self.om.current_sdp().load_ontologies()
|
||||
if not ontologies:
|
||||
return
|
||||
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "reset"):
|
||||
service.reset()
|
||||
|
||||
self.printer_handler.reset()
|
||||
self.sdp.reset()
|
||||
self.locals = {}
|
||||
for ontology_name in list(reversed(ontologies))[1:]:
|
||||
self.om.push_ontology(ontology_name, False)
|
||||
self.initialize_services_deferred(context, False)
|
||||
|
||||
# @profile(filename="profile_80")
|
||||
def evaluate_user_input(self, text: str, user_name="kodjo"):
|
||||
@@ -371,9 +357,9 @@ class Sheerka(Concept):
|
||||
:return:
|
||||
"""
|
||||
# self.log.debug(f"Processing user input '{text}', {user_name=}.")
|
||||
my_debug(f"****************** Processing user input '{text}', {user_name=}.***********************************")
|
||||
# my_debug(f"****************** Processing user input '{text}', {user_name=}.***********************************")
|
||||
event = Event(text, user_name)
|
||||
self.sdp.save_event(event)
|
||||
self.om.save_event(event)
|
||||
|
||||
with ExecutionContext(self.key,
|
||||
event,
|
||||
@@ -391,8 +377,8 @@ class Sheerka(Concept):
|
||||
ret = self.execute(execution_context, [user_input, reduce_requested], EXECUTE_STEPS)
|
||||
execution_context.add_values(return_values=ret)
|
||||
|
||||
if self.cache_manager.is_dirty:
|
||||
self.cache_manager.commit(execution_context)
|
||||
if self.om.is_dirty:
|
||||
self.om.commit(execution_context)
|
||||
|
||||
self.publish(execution_context, EVENT_USER_INPUT_EVALUATED)
|
||||
|
||||
@@ -489,13 +475,13 @@ class Sheerka(Concept):
|
||||
return None
|
||||
|
||||
if key[1]:
|
||||
concept = self.cache_manager.get(self.CONCEPTS_BY_ID_ENTRY, key[1])
|
||||
concept = self.om.get(self.CONCEPTS_BY_ID_ENTRY, key[1])
|
||||
else:
|
||||
concept = self.cache_manager.get(self.CONCEPTS_BY_NAME_ENTRY, key[0])
|
||||
concept = self.om.get(self.CONCEPTS_BY_NAME_ENTRY, key[0])
|
||||
else:
|
||||
concept = self.cache_manager.get(self.CONCEPTS_BY_NAME_ENTRY, key)
|
||||
concept = self.om.get(self.CONCEPTS_BY_NAME_ENTRY, key)
|
||||
|
||||
if concept is None:
|
||||
if concept is NotFound:
|
||||
return None
|
||||
return new_instances(concept) if return_new else concept
|
||||
|
||||
@@ -556,6 +542,76 @@ class Sheerka(Concept):
|
||||
concept._metadata.is_evaluated = True # because we have manually set the variables
|
||||
return concept
|
||||
|
||||
def push_ontology(self, context, name, cache_only=False):
|
||||
|
||||
try:
|
||||
if self.om.already_on_top(name):
|
||||
return self.ret(self.name, True, self.new(BuiltinConcepts.SUCCESS))
|
||||
except OntologyAlreadyExists:
|
||||
return self.ret(self.name, False, self.new(BuiltinConcepts.ONTOLOGY_ALREADY_DEFINED, body=name))
|
||||
|
||||
# record sheerka and services states
|
||||
self.om.record_sheerka_state()
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "save_state"):
|
||||
service.save_state(context)
|
||||
if hasattr(service, "reset_state"):
|
||||
service.reset_state()
|
||||
|
||||
self.om.push_ontology(name, cache_only)
|
||||
|
||||
# Not the first time for this ontology. Update the services
|
||||
if name in self.om.current_sdp().load_ontologies():
|
||||
self.initialize_services_deferred(context, False)
|
||||
|
||||
self.om.save_ontologies()
|
||||
|
||||
return self.ret(self.name, True, self.new(BuiltinConcepts.SUCCESS))
|
||||
|
||||
def pop_ontology(self):
|
||||
ontology = self.om.pop_ontology()
|
||||
|
||||
self.om.reset_sheerka_state()
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "restore_state"):
|
||||
service.restore_state()
|
||||
if hasattr(service, "reset_state"):
|
||||
service.reset_state()
|
||||
|
||||
self.om.save_ontologies()
|
||||
return self.ret(self.name, True, self.new(BuiltinConcepts.ONTOLOGY_REMOVED, body=ontology))
|
||||
|
||||
def get_ontology(self, context):
|
||||
self.om.record_sheerka_state()
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "save_state"):
|
||||
service.save_state(context)
|
||||
|
||||
return self.om.get_ontology()
|
||||
|
||||
def add_ontology(self, context, ontology):
|
||||
"""
|
||||
Add the previously recorded ontology on the top
|
||||
"""
|
||||
|
||||
# save the state of the current ontology
|
||||
self.om.record_sheerka_state()
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "save_state"):
|
||||
service.save_state(context)
|
||||
# if hasattr(service, "reset_state"): # no need to do it twice
|
||||
# service.reset_state()
|
||||
|
||||
self.om.add_ontology(ontology)
|
||||
|
||||
# update sheerka with this new ontology
|
||||
self.om.reset_sheerka_state()
|
||||
for service in self.services.values():
|
||||
if hasattr(service, "restore_state"):
|
||||
service.restore_state()
|
||||
if hasattr(service, "reset_state"):
|
||||
service.reset_state()
|
||||
|
||||
def ret(self, who: str, status: bool, value, parents=None):
|
||||
"""
|
||||
Creates and returns a ReturnValue concept
|
||||
@@ -665,7 +721,7 @@ class Sheerka(Concept):
|
||||
if not isinstance(obj, Concept):
|
||||
return True
|
||||
|
||||
return obj.key != str(BuiltinConcepts.UNKNOWN_CONCEPT)
|
||||
return obj.key not in (BuiltinConcepts.UNKNOWN_CONCEPT, BuiltinConcepts.UNKNOWN_RULE)
|
||||
|
||||
@staticmethod
|
||||
def isinstance(a, b):
|
||||
@@ -791,40 +847,6 @@ class Sheerka(Concept):
|
||||
def test_error(self):
|
||||
raise Exception("I can raise an error")
|
||||
|
||||
def test_only_force_sya_def(self, context, list_of_def):
|
||||
"""
|
||||
Set the precedence and/or the associativity of a concept
|
||||
FOR TESTS PURPOSE. TO REMOVE EVENTUALLY
|
||||
:param context:
|
||||
:param list_of_def list of tuple(concept_id, precedence (int), SyaAssociativity)
|
||||
:return:
|
||||
"""
|
||||
|
||||
# validate the entries
|
||||
# If one entry is an invalid concept, rollback everything
|
||||
for concept_id, precedence, associativity in list_of_def:
|
||||
if concept_id == BuiltinConcepts.UNKNOWN_CONCEPT:
|
||||
return self.ret(self.name,
|
||||
False,
|
||||
self.new(BuiltinConcepts.ERROR, body=f"Concept {concept_id} is not known"))
|
||||
|
||||
sya_def = self.cache_manager.copy(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY) or {}
|
||||
|
||||
# update the definitions
|
||||
for concept_id, precedence, associativity in list_of_def:
|
||||
if precedence is None and associativity is None:
|
||||
try:
|
||||
del self.sya_definitions[concept_id]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
sya_def[concept_id] = (precedence, associativity)
|
||||
|
||||
# put in cache
|
||||
self.cache_manager.put(self.RESOLVED_CONCEPTS_SYA_DEFINITION_ENTRY, False, sya_def)
|
||||
|
||||
return self.ret(self.name, True, self.new(BuiltinConcepts.SUCCESS))
|
||||
|
||||
def test_only_add_in_cache(self, concept: Concept):
|
||||
"""
|
||||
Adds a concept template in cache.
|
||||
@@ -840,7 +862,7 @@ class Sheerka(Concept):
|
||||
if concept.key is None:
|
||||
raise KeyError()
|
||||
|
||||
self.cache_manager.add_concept(concept)
|
||||
self.om.add_concept(concept)
|
||||
|
||||
return concept
|
||||
|
||||
@@ -848,7 +870,7 @@ class Sheerka(Concept):
|
||||
def to_profile():
|
||||
sheerka = Sheerka()
|
||||
sheerka.initialize(save_execution_context=False, enable_process_return_values=False)
|
||||
event = Event("test", "kodjoko")
|
||||
event = Event("test", "kodjo")
|
||||
execution_context = ExecutionContext(sheerka.name,
|
||||
event,
|
||||
sheerka,
|
||||
|
||||
@@ -0,0 +1,470 @@
|
||||
from cache.CacheManager import CacheManager
|
||||
from cache.DictionaryCache import DictionaryCache
|
||||
from core.concept import copy_concepts_attrs, load_concepts_attrs
|
||||
from core.global_symbols import NotFound, Removed
|
||||
from core.utils import sheerka_deepcopy
|
||||
from sdp.sheerkaDataProvider import SheerkaDataProvider
|
||||
|
||||
|
||||
class OntologyManagerFrozen(Exception):
    """Raised on an attempt to register a cache while the ontology manager is frozen."""
|
||||
|
||||
|
||||
class OntologyManagerNotFrozen(Exception):
    """Raised on an attempt to push or pop an ontology layer while the manager is not frozen."""
|
||||
|
||||
|
||||
class OntologyManagerCannotPopLatest(Exception):
    """Raised on an attempt to pop the last remaining (root) ontology layer."""
|
||||
|
||||
|
||||
class OntologyAlreadyExists(Exception):
    """
    Raised when the ontology 'name' exists AND is not the top layer.

    The offending layer name is available both as ``self.name`` and,
    via the standard Exception protocol, as ``self.args[0]`` / ``str(exc)``.
    """

    def __init__(self, name):
        # Chain to Exception so that str(exc) / logging show the layer name
        # instead of an empty message (the original skipped super().__init__).
        super().__init__(name)
        self.name = name
|
||||
|
||||
|
||||
class AlternateSdp:
    """
    Fallback lookup over a stack of ontology layers.

    Wraps the cache managers of the given ontologies (top layer first) and
    resolves a key by probing each layer in order until a value is found.
    """

    def __init__(self, ontologies):
        # :param ontologies: iterable of Ontology, ordered top layer first
        self.names = [o.name for o in ontologies]
        self.cache_managers = [o.cache_manager for o in ontologies]

    def _search(self, method_name, cache_name, key):
        """
        Shared lookup used by both get() and alt_get() (their bodies were
        previously duplicated verbatim except for the accessor called).

        :param method_name: name of the CacheManager accessor ('get' or 'alt_get')
        :param cache_name: cache to search in each layer
        :param key: key to resolve
        :return: the first non-NotFound value found, or NotFound
        """
        last = len(self.cache_managers) - 1
        for i, cache_manager in enumerate(self.cache_managers):
            value = getattr(cache_manager, method_name)(cache_name, key)
            if value is not NotFound:
                return value

            if i != last:
                # forget that the key was requested
                cache_manager.remove_initialized_key(cache_name, key)

        return NotFound

    def get(self, cache_name, key):
        """Resolve 'key' in 'cache_name' through the layers via CacheManager.get()."""
        return self._search("get", cache_name, key)

    def alt_get(self, cache_name, key):
        """Resolve 'key' in 'cache_name' through the layers via CacheManager.alt_get()."""
        return self._search("alt_get", cache_name, key)

    def exists(self, cache_name, key):
        """Return True if any layer's cache manager reports 'key' in 'cache_name'."""
        return any(cache_manager.exists(cache_name, key)
                   for cache_manager in self.cache_managers)
|
||||
|
||||
|
||||
class Ontology:
    """
    One layer of the ontology stack: a name, the CacheManager holding the
    layer's data, and the AlternateSdp used to fall through to lower layers
    (None for the root layer).
    """

    def __init__(self, name, cache_manager: CacheManager, alt_sdp: AlternateSdp):
        self.name = name
        self.alt_sdp = alt_sdp
        self.cache_manager = cache_manager
        # Extra Sheerka state snapshotted by the manager
        # (see SheerkaOntologyManager.record_sheerka_state); None until recorded.
        self.concepts_attributes = None
        self.local_variables = None

    def __repr__(self):
        return f"Ontology('{self.name}')"
|
||||
|
||||
|
||||
class SheerkaOntologyManager:
    """
    Maintains a stack of Ontology layers; index 0 is the top (current) layer.

    Writes always target the top layer's CacheManager; reads fall through to
    lower layers via each layer's AlternateSdp. Cache registration is only
    allowed before freeze(); pushing/popping layers only after.
    """

    # Name of the bottom layer that always exists and can never be popped.
    ROOT_ONTOLOGY_NAME = "__default__"

    def __init__(self, sheerka, root_folder, cache_only):
        # :param sheerka: owning Sheerka instance
        # :param root_folder: folder handed to SheerkaDataProvider for persistence
        # :param cache_only: default cache-only flag for created CacheManagers
        self.sheerka = sheerka
        self.root_folder = root_folder
        self.cache_only = cache_only
        self.frozen = False  # becomes True after freeze(); gates push/pop vs register

        # Root layer has no AlternateSdp: there is nothing below it to fall through to.
        ref_cache_manager = CacheManager(self.cache_only, sdp=SheerkaDataProvider(root_folder, self.sheerka))
        self.ontologies = [Ontology(self.ROOT_ONTOLOGY_NAME, ref_cache_manager, None)]

    @property
    def ontologies_names(self):
        # Names of all layers, top first.
        return [o.name for o in self.ontologies]

    def freeze(self):
        # Lock cache registration; from now on layers may be pushed/popped.
        self.frozen = True
        return self

    def test_only_unfreeze(self):
        # To remove ASAP
        self.frozen = False
        return self

    def push_ontology(self, name, cache_only=None):
        """
        Add an ontology layer
        :param name: name of the layer
        :param cache_only: overrides the manager's default cache_only for this layer when set
        """
        if not self.frozen:
            raise OntologyManagerNotFrozen()

        # pseudo clone cache manager: re-register a clone of every cache of the
        # current top layer into a fresh CacheManager backed by this layer's sdp
        cache_manager = CacheManager(cache_only or self.cache_only, sdp=self.get_sdp(name))
        for cache_name, cache_def in self.current_cache_manager().caches.items():
            clone = cache_def.cache.clone()
            if cache_name in self.current_cache_manager().concept_caches:
                cache_manager.register_concept_cache(cache_name, clone, cache_def.get_key, cache_def.use_ref)
            else:
                cache_manager.register_cache(cache_name, clone, cache_def.persist, cache_def.use_ref)

            # Dictionary cache special treatment
            if isinstance(clone, DictionaryCache):
                clone.put(False, cache_def.cache.copy())  # only a shadow copy for now
                clone.reset_events()

        # The new layer falls through to all layers that existed before the push.
        alt_sdp = AlternateSdp(self.ontologies)
        self.ontologies.insert(0, Ontology(name, cache_manager, alt_sdp))
        return self

    def pop_ontology(self):
        """
        Remove the top ontology layer

        :return: the popped Ontology (so it can be re-installed via add_ontology)
        :raises OntologyManagerNotFrozen: when the manager is not frozen
        :raises OntologyManagerCannotPopLatest: when only the root layer remains
        """
        if not self.frozen:
            raise OntologyManagerNotFrozen()

        if len(self.ontologies) == 1:
            raise OntologyManagerCannotPopLatest()

        return self.ontologies.pop(0)

    def add_ontology(self, ontology: Ontology):
        """
        Put back a previously created ontology on top of the stack
        :param ontology: the Ontology instance to reinstall (e.g. one returned by pop_ontology)
        """
        if not self.frozen:
            raise OntologyManagerNotFrozen()

        # Rebuild the fallback chain against the layers currently below it.
        ontology.alt_sdp = AlternateSdp(self.ontologies)
        self.ontologies.insert(0, ontology)
        for cache_def in ontology.cache_manager.caches.values():
            cache_def.cache.reset_initialized_keys()

        return self

    def get_ontology(self, name=None):
        """
        Return the first ontology with the corresponding name
        When no name is given, return the top ontology
        :raises KeyError: when no layer has that name
        """
        if name is None:
            return self.ontologies[0]

        for ontology in self.ontologies:
            if ontology.name == name:
                return ontology

        raise KeyError(name)

    def save_ontologies(self):
        # Persist the list of layer names through the current sdp.
        self.current_sdp().save_ontologies(self.ontologies_names)

    # def load_ontologies(self):
    #     ontologies = self.current_sdp().load_ontologies()
    #     if not ontologies:
    #         return
    #
    #     for ontology_name in list(reversed(ontologies))[1:]:
    #         self.push_ontology(ontology_name)

    def already_on_top(self, name):
        """
        Returns True if the ontology 'name' is already on the top
        Raises a OntologyAlreadyExists exception if the ontology exists, but not at the top
        """
        if self.ontologies[0].name == name:
            return True

        if name in self.ontologies_names:
            raise OntologyAlreadyExists(name)

        return False

    def record_sheerka_state(self):
        """
        Snapshot extra Sheerka state into the current (top) ontology layer.
        """
        # TODO persist these information ?
        self.current_ontology().concepts_attributes = copy_concepts_attrs()
        self.current_ontology().local_variables = sheerka_deepcopy(self.sheerka.locals)

    def reset_sheerka_state(self):
        # Restore the state previously captured by record_sheerka_state(), if any.
        if self.current_ontology().concepts_attributes is not None:
            load_concepts_attrs(self.current_ontology().concepts_attributes)
        if self.current_ontology().local_variables is not None:
            self.sheerka.locals = self.current_ontology().local_variables

    def current_cache_manager(self) -> CacheManager:
        # CacheManager of the top layer.
        return self.ontologies[0].cache_manager

    def current_sdp(self) -> SheerkaDataProvider:
        # Data provider of the top layer.
        return self.ontologies[0].cache_manager.sdp

    def current_ontology(self) -> Ontology:
        # The top layer itself.
        return self.ontologies[0]

    def register_concept_cache(self, name, cache, get_key, use_ref):
        """
        Define which type of cache along with how to compute the key
        :param name: cache name
        :param cache: cache instance
        :param get_key: callable computing a concept's key for this cache
        :param use_ref: whether the cache stores references
        :raises OntologyManagerFrozen: registration is only allowed before freeze()
        :return:
        """
        if self.frozen:
            raise OntologyManagerFrozen

        return self.current_cache_manager().register_concept_cache(name, cache, get_key, use_ref)

    def register_cache(self, name, cache, persist=True, use_ref=False):
        """
        Define which type of cache along with how to compute the key
        :param name: cache name
        :param cache: cache instance
        :param persist: whether the cache content is persisted on commit
        :param use_ref: whether the cache stores references
        :raises OntologyManagerFrozen: registration is only allowed before freeze()
        :return:
        """
        if self.frozen:
            raise OntologyManagerFrozen

        return self.current_cache_manager().register_cache(name, cache, persist, use_ref)

    def add_concept(self, concept):
        """
        We need multiple indexes to retrieve a concept
        So the new concept is dispatched into multiple caches
        :param concept:
        :return:
        """
        return self.current_cache_manager().add_concept(concept)

    def update_concept(self, old, new):
        """
        Update a concept in the top layer.
        :param old: old version of the concept
        :param new: new version of the concept
        :return:
        """

        return self.current_cache_manager().update_concept(old, new, self.ontologies[0].alt_sdp)

    def remove_concept(self, concept):
        """
        Remove a concept from all caches of the top layer
        :param concept:
        :return:
        """
        return self.current_cache_manager().remove_concept(concept, self.ontologies[0].alt_sdp)

    def get(self, cache_name, key):
        """
        Browses the ontologies, looking for the data 'key' in entry 'cache_name'
        If a value is found in a low level cache, updates the top level one
        :param cache_name:
        :param key:
        :return: the value, or NotFound (a Removed tombstone is reported as NotFound)
        """
        value = self.current_cache_manager().get(cache_name, key, self.ontologies[0].alt_sdp)
        return NotFound if value is Removed else value

    def exists(self, cache_name, key):
        """
        Browses the ontologies to check if the data 'key' is defined in entry 'cache_name'
        :param cache_name:
        :param key:
        :return:
        """
        for ontology in self.ontologies:
            if ontology.cache_manager.exists(cache_name, key):
                return True

        return False

    def list(self, entry, cache_only=False):
        """
        list all entries (values only) across all layers
        """
        return list(self.get_all(entry, cache_only).values())

    def list_by_key(self, entry, key):
        """
        List all entries of a given key
        If the values are lists, sets of dictionaries, they will be concatenated
        Otherwise it will raise an error
        """
        res = None

        def update_values(_res, values_):
            # Merge one layer's values into the accumulator _res.
            if values_ is NotFound:
                return _res
            elif values_ is Removed:
                # NOTE(review): if a lower layer yields Removed before anything
                # was accumulated, _res is still None and .clear() will raise
                # AttributeError — confirm whether that ordering can occur.
                _res.clear()
            elif isinstance(values_, dict):
                if _res is None:
                    _res = values_.copy()
                elif isinstance(_res, dict):
                    _res.update(values_)
                else:
                    raise ValueError(f"Expecting dict while found '{values_}'")
            elif isinstance(values_, list):
                if _res is None:
                    _res = values_.copy()
                elif isinstance(_res, list):
                    _res.extend(values_)
                else:
                    raise ValueError(f"Expecting list while found '{values_}'")
            else:
                raise NotImplementedError()

            return _res

        # Walk bottom layer first so that upper layers override/extend lower ones.
        for ontology in reversed(self.ontologies):

            from_cache_values = ontology.cache_manager.get(entry, key)
            if from_cache_values is not NotFound:
                res = update_values(res, from_cache_values)
            else:
                # not in the layer's cache: fall back to its sdp
                from_sdp_values = ontology.cache_manager.sdp.get(entry, key)
                res = update_values(res, from_sdp_values)

        return res

    def get_all(self, entry, cache_only=False):
        """
        Return all key, value from all ontologies
        First look in sdp, then override with the cache, for all ontologies
        :param entry: cache name / sdp entry
        :param cache_only: Do not fetch data from remote sdp
        """
        res = {}
        # Bottom layer first so upper layers override lower ones.
        for ontology in reversed(self.ontologies):

            if not cache_only:
                # get values from sdp
                values = ontology.cache_manager.sdp.get(entry)
                if values is Removed:
                    res.clear()

                elif values is not NotFound:
                    for k, v in values.items():
                        if v is Removed:
                            # NOTE(review): raises KeyError if k was never
                            # accumulated — confirm tombstones always shadow
                            # an existing key.
                            del res[k]
                        else:
                            res[k] = v

            # override with the values from cache
            try:
                cache = ontology.cache_manager.get_cache(entry)

                if cache.is_cleared():
                    res.clear()

                for k in cache:
                    v = cache.alt_get(k)  # Do not use get(), because of IncCache()
                    if v is Removed:
                        # NOTE(review): a KeyError here is swallowed by the
                        # surrounding except, silently skipping the rest of
                        # this layer's cache override — confirm intended.
                        del res[k]
                    else:
                        res[k] = v

            except KeyError:
                # the layer has no cache registered under 'entry'
                pass

        return res

    def put(self, cache_name, key, value):
        """
        Add to a cache (always the top layer)
        :param cache_name:
        :param key:
        :param value:
        :return:
        """
        return self.current_cache_manager().put(cache_name, key, value, self.ontologies[0].alt_sdp)

    def delete(self, cache_name, key, value=None):
        """
        Delete an entry (from the top layer)
        :param cache_name:
        :param key:
        :param value:
        :return:
        """
        return self.current_cache_manager().delete(cache_name, key, value, self.ontologies[0].alt_sdp)

    def populate(self, cache_name, populate_function, get_key_function, reset_events=False, all_ontologies=False):
        """
        Populate a specific cache with a bunch of items
        :param cache_name:
        :param populate_function: how to get the items
        :param get_key_function: how to get the key, out of an item
        :param reset_events: reset the to_add and to_remove events after the populate
        :param all_ontologies: populate all ontology layers
        :return:
        """
        self.current_cache_manager().populate(cache_name, populate_function, get_key_function, reset_events)
        if all_ontologies:
            for ontology in self.ontologies[1:]:
                ontology.cache_manager.populate(cache_name, populate_function, get_key_function, reset_events)

    def copy(self, cache_name):
        """
        Get a copy of the content of the named cache in the top ontology layer
        :param cache_name:
        :return:
        """
        return self.current_cache_manager().caches[cache_name].cache.copy()

    def commit(self, context):
        """
        Persist all the caches (of the top layer) into a physical persistence storage
        :param context:
        :return:
        """
        return self.current_cache_manager().commit(context)

    def clear(self, cache_name=None):
        # Clear one cache (or all, when cache_name is None) of the top layer.
        return self.current_cache_manager().clear(cache_name)

    def get_sdp(self, name=None):
        """
        Return new instance of SheerkaDataProvider for 'name',
        or the current layer's sdp when no name is given
        """
        if name:
            return SheerkaDataProvider(self.root_folder, self.sheerka, name)
        else:
            return self.current_sdp()

    def save_event(self, event):
        # Delegate event persistence to the current sdp.
        return self.current_sdp().save_event(event)

    def save_result(self, execution_context, is_admin):
        # Delegate result persistence to the current sdp.
        return self.current_sdp().save_result(execution_context, is_admin)

    def is_dirty(self):
        # True when the top layer's cache manager has uncommitted changes.
        return self.current_cache_manager().is_dirty
|
||||
@@ -1,5 +1,6 @@
|
||||
import sys
|
||||
import time
|
||||
from operator import attrgetter
|
||||
from os import path
|
||||
|
||||
from core.builtin_concepts_ids import BuiltinConcepts, BuiltinContainers
|
||||
@@ -27,13 +28,16 @@ class SheerkaAdmin(BaseService):
|
||||
self.sheerka.bind_service_method(self.extended_isinstance, False)
|
||||
self.sheerka.bind_service_method(self.is_container, False)
|
||||
self.sheerka.bind_service_method(self.format_rules, False)
|
||||
self.sheerka.bind_service_method(self.admin_push_ontology, True, as_name="push_ontology")
|
||||
self.sheerka.bind_service_method(self.admin_pop_ontology, True, as_name="pop_ontology")
|
||||
self.sheerka.bind_service_method(self.ontologies, False)
|
||||
|
||||
def caches_names(self):
|
||||
"""
|
||||
Returns the name of all the caches
|
||||
:return:
|
||||
"""
|
||||
return list(self.sheerka.cache_manager.caches.keys())
|
||||
return list(self.sheerka.om.current_cache_manager().caches.keys())
|
||||
|
||||
def cache(self, name, *keys):
|
||||
"""
|
||||
@@ -42,13 +46,13 @@ class SheerkaAdmin(BaseService):
|
||||
:param keys: look for a specific key. May ask to sdp if the key is not in cache
|
||||
:return:
|
||||
"""
|
||||
if name not in self.sheerka.cache_manager.caches:
|
||||
if name not in self.sheerka.om.current_cache_manager().caches:
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"cache": name})
|
||||
|
||||
if not keys:
|
||||
return self.sheerka.cache_manager.caches[name].cache.copy()
|
||||
return self.sheerka.om.current_cache_manager().caches[name].cache.copy()
|
||||
|
||||
return {key: self.sheerka.cache_manager.get(name, key) for key in keys}
|
||||
return {key: self.sheerka.om.get(name, key) for key in keys}
|
||||
|
||||
def restore(self, concept_file=CONCEPTS_FILE_TO_USE):
|
||||
"""
|
||||
@@ -119,7 +123,8 @@ class SheerkaAdmin(BaseService):
|
||||
raise e
|
||||
|
||||
def concepts(self):
|
||||
return self.sheerka.new(BuiltinConcepts.TO_LIST, body=self.sheerka.sdp.list(self.sheerka.CONCEPTS_BY_ID_ENTRY))
|
||||
concepts = sorted(self.sheerka.om.list(self.sheerka.CONCEPTS_BY_ID_ENTRY), key=lambda item: int(item.id))
|
||||
return self.sheerka.new(BuiltinConcepts.TO_LIST, body=concepts)
|
||||
|
||||
def desc(self, *concepts):
|
||||
ensure_concept(*concepts)
|
||||
@@ -146,7 +151,6 @@ class SheerkaAdmin(BaseService):
|
||||
def format_rules(self):
|
||||
return self.sheerka.new(BuiltinConcepts.TO_LIST, items=self.sheerka.get_format_rules())
|
||||
|
||||
|
||||
def extended_isinstance(self, a, b):
|
||||
"""
|
||||
switch between sheerka.isinstance and builtin.isinstance
|
||||
@@ -171,3 +175,13 @@ class SheerkaAdmin(BaseService):
|
||||
return False
|
||||
|
||||
return obj.key in BuiltinContainers
|
||||
|
||||
def admin_push_ontology(self, context, name):
|
||||
return self.sheerka.push_ontology(context, name, False)
|
||||
|
||||
def admin_pop_ontology(self):
|
||||
return self.sheerka.pop_ontology()
|
||||
|
||||
def ontologies(self):
|
||||
ontologies = self.sheerka.om.ontologies_names
|
||||
return self.sheerka.new(BuiltinConcepts.TO_LIST, body=ontologies)
|
||||
|
||||
@@ -3,8 +3,9 @@ from dataclasses import dataclass
|
||||
from cache.Cache import Cache
|
||||
from cache.ListCache import ListCache
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.global_symbols import EVENT_CONCEPT_PRECEDENCE_MODIFIED, EVENT_RULE_PRECEDENCE_MODIFIED, RULE_COMPARISON_CONTEXT, \
|
||||
CONCEPT_COMPARISON_CONTEXT
|
||||
from core.global_symbols import EVENT_CONCEPT_PRECEDENCE_MODIFIED, EVENT_RULE_PRECEDENCE_MODIFIED, \
|
||||
RULE_COMPARISON_CONTEXT, \
|
||||
CONCEPT_COMPARISON_CONTEXT, NotFound
|
||||
from core.builtin_helpers import ensure_concept_or_rule
|
||||
from core.concept import Concept
|
||||
from core.sheerka.services.SheerkaRuleManager import SheerkaRuleManager
|
||||
@@ -136,8 +137,8 @@ class SheerkaComparisonManager(BaseService):
|
||||
|
||||
def _add_comparison(self, context, comparison_obj):
|
||||
key = self._compute_key(comparison_obj.property, comparison_obj.context)
|
||||
previous = self.sheerka.cache_manager.get(self.COMPARISON_ENTRY, key)
|
||||
new = previous.copy() if previous else []
|
||||
previous = self.sheerka.om.get(self.COMPARISON_ENTRY, key)
|
||||
new = previous.copy() if isinstance(previous, list) else []
|
||||
|
||||
for co in new:
|
||||
if co.property == comparison_obj.property and \
|
||||
@@ -176,10 +177,10 @@ class SheerkaComparisonManager(BaseService):
|
||||
chicken_an_egg = self.sheerka.new(BuiltinConcepts.CHICKEN_AND_EGG, body=concepts_in_cycle)
|
||||
return self.sheerka.ret(self.NAME, False, chicken_an_egg)
|
||||
|
||||
self.sheerka.cache_manager.put(self.COMPARISON_ENTRY, key, comparison_obj)
|
||||
self.sheerka.cache_manager.put(self.RESOLVED_COMPARISON_ENTRY, key, self._compute_weights(new,
|
||||
lesser_objs_ids,
|
||||
greatest_objs_ids))
|
||||
self.sheerka.om.put(self.COMPARISON_ENTRY, key, comparison_obj)
|
||||
self.sheerka.om.put(self.RESOLVED_COMPARISON_ENTRY, key, self._compute_weights(new,
|
||||
lesser_objs_ids,
|
||||
greatest_objs_ids))
|
||||
|
||||
if comparison_obj.property == BuiltinConcepts.PRECEDENCE:
|
||||
if comparison_obj.context == CONCEPT_COMPARISON_CONTEXT:
|
||||
@@ -190,11 +191,11 @@ class SheerkaComparisonManager(BaseService):
|
||||
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
|
||||
|
||||
def initialize(self):
|
||||
cache = ListCache(default=lambda k: self.sheerka.sdp.get(self.COMPARISON_ENTRY, k))
|
||||
self.sheerka.cache_manager.register_cache(self.COMPARISON_ENTRY, cache, True, True)
|
||||
cache = ListCache().auto_configure(self.COMPARISON_ENTRY)
|
||||
self.sheerka.om.register_cache(self.COMPARISON_ENTRY, cache, True, True)
|
||||
|
||||
cache = Cache()
|
||||
self.sheerka.cache_manager.register_cache(self.RESOLVED_COMPARISON_ENTRY, cache, persist=False)
|
||||
cache = Cache().auto_configure(self.RESOLVED_COMPARISON_ENTRY)
|
||||
self.sheerka.om.register_cache(self.RESOLVED_COMPARISON_ENTRY, cache, persist=False)
|
||||
|
||||
self.sheerka.bind_service_method(self.set_is_greater_than, True)
|
||||
self.sheerka.bind_service_method(self.set_is_less_than, True)
|
||||
@@ -325,19 +326,26 @@ class SheerkaComparisonManager(BaseService):
|
||||
return self._get_partition(weighted_concept)
|
||||
|
||||
def get_concepts_weights(self, prop_name, comparison_context="#"):
|
||||
weighted_concepts = self.sheerka.cache_manager.get(
|
||||
self.RESOLVED_COMPARISON_ENTRY,
|
||||
self._compute_key(prop_name, comparison_context))
|
||||
# KSI 2021-01-10 This implementation seems to be too complicated
|
||||
# Chances are that there is a better way to implement this.
|
||||
# Note that I don't want to use a DictionaryCache for the RESOLVED_COMPARISON_ENTRY
|
||||
# as I don't need to have all the keys in memory at the same time
|
||||
# Anyway...
|
||||
|
||||
if weighted_concepts is None:
|
||||
key = self._compute_key(prop_name, comparison_context)
|
||||
entries = self.sheerka.cache_manager.get(self.COMPARISON_ENTRY, key)
|
||||
# If the weighted_concepts is in the TOP LAYER cache, we can use it
|
||||
key_to_use = self._compute_key(prop_name, comparison_context)
|
||||
if self.sheerka.om.current_cache_manager().has(self.RESOLVED_COMPARISON_ENTRY, key_to_use):
|
||||
weighted_concepts = self.sheerka.om.get(self.RESOLVED_COMPARISON_ENTRY, key_to_use)
|
||||
|
||||
else:
|
||||
# otherwise, either it's not computed yet or it does not include the info of the current layer
|
||||
# In both case, it is safer to recompute the weights
|
||||
entries = self.sheerka.om.list_by_key(self.COMPARISON_ENTRY, key_to_use)
|
||||
if entries is None:
|
||||
return {}
|
||||
weighted_concepts = {} # Why not put it in cache ???
|
||||
else:
|
||||
weighted_concepts = self._compute_weights(entries)
|
||||
self.sheerka.cache_manager.put(self.RESOLVED_COMPARISON_ENTRY, key, weighted_concepts)
|
||||
self.sheerka.om.put(self.RESOLVED_COMPARISON_ENTRY, key_to_use, weighted_concepts)
|
||||
|
||||
return weighted_concepts
|
||||
|
||||
|
||||
@@ -8,10 +8,9 @@ from cache.SetCache import SetCache
|
||||
from core.builtin_concepts import ErrorConcept
|
||||
from core.builtin_concepts_ids import BuiltinConcepts, AllBuiltinConcepts, BuiltinUnique
|
||||
from core.builtin_helpers import ensure_concept
|
||||
from core.concept import Concept, DEFINITION_TYPE_DEF, DEFINITION_TYPE_BNF, freeze_concept_attrs, NotInit, \
|
||||
ConceptMetadata
|
||||
from core.concept import Concept, DEFINITION_TYPE_DEF, DEFINITION_TYPE_BNF, freeze_concept_attrs, ConceptMetadata
|
||||
from core.error import ErrorObj
|
||||
from core.global_symbols import EVENT_CONCEPT_CREATED
|
||||
from core.global_symbols import EVENT_CONCEPT_CREATED, NotInit, NotFound
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.tokenizer import Tokenizer, TokenKind
|
||||
from sdp.sheerkaDataProvider import SheerkaDataProviderDuplicateKeyError
|
||||
@@ -101,34 +100,28 @@ class SheerkaConceptManager(BaseService):
|
||||
self.sheerka.bind_service_method(self.get_by_name, False, visible=False)
|
||||
self.sheerka.bind_service_method(self.get_by_hash, False, visible=False)
|
||||
self.sheerka.bind_service_method(self.get_by_id, False, visible=False)
|
||||
self.sheerka.bind_service_method(self.not_is_variable, False, visible=False)
|
||||
self.sheerka.bind_service_method(self.is_not_a_variable, False, visible=False)
|
||||
|
||||
def params(cache_name):
|
||||
return {
|
||||
'default': lambda k: self.sheerka.sdp.get(cache_name, k),
|
||||
'extend_exists': lambda k: self.sheerka.sdp.exists(cache_name, k)
|
||||
}
|
||||
register_concept_cache = self.sheerka.om.register_concept_cache
|
||||
|
||||
register_concept_cache = self.sheerka.cache_manager.register_concept_cache
|
||||
|
||||
cache = Cache(**params(self.CONCEPTS_BY_ID_ENTRY))
|
||||
cache = Cache().auto_configure(self.CONCEPTS_BY_ID_ENTRY)
|
||||
register_concept_cache(self.CONCEPTS_BY_ID_ENTRY, cache, lambda c: c.id, True)
|
||||
|
||||
cache = ListIfNeededCache(**params(self.CONCEPTS_BY_KEY_ENTRY))
|
||||
cache = ListIfNeededCache().auto_configure(self.CONCEPTS_BY_KEY_ENTRY)
|
||||
register_concept_cache(self.CONCEPTS_BY_KEY_ENTRY, cache, lambda c: c.key, True)
|
||||
|
||||
cache = ListIfNeededCache(**params(self.CONCEPTS_BY_NAME_ENTRY))
|
||||
cache = ListIfNeededCache().auto_configure(self.CONCEPTS_BY_NAME_ENTRY)
|
||||
register_concept_cache(self.CONCEPTS_BY_NAME_ENTRY, cache, lambda c: c.name, True)
|
||||
|
||||
cache = ListIfNeededCache(**params(self.CONCEPTS_BY_HASH_ENTRY))
|
||||
cache = ListIfNeededCache().auto_configure(self.CONCEPTS_BY_HASH_ENTRY)
|
||||
register_concept_cache(self.CONCEPTS_BY_HASH_ENTRY, cache, lambda c: c.get_definition_hash(), True)
|
||||
|
||||
cache = SetCache(default=lambda k: self.sheerka.sdp.get(self.CONCEPTS_REFERENCES_ENTRY, k))
|
||||
self.sheerka.cache_manager.register_cache(self.CONCEPTS_REFERENCES_ENTRY, cache)
|
||||
cache = SetCache().auto_configure(self.CONCEPTS_REFERENCES_ENTRY)
|
||||
self.sheerka.om.register_cache(self.CONCEPTS_REFERENCES_ENTRY, cache)
|
||||
|
||||
def initialize_deferred(self, context, is_first_time):
|
||||
if is_first_time:
|
||||
self.sheerka.cache_manager.put(self.sheerka.OBJECTS_IDS_ENTRY, self.USER_CONCEPTS_IDS, 1000)
|
||||
self.sheerka.om.put(self.sheerka.OBJECTS_IDS_ENTRY, self.USER_CONCEPTS_IDS, 1000)
|
||||
|
||||
def initialize_builtin_concepts(self):
|
||||
"""
|
||||
@@ -146,11 +139,11 @@ class SheerkaConceptManager(BaseService):
|
||||
concept.get_metadata().is_unique = True
|
||||
concept.get_metadata().is_evaluated = True
|
||||
|
||||
from_db = self.sheerka.cache_manager.get(self.CONCEPTS_BY_KEY_ENTRY, concept.get_metadata().key)
|
||||
if from_db is None:
|
||||
from_db = self.sheerka.om.get(self.CONCEPTS_BY_KEY_ENTRY, concept.get_metadata().key)
|
||||
if from_db is NotFound:
|
||||
# self.init_log.debug(f"'{concept.name}' concept is not found in db. Adding.")
|
||||
self.set_id_if_needed(concept, True)
|
||||
self.sheerka.cache_manager.add_concept(concept)
|
||||
self.sheerka.om.add_concept(concept)
|
||||
else:
|
||||
# self.init_log.debug(f"Found concept '{from_db}' in db. Updating.")
|
||||
concept.update_from(from_db)
|
||||
@@ -173,9 +166,9 @@ class SheerkaConceptManager(BaseService):
|
||||
concept.init_key()
|
||||
init_bnf_ret_value = None
|
||||
|
||||
cache_manager = sheerka.cache_manager
|
||||
ontology = sheerka.om
|
||||
|
||||
if cache_manager.exists(self.CONCEPTS_BY_HASH_ENTRY, concept.get_definition_hash()):
|
||||
if ontology.exists(self.CONCEPTS_BY_HASH_ENTRY, concept.get_definition_hash()):
|
||||
error = SheerkaDataProviderDuplicateKeyError(self.CONCEPTS_BY_KEY_ENTRY + "." + concept.key, concept)
|
||||
return sheerka.ret(
|
||||
self.NAME,
|
||||
@@ -186,9 +179,6 @@ class SheerkaConceptManager(BaseService):
|
||||
# set id before saving in db
|
||||
sheerka.set_id_if_needed(concept, False)
|
||||
|
||||
# freeze attributes
|
||||
freeze_concept_attrs(concept)
|
||||
|
||||
# check if the bnf definition is correctly computed
|
||||
try:
|
||||
self.bnp.ensure_bnf(context, concept)
|
||||
@@ -196,7 +186,7 @@ class SheerkaConceptManager(BaseService):
|
||||
return sheerka.ret(self.NAME, False, ex.args[0])
|
||||
|
||||
# compute new concepts_by_first_keyword
|
||||
init_ret_value = self.bnp.get_concepts_by_first_token(context, [concept], True)
|
||||
init_ret_value = self.bnp.compute_concepts_by_first_token(context, [concept], True)
|
||||
if not init_ret_value.status:
|
||||
return sheerka.ret(self.NAME, False, ErrorConcept(init_ret_value.value))
|
||||
concepts_by_first_keyword = init_ret_value.body
|
||||
@@ -208,18 +198,20 @@ class SheerkaConceptManager(BaseService):
|
||||
resolved_concepts_by_first_keyword = init_ret_value.body
|
||||
|
||||
# if everything is fine
|
||||
freeze_concept_attrs(concept)
|
||||
concept.freeze_definition_hash()
|
||||
cache_manager.add_concept(concept)
|
||||
cache_manager.put(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, concepts_by_first_keyword)
|
||||
cache_manager.put(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, resolved_concepts_by_first_keyword)
|
||||
|
||||
ontology.add_concept(concept)
|
||||
ontology.put(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, concepts_by_first_keyword)
|
||||
ontology.put(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, resolved_concepts_by_first_keyword)
|
||||
|
||||
if concept.get_metadata().definition_type == DEFINITION_TYPE_DEF and concept.get_metadata().definition != concept.name:
|
||||
# allow search by definition when definition relevant
|
||||
cache_manager.put(self.sheerka.CONCEPTS_BY_NAME_ENTRY, concept.get_metadata().definition, concept)
|
||||
ontology.put(self.sheerka.CONCEPTS_BY_NAME_ENTRY, concept.get_metadata().definition, concept)
|
||||
|
||||
# update references
|
||||
for ref in self.compute_references(concept):
|
||||
cache_manager.put(self.CONCEPTS_REFERENCES_ENTRY, ref, concept.id)
|
||||
ontology.put(self.CONCEPTS_REFERENCES_ENTRY, ref, concept.id)
|
||||
|
||||
# TODO : this line seems to be useless
|
||||
# The grammar is never reset
|
||||
@@ -246,7 +238,7 @@ class SheerkaConceptManager(BaseService):
|
||||
|
||||
# to_add is a dictionary
|
||||
# to_add = {
|
||||
# 'meta' : {<key, value>} of metadata to add/update,
|
||||
# 'meta' : {<key, value>} of metadata to update,
|
||||
# 'props' : {<key, value>} of properties to add/update,
|
||||
# 'variables': {<key, value>} of variables to add/update,
|
||||
# }
|
||||
@@ -259,12 +251,12 @@ class SheerkaConceptManager(BaseService):
|
||||
# }
|
||||
#
|
||||
sheerka = self.sheerka
|
||||
cache_manager = self.sheerka.cache_manager
|
||||
cache_manager = self.sheerka.om
|
||||
|
||||
if not to_add and not to_remove:
|
||||
return sheerka.ret(self.NAME, False, sheerka.err(NoModificationFound(concept)))
|
||||
|
||||
if not sheerka.cache_manager.exists(self.CONCEPTS_BY_ID_ENTRY, concept.id):
|
||||
if not sheerka.om.exists(self.CONCEPTS_BY_ID_ENTRY, concept.id):
|
||||
return sheerka.ret(self.NAME, False, sheerka.new(BuiltinConcepts.UNKNOWN_CONCEPT, body=concept))
|
||||
|
||||
# modify the metadata. Almost all ConceptMetadata attributes except variables and props
|
||||
@@ -274,8 +266,6 @@ class SheerkaConceptManager(BaseService):
|
||||
if res is not None:
|
||||
return res
|
||||
|
||||
freeze_concept_attrs(new_concept)
|
||||
|
||||
# To update concept by first keyword
|
||||
# first remove the old references
|
||||
keywords = self.bnp.get_first_tokens(sheerka, concept) # keyword of the old concept
|
||||
@@ -289,7 +279,7 @@ class SheerkaConceptManager(BaseService):
|
||||
pass
|
||||
|
||||
# and then update
|
||||
init_ret_value = self.bnp.get_concepts_by_first_token(context, [new_concept], False, concepts_by_first_keyword)
|
||||
init_ret_value = self.bnp.compute_concepts_by_first_token(context, [new_concept], False, concepts_by_first_keyword)
|
||||
if not init_ret_value.status:
|
||||
return sheerka.ret(self.NAME, False, ErrorConcept(init_ret_value.value))
|
||||
concepts_by_first_keyword = init_ret_value.body
|
||||
@@ -316,6 +306,10 @@ class SheerkaConceptManager(BaseService):
|
||||
cache_manager.put(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False,
|
||||
resolved_concepts_by_first_keyword)
|
||||
|
||||
# everything seems to be fine. Update the list of attributes
|
||||
# Caution. Must be done AFTER update_concept()
|
||||
freeze_concept_attrs(new_concept)
|
||||
|
||||
# TODO : update when definition_type = DEFINITION_TYPE_DEF : have a look at update_references() below
|
||||
# TODO : Update concepts grammars : have a look at update_references() below
|
||||
if modify_source:
|
||||
@@ -332,13 +326,13 @@ class SheerkaConceptManager(BaseService):
|
||||
:return:
|
||||
"""
|
||||
sheerka = context.sheerka
|
||||
refs = self.sheerka.cache_manager.get(self.CONCEPTS_REFERENCES_ENTRY, concept.id)
|
||||
if refs:
|
||||
refs = self.sheerka.om.get(self.CONCEPTS_REFERENCES_ENTRY, concept.id)
|
||||
if refs is not NotFound:
|
||||
refs_instances = [sheerka.new_from_template(c, c.key) for c in [self.get_by_id(ref) for ref in refs]]
|
||||
return sheerka.ret(self.NAME, False, sheerka.err(ConceptIsReferenced(refs_instances)))
|
||||
|
||||
try:
|
||||
sheerka.cache_manager.remove_concept(concept)
|
||||
sheerka.om.remove_concept(concept)
|
||||
return sheerka.ret(self.NAME, True, sheerka.new(BuiltinConcepts.SUCCESS))
|
||||
except ConceptNotFound as ex:
|
||||
return sheerka.ret(self.NAME, False, sheerka.err(ex))
|
||||
@@ -387,7 +381,7 @@ class SheerkaConceptManager(BaseService):
|
||||
return
|
||||
|
||||
entry_key = self.BUILTIN_CONCEPTS_IDS if is_builtin else self.USER_CONCEPTS_IDS
|
||||
obj.get_metadata().id = str(self.sheerka.cache_manager.get(self.sheerka.OBJECTS_IDS_ENTRY, entry_key))
|
||||
obj.get_metadata().id = str(self.sheerka.om.get(self.sheerka.OBJECTS_IDS_ENTRY, entry_key))
|
||||
# self.log.debug(f"Setting id '{obj.metadata.id}' to concept '{obj.metadata.name}'.")
|
||||
|
||||
def get_by_key(self, concept_key, concept_id=None):
|
||||
@@ -412,7 +406,7 @@ class SheerkaConceptManager(BaseService):
|
||||
"""
|
||||
if concept_id is None:
|
||||
return False
|
||||
return self.sheerka.cache_manager.has(self.CONCEPTS_BY_ID_ENTRY, concept_id)
|
||||
return self.sheerka.om.current_cache_manager().has(self.CONCEPTS_BY_ID_ENTRY, concept_id)
|
||||
|
||||
def has_key(self, concept_key):
|
||||
"""
|
||||
@@ -421,7 +415,7 @@ class SheerkaConceptManager(BaseService):
|
||||
:param concept_key:
|
||||
:return:
|
||||
"""
|
||||
return self.sheerka.cache_manager.has(self.CONCEPTS_BY_KEY_ENTRY, concept_key)
|
||||
return self.sheerka.om.current_cache_manager().has(self.CONCEPTS_BY_KEY_ENTRY, concept_key)
|
||||
|
||||
def has_name(self, concept_name):
|
||||
"""
|
||||
@@ -430,7 +424,7 @@ class SheerkaConceptManager(BaseService):
|
||||
:param concept_name:
|
||||
:return:
|
||||
"""
|
||||
return self.sheerka.cache_manager.has(self.CONCEPTS_BY_NAME_ENTRY, concept_name)
|
||||
return self.sheerka.om.current_cache_manager().has(self.CONCEPTS_BY_NAME_ENTRY, concept_name)
|
||||
|
||||
def has_hash(self, concept_hash):
|
||||
"""
|
||||
@@ -439,7 +433,7 @@ class SheerkaConceptManager(BaseService):
|
||||
:param concept_hash:
|
||||
:return:
|
||||
"""
|
||||
return self.sheerka.cache_manager.has(self.CONCEPTS_BY_HASH_ENTRY, concept_hash)
|
||||
return self.sheerka.om.current_cache_manager().has(self.CONCEPTS_BY_HASH_ENTRY, concept_hash)
|
||||
|
||||
def internal_get(self, index_name, key, cache_name, concept_id=None):
|
||||
"""
|
||||
@@ -454,8 +448,8 @@ class SheerkaConceptManager(BaseService):
|
||||
if key is None:
|
||||
return ErrorConcept(f"Concept '{key}' is undefined.")
|
||||
|
||||
concepts = self.sheerka.cache_manager.get(cache_name, key)
|
||||
if concepts:
|
||||
concepts = self.sheerka.om.get(cache_name, key)
|
||||
if concepts is not NotFound:
|
||||
if concept_id is None:
|
||||
return concepts
|
||||
|
||||
@@ -479,13 +473,13 @@ class SheerkaConceptManager(BaseService):
|
||||
:return:
|
||||
"""
|
||||
|
||||
refs = self.sheerka.cache_manager.get(self.CONCEPTS_REFERENCES_ENTRY, concept.id)
|
||||
if not refs:
|
||||
refs = self.sheerka.om.get(self.CONCEPTS_REFERENCES_ENTRY, concept.id)
|
||||
if refs is NotFound:
|
||||
return
|
||||
|
||||
for concept_id in refs:
|
||||
# remove the grammar entry so that it can be recreated
|
||||
self.sheerka.cache_manager.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_id)
|
||||
self.sheerka.om.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_id)
|
||||
|
||||
# reset the bnf definition if needed
|
||||
if modified_concept:
|
||||
@@ -527,13 +521,13 @@ class SheerkaConceptManager(BaseService):
|
||||
|
||||
return refs
|
||||
|
||||
def not_is_variable(self, name):
|
||||
def is_not_a_variable(self, name):
|
||||
"""
|
||||
Given a name tells if it refers to a variable name
|
||||
:param name:
|
||||
:return:
|
||||
"""
|
||||
return not self.sheerka.cache_manager.get(self.sheerka.CONCEPTS_BY_NAME_ENTRY, name)
|
||||
return self.sheerka.om.get(self.sheerka.CONCEPTS_BY_NAME_ENTRY, name) is NotFound
|
||||
|
||||
@staticmethod
|
||||
def _name_has_changed(to_add):
|
||||
|
||||
@@ -119,10 +119,9 @@ class SheerkaConceptsAlgebra(BaseService):
|
||||
if nb_props == 0:
|
||||
return res
|
||||
|
||||
concepts_service = self.sheerka.services[SheerkaConceptManager.NAME]
|
||||
concepts_manager = self.sheerka.services[SheerkaConceptManager.NAME]
|
||||
|
||||
all_concepts = self.sheerka.cache_manager.copy(concepts_service.CONCEPTS_BY_ID_ENTRY).values() \
|
||||
if self.sheerka.cache_manager.cache_only else self.sheerka.sdp.list(concepts_service.CONCEPTS_BY_ID_ENTRY)
|
||||
all_concepts = self.sheerka.om.list(concepts_manager.CONCEPTS_BY_ID_ENTRY)
|
||||
|
||||
for c in all_concepts:
|
||||
score = self._compute_score(c, concept, step_b=round(1 / nb_props, 2))
|
||||
|
||||
@@ -3,17 +3,17 @@ import re
|
||||
from dataclasses import dataclass
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, NotInit
|
||||
from core.builtin_helpers import evaluate_expression
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import NotInit, NotFound
|
||||
from core.sheerka.ExecutionContext import ExecutionContext
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.utils import CONSOLE_COLORS_MAP as CCM, CONSOLE_COLUMNS, PRIMITIVES_TYPES
|
||||
from core.utils import evaluate_expression, as_bag
|
||||
from core.utils import as_bag
|
||||
from parsers.BaseNodeParser import SourceCodeWithConceptNode, UnrecognizedTokensNode
|
||||
|
||||
pp = pprint.PrettyPrinter(indent=2, width=CONSOLE_COLUMNS)
|
||||
|
||||
NotFound = "** Not Found **"
|
||||
|
||||
|
||||
class ConceptDebugObj:
|
||||
def __init__(self, concept, **kwargs):
|
||||
@@ -278,12 +278,21 @@ class SheerkaDebugManager(BaseService):
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.activated = False # is debug activated
|
||||
self.explicit = False # No need to activate context debug when debug mode is on
|
||||
self.context_cache = set() # debug for specific context
|
||||
self.variable_cache = set() # debug for specific variable
|
||||
self.explicit = False # No need to activate context debug when debug mode is on # to remove ?
|
||||
self.context_cache = set() # debug for specific context # to remove ?
|
||||
self.variable_cache = set() # debug for specific variable # to remove ?
|
||||
self.debug_vars_settings = []
|
||||
self.debug_rules_settings = []
|
||||
self.debug_concepts_settings = []
|
||||
self.state_vars = [
|
||||
"activated",
|
||||
"explicit", # to remove ?
|
||||
"context_cache", # to remove ?
|
||||
"variable_cache", # to remove ?
|
||||
"debug_vars_settings",
|
||||
"debug_rules_settings",
|
||||
"debug_concepts_settings"
|
||||
]
|
||||
|
||||
def initialize(self):
|
||||
# TO REMOVE ???
|
||||
@@ -307,13 +316,7 @@ class SheerkaDebugManager(BaseService):
|
||||
# self.sheerka.bind_service_method(self.get_debug_settings, False, as_name="debug_settings")
|
||||
|
||||
def initialize_deferred(self, context, is_first_time):
|
||||
self.restore_values("activated",
|
||||
"explicit",
|
||||
"context_cache",
|
||||
"variable_cache",
|
||||
"debug_vars_settings",
|
||||
"debug_rules_settings",
|
||||
"debug_concepts_settings")
|
||||
self.restore_state()
|
||||
|
||||
def reset(self):
|
||||
"""
|
||||
@@ -327,6 +330,12 @@ class SheerkaDebugManager(BaseService):
|
||||
self.debug_rules_settings.clear()
|
||||
self.debug_concepts_settings.clear()
|
||||
|
||||
def save_state(self, context):
|
||||
self.store_values(context, *self.state_vars)
|
||||
|
||||
def restore_state(self):
|
||||
self.restore_values(*self.state_vars)
|
||||
|
||||
def set_debug(self, context, value=True):
|
||||
self.activated = value
|
||||
self.sheerka.record_var(context, self.NAME, "activated", self.activated)
|
||||
@@ -728,6 +737,16 @@ class SheerkaDebugManager(BaseService):
|
||||
|
||||
@staticmethod
|
||||
def parse_debug_args(item_name, *args, **kwargs):
|
||||
"""
|
||||
Returns
|
||||
i : item to debug. It can be a Concept, Rule or a variable
|
||||
s : Service to debug (so far, it is SheerkaService)
|
||||
m : Method within the serice
|
||||
c_id : Context id
|
||||
c_children : True / False to allow debugging of context children
|
||||
d : Debug id
|
||||
e : enable / disabled
|
||||
"""
|
||||
service, method_name, context_id, context_children, item, debug_id, enabled = None, None, None, False, None, None, True
|
||||
if len(args) > 0:
|
||||
if args[0] is None or args[0] == "":
|
||||
|
||||
@@ -2,8 +2,9 @@ from dataclasses import dataclass
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.builtin_helpers import expect_one, only_successful, parse_unrecognized, evaluate, ensure_concept
|
||||
from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved, NotInit, AllConceptParts, \
|
||||
from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved, AllConceptParts, \
|
||||
concept_part_value
|
||||
from core.global_symbols import NotInit
|
||||
from core.sheerka.services.SheerkaConceptManager import SheerkaConceptManager
|
||||
from core.sheerka.services.SheerkaExecute import ParserInput
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
@@ -605,7 +606,7 @@ class SheerkaEvaluateConcept(BaseService):
|
||||
# # update the cache for concepts with no variables
|
||||
# Cannot use cache. See the comment at the beginning of this method
|
||||
# if len(concept.get_metadata().variables) == 0:
|
||||
# self.sheerka.cache_manager.put(self.sheerka.CONCEPTS_BY_ID_ENTRY, concept.id, concept)
|
||||
# self.sheerka.om.put(self.sheerka.CONCEPTS_BY_ID_ENTRY, concept.id, concept)
|
||||
|
||||
if not concept.get_metadata().is_builtin:
|
||||
self.sheerka.register_object(sub_context, concept.name, concept)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from threading import RLock
|
||||
|
||||
from core.global_symbols import NotFound
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
|
||||
|
||||
@@ -19,6 +19,16 @@ class SheerkaEventManager(BaseService):
|
||||
self.sheerka.bind_service_method(self.subscribe, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.publish, True, visible=False)
|
||||
|
||||
def save_state(self, context):
|
||||
with self._lock:
|
||||
copy = self.subscribers.copy()
|
||||
self.sheerka.record_internal_var(context, self.NAME, "subscribers", copy)
|
||||
|
||||
def restore_state(self):
|
||||
with self._lock:
|
||||
if (from_cache := self.sheerka.load_internal_var(self.NAME, "subscribers")) is not NotFound:
|
||||
self.subscribers = from_cache
|
||||
|
||||
def subscribe(self, topic, callback):
|
||||
"""
|
||||
To subscribe to a topic, just give the callback to call
|
||||
@@ -51,9 +61,10 @@ class SheerkaEventManager(BaseService):
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def reset_topic(self, topic):
|
||||
def test_only_reset_topic(self, topic):
|
||||
"""
|
||||
Remove all subsccribers from a given topic
|
||||
Remove all subscribers from a given topic
|
||||
TO REMOVE once sheerka ontology is fully implemented
|
||||
:param topic:
|
||||
:return:
|
||||
"""
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import core.utils
|
||||
from cache.Cache import Cache
|
||||
from cache.FastCache import FastCache
|
||||
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
|
||||
from core.global_symbols import NotFound
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.tokenizer import Tokenizer, TokenKind, Token
|
||||
|
||||
@@ -166,7 +168,7 @@ class SheerkaExecute(BaseService):
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.pi_cache = Cache(default=lambda key: ParserInput(key), max_size=20)
|
||||
self.pi_cache = FastCache(default=lambda key: ParserInput(key), max_size=20)
|
||||
self.instantiated_evaluators = None
|
||||
self.evaluators_by_name = None
|
||||
|
||||
@@ -192,10 +194,12 @@ class SheerkaExecute(BaseService):
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.execute, True)
|
||||
|
||||
self.sheerka.cache_manager.register_cache(self.PARSERS_INPUTS_ENTRY, self.pi_cache, False)
|
||||
self.reset_registered_evaluators()
|
||||
self.reset_registered_parsers()
|
||||
|
||||
def reset_state(self):
|
||||
self.pi_cache.clear()
|
||||
|
||||
def reset_registered_evaluators(self):
|
||||
# instantiate evaluators, once for all, only keep when it's enabled
|
||||
self.instantiated_evaluators = [e_class() for e_class in self.sheerka.evaluators]
|
||||
@@ -340,7 +344,7 @@ class SheerkaExecute(BaseService):
|
||||
|
||||
if tokens is None or self.pi_cache.has(text):
|
||||
pi = self.pi_cache.get(text)
|
||||
if pi is None: # when CacheManager.cache_only is True
|
||||
if pi is NotFound: # when CacheManager.cache_only is True
|
||||
pi = ParserInput(text)
|
||||
self.pi_cache.put(text, pi)
|
||||
return pi
|
||||
|
||||
@@ -3,6 +3,7 @@ from operator import itemgetter
|
||||
from typing import Tuple, Dict, List
|
||||
|
||||
from cache.Cache import Cache
|
||||
from core.global_symbols import NotFound
|
||||
from core.sheerka.services.sheerka_service import BaseService, ServiceObj
|
||||
|
||||
|
||||
@@ -29,10 +30,10 @@ class SheerkaFunctionsParametersHistory(BaseService):
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.cache = Cache(max_size=1024, default=lambda k: self.sheerka.sdp.get(self.FUNCTIONS_PARAMETERS_ENTRY, k))
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.cache_manager.register_cache(self.FUNCTIONS_PARAMETERS_ENTRY, self.cache, True, True)
|
||||
cache = Cache(max_size=1024).auto_configure(self.FUNCTIONS_PARAMETERS_ENTRY)
|
||||
self.sheerka.om.register_cache(self.FUNCTIONS_PARAMETERS_ENTRY, cache, True, True)
|
||||
return self
|
||||
|
||||
def record_function_parameter(self, context, func_name: str, param_number: int, param_value: str):
|
||||
@@ -44,8 +45,11 @@ class SheerkaFunctionsParametersHistory(BaseService):
|
||||
:param param_value:
|
||||
:return:
|
||||
"""
|
||||
old = self.cache.get(func_name)
|
||||
if old is not None:
|
||||
old = self.sheerka.om.get(self.FUNCTIONS_PARAMETERS_ENTRY, func_name)
|
||||
if old is NotFound:
|
||||
obj = FunctionParametersObj(context.event.get_digest(), func_name, {param_number: [(param_value, 1)]})
|
||||
self.sheerka.om.put(self.FUNCTIONS_PARAMETERS_ENTRY, func_name, obj)
|
||||
else:
|
||||
if param_number in old.params:
|
||||
lst = old.params[param_number]
|
||||
for i, value in enumerate(lst): # value is a tuple (param_value, counter)
|
||||
@@ -56,10 +60,7 @@ class SheerkaFunctionsParametersHistory(BaseService):
|
||||
lst.append((param_value, 1))
|
||||
else:
|
||||
old.params[param_number] = [(param_value, 1)]
|
||||
self.cache.put(func_name, old)
|
||||
else:
|
||||
obj = FunctionParametersObj(context.event.get_digest(), func_name, {param_number: [(param_value, 1)]})
|
||||
self.cache.put(func_name, obj)
|
||||
self.sheerka.om.put(self.FUNCTIONS_PARAMETERS_ENTRY, func_name, old)
|
||||
|
||||
def get_function_parameters(self, func_name: str, param_number: int):
|
||||
"""
|
||||
@@ -68,8 +69,8 @@ class SheerkaFunctionsParametersHistory(BaseService):
|
||||
:param param_number:
|
||||
:return:
|
||||
"""
|
||||
values = self.cache.get(func_name)
|
||||
if values is None:
|
||||
values = self.sheerka.om.get(self.FUNCTIONS_PARAMETERS_ENTRY, func_name)
|
||||
if values is NotFound:
|
||||
return []
|
||||
|
||||
if param_number not in values.params:
|
||||
|
||||
@@ -67,10 +67,10 @@ class SheerkaHistoryManager(BaseService):
|
||||
:return:
|
||||
"""
|
||||
|
||||
events = list(self.sheerka.sdp.load_events(depth, start))
|
||||
events = list(self.sheerka.om.current_sdp().load_events(depth, start))
|
||||
for event in events:
|
||||
try:
|
||||
result = self.sheerka.sdp.load_result(event.get_digest())
|
||||
result = self.sheerka.om.current_sdp().load_result(event.get_digest())
|
||||
except (IOError, KeyError):
|
||||
result = None
|
||||
yield History(event, result)
|
||||
|
||||
+24
-25
@@ -4,21 +4,18 @@ from cache.SetCache import SetCache
|
||||
from core.ast_helpers import UnreferencedVariablesVisitor
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, ConceptParts, DEFINITION_TYPE_BNF
|
||||
from core.global_symbols import NotFound
|
||||
from core.sheerka.services.SheerkaConceptManager import SheerkaConceptManager
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
|
||||
GROUP_PREFIX = 'All_'
|
||||
|
||||
|
||||
class SheerkaSetsManager(BaseService):
|
||||
NAME = "SetsManager"
|
||||
CONCEPTS_GROUPS_ENTRY = "SetsManager:Concepts_Groups"
|
||||
CONCEPTS_IN_GROUPS_ENTRY = "SetsManager:Concepts_In_Groups" # cache for get_set_elements()
|
||||
class SheerkaIsAManager(BaseService):
|
||||
NAME = "IsAManager"
|
||||
CONCEPTS_GROUPS_ENTRY = "IsAManager:Concepts_Groups"
|
||||
CONCEPTS_IN_GROUPS_ENTRY = "IsAManager:Concepts_In_Groups" # cache for get_set_elements()
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.sets = SetCache(default=lambda k: self.sheerka.sdp.get(self.CONCEPTS_GROUPS_ENTRY, k))
|
||||
self.concepts_in_set = Cache()
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.set_isa, True)
|
||||
@@ -28,8 +25,10 @@ class SheerkaSetsManager(BaseService):
|
||||
self.sheerka.bind_service_method(self.isa, False)
|
||||
self.sheerka.bind_service_method(self.isaset, True) # concept is evaluated, need to change the code
|
||||
|
||||
self.sheerka.cache_manager.register_cache(self.CONCEPTS_GROUPS_ENTRY, self.sets)
|
||||
self.sheerka.cache_manager.register_cache(self.CONCEPTS_IN_GROUPS_ENTRY, self.concepts_in_set, persist=False)
|
||||
cache = SetCache().auto_configure(self.CONCEPTS_GROUPS_ENTRY)
|
||||
self.sheerka.om.register_cache(self.CONCEPTS_GROUPS_ENTRY, cache)
|
||||
cache = Cache().auto_configure(self.CONCEPTS_IN_GROUPS_ENTRY)
|
||||
self.sheerka.om.register_cache(self.CONCEPTS_IN_GROUPS_ENTRY, cache, persist=False)
|
||||
|
||||
def set_isa(self, context, concept, concept_set):
|
||||
"""
|
||||
@@ -43,8 +42,8 @@ class SheerkaSetsManager(BaseService):
|
||||
context.log(f"Setting concept {concept} is a {concept_set}", who=self.NAME)
|
||||
core.builtin_helpers.ensure_concept(concept, concept_set)
|
||||
|
||||
if BuiltinConcepts.ISA in concept.get_metadata().props and concept_set in concept.get_metadata().props[
|
||||
BuiltinConcepts.ISA]:
|
||||
if BuiltinConcepts.ISA in concept.get_metadata().props and \
|
||||
concept_set in concept.get_metadata().props[BuiltinConcepts.ISA]:
|
||||
return self.sheerka.ret(
|
||||
self.NAME,
|
||||
False,
|
||||
@@ -75,23 +74,23 @@ class SheerkaSetsManager(BaseService):
|
||||
context.log(f"Adding concept {concept} to set {concept_set}", who=self.NAME)
|
||||
core.builtin_helpers.ensure_concept(concept, concept_set)
|
||||
|
||||
set_elements = self.sets.get(concept_set.id)
|
||||
if set_elements and concept.id in set_elements:
|
||||
set_elements = self.sheerka.om.get(self.CONCEPTS_GROUPS_ENTRY, concept_set.id)
|
||||
if set_elements is not NotFound and concept.id in set_elements:
|
||||
return self.sheerka.ret(
|
||||
self.NAME,
|
||||
False,
|
||||
self.sheerka.new(BuiltinConcepts.CONCEPT_ALREADY_IN_SET, body=concept, concept_set=concept_set))
|
||||
|
||||
self.sets.put(concept_set.id, concept.id)
|
||||
self.sheerka.om.put(self.CONCEPTS_GROUPS_ENTRY, concept_set.id, concept.id)
|
||||
|
||||
# invalidate the cache of what contains concept_set
|
||||
self.concepts_in_set.delete(concept_set.id)
|
||||
self.sheerka.om.delete(self.CONCEPTS_IN_GROUPS_ENTRY, concept_set.id)
|
||||
|
||||
# update concept_set references
|
||||
self.sheerka.services[SheerkaConceptManager.NAME].update_references(context, concept_set)
|
||||
|
||||
# remove the grammar entry so that it can be recreated
|
||||
self.sheerka.cache_manager.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_set.id)
|
||||
self.sheerka.om.delete(self.sheerka.CONCEPTS_GRAMMARS_ENTRY, concept_set.id)
|
||||
|
||||
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
|
||||
|
||||
@@ -116,7 +115,7 @@ class SheerkaSetsManager(BaseService):
|
||||
concept_set=concept_set)
|
||||
else:
|
||||
body = self.sheerka.new(BuiltinConcepts.SUCCESS)
|
||||
self.concepts_in_set.delete(concept_set.id)
|
||||
self.sheerka.om.delete(self.CONCEPTS_IN_GROUPS_ENTRY, concept_set.id)
|
||||
|
||||
return self.sheerka.ret(self.NAME, len(already_in_set) != len(concepts), body)
|
||||
|
||||
@@ -136,7 +135,7 @@ class SheerkaSetsManager(BaseService):
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_A_SET, body=concept)
|
||||
|
||||
# first, try to see if sub_concept has it's own group entry
|
||||
ids = self.sets.get(sub_concept.id)
|
||||
ids = self.sheerka.om.get(self.CONCEPTS_GROUPS_ENTRY, sub_concept.id)
|
||||
concepts = self._get_concepts(context, ids, True)
|
||||
|
||||
# aggregate with en entries from its body
|
||||
@@ -166,13 +165,13 @@ class SheerkaSetsManager(BaseService):
|
||||
return concepts
|
||||
|
||||
# already in cache ?
|
||||
if res := self.concepts_in_set.get(concept.id):
|
||||
if (res := self.sheerka.om.get(self.CONCEPTS_IN_GROUPS_ENTRY, concept.id)) is not NotFound:
|
||||
return res
|
||||
|
||||
res = _get_set_elements(concept)
|
||||
|
||||
# put in cache
|
||||
self.concepts_in_set.put(concept.id, res)
|
||||
self.sheerka.om.put(self.CONCEPTS_IN_GROUPS_ENTRY, concept.id, res)
|
||||
return res
|
||||
|
||||
def isinset(self, a, b):
|
||||
@@ -190,8 +189,8 @@ class SheerkaSetsManager(BaseService):
|
||||
if not (a.id and b.id):
|
||||
return False
|
||||
|
||||
group_elements = self.sets.get(b.id)
|
||||
return group_elements and a.id in group_elements
|
||||
group_elements = self.sheerka.om.get(self.CONCEPTS_GROUPS_ENTRY, b.id)
|
||||
return group_elements is not NotFound and a.id in group_elements
|
||||
|
||||
def isa(self, a, b):
|
||||
|
||||
@@ -226,7 +225,7 @@ class SheerkaSetsManager(BaseService):
|
||||
|
||||
# check if it has a group
|
||||
# TODO: use cache instead of directly requesting sdp
|
||||
if self.sets.get(concept.id):
|
||||
if self.sheerka.om.get(self.CONCEPTS_GROUPS_ENTRY, concept.id) is not NotFound:
|
||||
return True
|
||||
|
||||
# it may be a concept that references a set
|
||||
@@ -267,7 +266,7 @@ for x in xx__concepts__xx:
|
||||
:return:
|
||||
"""
|
||||
|
||||
if not ids:
|
||||
if ids in (None, NotFound):
|
||||
return []
|
||||
|
||||
if not evaluate:
|
||||
@@ -4,7 +4,7 @@ from cache.FastCache import FastCache
|
||||
from cache.ListIfNeededCache import ListIfNeededCache
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import EVENT_CONTEXT_DISPOSED
|
||||
from core.global_symbols import EVENT_CONTEXT_DISPOSED, NotFound
|
||||
from core.sheerka.services.sheerka_service import BaseService, ServiceObj
|
||||
|
||||
|
||||
@@ -17,13 +17,11 @@ class SheerkaMemory(BaseService):
|
||||
NAME = "Memory"
|
||||
GLOBAL = "global"
|
||||
|
||||
SHORT_TERM_OBJECTS_ENTRY = "Memory:ShortTermMemoryObjects"
|
||||
OBJECTS_ENTRY = "Memory:Objects"
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.short_term_objects = FastCache()
|
||||
self.memory_objects = ListIfNeededCache(default=lambda k: self.sheerka.sdp.get(self.OBJECTS_ENTRY, k))
|
||||
self.registration = {}
|
||||
|
||||
def initialize(self):
|
||||
@@ -35,15 +33,20 @@ class SheerkaMemory(BaseService):
|
||||
self.sheerka.bind_service_method(self.get_from_memory, False)
|
||||
self.sheerka.bind_service_method(self.register_object, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.unregister_object, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.add_registered_objects, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.commit_registered_objects, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.memory, False)
|
||||
self.sheerka.bind_service_method(self.mem, False)
|
||||
|
||||
self.sheerka.cache_manager.register_cache(self.OBJECTS_ENTRY, self.memory_objects, persist=True, use_ref=True)
|
||||
cache = ListIfNeededCache().auto_configure(self.OBJECTS_ENTRY)
|
||||
self.sheerka.om.register_cache(self.OBJECTS_ENTRY, cache, persist=True, use_ref=True)
|
||||
|
||||
def reset(self):
|
||||
self.short_term_objects.clear()
|
||||
self.memory_objects.clear()
|
||||
self.sheerka.om.clear(self.OBJECTS_ENTRY)
|
||||
|
||||
def reset_state(self):
|
||||
self.short_term_objects.clear()
|
||||
self.registration.clear()
|
||||
|
||||
def initialize_deferred(self, context, is_first_time):
|
||||
self.sheerka.subscribe(EVENT_CONTEXT_DISPOSED, self.remove_context)
|
||||
@@ -55,7 +58,7 @@ class SheerkaMemory(BaseService):
|
||||
return self.short_term_objects.cache[id_to_use][key]
|
||||
except KeyError:
|
||||
if context is None:
|
||||
return None
|
||||
return NotFound
|
||||
|
||||
context = context.get_parent()
|
||||
|
||||
@@ -92,12 +95,12 @@ class SheerkaMemory(BaseService):
|
||||
:param concept:
|
||||
:return:
|
||||
"""
|
||||
self.memory_objects.put(key, MemoryObject(context.event.get_digest(), concept))
|
||||
self.sheerka.om.put(SheerkaMemory.OBJECTS_ENTRY, key, MemoryObject(context.event.get_digest(), concept))
|
||||
|
||||
def get_from_memory(self, context, key):
|
||||
""""
|
||||
"""
|
||||
return self.memory_objects.get(key)
|
||||
return self.sheerka.om.get(SheerkaMemory.OBJECTS_ENTRY, key)
|
||||
|
||||
def register_object(self, context, key, concept):
|
||||
"""
|
||||
@@ -126,7 +129,7 @@ class SheerkaMemory(BaseService):
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def add_registered_objects(self, context):
|
||||
def commit_registered_objects(self, context):
|
||||
"""
|
||||
Adds all registered memory_objects
|
||||
:param context:
|
||||
@@ -147,7 +150,7 @@ class SheerkaMemory(BaseService):
|
||||
name_to_use = name.name if isinstance(name, Concept) else name
|
||||
self.unregister_object(context, name_to_use)
|
||||
obj = self.get_from_memory(context, name_to_use)
|
||||
if obj is None:
|
||||
if obj is NotFound:
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body={"#name": name})
|
||||
|
||||
if isinstance(obj, list):
|
||||
@@ -156,8 +159,7 @@ class SheerkaMemory(BaseService):
|
||||
return obj.obj
|
||||
|
||||
res = {}
|
||||
for k in self.memory_objects:
|
||||
obj = self.memory_objects.get(k)
|
||||
for k, obj in self.sheerka.om.get_all(SheerkaMemory.OBJECTS_ENTRY).items():
|
||||
if isinstance(obj, list):
|
||||
obj = obj[-1]
|
||||
res[k] = obj.obj
|
||||
@@ -165,5 +167,5 @@ class SheerkaMemory(BaseService):
|
||||
return res
|
||||
|
||||
def mem(self):
|
||||
keys = sorted([k for k in self.memory_objects])
|
||||
keys = sorted([k for k in self.sheerka.om.list(SheerkaMemory.OBJECTS_ENTRY)])
|
||||
return {"keys": keys, "len": len(keys)}
|
||||
|
||||
@@ -10,27 +10,10 @@ class SheerkaQuestion(BaseService):
|
||||
super().__init__(sheerka)
|
||||
|
||||
def initialize(self):
|
||||
# self.sheerka.bind_service_method(self.question, False)
|
||||
self.sheerka.bind_service_method(self.is_question, False)
|
||||
|
||||
# def question(self, context, q):
|
||||
# """
|
||||
# Evaluate q in the context in a question
|
||||
# :param context:
|
||||
# :param q:
|
||||
# :return:
|
||||
# """
|
||||
#
|
||||
# if isinstance(q, Concept):
|
||||
# with context.push(BuiltinConcepts.EVALUATE_CONCEPT, q, desc=f"Evaluating question '{q}'") as sub_context:
|
||||
# sub_context.global_hints.add(BuiltinConcepts.EVAL_QUESTION_REQUESTED)
|
||||
# sub_context.global_hints.add(BuiltinConcepts.EVAL_UNTIL_SUCCESS_REQUESTED)
|
||||
#
|
||||
# evaluated = self.sheerka.evaluate_concept(sub_context, q)
|
||||
#
|
||||
# return evaluated
|
||||
|
||||
def is_question(self, context):
|
||||
@staticmethod
|
||||
def is_question(context):
|
||||
"""
|
||||
Returns True if a question is asked
|
||||
:return:
|
||||
|
||||
@@ -2,7 +2,7 @@ import ast
|
||||
|
||||
from cache.Cache import Cache
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.global_symbols import EVENT_USER_INPUT_EVALUATED, EVENT_CONCEPT_CREATED
|
||||
from core.global_symbols import EVENT_USER_INPUT_EVALUATED, EVENT_CONCEPT_CREATED, NotFound
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.utils import CONSOLE_COLORS_MAP as CCM
|
||||
from core.utils import as_bag
|
||||
@@ -13,6 +13,9 @@ MAX_EXECUTION_HISTORY = 100
|
||||
class SheerkaResultConcept(BaseService):
|
||||
NAME = "Result"
|
||||
|
||||
# SheerkaResultConcept seems to be a concept that must not support multiple ontology layers
|
||||
# We must have always access to everything that was done, whatever the ontology
|
||||
|
||||
def __init__(self, sheerka, page_size=30):
|
||||
super().__init__(sheerka)
|
||||
self.page_size = page_size
|
||||
@@ -20,6 +23,7 @@ class SheerkaResultConcept(BaseService):
|
||||
self.last_execution = None
|
||||
self.last_created_concept = None
|
||||
self.last_created_concept_id = None
|
||||
self.state_vars = ["last_created_concept_id"]
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.get_results_by_digest, True) # digest is recorded
|
||||
@@ -27,20 +31,26 @@ class SheerkaResultConcept(BaseService):
|
||||
self.sheerka.bind_service_method(self.get_last_results, True) # digest is recorded
|
||||
self.sheerka.bind_service_method(self.get_results, False)
|
||||
self.sheerka.bind_service_method(self.get_execution_item, False)
|
||||
self.sheerka.bind_service_method(self.get_last_ret, False, as_name="last_ret")
|
||||
self.sheerka.bind_service_method(self.get_last_return_value, False, as_name="last_ret")
|
||||
self.sheerka.bind_service_method(self.get_last_created_concept, False, as_name="last_created_concept")
|
||||
|
||||
def initialize_deferred(self, context, is_first_time):
|
||||
self.restore_values("last_created_concept_id")
|
||||
self.restore_values(*self.state_vars)
|
||||
self.sheerka.subscribe(EVENT_USER_INPUT_EVALUATED, self.user_input_evaluated)
|
||||
self.sheerka.subscribe(EVENT_CONCEPT_CREATED, self.new_concept_created)
|
||||
|
||||
def reset(self):
|
||||
def test_only_reset(self):
|
||||
self.executions_contexts_cache.clear()
|
||||
self.last_execution = None
|
||||
self.last_created_concept = None
|
||||
self.last_created_concept_id = None
|
||||
|
||||
def save_state(self, context):
|
||||
self.store_values(context, *self.state_vars)
|
||||
|
||||
def restore_state(self):
|
||||
self.restore_values(*self.state_vars)
|
||||
|
||||
@staticmethod
|
||||
def get_predicate(**kwargs):
|
||||
if len(kwargs) == 0:
|
||||
@@ -81,7 +91,7 @@ class SheerkaResultConcept(BaseService):
|
||||
:param record_digest:
|
||||
:return:
|
||||
"""
|
||||
if digest is None:
|
||||
if digest is NotFound:
|
||||
return None
|
||||
|
||||
if filter is not None:
|
||||
@@ -92,8 +102,8 @@ class SheerkaResultConcept(BaseService):
|
||||
result = self.executions_contexts_cache.get(digest)
|
||||
event = result.event
|
||||
else:
|
||||
result = self.sheerka.sdp.load_result(digest)
|
||||
event = self.sheerka.sdp.load_event(digest) # there is no real need for a cache of the events
|
||||
result = self.sheerka.om.current_sdp().load_result(digest)
|
||||
event = self.sheerka.om.current_sdp().load_event(digest) # really needed ?
|
||||
|
||||
if record_digest:
|
||||
context.log(f"Recording digest '{digest}'")
|
||||
@@ -141,7 +151,7 @@ class SheerkaResultConcept(BaseService):
|
||||
start = len(self.executions_contexts_cache)
|
||||
consumed = 0
|
||||
while True:
|
||||
for event in self.sheerka.sdp.load_events(self.page_size, start):
|
||||
for event in self.sheerka.om.current_sdp().load_events(self.page_size, start):
|
||||
consumed += 1
|
||||
if event.message.startswith(command):
|
||||
return self.get_results_by_digest(context, event.get_digest(), filter, record_digest, **kwargs)
|
||||
@@ -200,14 +210,14 @@ class SheerkaResultConcept(BaseService):
|
||||
:return:
|
||||
"""
|
||||
digest = self.sheerka.load_var(self.NAME, "digest")
|
||||
if digest is None:
|
||||
if digest is NotFound:
|
||||
return self.sheerka.new(BuiltinConcepts.NOT_FOUND, body="no digest")
|
||||
|
||||
try:
|
||||
if digest in self.executions_contexts_cache:
|
||||
result = self.executions_contexts_cache.get(digest)
|
||||
else:
|
||||
result = self.sheerka.sdp.load_result(digest)
|
||||
result = self.sheerka.om.current_sdp().load_result(digest)
|
||||
items = list(self.as_list(result, self.get_predicate(id=item_id)))
|
||||
|
||||
if len(items) == 0:
|
||||
@@ -227,7 +237,7 @@ class SheerkaResultConcept(BaseService):
|
||||
"""
|
||||
if self.sheerka.save_execution_context:
|
||||
try:
|
||||
self.sheerka.sdp.save_result(execution_context)
|
||||
self.sheerka.om.current_sdp().save_result(execution_context)
|
||||
except Exception as ex:
|
||||
print(f"{CCM['red']}Failed to save execution context. Reason: {ex}{CCM['reset']}")
|
||||
pass
|
||||
@@ -236,7 +246,7 @@ class SheerkaResultConcept(BaseService):
|
||||
self.executions_contexts_cache.put(execution_context.event.get_digest(), execution_context)
|
||||
self.last_execution = execution_context
|
||||
|
||||
def get_last_ret(self, context):
|
||||
def get_last_return_value(self, context):
|
||||
"""
|
||||
Return the last return value(s)
|
||||
:return:
|
||||
@@ -248,7 +258,7 @@ class SheerkaResultConcept(BaseService):
|
||||
if event_id is not None:
|
||||
try:
|
||||
|
||||
execution_result = self.sheerka.sdp.load_result(event_id)
|
||||
execution_result = self.sheerka.om.current_sdp().load_result(event_id)
|
||||
return execution_result.values["return_values"]
|
||||
|
||||
except FileNotFoundError as ex:
|
||||
@@ -278,9 +288,9 @@ class SheerkaResultConcept(BaseService):
|
||||
page_size = 2
|
||||
consumed = 0
|
||||
while True:
|
||||
for event in self.sheerka.sdp.load_events(page_size, start):
|
||||
for event in self.sheerka.om.current_sdp().load_events(page_size, start):
|
||||
consumed += 1
|
||||
if self.sheerka.sdp.has_result(event.get_digest()):
|
||||
if self.sheerka.om.current_sdp().has_result(event.get_digest()):
|
||||
return event.get_digest()
|
||||
|
||||
if consumed < page_size:
|
||||
|
||||
@@ -4,10 +4,11 @@ from dataclasses import dataclass
|
||||
from typing import Union
|
||||
|
||||
from cache.Cache import Cache
|
||||
from cache.ListIfNeededCache import ListIfNeededCache
|
||||
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
|
||||
from core.builtin_helpers import parse_unrecognized, only_successful, ensure_rule
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import EVENT_RULE_PRECEDENCE_MODIFIED, RULE_COMPARISON_CONTEXT
|
||||
from core.global_symbols import EVENT_RULE_PRECEDENCE_MODIFIED, RULE_COMPARISON_CONTEXT, NotFound
|
||||
from core.rule import Rule
|
||||
from core.sheerka.services.sheerka_service import BaseService
|
||||
from core.tokenizer import Keywords, TokenKind, Token, IterParser
|
||||
@@ -509,22 +510,25 @@ class SheerkaRuleManager(BaseService):
|
||||
RULE_IDS = "Rules_Ids"
|
||||
FORMAT_RULE_ENTRY = "RuleManager:FormatRules"
|
||||
EXEC_RULE_ENTRY = "RuleManager:ExecRules"
|
||||
RULES_BY_NAME_ENTRY = "RuleManager:Rules_By_Name"
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.format_rule_cache = Cache(default=lambda k: self.sheerka.sdp.get(self.FORMAT_RULE_ENTRY, k))
|
||||
self.exec_rule_cache = Cache(default=lambda k: self.sheerka.sdp.get(self.EXEC_RULE_ENTRY, k))
|
||||
|
||||
self._format_rules = None # sorted by priority
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.create_new_rule, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.get_rule_by_id, False)
|
||||
self.sheerka.bind_service_method(self.get_rule_by_name, False)
|
||||
self.sheerka.bind_service_method(self.dump_desc_rule, False, as_name="desc_rule")
|
||||
self.sheerka.bind_service_method(self.get_format_rules, False, visible=False)
|
||||
|
||||
self.sheerka.cache_manager.register_cache(self.FORMAT_RULE_ENTRY, self.format_rule_cache, True, True)
|
||||
self.sheerka.cache_manager.register_cache(self.EXEC_RULE_ENTRY, self.exec_rule_cache, True, True)
|
||||
cache = Cache().auto_configure(self.FORMAT_RULE_ENTRY)
|
||||
self.sheerka.om.register_cache(self.FORMAT_RULE_ENTRY, cache, True, True)
|
||||
cache = Cache().auto_configure(self.EXEC_RULE_ENTRY)
|
||||
self.sheerka.om.register_cache(self.EXEC_RULE_ENTRY, cache, True, True)
|
||||
cache = ListIfNeededCache().auto_configure(self.RULES_BY_NAME_ENTRY)
|
||||
self.sheerka.om.register_cache(self.RULES_BY_NAME_ENTRY, cache, True, True)
|
||||
|
||||
def initialize_deferred(self, context, is_first_time):
|
||||
|
||||
@@ -533,20 +537,29 @@ class SheerkaRuleManager(BaseService):
|
||||
self.init_builtin_rules(context)
|
||||
else:
|
||||
# adds the other rules (when it's not the first time)
|
||||
self.format_rule_cache.populate(lambda: self.sheerka.sdp.list(self.FORMAT_RULE_ENTRY), lambda rule: rule.id)
|
||||
self.exec_rule_cache.populate(lambda: self.sheerka.sdp.list(self.EXEC_RULE_ENTRY), lambda rule: rule.id)
|
||||
self.format_rule_cache.reset_events()
|
||||
self.exec_rule_cache.reset_events()
|
||||
self.sheerka.om.populate(self.FORMAT_RULE_ENTRY,
|
||||
lambda sdp: sdp.list(self.FORMAT_RULE_ENTRY),
|
||||
lambda rule: rule.id,
|
||||
reset_events=True,
|
||||
all_ontologies=True)
|
||||
self.sheerka.om.populate(self.EXEC_RULE_ENTRY,
|
||||
lambda sdp: sdp.list(self.EXEC_RULE_ENTRY),
|
||||
lambda rule: rule.id,
|
||||
reset_events=True,
|
||||
all_ontologies=True)
|
||||
|
||||
# compile all the rules
|
||||
for rule_id in self.format_rule_cache:
|
||||
rule = self.init_rule(context, self.format_rule_cache.get(rule_id))
|
||||
# compile all format the rules
|
||||
for rule_id, rule_def in self.sheerka.om.get_all(self.FORMAT_RULE_ENTRY, cache_only=True).items():
|
||||
rule = self.init_rule(context, rule_def)
|
||||
|
||||
# update rules priorities
|
||||
self.update_rules_priorities(context)
|
||||
|
||||
self.sheerka.subscribe(EVENT_RULE_PRECEDENCE_MODIFIED, self.update_rules_priorities)
|
||||
|
||||
def reset_state(self):
|
||||
self._format_rules = None
|
||||
|
||||
def update_rules_priorities(self, context):
|
||||
"""
|
||||
Ask the SheerkaComparisonManager for the priorities
|
||||
@@ -555,9 +568,8 @@ class SheerkaRuleManager(BaseService):
|
||||
# get the priorities
|
||||
rules_weights = self.sheerka.get_concepts_weights(BuiltinConcepts.PRECEDENCE, RULE_COMPARISON_CONTEXT)
|
||||
|
||||
# compile all the rules
|
||||
for rule_id in self.format_rule_cache:
|
||||
rule = self.format_rule_cache.get(rule_id)
|
||||
# update the priorities
|
||||
for rule in self.sheerka.om.list(self.FORMAT_RULE_ENTRY, cache_only=True):
|
||||
if rule.str_id in rules_weights:
|
||||
rule.priority = rules_weights[rule.str_id]
|
||||
|
||||
@@ -623,7 +635,7 @@ class SheerkaRuleManager(BaseService):
|
||||
if rule.metadata.id is not None:
|
||||
return
|
||||
|
||||
rule.metadata.id = str(self.sheerka.cache_manager.get(self.sheerka.OBJECTS_IDS_ENTRY, self.RULE_IDS))
|
||||
rule.metadata.id = str(self.sheerka.om.get(self.sheerka.OBJECTS_IDS_ENTRY, self.RULE_IDS))
|
||||
|
||||
def create_new_rule(self, context, rule):
|
||||
"""
|
||||
@@ -642,10 +654,14 @@ class SheerkaRuleManager(BaseService):
|
||||
|
||||
# save it
|
||||
if rule.metadata.action_type == "print":
|
||||
self.sheerka.cache_manager.put(self.FORMAT_RULE_ENTRY, rule.metadata.id, rule)
|
||||
self.sheerka.om.put(self.FORMAT_RULE_ENTRY, rule.metadata.id, rule)
|
||||
self._format_rules = None
|
||||
else:
|
||||
self.sheerka.cache_manager.put(self.EXEC_RULE_ENTRY, rule.metadata.id, rule)
|
||||
self.sheerka.om.put(self.EXEC_RULE_ENTRY, rule.metadata.id, rule)
|
||||
|
||||
# save by name if needed
|
||||
if rule.metadata.name:
|
||||
self.sheerka.om.put(self.RULES_BY_NAME_ENTRY, rule.metadata.name, rule)
|
||||
|
||||
# process the return if needed
|
||||
ret = sheerka.ret(self.NAME, True, sheerka.new(BuiltinConcepts.NEW_RULE, body=rule))
|
||||
@@ -721,17 +737,28 @@ class SheerkaRuleManager(BaseService):
|
||||
if rule_id is None:
|
||||
return None
|
||||
|
||||
rule = self.format_rule_cache.get(rule_id)
|
||||
if rule:
|
||||
rule = self.sheerka.om.get(self.FORMAT_RULE_ENTRY, rule_id)
|
||||
if rule is not NotFound:
|
||||
return rule
|
||||
|
||||
rule = self.exec_rule_cache.get(rule_id)
|
||||
if rule:
|
||||
rule = self.sheerka.om.get(self.EXEC_RULE_ENTRY, rule_id)
|
||||
if rule is not NotFound:
|
||||
return rule
|
||||
|
||||
metadata = [("id", rule_id)]
|
||||
return self.sheerka.new(BuiltinConcepts.UNKNOWN_RULE, body=metadata)
|
||||
|
||||
def get_rule_by_name(self, rule_name):
|
||||
if rule_name is None:
|
||||
return None
|
||||
|
||||
rule = self.sheerka.om.get(self.RULES_BY_NAME_ENTRY, rule_name)
|
||||
if rule is NotFound:
|
||||
metadata = [("name", rule_name)]
|
||||
return self.sheerka.new(BuiltinConcepts.UNKNOWN_RULE, body=metadata)
|
||||
|
||||
return rule
|
||||
|
||||
def dump_desc_rule(self, rules):
|
||||
"""
|
||||
dumps the definition of a rule
|
||||
@@ -759,7 +786,9 @@ class SheerkaRuleManager(BaseService):
|
||||
if self._format_rules:
|
||||
return self._format_rules
|
||||
|
||||
self._format_rules = sorted(self.format_rule_cache.get_all(), key=operator.attrgetter('priority'), reverse=True)
|
||||
self._format_rules = sorted(self.sheerka.om.list(self.FORMAT_RULE_ENTRY, cache_only=True),
|
||||
key=operator.attrgetter('priority'),
|
||||
reverse=True)
|
||||
return self._format_rules
|
||||
|
||||
def add_evaluators(self, source, ret_vals):
|
||||
|
||||
@@ -3,6 +3,7 @@ from typing import List
|
||||
|
||||
from cache.Cache import Cache
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.global_symbols import NotFound
|
||||
from core.sheerka.services.sheerka_service import ServiceObj, BaseService
|
||||
|
||||
|
||||
@@ -23,32 +24,52 @@ class Variable(ServiceObj):
|
||||
return f"({self.who}){self.key}={self.value}"
|
||||
|
||||
|
||||
@dataclass
|
||||
class InternalObj:
|
||||
obj: object
|
||||
|
||||
def __deepcopy__(self, memodict={}):
|
||||
return self
|
||||
|
||||
def __copy__(self):
|
||||
return self
|
||||
|
||||
|
||||
class SheerkaVariableManager(BaseService):
|
||||
NAME = "VariableManager"
|
||||
VARIABLES_ENTRY = "VariableManager:Variables" # entry for admin or internal variables
|
||||
VARIABLES_ENTRY = "VariableManager:Variables" # entry for variables which will be copied in sdp
|
||||
INTERNAL_VARIABLES_ENTRY = "VariableManager:InternalVariables" # internal to current process (can store lambda)
|
||||
|
||||
def __init__(self, sheerka):
|
||||
super().__init__(sheerka)
|
||||
self.bound = {
|
||||
"sheerka.enable_process_return_values": "enable_process_return_values",
|
||||
"sheerka.save_execution_context": "save_execution_context"
|
||||
self.bound_variables = {
|
||||
self.sheerka.name: {"enable_process_return_values", "save_execution_context"}
|
||||
}
|
||||
|
||||
def initialize(self):
|
||||
self.sheerka.bind_service_method(self.record_var, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.load_var, False, visible=False)
|
||||
self.sheerka.bind_service_method(self.record_internal_var, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.load_internal_var, False, visible=False)
|
||||
self.sheerka.bind_service_method(self.delete_var, True, visible=False)
|
||||
self.sheerka.bind_service_method(self.set_var, True)
|
||||
self.sheerka.bind_service_method(self.get_var, False)
|
||||
self.sheerka.bind_service_method(self.list_vars, False)
|
||||
|
||||
cache = Cache()
|
||||
cache.populate(lambda: self.sheerka.sdp.list(self.VARIABLES_ENTRY), lambda var: var.get_key())
|
||||
self.sheerka.cache_manager.register_cache(self.VARIABLES_ENTRY, cache, True, True)
|
||||
cache = Cache().auto_configure(self.VARIABLES_ENTRY)
|
||||
self.sheerka.om.register_cache(self.VARIABLES_ENTRY, cache, True, True)
|
||||
cache.populate(lambda sdp: sdp.list(self.VARIABLES_ENTRY), lambda var: var.get_key())
|
||||
|
||||
for variable in cache.get_all():
|
||||
if variable.key in self.bound:
|
||||
setattr(self.sheerka, self.bound[variable.key], variable.value)
|
||||
internal_vars = Cache().auto_configure(self.INTERNAL_VARIABLES_ENTRY)
|
||||
self.sheerka.om.register_cache(self.INTERNAL_VARIABLES_ENTRY, internal_vars, False, False)
|
||||
|
||||
def initialize_deferred(self, context, first_time):
|
||||
# update bound variables
|
||||
for who, keys in self.bound_variables.items():
|
||||
for key in keys:
|
||||
if (variable := self.sheerka.om.get(self.VARIABLES_ENTRY, f"{who}|{key}")) is not NotFound:
|
||||
service = self.sheerka if who == self.sheerka.name else self.sheerka.services[who]
|
||||
setattr(service, key, variable.value)
|
||||
|
||||
def record_var(self, context, who, key, value):
|
||||
"""
|
||||
@@ -61,34 +82,49 @@ class SheerkaVariableManager(BaseService):
|
||||
"""
|
||||
|
||||
variable = Variable(context.event.get_digest(), who, key, value, None)
|
||||
self.sheerka.cache_manager.put(self.VARIABLES_ENTRY, variable.get_key(), variable)
|
||||
self.sheerka.om.put(self.VARIABLES_ENTRY, variable.get_key(), variable)
|
||||
|
||||
# TODO: manage credentials
|
||||
if key in self.bound:
|
||||
setattr(self.sheerka, self.bound[key], value)
|
||||
|
||||
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
|
||||
if who in self.bound_variables and key in self.bound_variables[who]:
|
||||
service = self.sheerka if who == self.sheerka.name else self.sheerka.services[who]
|
||||
setattr(service, key, value)
|
||||
|
||||
def load_var(self, who, key):
|
||||
variable = self.sheerka.cache_manager.get(self.VARIABLES_ENTRY, who + "|" + key)
|
||||
if variable is None:
|
||||
return None
|
||||
variable = self.sheerka.om.get(self.VARIABLES_ENTRY, who + "|" + key)
|
||||
if variable is NotFound:
|
||||
return NotFound
|
||||
|
||||
return variable.value
|
||||
|
||||
def record_internal_var(self, context, who, key, value):
|
||||
"""
|
||||
Stores the value in the internal cache
|
||||
This cache is not pushed to the remote repository
|
||||
:param context:
|
||||
:param who: entity that owns the key (acts as a namespace)
|
||||
:param key:
|
||||
:param value:
|
||||
"""
|
||||
self.sheerka.om.put(self.INTERNAL_VARIABLES_ENTRY, f"{who}|{key}", InternalObj(value))
|
||||
|
||||
def load_internal_var(self, who, key):
|
||||
value = self.sheerka.om.get(self.INTERNAL_VARIABLES_ENTRY, f"{who}|{key}")
|
||||
return NotFound if value is NotFound else value.obj
|
||||
|
||||
def delete_var(self, context, who, key):
|
||||
self.sheerka.cache_manager.delete(self.VARIABLES_ENTRY, who + "|" + key)
|
||||
self.sheerka.om.delete(self.VARIABLES_ENTRY, who + "|" + key)
|
||||
|
||||
def set_var(self, context, key, value):
|
||||
return self.record_var(context, context.event.user_id, key, value)
|
||||
self.record_var(context, context.event.user_id, key, value)
|
||||
return self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.SUCCESS))
|
||||
|
||||
def get_var(self, context, key):
|
||||
return self.load_var(context.event.user_id, key)
|
||||
|
||||
def list_vars(self, context, all_vars=False):
|
||||
if all_vars:
|
||||
res = [str(v) for v in self.sheerka.cache_manager.copy(self.VARIABLES_ENTRY).values()]
|
||||
res = [str(v) for v in self.sheerka.om.copy(self.VARIABLES_ENTRY).values()]
|
||||
else:
|
||||
res = [str(v) for v in self.sheerka.cache_manager.copy(self.VARIABLES_ENTRY).values() if
|
||||
res = [str(v) for v in self.sheerka.om.copy(self.VARIABLES_ENTRY).values() if
|
||||
v.who == context.event.user_id]
|
||||
return res
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
from core.global_symbols import NotFound
|
||||
from core.utils import sheerka_deepcopy
|
||||
|
||||
|
||||
@dataclass
|
||||
class ServiceObj:
|
||||
@@ -21,6 +24,13 @@ class BaseService:
|
||||
"""
|
||||
pass
|
||||
|
||||
def store_values(self, context, *args):
|
||||
"""
|
||||
Use variable Manager to store the state of the service
|
||||
"""
|
||||
for prop_name in args:
|
||||
self.sheerka.record_var(context, self.NAME, prop_name, sheerka_deepcopy(getattr(self, prop_name)))
|
||||
|
||||
def restore_values(self, *args):
|
||||
"""
|
||||
Use Variable Manager to restore the state of a service
|
||||
@@ -28,5 +38,5 @@ class BaseService:
|
||||
:return:
|
||||
"""
|
||||
for prop_name in args:
|
||||
if (value := self.sheerka.load_var(self.NAME, prop_name)) is not None:
|
||||
if (value := self.sheerka.load_var(self.NAME, prop_name)) is not NotFound:
|
||||
setattr(self, prop_name, value)
|
||||
|
||||
@@ -23,13 +23,13 @@ def my_debug(*args, check_started=None):
|
||||
if debug_name not in debug_activated:
|
||||
return
|
||||
|
||||
# with open("debug.txt", "a") as f:
|
||||
# for arg in args:
|
||||
# if isinstance(arg, list):
|
||||
# for item in arg:
|
||||
# f.write(f"{item}\n")
|
||||
# else:
|
||||
# f.write(f"{arg}\n")
|
||||
with open("debug.txt", "a") as f:
|
||||
for arg in args:
|
||||
if isinstance(arg, list):
|
||||
for item in arg:
|
||||
f.write(f"{item}\n")
|
||||
else:
|
||||
f.write(f"{arg}\n")
|
||||
|
||||
|
||||
def start_debug(debug_name=default_debug_name, msg=None):
|
||||
|
||||
+5
-58
@@ -5,8 +5,7 @@ import os
|
||||
import pkgutil
|
||||
from copy import deepcopy
|
||||
|
||||
from cache.Cache import Cache
|
||||
from core.ast_helpers import ast_to_props
|
||||
from core.global_symbols import CustomType
|
||||
from core.tokenizer import TokenKind, Tokenizer
|
||||
from pyparsing import *
|
||||
|
||||
@@ -35,8 +34,6 @@ CONSOLE_COLORS_MAP = {
|
||||
|
||||
PRIMITIVES_TYPES = (str, bool, type(None), int, float, list, dict, set, bytes, tuple, type)
|
||||
|
||||
expressions_cache = Cache()
|
||||
|
||||
ESC = Literal('\x1b')
|
||||
integer = Word(nums)
|
||||
escapeSeq = Combine(ESC + '[' + Optional(delimitedList(integer, ';')) +
|
||||
@@ -603,59 +600,6 @@ def flatten_all_children(item, get_children):
|
||||
return inner_get_all_children(item)
|
||||
|
||||
|
||||
def evaluate_expression(expr, bag):
|
||||
"""
|
||||
Try to evaluate expr in context of bag
|
||||
:param expr:
|
||||
:param bag:
|
||||
:return:
|
||||
"""
|
||||
|
||||
if expr is None or expr.strip() == "":
|
||||
return None
|
||||
|
||||
if expr in bag:
|
||||
return bag[expr]
|
||||
|
||||
props_definitions = expressions_cache.get(expr)
|
||||
if props_definitions is None:
|
||||
_ast = ast.parse(expr, mode="eval")
|
||||
props_definitions = []
|
||||
ast_to_props(props_definitions, _ast.body, None)
|
||||
props_definitions.reverse()
|
||||
expressions_cache.put(expr, props_definitions)
|
||||
|
||||
return evaluate_object(bag, props_definitions)
|
||||
|
||||
|
||||
def evaluate_object(bag, properties):
|
||||
"""
|
||||
Evaluate the properties of an object
|
||||
Works with evaluate_expression
|
||||
:param bag:
|
||||
:param properties: List of ast_helpers.PropDef
|
||||
:return:
|
||||
"""
|
||||
for prop in properties:
|
||||
try:
|
||||
obj = bag[prop.prop]
|
||||
except KeyError:
|
||||
try:
|
||||
obj = bag["self"][prop.prop]
|
||||
except Exception:
|
||||
raise NameError(prop.prop)
|
||||
|
||||
if obj is None:
|
||||
return None
|
||||
|
||||
if prop.index is not None:
|
||||
obj = obj[prop.index]
|
||||
|
||||
bag = as_bag(obj)
|
||||
|
||||
return obj
|
||||
|
||||
|
||||
def get_text_from_tokens(tokens, custom_switcher=None, tracker=None):
|
||||
"""
|
||||
Create the source code, from the list of token
|
||||
@@ -729,7 +673,9 @@ def sheerka_deepcopy(obj):
|
||||
return instance
|
||||
|
||||
from core.concept import Concept
|
||||
if isinstance(obj, dict):
|
||||
if isinstance(obj, CustomType):
|
||||
return obj
|
||||
elif isinstance(obj, dict):
|
||||
res = {sheerka_deepcopy(k): sheerka_deepcopy(v) for k, v in obj.items()}
|
||||
return res
|
||||
elif isinstance(obj, list):
|
||||
@@ -759,6 +705,7 @@ class NextIdManager:
|
||||
"""
|
||||
solely return the next integer
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.id = -1
|
||||
|
||||
|
||||
@@ -29,5 +29,5 @@ class AddToMemoryEvaluator(OneReturnValueEvaluator):
|
||||
service.registration.clear()
|
||||
return None
|
||||
|
||||
context.sheerka.add_registered_objects(context)
|
||||
context.sheerka.commit_registered_objects(context)
|
||||
return None # no need to have a second pass
|
||||
|
||||
@@ -137,7 +137,7 @@ class DefConceptEvaluator(OneReturnValueEvaluator):
|
||||
names = [str(t.value) for t in ret_value.tokens if t.type in (
|
||||
TokenKind.IDENTIFIER, TokenKind.STRING, TokenKind.KEYWORD)]
|
||||
debugger.debug_var("names", names, hint="from NameNode")
|
||||
return set(filter(lambda x: x in concept_name and context.sheerka.not_is_variable(x), names))
|
||||
return set(filter(lambda x: x in concept_name and context.sheerka.is_not_a_variable(x), names))
|
||||
|
||||
#
|
||||
# case of BNF
|
||||
@@ -156,7 +156,7 @@ class DefConceptEvaluator(OneReturnValueEvaluator):
|
||||
visitor = UnreferencedVariablesVisitor(context)
|
||||
names = visitor.get_names(python_node.ast_)
|
||||
debugger.debug_var("names", names, hint="from python node")
|
||||
return set(filter(lambda x: x in concept_name and context.sheerka.not_is_variable(x), names))
|
||||
return set(filter(lambda x: x in concept_name and context.sheerka.is_not_a_variable(x), names))
|
||||
else:
|
||||
return set()
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, NotInit
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import NotInit
|
||||
from evaluators.BaseEvaluator import OneReturnValueEvaluator
|
||||
|
||||
|
||||
@@ -8,6 +9,7 @@ class PostExecutionEvaluator(OneReturnValueEvaluator):
|
||||
Last chance to alter the return_value
|
||||
This evaluator is supposed to be a generic evaluator for all rules that must be executed just before
|
||||
the aggregations
|
||||
As of now, the AUTO_EVAL rule implementation is simply hardcoded
|
||||
"""
|
||||
|
||||
NAME = "PostExecution"
|
||||
@@ -20,12 +22,11 @@ class PostExecutionEvaluator(OneReturnValueEvaluator):
|
||||
if len(evaluation_parents) > 1:
|
||||
return False # It must be executed only when the top level context
|
||||
|
||||
# only support the rule for the COMMANDS
|
||||
value = return_value.body
|
||||
return isinstance(value, Concept) and context.sheerka.isa(value, context.sheerka.new(BuiltinConcepts.AUTO_EVAL))
|
||||
|
||||
def eval(self, context, return_value):
|
||||
# only support the rule for the COMMANDS ??
|
||||
# only support the rule for the AUTO_EVAL
|
||||
return context.sheerka.ret(
|
||||
self.name,
|
||||
True,
|
||||
|
||||
@@ -7,7 +7,8 @@ import core.builtin_helpers
|
||||
import core.utils
|
||||
from core.ast_helpers import UnreferencedNamesVisitor, NamesWithAttributesVisitor
|
||||
from core.builtin_concepts import BuiltinConcepts, ParserResultConcept
|
||||
from core.concept import ConceptParts, Concept, NotInit
|
||||
from core.concept import ConceptParts, Concept
|
||||
from core.global_symbols import NotInit, NotFound
|
||||
from core.rule import Rule
|
||||
from core.sheerka.ExecutionContext import ExecutionContext
|
||||
from core.tokenizer import Token, TokenKind
|
||||
@@ -127,12 +128,16 @@ class PythonEvaluator(OneReturnValueEvaluator):
|
||||
for globals_ in all_possible_globals:
|
||||
try:
|
||||
# eval
|
||||
my_locals = {}
|
||||
if isinstance(node.ast_, ast.Expression):
|
||||
context.log("Evaluating using 'eval'.", self.name)
|
||||
evaluated = eval(node.get_compiled(), globals_, sheerka.locals)
|
||||
evaluated = eval(node.get_compiled(), globals_, my_locals)
|
||||
else:
|
||||
context.log("Evaluating using 'exec'.", self.name)
|
||||
evaluated = self.exec_with_return(node.ast_, globals_, sheerka.locals)
|
||||
evaluated = self.exec_with_return(node.ast_, globals_, my_locals)
|
||||
|
||||
# TODO find a better implementation using SheerkaMemory
|
||||
sheerka.locals.update(my_locals)
|
||||
|
||||
if not expect_success or evaluated:
|
||||
break # in this first version, we stop once a success is found
|
||||
@@ -140,8 +145,8 @@ class PythonEvaluator(OneReturnValueEvaluator):
|
||||
if concepts_entries is None:
|
||||
concepts_entries = self.get_concepts_entries_from_globals(my_globals)
|
||||
eval_error = PythonEvalError(ex,
|
||||
traceback.format_exc() if get_trace_back else None,
|
||||
self.get_concepts_values_from_globals(globals_, concepts_entries))
|
||||
traceback.format_exc() if get_trace_back else None,
|
||||
self.get_concepts_values_from_globals(globals_, concepts_entries))
|
||||
errors.append(eval_error)
|
||||
exception_debugger.debug_var("exception", eval_error.error, is_error=True)
|
||||
exception_debugger.debug_var("trace", eval_error.traceback, is_error=True)
|
||||
@@ -223,8 +228,13 @@ class PythonEvaluator(OneReturnValueEvaluator):
|
||||
my_globals["sheerka"] = Expando(bag)
|
||||
continue
|
||||
|
||||
# search in local variables. To remove when local variables will be merged with memory
|
||||
if name in context.sheerka.locals:
|
||||
my_globals[name] = context.sheerka.locals[name]
|
||||
continue
|
||||
|
||||
# search in short term memory
|
||||
if (obj := context.get_from_short_term_memory(name)) is not None:
|
||||
if (obj := context.get_from_short_term_memory(name)) is not NotFound:
|
||||
context.log(f"Resolving '{name}'. Using value found in STM.", self.name)
|
||||
my_globals[name] = obj
|
||||
continue
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, NotInit
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import NotInit
|
||||
from evaluators.BaseEvaluator import AllReturnValuesEvaluator
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from core.builtin_concepts import BuiltinConcepts, ParserResultConcept
|
||||
from core.global_symbols import NotFound
|
||||
from core.rule import Rule, ACTION_TYPE_DEFERRED
|
||||
from evaluators.BaseEvaluator import OneReturnValueEvaluator
|
||||
|
||||
@@ -35,7 +36,7 @@ class RuleEvaluator(OneReturnValueEvaluator):
|
||||
# Browse the rules to find possible deferred rules
|
||||
if r.metadata.action_type == ACTION_TYPE_DEFERRED:
|
||||
rule_id = sheerka.get_from_short_term_memory(context, r.id)
|
||||
rule = sheerka.get_rule_by_id(str(rule_id or r.id))
|
||||
rule = sheerka.get_rule_by_id(str(rule_id if rule_id is not NotFound else r.id))
|
||||
resolved.append(rule)
|
||||
success &= isinstance(rule, Rule)
|
||||
else:
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from core.builtin_helpers import evaluate_expression
|
||||
from core.sheerka.services.SheerkaRuleManager import FormatAstVariable, FormatAstVariableNotFound, FormatAstColor, \
|
||||
FormatAstList, FormatAstRawText, FormatAstDict
|
||||
from core.utils import evaluate_expression, as_bag
|
||||
from core.utils import as_bag
|
||||
|
||||
fstring = compile('f"{value:{format}}"', "DeveloperVisitor.fstring", mode="eval")
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ from typing import Set
|
||||
import core.utils
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import VARIABLE_PREFIX, Concept, DEFINITION_TYPE_BNF, ConceptParts
|
||||
from core.global_symbols import NotFound
|
||||
from core.rule import Rule
|
||||
from core.tokenizer import TokenKind, Token
|
||||
from parsers.BaseParser import Node, BaseParser, ParsingError
|
||||
@@ -817,12 +818,6 @@ class BaseNodeParser(BaseParser):
|
||||
|
||||
def __init__(self, name, priority, **kwargs):
|
||||
super().__init__(name, priority, yield_eof=True)
|
||||
if 'sheerka' in kwargs:
|
||||
sheerka = kwargs.get("sheerka")
|
||||
self.concepts_by_first_keyword = sheerka.resolved_concepts_by_first_keyword
|
||||
|
||||
else:
|
||||
self.concepts_by_first_keyword = None
|
||||
|
||||
def init_from_concepts(self, context, concepts, **kwargs):
|
||||
"""
|
||||
@@ -832,8 +827,12 @@ class BaseNodeParser(BaseParser):
|
||||
:param concepts
|
||||
:return:
|
||||
"""
|
||||
concepts_by_first_keyword = self.get_concepts_by_first_token(context, concepts).body
|
||||
self.concepts_by_first_keyword = self.resolve_concepts_by_first_keyword(context, concepts_by_first_keyword).body
|
||||
concepts_by_first_keyword = self.compute_concepts_by_first_token(context, concepts).body
|
||||
resolved = self.resolve_concepts_by_first_keyword(context, concepts_by_first_keyword).body
|
||||
|
||||
context.sheerka.om.put(context.sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY,
|
||||
False,
|
||||
resolved)
|
||||
|
||||
def get_concepts(self, token, to_keep, custom=None, to_map=None, strip_quotes=False):
|
||||
"""
|
||||
@@ -858,24 +857,25 @@ class BaseNodeParser(BaseParser):
|
||||
custom_concepts = custom(name) if custom else [] # to get extra concepts using an alternative method
|
||||
|
||||
result = []
|
||||
if name in self.concepts_by_first_keyword:
|
||||
for concept_id in self.concepts_by_first_keyword.get(name):
|
||||
concepts_ids = self.sheerka.om.get(self.sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, name)
|
||||
if concepts_ids is NotFound:
|
||||
return custom_concepts if custom else None
|
||||
|
||||
concept = self.sheerka.get_by_id(concept_id)
|
||||
for concept_id in concepts_ids:
|
||||
|
||||
if not to_keep(concept):
|
||||
continue
|
||||
concept = self.sheerka.get_by_id(concept_id)
|
||||
|
||||
concept = to_map(concept, self, self.sheerka) if to_map else concept
|
||||
result.append(concept)
|
||||
if not to_keep(concept):
|
||||
continue
|
||||
|
||||
return core.utils.make_unique(result + custom_concepts,
|
||||
lambda c: c.concept.id if hasattr(c, "concept") else c.id)
|
||||
concept = to_map(concept, self, self.sheerka) if to_map else concept
|
||||
result.append(concept)
|
||||
|
||||
return custom_concepts if custom else None
|
||||
return core.utils.make_unique(result + custom_concepts,
|
||||
lambda c: c.concept.id if hasattr(c, "concept") else c.id)
|
||||
|
||||
@staticmethod
|
||||
def get_concepts_by_first_token(context, concepts, use_sheerka=False, previous_entries=None):
|
||||
def compute_concepts_by_first_token(context, concepts, use_sheerka=False, previous_entries=None):
|
||||
"""
|
||||
Create the map describing the first token expected by a concept
|
||||
:param context:
|
||||
@@ -885,7 +885,7 @@ class BaseNodeParser(BaseParser):
|
||||
:return:
|
||||
"""
|
||||
sheerka = context.sheerka
|
||||
res = sheerka.cache_manager.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) if use_sheerka else (previous_entries or {})
|
||||
res = sheerka.om.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) if use_sheerka else (previous_entries or {})
|
||||
for concept in concepts:
|
||||
keywords = BaseNodeParser.get_first_tokens(sheerka, concept)
|
||||
|
||||
@@ -966,7 +966,7 @@ class BaseNodeParser(BaseParser):
|
||||
for concept_id in concepts_in_recursion:
|
||||
# make sure we keep the longest chain
|
||||
old = sheerka.chicken_and_eggs.get(concept_id)
|
||||
if old is None or len(old) < len(ex.concepts):
|
||||
if old is NotFound or len(old) < len(ex.concepts):
|
||||
sheerka.chicken_and_eggs.put(concept_id, concepts_in_recursion)
|
||||
else:
|
||||
res.setdefault(k, []).extend(v)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.global_symbols import NotFound
|
||||
from core.sheerka.services.SheerkaExecute import ParserInput
|
||||
from parsers.BaseParser import BaseParser
|
||||
|
||||
@@ -34,11 +35,10 @@ class ShortTermMemoryParser(BaseParser):
|
||||
concept_name = parser_input.as_text()
|
||||
concept = sheerka.get_from_short_term_memory(context, concept_name)
|
||||
|
||||
if concept:
|
||||
if concept is NotFound:
|
||||
body = sheerka.new(BuiltinConcepts.NOT_FOUND, body=concept_name)
|
||||
return sheerka.ret(self.name, False, body)
|
||||
else:
|
||||
# Unlike what is usually done, we directly return the concept, not a ParsingResult of the concept
|
||||
# This is to save the evaluation time cost
|
||||
return sheerka.ret(self.name, True, concept)
|
||||
|
||||
else:
|
||||
body = sheerka.new(BuiltinConcepts.NOT_FOUND, body=concept_name)
|
||||
return sheerka.ret(self.name, False, body)
|
||||
|
||||
@@ -126,15 +126,15 @@ class SyaConceptDef:
|
||||
|
||||
# first, try to look in the parser
|
||||
# it is where to find the data during the unit tests
|
||||
if parser and concept.id in parser.sya_definitions:
|
||||
if parser and concept.id in parser.test_only_sya_definitions:
|
||||
# Manage when precedence and associativity are given in the unit tests
|
||||
sya_def = parser.sya_definitions.get(concept.id)
|
||||
sya_def = parser.test_only_sya_definitions.get(concept.id)
|
||||
if sya_def[0] is not None:
|
||||
sya_concept_def.precedence = sya_def[0]
|
||||
if sya_def[1] is not None:
|
||||
sya_concept_def.associativity = sya_def[1]
|
||||
|
||||
# otherwise, use sheerka
|
||||
# otherwise, use sheerka # KSI 20210109 otherwise or override ??
|
||||
if sheerka:
|
||||
concept_weight = parser.sheerka.get_concepts_weights(BuiltinConcepts.PRECEDENCE, CONCEPT_COMPARISON_CONTEXT)
|
||||
if concept.str_id in concept_weight:
|
||||
@@ -332,7 +332,7 @@ class InFixToPostFix:
|
||||
def _add_debug(self, debug_info: DebugInfo):
|
||||
if debug_info.level is None or (self.enabled_debug_levels and
|
||||
(f"#{self.id}.{debug_info.level}" in self.enabled_debug_levels or
|
||||
"*" in self.enabled_debug_levels)):
|
||||
"*" in self.enabled_debug_levels)):
|
||||
self.debug.append(debug_info)
|
||||
|
||||
def _is_lpar(self, token):
|
||||
@@ -1134,20 +1134,14 @@ class SyaNodeParser(BaseNodeParser):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(SyaNodeParser.NAME, 50, **kwargs)
|
||||
if 'sheerka' in kwargs:
|
||||
sheerka = kwargs.get("sheerka")
|
||||
self.sya_definitions = sheerka.resolved_sya_def
|
||||
|
||||
else:
|
||||
self.concepts_by_first_keyword = {}
|
||||
self.sya_definitions = {}
|
||||
self.test_only_sya_definitions = {}
|
||||
|
||||
def init_from_concepts(self, context, concepts, **kwargs):
|
||||
super().init_from_concepts(context, concepts)
|
||||
|
||||
sya_definitions = kwargs.get("sya", None)
|
||||
if sya_definitions:
|
||||
self.sya_definitions = sya_definitions
|
||||
self.test_only_sya_definitions = sya_definitions
|
||||
|
||||
@staticmethod
|
||||
def _is_eligible(concept):
|
||||
@@ -1431,10 +1425,3 @@ class SyaNodeParser(BaseNodeParser):
|
||||
result.append(infix_to_postfix)
|
||||
|
||||
return result
|
||||
|
||||
# @staticmethod
|
||||
# def init_sheerka(self, sheerka):
|
||||
# if hasattr(BaseNodeParser, "init_sheerka"):
|
||||
# BaseNodeParser.init_sheerka(sheerka)
|
||||
#
|
||||
# # init syadefinitins
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import types
|
||||
|
||||
from core.builtin_concepts import BuiltinConcepts
|
||||
from core.concept import Concept, NotInit
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import NotInit
|
||||
from printer.FormatInstructions import FormatInstructions, FormatDetailType
|
||||
from printer.Formatter import Formatter
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
- O : ServiceObj (from pickle)
|
||||
- M : MemoryObject (using SheerkaPickle)
|
||||
- X : Rule (from sheerkaPickle, 'X' stands for nothing, I am running out of meaningful letters)
|
||||
- T : CustomType
|
||||
|
||||
## How concepts are serialized ?
|
||||
- get the id of the concept
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
import hashlib
|
||||
import json
|
||||
import shutil
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, date
|
||||
from threading import RLock
|
||||
from os import path
|
||||
|
||||
from core.global_symbols import NotFound
|
||||
from core.sheerka_logger import get_logger
|
||||
from sdp.sheerkaDataProviderIO import SheerkaDataProviderIO
|
||||
from sdp.sheerkaSerializer import Serializer, SerializerContext
|
||||
@@ -71,6 +74,22 @@ class Event(object):
|
||||
self.parents = as_dict["parents"]
|
||||
self._digest = as_dict["_digest"] # freeze the digest
|
||||
|
||||
def __eq__(self, other):
|
||||
if id(self) == id(other):
|
||||
return True
|
||||
|
||||
if isinstance(other, Event):
|
||||
return (self.version == other.version and
|
||||
self.user_id == other.user_id and
|
||||
self.date == other.date and
|
||||
self.message == other.message and
|
||||
self.parents == other.parents)
|
||||
|
||||
return False
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.get_digest())
|
||||
|
||||
|
||||
class State:
|
||||
"""
|
||||
@@ -140,10 +159,10 @@ class SheerkaDataProviderTransaction:
|
||||
else:
|
||||
items = self.sdp.REF_PREFIX + self.sdp.save_obj(items)
|
||||
|
||||
if key:
|
||||
self.state.data[entry][key] = items
|
||||
else:
|
||||
if key is None:
|
||||
self.state.data[entry] = items
|
||||
else:
|
||||
self.state.data[entry][key] = items
|
||||
|
||||
def remove(self, entry, key):
|
||||
"""
|
||||
@@ -189,12 +208,14 @@ class SheerkaDataProvider:
|
||||
StateFolder = "state"
|
||||
ObjectsFolder = "objects"
|
||||
CacheFolder = "cache"
|
||||
RefFolder = "refs"
|
||||
HeadFile = "HEAD"
|
||||
LastEventFile = "LAST_EVENT"
|
||||
KeysFile = "keys"
|
||||
OntologiesFiles = "ontologies"
|
||||
REF_PREFIX = "##REF##:"
|
||||
|
||||
def __init__(self, root=None, sheerka=None):
|
||||
def __init__(self, root=None, sheerka=None, name="__default__"):
|
||||
self.log = get_logger(__name__)
|
||||
self.init_log = get_logger("init." + __name__)
|
||||
self.init_log.debug("Initializing sdp.")
|
||||
@@ -202,6 +223,7 @@ class SheerkaDataProvider:
|
||||
self.sheerka = sheerka
|
||||
self.io = SheerkaDataProviderIO.get(root)
|
||||
self.first_time = self.io.first_time
|
||||
self.name = name
|
||||
|
||||
self.serializer = Serializer()
|
||||
self.lock = RLock()
|
||||
@@ -218,10 +240,10 @@ class SheerkaDataProvider:
|
||||
stream.seek(0)
|
||||
return sha256_hash.hexdigest()
|
||||
|
||||
def get_transaction(self, event):
|
||||
def get_transaction(self, event) -> SheerkaDataProviderTransaction:
|
||||
return SheerkaDataProviderTransaction(self, event)
|
||||
|
||||
def get(self, entry, key=None, default=None, load_origin=True):
|
||||
def get(self, entry, key=None, default=NotFound, load_origin=True):
|
||||
"""
|
||||
Get an element
|
||||
:param entry:
|
||||
@@ -307,7 +329,7 @@ class SheerkaDataProvider:
|
||||
:param event:
|
||||
:return: digest of the event
|
||||
"""
|
||||
parent = self.get_snapshot(SheerkaDataProvider.LastEventFile)
|
||||
parent = self.get_last_event()
|
||||
event.parents = [parent] if parent else None
|
||||
digest = event.get_digest() # must be call after setting the parents
|
||||
|
||||
@@ -316,7 +338,7 @@ class SheerkaDataProvider:
|
||||
return digest
|
||||
|
||||
self.io.write_binary(target_path, self.serializer.serialize(event, None).read())
|
||||
self.set_snapshot(SheerkaDataProvider.LastEventFile, digest)
|
||||
self.set_last_event(digest)
|
||||
|
||||
return digest
|
||||
|
||||
@@ -326,7 +348,7 @@ class SheerkaDataProvider:
|
||||
:param digest:
|
||||
:return:
|
||||
"""
|
||||
digest = digest or self.get_snapshot(SheerkaDataProvider.LastEventFile)
|
||||
digest = digest or self.get_last_event()
|
||||
if digest is None:
|
||||
return None
|
||||
|
||||
@@ -339,7 +361,7 @@ class SheerkaDataProvider:
|
||||
"""
|
||||
Load multiple events in the same command
|
||||
:param start:
|
||||
:param page_size:
|
||||
:param page_size: = -1 to load everything
|
||||
:return:
|
||||
"""
|
||||
|
||||
@@ -365,16 +387,26 @@ class SheerkaDataProvider:
|
||||
digest = event.parents[0]
|
||||
count += 1
|
||||
|
||||
def get_last_event(self):
|
||||
last_event_file = self.io.path_join(self.LastEventFile)
|
||||
if not self.io.exists(last_event_file):
|
||||
return None
|
||||
return self.io.read_text(last_event_file)
|
||||
|
||||
def set_last_event(self, digest):
|
||||
last_event_file = self.io.path_join(self.LastEventFile)
|
||||
return self.io.write_text(last_event_file, digest)
|
||||
|
||||
def set_snapshot(self, file, digest):
|
||||
head_file = self.io.path_join(self.RefFolder, self.name, file)
|
||||
return self.io.write_text(head_file, digest)
|
||||
|
||||
def get_snapshot(self, file):
|
||||
head_file = self.io.path_join(file)
|
||||
head_file = self.io.path_join(self.RefFolder, self.name, file)
|
||||
if not self.io.exists(head_file):
|
||||
return None
|
||||
return self.io.read_text(head_file)
|
||||
|
||||
def set_snapshot(self, file, digest):
|
||||
head_file = self.io.path_join(file)
|
||||
return self.io.write_text(head_file, digest)
|
||||
|
||||
def load_state(self, digest):
|
||||
if digest is None:
|
||||
return State()
|
||||
@@ -516,3 +548,26 @@ class SheerkaDataProvider:
|
||||
elif not isinstance(obj, str):
|
||||
setattr(obj, Serializer.ORIGIN, digest)
|
||||
return obj
|
||||
|
||||
def save_ontologies(self, ontologies_names):
|
||||
"""
|
||||
Keep track of the sequence of ontologies
|
||||
This is a quick and dirty ontology management
|
||||
I would like the ontologies to have a digest and to know what is their parent
|
||||
"""
|
||||
ontology_file = self.io.path_join(SheerkaDataProvider.OntologiesFiles)
|
||||
text = "\n".join(ontologies_names)
|
||||
self.io.write_text(ontology_file, text)
|
||||
|
||||
def load_ontologies(self):
|
||||
ontology_file = self.io.path_join(SheerkaDataProvider.OntologiesFiles)
|
||||
if not self.io.exists(ontology_file):
|
||||
return []
|
||||
|
||||
text = self.io.read_text(ontology_file)
|
||||
return text.split("\n")
|
||||
|
||||
def test_only_destroy_refs(self):
|
||||
current_sdp_refs_folder = self.io.path_join(self.RefFolder, self.name)
|
||||
if path.exists(current_sdp_refs_folder):
|
||||
shutil.rmtree(current_sdp_refs_folder)
|
||||
|
||||
@@ -8,6 +8,7 @@ from enum import Enum
|
||||
|
||||
import sheerkapickle
|
||||
from core.concept import Concept
|
||||
from core.global_symbols import CustomType, NotInit, NotFound, Removed
|
||||
from core.rule import Rule
|
||||
from core.sheerka_logger import get_logger
|
||||
from core.utils import get_full_qualified_name, get_class
|
||||
@@ -64,6 +65,7 @@ class Serializer:
|
||||
self.register(MemoryObjectSerializer()) # before ServiceObjSerializer
|
||||
self.register(ServiceObjSerializer())
|
||||
self.register(RuleSerializer())
|
||||
self.register(CustomTypeSerializer())
|
||||
|
||||
def register(self, serializer):
|
||||
"""
|
||||
@@ -305,3 +307,26 @@ class MemoryObjectSerializer(SheerkaPickleSerializer):
|
||||
class RuleSerializer(SheerkaPickleSerializer):
|
||||
def __init__(self):
|
||||
super().__init__(lambda obj: isinstance(obj, Rule), "X", 1)
|
||||
|
||||
|
||||
class CustomTypeSerializer(BaseSerializer):
|
||||
def __init__(self):
|
||||
BaseSerializer.__init__(self, "T", 1)
|
||||
|
||||
def matches(self, obj):
|
||||
return isinstance(obj, CustomType)
|
||||
|
||||
def dump(self, stream, obj, context):
|
||||
stream.write(obj.value.encode("utf-8"))
|
||||
stream.seek(0)
|
||||
return stream
|
||||
|
||||
def load(self, stream, context):
|
||||
value = stream.read().decode("utf-8")
|
||||
if value == NotInit.value:
|
||||
return NotInit
|
||||
elif value == NotFound.value:
|
||||
return NotFound
|
||||
elif value == Removed.value:
|
||||
return Removed
|
||||
raise NotImplemented(value)
|
||||
|
||||
@@ -2,16 +2,15 @@ import json
|
||||
from logging import Logger
|
||||
|
||||
import core.utils
|
||||
from core.concept import Concept, NotInitialized
|
||||
from core.concept import Concept
|
||||
from core.sheerka.services.SheerkaExecute import ParserInput
|
||||
from core.simple_debug import my_debug
|
||||
from sheerkapickle import utils, tags, handlers
|
||||
|
||||
|
||||
def encode(sheerka, obj):
|
||||
pickler = SheerkaPickler(sheerka)
|
||||
flatten = pickler.flatten(obj)
|
||||
my_debug(f"{obj} ids={len(pickler.ids)}, objs={len(pickler.objs)}")
|
||||
# my_debug(f"{obj} ids={len(pickler.ids)}, objs={len(pickler.objs)}")
|
||||
return json.dumps(flatten)
|
||||
|
||||
|
||||
@@ -38,9 +37,10 @@ class SheerkaPickler:
|
||||
self.to_reduce.append(ToReduce(lambda o: isinstance(o, Logger), lambda o: None))
|
||||
from parsers.BaseParser import BaseParser
|
||||
from evaluators.BaseEvaluator import BaseEvaluator
|
||||
from core.sheerka.SheerkaOntologyManager import Ontology
|
||||
self.to_reduce.append(ToReduce(lambda o: isinstance(o, (BaseParser, BaseEvaluator)), lambda o: o.name))
|
||||
self.to_reduce.append(ToReduce(lambda o: isinstance(o, ParserInput), lambda o: o.as_text()))
|
||||
self.to_reduce.append(ToReduce(lambda o: isinstance(o, NotInitialized), lambda o: None))
|
||||
self.to_reduce.append(ToReduce(lambda o: isinstance(o, Ontology), lambda o: o.name))
|
||||
|
||||
def flatten(self, obj):
|
||||
if utils.is_to_discard(obj):
|
||||
@@ -49,6 +49,9 @@ class SheerkaPickler:
|
||||
if utils.is_primitive(obj):
|
||||
return obj
|
||||
|
||||
if utils.is_custom_type(obj):
|
||||
return self._flatten_custom_type(obj)
|
||||
|
||||
if utils.is_type(obj):
|
||||
return str(obj)
|
||||
|
||||
@@ -133,6 +136,18 @@ class SheerkaPickler:
|
||||
|
||||
return data
|
||||
|
||||
def _flatten_custom_type(self, obj):
|
||||
# check if the object was already seen
|
||||
exists, _id = self.exist(obj)
|
||||
if exists:
|
||||
return {tags.ID: _id}
|
||||
else:
|
||||
self.id_count = self.id_count + 1
|
||||
self.ids[id(obj)] = self.id_count
|
||||
self.objs.append(obj)
|
||||
|
||||
return {tags.CUSTOM: obj.value}
|
||||
|
||||
def exist(self, obj):
|
||||
try:
|
||||
v = self.ids[id(obj)]
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import json
|
||||
|
||||
import core.utils
|
||||
from core.global_symbols import NotInit, NotFound, Removed
|
||||
from sheerkapickle import tags, utils, handlers
|
||||
|
||||
|
||||
@@ -20,6 +21,9 @@ class SheerkaUnpickler:
|
||||
if has_tag(obj, tags.TUPLE):
|
||||
return self._restore_tuple(obj)
|
||||
|
||||
if has_tag(obj, tags.CUSTOM):
|
||||
return self._restore_custom(obj)
|
||||
|
||||
if has_tag(obj, tags.SET):
|
||||
return self._restore_set(obj)
|
||||
|
||||
@@ -43,6 +47,19 @@ class SheerkaUnpickler:
|
||||
def _restore_tuple(self, obj):
|
||||
return tuple([self.restore(v) for v in obj[tags.TUPLE]])
|
||||
|
||||
def _restore_custom(self, obj):
|
||||
if obj[tags.CUSTOM] == NotInit.value:
|
||||
instance = NotInit
|
||||
elif obj[tags.CUSTOM] == NotFound.value:
|
||||
instance = NotFound
|
||||
elif obj[tags.CUSTOM] == Removed.value:
|
||||
instance = Removed
|
||||
else:
|
||||
raise KeyError(f"unknown {obj[tags.CUSTOM]}")
|
||||
|
||||
self.objs.append(instance)
|
||||
return instance
|
||||
|
||||
def _restore_set(self, obj):
|
||||
return set([self.restore(v) for v in obj[tags.SET]])
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import core.utils
|
||||
from core.builtin_concepts import UserInputConcept, ReturnValueConcept, BuiltinConcepts
|
||||
from core.concept import Concept, PROPERTIES_TO_SERIALIZE as CONCEPT_PROPERTIES_TO_SERIALIZE, ConceptParts, NotInit, \
|
||||
get_concept_attrs
|
||||
from core.concept import Concept, PROPERTIES_TO_SERIALIZE as CONCEPT_PROPERTIES_TO_SERIALIZE
|
||||
from core.global_symbols import NotInit
|
||||
from core.rule import Rule
|
||||
from core.sheerka.ExecutionContext import ExecutionContext, PROPERTIES_TO_SERIALIZE as CONTEXT_PROPERTIES_TO_SERIALIZE
|
||||
from core.sheerka.Sheerka import Sheerka
|
||||
@@ -221,4 +221,3 @@ def initialize_pickle_handlers():
|
||||
registry.register(ExecutionContext, ExecutionContextHandler, True)
|
||||
registry.register(Rule, RuleContextHandler, True)
|
||||
registry.register(PythonNode, PythonNodeHandler, True)
|
||||
|
||||
|
||||
@@ -3,3 +3,4 @@ TUPLE = "_sheerka/tuple"
|
||||
SET = "_sheerka/set"
|
||||
OBJECT = "_sheerka/obj"
|
||||
ENUM = "_sheerka/enum"
|
||||
CUSTOM = "_sheerka/custom"
|
||||
|
||||
@@ -2,6 +2,8 @@ import base64
|
||||
import types
|
||||
from enum import Enum
|
||||
|
||||
from core.global_symbols import CustomType
|
||||
|
||||
class_types = (type,)
|
||||
PRIMITIVES = (str, bool, type(None), int, float)
|
||||
|
||||
@@ -17,6 +19,10 @@ def is_enum(obj):
|
||||
return isinstance(obj, Enum)
|
||||
|
||||
|
||||
def is_custom_type(obj):
|
||||
return isinstance(obj, CustomType)
|
||||
|
||||
|
||||
def is_object(obj):
|
||||
"""Returns True is obj is a reference to an object instance."""
|
||||
|
||||
@@ -36,7 +42,7 @@ def is_primitive(obj):
|
||||
|
||||
|
||||
def is_dictionary(obj):
|
||||
return isinstance(obj, dict)
|
||||
return isinstance(obj, dict)
|
||||
|
||||
|
||||
def is_list(obj):
|
||||
|
||||
Reference in New Issue
Block a user