Initialized logging

This commit is contained in:
2019-11-05 19:56:00 +01:00
parent b12204360e
commit 0d2adf1b6c
10 changed files with 448 additions and 249 deletions
+29 -18
View File
@@ -5,6 +5,9 @@ import hashlib
import json
import zlib
from sdp.sheerkaSerializer import Serializer, SerializerContext
import logging
log = logging.getLogger(__name__)
def json_default_converter(o):
@@ -203,13 +206,19 @@ class SheerkaDataProvider:
REF_PREFIX = "##REF##:"
def __init__(self, root=None):
log.debug("Initializing sdp.")
self.root = path.abspath(path.join(path.expanduser("~"), ".sheerka")) \
if root is None \
else path.abspath(root)
log.debug("root is set to '" + self.root + "'")
if not path.exists(self.root):
log.debug("root folder not found. Creating it.")
os.makedirs(self.root)
self.first_time = True
else:
self.first_time = False
self.serializer = Serializer()
@@ -239,10 +248,10 @@ class SheerkaDataProvider:
def is_reference(obj):
return isinstance(obj, str) and obj.startswith(SheerkaDataProvider.REF_PREFIX)
def add(self, event: Event, entry, obj, allow_multiple=True, use_ref=False):
def add(self, event_digest: str, entry, obj, allow_multiple=True, use_ref=False):
"""
Adds obj to the entry 'entry'
:param event: events that triggers the update of the state
:param event_digest: digest of the event that triggers the modification of the state
:param entry: entry of the state to update
:param obj: obj to insert or add
:param allow_multiple: if set to true, the same key can be added several times.
@@ -252,12 +261,14 @@ class SheerkaDataProvider:
:return: (entry, key) to retrieve the object
"""
event_digest = self.save_event(event)
snapshot = self.get_snapshot()
state = self.load_state(snapshot)
log.debug(f"Adding obj '{obj}' in entry '{entry}' (allow_multiple={allow_multiple}, use_ref={use_ref})")
# check uniqueness, cannot add the same key twice if allow_multiple == False
key = self.get_obj_key(obj)
log.debug(f"key found : '{key}'") if key else log.debug("No key found")
if not allow_multiple:
if isinstance(obj, dict):
for k in obj:
@@ -270,6 +281,7 @@ class SheerkaDataProvider:
state.parents = [] if snapshot is None else [snapshot]
state.events = [event_digest]
state.date = datetime.now()
log.debug(state.data)
if use_ref:
digest = self.save_obj(obj)
@@ -281,10 +293,10 @@ class SheerkaDataProvider:
self.set_snapshot(new_snapshot)
return entry, key
def add_with_auto_key(self, event: Event, entry, obj):
def add_with_auto_key(self, event_digest: str, entry, obj):
"""
Add obj to entry. An autogenerated key is created for obj
:param event:
:param event_digest:
:param entry:
:param obj:
:return:
@@ -292,12 +304,11 @@ class SheerkaDataProvider:
next_key = self.get_next_key(entry)
if hasattr(obj, "set_key"):
obj.set_key(next_key)
self.add(event, entry, ObjWithKey(next_key, obj))
self.add(event_digest, entry, ObjWithKey(next_key, obj))
return entry, next_key
def add_unique(self, event: Event, entry, obj):
def add_unique(self, event_digest: str, entry, obj):
"""Add an entry and make sure it's unique"""
event_digest = self.save_event(event)
snapshot = self.get_snapshot()
state = self.load_state(snapshot)
@@ -313,17 +324,16 @@ class SheerkaDataProvider:
self.set_snapshot(new_snapshot)
return entry, None
def set(self, event: Event, entry, obj, use_ref=False):
def set(self, event_digest, entry, obj, use_ref=False):
"""
Add or replace an entry. The entry is reinitialized.
If the previous value was a dict, all keys are lost
:param event:
:param event_digest:
:param entry:
:param obj:
:param use_ref:
:return:
"""
event_digest = self.save_event(event)
snapshot = self.get_snapshot()
state = self.load_state(snapshot)
@@ -340,11 +350,11 @@ class SheerkaDataProvider:
self.set_snapshot(new_snapshot)
return entry, key
def modify(self, event: Event, entry, key, obj):
def modify(self, event_digest, entry, key, obj):
"""
Replace an element
If the key is not provided, this has the same effect as set, i.e. the entry is reset
:param event:
:param event_digest:
:param entry:
:param key: key of the object to update
:param obj: new data
@@ -354,7 +364,6 @@ class SheerkaDataProvider:
if key is None:
raise SheerkaDataProviderError("Key is mandatory.", None)
event_digest = self.save_event(event)
snapshot = self.get_snapshot()
state = self.load_state(snapshot)
@@ -416,10 +425,10 @@ class SheerkaDataProvider:
if filter_to_use(element):
yield self.load_ref_if_needed(element)[0]
def remove(self, event: Event, entry, filter=None):
def remove(self, event_digest, entry, filter=None):
"""
Removes elements under the entry 'entry'
:param event: event that triggers the deletion
:param event_digest: digest of the event that triggers the deletion
:param entry:
:param filter: filter to use
:return: new sha256 of the state
@@ -431,8 +440,6 @@ class SheerkaDataProvider:
if entry not in state.data:
raise IndexError(entry)
event_digest = self.save_event(event)
state.parents = [] if snapshot is None else [snapshot]
state.events = [event_digest]
state.date = datetime.now()
@@ -523,6 +530,7 @@ class SheerkaDataProvider:
def save_state(self, state: State):
digest = state.get_digest()
log.debug(f"Saving new state. digest={digest}")
target_path = path.join(self.root, SheerkaDataProvider.StateFolder, digest[:24], digest)
if path.exists(target_path):
return digest
@@ -544,11 +552,13 @@ class SheerkaDataProvider:
return self.serializer.deserialize(f, None)
def save_obj(self, obj):
log.debug(f"Saving '{obj}' as reference...")
stream = self.serializer.serialize(obj, SerializerContext(user_name="kodjo"))
digest = obj.get_digest() if hasattr(obj, "get_digest") else self.get_stream_digest(stream)
target_path = path.join(self.root, SheerkaDataProvider.ObjectsFolder, digest[:24], digest)
if path.exists(target_path):
log.debug(f"...already saved. digest is {digest}")
return digest
if not path.exists(path.dirname(target_path)):
@@ -557,6 +567,7 @@ class SheerkaDataProvider:
with open(target_path, "wb") as f:
f.write(stream.read())
log.debug(f"...digest is {digest}.")
return digest
def load_obj(self, digest):