Implemented dictionary-based file system to speed up the tests

This commit is contained in:
2019-11-21 21:54:39 +01:00
parent 714f4f5dd0
commit 5d37addc7d
17 changed files with 817 additions and 494 deletions
+2 -1
View File
@@ -2,4 +2,5 @@ venv
.pytest_cache .pytest_cache
.idea .idea
__pycache__ __pycache__
build build
prof
+1
View File
@@ -6,5 +6,6 @@ test: clean
clean: clean:
rm -rf build rm -rf build
rm -rf prof
find . -name '.pytest_cache' -exec rm -rf {} + find . -name '.pytest_cache' -exec rm -rf {} +
find . -name '__pycache__' -exec rm -rf {} + find . -name '__pycache__' -exec rm -rf {} +
+24
View File
@@ -470,3 +470,27 @@ MUST be different ? How I handle when the explicitly have to be the same ?
I seems that the purpose of the **ExactConceptParser** is to find exact match. I seems that the purpose of the **ExactConceptParser** is to find exact match.
I need another way to express that 'a' and 'b' can be the same. I need another way to express that 'a' and 'b' can be the same.
2019-11-21
**********
MemoryFS, is it a joke ?
"""""""""""""""""""""""""""""
I spent this day working on improving the test performance. By default Sheerka
persists its data on the file system (even if I said that where the data is saved
is not important for the sdp module).
For each test, a folder is initialized to hold concepts information. And this folder
is destroyed after usage. For almost every single test !
So I decided to implement fs.MemoryFS. Information in memory is supposed to be
faster than on the disk !
I was very disappointed, after an afternoon of refactoring, that it is actually slower
than the native io implementation.
Even now that I am writing it, I just can't believe it. I must have implemented
it wrong. But the profiling shows that the time is lost in the lower layers of the
FS library.
It's a shame !
+4
View File
@@ -1,10 +1,14 @@
appdirs==1.4.3
atomicwrites==1.3.0 atomicwrites==1.3.0
attrs==19.3.0 attrs==19.3.0
fs==2.4.11
more-itertools==7.2.0 more-itertools==7.2.0
packaging==19.2 packaging==19.2
pluggy==0.13.0 pluggy==0.13.0
py==1.8.0 py==1.8.0
Pygments==2.4.2
pyparsing==2.4.4 pyparsing==2.4.4
pytest==5.2.2 pytest==5.2.2
pytz==2019.3
six==1.13.0 six==1.13.0
wcwidth==0.1.7 wcwidth==0.1.7
+45 -70
View File
@@ -1,9 +1,11 @@
from os import path # from os import path
import os # import os
from datetime import datetime, date from datetime import datetime, date
import hashlib import hashlib
import json import json
import zlib import zlib
from sdp.sheerkaDataProviderIO import SheerkaDataProviderIO
from sdp.sheerkaSerializer import Serializer, SerializerContext from sdp.sheerkaSerializer import Serializer, SerializerContext
import logging import logging
@@ -278,23 +280,11 @@ class SheerkaDataProvider:
def __init__(self, root=None): def __init__(self, root=None):
log.debug("Initializing sdp.") log.debug("Initializing sdp.")
self.root = path.abspath(path.join(path.expanduser("~"), ".sheerka")) \ self.io = SheerkaDataProviderIO.get(root)
if root is None \ self.first_time = self.io.first_time
else path.abspath(root)
log.debug("root is set to '" + self.root + "'")
if not path.exists(self.root):
log.debug("root folder not found. Creating it.")
os.makedirs(self.root)
self.first_time = True
else:
self.first_time = False
self.serializer = Serializer() self.serializer = Serializer()
def get_obj_path(self, object_type, digest):
return path.join(self.root, object_type, digest[:24], digest)
@staticmethod @staticmethod
def get_obj_key(obj): def get_obj_key(obj):
""" """
@@ -620,15 +610,11 @@ class SheerkaDataProvider:
:return: digest of the event :return: digest of the event
""" """
digest = event.get_digest() digest = event.get_digest()
target_path = path.join(self.root, SheerkaDataProvider.EventFolder, digest[:24], digest) target_path = self.io.get_obj_path(SheerkaDataProvider.EventFolder, digest)
if path.exists(target_path): if self.io.exists(target_path):
return digest return digest
if not path.exists(path.dirname(target_path)): self.io.write_binary(target_path, self.serializer.serialize(event, None).read())
os.makedirs(path.dirname(target_path))
with open(target_path, "wb") as f:
f.write(self.serializer.serialize(event, None).read())
return digest return digest
@@ -638,31 +624,27 @@ class SheerkaDataProvider:
:param digest: :param digest:
:return: :return:
""" """
target_path = path.join(self.root, SheerkaDataProvider.EventFolder, digest[:24], digest) target_path = self.io.get_obj_path(SheerkaDataProvider.EventFolder, digest)
with open(target_path, "rb") as f:
with self.io.open(target_path, "rb") as f:
return self.serializer.deserialize(f, None) return self.serializer.deserialize(f, None)
def save_state(self, state: State): def save_state(self, state: State):
digest = state.get_digest() digest = state.get_digest()
log.debug(f"Saving new state. digest={digest}") log.debug(f"Saving new state. digest={digest}")
target_path = path.join(self.root, SheerkaDataProvider.StateFolder, digest[:24], digest) target_path = self.io.get_obj_path(SheerkaDataProvider.StateFolder, digest)
if path.exists(target_path): if self.io.exists(target_path):
return digest return digest
if not path.exists(path.dirname(target_path)): self.io.write_binary(target_path, self.serializer.serialize(state, None).read())
os.makedirs(path.dirname(target_path))
with open(target_path, "wb") as f:
f.write(self.serializer.serialize(state, None).read())
return digest return digest
def load_state(self, digest): def load_state(self, digest):
if digest is None: if digest is None:
return State() return State()
target_path = path.join(self.root, SheerkaDataProvider.StateFolder, digest[:24], digest) target_path = self.io.get_obj_path(SheerkaDataProvider.StateFolder, digest)
with open(target_path, "rb") as f: with self.io.open(target_path, "rb") as f:
return self.serializer.deserialize(f, None) return self.serializer.deserialize(f, None)
def save_obj(self, obj): def save_obj(self, obj):
@@ -670,16 +652,12 @@ class SheerkaDataProvider:
stream = self.serializer.serialize(obj, SerializerContext(user_name="kodjo")) stream = self.serializer.serialize(obj, SerializerContext(user_name="kodjo"))
digest = obj.get_digest() if hasattr(obj, "get_digest") else self.get_stream_digest(stream) digest = obj.get_digest() if hasattr(obj, "get_digest") else self.get_stream_digest(stream)
target_path = path.join(self.root, SheerkaDataProvider.ObjectsFolder, digest[:24], digest) target_path = self.io.get_obj_path(SheerkaDataProvider.ObjectsFolder, digest)
if path.exists(target_path): if self.io.exists(target_path):
log.debug(f"...already saved. digest is {digest}") log.debug(f"...already saved. digest is {digest}")
return digest return digest
if not path.exists(path.dirname(target_path)): self.io.write_binary(target_path, stream.read())
os.makedirs(path.dirname(target_path))
with open(target_path, "wb") as f:
f.write(stream.read())
log.debug(f"...digest={digest}.") log.debug(f"...digest={digest}.")
return digest return digest
@@ -688,11 +666,11 @@ class SheerkaDataProvider:
if digest is None: if digest is None:
return None return None
target_path = path.join(self.root, SheerkaDataProvider.ObjectsFolder, digest[:24], digest) target_path = self.io.get_obj_path(SheerkaDataProvider.ObjectsFolder, digest)
if not path.exists(target_path): if not self.io.exists(target_path):
return None return None
with open(target_path, "rb") as f: with self.io.open(target_path, "rb") as f:
obj = self.serializer.deserialize(f, SerializerContext(origin=digest)) obj = self.serializer.deserialize(f, SerializerContext(origin=digest))
# set the origin of the object # set the origin of the object
@@ -721,7 +699,7 @@ class SheerkaDataProvider:
def get_cache_params(self, category, key): def get_cache_params(self, category, key):
digest = hashlib.sha3_256(f"{category}:{key}".encode("utf-8")).hexdigest() digest = hashlib.sha3_256(f"{category}:{key}".encode("utf-8")).hexdigest()
cache_path = path.join(self.root, SheerkaDataProvider.CacheFolder, digest[:24], digest) cache_path = self.io.get_obj_path(SheerkaDataProvider.CacheFolder, digest)
return digest, cache_path return digest, cache_path
def add_to_cache(self, category, key, obj, update=False): def add_to_cache(self, category, key, obj, update=False):
@@ -735,15 +713,10 @@ class SheerkaDataProvider:
""" """
digest, cache_path = self.get_cache_params(category, key) digest, cache_path = self.get_cache_params(category, key)
if path.exists(cache_path) and not update: if self.io.exists(cache_path) and not update:
return digest return digest
if not path.exists(path.dirname(cache_path)): self.io.write_binary(cache_path, zlib.compress(obj.encode("utf-8"), 9))
os.makedirs(path.dirname(cache_path))
with open(cache_path, "wb") as f:
f.write(zlib.compress(obj.encode("utf-8"), 9))
return digest return digest
def load_from_cache(self, category, key): def load_from_cache(self, category, key):
@@ -755,10 +728,10 @@ class SheerkaDataProvider:
""" """
digest, cache_path = self.get_cache_params(category, key) digest, cache_path = self.get_cache_params(category, key)
if not path.exists(cache_path): if not self.io.exists(cache_path):
raise IndexError(f"{category}.{key}") raise IndexError(f"{category}.{key}")
with open(cache_path, "rb") as f: with self.io.open(cache_path, "rb") as f:
return zlib.decompress(f.read()).decode("utf-8") return zlib.decompress(f.read()).decode("utf-8")
def remove_from_cache(self, category, key): def remove_from_cache(self, category, key):
@@ -769,8 +742,8 @@ class SheerkaDataProvider:
:return: :return:
""" """
digest, cache_path = self.get_cache_params(category, key) digest, cache_path = self.get_cache_params(category, key)
if path.exists(cache_path): if self.io.exists(cache_path):
os.remove(cache_path) self.io.remove(cache_path)
return digest return digest
@@ -782,32 +755,34 @@ class SheerkaDataProvider:
:return: :return:
""" """
digest, cache_path = self.get_cache_params(category, key) digest, cache_path = self.get_cache_params(category, key)
return path.exists(cache_path) return self.io.exists(cache_path)
def get_snapshot(self): def get_snapshot(self):
head_file = path.join(self.root, SheerkaDataProvider.HeadFile) head_file = self.io.path_join(SheerkaDataProvider.HeadFile)
if not path.exists(head_file): if not self.io.exists(head_file):
return None return None
with open(head_file, "r") as f: return self.io.read_text(head_file)
return f.read() # with open(head_file, "r") as f:
# return f.read()
def set_snapshot(self, digest): def set_snapshot(self, digest):
head_file = path.join(self.root, SheerkaDataProvider.HeadFile) head_file = self.io.path_join(SheerkaDataProvider.HeadFile)
with open(head_file, "w") as f: return self.io.write_text(head_file, digest)
return f.write(digest) # with open(head_file, "w") as f:
# return f.write(digest)
def load_keys(self): def load_keys(self):
keys_file = path.join(self.root, SheerkaDataProvider.KeysFile) keys_file = self.io.path_join(SheerkaDataProvider.KeysFile)
if not path.exists(keys_file): if not self.io.exists(keys_file):
keys = {} keys = {}
else: else:
with open(keys_file, "r") as f: with self.io.open(keys_file, "r") as f:
keys = json.load(f) keys = json.load(f)
return keys return keys
def save_keys(self, keys): def save_keys(self, keys):
keys_file = path.join(self.root, SheerkaDataProvider.KeysFile) keys_file = self.io.path_join(SheerkaDataProvider.KeysFile)
with open(keys_file, "w") as f: with self.io.open(keys_file, "w") as f:
json.dump(keys, f) json.dump(keys, f)
def get_next_key(self, entry): def get_next_key(self, entry):
+189
View File
@@ -0,0 +1,189 @@
import io
from os import path
import os
from fs.memoryfs import MemoryFS
import logging
class SheerkaDataProviderIO:
    """
    Abstract IO backend for the SheerkaDataProvider.

    Concrete backends (file system, in-memory dictionary, ...) override the
    access primitives below. Use :meth:`get` to pick a backend from the
    root specifier.
    """

    def __init__(self, root):
        # Root folder (or prefix) under which every object path is built.
        self.root = root

    def exists(self, file_path):
        """Return True when *file_path* exists in the backend."""
        # Raising instead of silently returning None makes a missing
        # override fail loudly instead of corrupting caller logic.
        raise NotImplementedError

    def open(self, file_path, mode):
        """Open *file_path* with the given *mode* and return a file object."""
        raise NotImplementedError

    def read_text(self, file_path):
        """Return the whole content of *file_path* as a string."""
        raise NotImplementedError

    def read_binary(self, file_path):
        """Return the whole content of *file_path* as bytes."""
        raise NotImplementedError

    def write_text(self, file_path, content):
        """Write the string *content* to *file_path*, creating parent folders."""
        raise NotImplementedError

    def write_binary(self, file_path, content):
        """Write the bytes *content* to *file_path*, creating parent folders."""
        raise NotImplementedError

    def remove(self, file_path):
        """Delete *file_path* from the backend."""
        raise NotImplementedError

    @staticmethod
    def get(root):
        """
        Factory returning the backend matching *root*.

        ``"mem://"`` selects the dictionary-backed in-memory store (used by
        the tests to avoid disk IO); anything else selects the file-system
        backend rooted at *root*.
        """
        if root == "mem://":
            return SheerkaDataProviderDictionaryIO()
        else:
            return SheerkaDataProviderFileIO(root)

    def get_obj_path(self, object_type, digest):
        """Return the storage path of an object: root/type/digest[:24]/digest."""
        return path.join(self.root, object_type, digest[:24], digest)

    def path_join(self, *paths):
        """Join *paths* under the backend root."""
        return path.join(self.root, *paths)
class SheerkaDataProviderFileIO(SheerkaDataProviderIO):
    """IO backend persisting data on the local file system."""

    log = logging.getLogger("FileIO")

    def __init__(self, root):
        # Default to ~/.sheerka when no explicit root is provided.
        root = path.abspath(path.join(path.expanduser("~"), ".sheerka")) \
            if root is None \
            else path.abspath(root)
        super().__init__(root)
        self.log.debug("root is set to '" + self.root + "'")
        # first_time tells the caller whether the store had to be created.
        self.first_time = not path.exists(self.root)
        if self.first_time:
            self.log.debug("root folder not found. Creating it.")
        # exist_ok avoids a race if another process creates the folder
        # between the check above and this call.
        os.makedirs(self.root, exist_ok=True)

    def open(self, file_path, mode):
        """Open *file_path* with the given *mode* and return the file object."""
        return open(file_path, mode)

    def read_text(self, file_path):
        """Return the whole content of *file_path* as a string.

        The store holds JSON/digest data, so the encoding is pinned to
        UTF-8 instead of depending on the platform locale.
        """
        with open(file_path, encoding="utf-8") as f:
            return f.read()

    def read_binary(self, file_path):
        """Return the whole content of *file_path* as bytes."""
        with open(file_path, "rb") as f:
            return f.read()

    def write_text(self, file_path, content):
        """Write the string *content* to *file_path*, creating parent folders."""
        self._write(file_path, content, "w")

    def write_binary(self, file_path, content):
        """Write the bytes *content* to *file_path*, creating parent folders."""
        self._write(file_path, content, "wb")

    def exists(self, file_path):
        """Return True when *file_path* exists on disk."""
        return path.exists(file_path)

    def remove(self, file_path):
        """Delete *file_path* from disk."""
        os.remove(file_path)

    @staticmethod
    def _write(file_path, content, mode):
        """Write *content* to *file_path*, creating the parent folder if needed."""
        parent = path.dirname(file_path)
        if parent:
            # exist_ok removes the exists-then-makedirs race of the naive check.
            os.makedirs(parent, exist_ok=True)
        if "b" in mode:
            with open(file_path, mode) as f:
                f.write(content)
        else:
            with open(file_path, mode, encoding="utf-8") as f:
                f.write(content)
class SheerkaDataProviderMemoryIO(SheerkaDataProviderIO):
    """IO backend backed by an in-memory pyfilesystem2 ``MemoryFS``."""

    log = logging.getLogger("MemoryIO")

    def __init__(self):
        super().__init__("")
        self.mem_fs = MemoryFS()
        self.log.debug("Initializing memory file.")
        # A freshly created in-memory store is always empty.
        self.first_time = True

    def _ensure_parent_folder(self, file_path):
        # MemoryFS refuses to write into a missing folder; create it lazily.
        parent = path.dirname(file_path)
        if not self.mem_fs.exists(parent):
            self.mem_fs.makedirs(parent)

    def exists(self, file_path):
        """Return True when *file_path* exists in the memory file system."""
        return self.mem_fs.exists(file_path)

    def open(self, file_path, mode):
        """Open *file_path* with the given *mode* on the memory file system."""
        return self.mem_fs.open(file_path, mode)

    def read_text(self, file_path):
        """Return the whole content of *file_path* as a string."""
        return self.mem_fs.readtext(file_path)

    def read_binary(self, file_path):
        """Return the whole content of *file_path* as bytes."""
        return self.mem_fs.readbytes(file_path)

    def write_text(self, file_path, content):
        """Write the string *content*, creating the parent folder if needed."""
        self._ensure_parent_folder(file_path)
        self.mem_fs.writetext(file_path, content)

    def write_binary(self, file_path, content):
        """Write the bytes *content*, creating the parent folder if needed."""
        self._ensure_parent_folder(file_path)
        self.mem_fs.writebytes(file_path, content)

    def remove(self, file_path):
        """Delete *file_path* from the memory file system."""
        self.mem_fs.remove(file_path)
class SheerkaDataProviderDictionaryIO(SheerkaDataProviderIO):
    """
    IO backend keeping "file" contents in a plain dict keyed by path.

    Skips the whole file-system abstraction layer, which is what makes
    the test suite fast.
    """

    log = logging.getLogger("DictionaryIO")

    def __init__(self):
        super().__init__("")
        # Maps file path -> content (str or bytes, as written).
        self.cache = {}
        self.log.debug("Initializing dictionary file.")
        self.first_time = True

    def exists(self, file_path):
        """Return True when *file_path* has been written (the empty path
        stands for the root folder, which always exists)."""
        return file_path == "" or file_path in self.cache

    def read_text(self, file_path):
        """Return the stored content of *file_path* (raises KeyError if missing)."""
        return self.cache[file_path]

    def read_binary(self, file_path):
        """Return the stored content of *file_path* (raises KeyError if missing)."""
        return self.cache[file_path]

    def write_text(self, file_path, content):
        """Store the string *content* under *file_path*."""
        self.cache[file_path] = content

    def write_binary(self, file_path, content):
        """Store the bytes *content* under *file_path*."""
        self.cache[file_path] = content

    def remove(self, file_path):
        """Delete *file_path* from the cache (raises KeyError if missing)."""
        del self.cache[file_path]

    def open(self, file_path, mode):
        """Open a stream over the cached content.

        Write mode returns a fresh buffer whose ``close`` is wrapped so the
        buffer is flushed back into the cache; read mode wraps the cached
        content directly.
        """
        binary = "b" in mode
        if "w" not in mode:
            content = self.cache[file_path]
            return io.BytesIO(content) if binary else io.StringIO(content)
        stream = io.BytesIO() if binary else io.StringIO()
        # Persist the buffer into the cache when the caller closes it.
        stream.close = on_close(self, file_path, stream)(stream.close)
        return stream
def on_close(dictionary_io, file_path, stream):
    """
    Build a decorator that snapshots *stream* into the dictionary cache
    right before the wrapped close function runs.

    I guess that there are more elegant solutions.

    :param dictionary_io: object exposing a ``cache`` dict keyed by path
    :param file_path: key under which the stream content is stored
    :param stream: the in-memory stream whose content must be persisted
    :return: decorator for the stream's ``close`` method
    """
    def decorator(close_func):
        def flush_then_close(*args, **kwargs):
            # Rewind first: the caller has just finished writing, so the
            # cursor sits at the end of the buffer.
            stream.seek(0)
            dictionary_io.cache[file_path] = stream.read()
            close_func(*args, **kwargs)
        return flush_then_close
    return decorator
+1 -23
View File
@@ -1,7 +1,3 @@
import os
import shutil
from os import path
import pytest import pytest
from core.builtin_concepts import ReturnValueConcept, ParserResultConcept, BuiltinConcepts from core.builtin_concepts import ReturnValueConcept, ParserResultConcept, BuiltinConcepts
@@ -10,27 +6,10 @@ from core.sheerka import Sheerka, ExecutionContext
from evaluators.ConceptEvaluator import ConceptEvaluator from evaluators.ConceptEvaluator import ConceptEvaluator
from parsers.BaseParser import BaseParser from parsers.BaseParser import BaseParser
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"
@pytest.fixture(autouse=True)
def init_test():
if path.exists(tests_root):
shutil.rmtree(tests_root)
if not path.exists(tests_root):
os.makedirs(tests_root)
current_pwd = os.getcwd()
os.chdir(tests_root)
yield None
os.chdir(current_pwd)
def get_context(): def get_context():
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize("mem://")
return ExecutionContext("test", "xxx", sheerka) return ExecutionContext("test", "xxx", sheerka)
@@ -218,4 +197,3 @@ def test_i_cannot_recognize_a_concept_if_one_of_the_prop_is_unknown():
assert result.value.property_name == "b" assert result.value.property_name == "b"
assert context.sheerka.isinstance(result.value.error, BuiltinConcepts.TOO_MANY_ERRORS) assert context.sheerka.isinstance(result.value.error, BuiltinConcepts.TOO_MANY_ERRORS)
assert result.value.concept == concept_plus assert result.value.concept == concept_plus
+3 -27
View File
@@ -1,36 +1,12 @@
import os
import shutil
import pytest import pytest
from os import path
import ast import ast
from core.builtin_concepts import ParserResultConcept, BuiltinConcepts, ReturnValueConcept from core.builtin_concepts import ParserResultConcept, BuiltinConcepts, ReturnValueConcept
from core.sheerka import Sheerka, ExecutionContext from core.sheerka import Sheerka, ExecutionContext
from parsers.BaseParser import BaseParser from parsers.PythonParser import PythonParser, PythonNode
from parsers.PythonParser import PythonParser, PythonNode, PythonErrorNode
from core.tokenizer import Keywords, Tokenizer from core.tokenizer import Keywords, Tokenizer
from parsers.DefaultParser import DefaultParser, NameNode, SyntaxErrorNode from parsers.DefaultParser import DefaultParser, NameNode, SyntaxErrorNode
# from parsers.DefaultParser import NumberNode, StringNode, VariableNode, TrueNode, FalseNode, NullNode, BinaryNode from parsers.DefaultParser import UnexpectedTokenErrorNode, DefConceptNode
from parsers.DefaultParser import UnexpectedTokenErrorNode, DefConceptNode, NopNode
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"
@pytest.fixture(autouse=True)
def init_test():
if path.exists(tests_root):
shutil.rmtree(tests_root)
if not path.exists(tests_root):
os.makedirs(tests_root)
current_pwd = os.getcwd()
os.chdir(tests_root)
yield None
os.chdir(current_pwd)
# def nop(): # def nop():
# return NopNode() # return NopNode()
@@ -92,7 +68,7 @@ def get_concept(name, where=None, pre=None, post=None, body=None):
def get_context(): def get_context():
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize("mem://")
return ExecutionContext("test", "xxx", sheerka) return ExecutionContext("test", "xxx", sheerka)
+2 -26
View File
@@ -1,33 +1,9 @@
import pytest from core.builtin_concepts import BuiltinConcepts
from os import path
import shutil
import os
from core.builtin_concepts import ParserResultConcept, BuiltinConcepts
from core.concept import Concept, Property from core.concept import Concept, Property
from core.sheerka import Sheerka, ExecutionContext from core.sheerka import Sheerka, ExecutionContext
from core.tokenizer import Tokenizer from core.tokenizer import Tokenizer
from parsers.DefaultParser import DefaultParser
from parsers.ExactConceptParser import ExactConceptParser from parsers.ExactConceptParser import ExactConceptParser
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"
@pytest.fixture(autouse=True)
def init_test():
if path.exists(tests_root):
shutil.rmtree(tests_root)
if not path.exists(tests_root):
os.makedirs(tests_root)
current_pwd = os.getcwd()
os.chdir(tests_root)
yield None
os.chdir(current_pwd)
def test_i_can_compute_combinations(): def test_i_can_compute_combinations():
parser = ExactConceptParser() parser = ExactConceptParser()
@@ -150,7 +126,7 @@ def test_i_can_detect_concept_from_tokens():
def get_context(): def get_context():
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize("mem://")
return ExecutionContext("sheerka", "xxxx", sheerka) return ExecutionContext("sheerka", "xxxx", sheerka)
+1 -24
View File
@@ -1,9 +1,3 @@
import os
import shutil
from os import path
import pytest
from core.builtin_concepts import ReturnValueConcept, BuiltinConcepts from core.builtin_concepts import ReturnValueConcept, BuiltinConcepts
from core.concept import Concept from core.concept import Concept
from core.sheerka import Sheerka, ExecutionContext from core.sheerka import Sheerka, ExecutionContext
@@ -11,27 +5,10 @@ from evaluators.BaseEvaluator import BaseEvaluator
from evaluators.MutipleSameSuccessEvaluator import MultipleSameSuccessEvaluator from evaluators.MutipleSameSuccessEvaluator import MultipleSameSuccessEvaluator
from parsers.BaseParser import BaseParser from parsers.BaseParser import BaseParser
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"
@pytest.fixture(autouse=True)
def init_test():
if path.exists(tests_root):
shutil.rmtree(tests_root)
if not path.exists(tests_root):
os.makedirs(tests_root)
current_pwd = os.getcwd()
os.chdir(tests_root)
yield None
os.chdir(current_pwd)
def get_context(): def get_context():
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize("mem://")
return ExecutionContext("test", "xxx", sheerka) return ExecutionContext("test", "xxx", sheerka)
+1 -21
View File
@@ -1,7 +1,4 @@
import pytest import pytest
import shutil
from os import path
import os
from core.builtin_concepts import ReturnValueConcept, ParserResultConcept from core.builtin_concepts import ReturnValueConcept, ParserResultConcept
from core.sheerka import Sheerka, ExecutionContext from core.sheerka import Sheerka, ExecutionContext
@@ -9,27 +6,10 @@ from core.concept import Concept
from evaluators.PythonEvaluator import PythonEvaluator from evaluators.PythonEvaluator import PythonEvaluator
from parsers.PythonParser import PythonNode, PythonParser from parsers.PythonParser import PythonNode, PythonParser
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"
@pytest.fixture(autouse=True)
def init_test():
if path.exists(tests_root):
shutil.rmtree(tests_root)
if not path.exists(tests_root):
os.makedirs(tests_root)
current_pwd = os.getcwd()
os.chdir(tests_root)
yield None
os.chdir(current_pwd)
def get_context(): def get_context():
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize("mem://")
return ExecutionContext("test", "xxx", sheerka) return ExecutionContext("test", "xxx", sheerka)
+1 -22
View File
@@ -1,37 +1,16 @@
import ast import ast
import os
import shutil
from os import path
import pytest import pytest
from core.builtin_concepts import ParserResultConcept from core.builtin_concepts import ParserResultConcept
from core.sheerka import Sheerka, ExecutionContext from core.sheerka import Sheerka, ExecutionContext
from core.tokenizer import Tokenizer from core.tokenizer import Tokenizer
from parsers.BaseParser import BaseParser
from parsers.PythonParser import PythonNode, PythonParser, PythonErrorNode from parsers.PythonParser import PythonNode, PythonParser, PythonErrorNode
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"
@pytest.fixture(autouse=True)
def init_test():
if path.exists(tests_root):
shutil.rmtree(tests_root)
if not path.exists(tests_root):
os.makedirs(tests_root)
current_pwd = os.getcwd()
os.chdir(tests_root)
yield None
os.chdir(current_pwd)
def get_context(): def get_context():
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize("mem://")
return ExecutionContext("test", "xxx", sheerka) return ExecutionContext("test", "xxx", sheerka)
+1 -24
View File
@@ -1,8 +1,3 @@
import os
import shutil
from os import path
import pytest
import ast import ast
from core.ast.nodes import NodeParent, GenericNodeConcept from core.ast.nodes import NodeParent, GenericNodeConcept
@@ -11,28 +6,10 @@ from core.ast.visitors import ConceptNodeVisitor, UnreferencedNamesVisitor
from core.builtin_concepts import BuiltinConcepts from core.builtin_concepts import BuiltinConcepts
from core.sheerka import Sheerka from core.sheerka import Sheerka
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"
@pytest.fixture(autouse=True)
def init_test():
if path.exists(tests_root):
shutil.rmtree(tests_root)
if not path.exists(tests_root):
os.makedirs(tests_root)
current_pwd = os.getcwd()
os.chdir(tests_root)
yield None
os.chdir(current_pwd)
def get_sheerka(): def get_sheerka():
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize("mem://")
return sheerka return sheerka
+1 -25
View File
@@ -1,31 +1,7 @@
import shutil
from os import path
import os
import pytest
from core.builtin_concepts import ReturnValueConcept, BuiltinConcepts from core.builtin_concepts import ReturnValueConcept, BuiltinConcepts
from core.sheerka import Sheerka, ExecutionContext from core.sheerka import Sheerka, ExecutionContext
import core.builtin_helpers import core.builtin_helpers
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"
@pytest.fixture(autouse=True)
def init_test():
if path.exists(tests_root):
shutil.rmtree(tests_root)
if not path.exists(tests_root):
os.makedirs(tests_root)
current_pwd = os.getcwd()
os.chdir(tests_root)
yield None
os.chdir(current_pwd)
def test_i_can_use_expect_one_when_empty(): def test_i_can_use_expect_one_when_empty():
sheerka = get_sheerka() sheerka = get_sheerka()
@@ -136,7 +112,7 @@ def test_i_can_use_expect_one_when_not_a_list_false():
def get_sheerka(): def get_sheerka():
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize("mem://")
return sheerka return sheerka
+9 -12
View File
@@ -5,14 +5,11 @@ import os
from os import path from os import path
import shutil import shutil
from core import utils
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
from core.concept import Concept, ConceptParts from core.concept import Concept
from core.sheerka import Sheerka, ExecutionContext from core.sheerka import Sheerka, ExecutionContext
from evaluators.MutipleSameSuccessEvaluator import MultipleSameSuccessEvaluator from evaluators.MutipleSameSuccessEvaluator import MultipleSameSuccessEvaluator
from parsers.DefaultParser import DefaultParser from sdp.sheerkaDataProvider import SheerkaDataProvider
from parsers.PythonParser import PythonParser
from sdp.sheerkaDataProvider import SheerkaDataProvider, SheerkaDataProviderDuplicateKeyError
tests_root = path.abspath("../build/tests") tests_root = path.abspath("../build/tests")
root_folder = "init_folder" root_folder = "init_folder"
@@ -64,12 +61,12 @@ def test_builtin_concepts_are_initialized():
def test_builtin_concepts_can_be_updated(): def test_builtin_concepts_can_be_updated():
sheerka = get_sheerka() sheerka = get_sheerka(root_folder)
loaded_sheerka = sheerka.get(BuiltinConcepts.SHEERKA) loaded_sheerka = sheerka.get(BuiltinConcepts.SHEERKA)
loaded_sheerka.desc = "I have a description" loaded_sheerka.desc = "I have a description"
sheerka.sdp.modify("Test", sheerka.CONCEPTS_ENTRY, loaded_sheerka.key, loaded_sheerka) sheerka.sdp.modify("Test", sheerka.CONCEPTS_ENTRY, loaded_sheerka.key, loaded_sheerka)
sheerka = get_sheerka() sheerka = get_sheerka(root_folder)
loaded_sheerka = sheerka.get(BuiltinConcepts.SHEERKA) loaded_sheerka = sheerka.get(BuiltinConcepts.SHEERKA)
assert loaded_sheerka.desc == "I have a description" assert loaded_sheerka.desc == "I have a description"
@@ -92,7 +89,7 @@ def test_i_can_add_a_concept():
assert concept_found.id == "1001" assert concept_found.id == "1001"
assert concept.key in sheerka.concepts_cache assert concept.key in sheerka.concepts_cache
assert path.exists(sheerka.sdp.get_obj_path(SheerkaDataProvider.ObjectsFolder, concept_found.get_digest())) assert sheerka.sdp.io.exists(sheerka.sdp.io.get_obj_path(SheerkaDataProvider.ObjectsFolder, concept_found.get_digest()))
def test_i_cannot_add_the_same_concept_twice(): def test_i_cannot_add_the_same_concept_twice():
@@ -417,7 +414,7 @@ as:
assert getattr(concept_saved, prop) == getattr(expected, prop) assert getattr(concept_saved, prop) == getattr(expected, prop)
assert concept_saved.key in sheerka.concepts_cache assert concept_saved.key in sheerka.concepts_cache
assert path.exists(sheerka.sdp.get_obj_path(SheerkaDataProvider.ObjectsFolder, concept_saved.get_digest())) assert sheerka.sdp.io.exists(sheerka.sdp.io.get_obj_path(SheerkaDataProvider.ObjectsFolder, concept_saved.get_digest()))
def test_i_can_eval_def_concept_part_when_one_part_is_a_ref_of_another_concept(): def test_i_can_eval_def_concept_part_when_one_part_is_a_ref_of_another_concept():
@@ -446,7 +443,7 @@ def test_i_can_eval_def_concept_part_when_one_part_is_a_ref_of_another_concept()
assert getattr(concept_saved, prop) == getattr(expected, prop) assert getattr(concept_saved, prop) == getattr(expected, prop)
assert concept_saved.key in sheerka.concepts_cache assert concept_saved.key in sheerka.concepts_cache
assert path.exists(sheerka.sdp.get_obj_path(SheerkaDataProvider.ObjectsFolder, concept_saved.get_digest())) assert sheerka.sdp.io.exists(sheerka.sdp.io.get_obj_path(SheerkaDataProvider.ObjectsFolder, concept_saved.get_digest()))
def test_i_cannot_eval_the_same_def_concept_twice(): def test_i_cannot_eval_the_same_def_concept_twice():
@@ -558,9 +555,9 @@ def test_i_can_manage_concepts_with_the_same_key_when_values_are_the_same():
assert res[0].who == sheerka.get_evaluator_name(MultipleSameSuccessEvaluator.NAME) assert res[0].who == sheerka.get_evaluator_name(MultipleSameSuccessEvaluator.NAME)
def get_sheerka(): def get_sheerka(root="mem://"):
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize(root)
return sheerka return sheerka
File diff suppressed because it is too large Load Diff
+1 -1
View File
@@ -2,7 +2,7 @@ import pytest
from dataclasses import dataclass from dataclasses import dataclass
from sdp.sheerkaDataProvider import Event from sdp.sheerkaDataProvider import Event
from sdp.sheerkaSerializer import Serializer, ObjectSerializer, SerializerContext, BaseSerializer from sdp.sheerkaSerializer import Serializer, ObjectSerializer, SerializerContext
from datetime import datetime from datetime import datetime
import core.utils import core.utils