|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from common.global_symbols import NoFirstToken, NotFound, NotInit, Removed
|
||||
from helpers import get_concept, get_concepts
|
||||
from ontologies.SheerkaOntologyManager import SheerkaOntologyManager
|
||||
from parsers.tokenizer import Keywords
|
||||
from sheerkapickle import tags
|
||||
from sheerkapickle.sheerkaplicker import SheerkaPickler
|
||||
from sheerkapickle.sheerkaunpickler import SheerkaUnpickler
|
||||
|
||||
|
||||
class Obj:
    """Plain three-attribute value object used as a serialization fixture.

    Equality and hashing are defined over the ``(a, b, c)`` triple so tests
    can assert lossless round trips and use instances as dict/set members.
    """

    def __init__(self, a, b, c):
        self.a = a
        self.b = b
        self.c = c

    def __eq__(self, other):
        """Value equality over (a, b, c); identical objects short-circuit."""
        if self is other:
            return True

        if not isinstance(other, Obj):
            return False

        return (self.a, self.b, self.c) == (other.a, other.b, other.c)

    def __hash__(self):
        # Kept consistent with __eq__: equal objects hash equally.
        return hash((self.a, self.b, self.c))
class TestSheerkaPickler(BaseTest):
    """Round-trip tests for ``SheerkaPickler.flatten`` / ``SheerkaUnpickler.restore``.

    Each test flattens a Python object into a JSON-friendly structure and,
    where the conversion is lossless, restores it and asserts equality with
    the original.  NOTE(review): the ``sheerka`` and ``context`` fixtures are
    provided by ``BaseTest``/conftest outside this file.
    """

    @pytest.mark.parametrize("obj, expected", [
        (1, 1),
        (3.14, 3.14),
        ("a string", "a string"),
        (True, True),
        (None, None),
        ([1, 3.14, "a string"], [1, 3.14, "a string"]),
        ((1, 3.14, "a string"), {tags.TUPLE: [1, 3.14, "a string"]}),
        ({1}, {tags.SET: [1]}),
        ({"a": "a", "b": 3.14, "c": True}, {"a": "a", "b": 3.14, "c": True}),
        ({1: "a", 2: 3.14, 3: True}, {1: "a", 2: 3.14, 3: True}),
        ([1, [3.14, "a string"]], [1, [3.14, "a string"]]),
        ([1, (3.14, "a string")], [1, {tags.TUPLE: [3.14, "a string"]}]),
        ([], []),
        (Keywords.DEF, {tags.ENUM: 'parsers.tokenizer.Keywords.DEF'}),
    ])
    def test_i_can_flatten_and_restore_primitives(self, sheerka, obj, expected):
        """Scalars, lists and dicts flatten to themselves; tuples, sets and
        enums are wrapped in tagged dicts — and all round-trip losslessly."""
        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == expected

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    @pytest.mark.parametrize("obj, expected", [
        (NotInit, {tags.CUSTOM: NotInit.value}),
        (NotFound, {tags.CUSTOM: NotFound.value}),
        (Removed, {tags.CUSTOM: Removed.value}),
        (NoFirstToken, {tags.CUSTOM: NoFirstToken.value}),
    ])
    def test_i_can_flatten_and_restore_custom_types(self, sheerka, obj, expected):
        """Sentinel symbols from ``common.global_symbols`` flatten to a
        ``tags.CUSTOM`` wrapper around their ``.value`` and restore intact."""
        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == expected

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    def test_i_can_flatten_and_restore_instances(self, sheerka):
        """Nested instances flatten to dicts carrying a '_sheerka/obj' key
        with the dotted class path, recursing into attribute values."""
        obj1 = Obj(1, "b", True)
        obj2 = Obj(3.14, ("a", "b"), obj1)

        flatten = SheerkaPickler(sheerka).flatten(obj2)
        assert flatten == {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                           'a': 3.14,
                           'b': {'_sheerka/tuple': ['a', 'b']},
                           'c': {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                                 'a': 1,
                                 'b': 'b',
                                 'c': True}}

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj2

    def test_i_can_manage_circular_reference(self, sheerka):
        """A self-reference is emitted as a '_sheerka/id' back-reference
        instead of recursing forever, and restore re-links the cycle."""
        obj1 = Obj(1, "b", True)
        obj1.c = obj1

        flatten = SheerkaPickler(sheerka).flatten(obj1)
        assert flatten == {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                           'a': 1,
                           'b': 'b',
                           'c': {'_sheerka/id': 0}}

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded.a == obj1.a
        assert decoded.b == obj1.b
        # The restored object must point at itself, closing the cycle.
        assert decoded.c == decoded

    def test_i_can_flatten_obj_with_new_props(self, sheerka):
        """Attributes added after __init__ are serialized and restored too."""
        # property 'z' is not part of the `Obj` definition
        obj = Obj(1, "b", True)
        obj.z = "new prop"

        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                           'a': 1,
                           'b': 'b',
                           'c': True,
                           'z': "new prop"}

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    def test_i_cannot_correctly_flatten_compiled_and_generator(self, sheerka):
        """Documents a known limitation: generators and code objects degrade
        to their repr strings, so they cannot be restored (lossy)."""
        obj = Obj((i for i in range(3)), compile("a + b", "<src>", mode="eval"), None)

        flatten = SheerkaPickler(sheerka).flatten(obj)

        assert isinstance(flatten["a"], str)
        assert flatten["a"].startswith("<generator object")
        assert isinstance(flatten["b"], str)
        assert flatten["b"].startswith("<code object")
        assert flatten["c"] is None

    @pytest.mark.parametrize("obj, expected", [
        ({None: "a"}, {'null': "a"}),
        ({Keywords.DEF: "a"}, {'parsers.tokenizer.Keywords.DEF': 'a'}),
        ({(1, 2): "a"}, {(1, 2): "a"}),
    ])
    def test_i_can_manage_specific_keys_in_dictionaries(self, sheerka, obj, expected):
        """Dict keys: ``None`` becomes the string 'null', enum keys become
        their dotted path, tuple keys are passed through unchanged."""
        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == expected

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    @pytest.mark.skip("Concepts are not fully working")
    def test_i_can_use_concept_as_dictionary_key(self, sheerka, context):
        """Concept keys should flatten to a 'c:<name>|<id>:' string key.

        Currently skipped — concept support is incomplete (see skip reason).
        """
        concept = get_concepts(context, "foo", use_sheerka=True)[0]

        obj = {concept: "a"}
        flatten = SheerkaPickler(sheerka).flatten(obj)
        assert flatten == {'c:foo|1001:': 'a'}

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    def test_i_can_manage_references(self, sheerka):
        """Repeated values (enums, instances) are flattened once; later
        occurrences become '_sheerka/id' references to the first emission."""
        foo = Obj("foo", "bar", "baz")
        obj = [Keywords.DEF, foo, Keywords.WHERE, Keywords.DEF, foo]
        flatten = SheerkaPickler(sheerka).flatten(obj)

        assert flatten == [{'_sheerka/enum': 'parsers.tokenizer.Keywords.DEF'},
                           {'_sheerka/obj': 'tests.sheerkapickle.test_SheerkaPickler.Obj',
                            'a': 'foo',
                            'b': 'bar',
                            'c': 'baz'},
                           {'_sheerka/enum': 'parsers.tokenizer.Keywords.WHERE'},
                           {'_sheerka/id': 0},
                           {'_sheerka/id': 1}]

        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == obj

    def test_i_do_not_encode_logger(self, sheerka):
        """``logging.Logger`` values are dropped during flattening and come
        back as ``None`` after restore, wherever they appear."""
        logger = logging.getLogger("log_name")
        logger2 = logging.getLogger("log_name2")
        obj = Obj("foo", logger, {"a": logger, "b": logger2})

        flatten = SheerkaPickler(sheerka).flatten(obj)
        decoded = SheerkaUnpickler(sheerka).restore(flatten)
        assert decoded == Obj("foo", None, {"a": None, "b": None})

    def test_ontology_are_not_serialized(self, sheerka, context):
        """Ontology objects flatten to a '__ONTOLOGY:<name>__' marker string
        rather than serializing their contents."""
        om = SheerkaOntologyManager(sheerka, "mem://").freeze()
        ontology = om.push_ontology("new ontology")

        flatten = SheerkaPickler(sheerka).flatten(ontology)
        assert flatten == "__ONTOLOGY:new ontology__"
||||