Added basic implementation for Python code evaluation

This commit is contained in:
2019-11-07 17:18:07 +01:00
parent b818c992ec
commit 448ebc696a
18 changed files with 501 additions and 156 deletions
+25 -2
View File
@@ -95,11 +95,34 @@ class Concept:
class ErrorConcept(Concept): class ErrorConcept(Concept):
NAME = "Error"
def __init__(self, where=None, pre=None, post=None, body=None, desc=None): def __init__(self, where=None, pre=None, post=None, body=None, desc=None):
Concept.__init__(self, "error", is_builtin=True, where=where, pre=pre, post=post, body=body, desc=desc) Concept.__init__(self, self.NAME, is_builtin=True, where=where, pre=pre, post=post, body=body, desc=desc)
self.key = self.NAME
def __repr__(self): def __repr__(self):
return f"({self.id}){self.name} : {self.body}" return f"({self.id}){self.name}: {self.body}"
class TooManySuccessConcept(Concept):
NAME = "Too many successful items"
def __init__(self, items=None):
super().__init__(self.NAME, body=items)
self.key = self.NAME
class ReturnValueConcept(Concept):
NAME = "Return Value"
def __init__(self, return_value=None):
super().__init__(self.NAME, body=return_value)
self.key = self.NAME
def __repr__(self):
return f"({self.id}){self.name}: {self.body}"
class Property: class Property:
""" """
+121 -41
View File
@@ -1,9 +1,10 @@
from dataclasses import dataclass from dataclasses import dataclass
from core.concept import Concept, ErrorConcept, Property from core.concept import Concept, ErrorConcept, Property, TooManySuccessConcept, ReturnValueConcept
from parsers.PythonParser import PythonParser, PythonGetNamesVisitor, PythonNode from parsers.PythonParser import PythonParser, PythonGetNamesVisitor, PythonNode
from sdp.sheerkaDataProvider import SheerkaDataProvider, Event, SheerkaDataProviderDuplicateKeyError from sdp.sheerkaDataProvider import SheerkaDataProvider, Event, SheerkaDataProviderDuplicateKeyError
from parsers.DefaultParser import DefaultParser, DefConceptNode from parsers.DefaultParser import DefaultParser, DefConceptNode
import core.utils
import logging import logging
@@ -26,6 +27,7 @@ class ReturnValue:
To avoid using the try/except pattern for each and every call To avoid using the try/except pattern for each and every call
To give context (ie return message) even when the call is successful To give context (ie return message) even when the call is successful
""" """
who: object
status: bool status: bool
value: Concept value: Concept
message: str = None message: str = None
@@ -36,29 +38,29 @@ class ExecutionContext:
""" """
To keep track of the execution of a request To keep track of the execution of a request
""" """
sheerka: object
event_digest: str event_digest: str
class Sheerka(Concept, metaclass=Singleton): class Sheerka(Concept):
""" """
Main controller for the project Main controller for the project
""" """
NAME = "Sheerka" NAME = "Sheerka"
UNKNOWN_CONCEPT_NAME = "Unknown Concept" UNKNOWN_CONCEPT_NAME = "Unknown Concept"
ERROR_CONCEPT_NAME = "Error"
SUCCESS_CONCEPT_NAME = "Success" SUCCESS_CONCEPT_NAME = "Success"
CONCEPTS_ENTRY = "Concepts" CONCEPTS_ENTRY = "All_Concepts"
BUILTIN_CONCEPTS_KEYS = "Builtins" BUILTIN_CONCEPTS_KEYS = "Builtins_Concepts"
USER_CONCEPTS_KEYS = "Concepts" USER_CONCEPTS_KEYS = "User_Concepts"
def __init__(self): def __init__(self, debug=False):
log.debug("Starting Sheerka.") log.debug("Starting Sheerka.")
super().__init__(Sheerka.NAME) super().__init__(Sheerka.NAME)
# cache of the most used concepts # cache of the most used concepts
self.concepts_cache = [] self.concepts_cache = {}
# a concept can be instantiated # a concept can be instantiated
# ex: File is a concept, but File('foo.txt') is an instance # ex: File is a concept, but File('foo.txt') is an instance
@@ -71,30 +73,39 @@ class Sheerka(Concept, metaclass=Singleton):
self.sdp = None self.sdp = None
self.parsers = [] self.parsers = []
self.evaluators = []
self.key = self.NAME self.key = self.NAME
self.debug = debug
def initialize(self, root_folder=None): def initialize(self, root_folder=None):
""" """
Starting Sheerka Starting Sheerka
Loads the current configuration Loads the current configuration
Notes that when it's the first time, it also create the needed working folders Notes that when it's the first time, it also create the needed working folders
:param debug:
:param root_folder: root configuration folder :param root_folder: root configuration folder
:return: ReturnValue(Success or Error) :return: ReturnValue(Success or Error)
""" """
try: try:
self.init_logging()
self.sdp = SheerkaDataProvider(root_folder) self.sdp = SheerkaDataProvider(root_folder)
self.parsers.append(lambda text: DefaultParser(text, PythonParser)) self.parsers.append(lambda text: DefaultParser(text, PythonParser))
self.parsers.append(lambda text: PythonParser(text))
self.evaluators.append(core.utils.get_object("evaluators.DefaultEvaluator.DefaultEvaluator"))
self.evaluators.append(core.utils.get_object("evaluators.AddConceptEvaluator.AddConceptEvaluator"))
self.evaluators.append(core.utils.get_object("evaluators.PythonEvaluator.PythonEvaluator"))
if self.sdp.first_time: if self.sdp.first_time:
self.sdp.set_key(self.USER_CONCEPTS_KEYS, 1000) self.sdp.set_key(self.USER_CONCEPTS_KEYS, 1000)
self.create_builtin_concepts() self.create_builtin_concepts()
except IOError as e: except IOError as e:
return ReturnValue(False, self.get_concept(Sheerka.ERROR_CONCEPT_NAME), e) return ReturnValue(self, False, self.get(Sheerka.ERROR_CONCEPT_NAME), e)
return ReturnValue(True, self.get_concept(Sheerka.SUCCESS_CONCEPT_NAME)) return ReturnValue(self, True, self.get(Sheerka.SUCCESS_CONCEPT_NAME))
def set_id_if_needed(self, obj, is_builtin): def set_id_if_needed(self, obj, is_builtin):
""" """
@@ -118,11 +129,13 @@ class Sheerka(Concept, metaclass=Singleton):
self, self,
Concept(Sheerka.UNKNOWN_CONCEPT_NAME, key=Sheerka.UNKNOWN_CONCEPT_NAME), Concept(Sheerka.UNKNOWN_CONCEPT_NAME, key=Sheerka.UNKNOWN_CONCEPT_NAME),
Concept(Sheerka.SUCCESS_CONCEPT_NAME, key=Sheerka.SUCCESS_CONCEPT_NAME), Concept(Sheerka.SUCCESS_CONCEPT_NAME, key=Sheerka.SUCCESS_CONCEPT_NAME),
Concept(Sheerka.ERROR_CONCEPT_NAME, key=Sheerka.ERROR_CONCEPT_NAME), ErrorConcept(),
TooManySuccessConcept(),
ReturnValueConcept(),
] ]
for concept in builtins: for concept in builtins:
from_db = self.sdp.get_safe(self.CONCEPTS_ENTRY, concept.name) from_db = self.sdp.get_safe(self.CONCEPTS_ENTRY, concept.key)
if from_db is None: if from_db is None:
log.debug(f"'{concept.name}' concept is not found. Adding.") log.debug(f"'{concept.name}' concept is not found. Adding.")
self.set_id_if_needed(concept, True) self.set_id_if_needed(concept, True)
@@ -130,18 +143,32 @@ class Sheerka(Concept, metaclass=Singleton):
else: else:
log.debug(f"Found concept '{from_db}'. Updating.") log.debug(f"Found concept '{from_db}'. Updating.")
concept.update_from(from_db) concept.update_from(from_db)
self.concepts_cache[concept.key] = concept
def init_logging(self):
if self.debug:
log_format = "%(asctime)s %(name)s [%(levelname)s] %(message)s"
log_level = logging.DEBUG
else:
log_format = "%(message)s"
log_level = logging.INFO
logging.basicConfig(format=log_format, level=log_level)
def eval(self, text): def eval(self, text):
evt_digest = self.sdp.save_event(Event(text)) evt_digest = self.sdp.save_event(Event(text))
exec_context = ExecutionContext(evt_digest) exec_context = ExecutionContext(self, evt_digest)
result = self.try_parse(text) return_values = self.try_parse(text)
return_values = self.try_eval(exec_context, return_values)
return_values = [] # return_values = []
for parser_name, status, node in result: # for parser_name, status, node in result:
if not status: # if not status:
return_values.append(ReturnValue(False, ErrorConcept(body=node))) # return_values.append(ReturnValue(False, ErrorConcept(body=node)))
elif status and isinstance(node, DefConceptNode): # elif status and isinstance(node, DefConceptNode):
return_values.append(self.add_concept(exec_context, node)) # return_values.append(self.add_concept(exec_context, node))
# else:
# return_values.append(ReturnValue(True, node))
return return_values return return_values
@@ -156,20 +183,36 @@ class Sheerka(Concept, metaclass=Singleton):
# except Exception as e: # except Exception as e:
# result.append((p.name, e)) # result.append((p.name, e))
tree = p.parse() tree = p.parse()
result.append((p.name, not p.has_error, p.error_sink if p.has_error else tree)) result.append(ReturnValue(p.name, not p.has_error, p.error_sink if p.has_error else tree))
return result return result
def get_concept(self, name): def try_eval(self, context, items):
""" log.debug("Evaluating parsing result.")
Given a concept name, tries to find it # group the evaluators by priority and sort them
:param name: name of the concept to look for # The first one to be applied will be the one with the highest priority
:param is_builtin: is it a builtin concept ? grouped_evaluators = {}
:return: concept if found, UNKNOWN_CONCEPT otherwise for item in self.evaluators:
""" grouped_evaluators.setdefault(item.priority, []).append(item)
for concept in self.concepts_cache: sorted_priorities = sorted(grouped_evaluators.keys(), reverse=True)
if concept.name == name:
return concept for priority in sorted_priorities:
return ErrorConcept() log.debug("Processing priority " + str(priority))
for item in items:
log.debug(item)
original_items = items[:]
evaluated_items = []
for evaluator in grouped_evaluators[priority]:
if evaluator.matches(context, original_items):
result = evaluator.eval(context, original_items)
if isinstance(result, list):
evaluated_items.extend(result)
else:
evaluated_items.append(result)
# what was computed by this group will be the input of the following group
items = evaluated_items if len(evaluated_items) > 0 else original_items
return items
def add_concept(self, exec_context, def_concept_node: DefConceptNode): def add_concept(self, exec_context, def_concept_node: DefConceptNode):
""" """
@@ -203,16 +246,53 @@ class Sheerka(Concept, metaclass=Singleton):
try: try:
self.sdp.add(exec_context.event_digest, self.CONCEPTS_ENTRY, concept, use_ref=True) self.sdp.add(exec_context.event_digest, self.CONCEPTS_ENTRY, concept, use_ref=True)
except SheerkaDataProviderDuplicateKeyError as error: except SheerkaDataProviderDuplicateKeyError as error:
return ReturnValue(False, ErrorConcept(body=error), error.args[0]) return ReturnValue(self.add_concept.__name__, False, ErrorConcept(body=error), error.args[0])
return ReturnValue(True, concept) return ReturnValue(self.add_concept.__name__, True, concept)
@staticmethod def get(self, concept_name):
def concept_equals(concept1, concept2): """
"""True if the two concepts refer to the same concept""" Tries to find a concept
if concept1 is None and concept2 is None: :param concept_name:
return True :return:
"""
if concept1 is None or concept2 is None: # first search in cache
if concept_name in self.concepts_cache:
return self.concepts_cache[concept_name]
return self.sdp.get(self.CONCEPTS_ENTRY, concept_name)
def new(self, concept, **kwargs):
"""
Returns an instance of a new concept
:param concept:
:param kwargs:
:return:
"""
if isinstance(concept, str):
concept = self.get(concept)
for k, v in kwargs.items():
if hasattr(concept, k):
setattr(concept, k, v)
return concept
def isinstance(self, a, b):
"""
return true if the concept a is an instance of the concept b
:param a:
:param b:
:return:
"""
if not isinstance(a, Concept) or not isinstance(b, Concept):
return False return False
return concept1.key == concept2.key # TODO : manage when a is the list of all possible b
return a.key == b.key
@staticmethod
def test():
return "I have access to Sheerka !"
+39 -1
View File
@@ -1,4 +1,3 @@
def sysarg_to_string(argv): def sysarg_to_string(argv):
""" """
Transform a list of strings into a single string Transform a list of strings into a single string
@@ -18,3 +17,42 @@ def sysarg_to_string(argv):
first = False first = False
return result return result
def get_class(kls):
    """
    Load a class (or any module-level attribute) from its fully
    qualified dotted name.

    :param kls: dotted path such as ``"package.module.ClassName"``
    :return: the object that the path designates
    :raises ImportError: if the module part cannot be imported
    :raises AttributeError: if the attribute is missing from the module
    """
    import importlib

    module_name, _, attr_name = kls.rpartition('.')
    # importlib.import_module returns the leaf module directly, unlike
    # bare __import__ which returns the top-level package and forces a
    # manual attribute walk down the package chain.
    module = importlib.import_module(module_name)
    return getattr(module, attr_name)
def get_object(kls, *args, **kwargs):
    """
    Build a new instance of the class named by *kls*.

    :param kls: fully qualified class name
    :param args: positional arguments forwarded to the constructor
    :param kwargs: keyword arguments forwarded to the constructor
    :return: the freshly constructed instance
    """
    return get_class(kls)(*args, **kwargs)
def get_full_qualified_name(obj):
    """
    Return the fully qualified name of *obj*'s class, including the
    module it was defined in.

    :param obj: any object
    :return: ``"module.ClassName"``, or just ``"ClassName"`` for builtins
    """
    cls = obj.__class__
    module = cls.__module__
    # Builtin types live in the implicit 'builtins' module; report them
    # bare instead of prefixing the module name.
    if module is None or module == str.__class__.__module__:
        return cls.__name__
    return f"{module}.{cls.__name__}"
+54
View File
@@ -226,3 +226,57 @@ need the second call :code:`add("Concepts", "bar")` to produce
So we are no longer in the usual way of implementing a CRUD. So we are no longer in the usual way of implementing a CRUD.
2019-11-06
**********
Input processing
"""""""""""""""""
The basic processing flow should be
::
1. parsers
2. evaluators
3. printers
So, for each new input, all known parsers will try to recognize the input. Each parser will
return a triplet of :code:`(status, concept found (or node found), text message)`
This list of triplet is given to the evaluators. In the same way, there should be multiple
types of evaluators. There will be the rules that will be introduced later.
All evaluators will provide a list (I guess it will be triplets as well) to the printers.
Python processing
"""""""""""""""""
Sheerka natively understands Python, so it will be able to execute Python code.
I will manage later on the issues caused by the different versions of Python, or the fact
that some external modules must remain isolated (maybe using virtualenv).
My first problem is to correctly implement the :code:`eval / exec` function.
I don't know why, but Python has two similar functions to do the same thing. One must use
eval to evaluate an expression, or use exec to execute code. There must be an explanation but,
as far as I know, it seems to be a complication for nothing.
The next issue that I will have to tackle is that Sheerka is not a REPL. After the execution
of the input, the system stops. Nothing is kept in memory (eg RAM).
The whole idea is to make Sheerka 'remember', even something that happened a long time ago.
So I should find a way to 'freeze the time'
To better explain what I have in mind. let's say that I want to pretty print an object
.. code-block:: python
import pprint
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(stuff)
I need three lines in order to be able to pretty print. I will first try by dumping the
globals(), using pickle, and loading it back whenever needed.
If it does not work as expected, I can find a way to save the commands and exec everything
when needed. (First time, I exec the import... second time I exec the import + the pp assignment... and the last
time I exec all three statements.)
+23
View File
@@ -0,0 +1,23 @@
from evaluators.BaseEvaluator import BaseEvaluator
from parsers.DefaultParser import DefConceptNode
import logging
log = logging.getLogger(__name__)
class AddConceptEvaluator(BaseEvaluator):
    """
    Evaluator that turns a successfully parsed concept definition
    into a stored concept.
    """

    def __init__(self):
        super().__init__("Add new Concept", 50)

    def matches(self, context, items):
        # Applicable only to a single successful DefConceptNode result.
        if len(items) != 1:
            return False
        candidate = items[0]
        return candidate.status and isinstance(candidate.value, DefConceptNode)

    def eval(self, context, items):
        log.debug("Adding a new concept")
        # Delegate the actual persistence to the controller.
        return context.sheerka.add_concept(context, items[0].value)
+14
View File
@@ -0,0 +1,14 @@
class BaseEvaluator:
    """
    Common interface for everything that can evaluate parsed items.

    Concrete evaluators override :meth:`matches` to declare which
    inputs they handle, and :meth:`eval` to process them.
    """

    def __init__(self, name, priority: int):
        # Higher-priority evaluators are applied first by the controller.
        self.name = name
        self.priority = priority

    def matches(self, context, items):
        """Return a truthy value when this evaluator can handle *items*."""
        pass

    def eval(self, context, items):
        """Process *items* and return the resulting item(s)."""
        pass
+39
View File
@@ -0,0 +1,39 @@
from core.sheerka import ReturnValue
from core.sheerka import Sheerka
from evaluators.BaseEvaluator import BaseEvaluator
import logging
log = logging.getLogger(__name__)
class DefaultEvaluator(BaseEvaluator):
    """
    Fallback evaluator that filters the raw parser results.

    Keeps the single successful result when exactly one parser
    succeeded, reports a "too many success" concept when several
    parsers claim victory, and forwards everything unchanged when
    only errors remain.
    """

    def __init__(self):
        super().__init__("Default Evaluator", 90)

    def matches(self, context, items):
        # Catch-all filter: always applicable.
        return True

    def eval(self, context, items):
        winners = [item for item in items if item.status]
        total = len(items)

        if len(winners) == 1:
            # A single parser succeeded: that result wins outright.
            log.debug(f"1 / {total} good item found.")
            return winners

        if len(winners) > 1:
            # Several parsers succeeded: ambiguous input.
            log.debug(f"{len(winners)} / {total} good items. Too many success")
            # NOTE(review): assumes Sheerka defines TOO_MANY_SUCCESS_CONCEPT_NAME
            # (not visible in this change set) — confirm the attribute exists.
            return ReturnValue(self.name,
                               False,
                               context.sheerka.new(Sheerka.TOO_MANY_SUCCESS_CONCEPT_NAME, body=items))

        # Nothing succeeded: pass the errors through for later handling.
        log.debug(f"{total} items. Only errors")
        return items
+32
View File
@@ -0,0 +1,32 @@
from core.concept import ReturnValueConcept, ErrorConcept
from evaluators.BaseEvaluator import BaseEvaluator
from parsers.PythonParser import PythonNode
import ast
from core.sheerka import ReturnValue, Sheerka
import logging
log = logging.getLogger(__name__)
class PythonEvaluator(BaseEvaluator):
    """
    Evaluator that executes input recognized as Python code.
    """

    def __init__(self):
        super().__init__("Python Evaluator", 50)

    def matches(self, context, items):
        # Applicable only to a single PythonNode produced by the parser.
        return len(items) == 1 and isinstance(items[0].value, PythonNode)

    def eval(self, context, items):
        sheerka = context.sheerka
        node = items[0].value
        if not isinstance(node.ast, ast.Expression):
            # Statements (exec path) are not handled yet — expressions only.
            raise NotImplementedError()
        try:
            log.debug("Evaluating python expression")
            # SECURITY: eval() runs arbitrary user input. Only the
            # 'sheerka' name is exposed in the local namespace, but this
            # is still full code execution — acceptable only for a
            # local, user-driven tool.
            code = compile(node.ast, "<string>", "eval")
            outcome = eval(code, {}, {"sheerka": context.sheerka})
            wrapped = sheerka.new(ReturnValueConcept.NAME, body=outcome)
            return ReturnValue(self.name, True, wrapped)
        except Exception as error:
            error = sheerka.new(ErrorConcept.NAME, body=error)
            return ReturnValue(self.name, False, error)
View File
+8 -4
View File
@@ -28,6 +28,8 @@ def sysarg_to_string(argv):
result += '"' + s + '"' if " " in s else s result += '"' + s + '"' if " " in s else s
first = False first = False
if result[0] in ('"', "'"):
result = result[1:-1] # strip quotes
return result return result
@@ -53,15 +55,17 @@ def main(argv):
if o in ('-d', "--debug"): if o in ('-d', "--debug"):
debug = True debug = True
init_logging(debug) # init_logging(debug)
sheerka = Sheerka() sheerka = Sheerka(debug=debug)
sheerka.initialize() sheerka.initialize()
_in = sysarg_to_string(args) _in = sysarg_to_string(args)
result = sheerka.eval(_in) result = sheerka.eval(_in)
logging.info(result) for res in result:
return result[-1].status logging.info(res)
return result[-1].status if len(result) > 0 else 1
except getopt.GetoptError: except getopt.GetoptError:
usage() usage()
sys.exit(2) sys.exit(2)
+18 -4
View File
@@ -31,15 +31,29 @@ class UnexpectedTokenErrorNode(DefaultParserErrorNode):
expected_tokens: list expected_tokens: list
def __post_init__(self): def __post_init__(self):
log.debug("UnexpectedToken : " + self.message) log.debug("-> UnexpectedTokenErrorNode: " + self.message)
@dataclass() @dataclass()
class SyntaxErrorNode(DefaultParserErrorNode): class SyntaxErrorNode(DefaultParserErrorNode):
"""
The input is recognized, but there is a syntax error
"""
message: str message: str
def __post_init__(self): def __post_init__(self):
log.debug("SyntaxError : " + self.message) log.debug("-> SyntaxErrorNode: " + self.message)
@dataclass()
class CannotHandleErrorNode(DefaultParserErrorNode):
"""
The input is not recognized
"""
text: str
def __post_init__(self):
log.debug("-> CannotHandleErrorNode: " + self.text)
@dataclass() @dataclass()
@@ -134,7 +148,7 @@ class BinaryNode(DefaultParserNode):
class DefaultParser(BaseParser): class DefaultParser(BaseParser):
def __init__(self, text, sub_parser): def __init__(self, text, sub_parser):
BaseParser.__init__(self, "Default", text) BaseParser.__init__(self, "DefaultParser", text)
self.sub_parser = sub_parser self.sub_parser = sub_parser
self.lexer = Tokenizer(text) self.lexer = Tokenizer(text)
self.lexer_iter = iter(Tokenizer(text)) self.lexer_iter = iter(Tokenizer(text))
@@ -237,7 +251,7 @@ class DefaultParser(BaseParser):
self.next_token() self.next_token()
return self.parse_def_concept() return self.parse_def_concept()
else: else:
return self.parse_expression() return self.add_error(CannotHandleErrorNode([], self.text))
def parse_def_concept(self): def parse_def_concept(self):
""" """
+4
View File
@@ -12,6 +12,8 @@ class PythonErrorNode(ErrorNode):
source: str source: str
exception: Exception exception: Exception
def __post_init__(self):
log.debug("-> PythonErrorNode: " + str(self.exception))
@dataclass() @dataclass()
class PythonNode(Node): class PythonNode(Node):
@@ -41,6 +43,8 @@ class PythonParser(BaseParser):
error_node = PythonErrorNode(self.text, error) error_node = PythonErrorNode(self.text, error)
self.error_sink.append(error_node) self.error_sink.append(error_node)
return error_node return error_node
log.debug("Recognized python code.")
return PythonNode(self.text, tree) return PythonNode(self.text, tree)
def try_parse_expression(self): def try_parse_expression(self):
+10
View File
@@ -0,0 +1,10 @@
atomicwrites==1.3.0
attrs==19.3.0
more-itertools==7.2.0
packaging==19.2
pluggy==0.13.0
py==1.8.0
pyparsing==2.4.4
pytest==5.2.2
six==1.13.0
wcwidth==0.1.7
+19 -42
View File
@@ -5,6 +5,9 @@ import struct
import io import io
from dataclasses import dataclass from dataclasses import dataclass
import logging import logging
import core.utils
from core.concept import Concept
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -43,8 +46,6 @@ class Serializer:
self.register(EventSerializer()) self.register(EventSerializer())
self.register(StateSerializer()) self.register(StateSerializer())
self.register(ConceptSerializer()) self.register(ConceptSerializer())
self.register(SheerkaSerializer())
def register(self, serializer): def register(self, serializer):
""" """
@@ -62,7 +63,7 @@ class Serializer:
:param obj: :param obj:
:return: :return:
""" """
serializers = [s for s in self._cache if s.match(obj)] serializers = [s for s in self._cache if s.matches(obj)]
if not serializers: if not serializers:
raise TypeError(f"Don't know how to serialize {type(obj)}") raise TypeError(f"Don't know how to serialize {type(obj)}")
@@ -104,7 +105,7 @@ class BaseSerializer:
self.name = name self.name = name
self.version = version self.version = version
def match(self, obj): def matches(self, obj):
""" """
Returns true if self can serialize obj Returns true if self can serialize obj
:param obj: :param obj:
@@ -131,33 +132,6 @@ class BaseSerializer:
""" """
pass pass
@staticmethod
def get_class(kls):
"""
Loads a class from its string full qualified name
:param kls:
:return:
"""
parts = kls.split('.')
module = ".".join(parts[:-1])
m = __import__(module)
for comp in parts[1:]:
m = getattr(m, comp)
return m
@staticmethod
def get_full_qualified_name(obj):
"""
Returns the full qualified name of a class (including its module name )
:param obj:
:return:
"""
module = obj.__class__.__module__
if module is None or module == str.__class__.__module__:
return obj.__class__.__name__ # Avoid reporting __builtin__
else:
return module + '.' + obj.__class__.__name__
def __repr__(self): def __repr__(self):
return self.__class__.__name__ + ' (' + self.name + ", version=" + str(self.version) + ")" return self.__class__.__name__ + ' (' + self.name + ", version=" + str(self.version) + ")"
@@ -166,8 +140,8 @@ class EventSerializer(BaseSerializer):
def __init__(self): def __init__(self):
BaseSerializer.__init__(self, "E", 1) BaseSerializer.__init__(self, "E", 1)
def match(self, obj): def matches(self, obj):
return BaseSerializer.get_full_qualified_name(obj) == "sdp.sheerkaDataProvider.Event" return core.utils.get_full_qualified_name(obj) == "sdp.sheerkaDataProvider.Event"
def dump(self, stream, obj, context): def dump(self, stream, obj, context):
stream.write(json.dumps(obj.to_dict(), default=json_default_converter).encode("utf-8")) stream.write(json.dumps(obj.to_dict(), default=json_default_converter).encode("utf-8"))
@@ -177,7 +151,7 @@ class EventSerializer(BaseSerializer):
def load(self, stream, context): def load(self, stream, context):
json_stream = stream.read().decode("utf-8") json_stream = stream.read().decode("utf-8")
as_dict = json.loads(json_stream) as_dict = json.loads(json_stream)
event = BaseSerializer.get_class("sdp.sheerkaDataProvider.Event")() event = core.utils.get_class("sdp.sheerkaDataProvider.Event")()
event.from_dict(as_dict) event.from_dict(as_dict)
return event return event
@@ -188,8 +162,8 @@ class ObjectSerializer(BaseSerializer):
BaseSerializer.__init__(self, name, version) BaseSerializer.__init__(self, name, version)
self.fully_qualified_name = fully_qualified_name self.fully_qualified_name = fully_qualified_name
def match(self, obj): def matches(self, obj):
return BaseSerializer.get_full_qualified_name(obj) == self.fully_qualified_name return core.utils.get_full_qualified_name(obj) == self.fully_qualified_name
def dump(self, stream, obj, context): def dump(self, stream, obj, context):
as_json = obj.to_dict() as_json = obj.to_dict()
@@ -206,7 +180,7 @@ class ObjectSerializer(BaseSerializer):
def load(self, stream, context): def load(self, stream, context):
json_stream = stream.read().decode("utf-8") json_stream = stream.read().decode("utf-8")
json_message = json.loads(json_stream) json_message = json.loads(json_stream)
obj = BaseSerializer.get_class(self.fully_qualified_name)() obj = core.utils.get_class(self.fully_qualified_name)()
obj.from_dict(json_message) obj.from_dict(json_message)
setattr(obj, Serializer.HISTORY, json_message[Serializer.HISTORY]) setattr(obj, Serializer.HISTORY, json_message[Serializer.HISTORY])
@@ -219,7 +193,7 @@ class PickleSerializer(BaseSerializer):
BaseSerializer.__init__(self, name, version) BaseSerializer.__init__(self, name, version)
self.predicate = predicate self.predicate = predicate
def match(self, obj): def matches(self, obj):
return self.predicate(obj) return self.predicate(obj)
def dump(self, stream, obj, context): def dump(self, stream, obj, context):
@@ -233,7 +207,7 @@ class PickleSerializer(BaseSerializer):
class StateSerializer(PickleSerializer): class StateSerializer(PickleSerializer):
def __init__(self, ): def __init__(self, ):
PickleSerializer.__init__(self, lambda obj: BaseSerializer.get_full_qualified_name( PickleSerializer.__init__(self, lambda obj: core.utils.get_full_qualified_name(
obj) == "sdp.sheerkaDataProvider.State", "S", 1) obj) == "sdp.sheerkaDataProvider.State", "S", 1)
@@ -241,7 +215,10 @@ class ConceptSerializer(ObjectSerializer):
def __init__(self): def __init__(self):
ObjectSerializer.__init__(self, "core.concept.Concept", "C", 1) ObjectSerializer.__init__(self, "core.concept.Concept", "C", 1)
def matches(self, obj):
return isinstance(obj, Concept)
class SheerkaSerializer(ObjectSerializer): #
def __init__(self): # class SheerkaSerializer(ObjectSerializer):
ObjectSerializer.__init__(self, "core.sheerka.Sheerka", "C", 1) # def __init__(self):
# ObjectSerializer.__init__(self, "core.sheerka.Sheerka", "C", 1)
+38 -38
View File
@@ -167,44 +167,44 @@ def test_i_can_recognize_keywords(text, expected):
assert tokens[0].value == expected assert tokens[0].value == expected
@pytest.mark.parametrize("text, expected", [ # @pytest.mark.parametrize("text, expected", [
("1", n(1)), # ("1", n(1)),
("+1", n(1)), # ("+1", n(1)),
("-1", n(-1)), # ("-1", n(-1)),
("'foo'", s("foo")), # ("'foo'", s("foo")),
("identifier", v("identifier")), # ("identifier", v("identifier")),
("true", t()), # ("true", t()),
("false", f()), # ("false", f()),
("null", null()), # ("null", null()),
("1 * 2", b(TokenKind.STAR, n(1), n(2))), # ("1 * 2", b(TokenKind.STAR, n(1), n(2))),
("1 * 2/3", b(TokenKind.STAR, n(1), b(TokenKind.SLASH, n(2), n(3)))), # ("1 * 2/3", b(TokenKind.STAR, n(1), b(TokenKind.SLASH, n(2), n(3)))),
("1 + 2", b(TokenKind.PLUS, n(1), n(2))), # ("1 + 2", b(TokenKind.PLUS, n(1), n(2))),
("1 + 2 - 3", b(TokenKind.PLUS, n(1), b(TokenKind.MINUS, n(2), n(3)))), # ("1 + 2 - 3", b(TokenKind.PLUS, n(1), b(TokenKind.MINUS, n(2), n(3)))),
("1 + 2-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))), # ("1 + 2-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))),
("1 + 2 +-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))), # ("1 + 2 +-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))),
("1 + 2 * 3", b(TokenKind.PLUS, n(1), b(TokenKind.STAR, n(2), n(3)))), # ("1 + 2 * 3", b(TokenKind.PLUS, n(1), b(TokenKind.STAR, n(2), n(3)))),
("1 * 2 + 3", b(TokenKind.PLUS, b(TokenKind.STAR, n(1), n(2)), n(3))), # ("1 * 2 + 3", b(TokenKind.PLUS, b(TokenKind.STAR, n(1), n(2)), n(3))),
("(1 + 2) * 3", b(TokenKind.STAR, b(TokenKind.PLUS, n(1), n(2)), n(3))), # ("(1 + 2) * 3", b(TokenKind.STAR, b(TokenKind.PLUS, n(1), n(2)), n(3))),
("1 * (2 + 3)", b(TokenKind.STAR, n(1), b(TokenKind.PLUS, n(2), n(3)))), # ("1 * (2 + 3)", b(TokenKind.STAR, n(1), b(TokenKind.PLUS, n(2), n(3)))),
]) # ])
def test_i_can_parse_simple_expression(text, expected): # def test_i_can_parse_simple_expression(text, expected):
parser = DefaultParser(text, None) # parser = DefaultParser(text, None)
ast = parser.parse() # ast = parser.parse()
assert ast.is_same(expected) # assert ast.is_same(expected)
#
#
@pytest.mark.parametrize("text, token_found, expected_tokens", [ # @pytest.mark.parametrize("text, token_found, expected_tokens", [
("1+", TokenKind.EOF, # ("1+", TokenKind.EOF,
[TokenKind.NUMBER, TokenKind.STRING, TokenKind.IDENTIFIER, 'true', 'false', 'null', TokenKind.LPAR]), # [TokenKind.NUMBER, TokenKind.STRING, TokenKind.IDENTIFIER, 'true', 'false', 'null', TokenKind.LPAR]),
("(1+1", TokenKind.EOF, [TokenKind.RPAR]) # ("(1+1", TokenKind.EOF, [TokenKind.RPAR])
]) # ])
def test_i_can_detect_unexpected_end_of_code(text, token_found, expected_tokens): # def test_i_can_detect_unexpected_end_of_code(text, token_found, expected_tokens):
parser = DefaultParser(text, None) # parser = DefaultParser(text, None)
parser.parse() # parser.parse()
#
assert parser.has_error # assert parser.has_error
assert parser.error_sink[0].tokens[0].type == token_found # assert parser.error_sink[0].tokens[0].type == token_found
assert parser.error_sink[0].expected_tokens == expected_tokens # assert parser.error_sink[0].expected_tokens == expected_tokens
@pytest.mark.parametrize("text, expected_name, expected_expr", [ @pytest.mark.parametrize("text, expected_name, expected_expr", [
+51 -20
View File
@@ -5,10 +5,11 @@ import os
from os import path from os import path
import shutil import shutil
from core.concept import Concept, ConceptParts from core.concept import Concept, ConceptParts, ReturnValueConcept
from core.sheerka import Sheerka, ExecutionContext from core.sheerka import Sheerka, ExecutionContext
from parsers.DefaultParser import DefConceptNode, DefaultParser from parsers.DefaultParser import DefConceptNode, DefaultParser
from parsers.PythonParser import PythonParser from parsers.PythonParser import PythonParser
from sdp.sheerkaDataProvider import SheerkaDataProvider
tests_root = path.abspath("../build/tests") tests_root = path.abspath("../build/tests")
root_folder = "init_folder" root_folder = "init_folder"
@@ -36,23 +37,11 @@ def test_root_folder_is_created_after_initialization():
def test_lists_of_concepts_is_initialized(): def test_lists_of_concepts_is_initialized():
Sheerka().initialize(root_folder) sheerka = Sheerka()
assert len(Sheerka().concepts_cache) > 1 sheerka.initialize(root_folder)
assert len(sheerka.concepts_cache) > 1
# def test_null_concept_are_equals():
# concept1 = Concept("test1")
# concept2 = Concept("test2")
# concept3 = Concept("test3")
#
# assert not Sheerka.concept_equals(concept1, None)
# assert not Sheerka.concept_equals(None, concept1)
# assert not Sheerka.concept_equals(concept1, concept2)
# assert not Sheerka.concept_equals(concept1, concept3)
#
# assert Sheerka.concept_equals(None, None)
# assert Sheerka.concept_equals(concept1, concept1)
def get_concept(): def get_concept():
text = """ text = """
def concept a+b def concept a+b
@@ -72,7 +61,7 @@ def test_i_can_add_a_concept():
concept = get_concept() concept = get_concept()
sheerka = Sheerka() sheerka = Sheerka()
sheerka.initialize(root_folder) sheerka.initialize(root_folder)
res = sheerka.add_concept(ExecutionContext("xxx"), concept) res = sheerka.add_concept(ExecutionContext(sheerka, "xxx"), concept)
concept_found = res.value concept_found = res.value
assert res.status assert res.status
@@ -93,6 +82,48 @@ def test_i_can_add_a_concept():
assert concept_found.key == "__var__0 + __var__1" assert concept_found.key == "__var__0 + __var__1"
assert concept_found.id == "1001" assert concept_found.id == "1001"
# def test_i_cannot_add_the_same_concept_twice(): assert path.exists(sheerka.sdp.get_obj_path(SheerkaDataProvider.ObjectsFolder,
# concept1 = DefConceptNode(name="concept") "4f249487410db35d8bcbcf4521acb3dd8354978804cd99bbc4de17a323b2f237"))
# sheerka = Sheerka
@pytest.mark.parametrize("text, expected", [
("1 + 1", 2),
("sheerka.test()", 'I have access to Sheerka !')
])
def test_i_can_eval_simple_python_expressions(text, expected):
sheerka = Sheerka(debug=True)
sheerka.initialize(root_folder)
res = sheerka.eval(text)
assert len(res) == 1
assert res[0].status
assert res[0].value.body == expected
assert sheerka.isinstance(res[0].value, ReturnValueConcept())
def test_i_cannot_add_the_same_concept_twice():
"""
Checks that duplicated concepts are managed by sheerka, not by sheerka.sdp
:return:
"""
pass
def test_i_can_get_a_concept():
"""
Checks that a concept can be found by its name
even when there are variables in the name (ex 'hello + a' or 'a + b' )
:return:
"""
pass
def test_i_can_instanciate_a_concept():
"""
Test the new() functionality
make sure that some Concept are singleton (ex Sheerka, True, False)
but some others need a new instance every time
:return:
"""
pass
+4 -3
View File
@@ -10,6 +10,7 @@ import shutil
import json import json
from sdp.sheerkaSerializer import ObjectSerializer, BaseSerializer, Serializer, SerializerContext, PickleSerializer from sdp.sheerkaSerializer import ObjectSerializer, BaseSerializer, Serializer, SerializerContext, PickleSerializer
import core.utils
tests_root = path.abspath("../build/tests") tests_root = path.abspath("../build/tests")
evt_digest = "3a571cb6034ef6fc8d7fe91948d0d29728eed74de02bac7968b0e9facca2c2d7" evt_digest = "3a571cb6034ef6fc8d7fe91948d0d29728eed74de02bac7968b0e9facca2c2d7"
@@ -637,7 +638,7 @@ def test_i_can_add_unique():
def test_i_can_add_reference_of_an_object_with_a_key(): def test_i_can_add_reference_of_an_object_with_a_key():
sdp = SheerkaDataProvider(".sheerka") sdp = SheerkaDataProvider(".sheerka")
obj = ObjDumpJson("my_key", "value1") obj = ObjDumpJson("my_key", "value1")
obj_serializer = ObjectSerializer(BaseSerializer.get_full_qualified_name(obj)) obj_serializer = ObjectSerializer(core.utils.get_full_qualified_name(obj))
sdp.serializer.register(obj_serializer) sdp.serializer.register(obj_serializer)
entry, key = sdp.add(evt_digest, "entry", obj, use_ref=True) entry, key = sdp.add(evt_digest, "entry", obj, use_ref=True)
@@ -1102,7 +1103,7 @@ def test_i_can_get_an_entry_by_key():
def test_i_can_get_object_save_by_reference(): def test_i_can_get_object_save_by_reference():
sdp = SheerkaDataProvider(".sheerka") sdp = SheerkaDataProvider(".sheerka")
obj = ObjDumpJson("my_key", "value1") obj = ObjDumpJson("my_key", "value1")
sdp.serializer.register(ObjectSerializer(BaseSerializer.get_full_qualified_name(obj))) sdp.serializer.register(ObjectSerializer(core.utils.get_full_qualified_name(obj)))
entry, key = sdp.add(evt_digest, "entry", obj, use_ref=True) entry, key = sdp.add(evt_digest, "entry", obj, use_ref=True)
loaded = sdp.get(entry, key) loaded = sdp.get(entry, key)
@@ -1208,7 +1209,7 @@ def test_i_can_test_than_an_entry_exits():
def test_i_can_save_and_load_object_ref_with_history(): def test_i_can_save_and_load_object_ref_with_history():
sdp = SheerkaDataProvider(".sheerka") sdp = SheerkaDataProvider(".sheerka")
obj = ObjDumpJson("my_key", "value1") obj = ObjDumpJson("my_key", "value1")
sdp.serializer.register(ObjectSerializer(BaseSerializer.get_full_qualified_name(obj))) sdp.serializer.register(ObjectSerializer(core.utils.get_full_qualified_name(obj)))
entry, key = sdp.add(evt_digest, "entry", obj, use_ref=True) entry, key = sdp.add(evt_digest, "entry", obj, use_ref=True)
loaded = sdp.get(entry, key) loaded = sdp.get(entry, key)
+2 -1
View File
@@ -4,6 +4,7 @@ from dataclasses import dataclass
from sdp.sheerkaDataProvider import Event from sdp.sheerkaDataProvider import Event
from sdp.sheerkaSerializer import Serializer, ObjectSerializer, SerializerContext, BaseSerializer from sdp.sheerkaSerializer import Serializer, ObjectSerializer, SerializerContext, BaseSerializer
from datetime import datetime from datetime import datetime
import core.utils
@dataclass() @dataclass()
@@ -53,4 +54,4 @@ def test_i_can_serialize_an_object():
(Obj("10", "value"), "tests.test_sheerkaSerializer.Obj") (Obj("10", "value"), "tests.test_sheerkaSerializer.Obj")
]) ])
def test_get_full_qualified_name(obj, expected): def test_get_full_qualified_name(obj, expected):
assert expected == BaseSerializer.get_full_qualified_name(obj) assert expected == core.utils.get_full_qualified_name(obj)