Added SyaNodeParser (finally, after one month)

This commit is contained in:
2020-04-09 15:42:36 +02:00
parent c9acfa99a1
commit 6c7c529016
56 changed files with 5322 additions and 404 deletions
+7 -3
View File
@@ -43,6 +43,7 @@ class ExecutionContext:
desc: str = None,
logger=None,
global_hints=None,
global_errors=None,
**kwargs):
self._parent = None
@@ -61,6 +62,7 @@ class ExecutionContext:
self.logger = logger
self.local_hints = set()
self.global_hints = set() if global_hints is None else global_hints
self.global_errors = [] if global_errors is None else global_errors
self.inputs = {} # what was the parameters of the execution context
self.values = {} # what was produced by the execution context
@@ -146,8 +148,8 @@ class ExecutionContext:
preprocess.set_prop(k, v)
if not self.preprocess:
self.preprocess = set()
self.preprocess.add(preprocess)
self.preprocess = []
self.preprocess.append(preprocess)
return self
def add_inputs(self, **kwargs):
@@ -212,6 +214,7 @@ class ExecutionContext:
desc,
logger,
self.global_hints,
self.global_errors,
**_kwargs)
new._parent = self
new._tab = self._tab + " " * DEBUG_TAB_SIZE
@@ -230,7 +233,8 @@ class ExecutionContext:
if self.logger and not self.logger.disabled:
self.logger.debug(f"[{self._id:2}]" + self._tab + (f"[{who}] " if who else "") + str(message))
def log_error(self, message, who=None):
def log_error(self, message, who=None, exc=None):
self.global_errors.append(exc or message)
if self.logger and not self.logger.disabled:
self.logger.exception(f"[{self._id:2}]" + self._tab + (f"[{who}] " if who else "") + str(message))
@@ -1,8 +1,10 @@
from core.builtin_concepts import BuiltinConcepts, ErrorConcept
from core.concept import Concept
from sdp.sheerkaDataProvider import SheerkaDataProviderDuplicateKeyError, SheerkaDataProviderRef
import core.utils
CONCEPT_LEXER_PARSER_CLASS = "parsers.ConceptLexerParser.ConceptLexerParser"
BNF_NODE_PARSER_CLASS = "parsers.BnfNodeParser.BnfNodeParser"
BASE_NODE_PARSER_CLASS = "parsers.BaseNodeParser.BaseNodeParser"
class SheerkaCreateNewConcept:
@@ -13,6 +15,7 @@ class SheerkaCreateNewConcept:
def __init__(self, sheerka):
self.sheerka = sheerka
self.logger_name = self.create_new_concept.__name__
self.base_lexer_parser = core.utils.get_class(BASE_NODE_PARSER_CLASS)("BaseNodeParser", 0)
def create_new_concept(self, context, concept: Concept):
"""
@@ -25,7 +28,7 @@ class SheerkaCreateNewConcept:
concept.init_key()
concepts_definitions = None
init_ret_value = None
init_bnf_ret_value = None
sdp = self.sheerka.sdp
@@ -49,13 +52,19 @@ class SheerkaCreateNewConcept:
concepts_definitions[concept] = concept.bnf
# check if it's a valid BNF or whether it breaks the known rules
concept_lexer_parser = self.sheerka.parsers[CONCEPT_LEXER_PARSER_CLASS]()
bnf_lexer_parser = self.sheerka.parsers[BNF_NODE_PARSER_CLASS]()
with context.push(self.sheerka.name, desc=f"Initializing concept definition for {concept}") as sub_context:
sub_context.concepts[concept.key] = concept # the concept is not in the real cache yet
init_ret_value = concept_lexer_parser.initialize(sub_context, concepts_definitions)
sub_context.add_values(return_values=init_ret_value)
if not init_ret_value.status:
return self.sheerka.ret(self.logger_name, False, ErrorConcept(init_ret_value.value))
init_bnf_ret_value = bnf_lexer_parser.initialize(sub_context, concepts_definitions)
sub_context.add_values(return_values=init_bnf_ret_value)
if not init_bnf_ret_value.status:
return self.sheerka.ret(self.logger_name, False, ErrorConcept(init_bnf_ret_value.value))
# update concept definition by key
init_sya_ret_value = self.base_lexer_parser.initialize(context, [concept], use_sheerka=True)
if not init_sya_ret_value.status:
return self.sheerka.ret(self.logger_name, False, ErrorConcept(init_sya_ret_value.value))
concepts_by_first_keyword = init_sya_ret_value.body
concept.freeze_definition_hash()
@@ -97,9 +106,15 @@ class SheerkaCreateNewConcept:
sdp.set(
context.event.get_digest(),
self.sheerka.CONCEPTS_DEFINITIONS_ENTRY,
concept_lexer_parser.encode_grammar(init_ret_value.body),
bnf_lexer_parser.encode_grammar(init_bnf_ret_value.body),
use_ref=True)
self.sheerka.concepts_definitions_cache = None # invalidate cache
# update the concepts by first keyword
sdp.set(context.event.get_digest(),
self.sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY,
concepts_by_first_keyword)
except SheerkaDataProviderDuplicateKeyError as error:
context.log_error("Failed to create a new concept.", who=self.logger_name)
return self.sheerka.ret(
@@ -109,13 +124,13 @@ class SheerkaCreateNewConcept:
error.args[0])
# Updates the caches
self.sheerka.cache_by_key[concept.key] = sdp.get_safe(self.sheerka.CONCEPTS_ENTRY, concept.key)
self.sheerka.cache_by_name[concept.name] = sdp.get_safe(self.sheerka.CONCEPTS_BY_NAME_ENTRY, concept.name)
self.sheerka.cache_by_id[concept.id] = concept
if init_ret_value is not None and init_ret_value.status:
self.sheerka.concepts_grammars = init_ret_value.body
if init_bnf_ret_value is not None and init_bnf_ret_value.status:
self.sheerka.concepts_grammars = init_bnf_ret_value.body
self.sheerka.concepts_by_first_keyword = concepts_by_first_keyword
# process the return in needed
# process the return if needed
ret = self.sheerka.ret(self.logger_name, True, self.sheerka.new(BuiltinConcepts.NEW_CONCEPT, body=concept))
return ret
@@ -1,6 +1,6 @@
from core.builtin_concepts import BuiltinConcepts
from core.concept import Concept, DoNotResolve, ConceptParts, InfiniteRecursionResolved
from core.builtin_helpers import add_to_ret_val, remove_from_ret_val, expect_one
from core.builtin_helpers import expect_one
CONCEPT_EVALUATION_STEPS = [
BuiltinConcepts.BEFORE_EVALUATION,
+24 -15
View File
@@ -33,6 +33,8 @@ class SheerkaExecute:
# group the parsers by priorities
instantiated_parsers = [parser(sheerka=self.sheerka) for parser in self.sheerka.parsers.values()]
instantiated_parsers = self.preprocess(execution_context, instantiated_parsers)
grouped_parsers = {}
for parser in [p for p in instantiated_parsers if p.enabled]:
grouped_parsers.setdefault(parser.priority, []).append(parser)
@@ -44,7 +46,6 @@ class SheerkaExecute:
for parser in grouped_parsers[priority]:
return_value_success_found = False
for return_value in inputs_for_this_group:
to_parse = return_value.body.body \
@@ -67,22 +68,23 @@ class SheerkaExecute:
r.parents = [return_value]
result.append(r)
if self.sheerka.isinstance(r.body, BuiltinConcepts.PARSER_RESULT):
# if a ParserResultConcept is returned, it will be used by the parsers
# of the following groups
to_process.append(r)
if r.status:
return_value_success_found = True
stop_processing = True
else:
res.parents = [return_value]
result.append(res)
if self.sheerka.isinstance(res.body, BuiltinConcepts.PARSER_RESULT):
# if a ParserResultConcept is returned, it will be used by the parsers
# of the following groups
to_process.append(res)
if res.status:
return_value_success_found = True
stop_processing = True
sub_context.add_values(return_values=res)
if return_value_success_found:
stop_processing = True
break # Stop the other return_values (but not the other parsers with the same priority)
if stop_processing:
break # Do not try the other priorities if a match is found
@@ -102,7 +104,7 @@ class SheerkaExecute:
instantiated_evaluators = [e_class() for e_class in self.sheerka.evaluators]
# pre-process evaluators if needed
instantiated_evaluators = self._preprocess_evaluators(execution_context, instantiated_evaluators)
instantiated_evaluators = self.preprocess(execution_context, instantiated_evaluators)
for evaluator in [e for e in instantiated_evaluators if e.enabled and process_step in e.steps]:
grouped_evaluators.setdefault(evaluator.priority, []).append(evaluator)
@@ -123,7 +125,7 @@ class SheerkaExecute:
evaluated_items = []
to_delete = []
for evaluator in grouped_evaluators[priority]:
evaluator = self._preprocess_evaluators(execution_context, evaluator.__class__()) # fresh copy
evaluator = self.preprocess(execution_context, evaluator.__class__()) # fresh copy
sub_context_desc = f"Evaluating using {evaluator.name} ({priority=})"
with iteration_context.push(desc=sub_context_desc, logger=evaluator.verbose_log) as sub_context:
@@ -215,22 +217,29 @@ class SheerkaExecute:
return return_values
def _preprocess_evaluators(self, context, evaluators):
def preprocess(self, context, parsers_or_evaluators):
if not context.preprocess:
return evaluators
return parsers_or_evaluators
if not hasattr(evaluators, "__iter__"):
if not hasattr(parsers_or_evaluators, "__iter__"):
single_one = True
evaluators = [evaluators]
parsers_or_evaluators = [parsers_or_evaluators]
else:
single_one = False
for preprocess in context.preprocess:
for e in evaluators:
if preprocess.props["name"].value == e.name:
for e in parsers_or_evaluators:
if self.matches(e.name, preprocess.get_prop("name")):
for prop, value in preprocess.props.items():
if prop == "name":
continue
if hasattr(e, prop):
setattr(e, prop, value.value)
return evaluators[0] if single_one else evaluators
return parsers_or_evaluators[0] if single_one else parsers_or_evaluators
@staticmethod
def matches(parser_or_evaluator_name, preprocessor_name):
if preprocessor_name.endswith("*"):
return parser_or_evaluator_name.startswith(preprocessor_name[:-1])
else:
return parser_or_evaluator_name == preprocessor_name
+94 -10
View File
@@ -17,12 +17,7 @@ from core.sheerka_logger import console_handler
import logging
# CONCEPT_EVALUATION_STEPS = [
# BuiltinConcepts.BEFORE_EVALUATION,
# BuiltinConcepts.EVALUATION,
# BuiltinConcepts.AFTER_EVALUATION]
CONCEPT_LEXER_PARSER_CLASS = "parsers.ConceptLexerParser.ConceptLexerParser"
CONCEPT_LEXER_PARSER_CLASS = "parsers.BnfNodeParser.BnfNodeParser"
BNF_PARSER_CLASS = "parsers.BnfParser.BnfParser"
CONCEPTS_FILE = "_concepts.txt"
@@ -37,6 +32,9 @@ class Sheerka(Concept):
CONCEPTS_BY_NAME_ENTRY = "Concepts_By_Name"
CONCEPTS_BY_HASH_ENTRY = "Concepts_By_Hash" # store hash of concepts definitions (not values)
CONCEPTS_DEFINITIONS_ENTRY = "Concepts_Definitions" # to store definitions (bnf) of concepts
CONCEPTS_BY_FIRST_KEYWORD_ENTRY = "Concepts_By_First_Keyword"
CONCEPTS_SYA_DEFINITION_ENTRY = "Concepts_Sya_Definitions"
BUILTIN_CONCEPTS_KEYS = "Builtins_Concepts" # sequential key for builtin concepts
USER_CONCEPTS_KEYS = "User_Concepts" # sequential key for user defined concepts
@@ -65,6 +63,10 @@ class Sheerka(Concept):
# a grammar is a resolved BNF
self.concepts_grammars = {}
# cache for SYA concepts
self.concepts_by_first_keyword = {}
self.sya_definitions = {}
# a concept can be instantiated
# ex: File is a concept, but File('foo.txt') is an instance
# TODO: manage contexts
@@ -119,7 +121,8 @@ class Sheerka(Concept):
self.initialize_builtin_concepts()
self.initialize_builtin_parsers()
self.initialize_builtin_evaluators()
self.initialize_concepts_definitions(exec_context)
self.initialize_bnf_parsing(exec_context)
self.initialize_sya_parsing()
res = ReturnValueConcept(self, True, self)
exec_context.add_values(return_values=res)
@@ -174,12 +177,25 @@ class Sheerka(Concept):
"""
core.utils.init_package_import("parsers")
base_class = core.utils.get_class("parsers.BaseParser.BaseParser")
modules_to_skip = ["parsers.BaseNodeParser"]
temp_result = {}
for parser in core.utils.get_sub_classes("parsers", base_class):
if parser.__module__ == base_class.__module__:
continue
self.init_log.debug(f"Adding builtin parser '{parser.__name__}'")
self.parsers[core.utils.get_full_qualified_name(parser)] = parser
if parser.__module__ in modules_to_skip:
continue
qualified_name = core.utils.get_full_qualified_name(parser)
self.init_log.debug(f"Adding builtin parser '{qualified_name}'")
temp_result[qualified_name] = parser
# Now we sort the parsers by name.
# It's not important for the logic of their usage, as they have their priority anyway.
# We do that for the unit tests: they are too complicated to write otherwise.
for name in sorted(temp_result.keys()):
self.parsers[name] = temp_result[name]
def initialize_builtin_evaluators(self):
"""
@@ -195,7 +211,7 @@ class Sheerka(Concept):
self.init_log.debug(f"Adding builtin evaluator '{evaluator.__name__}'")
self.evaluators.append(evaluator)
def initialize_concepts_definitions(self, execution_context):
def initialize_bnf_parsing(self, execution_context):
self.init_log.debug("Initializing concepts grammars.")
definitions = self.get_concepts_definitions(execution_context)
@@ -211,6 +227,25 @@ class Sheerka(Concept):
self.concepts_grammars = lexer_parser.concepts_grammars
def initialize_sya_parsing(self):
    """Load the SYA caches from the data provider.

    Restores the concepts-by-first-keyword index and the SYA definitions
    from the SDP, falling back to empty dicts when the entries are absent.
    """
    self.init_log.debug("Initializing sya definitions.")
    # presumably get_safe returns None when the entry does not exist — TODO confirm
    by_first_keyword = self.sdp.get_safe(
        self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY,
        load_origin=False)
    self.concepts_by_first_keyword = by_first_keyword if by_first_keyword else {}
    definitions = self.sdp.get_safe(
        self.CONCEPTS_SYA_DEFINITION_ENTRY,
        load_origin=False)
    self.sya_definitions = definitions if definitions else {}
def reset(self):
    """Wipe every in-memory cache and the underlying data provider."""
    self.reset_cache()
    # Drop the parsing-related in-memory caches as well.
    self.concepts_grammars = {}
    self.concepts_by_first_keyword = {}
    self.sya_definitions = {}
    # Reset the data provider and restart the user-concept key sequence.
    self.sdp.reset()
    self.sdp.set_key(self.USER_CONCEPTS_KEYS, 1000)
def reset_cache(self, filter_to_use=None):
"""
Reset the different caches that exist.
@@ -220,6 +255,7 @@ class Sheerka(Concept):
if filter_to_use is None:
self.cache_by_key = {}
self.cache_by_id = {}
self.cache_by_name = {}
else:
raise NotImplementedError()
@@ -324,6 +360,38 @@ class Sheerka(Concept):
"""
return self.sets_handler.set_isa(context, concept, concept_set)
def set_sya_def(self, context, list_of_def):
    """
    Set the precedence and/or the associativity of a concept.

    :param context: execution context (its event digest keys the SDP write)
    :param list_of_def: list of tuple(concept_id, precedence (int), SyaAssociativity)
    :return: a SUCCESS return value, or an ERROR one when a concept is unknown
    """
    # First pass: validate every entry before touching the definitions.
    for concept_id, _precedence, _associativity in list_of_def:
        if concept_id == BuiltinConcepts.UNKNOWN_CONCEPT:
            return self.ret(self.name,
                            False,
                            self.new(BuiltinConcepts.ERROR, body=f"Concept {concept_id} is not known"))

    # Second pass: apply the updates; a (None, None) pair removes the definition.
    for concept_id, precedence, associativity in list_of_def:
        if precedence is None and associativity is None:
            # pop() with a default is a no-op when the key is absent,
            # matching the original try/del/except-KeyError behavior.
            self.sya_definitions.pop(concept_id, None)
        else:
            self.sya_definitions[concept_id] = (precedence, associativity.value)

    # Persist the updated definitions.
    self.sdp.set(context.event.get_digest(),
                 self.CONCEPTS_SYA_DEFINITION_ENTRY,
                 self.sya_definitions)
    return self.ret(self.name, True, self.new(BuiltinConcepts.SUCCESS))
def get_set_elements(self, context, concept):
"""
Concept is supposed to be a set
@@ -571,6 +639,22 @@ class Sheerka(Concept):
return self.value(body_to_use)
def get_error(self, obj):
    """
    Extract the error carried by *obj*, if any.

    :param obj: a builtin error Concept, a list (assumed to be errors), or a
                RETURN_VALUE concept
    :return: the error object, the list as-is, or None for a successful
             RETURN_VALUE
    :raises NotImplementedError: when *obj* has an unsupported shape
    """
    if isinstance(obj, Concept) and obj.metadata.is_builtin and obj.key in BuiltinErrors:
        return obj
    if isinstance(obj, list):
        # A list is passed through unchanged — presumably already a list of errors.
        return obj
    if self.isinstance(obj, BuiltinConcepts.RETURN_VALUE):
        if obj.status:
            # A successful return value carries no error.
            return None
        if self.isinstance(obj.body, BuiltinConcepts.PARSER_RESULT):
            # Unwrap the parser result and look for the error inside it.
            return self.get_error(obj.body.body)
    # BUG FIX: the original did `return NotImplementedError()`, handing callers
    # an exception instance as if it were a value; raise it instead.
    raise NotImplementedError(f"get_error: unsupported object of type {type(obj).__name__}")
def get_values(self, objs):
if not (isinstance(objs, list) or
self.isinstance(objs, BuiltinConcepts.LIST) or