Implemented ConceptManager with concept creation, modification, and deletion

This commit is contained in:
2020-12-08 15:36:21 +01:00
parent d364878ddb
commit 4b6e1dd55b
40 changed files with 1847 additions and 979 deletions
+11 -4
View File
@@ -846,6 +846,7 @@ class BaseNodeParser(BaseParser):
def get_concepts(self, token, to_keep, custom=None, to_map=None, strip_quotes=False):
"""
Tries to determine whether any concepts match the value of the token
Caution: Returns the actual cache, not a copy
:param token:
:param to_keep: predicate that tells whether a concept is eligible
:param custom: lambda name -> List[Concepts] that returns extra concepts for the given name
@@ -882,16 +883,17 @@ class BaseNodeParser(BaseParser):
return custom_concepts if custom else None
@staticmethod
def get_concepts_by_first_token(context, concepts, use_sheerka=False):
def get_concepts_by_first_token(context, concepts, use_sheerka=False, previous_entries=None):
"""
Create the map describing the first token expected by each concept
:param context:
:param concepts: lists of concepts to parse
:param use_sheerka: if True, update concepts_by_first_keyword from sheerka
:param previous_entries:
:return:
"""
sheerka = context.sheerka
res = sheerka.cache_manager.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) if use_sheerka else {}
res = sheerka.cache_manager.copy(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY) if use_sheerka else (previous_entries or {})
for concept in concepts:
keywords = BaseNodeParser.get_first_tokens(sheerka, concept)
@@ -909,10 +911,15 @@ class BaseNodeParser(BaseParser):
return sheerka.ret("BaseNodeParser", True, res)
@staticmethod
def resolve_concepts_by_first_keyword(context, concepts_by_first_keyword):
def resolve_concepts_by_first_keyword(context, concepts_by_first_keyword, modified_concepts=None):
sheerka = context.sheerka
res = {}
def get_by_id(c_id):
if modified_concepts and c_id in modified_concepts:
return modified_concepts[c_id]
return sheerka.get_by_id(c_id)
def resolve_concepts(concept_str):
c_key, c_id = core.utils.unstr_concept(concept_str)
if c_id in already_seen:
@@ -924,7 +931,7 @@ class BaseNodeParser(BaseParser):
to_resolve = set()
chicken_and_egg = set()
concept = sheerka.get_by_id(c_id)
concept = get_by_id(c_id)
if sheerka.isaset(context, concept):
concepts = sheerka.get_set_elements(context, concept)
+1 -6
View File
@@ -76,15 +76,10 @@ class ExactConceptParser(BaseParser):
value = words[i]
concept.def_var_by_index(index, str_concept(value) if isinstance(value, tuple) else value)
concept.get_metadata().need_validation = True
# if self.verbose_log.isEnabledFor(logging.DEBUG):
# prop_name = concept.get_metadata().variables[index][0]
# context.log(
# f"Added variable {index}: {prop_name}='{words[i]}'.",
# self.name)
already_recognized.append(concept)
by_name = sheerka.resolve(parser_input.as_text())
by_name = sheerka.fast_resolve(parser_input.as_text())
core.builtin_helpers.set_is_evaluated(by_name)
recognized = self.merge_concepts(already_recognized, by_name)
+7
View File
@@ -261,6 +261,13 @@ class SequenceNodeParser(BaseNodeParser):
return make_unique(concepts_by_name + concepts_by_first_keyword, lambda c: c.id)
def get_concepts_sequences(self):
"""
Tries to find the matching concept sequences.
TODO: KSI 20201206
I think the code can be optimized: we create a new instance of each concept
before validating that we are actually going to keep it.
:return:
"""
forked = []