Resolved some remaining chicken-and-egg issues when parsing BNF nodes

This commit is contained in:
2020-07-05 11:03:03 +02:00
parent ad8a997942
commit 71f753c925
14 changed files with 310 additions and 56 deletions
+4 -4
View File
@@ -373,15 +373,15 @@ class ExecutionContext:
:param predicate: what execution context to keep
:param get_obj: lambda to compute what to return
:param start_with_self: include the current execution context in the search
:param stop: stop the search if matched
:param stop: condition to stop
:return:
"""
current = self if start_with_self else self._parent
while current:
if stop and stop(current):
break
if predicate is None or predicate(current):
yield current if get_obj is None else get_obj(current)
if stop and stop(current):
break
current = current._parent
+6
View File
@@ -8,6 +8,7 @@ from cache.CacheManager import CacheManager
from cache.DictionaryCache import DictionaryCache
from cache.IncCache import IncCache
from cache.ListIfNeededCache import ListIfNeededCache
from cache.SetCache import SetCache
from core.builtin_concepts import BuiltinConcepts, ErrorConcept, ReturnValueConcept, BuiltinErrors, BuiltinUnique, \
UnknownConcept
from core.concept import Concept, ConceptParts, PROPERTIES_FOR_NEW
@@ -31,6 +32,8 @@ class Sheerka(Concept):
CONCEPTS_BY_NAME_ENTRY = "Concepts_By_Name"
CONCEPTS_BY_HASH_ENTRY = "Concepts_By_Hash" # store hash of concepts definitions (not values)
CONCEPTS_REFERENCES = "Concepts_References" # tracks references between concepts
CONCEPTS_BY_FIRST_KEYWORD_ENTRY = "Concepts_By_First_Keyword"
RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY = "Resolved_Concepts_By_First_Keyword"
CONCEPTS_SYA_DEFINITION_ENTRY = "Concepts_Sya_Definitions"
@@ -215,6 +218,9 @@ class Sheerka(Concept):
cache = ListIfNeededCache(**params(self.CONCEPTS_BY_HASH_ENTRY))
register_concept_cache(self.CONCEPTS_BY_HASH_ENTRY, cache, lambda c: c.get_definition_hash(), True)
cache = SetCache(default=lambda k: self.sdp.get(self.CONCEPTS_REFERENCES, k))
self.cache_manager.register_cache(self.CONCEPTS_REFERENCES, cache)
cache = DictionaryCache(default=lambda k: self.sdp.get(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, k))
self.cache_manager.register_cache(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, cache)
self.cache_manager.get(self.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, None) # to init from sdp
@@ -1,6 +1,6 @@
import core.utils
from core.builtin_concepts import BuiltinConcepts, ErrorConcept
from core.concept import Concept, DEFINITION_TYPE_DEF, ensure_concept
from core.concept import Concept, DEFINITION_TYPE_DEF, ensure_concept, DEFINITION_TYPE_BNF
from core.sheerka.services.sheerka_service import BaseService
from sdp.sheerkaDataProvider import SheerkaDataProviderDuplicateKeyError
@@ -64,7 +64,6 @@ class SheerkaCreateNewConcept(BaseService):
resolved_concepts_by_first_keyword = init_ret_value.body
# if everything is fine
concept.freeze_definition_hash()
cache_manager.add_concept(concept)
cache_manager.put(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, False, concepts_by_first_keyword)
@@ -74,9 +73,36 @@ class SheerkaCreateNewConcept(BaseService):
# allow search by definition when definition relevant
cache_manager.put(self.sheerka.CONCEPTS_BY_NAME_ENTRY, concept.metadata.definition, concept)
# update references
for ref in self.compute_references(concept):
cache_manager.put(sheerka.CONCEPTS_REFERENCES, ref, concept.id)
# TODO : this line seems to be useless
# The grammar is never reset
if concept.bnf and init_bnf_ret_value is not None and init_bnf_ret_value.status:
sheerka.cache_manager.clear(sheerka.CONCEPTS_GRAMMARS_ENTRY)
# process the return if needed
ret = sheerka.ret(self.NAME, True, sheerka.new(BuiltinConcepts.NEW_CONCEPT, body=concept))
return ret
def compute_references(self, concept):
    """
    Collect the ids of all concepts referenced by *concept*'s BNF definition.

    We need to keep a track of all concepts used by the current concept
    so that if one of these is modified, we can modify the current concept
    accordingly.

    :param concept: the concept whose outgoing references must be computed
    :return: a set of referenced concept ids (empty for non-BNF definitions)
    """
    refs = set()
    if concept.metadata.definition_type == DEFINITION_TYPE_BNF:
        # local import to avoid a module-level circular dependency with parsers
        from parsers.BnfNodeParser import BnfNodeConceptExpressionVisitor
        other_concepts_visitor = BnfNodeConceptExpressionVisitor()
        other_concepts_visitor.visit(concept.bnf)
        # NOTE: the loop variable used to shadow the `concept` parameter;
        # renamed so the parameter stays readable throughout the method.
        for referenced in other_concepts_visitor.references:
            # references may be stored either as Concept objects or as string keys
            if isinstance(referenced, str):
                referenced = self.sheerka.get_by_key(referenced)
            refs.add(referenced.id)
    return refs
@@ -1,5 +1,6 @@
from core.builtin_concepts import BuiltinConcepts
from core.sheerka.services.sheerka_service import BaseService
from parsers.BnfParser import BnfParser
class SheerkaModifyConcept(BaseService):
@@ -36,6 +37,10 @@ class SheerkaModifyConcept(BaseService):
BuiltinConcepts.CONCEPT_ALREADY_DEFINED,
body=concept))
old_references = self.sheerka.cache_manager.get(self.sheerka.CONCEPTS_REFERENCES, concept.id)
if old_references:
old_references = old_references.copy()
self.sheerka.cache_manager.update_concept(old_version, concept)
# TODO : update concept by first keyword
@@ -45,3 +50,21 @@ class SheerkaModifyConcept(BaseService):
ret = self.sheerka.ret(self.NAME, True, self.sheerka.new(BuiltinConcepts.NEW_CONCEPT, body=concept))
return ret
def update_references(self, context, concept):
    """
    Updates all the concepts that reference concept.

    Looks up the reverse-reference cache for *concept* and, for every
    referencing concept that has a BNF expression, refreshes its
    recurse ids via BnfParser.update_recurse_id.

    :param context: current execution context
    :param concept: the concept that was modified
    :return: None
    """
    refs = self.sheerka.cache_manager.get(self.sheerka.CONCEPTS_REFERENCES, concept.id)
    if not refs:
        return
    # NOTE: the loop variable used to shadow the `concept` parameter;
    # renamed for clarity (the parameter is no longer needed past this point).
    for referencing_id in refs:
        referencing = self.sheerka.get_by_id(referencing_id)
        if referencing.bnf is not None:
            BnfParser.update_recurse_id(context, referencing_id, referencing.bnf)
@@ -4,6 +4,7 @@ from cache.SetCache import SetCache
from core.ast.nodes import python_to_concept
from core.builtin_concepts import BuiltinConcepts
from core.concept import Concept, ConceptParts, ensure_concept, DEFINITION_TYPE_BNF
from core.sheerka.services.SheerkaModifyConcept import SheerkaModifyConcept
from core.sheerka.services.sheerka_service import BaseService
GROUP_PREFIX = 'All_'
@@ -54,7 +55,13 @@ class SheerkaSetsManager(BaseService):
if not res.status:
return res
return self.add_concept_to_set(context, concept, concept_set)
res = self.add_concept_to_set(context, concept, concept_set)
# update concept_set references
self.sheerka.services[SheerkaModifyConcept.NAME].update_references(context, concept_set)
self.concepts_in_set.delete(concept_set.id)
return res
def add_concept_to_set(self, context, concept, concept_set):
"""
+117 -25
View File
@@ -158,7 +158,7 @@ class ParsingExpression:
def __init__(self, *args, **kwargs):
self.elements = args
nodes = kwargs.get('nodes', [])
nodes = kwargs.get('nodes', []) or []
if not hasattr(nodes, '__iter__'):
nodes = [nodes]
self.nodes = nodes
@@ -201,8 +201,8 @@ class ConceptExpression(ParsingExpression):
When the grammar is created, it is replaced by the actual concept
"""
def __init__(self, concept, rule_name="", recurse_id=None):
super().__init__(rule_name=rule_name)
def __init__(self, concept, rule_name="", recurse_id=None, nodes=None):
super().__init__(rule_name=rule_name, nodes=nodes)
self.concept = concept
self.recurse_id = recurse_id
@@ -216,6 +216,10 @@ class ConceptExpression(ParsingExpression):
if not isinstance(other, ConceptExpression):
return False
# TODO : enable self.recurse_id when it will be correctly implemented
# if self.recurse_id != other.recurse_id:
# return False
if isinstance(self.concept, Concept):
return self.concept.id == other.concept.id
@@ -425,6 +429,12 @@ class Repetition(ParsingExpression):
super(Repetition, self).__init__(*elements, **kwargs)
self.sep = kwargs.get('sep', None)
def clone(self):
    """
    Return a copy of this repetition with the same elements, rule name,
    nodes and separator.

    The constructor takes the elements as ``*elements`` (see the ``super``
    call in ``__init__``), so they must be unpacked here; passing
    ``self.elements`` as a single positional argument would wrap the
    element tuple inside another tuple.
    """
    # NOTE(review): nodes and sep are shared, not deep-copied — confirm
    # callers never mutate them after cloning.
    return Repetition(*self.elements,
                      rule_name=self.rule_name,
                      nodes=self.nodes,
                      sep=self.sep)
class ZeroOrMore(Repetition):
"""
@@ -711,6 +721,14 @@ class BnfNodeFirstTokenVisitor(ParsingExpressionVisitor):
return self.STOP
class BnfNodeConceptExpressionVisitor(ParsingExpressionVisitor):
    """
    Visitor that collects every concept referenced by a parsing-expression tree.

    After ``visit`` has run, ``references`` holds the ``concept`` attribute of
    each ConceptExpression encountered, in visit order (duplicates are kept;
    entries may be Concept objects or string keys — callers resolve strings).
    """

    def __init__(self):
        # collected ConceptExpression.concept values, in visit order
        self.references = []

    def visit_ConceptExpression(self, pe):
        # record the referenced concept; no deduplication is performed here
        self.references.append(pe.concept)
class BnfConceptParserHelper:
def __init__(self, parser):
self.parser = parser
@@ -1137,14 +1155,84 @@ class BnfNodeParser(BaseNodeParser):
return concept_parser_helpers
def check_for_infinite_recursion(self, parsing_expression, already_found, only_first=False):
def fix_infinite_recursions(self, context, grammar, concept_id, parsing_expression):
    """
    Check the newly created parsing expression.

    Some infinite recursion can be resolved, simply by removing the
    pexpression that causes the loop. Let's look for that.

    :param context: current execution context
    :param grammar: mapping of concept id -> parsing expression
    :param concept_id: id of the concept whose expression is being built
    :param parsing_expression: expression tree to check and fix in place
    :return: the ids involved in the last detected recursion cycle
             (empty when no cycle was detected)
    """

    def _find(expression_, path_):
        # Follow the chain of ConceptExpression children identified by the
        # ids in path_; return (parent node, child index, node reached).
        index_ = -1
        parent_ = None
        for node_id in path_:
            # a resolved ConceptExpression wraps its expression in nodes[0]
            expression_ = expression_.nodes[0] if isinstance(expression_, ConceptExpression) else expression_
            for i, node in [(i, n) for i, n in enumerate(expression_.nodes) if isinstance(n, ConceptExpression)]:
                if node.recurse_id == node_id or node.concept.id == node_id:
                    index_ = i
                    parent_ = expression_
                    expression_ = node  # take the child of the ConceptExpression found
                    break
            else:
                # for-else: no matching child found for this path segment
                raise IndexError(f"path {path_} cannot be found in '{expression_}'")
        return parent_, index_, expression_

    def _fix_node(expression, path):
        # Re-resolve the ConceptExpression located at `path`, after removing
        # from the grammar the two entries that close the cycle, then splice
        # the freshly resolved expression back into its parent.
        parent, index, expression_update = _find(expression, path[1:-2])
        assert isinstance(expression_update, ConceptExpression)
        desc = f"Fixing circular reference {path}"
        with context.push(BuiltinConcepts.INIT_BNF,
                          expression_update.concept,
                          who=self.name,
                          obj=expression_update.concept,
                          concepts_to_skip=[concept_id],
                          desc=desc) as sub_context:
            # drop the grammar entries that close the loop before re-resolving
            new_grammar = grammar.copy()
            for node_id in path[-2:]:
                del new_grammar[node_id]
            new_nodes = self.resolve_concept_parsing_expression(sub_context,
                                                               expression_update.concept,
                                                               expression_update.rule_name, new_grammar, set())
            new = ConceptExpression(expression_update.concept,
                                    rule_name=expression_update.rule_name,
                                    recurse_id=expression_update.recurse_id,
                                    nodes=new_nodes)
            parent.nodes[index] = new

    # Repeatedly detect cycles and fix the fixable ones (set-based, whose
    # path entries contain "#"); stop on the first unfixable cycle or when
    # no cycle remains.
    while True:
        already_found = [concept_id]
        concepts_in_recursion = []
        if self.check_for_infinite_recursion(parsing_expression, already_found, concepts_in_recursion):
            if "#" in concepts_in_recursion[-2]:
                # means that it's isaset concept
                _fix_node(parsing_expression, concepts_in_recursion[:-1])
            else:
                break
        else:
            break
    return concepts_in_recursion
def check_for_infinite_recursion(self, parsing_expression, already_found, in_recursion, only_first=False):
if isinstance(parsing_expression, ConceptExpression):
id_to_use = parsing_expression.recurse_id or parsing_expression.concept.id
if id_to_use in already_found:
already_found.append(id_to_use) # add the id again, to know where the cycle starts
in_recursion.extend(already_found)
return True
already_found.add(id_to_use)
return self.check_for_infinite_recursion(parsing_expression.nodes[0], already_found, only_first)
already_found.append(id_to_use)
return self.check_for_infinite_recursion(
parsing_expression.nodes[0], already_found, in_recursion, only_first)
if isinstance(parsing_expression, Sequence):
# for sequence, we need to check all nodes
@@ -1154,8 +1242,7 @@ class BnfNodeParser(BaseNodeParser):
nodes = parsing_expression.nodes
for node in nodes:
already_found_for_current_node = already_found.copy()
if self.check_for_infinite_recursion(node, already_found_for_current_node, False):
already_found.update(already_found_for_current_node)
if self.check_for_infinite_recursion(node, already_found_for_current_node, in_recursion, False):
return True
return False
@@ -1164,8 +1251,7 @@ class BnfNodeParser(BaseNodeParser):
# we are safe
for node in parsing_expression.nodes:
already_found_for_current_node = already_found.copy()
if self.check_for_infinite_recursion(node, already_found, True):
already_found.update(already_found_for_current_node)
if self.check_for_infinite_recursion(node, already_found_for_current_node, in_recursion, True):
return True
else:
return False
@@ -1174,8 +1260,7 @@ class BnfNodeParser(BaseNodeParser):
if isinstance(parsing_expression, UnOrderedChoice):
for node in parsing_expression.nodes:
already_found_for_current_node = already_found.copy()
if self.check_for_infinite_recursion(node, already_found_for_current_node, True):
already_found.update(already_found_for_current_node)
if self.check_for_infinite_recursion(node, already_found_for_current_node, in_recursion, True):
return True
return False
@@ -1215,13 +1300,20 @@ class BnfNodeParser(BaseNodeParser):
if isinstance(node, UnderConstruction):
pe.nodes[i] = grammar.get(node.concept_id)
# check for infinite recursion.
# We are adding a new concept. Does it create an infinite recursion ?
concepts_in_recursion = set()
if self.check_for_infinite_recursion(ret, concepts_in_recursion):
cycle = context.sheerka.new(BuiltinConcepts.CHICKEN_AND_EGG, body=concepts_in_recursion)
# # check for infinite recursions.
# # and try to fix them when possible
# already_found = [concept.id]
# concepts_in_recursion = []
# if self.check_for_infinite_recursion(ret, already_found, concepts_in_recursion):
# chicken_anf_egg = context.sheerka.new(BuiltinConcepts.CHICKEN_AND_EGG, body=concepts_in_recursion)
# for concept_id in concepts_in_recursion:
# grammar[concept_id] = chicken_anf_egg
concepts_in_recursion = self.fix_infinite_recursions(context, grammar, concept.id, ret)
if concepts_in_recursion:
chicken_anf_egg = context.sheerka.new(BuiltinConcepts.CHICKEN_AND_EGG, body=concepts_in_recursion)
for concept_id in concepts_in_recursion:
grammar[concept_id] = cycle
grammar[concept_id] = chicken_anf_egg
# update, in case of infinite circular recursion
ret = grammar[concept.id]
@@ -1287,13 +1379,16 @@ class BnfNodeParser(BaseNodeParser):
if c.id == context.obj.id:
continue
# c_pe = self.resolve_concept_parsing_expression(context, c, None, grammar, to_update, to_keep)
# if self.check_for_infinite_recursion(c_pe, {concept.id}, True):
# continue
if hasattr(context, "concepts_to_skip") and c.id in context.concepts_to_skip:
continue
valid_concepts.append(c)
nodes = [ConceptExpression(c, rule_name=c.name, recurse_id=key_to_use) for c in valid_concepts]
nodes = []
for c in valid_concepts:
c_recurse_id = f"{c.id}#{c.name}#{concept.id}" if self.sheerka.isaset(context, c) else None
nodes.append(ConceptExpression(c, rule_name=c.name, recurse_id=c_recurse_id))
resolved = self.resolve_parsing_expression(ssc,
UnOrderedChoice(*nodes),
grammar,
@@ -1341,9 +1436,6 @@ class BnfNodeParser(BaseNodeParser):
if not isinstance(pe, (ParsingExpression, UnderConstruction)):
return pe # an error is detected, escalate it
#
# if isinstance(pe, UnderConstruction) and expression.concept.id == pe.concept_id:
# return pe # we are looking for ourself, just return it
if isinstance(pe, UnderConstruction):
to_update.add(ToUpdate(id(expression), expression))
+31 -10
View File
@@ -35,7 +35,7 @@ class BnfParser(BaseParser):
self.lexer_iter = None
self._current = None
self.after_current = None
self.after_current = None # one look ahead
self.nb_open_par = 0
self.context = None
self.source = ""
@@ -283,16 +283,37 @@ class BnfParser(BaseParser):
def eat_rule_name_if_needed(self, expression):
token = self.get_token()
if token is None or token.type != TokenKind.EQUALS:
return expression
if token is not None and token.type == TokenKind.EQUALS:
self.next_token() # eat equals
token = self.get_token()
self.next_token() # eat equals
token = self.get_token()
if token is None or token.type != TokenKind.IDENTIFIER:
return self.add_error(
UnexpectedTokenErrorNode(f"Unexpected token '{token}'", token, [TokenKind.IDENTIFIER]))
if token is None or token.type != TokenKind.IDENTIFIER:
return self.add_error(
UnexpectedTokenErrorNode(f"Unexpected token '{token}'", token, [TokenKind.IDENTIFIER]))
expression.rule_name = token.value
self.next_token()
if BnfParser.is_expression_a_set(self.context, expression):
root_concept = self.context.search(
start_with_self=True,
predicate=lambda ec: ec.action == BuiltinConcepts.INIT_BNF,
get_obj=lambda ec: ec.action_context,
stop=lambda ec: ec.action == BuiltinConcepts.INIT_BNF)
root_concept = list(root_concept)
if root_concept and hasattr(root_concept[0], "id"):
expression.recurse_id = f"{expression.concept.id}#{expression.rule_name}#{root_concept[0].id}"
expression.rule_name = token.value
self.next_token()
return expression
@staticmethod
def is_expression_a_set(context, expression):
    """
    Tell whether *expression* is a ConceptExpression referencing a set concept.

    :param context: execution context giving access to the sheerka instance
    :param expression: any parsing-expression node
    :return: True when the expression wraps a concept for which isaset holds
    """
    if not isinstance(expression, ConceptExpression):
        return False
    return context.sheerka.isaset(context, expression.concept)
@staticmethod
def update_recurse_id(context, concept_id, expression):
    """
    Recursively refresh the recurse_id of every set-typed ConceptExpression.

    :param context: execution context (used to test set membership)
    :param concept_id: id of the root concept owning this expression tree
    :param expression: parsing expression to walk (children via .elements)
    """
    if BnfParser.is_expression_a_set(context, expression):
        # recurse_id format: "<set concept id>#<rule name>#<root concept id>"
        expression.recurse_id = f"{expression.concept.id}#{expression.rule_name}#{concept_id}"
    for element in expression.elements:
        BnfParser.update_recurse_id(context, concept_id, element)
+2 -2
View File
@@ -355,14 +355,14 @@ class DefaultParser(BaseParser):
self.add_error(SyntaxErrorNode([definition_tokens[1]], "Empty declaration"), False)
return None, NotInitializedNode()
regex_parser = BnfParser()
bnf_regex_parser = BnfParser()
desc = f"Resolving BNF {current_concept_def.definition}"
with self.context.push(BuiltinConcepts.INIT_BNF,
current_concept_def,
who=self.name,
obj=current_concept_def,
desc=desc) as sub_context:
parsing_result = regex_parser.parse(sub_context, tokens)
parsing_result = bnf_regex_parser.parse(sub_context, tokens)
sub_context.add_values(return_values=parsing_result)
if not parsing_result.status:
+13 -7
View File
@@ -53,13 +53,19 @@ class BaseTest:
c = Concept(c)
if c.metadata.definition and c.metadata.definition_type != DEFINITION_TYPE_DEF:
bnf_parser = BnfParser()
res = bnf_parser.parse(context, c.metadata.definition)
if res.status:
c.bnf = res.value.value
c.metadata.definition_type = DEFINITION_TYPE_BNF
else:
raise Exception(f"Error in bnf definition '{c.metadata.definition}'", sheerka.get_error(res))
desc = f"Resolving BNF {c.metadata.definition}"
with context.push(BuiltinConcepts.INIT_BNF,
c,
obj=c,
desc=desc) as sub_context:
bnf_parser = BnfParser()
res = bnf_parser.parse(sub_context, c.metadata.definition)
if res.status:
c.bnf = res.value.value
c.metadata.definition_type = DEFINITION_TYPE_BNF
else:
raise Exception(f"Error in bnf definition '{c.metadata.definition}'", sheerka.get_error(res))
if create_new:
sheerka.create_new_concept(context, c)
+2 -2
View File
@@ -116,11 +116,11 @@ def test_i_can_search():
assert list(abbb.search(start_with_self=True)) == [abbb, abb, ab, a]
assert list(abbb.search(lambda ec: ec.obj != "skip")) == [ab, a]
assert list(abbb.search(lambda ec: ec.obj != "skip", lambda ec: ec.action_context)) == ["ab", "a"]
assert list(abbb.search(stop=lambda ec: ec.obj == "skip")) == []
assert list(abbb.search(stop=lambda ec: ec.obj == "skip")) == [abb]
assert list(abbb.search(
stop=lambda ec: ec.obj == "skip",
start_with_self=True,
get_obj=lambda ec: ec.obj)) == ["obj_abbb"]
get_obj=lambda ec: ec.obj)) == ["obj_abbb", "skip"]
def test_variables_are_passed_to_children_but_not_to_parents():
@@ -182,6 +182,22 @@ class TestSheerkaCreateNewConcept(TestUsingMemoryBasedSheerka):
assert sheerka.cache_manager.get(sheerka.CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [bnf_concept.id]
assert sheerka.cache_manager.get(sheerka.RESOLVED_CONCEPTS_BY_FIRST_KEYWORD_ENTRY, "-") == [bnf_concept.id]
def test_concept_references_are_updated(self):
    # Creating a BNF concept must register, for every concept its definition
    # mentions, a reverse reference entry: referenced id -> {referencing ids}.
    sheerka, context, one, two, number, twenty, twenties = self.init_concepts(
        "one",
        "two",
        "number",
        "twenty",
        Concept("twenties", definition="twenty one | two 'hundred'"),
        create_new=True
    )
    # every concept appearing in the definition points back at 'twenties'
    assert sheerka.cache_manager.get(sheerka.CONCEPTS_REFERENCES, one.id) == {twenties.id}
    assert sheerka.cache_manager.get(sheerka.CONCEPTS_REFERENCES, two.id) == {twenties.id}
    # 'number' does not appear in the definition, so no entry exists
    assert sheerka.cache_manager.get(sheerka.CONCEPTS_REFERENCES, number.id) is None
    assert sheerka.cache_manager.get(sheerka.CONCEPTS_REFERENCES, twenty.id) == {twenties.id}
    # a concept is not recorded as referencing itself
    assert sheerka.cache_manager.get(sheerka.CONCEPTS_REFERENCES, twenties.id) is None
class TestSheerkaCreateNewConceptFileBased(TestUsingFileBasedSheerka):
def test_i_can_add_several_concepts(self):
+35 -1
View File
@@ -250,6 +250,40 @@ class TestSheerkaSetsManager(TestUsingMemoryBasedSheerka):
assert sheerka.isa(bar, baz)
assert sheerka.isa(foo, baz)
def test_concept_expression_recurse_id_is_updated(self):
    # When a concept becomes a set (via set_isa), ConceptExpressions that
    # reference it must receive a recurse_id of the form "<id>#<rule>#<root id>".
    sheerka, context, one, number, twenties = self.init_concepts(
        "one",
        "number",
        Concept("twenties", definition="'twenty' number").def_var("number"),
        create_new=True
    )
    # 'number' is not a set yet, so no recurse id was assigned at creation
    assert twenties.bnf.elements[1].recurse_id is None
    # update number
    sheerka.set_isa(context, sheerka.new("one"), number)
    # ids are sequentially assigned by the fixture — TODO confirm stability
    assert twenties.bnf.elements[1].recurse_id == "1002#number#1003"
def test_concepts_in_group_cache_is_updated(self):
    # The CONCEPTS_IN_GROUPS cache must stay consistent with
    # get_set_elements as members are added to a set one by one.
    sheerka, context, one, two, number = self.init_concepts("one", "two", "number")
    sheerka.set_isa(context, sheerka.new("one"), number)
    elements = sheerka.get_set_elements(context, number)
    assert [c.id for c in elements] == [one.id]
    concepts_in_cache = sheerka.cache_manager.get(SheerkaSetsManager.CONCEPTS_IN_GROUPS_ENTRY, number.id)
    assert [c.id for c in concepts_in_cache] == [one.id]
    # add another element to number
    sheerka.set_isa(context, sheerka.new("two"), number)
    elements = sheerka.get_set_elements(context, number)
    # element order is not guaranteed, so compare as sets
    assert {c.id for c in elements} == {one.id, two.id}
    concepts_in_cache = sheerka.cache_manager.get(SheerkaSetsManager.CONCEPTS_IN_GROUPS_ENTRY, number.id)
    assert {c.id for c in concepts_in_cache} == {one.id, two.id}
class TestSheerkaSetsManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka):
def test_i_can_add_concept_to_set_and_retrieve_it_in_another_session(self):
@@ -280,7 +314,7 @@ class TestSheerkaSetsManagerUsingFileBasedSheerka(TestUsingFileBasedSheerka):
foo4 = Concept("foo4")
for c in [foo3, foo4]:
sheerka.create_new_concept(context, c)
sets_handler = sheerka.services[SheerkaSetsManager.NAME]
res = sets_handler.add_concepts_to_set(context, (foo3, foo4), group)
assert res.status
+3 -2
View File
@@ -26,7 +26,7 @@ cmap = {
"one hundred": Concept("one hundred", body="100"),
"one_hundred": Concept("'one hundred'", body="100"),
"hundreds": Concept("hundreds", definition="number=n1 'hundred' 'and' number=n2",
where="n1 < 10 and n2 < 100", body="n1 * 100 + n2"),
where="n1 < 10 and n2 < 100", body="n1 * 100 + n2").def_var("n1").def_var("n2"),
"bnf baz": Concept("bnf baz", definition="'baz'"), # this one should be chosen
@@ -103,6 +103,7 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka):
sheerka.set_isa(context, sheerka.new("forty"), sheerka.new("number"))
sheerka.set_isa(context, sheerka.new("fifty"), sheerka.new("number"))
sheerka.set_isa(context, sheerka.new("one hundred"), sheerka.new("number"))
sheerka.set_isa(context, sheerka.new("hundreds"), sheerka.new("number"))
# Pay attention. 'twenties (t1 and t2) are not set as number
@@ -848,7 +849,7 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka):
parsing_expression = parser.get_parsing_expression(context, my_map["foo"])
assert sheerka.isinstance(parsing_expression, BuiltinConcepts.CHICKEN_AND_EGG)
assert sheerka.isinstance(parser.concepts_grammars.get(my_map["foo"].id), BuiltinConcepts.CHICKEN_AND_EGG)
assert parser.concepts_grammars.get(my_map["foo"].id).body == {"1001", "1002", "1003", "1004"}
assert parser.concepts_grammars.get(my_map["foo"].id).body == ["1001", "1002", "1003", "1004", "1001"]
assert sheerka.isinstance(parser.concepts_grammars.get(my_map["bar"].id), BuiltinConcepts.CHICKEN_AND_EGG)
assert sheerka.isinstance(parser.concepts_grammars.get(my_map["baz"].id), BuiltinConcepts.CHICKEN_AND_EGG)
+22
View File
@@ -214,3 +214,25 @@ class TestBnfParser(TestUsingMemoryBasedSheerka):
assert not res.status
assert context.sheerka.isinstance(res.value, BuiltinConcepts.UNKNOWN_CONCEPT)
assert res.value.body == ('key', 'foo')
def test_concept_expression_are_correctly_created_when_isa_concept_is_detected(self):
    # Parsing a definition that names a set concept ('number') must produce
    # a ConceptExpression carrying a recurse_id that ties the set concept,
    # the rule name, and the root concept being resolved.
    sheerka, context, parser, one, two, number, twenties = self.init_parser(
        "one",
        "two",
        "number",
        "twenties")
    sheerka.set_isa(context, sheerka.new("one"), number)
    sheerka.set_isa(context, sheerka.new("two"), number)
    sheerka.set_isa(context, sheerka.new("twenties"), number)
    # the parser discovers the root concept from the INIT_BNF context
    sub_context = context.push(BuiltinConcepts.INIT_BNF, twenties)
    expression = "'twenty' number=n1"
    res = parser.parse(sub_context, Tokenizer(expression))
    assert not parser.has_error
    assert res.status
    pexpression = res.value.value
    assert pexpression == Sequence(StrMatch('twenty'), ConceptExpression(number, "n1"))
    # ids are sequentially assigned by the fixture — TODO confirm stability
    assert pexpression.elements[1].recurse_id == "1003#n1#1004"