Fixed #100 : SheerkaAdmin: Add builtins() command
Fixed #99 : SheerkaQueryManager: I can manage contains predicate when filtering objects Fixed #97 : ERROR: list indices must be integers or slices, not Concept Fixed #96 : SequenceNodeParser: SequenceNodeParser must correctly handle concept definition Fixed #95 : ResolveAmbiguity must not remove concepts that do not require evaluation Fixed #94 : Concepts with the same key are lost when new ontology Fixed #93 : Introduce BuiltinConcepts.EVAL_GLOBAL_TRUTH_REQUESTED Fixed #92 : ExpressionParser: Implement compile_disjunctions() Fixed #91 : Implement get_concepts_complexity(context, concepts, concept_parts) Fixed #90 : ResolveAmbiguity : where predicate is not used to resolve ambiguity Fixed #89 : ResolveAmbiguityEvaluator: Concepts embedded in ConceptNode are not resolved Fixed #88: SyaNodeParser: Parse multiple parameters when some of them are not recognized Fixed #87: SyaNodeParser : Parse the multiple parameters
This commit is contained in:
@@ -11,7 +11,7 @@ from core.global_symbols import CONCEPT_COMPARISON_CONTEXT, SyaAssociativity
|
||||
from core.sheerka.services.SheerkaComparisonManager import SheerkaComparisonManager
|
||||
from core.sheerka.services.SheerkaExecute import ParserInput
|
||||
from core.tokenizer import Token, TokenKind, Tokenizer
|
||||
from core.utils import get_n_clones, get_text_from_tokens, NextIdManager
|
||||
from core.utils import get_n_clones, get_text_from_tokens, NextIdManager, replace_after
|
||||
from parsers.BaseNodeParser import UnrecognizedTokensNode, ConceptNode, SourceCodeNode, \
|
||||
SourceCodeWithConceptNode, BaseNodeParser, VariableNode
|
||||
from parsers.BaseParser import ParsingError
|
||||
@@ -185,6 +185,19 @@ class SyaConceptParserHelper:
|
||||
if token.type != TokenKind.WHITESPACE:
|
||||
self.expected_parameters_before_first_token += 1
|
||||
|
||||
# remove useless whitespaces (spaces that are between VAR_DEF)
|
||||
if len(self.expected) > 2:
|
||||
temp = [self.expected[0]]
|
||||
for i in range(1, len(self.expected) - 1):
|
||||
token = self.expected[i]
|
||||
if (token.type == TokenKind.WHITESPACE and
|
||||
self.expected[i - 1].type == TokenKind.VAR_DEF and
|
||||
self.expected[i + 1].type == TokenKind.VAR_DEF):
|
||||
continue # skip it
|
||||
temp.append(token)
|
||||
temp.append(self.expected[-1])
|
||||
self.expected = temp
|
||||
|
||||
self.eat_token(first_keyword_found) # remove the first token
|
||||
self.tokens.append(first_keyword_found)
|
||||
|
||||
@@ -496,9 +509,18 @@ class InFixToPostFix:
|
||||
:return:
|
||||
"""
|
||||
if len(self.parameters_list) < parser_helper.expected_parameters_before_first_token:
|
||||
# The new concept expect some prefix parameters, but there's not enough
|
||||
parser_helper.error = "Not enough prefix parameters"
|
||||
return
|
||||
# There is not enough parameters to fill the new concept
|
||||
# Try to develop the UnrecognizedTokensNode, to see if it can match
|
||||
developed_param_list = self.develop_parameter_list(self.parameters_list)
|
||||
if len(developed_param_list) < parser_helper.expected_parameters_before_first_token:
|
||||
# The new concept expect some prefix parameters, but there's not enough
|
||||
parser_helper.error = "Not enough prefix parameters"
|
||||
return
|
||||
|
||||
# the developed_param_list does the job. Let's replace the previous values
|
||||
pivot = self.parameters_list[0]
|
||||
replace_after(self.parameters_list, pivot, developed_param_list)
|
||||
replace_after(self.out, pivot, developed_param_list)
|
||||
|
||||
if len(self.parameters_list) > parser_helper.expected_parameters_before_first_token:
|
||||
# There are more parameters than needed by the new concept
|
||||
@@ -530,6 +552,20 @@ class InFixToPostFix:
|
||||
:return:
|
||||
"""
|
||||
|
||||
def nb_expected_parameters(expected):
    """
    Count how many consecutive VAR_DEF tokens sit at the front of *expected*,
    i.e. the number of successive variables that are still expected.

    :param expected: sequence of tokens to scan from the front
    :return: count of leading tokens whose type is TokenKind.VAR_DEF
    """
    count = 0
    while count < len(expected) and expected[count].type == TokenKind.VAR_DEF:
        count += 1
    return count
|
||||
|
||||
# manage parenthesis that didn't find any match
|
||||
if self._is_lpar(self.stack[-1]):
|
||||
self._add_error(ParenthesisMismatchError(self.stack[-1]))
|
||||
@@ -538,6 +574,16 @@ class InFixToPostFix:
|
||||
assert len(self._concepts()) != 0 # sanity check
|
||||
|
||||
current_concept = self._concepts()[-1]
|
||||
|
||||
if (nb_expected := nb_expected_parameters(current_concept.expected)) > len(self.parameters_list):
|
||||
# There is not enough parameters in the list to fill the concept
|
||||
# Try to develop the UnrecognizedTokensNode to see if it can match
|
||||
developed_param_list = self.develop_parameter_list(self.parameters_list)
|
||||
if nb_expected == len(developed_param_list):
|
||||
pivot = self.parameters_list[0]
|
||||
replace_after(self.parameters_list, pivot, developed_param_list)
|
||||
replace_after(self.out, pivot, developed_param_list)
|
||||
|
||||
while len(current_concept.expected) > 0 and current_concept.expected[0].type == TokenKind.VAR_DEF:
|
||||
# eat everything that was expected
|
||||
if len(self.parameters_list) == 0:
|
||||
@@ -640,13 +686,24 @@ class InFixToPostFix:
|
||||
Helper function that pops the stack and put the item to the output, if needed
|
||||
:return:
|
||||
"""
|
||||
|
||||
item = self.stack[-1]
|
||||
|
||||
# fix the concept if needed
|
||||
if isinstance(item, SyaConceptParserHelper):
|
||||
# make sure the expected parameters of this item are eaten
|
||||
if 0 < len(item.expected) <= len(self.parameters_list):
|
||||
self.manage_parameters()
|
||||
if len(item.expected) > 0:
|
||||
# make sure the expected parameters of this item are eaten
|
||||
if len(item.expected) <= len(self.parameters_list):
|
||||
self.manage_parameters()
|
||||
else:
|
||||
# second chance to match the parameter list when it contains unrecognized token
|
||||
developed_param_list = self.develop_parameter_list(self.parameters_list)
|
||||
if len(item.expected) <= len(developed_param_list):
|
||||
pivot = self.parameters_list[0]
|
||||
replace_after(self.parameters_list, pivot, developed_param_list)
|
||||
replace_after(self.out, pivot, developed_param_list)
|
||||
self.manage_parameters()
|
||||
|
||||
item.fix_concept()
|
||||
|
||||
self.stack.pop()
|
||||
@@ -1119,6 +1176,25 @@ class InFixToPostFix:
|
||||
# clone.forked = self.forked
|
||||
return clone
|
||||
|
||||
@staticmethod
def develop_parameter_list(parameter_list):
    """
    Given a list of parameters (solely from self.parameters_list), expand each
    UnrecognizedTokensNode into one single-token UnrecognizedTokensNode per
    non-whitespace token it contains; every other parameter is kept as-is.

    :param parameter_list: list of parameter nodes to develop
    :return: a new list with UnrecognizedTokensNode parameters expanded
    """
    developed = []
    for parameter in parameter_list:
        if not isinstance(parameter, UnrecognizedTokensNode):
            developed.append(parameter)
            continue
        # Iterate the tokens directly instead of materializing a filtered
        # list comprehension first (same order, same elements, no temp list).
        for i, token in enumerate(parameter.tokens):
            if token.type == TokenKind.WHITESPACE:
                continue  # whitespace tokens are dropped when exploding the node
            # NOTE(review): bounds assume token index i is an offset from
            # parameter.start in source coordinates — confirm against
            # UnrecognizedTokensNode's constructor contract.
            developed.append(UnrecognizedTokensNode(parameter.start + i,
                                                    parameter.start + i,
                                                    [token]))
    return developed
|
||||
|
||||
|
||||
@dataclass()
|
||||
class PostFixToItem:
|
||||
@@ -1428,8 +1504,8 @@ class SyaNodeParser(BaseNodeParser):
|
||||
def _has_sya(items):
|
||||
for item in items:
|
||||
if isinstance(item, SourceCodeWithConceptNode):
|
||||
if _has_sya(item.nodes):
|
||||
return True
|
||||
return _has_sya(item.nodes)
|
||||
|
||||
if isinstance(item, SyaConceptParserHelper):
|
||||
return True
|
||||
return False
|
||||
|
||||
Reference in New Issue
Block a user