Added SheerkaComparisonManager

This commit is contained in:
2020-05-17 20:19:26 +02:00
parent 56e0a9d338
commit 08e3086820
29 changed files with 586 additions and 148 deletions
+9 -99
View File
@@ -952,107 +952,17 @@ class SyaNodeParser(BaseNodeParser):
sya_concept_def.precedence = sya_def[0]
if sya_def[1] is not None:
sya_concept_def.associativity = sya_def[1]
if parser.sheerka:
concept_weight = parser.sheerka.get_concepts_weights(BuiltinConcepts.PRECEDENCE)
if concept.id in concept_weight:
sya_concept_def.precedence = concept_weight[concept.id]
if associativity := concept.get_prop(BuiltinConcepts.ASSOCIATIVITY):
sya_concept_def.associativity = SyaAssociativity(associativity)
return sya_concept_def
# def reset_parser(self, context, text):
# self.context = context
# self.sheerka = context.sheerka
# self.text = text
#
# try:
# self.tokens = list(self.get_input_as_tokens(text))
# except LexerError as e:
# self.add_error(self.sheerka.new(BuiltinConcepts.ERROR, body=e), False)
# return False
#
# self.token = None
# self.pos = -1
# return True
#
# def add_error(self, error, next_token=True):
# self.error_sink.append(error)
# if next_token:
# self.next_token()
# return error
#
# def get_token(self) -> Token:
# return self.token
#
# def next_token(self, skip_whitespace=True):
# if self.token and self.token.type == TokenKind.EOF:
# return False
#
# self.pos += 1
# self.token = self.tokens[self.pos]
#
# if skip_whitespace:
# while self.token.type == TokenKind.WHITESPACE or self.token.type == TokenKind.NEWLINE:
# self.pos += 1
# self.token = self.tokens[self.pos]
#
# return self.token.type != TokenKind.EOF
# def initialize(self, context, concepts=None, sya_definitions=None):
# self.context = context
# self.sheerka = context.sheerka
#
# if sya_definitions:
# self.sya_definitions = sya_definitions
#
# if concepts:
# for concept in concepts:
# keywords = concept.key.split()
# for keyword in keywords:
# if keyword.startswith(VARIABLE_PREFIX):
# continue
#
# self.concepts_by_first_keyword.setdefault(keyword, []).append(concept.id)
# break
#
# return self.sheerka.ret(self.name, True, self.concepts_by_first_keyword)
#
# def get_concepts(self, token):
# """
# Tries to find if there are concepts that match the value of the token
# :param token:
# :return:
# """
#
# if token.type == TokenKind.STRING:
# name = token.value[1:-1]
# elif token.type == TokenKind.KEYWORD:
# name = token.value.value
# else:
# name = token.value
#
# result = []
# if name in self.concepts_by_first_keyword:
# for concept_id in self.concepts_by_first_keyword[name]:
#
# concept = self.sheerka.get_by_id(concept_id)
#
# if len(concept.metadata.props) == 0:
#                     # only consider concepts that have parameters (reject atoms)
#                     # Note: this test is needed in case the definition of the concept has changed
# continue
#
# if concept.metadata.definition_type == DEFINITION_TYPE_BNF:
# # bnf definitions are not supposed to be managed by this parser
# continue
#
# sya_concept_def = SyaConceptDef(concept)
# if concept.id in self.sya_definitions:
# sya_def = self.sya_definitions[concept.id]
# if sya_def[0] is not None:
# sya_concept_def.precedence = sya_def[0]
# if sya_def[1] is not None:
# sya_concept_def.associativity = sya_def[1]
#
# result.append(sya_concept_def)
# return result
#
# return None
def infix_to_postfix(self, context, text):
"""
Implementing Shunting Yard Algorithm