Introduced ParserInput

This commit is contained in:
2020-05-25 18:09:12 +02:00
parent c79403443f
commit 479461c0a4
35 changed files with 768 additions and 480 deletions
+36 -41
View File
@@ -6,7 +6,7 @@ from typing import List
from core import builtin_helpers
from core.builtin_concepts import BuiltinConcepts
from core.concept import Concept, DEFINITION_TYPE_BNF
from core.sheerka.ExecutionContext import ExecutionContext
from core.sheerka.services.SheerkaExecute import ParserInput
from core.tokenizer import Token, TokenKind, Tokenizer
from parsers.BaseNodeParser import UnrecognizedTokensNode, ConceptNode, SourceCodeNode, SyaAssociativity, \
SourceCodeWithConceptNode, BaseNodeParser
@@ -313,21 +313,6 @@ class InFixToPostFix:
"""
return len(self.stack) > 0 and isinstance(self.stack[-1], type)
def _get_lexer_nodes_from_unrecognized(self):
"""
Use the source of self.unrecognized_tokens to find concepts or source code
:return:
"""
res = builtin_helpers.parse_unrecognized(self.context, self.unrecognized_tokens.source, PARSERS)
only_parsers_results = builtin_helpers.only_parsers_results(self.context, res)
if not only_parsers_results.status:
return None
return builtin_helpers.get_lexer_nodes(
only_parsers_results.body.body,
self.unrecognized_tokens.start,
self.unrecognized_tokens.tokens)
def _make_source_code_with_concept(self, start, rpar_token, end):
"""
@@ -440,7 +425,10 @@ class InFixToPostFix:
self.unrecognized_tokens.fix_source()
# try to recognize concepts
nodes_sequences = self._get_lexer_nodes_from_unrecognized()
nodes_sequences = builtin_helpers.get_lexer_nodes_from_unrecognized(
self.context,
self.unrecognized_tokens,
PARSERS)
if nodes_sequences:
# There are more than one solution found
@@ -482,7 +470,10 @@ class InFixToPostFix:
:return: list of function_parser_res
"""
self.unrecognized_tokens.fix_source()
nodes_sequences = self._get_lexer_nodes_from_unrecognized()
nodes_sequences = builtin_helpers.get_lexer_nodes_from_unrecognized(
self.context,
self.unrecognized_tokens,
PARSERS)
if nodes_sequences is None:
return None
@@ -908,13 +899,13 @@ class SyaNodeParser(BaseNodeParser):
self.concepts_by_first_keyword = {}
self.sya_definitions = {}
self.token = None
self.pos = -1
self.tokens = None
self.context: ExecutionContext = None
self.text = None
self.sheerka = None
# self.token = None
# self.pos = -1
# self.tokens = None
#
# self.context: ExecutionContext = None
# self.text = None
# self.sheerka = None
def init_from_concepts(self, context, concepts, **kwargs):
super().init_from_concepts(context, concepts)
@@ -954,15 +945,15 @@ class SyaNodeParser(BaseNodeParser):
return sya_concept_def
def infix_to_postfix(self, context, text):
def infix_to_postfix(self, context, parser_input: ParserInput):
"""
Implementing Shunting Yard Algorithm
:param context:
:param text:
:param parser_input:
:return:
"""
if not self.reset_parser(context, text):
if not self.reset_parser(context, parser_input):
return None
forked = []
@@ -978,32 +969,32 @@ class SyaNodeParser(BaseNodeParser):
forked.clear()
res = [InFixToPostFix(context)]
while self.next_token(False):
while self.parser_input.next_token(False):
for infix_to_postfix in res:
infix_to_postfix.reset()
token = self.get_token()
token = self.parser_input.token
try:
if token.type in (TokenKind.LPAR, TokenKind.RPAR):
# little optim, no need to lock, unlock or get the concept when parenthesis
for infix_to_postfix in res:
infix_to_postfix.eat_token(token, self.pos)
infix_to_postfix.eat_token(token, self.parser_input.pos)
continue
for infix_to_postfix in res:
if infix_to_postfix.eat_token(token, self.pos):
if infix_to_postfix.eat_token(token, self.parser_input.pos):
infix_to_postfix.lock()
concepts = self.get_concepts(token, self._is_eligible, to_map=self._get_sya_concept_def)
if not concepts:
for infix_to_postfix in res:
infix_to_postfix.eat_unrecognized(token, self.pos)
infix_to_postfix.eat_unrecognized(token, self.parser_input.pos)
continue
if len(concepts) == 1:
for infix_to_postfix in res:
infix_to_postfix.eat_concept(concepts[0], token, self.pos)
infix_to_postfix.eat_concept(concepts[0], token, self.parser_input.pos)
continue
# make the cartesian product
@@ -1012,7 +1003,7 @@ class SyaNodeParser(BaseNodeParser):
for concept in concepts:
clone = infix_to_postfix.clone()
temp_res.append(clone)
clone.eat_concept(concept, token, self.pos)
clone.eat_concept(concept, token, self.parser_input.pos)
res = temp_res
finally:
@@ -1036,14 +1027,15 @@ class SyaNodeParser(BaseNodeParser):
while len(item.nodes) > 0:
res = self.postfix_to_item(sheerka, item.nodes)
if isinstance(res, PostFixToItem):
items.append(ConceptNode(res.concept, res.start, res.end, self.tokens[res.start: res.end + 1]))
items.append(
ConceptNode(res.concept, res.start, res.end, self.parser_input.tokens[res.start: res.end + 1]))
else:
items.append(res)
item.has_unrecognized |= hasattr(res, "has_unrecognized") and res.has_unrecognized or \
isinstance(res, UnrecognizedTokensNode)
item.nodes = items
item.fix_all_pos()
item.tokens = self.tokens[item.start:item.end + 1]
item.tokens = self.parser_input.tokens[item.start:item.end + 1]
item.fix_source(True)
return item
@@ -1069,14 +1061,14 @@ class SyaNodeParser(BaseNodeParser):
return PostFixToItem(concept, start, end, has_unrecognized)
def parse(self, context, parser_input):
def parse(self, context, parser_input: ParserInput):
"""
:param context:
:param parser_input:
:return:
"""
if parser_input == "":
if parser_input.is_empty():
return context.sheerka.ret(
self.name,
False,
@@ -1096,7 +1088,7 @@ class SyaNodeParser(BaseNodeParser):
return self.sheerka.ret(
self.name,
False,
context.sheerka.new(BuiltinConcepts.NOT_FOR_ME, body=parser_input))
context.sheerka.new(BuiltinConcepts.NOT_FOR_ME, body=parser_input.as_text()))
for infix_to_postfix in valid_infix_to_postfixs:
sequence = []
@@ -1106,7 +1098,10 @@ class SyaNodeParser(BaseNodeParser):
has_unrecognized |= hasattr(item, "has_unrecognized") and item.has_unrecognized or \
isinstance(item, UnrecognizedTokensNode)
if isinstance(item, PostFixToItem):
to_insert = ConceptNode(item.concept, item.start, item.end, self.tokens[item.start: item.end + 1])
to_insert = ConceptNode(item.concept,
item.start,
item.end,
self.parser_input.tokens[item.start: item.end + 1])
else:
to_insert = item
sequence.insert(0, to_insert)