Added SyaNodeParser (finally, after one month)

This commit is contained in:
2020-04-09 15:42:36 +02:00
parent c9acfa99a1
commit 6c7c529016
56 changed files with 5322 additions and 404 deletions
+177
View File
@@ -6,6 +6,8 @@ from core.ast.nodes import CallNodeConcept, GenericNodeConcept
from core.ast.visitors import UnreferencedNamesVisitor
from core.builtin_concepts import BuiltinConcepts
from core.concept import Concept
from parsers.BaseNodeParser import SourceCodeNode, ConceptNode, UnrecognizedTokensNode
from parsers.BaseParser import BaseParser, ErrorNode
def is_same_success(context, return_values):
@@ -132,6 +134,181 @@ def expect_one(context, return_values):
parents=return_values)
def only_successful(context, return_values):
    """
    Keep only the successful entries of *return_values*.

    Non-list inputs are passed through untouched. Returns an error result
    when the input list is empty or when every entry failed.

    :param context: execution context providing ``sheerka`` and ``who``
    :param return_values: list of result objects, each with a ``status``
    :return: a single aggregated result built via ``sheerka.ret``
    """
    if not isinstance(return_values, list):
        return return_values

    sheerka = context.sheerka
    who = context.who

    if not return_values:
        # Nothing to filter at all.
        return sheerka.ret(
            who,
            False,
            sheerka.new(BuiltinConcepts.IS_EMPTY, body=return_values),
            parents=return_values)

    kept = [result for result in return_values if result.status]
    if not kept:
        # Every single entry failed.
        return sheerka.ret(
            who,
            False,
            sheerka.new(BuiltinConcepts.TOO_MANY_ERRORS, body=return_values),
            parents=return_values)

    return sheerka.ret(
        who,
        True,
        sheerka.new(BuiltinConcepts.ONLY_SUCCESSFUL, body=kept),
        parents=return_values)
def only_parsers_results(context, return_values):
    """
    Keep only the entries of *return_values* whose body is a ParserResult,
    regardless of their status — i.e. drop plain errors.

    Non-list inputs are passed through untouched. Returns an error result
    when the input list is empty or when nothing survives the filter.

    :param context: execution context providing ``sheerka`` and ``who``
    :param return_values: list of result objects
    :return: a single aggregated result built via ``sheerka.ret``
    """
    if not isinstance(return_values, list):
        return return_values

    sheerka = context.sheerka

    if not return_values:
        return sheerka.ret(
            context.who,
            False,
            sheerka.new(BuiltinConcepts.IS_EMPTY, body=return_values),
            parents=return_values)

    def _is_usable_parser_result(ret_val):
        # Only ParserResult bodies qualify at all.
        if not sheerka.isinstance(ret_val.body, BuiltinConcepts.PARSER_RESULT):
            return False
        # hack because some parsers don't follow the NOT_FOR_ME rule
        inner = ret_val.body.body
        if isinstance(inner, ErrorNode):
            return False
        if isinstance(inner, list) and len(inner) == 1 \
                and isinstance(inner[0], UnrecognizedTokensNode):
            return False
        return True

    return_values_ok = [rv for rv in return_values if _is_usable_parser_result(rv)]

    if not return_values_ok:
        return sheerka.ret(
            context.who,
            False,
            sheerka.new(BuiltinConcepts.TOO_MANY_ERRORS, body=return_values),
            parents=return_values)

    return sheerka.ret(
        context.who,
        True,
        sheerka.new(BuiltinConcepts.FILTERED,
                    body=return_values_ok,
                    iterable=return_values,
                    predicate="sheerka.isinstance(item.body, BuiltinConcepts.PARSER_RESULT)"),
        parents=return_values)
def parse_unrecognized(context, tokens, parsers):
    """
    Try to recognize concepts or code from tokens using the given parsers.

    Runs only the requested parsers (all others disabled) over *tokens* in a
    sub-context, then discards the Python parser's response when the AtomNode
    parser already accepted the input as a concept.

    :param context: execution context providing ``sheerka``, ``who`` and
        ``push``
    :param tokens: token sequence to parse
    :param parsers: parser names to enable (suffix after ``BaseParser.PREFIX``)
    :return: list of parser results
    """
    steps = [BuiltinConcepts.BEFORE_PARSING, BuiltinConcepts.PARSING, BuiltinConcepts.AFTER_PARSING]
    sheerka = context.sheerka
    with context.push(desc=f"Parsing unrecognized '{tokens}'") as sub_context:
        # disable all parsers but the following ones
        sub_context.add_preprocess(BaseParser.PREFIX + "*", enabled=False)
        for parser in parsers:
            sub_context.add_preprocess(BaseParser.PREFIX + parser, enabled=True)
        sub_context.add_inputs(source=tokens)
        to_parse = sheerka.ret(
            context.who,
            True,
            sheerka.new(BuiltinConcepts.USER_INPUT, body=tokens))
        res = sheerka.execute(sub_context, to_parse, steps)
        sub_context.add_values(return_values=res)
        # discard Python response if accepted by AtomNode
        is_concept = any(r.status and r.who == "parsers.AtomNode" for r in res)
        if not is_concept:
            return res
        return [r for r in res if r.who != "parsers.Python"]
def get_lexer_nodes(return_values, start, tokens):
    """
    From a parser result, return the corresponding LexerNode
    either ConceptNode, UnrecognizedTokensNode or SourceCodeNode

    :param return_values: parser results to convert (each has ``who`` and a
        ``body`` carrying the parsed payload)
    :param start: absolute start position of *tokens* in the input
    :param tokens: the tokens the results were parsed from
    :return: list of list (list of concept node sequence)
    :raises NotImplementedError: for a result from an unknown parser
    """
    lexer_nodes = []
    for ret_val in return_values:
        if ret_val.who == "parsers.Python":
            source = ret_val.body.source.strip()
            if source.isalnum() and not source.isnumeric():
                # Discard SourceCodeNode which seems to be a concept
                # It may be a wrong idea, so let's see
                continue
            end = start + len(tokens) - 1
            lexer_nodes.append([SourceCodeNode(ret_val.body.body, start, end, tokens, ret_val.body.source, ret_val)])
        elif ret_val.who == "parsers.ExactConcept":
            # A single concept or an iterable of them — normalize to a list.
            concepts = ret_val.body.body if hasattr(ret_val.body.body, "__iter__") else [ret_val.body.body]
            end = start + len(tokens) - 1
            for concept in concepts:
                lexer_nodes.append([ConceptNode(concept, start, end, tokens, ret_val.body.source)])
        elif ret_val.who in ("parsers.BnfNode", "parsers.SyaNode", "parsers.AtomNode"):
            nodes = list(ret_val.body.body)
            for node in nodes:
                # shift node positions from result-relative to absolute
                node.start += start
                node.end += start
            # append the whole sequence when it is a sequence
            lexer_nodes.append(nodes)
        else:
            raise NotImplementedError(f"Unsupported parser result from {ret_val.who!r}")
    return lexer_nodes
def get_names(sheerka, concept_node):
"""
Finds all the names referenced by the concept_node