Refactored Caching, Refactored BnfNodeParser, Introduced Sphinx

This commit is contained in:
2020-05-12 17:21:10 +02:00
parent 7d3a490bc5
commit 6e343ba996
110 changed files with 13865 additions and 7540 deletions
+36 -26
View File
@@ -1,12 +1,11 @@
import copy
from dataclasses import dataclass
from core import builtin_helpers
from core.builtin_concepts import BuiltinConcepts
from core.concept import Concept, DEFINITION_TYPE_BNF
from core.tokenizer import TokenKind, Tokenizer
from parsers.BaseNodeParser import BaseNodeParser, ConceptNode, UnrecognizedTokensNode
from parsers.BaseParser import BaseParser, UnexpectedTokenErrorNode, ErrorNode
from core.concept import DEFINITION_TYPE_BNF
from core.tokenizer import Tokenizer
from parsers.BaseNodeParser import BaseNodeParser, ConceptNode, UnrecognizedTokensNode, SourceCodeNode
from parsers.BaseParser import UnexpectedTokenErrorNode, ErrorNode
PARSERS = ["BnfNode", "SyaNode", "Python"]
@@ -141,7 +140,11 @@ class AtomConceptParserHelper:
self.unrecognized_tokens.fix_source()
# try to recognize concepts
nodes_sequences = self._get_lexer_nodes_from_unrecognized()
nodes_sequences = builtin_helpers.get_lexer_nodes_from_unrecognized(
self.context,
self.unrecognized_tokens,
PARSERS)
if nodes_sequences:
instances = [self]
for i in range(len(nodes_sequences) - 1):
@@ -152,7 +155,7 @@ class AtomConceptParserHelper:
for instance, node_sequence in zip(instances, nodes_sequences):
for node in node_sequence:
instance.sequence.append(node)
if isinstance(node, UnrecognizedTokensNode) or \
if isinstance(node, (UnrecognizedTokensNode, SourceCodeNode)) or \
hasattr(node, "unrecognized_tokens") and node.unrecognized_tokens:
instance.has_unrecognized = True
instance.unrecognized_tokens = UnrecognizedTokensNode(-1, -1, [])
@@ -193,22 +196,22 @@ class AtomConceptParserHelper:
clone.has_unrecognized = self.has_unrecognized
return clone
def _get_lexer_nodes_from_unrecognized(self):
"""
Use the source of self.unrecognized_tokens to find concepts or source code
:return:
"""
res = builtin_helpers.parse_unrecognized(self.context, self.unrecognized_tokens.source, PARSERS)
only_parsers_results = builtin_helpers.only_parsers_results(self.context, res)
if not only_parsers_results.status:
return None
return builtin_helpers.get_lexer_nodes(
only_parsers_results.body.body,
self.unrecognized_tokens.start,
self.unrecognized_tokens.tokens)
# def _get_lexer_nodes_from_unrecognized(self):
# """
# Use the source of self.unrecognized_tokens to find concepts or source code
# :return:
# """
#
# res = builtin_helpers.parse_unrecognized(self.context, self.unrecognized_tokens.source, PARSERS)
# only_parsers_results = builtin_helpers.only_parsers_results(self.context, res)
#
# if not only_parsers_results.status:
# return None
#
# return builtin_helpers.get_lexer_nodes(
# only_parsers_results.body.body,
# self.unrecognized_tokens.start,
# self.unrecognized_tokens.tokens)
class AtomNodeParser(BaseNodeParser):
@@ -230,7 +233,6 @@ class AtomNodeParser(BaseNodeParser):
def __init__(self, **kwargs):
super().__init__("AtomNode", 50, **kwargs)
self.enabled = False
@staticmethod
def _is_eligible(concept):
@@ -239,7 +241,8 @@ class AtomNodeParser(BaseNodeParser):
:param concept:
:return:
"""
return len(concept.metadata.props) == 0 or concept.metadata.definition_type == DEFINITION_TYPE_BNF
# return len(concept.metadata.props) == 0 or concept.metadata.definition_type == DEFINITION_TYPE_BNF
return len(concept.metadata.variables) == 0 and concept.metadata.definition_type != DEFINITION_TYPE_BNF
def get_concepts_sequences(self):
@@ -255,6 +258,13 @@ class AtomNodeParser(BaseNodeParser):
concept_parser_helpers.extend(forked)
forked.clear()
def _get_concepts_by_name(name):
other_concepts = self.sheerka.get_by_name(name)
if isinstance(other_concepts, list):
return other_concepts
return [other_concepts] if self.sheerka.is_known(other_concepts) else []
concept_parser_helpers = [AtomConceptParserHelper(self.context)]
while self.next_token(False):
@@ -268,7 +278,7 @@ class AtomNodeParser(BaseNodeParser):
if concept_parser.eat_token(self.token, self.pos):
concept_parser.lock()
concepts = self.get_concepts(token, self._is_eligible)
concepts = self.get_concepts(token, self._is_eligible, custom=_get_concepts_by_name)
if not concepts:
for concept_parser in concept_parser_helpers:
concept_parser.eat_unrecognized(token, self.pos)