Added SyaNodeParser (finally, after one month)
@@ -1,8 +1,8 @@
 from dataclasses import dataclass
 
-from core.builtin_concepts import BuiltinConcepts
+from core.builtin_concepts import BuiltinConcepts, ParserResultConcept
 from core.concept import Concept
-from core.tokenizer import TokenKind, Keywords, Token
+from core.tokenizer import TokenKind, Keywords, Token, Tokenizer
 from core.sheerka_logger import get_logger
 import core.utils
 import logging
@@ -77,7 +77,6 @@ class BaseParser:
         self.priority = priority
         self.enabled = enabled
 
-        self.has_error = False
         self.error_sink = []
 
     def __eq__(self, other):
@@ -91,9 +90,13 @@ class BaseParser:
     def __repr__(self):
         return self.name
 
-    def parse(self, context, text):
+    def parse(self, context, parser_input):
         pass
 
+    @property
+    def has_error(self):
+        return len(self.error_sink) > 0
+
     def log_result(self, context, source, ret):
         if not self.log.isEnabledFor(logging.DEBUG):
             return
@@ -132,6 +135,53 @@ class BaseParser:
             body=self.error_sink if self.has_error else tree,
             try_parsed=try_parse)
 
+    def get_input_as_text(self, parser_input, custom_switcher=None):
+        if isinstance(parser_input, list):
+            return self.get_text_from_tokens(parser_input, custom_switcher)
+
+        if isinstance(parser_input, ParserResultConcept):
+            parser_input = parser_input.source
+
+        if "c:" in parser_input:
+            return self.get_text_from_tokens(list(Tokenizer(parser_input)), custom_switcher)
+
+        return parser_input
+
+    def get_input_as_tokens(self, parser_input):
+        if isinstance(parser_input, list):
+            return self.add_eof_if_needed(parser_input)
+
+        if isinstance(parser_input, ParserResultConcept):
+            if parser_input.tokens:
+                return self.add_eof_if_needed(parser_input.tokens)
+            else:
+                return Tokenizer(parser_input.source)
+
+        return Tokenizer(parser_input)
+
+    def get_input_as_lexer_nodes(self, parser_input, expected_parser=None):
+        if not isinstance(parser_input, ParserResultConcept):
+            return None
+
+        if expected_parser and parser_input.parser != expected_parser:
+            return None
+
+        if len(parser_input.value) == 0:
+            return None
+
+        for node in parser_input.value:
+            from parsers.BaseNodeParser import LexerNode
+            if not isinstance(node, LexerNode):
+                return None
+
+        return parser_input.value
+
+    @staticmethod
+    def add_eof_if_needed(lst):
+        if len(lst) == 0 or not lst[-1].type == TokenKind.EOF:
+            lst.append(Token(TokenKind.EOF, "", -1, -1, -1))
+        return lst
+
     @staticmethod
     def get_text_from_tokens(tokens, custom_switcher=None):
         if tokens is None:
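As a rough illustration of how a parser subclass might call the input-normalization helpers added in this commit, here is a minimal sketch. Only BaseParser, get_input_as_text, and get_input_as_tokens come from the diff above; the module path parsers.BaseParser, the EchoParser subclass name, and its return value are assumptions for illustration.

# Sketch only: module path and subclass name are assumed, not part of this commit.
from parsers.BaseParser import BaseParser


class EchoParser(BaseParser):
    def parse(self, context, parser_input):
        # parser_input may arrive as raw text, a token list, or a
        # ParserResultConcept produced by an earlier parser; the helpers
        # added in this commit collapse all three shapes as needed.
        text = self.get_input_as_text(parser_input)
        tokens = self.get_input_as_tokens(parser_input)
        return text, list(tokens)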