"""AST node types and the base class shared by all parsers."""

from dataclasses import dataclass

from core.tokenizer import Keywords, TokenKind


@dataclass()
class Node:
    """Base class for all parse-tree nodes."""
    pass


@dataclass()
class NopNode(Node):
    """Node representing a no-op."""

    def __repr__(self):
        # Defined in the class body, so @dataclass does not replace it.
        return "nop"


class NotInitializedNode(Node):
    """Placeholder node for a value that has not been initialized yet."""

    def __repr__(self):
        return "**N/A**"


@dataclass()
class ErrorNode(Node):
    """Node marking a parse error."""
    pass


class BaseParser:
    """Common behavior for parsers: naming, equality, and token helpers.

    Subclasses implement :meth:`parse`; equality and hashing are based
    solely on the prefixed parser name.
    """

    # Prefix applied to every parser name.
    PREFIX = "Parsers:"

    def __init__(self, name):
        """Create a parser whose full name is ``PREFIX + name``."""
        self.name = self.PREFIX + name
        self.has_error = False  # set True when parsing hits an error
        self.error_sink = []    # collected error details

    def __eq__(self, other):
        # Parsers compare equal iff they are the same class and share a name.
        if not isinstance(other, self.__class__):
            # FIX: return NotImplemented (not False) so Python can try the
            # reflected comparison; `==` against foreign types still ends
            # up False for callers.
            return NotImplemented
        return self.name == other.name

    def __hash__(self):
        # Must accompany __eq__; hash on the same key used for equality.
        return hash(self.name)

    def parse(self, context, text):
        """Parse *text* within *context*. Stub — subclasses override."""
        pass

    @staticmethod
    def get_text_from_tokens(tokens):
        """Reassemble source text from a token, an iterable of tokens, or None.

        KEYWORD tokens are rendered through the ``Keywords`` enum; every
        other token contributes its raw ``value``. Returns ``""`` for None.
        """
        if tokens is None:
            return ""
        if not hasattr(tokens, "__iter__"):
            # A single bare token is accepted for convenience.
            tokens = [tokens]
        # FIX: build with str.join instead of repeated += (quadratic growth).
        return "".join(
            Keywords(token.value).value if token.type == TokenKind.KEYWORD
            else token.value
            for token in tokens
        )

    @staticmethod
    def log_result(log, text, ret):
        """Log whether *ret* (an object with .status/.value) recognized *text*."""
        if ret.status:
            log.debug(f"Recognized '{text}' as {ret.value}")
        else:
            log.debug(f"Failed to recognize '{text}'")