Added basic implementation for where

This commit is contained in:
2020-02-05 18:47:20 +01:00
parent a5a721094b
commit afc1e22949
35 changed files with 864 additions and 320 deletions
+39 -24
View File
@@ -5,7 +5,8 @@ from core.builtin_concepts import BuiltinConcepts
from core.sheerka.Sheerka import ExecutionContext
from core.tokenizer import Tokenizer, Token, TokenKind, LexerError
from parsers.BaseParser import BaseParser, ErrorNode, UnexpectedTokenErrorNode
from parsers.ConceptLexerParser import OrderedChoice, Sequence, Optional, ZeroOrMore, OneOrMore, ConceptExpression, StrMatch
from parsers.ConceptLexerParser import OrderedChoice, Sequence, Optional, ZeroOrMore, OneOrMore, ConceptExpression, \
StrMatch, ConceptGroupExpression
@dataclass()
@@ -119,11 +120,11 @@ class BnfParser(BaseParser):
tree = None
try:
self.reset_parser(context, text)
tree = self.parser_outer_rule_name()
tree = self.parse_choice()
token = self.get_token()
if token and token.type != TokenKind.EOF:
self.add_error(UnexpectedTokenErrorNode(f"Unexpected token '{token}'", []))
self.add_error(UnexpectedTokenErrorNode(f"Unexpected token '{token}'", token, []))
except LexerError as e:
self.add_error(e, False)
@@ -136,10 +137,11 @@ class BnfParser(BaseParser):
return ret
def parser_outer_rule_name(self):
return self.parser_rule_name(self.parse_choice)
def parse_choice(self):
"""
a | b | c
:return:
"""
sequence = self.parse_sequence()
self.eat_white_space()
@@ -159,9 +161,13 @@ class BnfParser(BaseParser):
sequence = self.parse_sequence()
elements.append(sequence)
return OrderedChoice(*elements)
return self.eat_rule_name_if_needed(OrderedChoice(*elements))
def parse_sequence(self):
"""
a b c
:return:
"""
expr_and_modifier = self.parse_modifier()
token = self.get_token()
if token is None or \
@@ -185,30 +191,31 @@ class BnfParser(BaseParser):
sequence = self.parse_modifier()
elements.append(sequence)
return Sequence(*elements)
return self.eat_rule_name_if_needed(Sequence(*elements))
def parse_modifier(self):
expression = self.parser_inner_rule_name()
"""
a? | a* | a+
:return:
"""
expression = self.parse_expression()
token = self.get_token()
if token.type == TokenKind.QMARK:
self.next_token()
return Optional(expression)
return self.eat_rule_name_if_needed(Optional(expression))
if token.type == TokenKind.STAR:
self.next_token()
return ZeroOrMore(expression)
return self.eat_rule_name_if_needed(ZeroOrMore(expression))
if token.type == TokenKind.PLUS:
self.next_token()
return OneOrMore(expression)
return self.eat_rule_name_if_needed(OneOrMore(expression))
return expression
def parser_inner_rule_name(self):
return self.parser_rule_name(self.parse_expression)
def parse_expression(self):
token = self.get_token()
if token.type == TokenKind.EOF:
@@ -216,15 +223,21 @@ class BnfParser(BaseParser):
if token.type == TokenKind.LPAR:
self.nb_open_par += 1
self.next_token()
expression = self.parse_choice()
expr = self.parse_choice()
token = self.get_token()
if token.type == TokenKind.RPAR:
self.nb_open_par -= 1
self.next_token()
return expression
return self.eat_rule_name_if_needed(expr)
else:
self.add_error(UnexpectedTokenErrorNode(f"Unexpected token '{token}'", [TokenKind.RPAR]))
return expression
self.add_error(UnexpectedTokenErrorNode(f"Unexpected token '{token}'", token, [TokenKind.RPAR]))
return expr
if token.type == TokenKind.CONCEPT:
self.next_token()
concept = self.sheerka.new((token.value[0], token.value[1]))
expr = ConceptGroupExpression(concept) if self.sheerka.isaset(concept) else ConceptExpression(concept)
return self.eat_rule_name_if_needed(expr)
if token.type == TokenKind.IDENTIFIER:
self.next_token()
@@ -247,14 +260,15 @@ class BnfParser(BaseParser):
body=("key", concept_name)))
return None
else:
return concept
expr = ConceptGroupExpression(concept) if self.sheerka.isaset(concept) else ConceptExpression(concept)
expr.rule_name = concept.name
return expr
ret = StrMatch(core.utils.strip_quotes(token.value))
self.next_token()
return ret
return self.eat_rule_name_if_needed(ret)
def parser_rule_name(self, next_to_parse):
expression = next_to_parse()
def eat_rule_name_if_needed(self, expression):
token = self.get_token()
if token is None or token.type != TokenKind.EQUALS:
return expression
@@ -263,7 +277,8 @@ class BnfParser(BaseParser):
token = self.get_token()
if token is None or token.type != TokenKind.IDENTIFIER:
return self.add_error(UnexpectedTokenErrorNode(f"Unexpected token '{token}'", [TokenKind.IDENTIFIER]))
return self.add_error(
UnexpectedTokenErrorNode(f"Unexpected token '{token}'", token, [TokenKind.IDENTIFIER]))
expression.rule_name = token.value
self.next_token()