Added BNF support when defining a new concept + started log filtering

2019-12-13 20:26:11 +01:00
parent 75c8793d53
commit c668cc46d2
29 changed files with 1487 additions and 190 deletions
+70 -22
@@ -1,11 +1,14 @@
-from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
+from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept, ParserResultConcept
 from core.concept import ConceptParts
 import core.builtin_helpers
 import core.utils
 from parsers.BaseParser import BaseParser, Node, NopNode, ErrorNode, NotInitializedNode
 from core.tokenizer import Tokenizer, TokenKind, Token, Keywords
 from dataclasses import dataclass, field
+import logging
+from parsers.ConceptLexerParser import RegexParser
+
+log = logging.getLogger(__name__)
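
The per-module logger added above (log = logging.getLogger(__name__)) is what makes the log filtering mentioned in the commit message possible: levels and handlers can then be set per package rather than globally. A minimal sketch using only the stdlib logging module; the module paths are assumptions based on the imports above:

    import logging

    logging.basicConfig(level=logging.INFO)
    # Verbose output for the parser package only (path assumed from the imports above):
    logging.getLogger("parsers").setLevel(logging.DEBUG)
    # Silence one noisy module without touching the rest:
    logging.getLogger("parsers.ConceptLexerParser").setLevel(logging.WARNING)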
@@ -180,20 +183,22 @@ class NameNode(DefaultParserNode):
 @dataclass()
 class DefConceptNode(DefaultParserNode):
     name: NameNode = NotInitializedNode()
     where: ReturnValueConcept = NotInitializedNode()
     pre: ReturnValueConcept = NotInitializedNode()
     post: ReturnValueConcept = NotInitializedNode()
     body: ReturnValueConcept = NotInitializedNode()
+    definition: ReturnValueConcept = NotInitializedNode()

-    def get_codes(self):
-        codes = {}
+    def get_asts(self):
+        asts = {}
         for part_key in ConceptParts:
             prop_value = getattr(self, part_key.value)
-            if hasattr(prop_value, "ast_"):
-                codes[part_key] = prop_value.ast_
-        return codes
+            if (isinstance(prop_value, ReturnValueConcept)
+                    and isinstance(prop_value.body, ParserResultConcept)
+                    and hasattr(prop_value.body.body, "ast_")):
+                asts[part_key] = prop_value.body.body.ast_
+        return asts
class DefaultParser(BaseParser):
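
get_asts() now unwraps two levels before reading ast_: a part's value must be a ReturnValueConcept whose body is a ParserResultConcept, and the AST hangs off that result's own body. A self-contained toy illustrating the nesting; the stand-in classes are assumptions, only the attribute chain comes from the diff:

    from dataclasses import dataclass

    @dataclass
    class FakeAstNode:            # stands in for a parser node carrying an AST
        ast_: str

    @dataclass
    class FakeParserResult:       # stands in for ParserResultConcept
        body: FakeAstNode

    @dataclass
    class FakeReturnValue:        # stands in for ReturnValueConcept
        body: FakeParserResult

    part = FakeReturnValue(FakeParserResult(FakeAstNode("where-ast")))
    # The chain get_asts() reads: prop_value.body.body.ast_
    assert part.body.body.ast_ == "where-ast"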
@@ -322,20 +327,44 @@ class DefaultParser(BaseParser):
         # init
         log.debug("It may be a definition of a concept")
-        concept_special_tokens = [def_token]
-        concept_found = DefConceptNode(concept_special_tokens)
+        keywords_tokens = [def_token]
+        concept_found = DefConceptNode(keywords_tokens)
         # the definition of a concept consists of several parts:
         # Keywords.CONCEPT to get the name of the concept
+        # Keywords.FROM [Keywords.REGEX] to get the definition of the concept
         # Keywords.AS to get the body
         # Keywords.WHERE to get the conditions used to recognize the variables
         # Keywords.PRE to get the conditions to check before the concept is evaluated
         # Keywords.POST to get the conditions to apply or verify once the concept has executed
-        def_concept_parts = [Keywords.CONCEPT, Keywords.AS, Keywords.WHERE, Keywords.PRE, Keywords.POST]
         #
+        # Regroup the tokens by parts
+        first_token, tokens_found_by_parts = self.regroup_tokens_by_parts(keywords_tokens)
+        # get the name
+        concept_found.name = self.get_concept_name(first_token, tokens_found_by_parts)
+        # get the definition
+        concept_found.definition = self.get_concept_definition(tokens_found_by_parts)
+        # get the ASTs for the remaining parts
+        asts_found_by_parts = self.get_concept_parts(tokens_found_by_parts)
+        concept_found.where = asts_found_by_parts[Keywords.WHERE]
+        concept_found.pre = asts_found_by_parts[Keywords.PRE]
+        concept_found.post = asts_found_by_parts[Keywords.POST]
+        concept_found.body = asts_found_by_parts[Keywords.AS]
+        log.debug(f"Found DefConcept node '{concept_found}'")
+        return concept_found
+
+    def regroup_tokens_by_parts(self, keywords_tokens):
+        def_concept_parts = [Keywords.CONCEPT, Keywords.FROM, Keywords.AS, Keywords.WHERE, Keywords.PRE, Keywords.POST]
         # tokens found when trying to recognize the parts
         tokens_found_by_parts = {
             Keywords.CONCEPT: [],
+            Keywords.FROM: None,
             Keywords.AS: None,
             Keywords.WHERE: None,
             Keywords.PRE: None,
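
For orientation, a hypothetical token stream for a definition that uses all six parts. The keyword names come from the diff; the concrete surface syntax and lexemes are assumptions, with "..." standing in for unknown content:

    # Hypothetical stream of token values reaching this parse step (surface syntax assumed):
    token_values = [
        "def", "concept", "greeting",          # name part
        "from", "bnf", '"hello"', "<name>",    # definition part (BNF)
        "as", "...",                           # body
        "where", "...",                        # conditions to recognize the variables
        "pre", "...",                          # checked before evaluation
        "post", "...",                         # applied/verified after execution
    ]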
@@ -348,7 +377,7 @@ class DefaultParser(BaseParser):
         # loop thru the tokens, and put them in the correct tokens_found_by_parts entry
         while token.type != TokenKind.EOF:
             if token.value in def_concept_parts:
-                concept_special_tokens.append(token)  # keep track of the keywords
+                keywords_tokens.append(token)  # keep track of the keywords
                 keyword = token.value
                 if tokens_found_by_parts[keyword]:
                     # a part is defined more than once
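
The loop above assigns every token to the bucket of the most recently seen part keyword. A standalone sketch of that regrouping idea, using plain strings instead of the project's Token objects:

    PART_KEYWORDS = ("concept", "from", "as", "where", "pre", "post")

    def regroup(tokens):
        # Bucket each token under the most recently seen part keyword.
        buckets = {kw: [] for kw in PART_KEYWORDS}
        current = "concept"   # tokens before any other keyword belong to the name part
        for tok in tokens:
            if tok in PART_KEYWORDS:
                current = tok
            buckets[current].append(tok)
        return buckets

    parts = regroup(["concept", "greeting", "as", "print", "where", "name"])
    assert parts["concept"] == ["concept", "greeting"]
    assert parts["as"] == ["as", "print"]

Note that, as in the diff, each keyword token stays in its own bucket; the name-extraction step below skips it.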
@@ -364,13 +393,15 @@ class DefaultParser(BaseParser):
             token = self.get_token()
         # semantic checks
+        return first_token, tokens_found_by_parts
+
+    def get_concept_name(self, first_token, tokens_found_by_parts):
         name_first_token_index = 1
         token = self.get_token()
         if first_token.value != Keywords.CONCEPT:
             self.add_error(UnexpectedTokenErrorNode([token], "Syntax error.", [Keywords.CONCEPT]))
             name_first_token_index = 0
         # Manage the name
         name_tokens = tokens_found_by_parts[Keywords.CONCEPT]
         if len(name_tokens) == name_first_token_index:
             self.add_error(SyntaxErrorNode([], "Name is mandatory"))
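
Because the CONCEPT keyword token itself stays in its bucket, get_concept_name skips it via name_first_token_index; the same index also drives the mandatory-name check above. In toy form, with plain strings instead of Tokens:

    name_tokens = ["concept", "greeting"]     # the bucket keeps the keyword itself
    first_index = 1 if name_tokens[0] == "concept" else 0
    assert name_tokens[first_index:] == ["greeting"]
    # An empty slice (len(name_tokens) == first_index) is the "Name is mandatory" error case.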
@@ -381,8 +412,31 @@ class DefaultParser(BaseParser):
             if TokenKind.NEWLINE in [t.type for t in name_tokens]:
                 self.add_error(SyntaxErrorNode(tokens_found_by_parts[Keywords.CONCEPT], "Newlines are not allowed in the name."))
-        concept_found.name = NameNode(name_tokens[name_first_token_index:])  # skip the first token
+        return NameNode(name_tokens[name_first_token_index:])  # skip the first token
+
+    def get_concept_definition(self, tokens_found_by_parts):
+        if tokens_found_by_parts[Keywords.FROM] is None:
+            return NotInitializedNode()
+        definition_tokens = tokens_found_by_parts[Keywords.FROM]
+        if definition_tokens[1].value != Keywords.BNF:
+            return NotInitializedNode()
+        tokens = core.utils.strip_tokens(definition_tokens[2:])
+        if len(tokens) == 0:
+            self.add_error(SyntaxErrorNode([definition_tokens[1]], "Empty declaration"), False)
+            return NotInitializedNode()
+        regex_parser = RegexParser()
+        new_context = self.context.push(self.name)
+        parsing_result = regex_parser.parse(new_context, tokens)
+        if not parsing_result.status:
+            self.add_error(parsing_result.value)
+            return NotInitializedNode()
+        return parsing_result
+
+    def get_concept_parts(self, tokens_found_by_parts):
         asts_found_by_parts = {
             Keywords.AS: NotInitializedNode(),
             Keywords.WHERE: NotInitializedNode(),
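
get_concept_definition, the core of this commit, expects the FROM bucket to hold the FROM keyword, then the BNF marker, then the grammar body, which is stripped and handed to RegexParser. A toy view of that slicing with plain strings; the real code compares Token.value against Keywords members:

    from_tokens = ["from", "bnf", '"hello"', "<name>"]
    assert from_tokens[1] == "bnf"       # mirrors the definition_tokens[1].value != Keywords.BNF check
    grammar = from_tokens[2:]            # what gets stripped and fed to RegexParser().parse()
    assert grammar == ['"hello"', "<name>"]
    # An empty grammar after stripping triggers the "Empty declaration" error above.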
@@ -391,7 +445,7 @@ class DefaultParser(BaseParser):
         }
         for keyword in tokens_found_by_parts:
-            if keyword == Keywords.CONCEPT:
+            if keyword == Keywords.CONCEPT or keyword == Keywords.FROM:
                 continue  # already done
             log.debug("Processing part '" + keyword.name + "'")
@@ -418,13 +472,7 @@ class DefaultParser(BaseParser):
             asts_found_by_parts[keyword] = parsing_result
-        concept_found.where = asts_found_by_parts[Keywords.WHERE]
-        concept_found.pre = asts_found_by_parts[Keywords.PRE]
-        concept_found.post = asts_found_by_parts[Keywords.POST]
-        concept_found.body = asts_found_by_parts[Keywords.AS]
-        log.debug(f"Found DefConcept node '{concept_found}'")
-        return concept_found
+        return asts_found_by_parts

     # def parse_expression(self):
     #     return self.parse_addition()