Initialized logging

This commit is contained in:
2019-11-05 19:56:00 +01:00
parent b12204360e
commit 0d2adf1b6c
10 changed files with 448 additions and 249 deletions
+28 -8
View File
@@ -1,6 +1,9 @@
from parsers.BaseParser import BaseParser, Node, NopNode, ErrorNode
from parsers.tokenizer import Tokenizer, TokenKind, Token, Keywords
from dataclasses import dataclass, field
import logging
log = logging.getLogger(__name__)
@dataclass()
@@ -27,11 +30,16 @@ class UnexpectedTokenErrorNode(DefaultParserErrorNode):
message: str
expected_tokens: list
def __post_init__(self):
    """Log the unexpected-token message as soon as the node is created."""
    # Lazy %-formatting: the message string is only built if DEBUG is enabled,
    # instead of eagerly concatenating on every node construction.
    log.debug("UnexpectedToken : %s", self.message)
@dataclass()
class SyntaxErrorNode(DefaultParserErrorNode):
    """Error node for a generic syntax error.

    Logs the error message at DEBUG level when the node is created.
    """

    # The human-readable description of the syntax error.
    message: str

    def __post_init__(self):
        # Removed a residual `pass` that was left behind once the class
        # gained a real body. Lazy %-formatting defers string building
        # until the DEBUG level is actually enabled.
        log.debug("SyntaxError : %s", self.message)
@dataclass()
@@ -164,7 +172,7 @@ class DefaultParser(BaseParser):
return
@staticmethod
def get_concept_name(tokens):
def get_concept_name(tokens, variables=None):
name = ""
first = True
for token in tokens:
@@ -172,7 +180,10 @@ class DefaultParser(BaseParser):
break
if not first:
name += " "
name += token.value[1:-1] if token.type == TokenKind.STRING else token.value
if variables is not None and token.value in variables:
name += "__var__" + str(variables.index(token.value))
else:
name += token.value[1:-1] if token.type == TokenKind.STRING else token.value
first = False
return name
@@ -235,6 +246,8 @@ class DefaultParser(BaseParser):
def_concept_parts = [Keywords.AS, Keywords.WHERE, Keywords.PRE, Keywords.POST]
tokens_found = {} # Node token is supposed to be a list, but here, it will be a dict
token = self.get_token()
if token.value != Keywords.CONCEPT:
return self.add_error(UnexpectedTokenErrorNode([token], "Syntax error.", [Keywords.CONCEPT]))
@@ -251,6 +264,7 @@ class DefaultParser(BaseParser):
self.next_token()
token = self.get_token()
name = self.get_concept_name(name_as_tokens)
tokens_found["name"] = name_as_tokens
# try to parse as, where, pre and post declarations
tokens = {
@@ -276,6 +290,8 @@ class DefaultParser(BaseParser):
self.next_token(False)
token = self.get_token()
for t in tokens:
tokens_found[t.value] = tokens[t]
asts = {
Keywords.AS: NopNode(),
@@ -304,11 +320,15 @@ class DefaultParser(BaseParser):
self.add_error(sub_tree, False)
asts[keyword] = sub_tree
return DefConceptNode([], name,
asts[Keywords.WHERE],
asts[Keywords.PRE],
asts[Keywords.POST],
asts[Keywords.AS])
def_concept_node = DefConceptNode(tokens_found, # dict instead of list is wanted.
name,
asts[Keywords.WHERE],
asts[Keywords.PRE],
asts[Keywords.POST],
asts[Keywords.AS])
log.debug(f"Found DefConcept node '{def_concept_node}'")
return def_concept_node
def parse_expression(self):
return self.parse_addition()
+18 -1
View File
@@ -2,6 +2,9 @@ from parsers.BaseParser import BaseParser, Node, ErrorNode
from dataclasses import dataclass
import ast
import copy
import logging
log = logging.getLogger(__name__)
@dataclass()
@@ -17,7 +20,7 @@ class PythonNode(Node):
def __repr__(self):
    """Debug representation: the class name wrapping the dumped AST."""
    return "PythonNode(" + ast.dump(self.ast) + ")"
    # Former source-based variant, kept for reference (the duplicated
    # copy of this commented-out line was removed):
    # return "PythonNode(" + self.source + ")"
class PythonParser(BaseParser):
@@ -73,3 +76,17 @@ class PythonParser(BaseParser):
return eval(compile(self.expr_to_expression(last_ast.body[0]), "<ast>", "eval"), globals())
else:
exec(compile(last_ast, "<ast>", "exec"), globals())
class PythonGetNamesVisitor(ast.NodeVisitor):
    """AST visitor that collects every identifier (``ast.Name`` id) it encounters.

    Note: this records every name *used* in the tree (loads as well as
    stores), not only declarations. After ``visit()`` has run, the
    collected identifiers are available in ``self.names``.
    """

    def __init__(self):
        # A set so each identifier is recorded only once.
        self.names = set()
        log.debug("Searching for names.")

    def visit_Name(self, node):
        # Lazy %-formatting avoids building the message unless DEBUG is on.
        log.debug("Found name : %s", node.id)
        self.names.add(node.id)
+1 -1
View File
@@ -179,7 +179,7 @@ class Tokenizer:
self.column += len(number)
elif c == "'" or c == '"':
string, newlines = self.eat_string(self.i, self.line, self.column)
yield Token(TokenKind.STRING, string, self.i, self.line, self.column)
yield Token(TokenKind.STRING, string, self.i, self.line, self.column) # quotes are kept
self.i += len(string)
self.column = 1 if newlines > 0 else self.column + len(string)
self.line += newlines