Working on #48 : Refactored FunctionParser, introducing ExpressionParser
This commit is contained in:
@@ -14,7 +14,7 @@ from parsers.FunctionParser import FunctionNode
|
||||
from parsers.PythonParser import PythonNode
|
||||
from parsers.SyaNodeParser import SyaConceptParserHelper
|
||||
from parsers.expressions import NameExprNode, AndNode, OrNode, NotNode, VariableNode, ComparisonNode, ComparisonType, \
|
||||
ParenthesisNode
|
||||
FunctionParameter
|
||||
from sheerkarete.common import V
|
||||
from sheerkarete.conditions import Condition, AndConditions
|
||||
|
||||
@@ -972,8 +972,13 @@ def get_expr_node_from_test_node(full_text, test_node):
|
||||
return start, end
|
||||
|
||||
def get_pos_from_source(source):
    """Return the (start, end) token indices of *source* inside the full text.

    *source* is either a plain source string, or a ``(source, to_skip)``
    tuple where ``to_skip`` is forwarded to ``tokens_index`` to skip past
    that many earlier occurrences before matching.

    NOTE(review): relies on ``Tokenizer``, ``tokens_index`` and
    ``full_text_as_tokens`` from the enclosing scope.
    """
    # Normalize the plain-string form into the (source, skip-count) form.
    if not isinstance(source, tuple):
        source = (source, 0)
    text, to_skip = source[0], source[1]

    tokenized = list(Tokenizer(text, yield_eof=False))
    first = tokens_index(full_text_as_tokens, tokenized, skip=to_skip)
    last = first + len(tokenized) - 1
    return first, last
|
||||
|
||||
@@ -1017,11 +1022,31 @@ def get_expr_node_from_test_node(full_text, test_node):
|
||||
return ComparisonNode(start, end, full_text_as_tokens[start: end + 1],
|
||||
node_type, left_node, right_node)
|
||||
|
||||
if isinstance(node, PAREN):
|
||||
value_as_tokens = list(Tokenizer(node.source, yield_eof=False))
|
||||
start = tokens_index(full_text_as_tokens, value_as_tokens, 0)
|
||||
end = start + len(value_as_tokens) - 1
|
||||
return ParenthesisNode(start, end, value_as_tokens, get_expr_node(node.node))
|
||||
if isinstance(node, FN):
|
||||
start, end = get_pos_from_source(node.first)
|
||||
first = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
|
||||
start, end = get_pos_from_source(node.last)
|
||||
last = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
|
||||
parameters = []
|
||||
for param_value, sep in node.parameters:
|
||||
if isinstance(param_value, str):
|
||||
start, end = get_pos_from_source(param_value)
|
||||
param_as_expr_node = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
|
||||
else:
|
||||
param_as_expr_node = get_expr_node(param_value)
|
||||
|
||||
if sep:
|
||||
sep_tokens = Tokenizer(sep, yield_eof=False)
|
||||
start = param_as_expr_node.end + 1
|
||||
end = start + len(list(sep_tokens)) - 1
|
||||
sep_as_expr_node = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
|
||||
else:
|
||||
sep_as_expr_node = None
|
||||
|
||||
parameters.append(FunctionParameter(param_as_expr_node, sep_as_expr_node))
|
||||
|
||||
start, end = first.start, last.end
|
||||
return FunctionNode(start, end, full_text_as_tokens[start: end + 1], first, last, parameters)
|
||||
|
||||
return get_expr_node(test_node)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user