Fixed #127 : SyaNodeParser : Allow tokens parsing

Fixed #128 : parser_utils.get_node() : Refactor
This commit is contained in:
2021-09-12 11:26:14 +02:00
parent 945807b375
commit a61a1c0d2b
12 changed files with 327 additions and 290 deletions
+205 -185
View File
@@ -2,7 +2,7 @@ import ast
from dataclasses import dataclass
from typing import List, Union
from core.builtin_concepts import ReturnValueConcept
from core.builtin_concepts import ParserResultConcept, ReturnValueConcept
from core.builtin_helpers import CreateObjectIdentifiers
from core.concept import AllConceptParts, Concept, ConceptParts, DoNotResolve
from core.rule import Rule
@@ -20,14 +20,6 @@ from sheerkarete.common import V
from sheerkarete.conditions import AndConditions, Condition, NegatedCondition, NegatedConjunctiveConditions
@dataclass
class Obj:
    # Generic container used by the tests; only prop_a is required.
    # NOTE(review): the meaning of the individual props is not visible in
    # this chunk — confirm against the tests that construct Obj instances.
    prop_a: object
    prop_b: object = None
    prop_c: object = None
    parent: object = None  # optional back-link to an enclosing Obj
class ExprTestObj:
@staticmethod
def get_pos(nodes):
@@ -276,6 +268,130 @@ class LC(ExprTestObj): # for List Comprehension node
return ListComprehensionNode(start, end, full_text_as_tokens[start: end + 1], element, comprehensions)
class FN(ExprTestObj):
    """
    Test class only
    It matches with FunctionNode but with less constraints
    Thereby,
    FN("first", "last", ["param1," ...]) can be compared to
    FunctionNode(NameExprNode("first"), NameExprNode("last"), [FunctionParameter(NameExprNode("param1"), NameExprNode(", "))])
    Note that FunctionParameter can easily be defined with a single string
    * "param" -> FunctionParameter(NameExprNode("param"), None)
    * "param, " -> FunctionParameter(NameExprNode("param"), NameExprNode(", "))
    For more complicated situations, you can use a tuple (value, sep) to define the value part and the separator part
    """

    def __init__(self, first, last, parameters):
        self.first = first
        self.last = last
        # Normalized as a list of (value, separator) pairs.
        self.parameters = []
        for param in parameters:
            if isinstance(param, tuple):
                self.parameters.append(param)
            elif isinstance(param, str) and (pos := param.find(",")) != -1:
                # "param, " shorthand: split at the comma into (value, separator).
                self.parameters.append((param[:pos], param[pos:]))
            else:
                self.parameters.append((param, None))

    def __repr__(self):
        res = self.first
        for param in self.parameters:
            if param[1]:
                res += f"{param[0]}{param[1]} "
            else:
                res += f"{param[0]}"
        return res + self.last

    def __eq__(self, other):
        if id(self) == id(other):
            return True
        if isinstance(other, FN):
            return self.first == other.first and self.last == other.last and self.parameters == other.parameters
        return False

    def __hash__(self):
        # BUG FIX: self.parameters is a list, which is unhashable — hashing it
        # raised TypeError.  Hash an immutable tuple snapshot instead.
        return hash((self.first, self.last, tuple(self.parameters)))

    def transform_real_obj(self, other, get_test_obj_delegate):
        """Turn a real FunctionNode into an FN so it can be compared to this one."""
        if isinstance(other, FN):
            return other
        if isinstance(other, FunctionNode):
            params = []
            for self_parameter, other_parameter in zip(self.parameters, other.parameters):
                if isinstance(self_parameter[0], str):
                    value = other_parameter.value.value
                else:
                    # Nested expression object: delegate the conversion.
                    value = get_test_obj_delegate(other_parameter.value, self_parameter[0])
                sep = other_parameter.separator.value if other_parameter.separator else None
                params.append((value, sep))
            return FN(other.first.value, other.last.value, params)
        raise Exception(f"Expecting FunctionNode but received {other=}")

    def get_expr_node(self, full_text_as_tokens=None):
        """Build the real FunctionNode this FN stands for, from the tokenized source."""
        start, end = self.get_pos_from_source(self.first, full_text_as_tokens)
        first = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
        start, end = self.get_pos_from_source(self.last, full_text_as_tokens)
        last = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
        parameters = []
        for param_value, sep in self.parameters:
            if isinstance(param_value, str):
                start, end = self.get_pos_from_source(param_value, full_text_as_tokens)
                param_as_expr_node = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
            else:
                param_as_expr_node = param_value.get_expr_node(full_text_as_tokens)
            if sep:
                # The separator sits immediately after the value; count its tokens.
                sep_tokens = Tokenizer(sep, yield_eof=False)
                start = param_as_expr_node.end + 1
                end = start + len(list(sep_tokens)) - 1
                sep_as_expr_node = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
            else:
                sep_as_expr_node = None
            parameters.append(FunctionParameter(param_as_expr_node, sep_as_expr_node))
        start, end = first.start, last.end
        return FunctionNode(start, end, full_text_as_tokens[start: end + 1], first, last, parameters)
class HelperWithPos:
    """
    Tracks a (start, end) token span for test helper nodes.

    Positions supplied at construction time are "fixed" and are never
    overwritten; unset positions grow via fix_pos() to cover every node seen.
    """

    def __init__(self, start=None, end=None):
        self.start = start
        self.end = end
        # Explicitly given positions are pinned: fix_pos() leaves them alone.
        self.start_is_fixed = start is not None
        self.end_is_fixed = end is not None

    def fix_pos(self, node):
        """
        Widen the tracked span so it includes *node*.

        :param node: an object exposing start/end attributes, or a
            (start, end) tuple; anything else contributes nothing
        :return: self (fluent style)
        """
        if hasattr(node, "start"):
            candidate_start, candidate_end = node.start, node.end
        elif isinstance(node, tuple):
            candidate_start, candidate_end = node
        else:
            candidate_start = candidate_end = None
        if not self.start_is_fixed and candidate_start is not None:
            if self.start is None or candidate_start < self.start:
                self.start = candidate_start
        if not self.end_is_fixed and candidate_end is not None:
            if self.end is None or candidate_end > self.end:
                self.end = candidate_end
        return self
class CC:
"""
Concept class for test purpose
@@ -578,29 +694,60 @@ class CIO:
raise Exception(f"Expecting Concept but received {other=}")
# NOTE(review): this span was a diff-interleave of the pre-refactor
# HelperWithPos (a duplicate of the HelperWithPos class defined earlier in
# this file) and the newly added RETVAL class; it is disentangled here into
# the RETVAL class alone.
class RETVAL:
    """
    Class helper for return value for parser result
    """

    def __init__(self, source, who=None, parser=None):
        self.source = source
        self.who = who
        self.parser = parser

    def __eq__(self, other):
        if id(self) == id(other):
            return True
        if not isinstance(other, RETVAL):
            return False
        return (self.source == other.source and
                self.who == other.who and
                self.parser == other.parser)

    def __hash__(self):
        # parser is deliberately left out: equal-but-for-parser objects may
        # still collide, which is legal for a hash.
        return hash((self.source, self.who))

    def __repr__(self):
        txt = f"RV(source='{self.source}'"
        if self.who is not None:
            txt += f", who={self.who}"
        if self.parser is not None:
            txt += f", parser={self.parser}"
        return txt + ")"

    def transform_real_obj(self, other, get_test_obj_delegate):
        """
        Transform other into RETVAL, to ease the comparison
        :param other: a RETVAL (returned as-is) or a ReturnValueConcept
        :param get_test_obj_delegate: unused here; kept for interface symmetry
        :return: a RETVAL mirroring other
        """
        if isinstance(other, RETVAL):
            return other
        if isinstance(other, ReturnValueConcept):
            if not isinstance(other.body, ParserResultConcept):
                raise Exception(f"ParserResultConcept not found body={other.body}")
            parser_result = other.body
            # Only mirror the fields this expectation actually constrains.
            return RETVAL(parser_result.source,
                          other.who if self.who is not None else None,
                          parser_result.parser if self.parser is not None else None)
        raise Exception(f"Expecting ReturnValueConcept but received {other=}")
class SCN(HelperWithPos):
@@ -992,10 +1139,10 @@ class RN(HelperWithPos):
if not isinstance(other, RN):
return False
return self.rule_id == other.rule_id and \
self.start == other.start and \
self.end == other.end and \
self.source == other.source
return (self.rule_id == other.rule_id and
self.start == other.start and
self.end == other.end and
self.source == other.source)
def __hash__(self):
return hash((self.rule_id, self.start, self.end, self.source))
@@ -1032,99 +1179,6 @@ class RN(HelperWithPos):
raise Exception(f"Expecting RuleNode but received {other=}")
class FN(ExprTestObj):
    """
    Test class only
    It matches with FunctionNode but with less constraints
    Thereby,
    FN("first", "last", ["param1," ...]) can be compared to
    FunctionNode(NameExprNode("first"), NameExprNode("last"), [FunctionParameter(NameExprNode("param1"), NameExprNode(", "))])
    Note that FunctionParameter can easily be defined with a single string
    * "param" -> FunctionParameter(NameExprNode("param"), None)
    * "param, " -> FunctionParameter(NameExprNode("param"), NameExprNode(", "))
    For more complicated situations, you can use a tuple (value, sep) to define the value part and the separator part
    """

    def __init__(self, first, last, parameters):
        self.first = first
        self.last = last
        # Normalized as a list of (value, separator) pairs.
        self.parameters = []
        for param in parameters:
            if isinstance(param, tuple):
                self.parameters.append(param)
            elif isinstance(param, str) and (pos := param.find(",")) != -1:
                # "param, " shorthand: split at the comma into (value, separator).
                self.parameters.append((param[:pos], param[pos:]))
            else:
                self.parameters.append((param, None))

    def __repr__(self):
        res = self.first
        for param in self.parameters:
            if param[1]:
                res += f"{param[0]}{param[1]} "
            else:
                res += f"{param[0]}"
        return res + self.last

    def __eq__(self, other):
        if id(self) == id(other):
            return True
        if isinstance(other, FN):
            return self.first == other.first and self.last == other.last and self.parameters == other.parameters
        return False

    def __hash__(self):
        # BUG FIX: self.parameters is a list, which is unhashable — hashing it
        # raised TypeError.  Hash an immutable tuple snapshot instead.
        return hash((self.first, self.last, tuple(self.parameters)))

    def transform_real_obj(self, other, get_test_obj_delegate):
        """Turn a real FunctionNode into an FN so it can be compared to this one."""
        if isinstance(other, FN):
            return other
        if isinstance(other, FunctionNode):
            params = []
            for self_parameter, other_parameter in zip(self.parameters, other.parameters):
                if isinstance(self_parameter[0], str):
                    value = other_parameter.value.value
                else:
                    # Nested expression object: delegate the conversion.
                    value = get_test_obj_delegate(other_parameter.value, self_parameter[0])
                sep = other_parameter.separator.value if other_parameter.separator else None
                params.append((value, sep))
            return FN(other.first.value, other.last.value, params)
        raise Exception(f"Expecting FunctionNode but received {other=}")

    def get_expr_node(self, full_text_as_tokens=None):
        """Build the real FunctionNode this FN stands for, from the tokenized source."""
        start, end = self.get_pos_from_source(self.first, full_text_as_tokens)
        first = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
        start, end = self.get_pos_from_source(self.last, full_text_as_tokens)
        last = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
        parameters = []
        for param_value, sep in self.parameters:
            if isinstance(param_value, str):
                start, end = self.get_pos_from_source(param_value, full_text_as_tokens)
                param_as_expr_node = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
            else:
                param_as_expr_node = param_value.get_expr_node(full_text_as_tokens)
            if sep:
                # The separator sits immediately after the value; count its tokens.
                sep_tokens = Tokenizer(sep, yield_eof=False)
                start = param_as_expr_node.end + 1
                end = start + len(list(sep_tokens)) - 1
                sep_as_expr_node = NameExprNode(start, end, full_text_as_tokens[start: end + 1])
            else:
                sep_as_expr_node = None
            parameters.append(FunctionParameter(param_as_expr_node, sep_as_expr_node))
        start, end = first.start, last.end
        return FunctionNode(start, end, full_text_as_tokens[start: end + 1], first, last, parameters)
@dataclass()
class NEGCOND:
"""
@@ -1207,8 +1261,6 @@ def get_node(
sub_expr,
concept_key=None,
skip=0,
is_bnf=False,
sya=False,
init_empty_body=False,
exclude_body=False):
"""
@@ -1219,41 +1271,41 @@ def get_node(
:param concepts_map: hash of the known concepts
:param concept_key: key of the concept if different from sub_expr
:param skip: number of occurrences of sub_expr to skip
:param is_bnf: True if the concept to search is a bnf definition
:param sya: Return SyaConceptParserHelper instead of a ConceptNode when needed
:param init_empty_body: if True adds the source in the body (actually in compiled.BODY)
:param exclude_body: Ask to not compare body
:return:
"""
if sub_expr == "')'":
return ")"
if isinstance(sub_expr, list):
return [get_node(concepts_map,
expression_as_tokens,
s,
concept_key,
skip,
init_empty_body,
exclude_body) for s in sub_expr]
if isinstance(sub_expr, ReturnValueConcept):
return sub_expr
if isinstance(sub_expr, tuple):
return get_node(concepts_map,
expression_as_tokens,
sub_expr[0],
concept_key,
sub_expr[1],
init_empty_body,
exclude_body)
if isinstance(sub_expr, DoNotResolve):
return sub_expr
if isinstance(sub_expr, CIO):
sub_expr.set_concept(concepts_map[sub_expr.concept_name])
source = sub_expr.source or sub_expr.concept_name
if source:
node = get_node(concepts_map, expression_as_tokens, source, sya=sya)
sub_expr.start = node.start
sub_expr.end = node.end
if isinstance(sub_expr, (DoNotResolve, ReturnValueConcept, RETVAL)):
return sub_expr
if isinstance(sub_expr, SCWC):
sub_expr.first = get_node(concepts_map, expression_as_tokens, sub_expr.first, sya=sya)
sub_expr.last = get_node(concepts_map, expression_as_tokens, sub_expr.last, sya=sya)
sub_expr.content = [get_node(concepts_map, expression_as_tokens, c, sya=sya) for c in sub_expr.content]
sub_expr.first = get_node(concepts_map, expression_as_tokens, sub_expr.first, skip=skip)
sub_expr.last = get_node(concepts_map, expression_as_tokens, sub_expr.last, skip=skip)
sub_expr.content = [get_node(concepts_map, expression_as_tokens, c, skip=skip) for c in sub_expr.content]
sub_expr.fix_pos(sub_expr.first)
sub_expr.fix_pos(sub_expr.last)
return sub_expr
# return SourceCodeWithConceptNode(first, last, content).pseudo_fix_source()
if isinstance(sub_expr, SCN):
node = get_node(concepts_map, expression_as_tokens, sub_expr.source, sya=sya)
node = get_node(concepts_map, expression_as_tokens, sub_expr.source, skip=skip)
sub_expr.fix_pos(node)
return sub_expr
@@ -1263,13 +1315,14 @@ def get_node(
sub_expr.end = start + length - 1
return sub_expr
if isinstance(sub_expr, (CNC, CC, CN, CMV)):
if isinstance(sub_expr, (CNC, CC, CN, CMV, CIO)):
if sub_expr.concept is None or sub_expr.start is None or sub_expr.end is None:
concept_node = get_node(
concepts_map,
expression_as_tokens,
sub_expr.source or sub_expr.concept_key,
sub_expr.concept_key, sya=sya)
sub_expr.concept_key,
skip)
if not hasattr(concept_node, "concept"):
raise Exception(f"'{sub_expr.concept_key}' is not a concept. Check your map.")
concept_found = concept_node.concept
@@ -1279,7 +1332,7 @@ def get_node(
(concept_node.start, concept_node.end if hasattr(concept_node, "end") else concept_node.start))
if hasattr(sub_expr, "compiled"):
for k, v in sub_expr.compiled.items():
node = get_node(concepts_map, expression_as_tokens, v, sya=sya,
node = get_node(concepts_map, expression_as_tokens, v,
exclude_body=exclude_body) # need to get start and end positions
if isinstance(v, str) and v in concepts_map:
new_value_concept = concepts_map[v]
@@ -1299,27 +1352,17 @@ def get_node(
return sub_expr
if isinstance(sub_expr, UTN):
node = get_node(concepts_map, expression_as_tokens, sub_expr.source)
node = get_node(concepts_map, expression_as_tokens, sub_expr.source, skip=skip)
sub_expr.fix_pos(node)
return sub_expr
if isinstance(sub_expr, tuple):
return get_node(concepts_map, expression_as_tokens, sub_expr[0],
concept_key=concept_key, skip=sub_expr[1], is_bnf=is_bnf, sya=sya)
start, length = _index(expression_as_tokens, sub_expr, skip)
# special case of python source code
if "+" in sub_expr and sub_expr.strip() != "+":
return SCN(sub_expr, start, start + length - 1)
# try to match one of the concept from the map
concept_key = concept_key or sub_expr
concept_found = concepts_map.get(concept_key, None)
if concept_found:
concept_found = Concept().update_from(concept_found) # make a copy when massively used in tests
# if sya and len(concept_found.get_metadata().variables) > 0 and not is_bnf:
# return SyaConceptParserHelper(concept_found, start, start + length - 1)
if init_empty_body:
node = CNC(concept_found, sub_expr, start, start + length - 1, exclude_body=exclude_body)
init_body(node, concept_found, sub_expr)
@@ -1343,13 +1386,12 @@ def init_body(item, concept, value):
item.compiled[ConceptParts.BODY] = DoNotResolve(value)
def compute_expected_array(concepts_map, expression, expected, sya=False, init_empty_body=False, exclude_body=False):
def compute_expected_array(concepts_map, expression, expected, init_empty_body=False, exclude_body=False):
"""
Computes a simple but sufficient version of the result of infix_to_postfix()
:param concepts_map:
:param expression:
:param expected:
:param sya: if true, generate an SyaConceptParserHelper instead of a cnode
:param init_empty_body: if True adds the source in the body (actually in compiled.BODY)
:param exclude_body: do not include ConceptParts.BODY in comparison
:return:
@@ -1359,7 +1401,6 @@ def compute_expected_array(concepts_map, expression, expected, sya=False, init_e
concepts_map,
expression_as_tokens,
sub_expr,
sya=sya,
init_empty_body=init_empty_body,
exclude_body=exclude_body) for sub_expr in expected]
@@ -1398,27 +1439,6 @@ def get_source_code_node(start, text, concepts_map, id_manager=None):
return SourceCodeNode(start, start + len(tokens) - 1, tokens, text, python_node)
def resolve_test_concept(concept_map, hint):
    """
    Resolve a test hint into a concrete concept object.

    :param concept_map: mapping of concept names to concept instances
    :param hint: a concept name (str), or a CC/CMV test helper to rebuild
        with its concept resolved
    :raises NotImplementedError: for hint kinds not handled yet (CV, CIO, ...)
    """
    if isinstance(hint, str):
        # A bare name is a direct lookup in the map.
        return concept_map[hint]
    if isinstance(hint, CC):
        resolved = concept_map[hint.concept_key]
        # Recursively resolve every compiled sub-hint before rebuilding.
        sub_parts = {key: resolve_test_concept(concept_map, value)
                     for key, value in hint.compiled.items()}
        return CC(resolved, source=hint.source, exclude_body=hint.exclude_body, **sub_parts)
    if isinstance(hint, CMV):
        return CMV(concept_map[hint.concept_key], **hint.variables)
    # CV
    #
    # CMV
    #
    # CIO
    raise NotImplementedError()
def get_rete_conditions(*conditions):
"""
Transform a list of string into a list of Condition (Rete conditions)
+2 -2
View File
@@ -997,9 +997,9 @@ class TestBnfNodeParser(TestUsingMemoryBasedSheerka):
text = "one 'one' one plus two shoe"
unwanted_res = [CN("one"), SCN(" 'one' "), ("one", 1), UTN(" plus "), CN("two")]
unwanted_res = [CN("one"), SCN(" 'one' "), (CN("one"), 1), UTN(" plus "), CN("two")]
expected_res = [CNC("foo",
"one 'one' one plus two shoe",
source="one 'one' one plus two shoe",
x=CC("one"),
y=python_ret_val(" 'one' "),
z=CC("plus", source="one plus two", x="one", y="two"))]
+2 -2
View File
@@ -90,14 +90,14 @@ class TestSequenceNodeParser(TestUsingMemoryBasedSheerka):
("foo bar suffixed one", False, ["foo bar", " suffixed ", "one"]),
("foo bar one prefixed", False, ["foo bar", "one", " prefixed"]),
("foo bar one infix two", False, ["foo bar", "one", " infix ", "two"]),
("foo bar 1 + 1", False, ["foo bar", " 1 + 1"]),
("foo bar 1 + 1", False, ["foo bar", SCN(" 1 + 1")]),
("foo bar twenty one", False, ["foo bar", " twenty ", "one"]),
("foo bar x$!#", False, ["foo bar", " x$!#"]),
("suffixed one foo bar", False, ["suffixed ", "one", "foo bar"]),
("one prefixed foo bar", False, ["one", " prefixed ", "foo bar"]),
("one infix two foo bar", False, ["one", " infix ", "two", "foo bar"]),
("1 + 1 foo bar", False, ["1 + 1 ", "foo bar"]),
("1 + 1 foo bar", False, [SCN("1 + 1 "), "foo bar"]),
("twenty one foo bar", False, ["twenty ", "one", "foo bar"]),
("x$!# foo bar", False, ["x$!# ", "foo bar"]),
("func(one)", False, ["func(", "one", ")"]),
+79 -11
View File
@@ -7,17 +7,16 @@ from core.concept import Concept
from core.global_symbols import CONCEPT_COMPARISON_CONTEXT
from core.sheerka.Sheerka import RECOGNIZED_BY_KEY
from core.sheerka.services.SheerkaExecute import ParserInput
from core.tokenizer import Tokenizer, comparable_tokens
from core.tokenizer import Token, TokenKind, Tokenizer, comparable_tokens
from core.utils import get_text_from_tokens
from parsers.BaseExpressionParser import FunctionNode, FunctionParameter, NameExprNode
from parsers.BaseNodeParser import ConceptNode, SourceCodeNode, UnrecognizedTokensNode
from parsers.PythonParser import PythonNode
from parsers.SyaNodeParser import FunctionDetected, NoSyaConceptFound, NotEnoughParameters, SyaConceptParser, \
SyaNodeParser, \
SyaTokensParser, \
TokensNotFound, TooManyParameters
SyaNodeParser, SyaTokensParser, TokensNotFound, TooManyParameters
from tests.TestUsingFileBasedSheerka import TestUsingFileBasedSheerka
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
from tests.parsers.parsers_utils import CC, CN, CNC, SCN, UTN, compute_expected_array, get_test_obj, \
from tests.parsers.parsers_utils import CC, CN, CNC, RETVAL, SCN, SCWC, UTN, compute_expected_array, get_test_obj, \
prepare_nodes_comparison
cmap = {
@@ -34,7 +33,7 @@ cmap = {
}
class TestSyaNodeParser2(TestUsingMemoryBasedSheerka):
class TestSyaNodeParser(TestUsingMemoryBasedSheerka):
shared_ontology = None
@classmethod
@@ -46,11 +45,11 @@ class TestSyaNodeParser2(TestUsingMemoryBasedSheerka):
cmap["plus"].set_prop(BuiltinConcepts.ASSOCIATIVITY, "right")
cmap["mult"].set_prop(BuiltinConcepts.ASSOCIATIVITY, "right")
TestSyaNodeParser2.sheerka.set_is_greater_than(context,
BuiltinConcepts.PRECEDENCE,
cmap["mult"],
cmap["plus"],
CONCEPT_COMPARISON_CONTEXT)
TestSyaNodeParser.sheerka.set_is_greater_than(context,
BuiltinConcepts.PRECEDENCE,
cmap["mult"],
cmap["plus"],
CONCEPT_COMPARISON_CONTEXT)
cls.shared_ontology = sheerka.get_ontology(context)
sheerka.pop_ontology(context)
@@ -386,6 +385,30 @@ class TestSyaNodeParser2(TestUsingMemoryBasedSheerka):
assert concept_node_as_test_obj == resolved_expected
assert concept_node.concept.get_metadata().variables == [("a", "1 + 1 "), ("b", "2 + 2")]
def test_i_can_concept_parse_function(self):
    """Concept-parse 'plus' when its second operand is a function call."""
    sheerka, context = self.initialize_test()
    expression = "one plus func(twenty two)"
    # Pre-parse the left operand and position the input after "one plus".
    param1 = self.get_real_node(cmap, expression, "one")
    parser_input = ParserInput(expression).reset()
    parser_input.seek(2)
    sya_node_parser = SyaNodeParser()
    tokens_parser = SyaTokensParser(context, sya_node_parser, parser_input)
    # Seed the stack with the already-recognized left operand.
    tokens_parser.stack = [param1]
    concept_parser = SyaConceptParser(tokens_parser, cmap["plus"], tokens_parser.stack)
    concept_parser.parse()
    concept_node = concept_parser.concept_node
    assert not concept_parser.has_error()
    assert len(concept_parser.expected) == 0
    # b is an SCWC: the call wrapper "func(" ... ")" around the "twenties" concept.
    expected = CNC("plus", a=CNC("one"), b=SCWC("func(", ")", CN("twenties", source="twenty two")))
    resolved_expected = compute_expected_array(cmap, expression, [expected])[0]
    concept_node_as_test_obj = get_test_obj(concept_node, expected)
    assert concept_node_as_test_obj == resolved_expected
    assert concept_node.concept.get_metadata().variables == [("a", "one"), ("b", "func(twenty two)")]
def test_i_can_concept_parse_concepts_composition(self):
sheerka, context = self.initialize_test()
@@ -1385,6 +1408,25 @@ class TestSyaNodeParser2(TestUsingMemoryBasedSheerka):
# check metadata
assert expected_concept.get_metadata().variables == [("a", "twenty one")]
def test_i_can_parse_when_function(self):
    """Full parse of an expression whose 'b' operand is a function call."""
    sheerka, context, parser = self.init_parser()
    text = "one plus func(twenty two)"
    res = parser.parse(context, ParserInput(text))
    wrapper = res.body
    lexer_nodes = res.body.body
    assert res.status
    assert context.sheerka.isinstance(wrapper, BuiltinConcepts.PARSER_RESULT)
    # The function call comes back as a parser return value, not a concept node.
    expected = [CNC("plus", a=CC("one"), b=[RETVAL("func(twenty two)")], source=text)]
    _stack, _expected = prepare_nodes_comparison(cmap, text, lexer_nodes, expected)
    assert _stack == _expected
    # check the metadata
    expected_concept = lexer_nodes[0].concept
    assert expected_concept.get_metadata().variables == [("a", "one"), ("b", "func(twenty two)")]
def test_i_can_parse_sequences(self):
sheerka, context, parser = self.init_parser()
@@ -1501,6 +1543,32 @@ class TestSyaNodeParser2(TestUsingMemoryBasedSheerka):
_stack, _expected = prepare_nodes_comparison(concepts_map, text, lexer_nodes, expected)
assert _stack == _expected
def test_i_can_parse_when_expr_tokens(self):
    """Parse a token stream where the function call is a pre-built EXPR token."""
    sheerka, context, parser = self.init_parser()
    text = "one plus func(twenty two)"
    tokens = list(Tokenizer(text, yield_eof=False))
    fun_token = tokens[4]
    # Hand-build the FunctionNode for "func(twenty two)" spanning tokens 4..9.
    expr = FunctionNode(4, 9, tokens[4:10],
                        NameExprNode(4, 4, tokens[4:5]),
                        NameExprNode(9, 9, tokens[9:10]),
                        [FunctionParameter(NameExprNode(6, 8, tokens[6:9]), None)])
    # Collapse the call's tokens into a single EXPR token carrying that node.
    tokens[4:] = [Token(TokenKind.EXPR, expr, fun_token.index, fun_token.line, fun_token.column)]
    res = parser.parse(context, ParserInput(None, tokens=tokens))
    wrapper = res.body
    lexer_nodes = res.body.body
    assert res.status
    assert context.sheerka.isinstance(wrapper, BuiltinConcepts.PARSER_RESULT)
    expected = [CNC("plus", a=CC("one"), b=[RETVAL("func(twenty two)")], source=text)]
    _stack, _expected = prepare_nodes_comparison(cmap, text, lexer_nodes, expected)
    assert _stack == _expected
    # check the metadata
    expected_concept = lexer_nodes[0].concept
    assert expected_concept.get_metadata().variables == [("a", "one"), ("b", "func(twenty two)")]
@pytest.mark.parametrize("text, expected_result", [
("one plus two foo bar baz", [CNC("plus", a="one", b="two"), UTN(" foo bar baz")]),
("one plus two foo bar", [CNC("plus", a="one", b="two"), UTN(" foo bar")]),