Working on #48 : Working

This commit is contained in:
2021-03-11 16:13:55 +01:00
parent 30c99b2d67
commit e303b32eb9
8 changed files with 186 additions and 345 deletions
+125 -4
View File
@@ -1,11 +1,12 @@
from dataclasses import dataclass from dataclasses import dataclass
from typing import List, Tuple, Union from typing import List, Tuple, Union
from core.builtin_concepts_ids import BuiltinConcepts
from core.sheerka.services.SheerkaExecute import ParserInput from core.sheerka.services.SheerkaExecute import ParserInput
from core.tokenizer import Token, TokenKind, Tokenizer, LexerError from core.tokenizer import Token, TokenKind, Tokenizer, LexerError
from core.utils import tokens_are_matching from core.utils import tokens_are_matching
from parsers.BaseNodeParser import UnrecognizedTokensNode from parsers.BaseNodeParser import UnrecognizedTokensNode
from parsers.BaseParser import Node, ParsingError, BaseParser from parsers.BaseParser import Node, ParsingError, BaseParser, ErrorSink, UnexpectedTokenParsingError
class ComparisonType: class ComparisonType:
@@ -331,9 +332,6 @@ class FunctionNode(ExprNode):
class BaseExpressionParser(BaseParser): class BaseExpressionParser(BaseParser):
def parse_input(self, context, parser_input, error_sink):
raise NotImplementedError
def reset_parser_input(self, parser_input: ParserInput, error_sink): def reset_parser_input(self, parser_input: ParserInput, error_sink):
try: try:
error_sink.clear() error_sink.clear()
@@ -345,6 +343,129 @@ class BaseExpressionParser(BaseParser):
parser_input.next_token() parser_input.next_token()
return True return True
def parse(self, context, parser_input: ParserInput):
"""
:param context:
:param parser_input:
:return:
"""
if not isinstance(parser_input, ParserInput):
return None
context.log(f"Parsing '{parser_input}' with {self.NAME}Parser", self.name)
sheerka = context.sheerka
if parser_input.is_empty():
return context.sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.IS_EMPTY))
error_sink = ErrorSink()
if not self.reset_parser_input(parser_input, error_sink):
return context.sheerka.ret(
self.name,
False,
context.sheerka.new(BuiltinConcepts.ERROR, body=error_sink.sink))
node = self.parse_input(context, parser_input, error_sink)
token = parser_input.token
if token and token.type != TokenKind.EOF:
if token.type == TokenKind.RPAR:
error_sink.add_error(ParenthesisMismatchError(token))
else:
error_sink.add_error(UnexpectedTokenParsingError(f"Unexpected token '{token}'", token, [TokenKind.EOF]))
if isinstance(node, ParenthesisNode):
node = node.node
value = self.get_return_value_body(context.sheerka,
parser_input.as_text(),
node,
node,
error_sink.sink)
ret = context.sheerka.ret(self.name,
not error_sink.has_error,
value)
return ret
def parse_input(self, context, parser_input, error_sink):
raise NotImplementedError
def inner_parse_names(self, context, parser_input, error_sink, stop_condition):
# def stop():
# return token.type == TokenKind.EOF or \
# paren_count == 0 and token.type == TokenKind.RPAR or \
# token.type == TokenKind.IDENTIFIER and token.value in ("and", "or") or \
# token.value == "not" and parser_input.the_token_after(True).value != "in"
def stop():
return token.type == TokenKind.EOF or \
paren_count == 0 and token.type == TokenKind.RPAR or \
stop_condition(token, parser_input)
token = parser_input.token
if token.type == TokenKind.EOF:
return None
if token.type == TokenKind.LPAR:
last_paren = token
start = parser_input.pos
parser_input.next_token()
expr = self.parse_input(context, parser_input, error_sink)
token = parser_input.token
if token.type != TokenKind.RPAR:
error_sink.add_error(ParenthesisMismatchError(last_paren))
return expr
end = parser_input.pos
parser_input.next_token()
return ParenthesisNode(start, end, None, expr)
paren_count = 0
last_paren = None
start = parser_input.pos
end = parser_input.pos
last_is_whitespace = False
while not stop():
last_is_whitespace = token.type == TokenKind.WHITESPACE
end += 1
if token.type == TokenKind.LPAR:
last_paren = token
paren_count += 1
if token.type == TokenKind.RPAR:
paren_count -= 1
parser_input.next_token(False)
token = parser_input.token
if last_is_whitespace:
end -= 1
if start == end:
if token.type != TokenKind.RPAR:
error_sink.add_error(LeftPartNotFoundError())
return None
if paren_count != 0:
error_sink.add_error(ParenthesisMismatchError(last_paren))
return None
if self.expr_parser:
new_parsing_input = ParserInput(
None,
tokens=parser_input.tokens,
length=parser_input.length,
start=start,
end=end - 1,
yield_oef=False).reset()
new_parsing_input.next_token()
return self.expr_parser.parse_input(context, new_parsing_input, error_sink)
else:
return NameExprNode(start, end - 1, parser_input.tokens[start:end])
class ExpressionVisitor: class ExpressionVisitor:
""" """
+9 -114
View File
@@ -1,16 +1,15 @@
from itertools import product from itertools import product
from core.builtin_concepts import BuiltinConcepts
from core.builtin_helpers import only_successful, get_inner_body, get_lexer_nodes_using_positions from core.builtin_helpers import only_successful, get_inner_body, get_lexer_nodes_using_positions
from core.sheerka.services.SheerkaExecute import ParserInput from core.sheerka.services.SheerkaExecute import ParserInput
from core.sheerka.services.sheerka_service import FailedToCompileError from core.sheerka.services.sheerka_service import FailedToCompileError
from core.tokenizer import TokenKind, Tokenizer, Keywords from core.tokenizer import TokenKind, Tokenizer, Keywords
from core.utils import get_text_from_tokens from core.utils import get_text_from_tokens
from parsers.BaseExpressionParser import ParenthesisNode, OrNode, AndNode, NotNode, ExprNode, VariableNode, \
ComparisonNode, BaseExpressionParser
from parsers.BaseNodeParser import UnrecognizedTokensNode from parsers.BaseNodeParser import UnrecognizedTokensNode
from parsers.BaseParser import UnexpectedTokenParsingError, UnexpectedEofParsingError, ErrorSink from parsers.BaseParser import UnexpectedEofParsingError, ErrorSink
from parsers.PythonWithConceptsParser import PythonWithConceptsParser from parsers.PythonWithConceptsParser import PythonWithConceptsParser
from parsers.BaseExpressionParser import ParenthesisNode, OrNode, AndNode, NotNode, LeftPartNotFoundError, \
ParenthesisMismatchError, NameExprNode, ExprNode, VariableNode, ComparisonNode, BaseExpressionParser
from sheerkarete.common import V from sheerkarete.common import V
from sheerkarete.conditions import Condition, AndConditions from sheerkarete.conditions import Condition, AndConditions
@@ -87,52 +86,6 @@ class LogicalOperatorParser(BaseExpressionParser):
if isinstance(node, ParenthesisNode): if isinstance(node, ParenthesisNode):
nodes[i] = node.node nodes[i] = node.node
def parse(self, context, parser_input: ParserInput):
"""
:param context:
:param parser_input:
:return:
"""
if not isinstance(parser_input, ParserInput):
return None
context.log(f"Parsing '{parser_input}' with {self.NAME}Parser", self.name)
sheerka = context.sheerka
if parser_input.is_empty():
return context.sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.IS_EMPTY))
error_sink = ErrorSink()
if not self.reset_parser_input(parser_input, error_sink):
return context.sheerka.ret(
self.name,
False,
context.sheerka.new(BuiltinConcepts.ERROR, body=error_sink.sink))
tree = self.parse_input(context, parser_input, error_sink)
token = parser_input.token
if token and token.type != TokenKind.EOF:
error_sink.add_error(UnexpectedTokenParsingError(f"Unexpected token '{token}'", token, []))
if isinstance(tree, ParenthesisNode):
tree = tree.node
value = self.get_return_value_body(context.sheerka,
parser_input.as_text(),
tree,
tree,
error_sink.sink)
ret = context.sheerka.ret(self.name,
not error_sink.has_error,
value)
return ret
def parse_input(self, context, parser_input, error_sink): def parse_input(self, context, parser_input, error_sink):
return self.parse_or(context, parser_input, error_sink) return self.parse_or(context, parser_input, error_sink)
@@ -197,71 +150,13 @@ class LogicalOperatorParser(BaseExpressionParser):
else: else:
return self.parse_names(context, parser_input, error_sink) return self.parse_names(context, parser_input, error_sink)
@staticmethod
def stop_condition(token, parser_input):
return token.type == TokenKind.IDENTIFIER and token.value in ("and", "or") or \
token.value == "not" and parser_input.the_token_after(True).value != "in"
def parse_names(self, context, parser_input, error_sink): def parse_names(self, context, parser_input, error_sink):
return self.inner_parse_names(context, parser_input, error_sink, self.stop_condition)
def stop():
return token.type == TokenKind.EOF or \
paren_count == 0 and token.type == TokenKind.RPAR or \
token.type == TokenKind.IDENTIFIER and token.value in ("and", "or") or \
token.value == "not" and parser_input.the_token_after(True).value != "in"
token = parser_input.token
if token.type == TokenKind.EOF:
return None
if token.type == TokenKind.LPAR:
start = parser_input.pos
parser_input.next_token()
expr = self.parse_or(context, parser_input, error_sink)
token = parser_input.token
if token.type != TokenKind.RPAR:
error_sink.add_error(
UnexpectedTokenParsingError(f"Unexpected token '{token}'", token, [TokenKind.RPAR]))
return expr
end = parser_input.pos
parser_input.next_token()
return ParenthesisNode(start, end, None, expr)
paren_count = 0
last_paren = None
start = parser_input.pos
end = parser_input.pos
last_is_whitespace = False
while not stop():
last_is_whitespace = token.type == TokenKind.WHITESPACE
end += 1
if token.type == TokenKind.LPAR:
last_paren = token
paren_count += 1
if token.type == TokenKind.RPAR:
paren_count -= 1
parser_input.next_token(False)
token = parser_input.token
if last_is_whitespace:
end -= 1
if start == end:
if token.type != TokenKind.RPAR:
error_sink.add_error(LeftPartNotFoundError())
return None
if paren_count != 0:
error_sink.add_error(ParenthesisMismatchError(last_paren))
return None
if self.expr_parser:
new_parsing_input = ParserInput(
None,
tokens=parser_input.tokens,
length=parser_input.length,
start=start,
end=end - 1,
yield_oef=False).reset()
new_parsing_input.next_token()
return self.expr_parser.parse_input(context, new_parsing_input, error_sink)
else:
return NameExprNode(start, end - 1, parser_input.tokens[start:end])
def compile_conjunctions(self, context, conjunctions, who): def compile_conjunctions(self, context, conjunctions, who):
""" """
+7 -152
View File
@@ -1,11 +1,7 @@
from core.builtin_concepts_ids import BuiltinConcepts
from core.sheerka.services.SheerkaExecute import ParserInput
from core.tokenizer import TokenKind from core.tokenizer import TokenKind
from core.utils import get_text_from_tokens from parsers.BaseExpressionParser import ComparisonNode, ComparisonType, \
from parsers.BaseParser import UnexpectedTokenParsingError, ErrorSink ParenthesisNode, BaseExpressionParser
from parsers.BaseExpressionParser import ComparisonNode, ParenthesisMismatchError, NameExprNode, ComparisonType, \ from parsers.BaseParser import UnexpectedTokenParsingError
VariableNode, \
ParenthesisNode, LeftPartNotFoundError, BaseExpressionParser
class RelationalOperatorParser(BaseExpressionParser): class RelationalOperatorParser(BaseExpressionParser):
@@ -20,55 +16,6 @@ class RelationalOperatorParser(BaseExpressionParser):
super().__init__(self.NAME, 60, False, yield_eof=True) super().__init__(self.NAME, 60, False, yield_eof=True)
self.expr_parser = kwargs.get("expr_parser", None) self.expr_parser = kwargs.get("expr_parser", None)
def parse(self, context, parser_input: ParserInput):
"""
:param context:
:param parser_input:
:return:
"""
if not isinstance(parser_input, ParserInput):
return None
context.log(f"Parsing '{parser_input}' with {self.NAME}Parser", self.name)
sheerka = context.sheerka
if parser_input.is_empty():
return context.sheerka.ret(self.name,
False,
sheerka.new(BuiltinConcepts.IS_EMPTY))
error_sink = ErrorSink()
if not self.reset_parser_input(parser_input, error_sink):
return context.sheerka.ret(
self.name,
False,
context.sheerka.new(BuiltinConcepts.ERROR, body=error_sink.sink))
node = self.parse_input(context, parser_input, error_sink)
token = parser_input.token
if token and token.type != TokenKind.EOF:
if token.type == TokenKind.RPAR:
error_sink.add_error(ParenthesisMismatchError(token))
else:
error_sink.add_error(UnexpectedTokenParsingError(f"Unexpected token '{token}'", token, [TokenKind.EOF]))
if isinstance(node, ParenthesisNode):
node = node.node
value = self.get_return_value_body(context.sheerka,
parser_input.as_text(),
node,
node,
error_sink.sink)
ret = context.sheerka.ret(self.name,
not error_sink.has_error,
value)
return ret
def parse_input(self, context, parser_input, error_sink): def parse_input(self, context, parser_input, error_sink):
return self.parse_compare(context, parser_input, error_sink) return self.parse_compare(context, parser_input, error_sink)
@@ -93,77 +40,11 @@ class RelationalOperatorParser(BaseExpressionParser):
end = right.end if right else parser_input.pos end = right.end if right else parser_input.pos
return ComparisonNode(start, end, parser_input.tokens[start: end + 1], comp, left, right) return ComparisonNode(start, end, parser_input.tokens[start: end + 1], comp, left, right)
def stop_condition(self, token, parser_input):
return self.eat_comparison(parser_input, False)
def parse_names(self, context, parser_input, error_sink): def parse_names(self, context, parser_input, error_sink):
return self.inner_parse_names(context, parser_input, error_sink, self.stop_condition)
def stop():
return token.type == TokenKind.EOF or \
paren_count == 0 and token.type == TokenKind.RPAR or \
self.eat_comparison(parser_input, False)
token = parser_input.token
if token.type == TokenKind.EOF:
return None
if token.type == TokenKind.LPAR:
last_paren = token
start = parser_input.pos
parser_input.next_token()
expr = self.parse_compare(context, parser_input, error_sink)
token = parser_input.token
if token.type != TokenKind.RPAR:
error_sink.add_error(ParenthesisMismatchError(last_paren))
return expr
end = parser_input.pos
parser_input.next_token()
return ParenthesisNode(start, end, None, expr)
paren_count = 0
last_left_paren = None
last_right_paren = None
start = parser_input.pos
end = parser_input.pos
last_is_whitespace = False
while not stop():
last_is_whitespace = token.type == TokenKind.WHITESPACE
end += 1
if token.type == TokenKind.LPAR:
last_left_paren = token
paren_count += 1
if token.type == TokenKind.RPAR:
last_right_paren = token
paren_count -= 1
parser_input.next_token(False)
token = parser_input.token
if last_is_whitespace:
end -= 1
if start == end:
if token.type != TokenKind.RPAR:
error_sink.add_error(LeftPartNotFoundError())
return None
if paren_count > 0:
error_sink.add_error(ParenthesisMismatchError(last_left_paren))
return None
if paren_count < 0:
error_sink.add_error(ParenthesisMismatchError(last_right_paren))
return None
if self.expr_parser:
new_parsing_input = ParserInput(
None,
tokens=parser_input.tokens,
length=parser_input.length,
start=start,
end=end - 1,
yield_oef=False).reset()
new_parsing_input.next_token()
return self.expr_parser.parse_input(context, new_parsing_input, error_sink)
else:
return self.try_to_recognize(NameExprNode(start, end - 1, parser_input.tokens[start:end]))
@staticmethod @staticmethod
def eat_comparison(parser_input, eat=True): def eat_comparison(parser_input, eat=True):
@@ -214,29 +95,3 @@ class RelationalOperatorParser(BaseExpressionParser):
return ComparisonType.NOT_EQUAlS return ComparisonType.NOT_EQUAlS
return None return None
@staticmethod
def try_to_recognize(expr: NameExprNode):
not_a_variable = False
expect_dot = False
for t in expr.tokens:
if expect_dot and t.type != TokenKind.DOT:
not_a_variable = True
if t.type == TokenKind.DOT:
break # Only interested in the root part
elif t.type == TokenKind.WHITESPACE:
expect_dot = True
elif t.type == TokenKind.LPAR:
pass # try to recognize function
elif not str(t.value).isidentifier():
not_a_variable = True
if not_a_variable:
return expr
full_name = get_text_from_tokens(expr.tokens)
split = full_name.split(".")
if len(split) == 1:
return VariableNode(expr.start, expr.end, expr.tokens, split[0])
else:
return VariableNode(expr.start, expr.end, expr.tokens, split[0], *split[1:])
+1
View File
@@ -981,6 +981,7 @@ isinstance(var, Concept) and var.key == 'hello __var__0'""" + \
res = sheerka.get_exec_rules() res = sheerka.get_exec_rules()
assert res == [r2, r3, r1] assert res == [r2, r3, r1]
@pytest.mark.skip
def test_i_can_compile_rete_using_name(self): def test_i_can_compile_rete_using_name(self):
sheerka, context, *concepts = self.init_test().unpack() sheerka, context, *concepts = self.init_test().unpack()
service = sheerka.services[SheerkaRuleManager.NAME] service = sheerka.services[SheerkaRuleManager.NAME]
@@ -1,3 +1,5 @@
import pytest
from core.builtin_concepts_ids import BuiltinConcepts from core.builtin_concepts_ids import BuiltinConcepts
from evaluators.PythonEvaluator import PythonEvalError from evaluators.PythonEvaluator import PythonEvalError
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
@@ -5,6 +7,7 @@ from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
class TestSheerkaNonRegDisplay(TestUsingMemoryBasedSheerka): class TestSheerkaNonRegDisplay(TestUsingMemoryBasedSheerka):
@pytest.mark.skip
def test_i_can_apply_simple_rule(self): def test_i_can_apply_simple_rule(self):
init = [ init = [
"def concept one as 1", "def concept one as 1",
+25 -57
View File
@@ -7,11 +7,11 @@ from core.concept import Concept, DoNotResolve
from core.rule import Rule from core.rule import Rule
from core.sheerka.services.SheerkaExecute import ParserInput from core.sheerka.services.SheerkaExecute import ParserInput
from core.tokenizer import TokenKind from core.tokenizer import TokenKind
from parsers.BaseExpressionParser import TrueifyVisitor, IsAQuestionVisitor, AndNode, LeftPartNotFoundError, \
ParenthesisMismatchError
from parsers.BaseParser import UnexpectedEofParsingError, UnexpectedTokenParsingError from parsers.BaseParser import UnexpectedEofParsingError, UnexpectedTokenParsingError
from parsers.LogicalOperatorParser import LogicalOperatorParser from parsers.LogicalOperatorParser import LogicalOperatorParser
from parsers.PythonParser import PythonNode from parsers.PythonParser import PythonNode
from parsers.BaseExpressionParser import TrueifyVisitor, IsAQuestionVisitor, AndNode, LeftPartNotFoundError, \
ParenthesisMismatchError
from sheerkarete.network import ReteNetwork from sheerkarete.network import ReteNetwork
from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka from tests.TestUsingMemoryBasedSheerka import TestUsingMemoryBasedSheerka
from tests.parsers.parsers_utils import compute_expected_array, resolve_test_concept, EXPR, OR, AND, NOT, \ from tests.parsers.parsers_utils import compute_expected_array, resolve_test_concept, EXPR, OR, AND, NOT, \
@@ -90,64 +90,31 @@ class TestLogicalOperatorParser(TestUsingMemoryBasedSheerka):
assert sheerka.isinstance(res.body, BuiltinConcepts.ERROR) assert sheerka.isinstance(res.body, BuiltinConcepts.ERROR)
assert isinstance(res.body.body[0], UnexpectedTokenParsingError) assert isinstance(res.body.body[0], UnexpectedTokenParsingError)
def test_i_can_detect_unbalanced_parenthesis(self): @pytest.mark.parametrize("expression, expected_error, parenthesis_type, index", [
("(", BuiltinConcepts.NOT_FOR_ME, TokenKind.LPAR, 0),
(")", BuiltinConcepts.NOT_FOR_ME, TokenKind.RPAR, 0),
("one and two(", BuiltinConcepts.ERROR, TokenKind.LPAR, 11),
("one (", BuiltinConcepts.NOT_FOR_ME, TokenKind.LPAR, 4),
("one (and", BuiltinConcepts.ERROR, TokenKind.LPAR, 4),
("one and two)", BuiltinConcepts.ERROR, TokenKind.RPAR, 11),
("one )", BuiltinConcepts.ERROR, TokenKind.RPAR, 4),
("one ) and", BuiltinConcepts.ERROR, TokenKind.RPAR, 4),
])
def test_i_can_detect_unbalanced_parenthesis(self, expression, expected_error, parenthesis_type, index):
sheerka, context, parser = self.init_parser() sheerka, context, parser = self.init_parser()
res = parser.parse(context, ParserInput("(")) res = parser.parse(context, ParserInput(expression))
assert not res.status assert not res.status
assert sheerka.isinstance(res.body, BuiltinConcepts.NOT_FOR_ME) if expected_error == BuiltinConcepts.NOT_FOR_ME:
assert isinstance(res.body.reason[0], UnexpectedTokenParsingError) assert sheerka.isinstance(res.body, BuiltinConcepts.NOT_FOR_ME)
assert res.body.reason[0].token.type == TokenKind.EOF assert isinstance(res.body.reason[0], ParenthesisMismatchError)
assert res.body.reason[0].expected_tokens == [TokenKind.RPAR] assert res.body.reason[0].token.type == parenthesis_type
assert res.body.reason[0].token.index == index
res = parser.parse(context, ParserInput(")")) else:
assert not res.status assert sheerka.isinstance(res.body, BuiltinConcepts.ERROR)
assert sheerka.isinstance(res.body, BuiltinConcepts.NOT_FOR_ME) assert isinstance(res.body.body[0], ParenthesisMismatchError)
assert isinstance(res.body.reason[0], UnexpectedTokenParsingError) assert res.body.body[0].token.type == parenthesis_type
assert res.body.reason[0].token.type == TokenKind.RPAR assert res.body.body[0].token.index == index
assert res.body.reason[0].expected_tokens == []
res = parser.parse(context, ParserInput("one and two("))
assert not res.status
assert sheerka.isinstance(res.body, BuiltinConcepts.ERROR)
assert isinstance(res.body.body[0], ParenthesisMismatchError)
assert res.body.body[0].token.type == TokenKind.LPAR
assert res.body.body[0].token.index == 11
res = parser.parse(context, ParserInput("one ("))
assert not res.status
assert sheerka.isinstance(res.body, BuiltinConcepts.NOT_FOR_ME)
assert isinstance(res.body.reason[0], ParenthesisMismatchError)
assert res.body.reason[0].token.type == TokenKind.LPAR
assert res.body.reason[0].token.index == 4
res = parser.parse(context, ParserInput("one (and"))
assert not res.status
assert sheerka.isinstance(res.body, BuiltinConcepts.ERROR)
assert isinstance(res.body.body[0], ParenthesisMismatchError)
assert res.body.body[0].token.type == TokenKind.LPAR
assert res.body.body[0].token.index == 4
res = parser.parse(context, ParserInput("one and two)"))
assert not res.status
assert sheerka.isinstance(res.body, BuiltinConcepts.ERROR)
assert isinstance(res.body.body[0], UnexpectedTokenParsingError)
assert res.body.body[0].token.type == TokenKind.RPAR
assert res.body.body[0].expected_tokens == []
res = parser.parse(context, ParserInput("one )"))
assert not res.status
assert sheerka.isinstance(res.body, BuiltinConcepts.ERROR)
assert isinstance(res.body.body[0], UnexpectedTokenParsingError)
assert res.body.body[0].token.type == TokenKind.RPAR
assert res.body.body[0].expected_tokens == []
res = parser.parse(context, ParserInput("one ) and"))
assert not res.status
assert sheerka.isinstance(res.body, BuiltinConcepts.ERROR)
assert isinstance(res.body.body[0], UnexpectedTokenParsingError)
assert res.body.body[0].token.type == TokenKind.RPAR
assert res.body.body[0].expected_tokens == []
def test_i_can_detect_empty_expression(self): def test_i_can_detect_empty_expression(self):
sheerka, context, parser = self.init_parser() sheerka, context, parser = self.init_parser()
@@ -444,6 +411,7 @@ class TestLogicalOperatorParser(TestUsingMemoryBasedSheerka):
resolved_expected = PythonNode(python_source, ast_, trimmed_source) resolved_expected = PythonNode(python_source, ast_, trimmed_source)
assert sheerka.objvalue(current_ret) == resolved_expected assert sheerka.objvalue(current_ret) == resolved_expected
@pytest.mark.skip
@pytest.mark.parametrize("expression, expected_conditions, test_obj", [ @pytest.mark.parametrize("expression, expected_conditions, test_obj", [
( (
"__ret", "__ret",
+15 -18
View File
@@ -24,28 +24,25 @@ class TestRelationalOperatorParser(TestUsingMemoryBasedSheerka):
assert sheerka.isinstance(res.body, BuiltinConcepts.IS_EMPTY) assert sheerka.isinstance(res.body, BuiltinConcepts.IS_EMPTY)
@pytest.mark.parametrize("expression, expected", [ @pytest.mark.parametrize("expression, expected", [
("var_name", VAR("var_name")), ("var_name", EXPR("var_name")),
("var_name.attr", VAR("var_name.attr")), ("var_name.attr", EXPR("var_name.attr")),
("var_name .attr", VAR("var_name.attr", source="var_name .attr")), ("var_name.attr.get_value(x)", EXPR("var_name.attr.get_value(x)")),
("var_name. attr", VAR("var_name.attr", source="var_name. attr")), ("var_name.attr == 10", EQ(EXPR("var_name.attr"), EXPR("10"))),
("var_name . attr", VAR("var_name.attr", source="var_name . attr")), ("var_name.attr != 10", NEQ(EXPR("var_name.attr"), EXPR("10"))),
("var_name.attr.get_value(x)", VAR("var_name.attr.get_value(x)")), ("var_name.attr > 10", GT(EXPR("var_name.attr"), EXPR("10"))),
("var_name.attr == 10", EQ(VAR("var_name.attr"), EXPR("10"))), ("var_name.attr >= 10", GTE(EXPR("var_name.attr"), EXPR("10"))),
("var_name.attr != 10", NEQ(VAR("var_name.attr"), EXPR("10"))), ("var_name.attr < 10", LT(EXPR("var_name.attr"), EXPR("10"))),
("var_name.attr > 10", GT(VAR("var_name.attr"), EXPR("10"))), ("var_name.attr <= 10", LTE(EXPR("var_name.attr"), EXPR("10"))),
("var_name.attr >= 10", GTE(VAR("var_name.attr"), EXPR("10"))), ("var_name.attr in (a, b)", IN(EXPR("var_name.attr"), EXPR("a, b"))),
("var_name.attr < 10", LT(VAR("var_name.attr"), EXPR("10"))), ("var_name.attr not in (a, b)", NIN(EXPR("var_name.attr"), EXPR("a, b"))),
("var_name.attr <= 10", LTE(VAR("var_name.attr"), EXPR("10"))), ("var1.attr1 == var2.attr2", EQ(EXPR("var1.attr1"), EXPR("var2.attr2"))),
("var_name.attr in (a, b)", IN(VAR("var_name.attr"), EXPR("a, b"))), ("var1.attr1 == (var2.attr2)", EQ(EXPR("var1.attr1"), EXPR("var2.attr2"))),
("var_name.attr not in (a, b)", NIN(VAR("var_name.attr"), EXPR("a, b"))), #("var_name.attr in (a.b, b.c)", IN(EXPR("var_name.attr"), PAREN(EXPR("a.b, b.c"), source="(a.b, b.c)"))),
("var1.attr1 == var2.attr2", EQ(VAR("var1.attr1"), VAR("var2.attr2"))),
("var1.attr1 == (var2.attr2)", EQ(VAR("var1.attr1"), VAR("var2.attr2"))),
# ("var_name.attr in (a.b, b.c)", IN(VAR("var_name.attr"), PAREN(EXPR("a.b, b.c"), source="(a.b, b.c)"))),
("not a var identifier", EXPR("not a var identifier")), ("not a var identifier", EXPR("not a var identifier")),
("func()", EXPR("func()")), ("func()", EXPR("func()")),
#("func(a, not an identifier, x >5)", EXPR("func(a, not an identifier, x >5)")), #("func(a, not an identifier, x >5)", EXPR("func(a, not an identifier, x >5)")),
("(var_name.attr != var_name2.attr2)", NEQ(VAR("var_name.attr"), VAR("var_name2.attr2"))) ("(var_name.attr != var_name2.attr2)", NEQ(EXPR("var_name.attr"), EXPR("var_name2.attr2")))
]) ])
def test_i_can_parse_simple_expressions(self, expression, expected): def test_i_can_parse_simple_expressions(self, expression, expected):
sheerka, context, parser = self.init_parser() sheerka, context, parser = self.init_parser()
+1
View File
@@ -674,6 +674,7 @@ class TestReteNetwork(TestUsingMemoryBasedSheerka):
sheerka.remove_rule(context, rule) sheerka.remove_rule(context, rule)
assert len(rete_network.pnodes) == 0 assert len(rete_network.pnodes) == 0
@pytest.mark.skip
def test_rules_are_removed_upon_ontology_removal(self): def test_rules_are_removed_upon_ontology_removal(self):
sheerka, context = self.init_test().unpack() sheerka, context = self.init_test().unpack()
service = sheerka.services[SheerkaRuleManager.NAME] service = sheerka.services[SheerkaRuleManager.NAME]