Added ExactConceptParser
This commit is contained in:
@@ -0,0 +1,152 @@
|
||||
import pytest
|
||||
from os import path
|
||||
import shutil
|
||||
import os
|
||||
|
||||
from core.concept import Concept, Property
|
||||
from core.sheerka import Sheerka, ExecutionContext
|
||||
from parsers.DefaultParser import DefaultParser
|
||||
from parsers.ExactConceptParser import ExactConceptParser
|
||||
|
||||
tests_root = path.abspath("../build/tests")
|
||||
root_folder = "init_folder"
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def init_test():
    """Run every test inside a fresh, empty ``tests_root`` directory.

    Deletes any leftover tree from a previous run, recreates ``tests_root``,
    chdirs into it for the duration of the test, and restores the previous
    working directory during teardown (pytest runs the code after ``yield``
    even when the test fails).
    """
    if path.exists(tests_root):
        shutil.rmtree(tests_root)
    # rmtree above guarantees the directory is gone, so create it
    # unconditionally (the original re-checked existence, which was
    # always false at this point).
    os.makedirs(tests_root)

    current_pwd = os.getcwd()
    os.chdir(tests_root)

    yield None

    # Teardown: restore the caller's working directory.
    os.chdir(current_pwd)
|
||||
|
||||
|
||||
def test_i_can_compute_combinations():
    """Every way of masking the tokens with ``__var__N`` placeholders is produced."""
    result = ExactConceptParser().combinations(["foo", "bar", "baz"])

    expected = {
        ('foo', 'bar', 'baz'),
        ('__var__0', 'bar', 'baz'),
        ('foo', '__var__0', 'baz'),
        ('foo', 'bar', '__var__0'),
        ('__var__0', '__var__1', 'baz'),
        ('__var__0', 'bar', '__var__1'),
        ('foo', '__var__0', '__var__1'),
        ('__var__0', '__var__1', '__var__2'),
    }
    assert result == expected
|
||||
|
||||
|
||||
def test_i_can_compute_combinations_with_duplicates():
    """Repeated tokens must be masked by the SAME placeholder in each combination."""
    result = ExactConceptParser().combinations(["foo", "bar", "foo"])

    expected = {
        ('foo', 'bar', 'foo'),
        ('__var__0', 'bar', '__var__0'),
        ('foo', '__var__0', 'foo'),
        ('__var__0', '__var__1', '__var__0'),
        ('__var__1', '__var__0', '__var__1'),
    }
    assert result == expected
    # TODO: the last tuple is not possible, so the algo can be improved
|
||||
|
||||
|
||||
def test_i_can_recognize_a_simple_concept():
    """A cached, variable-free concept is matched verbatim by the parser."""
    sheerka = get_sheerka()
    concept = get_concept("hello world", [])
    sheerka.add_in_cache(concept)

    context = ExecutionContext(sheerka, "xxxx")
    results = ExactConceptParser().parse(context, "hello world")

    assert len(results) == 1
    first = results[0]
    assert first.status
    assert first.value.key == concept.key
|
||||
|
||||
|
||||
def test_i_can_recognize_concepts_defined_several_times():
    """When a source matches several cached concepts, each match is returned."""
    sheerka = get_sheerka()
    sheerka.add_in_cache(get_concept("hello world", []))
    sheerka.add_in_cache(get_concept("hello a", ["a"]))

    context = ExecutionContext(sheerka, "xxxx")
    results = ExactConceptParser().parse(context, "hello world")

    assert len(results) == 2
    # Parse results come from a set, so order is arbitrary — sort for stable asserts.
    by_name = sorted(results, key=lambda match: match.value.name)

    first, second = by_name
    assert first.status
    assert first.value.name == "hello a"
    assert first.value.props["a"].value == "world"

    assert second.status
    assert second.value.name == "hello world"
|
||||
|
||||
|
||||
def test_i_can_recognize_a_concept_with_variables():
    """Tokens at variable positions are captured into the matching props."""
    sheerka = get_sheerka()
    concept = get_concept("a + b", ["a", "b"])
    # NOTE(review): this writes into concepts_cache directly instead of using
    # add_in_cache() like the other tests — confirm the two are equivalent.
    sheerka.concepts_cache[concept.key] = concept

    context = ExecutionContext(sheerka, "xxxx")
    results = ExactConceptParser().parse(context, "10 + 5")

    assert len(results) == 1
    match = results[0]
    assert match.status
    assert match.value.key == concept.key
    assert match.value.props["a"].value == "10"
    assert match.value.props["b"].value == "5"
|
||||
|
||||
|
||||
def test_i_can_recognize_a_concept_with_duplicate_variables():
    """A variable appearing twice must bind to the same token both times."""
    sheerka = get_sheerka()
    concept = get_concept("a + b + a", ["a", "b"])
    # NOTE(review): writes into concepts_cache directly instead of using
    # add_in_cache() like the other tests — confirm the two are equivalent.
    sheerka.concepts_cache[concept.key] = concept

    context = ExecutionContext(sheerka, "xxxx")
    results = ExactConceptParser().parse(context, "10 + 5 + 10")

    assert len(results) == 1
    match = results[0]
    assert match.status
    assert match.value.key == concept.key
    assert match.value.props["a"].value == "10"
    assert match.value.props["b"].value == "5"
|
||||
|
||||
|
||||
def test_i_can_manage_unknown_concept():
    """Parsing a source that matches no cached concept yields an UNKNOWN_CONCEPT result."""
    sheerka = get_sheerka()
    context = ExecutionContext(sheerka, "xxxx")

    # this is not a concept by itself
    res = ExactConceptParser().parse(context, "def concept hello world")

    # NOTE(review): `res` is used as a single result here, while the other
    # tests index parse() results as a list — verify parse() really returns
    # a scalar on failure.
    assert not res.status
    assert sheerka.isinstance(res.value, Sheerka.UNKNOWN_CONCEPT_NAME)
|
||||
|
||||
|
||||
def test_i_can_detect_concepts_too_long():
    """Sources exceeding the parser's length limit yield a CONCEPT_TOO_LONG result."""
    sheerka = get_sheerka()
    context = ExecutionContext(sheerka, "xxxx")

    res = ExactConceptParser().parse(
        context, "a very very long concept that cannot be an unique one")

    # NOTE(review): `res` is used as a single result here, while the success
    # tests index parse() results as a list — verify parse() really returns
    # a scalar on failure.
    assert not res.status
    assert sheerka.isinstance(res.value, Sheerka.CONCEPT_TOO_LONG_CONCEPT_NAME)
|
||||
|
||||
|
||||
def get_concept(name, variables):
    """Build a Concept called *name* with one null-valued Property per entry of *variables*.

    The key is (re)computed after the props are set so it reflects the
    variable placeholders.
    """
    concept = Concept(name=name)
    # Iterating an empty list is a no-op, so the original `if variables:`
    # guard was redundant and has been dropped.
    for variable in variables:
        concept.props[variable] = Property(variable, None)
    concept.init_key()
    return concept
|
||||
|
||||
|
||||
def get_sheerka():
    """Return a Sheerka instance initialised against the shared test root folder."""
    instance = Sheerka()
    instance.initialize(root_folder)
    return instance
|
||||
@@ -0,0 +1,38 @@
|
||||
import pytest
|
||||
|
||||
from core.concept import Concept
|
||||
|
||||
|
||||
@pytest.mark.parametrize("name, variables, expected", [
    ("my name is a", ["a"], "my name is __var__0"),
    ("a b c d", ["b", "c"], "a __var__0 __var__1 d"),
    ("a 'b c' d", ["b", "c"], "a b c d"),
    ("a | b", ["a", "b"], "__var__0 | __var__1"),
    ("a b a c", ["a", "b"], "__var__0 __var__1 __var__0 c"),
    ("a b a c", ["b", "a"], "__var__1 __var__0 __var__1 c"),
])
def test_i_can_get_concept_key(name, variables, expected):
    """init_key() replaces each declared variable with a stable ``__var__N`` marker."""
    concept = Concept(name)
    for variable in variables:
        concept.set_prop(variable, None)

    concept.init_key()
    assert concept.key == expected
|
||||
|
||||
|
||||
def test_i_can_serialize():
    """Placeholder for Concept.to_dict() coverage.

    :return:
    """
    # TODO
    pass
|
||||
|
||||
|
||||
def test_i_can_deserialize():
    """Placeholder for Concept.from_dict() coverage.

    :return:
    """
    # TODO
    pass
|
||||
+27
-22
@@ -1,10 +1,11 @@
|
||||
import pytest
|
||||
|
||||
from parsers.ExactConceptParser import ExactConceptParser
|
||||
from parsers.PythonParser import PythonParser, PythonNode, PythonErrorNode
|
||||
from parsers.tokenizer import Tokenizer, Token, TokenKind, Keywords, LexerError
|
||||
from core.tokenizer import Tokenizer, Token, TokenKind, Keywords, LexerError
|
||||
from parsers.DefaultParser import DefaultParser
|
||||
from parsers.DefaultParser import NumberNode, StringNode, VariableNode, TrueNode, FalseNode, NullNode, BinaryNode
|
||||
from parsers.DefaultParser import Node, UnexpectedTokenErrorNode, DefConceptNode, NopNode
|
||||
from parsers.DefaultParser import UnexpectedTokenErrorNode, DefConceptNode, NopNode
|
||||
import ast
|
||||
|
||||
|
||||
@@ -39,6 +40,7 @@ def null():
|
||||
def b(operator, left, right):
|
||||
return BinaryNode([], operator, left, right)
|
||||
|
||||
|
||||
def compare_ast(left, right):
|
||||
left_as_string = ast.dump(left)
|
||||
left_as_string = left_as_string.replace(", ctx=Load()", "")
|
||||
@@ -51,9 +53,8 @@ def compare_ast(left, right):
|
||||
return left_as_string == right_as_string
|
||||
|
||||
|
||||
|
||||
def test_i_can_tokenize():
|
||||
source = "+*-/{}[]() ,;:.?\n\n\r\r\r\nidentifier_0\t \t10.15 10 'string\n' \"another string\"="
|
||||
source = "+*-/{}[]() ,;:.?\n\n\r\r\r\nidentifier_0\t \t10.15 10 'string\n' \"another string\"=|&"
|
||||
tokens = list(Tokenizer(source))
|
||||
assert tokens[0] == Token(TokenKind.PLUS, "+", 0, 1, 1)
|
||||
assert tokens[1] == Token(TokenKind.STAR, "*", 1, 1, 2)
|
||||
@@ -85,6 +86,8 @@ def test_i_can_tokenize():
|
||||
assert tokens[27] == Token(TokenKind.WHITESPACE, " ", 59, 6, 1)
|
||||
assert tokens[28] == Token(TokenKind.STRING, '"another string"', 60, 6, 2)
|
||||
assert tokens[29] == Token(TokenKind.EQUALS, '=', 76, 6, 18)
|
||||
assert tokens[30] == Token(TokenKind.VBAR, '|', 77, 6, 19)
|
||||
assert tokens[31] == Token(TokenKind.AMPER, '&', 78, 6, 20)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected", [
|
||||
@@ -220,8 +223,8 @@ def test_i_can_recognize_keywords(text, expected):
|
||||
("def concept h as 1 + 1", "h", ast.Expression(ast.BinOp(left=ast.Num(n=1), op=ast.Add(), right=ast.Num(n=1)))),
|
||||
])
|
||||
def test_i_can_parse_def_concept(text, expected_name, expected_expr):
|
||||
parser = DefaultParser(text, PythonParser)
|
||||
tree = parser.parse()
|
||||
parser = DefaultParser(PythonParser)
|
||||
tree = parser.parse(None, text)
|
||||
assert isinstance(tree, DefConceptNode)
|
||||
assert tree.name == expected_name
|
||||
if isinstance(tree.body, PythonNode):
|
||||
@@ -230,8 +233,6 @@ def test_i_can_parse_def_concept(text, expected_name, expected_expr):
|
||||
assert tree.body == expected_expr
|
||||
|
||||
|
||||
|
||||
|
||||
def test_i_can_parse_complex_def_concept_statement():
|
||||
text = """def concept a plus b
|
||||
where a,b
|
||||
@@ -239,8 +240,8 @@ def test_i_can_parse_complex_def_concept_statement():
|
||||
post isinstance(res, int)
|
||||
as res = a + b
|
||||
"""
|
||||
parser = DefaultParser(text, PythonParser)
|
||||
tree = parser.parse()
|
||||
parser = DefaultParser(PythonParser)
|
||||
tree = parser.parse(None, text)
|
||||
assert not parser.has_error
|
||||
assert isinstance(tree, DefConceptNode)
|
||||
assert tree.name == "a plus b"
|
||||
@@ -261,19 +262,20 @@ def concept add one to a as:
|
||||
return x+1
|
||||
func(a)
|
||||
"""
|
||||
parser = DefaultParser(text, PythonParser)
|
||||
tree = parser.parse()
|
||||
parser = DefaultParser(PythonParser)
|
||||
tree = parser.parse(None, text)
|
||||
assert not parser.has_error
|
||||
assert isinstance(tree, DefConceptNode)
|
||||
|
||||
|
||||
def test_i_can_use_colon_to_declare_indentation2():
|
||||
text = """
|
||||
def concept add one to a as:
|
||||
def func(x):
|
||||
return x+1
|
||||
"""
|
||||
parser = DefaultParser(text, PythonParser)
|
||||
tree = parser.parse()
|
||||
parser = DefaultParser(PythonParser)
|
||||
tree = parser.parse(None, text)
|
||||
assert not parser.has_error
|
||||
assert isinstance(tree, DefConceptNode)
|
||||
|
||||
@@ -285,8 +287,8 @@ def concept add one to a as
|
||||
return x+1
|
||||
func(a)
|
||||
"""
|
||||
parser = DefaultParser(text, PythonParser)
|
||||
tree = parser.parse()
|
||||
parser = DefaultParser(PythonParser)
|
||||
tree = parser.parse(None, text)
|
||||
assert parser.has_error
|
||||
assert isinstance(tree, DefConceptNode)
|
||||
assert isinstance(parser.error_sink[0].exception, IndentationError)
|
||||
@@ -304,8 +306,8 @@ def concept add one to a as:
|
||||
func(a)
|
||||
func(b)
|
||||
"""
|
||||
parser = DefaultParser(text, PythonParser)
|
||||
tree = parser.parse()
|
||||
parser = DefaultParser(PythonParser)
|
||||
tree = parser.parse(None, text)
|
||||
assert parser.has_error
|
||||
assert isinstance(tree, DefConceptNode)
|
||||
assert isinstance(parser.error_sink[0], UnexpectedTokenErrorNode)
|
||||
@@ -319,8 +321,8 @@ func(b)
|
||||
("def concept as", Keywords.AS, ["<name>"]),
|
||||
])
|
||||
def test_i_can_detect_unexpected_token_error_in_def_concept(text, token_found, expected_tokens):
|
||||
parser = DefaultParser(text, PythonParser)
|
||||
parser.parse()
|
||||
parser = DefaultParser(PythonParser)
|
||||
parser.parse(None, text)
|
||||
|
||||
assert parser.has_error
|
||||
assert isinstance(parser.error_sink[0], UnexpectedTokenErrorNode)
|
||||
@@ -335,7 +337,10 @@ def test_i_can_detect_unexpected_token_error_in_def_concept(text, token_found, e
|
||||
"def concept hello as 1+"
|
||||
])
|
||||
def test_i_can_detect_error_in_declaration(text):
|
||||
parser = DefaultParser(text, PythonParser)
|
||||
parser.parse()
|
||||
parser = DefaultParser(PythonParser)
|
||||
parser.parse(None, text)
|
||||
assert parser.has_error
|
||||
assert isinstance(parser.error_sink[0], PythonErrorNode)
|
||||
|
||||
|
||||
|
||||
|
||||
+13
-8
@@ -37,8 +37,7 @@ def test_root_folder_is_created_after_initialization():
|
||||
|
||||
|
||||
def test_lists_of_concepts_is_initialized():
|
||||
sheerka = Sheerka()
|
||||
sheerka.initialize(root_folder)
|
||||
sheerka = get_sheerka()
|
||||
assert len(sheerka.concepts_cache) > 1
|
||||
|
||||
|
||||
@@ -53,14 +52,13 @@ def get_concept():
|
||||
return x+y
|
||||
func(a,b)
|
||||
"""
|
||||
parser = DefaultParser(text, PythonParser)
|
||||
return parser.parse()
|
||||
parser = DefaultParser(PythonParser)
|
||||
return parser.parse(None, text)
|
||||
|
||||
|
||||
def test_i_can_add_a_concept():
|
||||
sheerka = get_sheerka()
|
||||
concept = get_concept()
|
||||
sheerka = Sheerka()
|
||||
sheerka.initialize(root_folder)
|
||||
res = sheerka.add_concept(ExecutionContext(sheerka, "xxx"), concept)
|
||||
concept_found = res.value
|
||||
|
||||
@@ -76,7 +74,7 @@ def test_i_can_add_a_concept():
|
||||
assert isinstance(concept_found.codes[ConceptParts.POST], ast.Expression)
|
||||
assert isinstance(concept_found.codes[ConceptParts.BODY], ast.Module)
|
||||
|
||||
all_props = [p.name for p in concept_found.props]
|
||||
all_props = list(concept_found.props.keys())
|
||||
assert all_props == ["a", "b"]
|
||||
|
||||
assert concept_found.key == "__var__0 + __var__1"
|
||||
@@ -123,7 +121,14 @@ def test_i_can_instantiate_a_concept():
|
||||
"""
|
||||
Test the new() functionnality
|
||||
make sure that some Concept are singleton (ex Sheerka, True, False)
|
||||
but some other need a new instance everytime
|
||||
otherwise, make sure that new() returns a **new** instance
|
||||
:return:
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
def get_sheerka():
|
||||
sheerka = Sheerka()
|
||||
sheerka.initialize(root_folder)
|
||||
|
||||
return sheerka
|
||||
Reference in New Issue
Block a user