Refactored to allow ConceptEvaluator

This commit is contained in:
2019-11-14 22:04:38 +01:00
parent 576ce77740
commit 9e10e77737
30 changed files with 2406 additions and 1007 deletions
+350
View File
@@ -0,0 +1,350 @@
import os
import shutil
import pytest
from os import path
import ast
from core.builtin_concepts import ParserResultConcept, BuiltinConcepts, ReturnValueConcept
from core.sheerka import Sheerka, ExecutionContext
from parsers.BaseParser import BaseParser
from parsers.PythonParser import PythonParser, PythonNode, PythonErrorNode
from core.tokenizer import Keywords, Tokenizer
from parsers.DefaultParser import DefaultParser, NameNode, SyntaxErrorNode
# from parsers.DefaultParser import NumberNode, StringNode, VariableNode, TrueNode, FalseNode, NullNode, BinaryNode
from parsers.DefaultParser import UnexpectedTokenErrorNode, DefConceptNode, NopNode
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"


@pytest.fixture(autouse=True)
def init_test():
    """Recreate a clean sandbox directory and run each test from inside it."""
    if path.exists(tests_root):
        shutil.rmtree(tests_root)  # wipe leftovers from a previous run
    if not path.exists(tests_root):
        os.makedirs(tests_root)
    previous_dir = os.getcwd()
    os.chdir(tests_root)
    yield None
    os.chdir(previous_dir)
# def nop():
# return NopNode()
#
#
# def n(number):
# return NumberNode([], number)
#
#
# def s(string, quote="'"):
# return StringNode([], string, quote)
#
#
# def v(name):
# return VariableNode([], name)
#
#
# def t():
# return TrueNode([])
#
#
# def f():
# return FalseNode([])
#
#
# def null():
# return NullNode([])
#
#
# def b(operator, left, right):
# return BinaryNode([], operator, left, right)
#
# def compare_ast(left, right):
# left_as_string = ast.dump(left)
# left_as_string = left_as_string.replace(", ctx=Load()", "")
# left_as_string = left_as_string.replace(", kind=None", "")
#
# right_as_string = right if isinstance(right, str) else ast.dump(right)
# right_as_string = right_as_string.replace(", ctx=Load()", "")
# right_as_string = right_as_string.replace(", kind=None", "")
#
# return left_as_string == right_as_string
#
def get_concept(name, where=None, pre=None, post=None, body=None):
    """Build a DefConceptNode for *name*; optional clause values are wrapped
    through get_concept_part before being attached to the node."""
    node = DefConceptNode([], name=NameNode(list(Tokenizer(name))))
    # Attach clauses in the same order the original code used.
    for attr, value in (("body", body), ("where", where), ("pre", pre), ("post", post)):
        if value:
            setattr(node, attr, get_concept_part(value))
    return node
def get_context():
    """Spin up a freshly initialized Sheerka and wrap it in a test context."""
    engine = Sheerka()
    engine.initialize(root_folder)
    return ExecutionContext("test", "xxx", engine)
def get_concept_part(part):
    """Wrap a concept part into a parser-style ReturnValueConcept.

    Accepts:
      * str — raw Python source, parsed here in eval mode;
      * PythonNode — an already parsed node (its .source is reused);
      * ReturnValueConcept — returned unchanged.

    Raises:
        TypeError: for any other type. Previously the function silently fell
        through and returned None, which only failed later at the call site.
    """
    if isinstance(part, str):
        node = PythonNode(part, ast.parse(part, mode="eval"))
        return ReturnValueConcept(
            who="Parsers:PythonParser",
            status=True,
            value=ParserResultConcept(
                source=part,
                parser=PythonParser(),
                value=node))
    if isinstance(part, PythonNode):
        return ReturnValueConcept(
            who="Parsers:PythonParser",
            status=True,
            value=ParserResultConcept(
                source=part.source,
                parser=PythonParser(),
                value=part))
    if isinstance(part, ReturnValueConcept):
        return part
    raise TypeError("Unsupported concept part type: %r" % type(part).__name__)
# @pytest.mark.parametrize("text, expected", [
# ("1", n(1)),
# ("+1", n(1)),
# ("-1", n(-1)),
# ("'foo'", s("foo")),
# ("identifier", v("identifier")),
# ("true", t()),
# ("false", f()),
# ("null", null()),
# ("1 * 2", b(TokenKind.STAR, n(1), n(2))),
# ("1 * 2/3", b(TokenKind.STAR, n(1), b(TokenKind.SLASH, n(2), n(3)))),
# ("1 + 2", b(TokenKind.PLUS, n(1), n(2))),
# ("1 + 2 - 3", b(TokenKind.PLUS, n(1), b(TokenKind.MINUS, n(2), n(3)))),
# ("1 + 2-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))),
# ("1 + 2 +-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))),
# ("1 + 2 * 3", b(TokenKind.PLUS, n(1), b(TokenKind.STAR, n(2), n(3)))),
# ("1 * 2 + 3", b(TokenKind.PLUS, b(TokenKind.STAR, n(1), n(2)), n(3))),
# ("(1 + 2) * 3", b(TokenKind.STAR, b(TokenKind.PLUS, n(1), n(2)), n(3))),
# ("1 * (2 + 3)", b(TokenKind.STAR, n(1), b(TokenKind.PLUS, n(2), n(3)))),
# ])
# def test_i_can_parse_simple_expression(text, expected):
# parser = DefaultParser(text, None)
# ast = parser.parse()
# assert ast.is_same(expected)
#
#
# @pytest.mark.parametrize("text, token_found, expected_tokens", [
# ("1+", TokenKind.EOF,
# [TokenKind.NUMBER, TokenKind.STRING, TokenKind.IDENTIFIER, 'true', 'false', 'null', TokenKind.LPAR]),
# ("(1+1", TokenKind.EOF, [TokenKind.RPAR])
# ])
# def test_i_can_detect_unexpected_end_of_code(text, token_found, expected_tokens):
# parser = DefaultParser(text, None)
# parser.parse()
#
# assert parser.has_error
# assert parser.error_sink[0].tokens[0].type == token_found
# assert parser.error_sink[0].expected_tokens == expected_tokens
@pytest.mark.parametrize("text, expected", [
("def concept hello", get_concept(name="hello")),
("def concept hello ", get_concept(name="hello")),
("def concept a + b", get_concept(name="a + b")),
("def concept a+b", get_concept(name="a + b")),
("def concept 'a+b'+c", get_concept(name="'a+b' + c")),
("def concept 'as if'", get_concept(name="'as if'")),
("def concept 'as' if", get_concept(name="'as if'")),
("def concept hello as 'hello'", get_concept(name="hello", body="'hello'")),
("def concept hello as 1", get_concept(name="hello", body="1")),
("def concept hello as 1 + 1", get_concept(name="hello", body="1 + 1")),
])
def test_i_can_parse_def_concept(text, expected):
parser = DefaultParser()
res = parser.parse(get_context(), text)
node = res.value.value
assert res.status
assert res.who == parser.name
assert res.value.source == text
assert isinstance(res.value, ParserResultConcept)
assert node == expected
def test_i_can_parse_complex_def_concept_statement():
    """A full declaration with where/pre/post/as clauses parses into the
    expected concept node."""
    # NOTE(review): leading indentation inside this triple-quoted literal was
    # lost in extraction; verify the clause lines against the original file.
    text = """def concept a plus b
where a,b
pre isinstance(a, int) and isinstance(b, float)
post isinstance(res, int)
as res = a + b
"""
    parser = DefaultParser()
    res = parser.parse(get_context(), text)
    return_value = res.value
    expected_concept = get_concept(
        name="a plus b",
        where="a,b",
        pre="isinstance(a, int) and isinstance(b, float)",
        post="isinstance(res, int)",
        # The 'as' body is a statement, hence exec mode rather than eval.
        body=PythonNode("res = a + b", ast.parse("res = a + b", mode="exec"))
    )
    assert res.status
    assert isinstance(return_value, ParserResultConcept)
    assert return_value.value == expected_concept
def test_i_can_have_mutilines_declarations():
    """A concept body may span several lines after 'as' (no-colon form)."""
    # NOTE(review): indentation inside this triple-quoted literal was lost in
    # extraction; the expected body below implies 'return x+1' was indented.
    text = """
def concept add one to a as
def func(x):
return x+1
func(a)
"""
    expected_concept = get_concept(
        # NOTE(review): trailing space in the expected name looks deliberate
        # (everything between 'concept' and 'as') — confirm with the parser.
        name="add one to a ",
        body=PythonNode(
            "def func(x):\n return x+1\nfunc(a)",
            ast.parse("def func(x):\n return x+1\nfunc(a)", mode="exec"))
    )
    parser = DefaultParser()
    res = parser.parse(get_context(), text)
    return_value = res.value
    assert res.status
    assert isinstance(return_value, ParserResultConcept)
    assert return_value.value == expected_concept
def test_i_can_use_colon_to_use_indentation():
    """'as:' introduces an indented multi-line body that parses to the same
    concept as the bare 'as' form."""
    # NOTE(review): indentation inside this triple-quoted literal was lost in
    # extraction (it now reads identically to the negative test below).
    text = """
def concept add one to a as:
def func(x):
return x+1
func(a)
"""
    expected_concept = get_concept(
        name="add one to a ",
        body=PythonNode(
            "def func(x):\n return x+1\nfunc(a)",
            ast.parse("def func(x):\n return x+1\nfunc(a)", mode="exec"))
    )
    parser = DefaultParser()
    res = parser.parse(get_context(), text)
    return_value = res.value
    assert res.status
    assert isinstance(return_value, ParserResultConcept)
    assert return_value.value == expected_concept
def test_indentation_is_mandatory_after_a_colon():
    """When 'as:' is used the body must be indented; a flush-left body is a
    syntax error."""
    # NOTE(review): indentation inside this triple-quoted literal was lost in
    # extraction — here the body was presumably NOT indented (that is the
    # point of the test); confirm against the original file.
    text = """
def concept add one to a as:
def func(x):
return x+1
func(a)
"""
    parser = DefaultParser()
    res = parser.parse(get_context(), text)
    return_value = res.value
    assert not res.status
    assert isinstance(return_value, ParserResultConcept)
    assert isinstance(return_value.value[0], SyntaxErrorNode)
    assert return_value.value[0].message == "Indentation not found."
def test_indentation_is_not_allowed_if_the_colon_is_missing():
    """An indented body without 'as:' accumulates errors until the parser
    gives up with TOO_MANY_ERRORS."""
    # NOTE(review): indentation inside this triple-quoted literal was lost in
    # extraction — here the body was presumably indented (that is the point
    # of the test); confirm against the original file.
    text = """
def concept add one to a as
def func(x):
return x+1
func(a)
"""
    context = get_context()
    sheerka = context.sheerka
    parser = DefaultParser()
    res = parser.parse(context, text)
    return_value = res.value
    assert not res.status
    assert isinstance(return_value, ParserResultConcept)
    assert sheerka.isinstance(return_value.value[0], BuiltinConcepts.TOO_MANY_ERRORS)
def test_name_is_mandatory():
    """Omitting the concept name yields a 'Name is mandatory' syntax error."""
    source = "def concept as 'hello'"
    outcome = DefaultParser().parse(get_context(), source)
    payload = outcome.value
    assert not outcome.status
    assert isinstance(payload, ParserResultConcept)
    first_error = payload.value[0]
    assert isinstance(first_error, SyntaxErrorNode)
    assert first_error.message == "Name is mandatory"
def test_concept_keyword_is_mandatory_but_the_concept_is_recognized():
    """'def' without 'concept' is an error, yet the declaration is still
    recovered into try_parsed."""
    source = "def hello as a where b pre c post d"
    recovered = get_concept(name="hello", body="a", where="b", pre="c", post="d")
    outcome = DefaultParser().parse(get_context(), source)
    payload = outcome.value
    assert not outcome.status
    assert isinstance(payload, ParserResultConcept)
    first_error = payload.value[0]
    assert isinstance(first_error, UnexpectedTokenErrorNode)
    assert first_error.message == "Syntax error."
    assert first_error.expected_tokens == [Keywords.CONCEPT]
    assert payload.try_parsed == recovered
@pytest.mark.parametrize("text", [
"def concept hello where 1+",
"def concept hello pre 1+",
"def concept hello post 1+",
"def concept hello as 1+"
])
def test_i_can_detect_error_in_declaration(text):
context = get_context()
sheerka = context.sheerka
parser = DefaultParser()
res = parser.parse(context, text)
return_value = res.value
assert not res.status
assert isinstance(return_value, ParserResultConcept)
assert sheerka.isinstance(return_value.value[0], BuiltinConcepts.TOO_MANY_ERRORS)
def test_new_line_is_not_allowed_in_the_name():
    """A newline inside the concept name is rejected with a dedicated error."""
    source = "def concept hello \n my friend as 'hello'"
    outcome = DefaultParser().parse(get_context(), source)
    payload = outcome.value
    assert not outcome.status
    assert payload.value == [SyntaxErrorNode([], "Newline are not allowed in name.")]
+40 -28
View File
@@ -3,8 +3,10 @@ from os import path
import shutil
import os
from core.builtin_concepts import ParserResultConcept, BuiltinConcepts
from core.concept import Concept, Property
from core.sheerka import Sheerka, ExecutionContext
from core.tokenizer import Tokenizer
from parsers.DefaultParser import DefaultParser
from parsers.ExactConceptParser import ExactConceptParser
@@ -54,29 +56,28 @@ def test_i_can_compute_combinations_with_duplicates():
def test_i_can_recognize_a_simple_concept():
sheerka = get_sheerka()
context = get_context()
concept = get_concept("hello world", [])
sheerka.add_in_cache(concept)
context.sheerka.add_in_cache(concept)
source = "hello world"
context = ExecutionContext(sheerka, "xxxx")
results = ExactConceptParser().parse(context, source)
assert len(results) == 1
assert results[0].status
assert results[0].value.key == concept.key
assert results[0].value == concept
def test_i_can_recognize_concepts_defined_several_times():
sheerka = get_sheerka()
sheerka.add_in_cache(get_concept("hello world", []))
sheerka.add_in_cache(get_concept("hello a", ["a"]))
context = get_context()
context.sheerka.add_in_cache(get_concept("hello world", []))
context.sheerka.add_in_cache(get_concept("hello a", ["a"]))
source = "hello world"
context = ExecutionContext(sheerka, "xxxx")
results = ExactConceptParser().parse(context, source)
assert len(results) == 2
results = sorted(results, key=lambda x: x.value.name) # because of the usage of sets
results = sorted(results, key=lambda x: x.value.name) # because of the usage of sets
assert results[0].status
assert results[0].value.name == "hello a"
@@ -87,11 +88,10 @@ def test_i_can_recognize_concepts_defined_several_times():
def test_i_can_recognize_a_concept_with_variables():
sheerka = get_sheerka()
context = get_context()
concept = get_concept("a + b", ["a", "b"])
sheerka.concepts_cache[concept.key] = concept
context.sheerka.add_in_cache(concept)
source = "10 + 5"
context = ExecutionContext(sheerka, "xxxx")
results = ExactConceptParser().parse(context, source)
assert len(results) == 1
@@ -102,11 +102,10 @@ def test_i_can_recognize_a_concept_with_variables():
def test_i_can_recognize_a_concept_with_duplicate_variables():
sheerka = get_sheerka()
context = get_context()
concept = get_concept("a + b + a", ["a", "b"])
sheerka.concepts_cache[concept.key] = concept
context.sheerka.concepts_cache[concept.key] = concept
source = "10 + 5 + 10"
context = ExecutionContext(sheerka, "xxxx")
results = ExactConceptParser().parse(context, source)
assert len(results) == 1
@@ -117,23 +116,43 @@ def test_i_can_recognize_a_concept_with_duplicate_variables():
def test_i_can_manage_unknown_concept():
sheerka = get_sheerka()
context = get_context()
source = "def concept hello world" # this is not a concept by itself
context = ExecutionContext(sheerka, "xxxx")
res = ExactConceptParser().parse(context, source)
assert not res.status
assert sheerka.isinstance(res.value, Sheerka.UNKNOWN_CONCEPT_NAME)
assert context.sheerka.isinstance(res.value, BuiltinConcepts.UNKNOWN_CONCEPT)
assert res.value.obj == "def concept hello world"
def test_i_can_detect_concepts_too_long():
sheerka = get_sheerka()
context = get_context()
source = "a very very long concept that cannot be an unique one"
context = ExecutionContext(sheerka, "xxxx")
res = ExactConceptParser().parse(context, source)
assert not res.status
assert sheerka.isinstance(res.value, Sheerka.CONCEPT_TOO_LONG_CONCEPT_NAME)
assert context.sheerka.isinstance(res.value, BuiltinConcepts.CONCEPT_TOO_LONG)
assert res.value.obj == "a very very long concept that cannot be an unique one"
def test_i_can_detect_concept_from_tokens():
context = get_context()
concept = get_concept("hello world", [])
context.sheerka.add_in_cache(concept)
source = "hello world"
results = ExactConceptParser().parse(context, list(Tokenizer(source)))
assert len(results) == 1
assert results[0].status
assert results[0].value == concept
def get_context():
sheerka = Sheerka()
sheerka.initialize(root_folder)
return ExecutionContext("sheerka", "xxxx", sheerka)
def get_concept(name, variables):
@@ -143,10 +162,3 @@ def get_concept(name, variables):
c.props[v] = Property(v, None)
c.init_key()
return c
def get_sheerka():
sheerka = Sheerka()
sheerka.initialize(root_folder)
return sheerka
+77
View File
@@ -0,0 +1,77 @@
import ast
import os
import shutil
from os import path
import pytest
from core.builtin_concepts import ParserResultConcept
from core.sheerka import Sheerka, ExecutionContext
from core.tokenizer import Tokenizer
from parsers.BaseParser import BaseParser
from parsers.PythonParser import PythonNode, PythonParser, PythonErrorNode
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"


@pytest.fixture(autouse=True)
def init_test():
    """Give every test a fresh sandbox directory and execute from inside it."""
    if path.exists(tests_root):
        shutil.rmtree(tests_root)  # clear any residue from earlier runs
    if not path.exists(tests_root):
        os.makedirs(tests_root)
    saved_cwd = os.getcwd()
    os.chdir(tests_root)
    yield None
    os.chdir(saved_cwd)
def get_context():
    """Create an initialized Sheerka engine wrapped in an ExecutionContext."""
    engine = Sheerka()
    engine.initialize(root_folder)
    return ExecutionContext("test", "xxx", engine)
@pytest.mark.parametrize("text, expected", [
    ("1+1", PythonNode("1+1", ast.parse("1+1", mode="eval"))),
    ("a=10", PythonNode("a=10", ast.parse("a=10", mode="exec"))),
])
def test_i_can_parse_a_simple_expression(text, expected):
    """Expressions parse in eval mode, statements fall back to exec mode."""
    py_parser = PythonParser()
    outcome = py_parser.parse(get_context(), text)
    assert outcome.status
    assert outcome.who == py_parser.name
    assert isinstance(outcome.value, ParserResultConcept)
    assert outcome.value.value == expected
@pytest.mark.parametrize("text, expected", [
    ("1+1", PythonNode("1+1", ast.parse("1+1", mode="eval"))),
    ("a=10", PythonNode("a=10", ast.parse("a=10", mode="exec"))),
])
def test_i_can_parse_from_tokens(text, expected):
    """The Python parser accepts a pre-tokenized list as well as raw text."""
    py_parser = PythonParser()
    token_list = list(Tokenizer(text))
    outcome = py_parser.parse(get_context(), token_list)
    assert outcome.status
    assert outcome.who == py_parser.name
    assert isinstance(outcome.value, ParserResultConcept)
    assert outcome.value.value == expected
def test_i_can_detect_error():
    """Invalid Python source yields a failed result wrapping a PythonErrorNode
    whose underlying exception is the SyntaxError raised by ast.parse."""
    py_parser = PythonParser()
    outcome = py_parser.parse(get_context(), "1+")
    assert not outcome.status
    assert outcome.who == py_parser.name
    assert isinstance(outcome.value, ParserResultConcept)
    first_node = outcome.value.value[0]
    assert isinstance(first_node, PythonErrorNode)
    assert isinstance(first_node.exception, SyntaxError)
-346
View File
@@ -1,346 +0,0 @@
import pytest
from parsers.ExactConceptParser import ExactConceptParser
from parsers.PythonParser import PythonParser, PythonNode, PythonErrorNode
from core.tokenizer import Tokenizer, Token, TokenKind, Keywords, LexerError
from parsers.DefaultParser import DefaultParser
from parsers.DefaultParser import NumberNode, StringNode, VariableNode, TrueNode, FalseNode, NullNode, BinaryNode
from parsers.DefaultParser import UnexpectedTokenErrorNode, DefConceptNode, NopNode
import ast
def nop():
    # Shorthand factory: an empty no-op node.
    return NopNode()


def n(number):
    # Shorthand factory: a number-literal node.
    return NumberNode([], number)


def s(string, quote="'"):
    # Shorthand factory: a string-literal node with its quote character.
    return StringNode([], string, quote)


def v(name):
    # Shorthand factory: a variable-reference node.
    return VariableNode([], name)


def t():
    # Shorthand factory: boolean-true node.
    return TrueNode([])


def f():
    # Shorthand factory: boolean-false node.
    return FalseNode([])


def null():
    # Shorthand factory: null-literal node.
    return NullNode([])


def b(operator, left, right):
    # Shorthand factory: a binary-operator node over two child nodes.
    return BinaryNode([], operator, left, right)
def compare_ast(left, right):
    """Structurally compare two ASTs (or an AST against a pre-dumped string),
    ignoring the noisy ', ctx=Load()' and ', kind=None' fields ast.dump emits."""
    def normalize(dumped):
        return dumped.replace(", ctx=Load()", "").replace(", kind=None", "")

    lhs = normalize(ast.dump(left))
    rhs = normalize(right if isinstance(right, str) else ast.dump(right))
    return lhs == rhs
def test_i_can_tokenize():
    """Tokenize one source string containing every token kind and check each
    Token's (type, text, absolute index, line, column) in order."""
    # NOTE(review): runs of whitespace inside this literal appear collapsed by
    # extraction — the index math below (e.g. token 10 spans indices 10..14)
    # implies multi-character whitespace where only one char displays. Verify
    # the literal against the original file before editing.
    source = "+*-/{}[]() ,;:.?\n\n\r\r\r\nidentifier_0\t \t10.15 10 'string\n' \"another string\"=|&"
    tokens = list(Tokenizer(source))
    assert tokens[0] == Token(TokenKind.PLUS, "+", 0, 1, 1)
    assert tokens[1] == Token(TokenKind.STAR, "*", 1, 1, 2)
    assert tokens[2] == Token(TokenKind.MINUS, "-", 2, 1, 3)
    assert tokens[3] == Token(TokenKind.SLASH, "/", 3, 1, 4)
    assert tokens[4] == Token(TokenKind.LBRACE, "{", 4, 1, 5)
    assert tokens[5] == Token(TokenKind.RBRACE, "}", 5, 1, 6)
    assert tokens[6] == Token(TokenKind.LBRACKET, "[", 6, 1, 7)
    assert tokens[7] == Token(TokenKind.RBRACKET, "]", 7, 1, 8)
    assert tokens[8] == Token(TokenKind.LPAR, "(", 8, 1, 9)
    assert tokens[9] == Token(TokenKind.RPAR, ")", 9, 1, 10)
    assert tokens[10] == Token(TokenKind.WHITESPACE, " ", 10, 1, 11)
    assert tokens[11] == Token(TokenKind.COMMA, ",", 14, 1, 15)
    assert tokens[12] == Token(TokenKind.SEMICOLON, ";", 15, 1, 16)
    assert tokens[13] == Token(TokenKind.COLON, ":", 16, 1, 17)
    assert tokens[14] == Token(TokenKind.DOT, ".", 17, 1, 18)
    assert tokens[15] == Token(TokenKind.QMARK, "?", 18, 1, 19)
    # Newline handling: \n, \n\r, \r, and \r\n each count as ONE newline token.
    assert tokens[16] == Token(TokenKind.NEWLINE, "\n", 19, 1, 20)
    assert tokens[17] == Token(TokenKind.NEWLINE, "\n\r", 20, 2, 1)
    assert tokens[18] == Token(TokenKind.NEWLINE, "\r", 22, 3, 1)
    assert tokens[19] == Token(TokenKind.NEWLINE, "\r\n", 23, 4, 1)
    assert tokens[20] == Token(TokenKind.IDENTIFIER, "identifier_0", 25, 5, 1)
    assert tokens[21] == Token(TokenKind.WHITESPACE, "\t \t", 37, 5, 13)
    assert tokens[22] == Token(TokenKind.NUMBER, "10.15", 41, 5, 17)
    assert tokens[23] == Token(TokenKind.WHITESPACE, " ", 46, 5, 22)
    assert tokens[24] == Token(TokenKind.NUMBER, "10", 47, 5, 23)
    assert tokens[25] == Token(TokenKind.WHITESPACE, " ", 49, 5, 25)
    # Strings may legally contain raw newlines; line/column advance with them.
    assert tokens[26] == Token(TokenKind.STRING, "'string\n'", 50, 5, 26)
    assert tokens[27] == Token(TokenKind.WHITESPACE, " ", 59, 6, 1)
    assert tokens[28] == Token(TokenKind.STRING, '"another string"', 60, 6, 2)
    assert tokens[29] == Token(TokenKind.EQUALS, '=', 76, 6, 18)
    assert tokens[30] == Token(TokenKind.VBAR, '|', 77, 6, 19)
    assert tokens[31] == Token(TokenKind.AMPER, '&', 78, 6, 20)
@pytest.mark.parametrize("text, expected", [
    ("_ident", True),
    ("ident", True),
    ("ident123", True),
    ("ident_123", True),
    ("ident-like-this", True),
    ("àèùéû", True),
    ("011254", False),
    ("0abcd", False),
    ("-abcd", False)
])
def test_i_can_tokenize_identifiers(text, expected):
    """Identifiers may start with letters/underscore (incl. accented letters),
    but never with a digit or a minus sign."""
    first_token = list(Tokenizer(text))[0]
    assert (first_token.type == TokenKind.IDENTIFIER) == expected
@pytest.mark.parametrize("text, error_text, index, line, column", [
    ("'string", "'string", 7, 1, 8),
    ('"string', '"string', 7, 1, 8),
    ('"a" + "string', '"string', 13, 1, 14),
    ('"a"\n\n"string', '"string', 12, 3, 8),
])
def test_i_can_detect_unfinished_strings(text, error_text, index, line, column):
    """An unterminated string literal raises a LexerError that points at the
    offending text and position."""
    with pytest.raises(LexerError) as exc_info:
        list(Tokenizer(text))
    err = exc_info.value
    assert err.text == error_text
    assert err.index == index
    assert err.line == line
    assert err.column == column
@pytest.mark.parametrize("text, expected_text, expected_newlines", [
    ("'foo'", "'foo'", 0),
    ('"foo"', '"foo"', 0),
    ("'foo\rbar'", "'foo\rbar'", 1),
    ("'foo\nbar'", "'foo\nbar'", 1),
    ("'foo\n\rbar'", "'foo\n\rbar'", 1),
    ("'foo\r\nbar'", "'foo\r\nbar'", 1),
    ("'foo\r\rbar'", "'foo\r\rbar'", 2),
    ("'foo\n\nbar'", "'foo\n\nbar'", 2),
    ("'foo\r\n\n\rbar'", "'foo\r\n\n\rbar'", 2),
    ("'\rfoo\rbar\r'", "'\rfoo\rbar\r'", 3),
    ("'\nfoo\nbar\n'", "'\nfoo\nbar\n'", 3),
    ("'\n\rfoo\r\n'", "'\n\rfoo\r\n'", 2),
    (r"'foo\'bar'", r"'foo\'bar'", 0),
    (r'"foo\"bar"', r'"foo\"bar"', 0),
    ('"foo"bar"', '"foo"', 0),
    ("'foo'bar'", "'foo'", 0),
])
def test_i_can_parse_strings(text, expected_text, expected_newlines):
    """eat_string returns the consumed string token and the count of embedded
    newlines (\\n\\r and \\r\\n pairs count once; escaped quotes don't end it)."""
    consumed, newline_count = Tokenizer(text).eat_string(0, 1, 1)
    assert newline_count == expected_newlines
    assert consumed == expected_text
@pytest.mark.parametrize("text", [
    "1", "3.1415", "0.5", "01", "-5", "-5.10"
])
def test_i_can_parse_numbers(text):
    """Integers, floats, and signed values each tokenize as a single NUMBER."""
    first = list(Tokenizer(text))[0]
    assert first.type == TokenKind.NUMBER
    assert first.value == text
@pytest.mark.parametrize("text, expected", [
    ("def", Keywords.DEF),
    ("concept", Keywords.CONCEPT),
    ("as", Keywords.AS),
    ("pre", Keywords.PRE),
    ("post", Keywords.POST)
])
def test_i_can_recognize_keywords(text, expected):
    """Reserved words tokenize as KEYWORD carrying the matching Keywords value."""
    first = list(Tokenizer(text))[0]
    assert first.type == TokenKind.KEYWORD
    assert first.value == expected
# @pytest.mark.parametrize("text, expected", [
# ("1", n(1)),
# ("+1", n(1)),
# ("-1", n(-1)),
# ("'foo'", s("foo")),
# ("identifier", v("identifier")),
# ("true", t()),
# ("false", f()),
# ("null", null()),
# ("1 * 2", b(TokenKind.STAR, n(1), n(2))),
# ("1 * 2/3", b(TokenKind.STAR, n(1), b(TokenKind.SLASH, n(2), n(3)))),
# ("1 + 2", b(TokenKind.PLUS, n(1), n(2))),
# ("1 + 2 - 3", b(TokenKind.PLUS, n(1), b(TokenKind.MINUS, n(2), n(3)))),
# ("1 + 2-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))),
# ("1 + 2 +-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))),
# ("1 + 2 * 3", b(TokenKind.PLUS, n(1), b(TokenKind.STAR, n(2), n(3)))),
# ("1 * 2 + 3", b(TokenKind.PLUS, b(TokenKind.STAR, n(1), n(2)), n(3))),
# ("(1 + 2) * 3", b(TokenKind.STAR, b(TokenKind.PLUS, n(1), n(2)), n(3))),
# ("1 * (2 + 3)", b(TokenKind.STAR, n(1), b(TokenKind.PLUS, n(2), n(3)))),
# ])
# def test_i_can_parse_simple_expression(text, expected):
# parser = DefaultParser(text, None)
# ast = parser.parse()
# assert ast.is_same(expected)
#
#
# @pytest.mark.parametrize("text, token_found, expected_tokens", [
# ("1+", TokenKind.EOF,
# [TokenKind.NUMBER, TokenKind.STRING, TokenKind.IDENTIFIER, 'true', 'false', 'null', TokenKind.LPAR]),
# ("(1+1", TokenKind.EOF, [TokenKind.RPAR])
# ])
# def test_i_can_detect_unexpected_end_of_code(text, token_found, expected_tokens):
# parser = DefaultParser(text, None)
# parser.parse()
#
# assert parser.has_error
# assert parser.error_sink[0].tokens[0].type == token_found
# assert parser.error_sink[0].expected_tokens == expected_tokens
@pytest.mark.parametrize("text, expected_name, expected_expr", [
    ("def concept hello", "hello", nop()),
    ("def concept hello ", "hello", nop()),
    ("def concept a+b", "a + b", nop()),
    ("def concept 'a+b'", "a+b", nop()),
    ("def concept 'a+b'+c", "a+b + c", nop()),
    ("def concept 'as if'", "as if", nop()),
    ("def concept 'as' if", "as if", nop()),
    ("def concept hello as 'hello'", "hello", ast.Expression(body=ast.Str(s='hello'))),
    ("def concept hello as 1", "hello", ast.Expression(body=ast.Num(n=1))),
    ("def concept h as 1 + 1", "h", ast.Expression(ast.BinOp(left=ast.Num(n=1), op=ast.Add(), right=ast.Num(n=1)))),
])
def test_i_can_parse_def_concept(text, expected_name, expected_expr):
    """A 'def concept' line parses to a DefConceptNode; a Python body compares
    structurally, anything else (e.g. NopNode) compares by equality."""
    parser = DefaultParser(PythonParser)
    node = parser.parse(None, text)
    assert isinstance(node, DefConceptNode)
    assert node.name == expected_name
    body = node.body
    if isinstance(body, PythonNode):
        assert compare_ast(body.ast, expected_expr)
    else:
        assert body == expected_expr
def test_i_can_parse_complex_def_concept_statement():
    """All four clauses (where/pre/post/as) are parsed into PythonNode parts
    with the right AST mode: clauses are Expressions, the body is a Module."""
    # NOTE(review): indentation inside this triple-quoted literal was lost in
    # extraction; verify the clause lines against the original file.
    text = """def concept a plus b
where a,b
pre isinstance(a, int) and isinstance(b, float)
post isinstance(res, int)
as res = a + b
"""
    parser = DefaultParser(PythonParser)
    tree = parser.parse(None, text)
    assert not parser.has_error
    assert isinstance(tree, DefConceptNode)
    assert tree.name == "a plus b"
    assert tree.where.source == "a,b"
    assert isinstance(tree.where.ast, ast.Expression)
    assert tree.pre.source == "isinstance(a, int) and isinstance(b, float)"
    assert isinstance(tree.pre.ast, ast.Expression)
    assert tree.post.source == "isinstance(res, int)"
    assert isinstance(tree.post.ast, ast.Expression)
    assert tree.body.source == "res = a + b"
    assert isinstance(tree.body.ast, ast.Module)
def test_i_can_use_colon_to_declare_indentation():
    """'as:' introduces an indented multi-line body that parses cleanly."""
    # NOTE(review): indentation inside this triple-quoted literal was lost in
    # extraction; confirm the body lines against the original file.
    text = """
def concept add one to a as:
def func(x):
return x+1
func(a)
"""
    parser = DefaultParser(PythonParser)
    tree = parser.parse(None, text)
    assert not parser.has_error
    assert isinstance(tree, DefConceptNode)
def test_i_can_use_colon_to_declare_indentation2():
    """Same as above but without a trailing statement after the inner def."""
    # NOTE(review): indentation inside this triple-quoted literal was lost in
    # extraction; confirm the body lines against the original file.
    text = """
def concept add one to a as:
def func(x):
return x+1
"""
    parser = DefaultParser(PythonParser)
    tree = parser.parse(None, text)
    assert not parser.has_error
    assert isinstance(tree, DefConceptNode)
def test_without_colon_i_get_an_indent_error():
    """An indented body without ':' after 'as' produces an IndentationError
    wrapped in the parser's error sink."""
    # NOTE(review): indentation inside this triple-quoted literal was lost in
    # extraction; confirm the body lines against the original file.
    text = """
def concept add one to a as
def func(x):
return x+1
func(a)
"""
    parser = DefaultParser(PythonParser)
    tree = parser.parse(None, text)
    assert parser.has_error
    assert isinstance(tree, DefConceptNode)
    assert isinstance(parser.error_sink[0].exception, IndentationError)
def test_i_can_detect_error():
    """
    In this test, func(b) is not correctly indented while colon is specified after the 'as' keyword
    """
    # NOTE(review): indentation inside this triple-quoted literal was lost in
    # extraction; confirm the body lines against the original file.
    text = """
def concept add one to a as:
def func(x):
return x+1
func(a)
func(b)
"""
    parser = DefaultParser(PythonParser)
    tree = parser.parse(None, text)
    assert parser.has_error
    assert isinstance(tree, DefConceptNode)
    assert isinstance(parser.error_sink[0], UnexpectedTokenErrorNode)
    # check that the error is caused by 'func(b)'
    assert parser.error_sink[0].tokens[0].line == 6
    assert parser.error_sink[0].tokens[0].column == 1
@pytest.mark.parametrize("text, token_found, expected_tokens", [
    ("def hello as 'hello'", "hello", [Keywords.CONCEPT]),
    ("def concept as", Keywords.AS, ["<name>"]),
])
def test_i_can_detect_unexpected_token_error_in_def_concept(text, token_found, expected_tokens):
    """A missing 'concept' keyword or missing name is reported together with
    the tokens the parser expected at that point."""
    parser = DefaultParser(PythonParser)
    parser.parse(None, text)
    assert parser.has_error
    first_error = parser.error_sink[0]
    assert isinstance(first_error, UnexpectedTokenErrorNode)
    assert first_error.tokens[0].value == token_found
    assert first_error.expected_tokens == expected_tokens
@pytest.mark.parametrize("text", [
    "def concept hello where 1+",
    "def concept hello pre 1+",
    "def concept hello post 1+",
    "def concept hello as 1+"
])
def test_i_can_detect_error_in_declaration(text):
    """A broken Python snippet in any clause produces a PythonErrorNode."""
    parser = DefaultParser(PythonParser)
    parser.parse(None, text)
    assert parser.has_error
    assert isinstance(parser.error_sink[0], PythonErrorNode)
+400 -59
View File
@@ -5,11 +5,13 @@ import os
from os import path
import shutil
from core.concept import Concept, ConceptParts, ReturnValueConcept
from core import utils
from core.builtin_concepts import BuiltinConcepts, ReturnValueConcept
from core.concept import Concept, ConceptParts
from core.sheerka import Sheerka, ExecutionContext
from parsers.DefaultParser import DefConceptNode, DefaultParser
from parsers.DefaultParser import DefaultParser
from parsers.PythonParser import PythonParser
from sdp.sheerkaDataProvider import SheerkaDataProvider
from sdp.sheerkaDataProvider import SheerkaDataProvider, SheerkaDataProviderDuplicateKeyError
tests_root = path.abspath("../build/tests")
root_folder = "init_folder"
@@ -36,68 +38,55 @@ def test_root_folder_is_created_after_initialization():
assert os.path.exists(root_folder), "init folder should be created"
def test_lists_of_concepts_is_initialized():
def test_i_can_list_builtin_concepts():
sheerka = get_sheerka()
assert len(sheerka.concepts_cache) > 1
builtins = list(sheerka.get_builtins_classes_as_dict())
assert str(BuiltinConcepts.ERROR) in builtins
assert str(BuiltinConcepts.RETURN_VALUE) in builtins
def get_concept():
text = """
def concept a+b
where isinstance(a, int) and isinstance(b, int)
pre isinstance(a, int) and isinstance(b, int)
post isinstance(res, int)
as:
def func(x,y):
return x+y
func(a,b)
"""
parser = DefaultParser(PythonParser)
return parser.parse(None, text)
def test_builtin_concepts_are_initialized():
sheerka = get_sheerka()
assert len(sheerka.concepts_cache) == len(BuiltinConcepts)
for concept_name in BuiltinConcepts:
assert str(concept_name) in sheerka.concepts_cache
assert sheerka.sdp.get_safe(sheerka.CONCEPTS_ENTRY, str(concept_name)) is not None
for key, concept_class in sheerka.get_builtins_classes_as_dict().items():
assert isinstance(sheerka.concepts_cache[key], concept_class)
def test_builtin_concepts_can_be_updated():
sheerka = get_sheerka()
loaded_sheerka = sheerka.get(BuiltinConcepts.SHEERKA)
loaded_sheerka.desc = "I have a description"
sheerka.sdp.modify("Test", sheerka.CONCEPTS_ENTRY, loaded_sheerka.key, loaded_sheerka)
sheerka = get_sheerka()
loaded_sheerka = sheerka.get(BuiltinConcepts.SHEERKA)
assert loaded_sheerka.desc == "I have a description"
def test_i_can_add_a_concept():
sheerka = get_sheerka()
concept = get_concept()
res = sheerka.add_concept(ExecutionContext(sheerka, "xxx"), concept)
concept_found = res.value
concept = get_default_concept()
res = sheerka.create_new_concept(get_context(sheerka), concept)
assert res.status
assert concept_found == Concept(
name="a + b",
where="isinstance(a, int) and isinstance(b, int)",
pre="isinstance(a, int) and isinstance(b, int)",
post="isinstance(res, int)",
body="def func(x,y):\n return x+y\nfunc(a,b)")
assert isinstance(concept_found.codes[ConceptParts.WHERE], ast.Expression)
assert isinstance(concept_found.codes[ConceptParts.PRE], ast.Expression)
assert isinstance(concept_found.codes[ConceptParts.POST], ast.Expression)
assert isinstance(concept_found.codes[ConceptParts.BODY], ast.Module)
assert sheerka.isinstance(res.value, BuiltinConcepts.NEW_CONCEPT)
all_props = list(concept_found.props.keys())
assert all_props == ["a", "b"]
concept_found = res.value.body
for prop in Concept.props_to_serialize:
assert getattr(concept_found, prop) == getattr(concept, prop)
assert concept_found.key == "__var__0 + __var__1"
assert concept_found.id == "1001"
assert path.exists(sheerka.sdp.get_obj_path(SheerkaDataProvider.ObjectsFolder,
"4f249487410db35d8bcbcf4521acb3dd8354978804cd99bbc4de17a323b2f237"))
@pytest.mark.parametrize("text, expected", [
("1 + 1", 2),
("sheerka.test()", 'I have access to Sheerka !')
])
def test_i_can_eval_simple_python_expressions(text, expected):
sheerka = Sheerka(debug=True)
sheerka.initialize(root_folder)
res = sheerka.eval(text)
assert len(res) == 1
assert res[0].status
assert res[0].value.body == expected
assert sheerka.isinstance(res[0].value, ReturnValueConcept())
assert concept.key in sheerka.concepts_cache
assert path.exists(sheerka.sdp.get_obj_path(SheerkaDataProvider.ObjectsFolder, concept_found.get_digest()))
def test_i_cannot_add_the_same_concept_twice():
@@ -105,30 +94,382 @@ def test_i_cannot_add_the_same_concept_twice():
Checks that duplicated concepts are managed by sheerka, not by sheerka.sdp
:return:
"""
pass
sheerka = get_sheerka()
concept = get_default_concept()
sheerka.create_new_concept(get_context(sheerka), concept)
res = sheerka.create_new_concept(get_context(sheerka), concept)
assert not res.status
assert sheerka.isinstance(res.value, BuiltinConcepts.ERROR)
assert res.value.body.args[0] == "Duplicate object."
def test_i_can_get_a_builtin_concept_by_their_enum_or_the_string():
    """
    Checks that a concept can be found by its name
    even when there are variables in the name (ex 'hello + a' or 'a + b' )
    :return:
    """
    sheerka = get_sheerka()
    for key in sheerka.get_builtins_classes_as_dict():
        # Both the enum member and its string form must resolve.
        assert sheerka.get(key) is not None
        assert sheerka.get(str(key)) is not None
def test_i_can_get_new_concept():
    """A freshly created concept can be retrieved through sheerka.get()."""
    engine = get_sheerka()
    created = get_default_concept()
    engine.create_new_concept(get_context(engine), created)
    fetched = engine.get(created.key)
    assert fetched is not None
    assert fetched.key == created.key
    assert fetched == created
def test_i_first_look_in_local_cache():
    """get() must serve from the in-memory cache before hitting storage."""
    engine = get_sheerka()
    created = get_default_concept()
    engine.create_new_concept(get_context(engine), created)
    # Tamper with the cached entry: get() must return the tampered copy,
    # proving the cache is consulted first.
    engine.concepts_cache[created.key].pre = "I have modified the concept in cache"
    fetched = engine.get(created.key)
    assert fetched is not None
    assert fetched.key == created.key
    assert fetched.pre == "I have modified the concept in cache"
def test_i_can_get_a_known_concept_when_not_in_cache():
    """
    When not in cache, uses sdp
    :return:
    """
    engine = get_sheerka()
    created = get_default_concept()
    engine.create_new_concept(get_context(engine), created)
    engine.concepts_cache = {}  # empty the cache to force a storage lookup
    loaded = engine.get(created.key)
    assert loaded is not None
    assert loaded == created
def test_unknown_concept_is_return_when_the_concept_is_not_found():
    """Looking up a missing key yields an UNKNOWN_CONCEPT, never None."""
    engine = get_sheerka()
    result = engine.get("fake_key")
    assert result is not None
    assert engine.isinstance(result, BuiltinConcepts.UNKNOWN_CONCEPT)
    assert result.body == "fake_key"
def test_i_can_instantiate_a_builtin_concept_when_it_has_its_own_class():
    """new() on a builtin backed by a dedicated class returns that class."""
    engine = get_sheerka()
    instance = engine.new(BuiltinConcepts.RETURN_VALUE,
                          who="who", status="status", value="value", message="message")
    assert isinstance(instance, ReturnValueConcept)
    assert instance.key == str(BuiltinConcepts.RETURN_VALUE)
    # Every keyword argument must land on the matching attribute.
    assert instance.who == "who"
    assert instance.status == "status"
    assert instance.value == "value"
    assert instance.message == "message"
def test_i_can_instantiate_a_builtin_concept_when_no_specific_class():
    """new() on a builtin without a dedicated class falls back to Concept."""
    engine = get_sheerka()
    instance = engine.new(BuiltinConcepts.UNKNOWN_CONCEPT, body="fake_concept")
    assert isinstance(instance, Concept)
    assert instance.key == str(BuiltinConcepts.UNKNOWN_CONCEPT)
    assert instance.body == "fake_concept"
def test_i_can_instantiate_a_concept():
    """new() on a user-defined concept copies serialized props and binds values."""
    engine = get_sheerka()
    template = get_default_concept()
    engine.create_new_concept(get_context(engine), template)
    instance = engine.new(template.key, a=10, b="value")
    assert engine.isinstance(instance, template)
    # All serialized properties must be carried over from the template.
    for prop_name in Concept.props_to_serialize:
        assert getattr(instance, prop_name) == getattr(template, prop_name)
    assert instance.props["a"].value == 10
    assert instance.props["b"].value == "value"
def test_instances_are_different_when_asking_for_new():
    """Two new() calls yield equal but distinct objects for non-unique concepts."""
    engine = get_sheerka()
    template = get_default_concept()
    engine.create_new_concept(get_context(engine), template)
    first = engine.new(template.key, a=10, b="value")
    second = engine.new(template.key, a=10, b="value")
    assert first == second
    assert first is not second  # same content, different identity
def test_i_get_the_same_instance_when_is_unique_is_true():
    """Concepts flagged is_unique behave as singletons under new()."""
    engine = get_sheerka()
    template = get_unique_concept()
    engine.create_new_concept(get_context(engine), template)
    first = engine.new(template.key, a=10, b="value")
    second = engine.new(template.key, a=10, b="value")
    assert first == second
    assert first is second  # singleton: the very same object both times
def test_i_cannot_instantiate_an_unknown_concept():
    """new() on a missing key yields an UNKNOWN_CONCEPT carrying the key."""
    engine = get_sheerka()
    instance = engine.new("fake_concept")
    assert engine.isinstance(instance, BuiltinConcepts.UNKNOWN_CONCEPT)
    assert instance.body == "fake_concept"
def test_i_cannot_instantiate_when_properties_are_not_recognized():
    """Passing a property the concept does not declare yields UNKNOWN_PROPERTY."""
    engine = get_sheerka()
    template = get_default_concept()
    engine.create_new_concept(get_context(engine), template)
    instance = engine.new(template.key, a=10, c="value")  # 'c' is not declared
    assert engine.isinstance(instance, BuiltinConcepts.UNKNOWN_PROPERTY)
    assert instance.property_name == "c"
    assert engine.isinstance(instance.concept, template)
def test_i_can_use_expect_one_when_empty():
    """expect_one on an empty list fails with IS_EMPTY."""
    engine = get_sheerka()
    outcome = engine.expect_one(get_context(engine), [])
    assert not outcome.status
    assert engine.isinstance(outcome.value, BuiltinConcepts.IS_EMPTY)
def test_i_can_use_expect_one_when_too_many_success():
    """expect_one fails with TOO_MANY_SUCCESS when several results succeed."""
    engine = get_sheerka()
    candidates = [
        ReturnValueConcept("who", True, None),
        ReturnValueConcept("who", True, None),
    ]
    outcome = engine.expect_one(get_context(engine), candidates)
    assert not outcome.status
    assert engine.isinstance(outcome.value, BuiltinConcepts.TOO_MANY_SUCCESS)
    assert outcome.value.obj == candidates  # the offending list is reported back
def test_i_can_use_expect_when_only_errors_1():
    """A single failed result reports TOO_MANY_ERRORS carrying the list."""
    engine = get_sheerka()
    candidates = [ReturnValueConcept("who", False, None)]
    outcome = engine.expect_one(get_context(engine), candidates)
    assert not outcome.status
    assert engine.isinstance(outcome.value, BuiltinConcepts.TOO_MANY_ERRORS)
    assert outcome.value.obj == candidates
def test_i_can_use_expect_when_only_errors_2():
    """Multiple failed results report TOO_MANY_ERRORS carrying the whole list."""
    engine = get_sheerka()
    candidates = [
        ReturnValueConcept("who", False, None),
        ReturnValueConcept("who", False, None),
    ]
    outcome = engine.expect_one(get_context(engine), candidates)
    assert not outcome.status
    assert engine.isinstance(outcome.value, BuiltinConcepts.TOO_MANY_ERRORS)
    assert outcome.value.obj == candidates
def test_i_can_use_expect_one_when_one_success_1():
    """A lone successful result is returned as-is."""
    engine = get_sheerka()
    candidates = [ReturnValueConcept("who", True, None)]
    outcome = engine.expect_one(get_context(engine), candidates)
    assert outcome.status
    assert outcome == candidates[0]
def test_i_can_use_expect_one_when_one_success_2():
    """With mixed results, expect_one returns the single successful one."""
    engine = get_sheerka()
    candidates = [
        ReturnValueConcept("who", False, None),
        ReturnValueConcept("who", True, None),
        ReturnValueConcept("who", False, None),
    ]
    outcome = engine.expect_one(get_context(engine), candidates)
    assert outcome.status
    assert outcome == candidates[1]  # the one success, regardless of position
def test_i_can_use_expect_one_when_not_a_list_true():
    """A bare successful result (not wrapped in a list) passes through."""
    engine = get_sheerka()
    outcome = engine.expect_one(get_context(engine), ReturnValueConcept("who", True, None))
    assert outcome.status
    assert outcome == ReturnValueConcept("who", True, None)
def test_i_can_use_expect_one_when_not_a_list_false():
    """A bare failed result is wrapped and reported as TOO_MANY_ERRORS."""
    engine = get_sheerka()
    outcome = engine.expect_one(get_context(engine), ReturnValueConcept("who", False, None))
    assert not outcome.status
    assert engine.isinstance(outcome.value, BuiltinConcepts.TOO_MANY_ERRORS)
    assert outcome.value.obj == [ReturnValueConcept("who", False, None)]
# NOTE(review): this test duplicates the name test_i_can_eval_simple_python_expressions
# defined earlier in this module; Python keeps only this later definition, so the
# earlier variant (which checks res[0].value.body) is silently shadowed — confirm
# which version is intended and remove or rename the other.
@pytest.mark.parametrize("text, expected", [
    ("1 + 1", 2),
    ("sheerka.test()", 'I have access to Sheerka !')
])
def test_i_can_eval_simple_python_expressions(text, expected):
    """Eval a plain Python expression and check the raw result value."""
    sheerka = get_sheerka()
    res = sheerka.eval(text)
    assert len(res) == 1
    assert res[0].status
    assert res[0].value == expected
def test_i_can_eval_simple_concept():
    """A cached concept whose body is a literal evaluates to that literal."""
    engine = get_sheerka()
    one = Concept(name="one", body="1").init_key()
    engine.add_in_cache(one)
    results = engine.eval("one")
    assert len(results) == 1
    assert results[0].status
    assert results[0].value == 1
def test_i_can_eval_def_concept_request():
    """A full 'def concept' request creates, caches and persists a new concept.

    The saved concept must match get_default_concept() except for the
    auto-assigned id and the absent description.
    """
    # NOTE(review): indentation inside this request text may have been lost in
    # formatting — confirm the DSL accepts the flat form against the repo.
    text = """
def concept a + b
where isinstance(a, int) and isinstance(b, int)
pre isinstance(a, int) and isinstance(b, int)
post isinstance(res, int)
as:
def func(x,y):
return x+y
func(a,b)
"""
    expected = get_default_concept()
    expected.id = "1001"
    expected.desc = None  # the request carries no description
    expected.init_key()
    sheerka = get_sheerka()
    res = sheerka.eval(text)
    assert len(res) == 1
    assert res[0].status
    assert sheerka.isinstance(res[0].value, BuiltinConcepts.NEW_CONCEPT)
    concept_saved = res[0].value.body
    for prop in Concept.props_to_serialize:
        assert getattr(concept_saved, prop) == getattr(expected, prop)
    assert concept_saved.key in sheerka.concepts_cache
    assert path.exists(sheerka.sdp.get_obj_path(SheerkaDataProvider.ObjectsFolder, concept_saved.get_digest()))
    # Removed three stray prose lines ("Test the new() functionnality" ...)
    # that were diff-merge residue and made the function invalid Python.
def test_i_can_eval_def_concept_part_when_one_part_is_a_ref_of_another_concept():
    """
    In this test, we test that the properties of 'concept a xx b' (which are 'a' and 'b')
    are correctly detected, because of the concept 'a plus b' in its body
    :return:
    """
    engine = get_sheerka()
    # concept 'a plus b' is known
    a_plus_b = Concept(name="a plus b").set_prop("a").set_prop("b").init_key()
    engine.add_in_cache(a_plus_b)
    results = engine.eval("def concept a xx b as a plus b")
    expected = Concept(name="a xx b", body="a plus b").set_prop("a").set_prop("b").init_key()
    expected.id = "1001"
    assert len(results) == 1
    assert results[0].status
    assert engine.isinstance(results[0].value, BuiltinConcepts.NEW_CONCEPT)
    saved = results[0].value.body
    for prop_name in Concept.props_to_serialize:
        assert getattr(saved, prop_name) == getattr(expected, prop_name)
    assert saved.key in engine.concepts_cache
    assert path.exists(engine.sdp.get_obj_path(SheerkaDataProvider.ObjectsFolder, saved.get_digest()))
def test_i_cannot_eval_the_same_def_concept_twice():
    """Re-evaluating an identical 'def concept' request must be rejected."""
    text = """
def concept a + b
where isinstance(a, int) and isinstance(b, int)
pre isinstance(a, int) and isinstance(b, int)
post isinstance(res, int)
as:
def func(x,y):
return x+y
func(a,b)
"""
    engine = get_sheerka()
    engine.eval(text)  # first definition succeeds
    results = engine.eval(text)  # second, identical one must fail
    assert len(results) == 1
    assert not results[0].status
    assert engine.isinstance(results[0].value, BuiltinConcepts.CONCEPT_ALREADY_DEFINED)
def get_sheerka():
    """Build a Sheerka engine initialized on the shared test root folder.

    :return: a ready-to-use Sheerka instance
    """
    sheerka = Sheerka()
    sheerka.initialize(root_folder)
    return sheerka
    # Removed a duplicated, unreachable second 'return sheerka' (diff residue).
def get_context(sheerka):
    """Return a throw-away execution context bound to *sheerka*."""
    context = ExecutionContext("test", "xxx", sheerka)
    return context
def get_default_concept():
    """Build the canonical 'a + b' concept used throughout these tests.

    Declares props 'a' and 'b' with placeholder values; where/pre/post
    require int operands and an int result.

    NOTE(review): the indentation inside the body string ("\n return x+y")
    may have been collapsed by formatting — verify against the repository.
    :return: the configured Concept (key not yet initialized)
    """
    concept = Concept(
        name="a + b",
        where="isinstance(a, int) and isinstance(b, int)",
        pre="isinstance(a, int) and isinstance(b, int)",
        post="isinstance(res, int)",
        body="def func(x,y):\n return x+y\nfunc(a,b)",
        desc="specific description")
    concept.set_prop("a", "value1")
    concept.set_prop("b", "value2")
    return concept
def get_unique_concept():
    """Return a concept flagged as unique (singleton under new())."""
    unique = Concept(name="unique", is_unique=True)
    return unique
+68 -7
View File
@@ -122,7 +122,7 @@ class ObjWithDigestNoKey:
self.b == obj.b
def __repr__(self):
return f"ObjNoKey({self.a}, {self.b})"
return f"ObjWithDigestNoKey({self.a}, {self.b})"
def get_digest(self):
return str(self.a) + str(self.b)
@@ -142,7 +142,7 @@ class ObjWithDigestWithKey:
self.b == obj.b
def __repr__(self):
return f"ObjNoKey({self.a}, {self.b})"
return f"ObjWithDigestWithKey({self.a}, {self.b})"
def get_key(self):
return self.a
@@ -466,7 +466,7 @@ def test_i_cannot_add_the_same_digest_twice_in_the_same_entry():
assert error.value.obj.get_digest() == ObjWithDigestNoKey("a", "b").get_digest()
assert error.value.key == "entry"
assert error.value.args[0] == "duplicate key"
assert error.value.args[0] == "Duplicate object."
def test_i_cannot_add_the_same_digest_twice_in_the_same_entry2():
@@ -483,7 +483,7 @@ def test_i_cannot_add_the_same_digest_twice_in_the_same_entry2():
assert error.value.obj.get_digest() == ObjWithDigestNoKey("a", "b").get_digest()
assert error.value.key == "entry"
assert error.value.args[0] == "duplicate key"
assert error.value.args[0] == "Duplicate object."
def test_i_cannot_add_the_same_digest_twice_in_the_same_entry3():
@@ -499,7 +499,7 @@ def test_i_cannot_add_the_same_digest_twice_in_the_same_entry3():
assert error.value.obj.get_digest() == ObjWithDigestWithKey("a", "b").get_digest()
assert error.value.key == "entry.a"
assert error.value.args[0] == "duplicate key"
assert error.value.args[0] == "Duplicate object."
def test_i_cannot_add_the_same_digest_twice_in_the_same_entry4():
@@ -516,7 +516,7 @@ def test_i_cannot_add_the_same_digest_twice_in_the_same_entry4():
assert error.value.obj.get_digest() == ObjWithDigestWithKey("a", "b").get_digest()
assert error.value.key == "entry.a"
assert error.value.args[0] == "duplicate key"
assert error.value.args[0] == "Duplicate object."
def test_i_can_get_and_set_key():
@@ -1198,7 +1198,7 @@ def test_i_can_remove_from_cache():
assert not sdp.in_cache(category, key)
def test_i_can_test_than_an_entry_exits():
def test_i_can_test_than_an_entry_exists():
sdp = SheerkaDataProvider(".sheerka")
assert not sdp.exists("entry")
@@ -1206,6 +1206,67 @@ def test_i_can_test_than_an_entry_exits():
assert sdp.exists("entry")
def test_i_can_test_if_a_key_exists():
    """exists(entry, key) is true only after an object with that key was added."""
    sdp = SheerkaDataProvider(".sheerka")
    stored = ObjWithDigestWithKey("key", "value")
    assert not sdp.exists("entry")
    assert not sdp.exists("entry", stored.get_key())
    sdp.add(evt_digest, "entry", stored)
    assert not sdp.exists("entry", "wrong_key")
    assert sdp.exists("entry", stored.get_key())
def test_i_can_test_that_the_object_exists():
    """exists(entry, key, digest) matches only the exact stored digest."""
    sdp = SheerkaDataProvider(".sheerka")
    target = ObjWithDigestWithKey("key", "value")
    assert not sdp.exists("entry")
    assert not sdp.exists("entry", target.get_key())
    assert not sdp.exists("entry", target.get_key(), target.get_digest())
    # test for a single item under the key
    sdp.add(evt_digest, "entry", target)
    assert not sdp.exists("entry", target.get_key(), "wrong_digest")
    assert sdp.exists("entry", target.get_key(), target.get_digest())
    # test for a list item under the key
    sdp.add(evt_digest, "entry2", ObjWithDigestWithKey("key", "value2"))
    assert not sdp.exists("entry2", target.get_key(), target.get_digest())
    sdp.add(evt_digest, "entry2", ObjWithDigestWithKey("key", "value3"))
    assert not sdp.exists("entry2", target.get_key(), target.get_digest())
    sdp.add(evt_digest, "entry2", target)
    assert sdp.exists("entry2", target.get_key(), target.get_digest())
def test_i_can_test_than_the_object_exists_when_using_references():
    """Digest-existence checks still work when objects are stored as references."""
    sdp = SheerkaDataProvider(".sheerka")
    # References require a serializer able to handle the stored type.
    sdp.serializer.register(PickleSerializer(lambda o: isinstance(o, ObjWithDigestWithKey)))
    target = ObjWithDigestWithKey("key", "value")
    assert not sdp.exists("entry")
    assert not sdp.exists("entry", target.get_key())
    assert not sdp.exists("entry", target.get_key(), target.get_digest())
    # test for a single item under the key
    sdp.add(evt_digest, "entry", target, use_ref=True)
    assert not sdp.exists("entry", target.get_key(), "wrong_digest")
    assert sdp.exists("entry", target.get_key(), target.get_digest())
    # test for a list item under the key
    sdp.add(evt_digest, "entry2", ObjWithDigestWithKey("key", "value2"), use_ref=True)
    assert not sdp.exists("entry2", target.get_key(), target.get_digest())
    sdp.add(evt_digest, "entry2", ObjWithDigestWithKey("key", "value3"), use_ref=True)
    assert not sdp.exists("entry2", target.get_key(), target.get_digest())
    sdp.add(evt_digest, "entry2", target, use_ref=True)
    assert sdp.exists("entry2", target.get_key(), target.get_digest())
def test_i_can_save_and_load_object_ref_with_history():
sdp = SheerkaDataProvider(".sheerka")
obj = ObjDumpJson("my_key", "value1")
+119
View File
@@ -0,0 +1,119 @@
import pytest
from core.tokenizer import Tokenizer, Token, TokenKind, LexerError, Keywords
def test_i_can_tokenize():
    """Tokenize one source string exercising every TokenKind: punctuation,
    all four newline conventions (\\n, \\n\\r, \\r, \\r\\n), an identifier,
    int and float literals, both quote styles and the =, |, & operators.

    Each Token is (kind, text, absolute_index, line, column), 1-based
    line/column.

    NOTE(review): whitespace runs inside these literals appear collapsed by
    the rendering — e.g. the WHITESPACE token at index 10 is followed by
    COMMA at index 14 (a 4-char run), and "\\t \\t" spans indices 37..40 —
    verify the literals against the repository before editing.
    """
    source = "+*-/{}[]() ,;:.?\n\n\r\r\r\nidentifier_0\t \t10.15 10 'string\n' \"another string\"=|&"
    tokens = list(Tokenizer(source))
    assert tokens[0] == Token(TokenKind.PLUS, "+", 0, 1, 1)
    assert tokens[1] == Token(TokenKind.STAR, "*", 1, 1, 2)
    assert tokens[2] == Token(TokenKind.MINUS, "-", 2, 1, 3)
    assert tokens[3] == Token(TokenKind.SLASH, "/", 3, 1, 4)
    assert tokens[4] == Token(TokenKind.LBRACE, "{", 4, 1, 5)
    assert tokens[5] == Token(TokenKind.RBRACE, "}", 5, 1, 6)
    assert tokens[6] == Token(TokenKind.LBRACKET, "[", 6, 1, 7)
    assert tokens[7] == Token(TokenKind.RBRACKET, "]", 7, 1, 8)
    assert tokens[8] == Token(TokenKind.LPAR, "(", 8, 1, 9)
    assert tokens[9] == Token(TokenKind.RPAR, ")", 9, 1, 10)
    assert tokens[10] == Token(TokenKind.WHITESPACE, " ", 10, 1, 11)
    assert tokens[11] == Token(TokenKind.COMMA, ",", 14, 1, 15)
    assert tokens[12] == Token(TokenKind.SEMICOLON, ";", 15, 1, 16)
    assert tokens[13] == Token(TokenKind.COLON, ":", 16, 1, 17)
    assert tokens[14] == Token(TokenKind.DOT, ".", 17, 1, 18)
    assert tokens[15] == Token(TokenKind.QMARK, "?", 18, 1, 19)
    # Newlines: each variant resets the column and bumps the line counter.
    assert tokens[16] == Token(TokenKind.NEWLINE, "\n", 19, 1, 20)
    assert tokens[17] == Token(TokenKind.NEWLINE, "\n\r", 20, 2, 1)
    assert tokens[18] == Token(TokenKind.NEWLINE, "\r", 22, 3, 1)
    assert tokens[19] == Token(TokenKind.NEWLINE, "\r\n", 23, 4, 1)
    assert tokens[20] == Token(TokenKind.IDENTIFIER, "identifier_0", 25, 5, 1)
    assert tokens[21] == Token(TokenKind.WHITESPACE, "\t \t", 37, 5, 13)
    assert tokens[22] == Token(TokenKind.NUMBER, "10.15", 41, 5, 17)
    assert tokens[23] == Token(TokenKind.WHITESPACE, " ", 46, 5, 22)
    assert tokens[24] == Token(TokenKind.NUMBER, "10", 47, 5, 23)
    assert tokens[25] == Token(TokenKind.WHITESPACE, " ", 49, 5, 25)
    # A string literal may legally contain a raw newline.
    assert tokens[26] == Token(TokenKind.STRING, "'string\n'", 50, 5, 26)
    assert tokens[27] == Token(TokenKind.WHITESPACE, " ", 59, 6, 1)
    assert tokens[28] == Token(TokenKind.STRING, '"another string"', 60, 6, 2)
    assert tokens[29] == Token(TokenKind.EQUALS, '=', 76, 6, 18)
    assert tokens[30] == Token(TokenKind.VBAR, '|', 77, 6, 19)
    assert tokens[31] == Token(TokenKind.AMPER, '&', 78, 6, 20)
@pytest.mark.parametrize("text, expected", [
    ("_ident", True),
    ("ident", True),
    ("ident123", True),
    ("ident_123", True),
    ("ident-like-this", True),
    ("àèùéû", True),
    ("011254", False),
    ("0abcd", False),
    ("-abcd", False)
])
def test_i_can_tokenize_identifiers(text, expected):
    """The first token is an IDENTIFIER exactly for inputs that start like one."""
    first_token = list(Tokenizer(text))[0]
    is_identifier = first_token.type == TokenKind.IDENTIFIER
    assert is_identifier == expected
@pytest.mark.parametrize("text, error_text, index, line, column", [
    ("'string", "'string", 7, 1, 8),
    ('"string', '"string', 7, 1, 8),
    ('"a" + "string', '"string', 13, 1, 14),
    ('"a"\n\n"string', '"string', 12, 3, 8),
])
def test_i_can_detect_unfinished_strings(text, error_text, index, line, column):
    """An unterminated string literal raises LexerError with precise location info."""
    with pytest.raises(LexerError) as excinfo:
        list(Tokenizer(text))
    error = excinfo.value
    assert error.text == error_text
    assert error.index == index
    assert error.line == line
    assert error.column == column
@pytest.mark.parametrize("text, expected_text, expected_newlines", [
    ("'foo'", "'foo'", 0),
    ('"foo"', '"foo"', 0),
    ("'foo\rbar'", "'foo\rbar'", 1),
    ("'foo\nbar'", "'foo\nbar'", 1),
    ("'foo\n\rbar'", "'foo\n\rbar'", 1),
    ("'foo\r\nbar'", "'foo\r\nbar'", 1),
    ("'foo\r\rbar'", "'foo\r\rbar'", 2),
    ("'foo\n\nbar'", "'foo\n\nbar'", 2),
    ("'foo\r\n\n\rbar'", "'foo\r\n\n\rbar'", 2),
    ("'\rfoo\rbar\r'", "'\rfoo\rbar\r'", 3),
    ("'\nfoo\nbar\n'", "'\nfoo\nbar\n'", 3),
    ("'\n\rfoo\r\n'", "'\n\rfoo\r\n'", 2),
    (r"'foo\'bar'", r"'foo\'bar'", 0),
    (r'"foo\"bar"', r'"foo\"bar"', 0),
    ('"foo"bar"', '"foo"', 0),
    ("'foo'bar'", "'foo'", 0),
])
def test_i_can_parse_strings(text, expected_text, expected_newlines):
    """eat_string returns the consumed literal plus how many newlines it spans
    (a \\r\\n or \\n\\r pair counts as one)."""
    consumed, newline_count = Tokenizer(text).eat_string(0, 1, 1)
    assert consumed == expected_text
    assert newline_count == expected_newlines
@pytest.mark.parametrize("text", [
    "1", "3.1415", "0.5", "01", "-5", "-5.10"
])
def test_i_can_parse_numbers(text):
    """Numeric literals come back as a single NUMBER token carrying the raw text."""
    first_token = list(Tokenizer(text))[0]
    assert first_token.type == TokenKind.NUMBER
    assert first_token.value == text
@pytest.mark.parametrize("text, expected", [
    ("def", Keywords.DEF),
    ("concept", Keywords.CONCEPT),
    ("as", Keywords.AS),
    ("pre", Keywords.PRE),
    ("post", Keywords.POST)
])
def test_i_can_recognize_keywords(text, expected):
    """Reserved words tokenize as KEYWORD carrying the matching Keywords member."""
    first_token = list(Tokenizer(text))[0]
    assert first_token.type == TokenKind.KEYWORD
    assert first_token.value == expected
+11 -1
View File
@@ -6,7 +6,17 @@ import pytest
(None, "",),
([], ""),
(["hello", "world"], "hello world"),
(["hello world", "my friend"], '"hello world" "my friend"')
# (["hello world", "my friend"], '"hello world" "my friend"')
])
def test_i_can_create_string_from_a_list(lst, as_string):
assert core.utils.sysarg_to_string(lst) == as_string
def test_i_can_get_classes():
    """get_classes lists a module's classes, each resolvable via get_class."""
    found = list(core.utils.get_classes("core.builtin_concepts"))
    error_cls = core.utils.get_class("core.builtin_concepts.ErrorConcept")
    return_cls = core.utils.get_class("core.builtin_concepts.ReturnValueConcept")
    assert len(found) > 2
    assert error_cls in found
    assert return_cls in found