"""Tests for DefaultParser: parsing of ``def concept`` declarations."""
import pytest
|
|
import ast
|
|
|
|
from core.builtin_concepts import ParserResultConcept, BuiltinConcepts, ReturnValueConcept
|
|
from core.sheerka import Sheerka, ExecutionContext
|
|
from parsers.ConceptLexerParser import OrderedChoice, StrMatch, ConceptMatch
|
|
from parsers.PythonParser import PythonParser, PythonNode
|
|
from core.tokenizer import Keywords, Tokenizer
|
|
from parsers.DefaultParser import DefaultParser, NameNode, SyntaxErrorNode
|
|
from parsers.DefaultParser import UnexpectedTokenErrorNode, DefConceptNode
|
|
from parsers.BnfParser import BnfParser
|
|
|
|
|
|
# def nop():
|
|
# return NopNode()
|
|
#
|
|
#
|
|
# def n(number):
|
|
# return NumberNode([], number)
|
|
#
|
|
#
|
|
# def s(string, quote="'"):
|
|
# return StringNode([], string, quote)
|
|
#
|
|
#
|
|
# def v(name):
|
|
# return VariableNode([], name)
|
|
#
|
|
#
|
|
# def t():
|
|
# return TrueNode([])
|
|
#
|
|
#
|
|
# def f():
|
|
# return FalseNode([])
|
|
#
|
|
#
|
|
# def null():
|
|
# return NullNode([])
|
|
#
|
|
#
|
|
# def b(operator, left, right):
|
|
# return BinaryNode([], operator, left, right)
|
|
|
|
#
|
|
# def compare_ast(left, right):
|
|
# left_as_string = ast.dump(left)
|
|
# left_as_string = left_as_string.replace(", ctx=Load()", "")
|
|
# left_as_string = left_as_string.replace(", kind=None", "")
|
|
#
|
|
# right_as_string = right if isinstance(right, str) else ast.dump(right)
|
|
# right_as_string = right_as_string.replace(", ctx=Load()", "")
|
|
# right_as_string = right_as_string.replace(", kind=None", "")
|
|
#
|
|
# return left_as_string == right_as_string
|
|
#
|
|
|
|
def get_concept(name, where=None, pre=None, post=None, body=None, definition=None):
    """Build a ``DefConceptNode`` populated with the given optional parts.

    *where*, *pre*, *post* and *body* are each normalized through
    ``get_concept_part``; *definition* is wrapped in a ``ReturnValueConcept``
    attributed to the regex parser.
    """
    node = DefConceptNode([], name=NameNode(list(Tokenizer(name))))

    # Attach only the parts that were actually supplied.
    for attr, value in (("body", body), ("where", where), ("pre", pre), ("post", post)):
        if value:
            setattr(node, attr, get_concept_part(value))

    if definition:
        node.definition = ReturnValueConcept(
            "Parsers:RegexParser",
            True,
            definition)

    return node
def get_context():
    """Return a fresh ExecutionContext backed by an in-memory Sheerka engine."""
    engine = Sheerka(skip_builtins_in_db=True)
    engine.initialize("mem://")
    return ExecutionContext("test", "xxx", engine)
def get_concept_part(part):
    """Normalize *part* into a ``ReturnValueConcept`` for a ``DefConceptNode``.

    Accepted inputs:
      * ``str`` -- parsed as a single Python expression and wrapped;
      * ``PythonNode`` -- wrapped as-is;
      * ``ReturnValueConcept`` -- returned unchanged.

    Raises:
        TypeError: for any other type (the previous version silently
            returned ``None``, hiding test-fixture mistakes).
    """
    if isinstance(part, ReturnValueConcept):
        return part

    if isinstance(part, str):
        # A bare source string is treated as one eval-mode expression;
        # PythonNode keeps the original source alongside the parsed AST.
        part = PythonNode(part, ast.parse(part, mode="eval"))

    if isinstance(part, PythonNode):
        return ReturnValueConcept(
            who="Parsers:DefaultParser",
            status=True,
            value=ParserResultConcept(
                source=part.source,
                parser=PythonParser(),
                value=part))

    raise TypeError(f"Unsupported concept part: {type(part).__name__}")
# @pytest.mark.parametrize("text, expected", [
|
|
# ("1", n(1)),
|
|
# ("+1", n(1)),
|
|
# ("-1", n(-1)),
|
|
# ("'foo'", s("foo")),
|
|
# ("identifier", v("identifier")),
|
|
# ("true", t()),
|
|
# ("false", f()),
|
|
# ("null", null()),
|
|
# ("1 * 2", b(TokenKind.STAR, n(1), n(2))),
|
|
# ("1 * 2/3", b(TokenKind.STAR, n(1), b(TokenKind.SLASH, n(2), n(3)))),
|
|
# ("1 + 2", b(TokenKind.PLUS, n(1), n(2))),
|
|
# ("1 + 2 - 3", b(TokenKind.PLUS, n(1), b(TokenKind.MINUS, n(2), n(3)))),
|
|
# ("1 + 2-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))),
|
|
# ("1 + 2 +-3", b(TokenKind.PLUS, n(1), b(TokenKind.PLUS, n(2), n(-3)))),
|
|
# ("1 + 2 * 3", b(TokenKind.PLUS, n(1), b(TokenKind.STAR, n(2), n(3)))),
|
|
# ("1 * 2 + 3", b(TokenKind.PLUS, b(TokenKind.STAR, n(1), n(2)), n(3))),
|
|
# ("(1 + 2) * 3", b(TokenKind.STAR, b(TokenKind.PLUS, n(1), n(2)), n(3))),
|
|
# ("1 * (2 + 3)", b(TokenKind.STAR, n(1), b(TokenKind.PLUS, n(2), n(3)))),
|
|
# ])
|
|
# def test_i_can_parse_simple_expression(text, expected):
|
|
# parser = DefaultParser(text, None)
|
|
# ast = parser.parse()
|
|
# assert ast.is_same(expected)
|
|
#
|
|
#
|
|
# @pytest.mark.parametrize("text, token_found, expected_tokens", [
|
|
# ("1+", TokenKind.EOF,
|
|
# [TokenKind.NUMBER, TokenKind.STRING, TokenKind.IDENTIFIER, 'true', 'false', 'null', TokenKind.LPAR]),
|
|
# ("(1+1", TokenKind.EOF, [TokenKind.RPAR])
|
|
# ])
|
|
# def test_i_can_detect_unexpected_end_of_code(text, token_found, expected_tokens):
|
|
# parser = DefaultParser(text, None)
|
|
# parser.parse()
|
|
#
|
|
# assert parser.has_error
|
|
# assert parser.error_sink[0].tokens[0].type == token_found
|
|
# assert parser.error_sink[0].expected_tokens == expected_tokens
|
|
|
|
|
|
@pytest.mark.parametrize("text, expected", [
    ("def concept hello", get_concept(name="hello")),
    ("def concept hello ", get_concept(name="hello")),
    ("def concept a + b", get_concept(name="a + b")),
    ("def concept a+b", get_concept(name="a + b")),
    ("def concept 'a+b'+c", get_concept(name="'a+b' + c")),
    ("def concept 'as if'", get_concept(name="'as if'")),
    ("def concept 'as' if", get_concept(name="'as if'")),
    ("def concept hello as 'hello'", get_concept(name="hello", body="'hello'")),
    ("def concept hello as 1", get_concept(name="hello", body="1")),
    ("def concept hello as 1 + 1", get_concept(name="hello", body="1 + 1")),
])
def test_i_can_parse_def_concept(text, expected):
    """A single-line ``def concept`` statement parses into the expected node."""
    parser = DefaultParser()
    result = parser.parse(get_context(), text)

    assert result.status
    assert result.who == parser.name
    assert isinstance(result.value, ParserResultConcept)
    assert result.value.source == text
    assert result.value.value == expected
def test_i_can_parse_complex_def_concept_statement():
    """where/pre/post/as clauses are all captured from a multi-clause statement."""
    text = """def concept a plus b
where a,b
pre isinstance(a, int) and isinstance(b, float)
post isinstance(res, int)
as res = a + b
"""
    expected = get_concept(
        name="a plus b",
        where="a,b",
        pre="isinstance(a, int) and isinstance(b, float)",
        post="isinstance(res, int)",
        body=PythonNode("res = a + b", ast.parse("res = a + b", mode="exec")),
    )

    result = DefaultParser().parse(get_context(), text)

    assert result.status
    assert isinstance(result.value, ParserResultConcept)
    assert result.value.value == expected
def test_i_can_have_mutilines_declarations():
    """Without a colon, the body continues on the following unindented lines."""
    text = """
def concept add one to a as
def func(x):
    return x+1
func(a)
"""
    body_source = "def func(x):\n    return x+1\nfunc(a)"
    # NOTE(review): the expected name keeps a trailing space -- the parser
    # apparently preserves the separator before ``as``.
    expected = get_concept(
        name="add one to a ",
        body=PythonNode(body_source, ast.parse(body_source, mode="exec")),
    )

    result = DefaultParser().parse(get_context(), text)

    assert result.status
    assert isinstance(result.value, ParserResultConcept)
    assert result.value.value == expected
def test_i_can_use_colon_to_use_indentation():
    """With a trailing colon, an indented block forms the body (then dedented)."""
    text = """
def concept add one to a as:
    def func(x):
        return x+1
    func(a)
"""
    body_source = "def func(x):\n    return x+1\nfunc(a)"
    expected = get_concept(
        name="add one to a ",
        body=PythonNode(body_source, ast.parse(body_source, mode="exec")),
    )

    result = DefaultParser().parse(get_context(), text)

    assert result.status
    assert isinstance(result.value, ParserResultConcept)
    assert result.value.value == expected
def test_indentation_is_mandatory_after_a_colon():
    """A trailing colon followed by an unindented body is a syntax error."""
    text = """
def concept add one to a as:
def func(x):
    return x+1
func(a)
"""
    result = DefaultParser().parse(get_context(), text)
    failure = result.value

    assert not result.status
    assert isinstance(failure, ParserResultConcept)
    assert isinstance(failure.value[0], SyntaxErrorNode)
    assert failure.value[0].message == "Indentation not found."
def test_indentation_is_not_allowed_if_the_colon_is_missing():
    """An indented body without the introducing colon drowns the parser in errors."""
    text = """
def concept add one to a as
    def func(x):
        return x+1
    func(a)
"""
    context = get_context()

    result = DefaultParser().parse(context, text)
    failure = result.value

    assert not result.status
    assert isinstance(failure, ParserResultConcept)
    assert context.sheerka.isinstance(failure.value[0], BuiltinConcepts.TOO_MANY_ERRORS)
def test_name_is_mandatory():
    """``def concept`` with no name before ``as`` is rejected with a clear message."""
    result = DefaultParser().parse(get_context(), "def concept as 'hello'")
    failure = result.value

    assert not result.status
    assert isinstance(failure, ParserResultConcept)
    assert isinstance(failure.value[0], SyntaxErrorNode)
    assert failure.value[0].message == "Name is mandatory"
def test_concept_keyword_is_mandatory_but_the_concept_is_recognized():
    """A missing ``concept`` keyword errors out, yet a best-effort parse is kept."""
    text = "def hello as a where b pre c post d"
    expected = get_concept(name="hello", body="a", where="b", pre="c", post="d")

    result = DefaultParser().parse(get_context(), text)
    failure = result.value

    assert not result.status
    assert isinstance(failure, ParserResultConcept)
    error = failure.value[0]
    assert isinstance(error, UnexpectedTokenErrorNode)
    assert error.message == "Syntax error."
    assert error.expected_tokens == [Keywords.CONCEPT]
    # The recovered parse is still exposed on the result.
    assert failure.try_parsed == expected
@pytest.mark.parametrize("text", [
    "def concept hello where 1+",
    "def concept hello pre 1+",
    "def concept hello post 1+",
    "def concept hello as 1+"
])
def test_i_can_detect_error_in_declaration(text):
    """A truncated expression in any clause surfaces as TOO_MANY_ERRORS."""
    context = get_context()

    result = DefaultParser().parse(context, text)
    failure = result.value

    assert not result.status
    assert isinstance(failure, ParserResultConcept)
    assert context.sheerka.isinstance(failure.value[0], BuiltinConcepts.TOO_MANY_ERRORS)
def test_new_line_is_not_allowed_in_the_name():
    """A newline inside a concept name is a dedicated syntax error."""
    text = "def concept hello \n my friend as 'hello'"

    result = DefaultParser().parse(get_context(), text)

    assert not result.status
    assert result.value.value == [SyntaxErrorNode([], "Newline are not allowed in name.")]
def test_i_can_parse_def_concept_from_regex():
    """A ``from bnf`` declaration attaches the parsed grammar as the definition."""
    text = "def concept name from bnf a_concept | 'a_string' as __definition[0]"

    grammar = OrderedChoice(ConceptMatch("a_concept"), StrMatch("a_string"))
    bnf_result = ParserResultConcept(BnfParser(), "a_concept | 'a_string'", grammar, grammar)
    expected = get_concept(name="name", body="__definition[0]", definition=bnf_result)

    parser = DefaultParser()
    result = parser.parse(get_context(), text)

    assert result.status
    assert result.who == parser.name
    assert isinstance(result.value, ParserResultConcept)
    assert result.value.source == text
    assert result.value.value == expected
def test_i_can_detect_empty_bnf_declaration():
    """``from bnf`` with no grammar before ``as`` is rejected."""
    text = "def concept name from bnf as __definition[0]"

    result = DefaultParser().parse(get_context(), text)

    assert not result.status
    assert result.value.value[0] == SyntaxErrorNode([], "Empty declaration")