"""Unit tests for ``core.utils`` helpers: class discovery, list products,
token stripping, escaping, and concept key/id (de)serialization."""

import core.utils
import pytest
from core.concept import ConceptParts, Concept
from core.tokenizer import Token, TokenKind


def get_tokens(items):
    """Turn a list of plain strings into Token objects for test fixtures.

    " " -> WHITESPACE, "\\n" -> NEWLINE, "" -> EOF, anything else -> IDENTIFIER.
    All position fields are zeroed since they are irrelevant to these tests.
    """
    def to_token(text):
        if text == " ":
            return Token(TokenKind.WHITESPACE, " ", 0, 0, 0)
        if text == "\n":
            return Token(TokenKind.NEWLINE, "\n", 0, 0, 0)
        if text == "":
            # NOTE(review): the EOF token's text is "\n", not "" — presumably
            # mirroring the tokenizer's own EOF convention; confirm upstream.
            return Token(TokenKind.EOF, "\n", 0, 0, 0)
        return Token(TokenKind.IDENTIFIER, text, 0, 0, 0)

    return [to_token(entry) for entry in items]


@pytest.mark.parametrize("lst, as_string", [
    (None, "",),
    ([], ""),
    (["hello", "world"], "hello world"),
    # (["hello world", "my friend"], '"hello world" "my friend"')
])
def test_i_can_create_string_from_a_list(lst, as_string):
    """None/empty collapse to "" and items are joined with single spaces."""
    assert core.utils.sysarg_to_string(lst) == as_string


def test_i_can_get_classes():
    """get_classes finds the known built-in concept classes in the module."""
    discovered = list(core.utils.get_classes("core.builtin_concepts"))

    error_concept = core.utils.get_class("core.builtin_concepts.ErrorConcept")
    return_value_concept = core.utils.get_class("core.builtin_concepts.ReturnValueConcept")

    assert len(discovered) > 2
    assert error_concept in discovered
    assert return_value_concept in discovered


def test_i_can_get_base_classes():
    """get_classes_from_package scans the whole 'parsers' package."""
    discovered = list(core.utils.get_classes_from_package("parsers"))

    # example of classes that should be in the result
    expected_classes = [
        core.utils.get_class("parsers.BaseParser.BaseParser"),
        core.utils.get_class("parsers.DefaultParser.DefaultParser"),
        core.utils.get_class("parsers.ExactConceptParser.ExactConceptParser"),
        core.utils.get_class("parsers.PythonParser.PythonParser"),
        core.utils.get_class("parsers.BaseParser.Node"),
        core.utils.get_class("parsers.DefaultParser.DefConceptNode"),
        core.utils.get_class("parsers.PythonParser.PythonNode"),
    ]
    for cls in expected_classes:
        assert cls in discovered


def test_i_can_get_sub_classes():
    """get_sub_classes returns strict subclasses only — the base is excluded."""
    sub_classes = core.utils.get_sub_classes("parsers", "parsers.BaseParser.BaseParser")

    # example of classes that should be (or not) in the result
    base_parser = core.utils.get_class("parsers.BaseParser.BaseParser")
    default_parser = core.utils.get_class("parsers.DefaultParser.DefaultParser")
    exact_concept_parser = core.utils.get_class("parsers.ExactConceptParser.ExactConceptParser")
    python_parser = core.utils.get_class("parsers.PythonParser.PythonParser")
    bnf_node_parser = core.utils.get_class("parsers.BnfNodeParser.BnfNodeParser")

    assert base_parser not in sub_classes
    assert default_parser in sub_classes
    assert exact_concept_parser in sub_classes
    assert python_parser in sub_classes
    assert bnf_node_parser in sub_classes


@pytest.mark.parametrize("a,b, expected", [
    ([], [], []),
    ([], ['a'], ['a']),
    ([[]], ['a'], [['a']]),
    (['a'], [], ['a']),
    ([['a']], [], [['a']]),
    ([['a']], ['b'], [['a', 'b']]),
    ([['a'], ['b']], ['c'], [['a', 'c'], ['b', 'c']]),
    ([['a1', 'a2'], ['b1', 'b2', 'b3']], ['c'], [['a1', 'a2', 'c'], ['b1', 'b2', 'b3', 'c']]),
    ([[]], ['a', 'b'], [['a'], ['b']]),
    ([['a'], ['b']], ['c', 'd', 'e'], [['a', 'c'], ['b', 'c'], ['a', 'd'], ['b', 'd'], ['a', 'e'], ['b', 'e']]),
])
def test_i_can_product(a, b, expected):
    """product extends each sub-list of `a` with each element of `b`."""
    assert core.utils.product(a, b) == expected


@pytest.mark.parametrize("input_as_list, expected_as_list", [
    ([" "], []),
    ([" ", "one"], ["one"]),
    (["one", " "], ["one"]),
    ([" ", "one", " "], ["one"]),
    (["\n", "one"], ["one"]),
    (["one", "\n"], ["one"]),
    (["\n", "one", "\n"], ["one"]),
    ([" ", "\n", "one"], ["one"]),
    (["one", " ", "\n"], ["one"]),
    ([" ", "\n", "one", " ", "\n"], ["one"]),
    (["\n", " ", "one"], ["one"]),
    (["one", "\n", " "], ["one"]),
    (["\n", " ", "one", "\n", " "], ["one"]),
    ([" ", "\n", " ", "one"], ["one"]),
    (["one", " ", "\n", " "], ["one"]),
    ([" ", "\n", " ", "one", " ", "\n", " "], ["one"]),
    (["\n", " ", "\n", "one"], ["one"]),
    (["one", "\n", " ", "\n"], ["one"]),
    (["\n", " ", "\n", "one", "\n", " ", "\n"], ["one"]),
])
def test_i_can_strip(input_as_list, expected_as_list):
    """strip_tokens removes leading/trailing whitespace and newline tokens."""
    result = core.utils.strip_tokens(get_tokens(input_as_list))
    assert result == get_tokens(expected_as_list)


def test_by_default_eof_is_not_stripped():
    """Without the extra flag, a trailing EOF token anchors the tail in place."""
    tokens = get_tokens(["one", "two", " ", "\n", ""])
    assert core.utils.strip_tokens(tokens) == get_tokens(["one", "two", " ", "\n", ""])


def test_i_can_strip_eof():
    """Passing True also drops the EOF token and the whitespace before it."""
    tokens = get_tokens(["one", "two", " ", "\n", ""])
    assert core.utils.strip_tokens(tokens, True) == get_tokens(["one", "two"])


def test_i_can_escape():
    """escape_char backslash-escapes every occurrence of the given character."""
    escaped = core.utils.escape_char("hello 'world' my friend", "'")
    assert escaped == "hello \\'world\\' my friend"


@pytest.mark.parametrize("text, expected_key, expected_id", [
    (None, None, None),
    (10, None, None),
    ("", None, None),
    ("xxx", None, None),
    ("c:", None, None),
    ("c:key", None, None),
    ("c:key:", "key", None),
    ("c:key|id", None, None),
    ("c:key|id:", "key", "id"),
    ("c:|id:", None, "id"),
    ("c:key|:", "key", None),
])
def test_i_can_unstr_concept(text, expected_key, expected_id):
    """unstr_concept parses 'c:key|id:' strings; malformed input yields Nones."""
    key, ident = core.utils.unstr_concept(text)
    assert key == expected_key
    assert ident == expected_id


def test_i_can_str_concept():
    """str_concept renders (key, id) tuples and Concept instances alike."""
    # tuple inputs
    assert core.utils.str_concept(("key", "id")) == "c:key|id:"
    assert core.utils.str_concept((None, "id")) == "c:|id:"
    assert core.utils.str_concept(("key", None)) == "c:key:"
    assert core.utils.str_concept((None, None)) == ""
    assert core.utils.str_concept(("key", "id"), skip_key=True) == "c:|id:"

    # Concept inputs
    concept = Concept("foo").init_key()
    assert core.utils.str_concept(concept) == "c:foo:"
    concept.metadata.id = "1001"
    assert core.utils.str_concept(concept) == "c:foo|1001:"
    assert core.utils.str_concept(concept, skip_key=True) == "c:|1001:"


@pytest.mark.parametrize("text, expected", [
    (None, None),
    (10, None),
    ("", None),
    ("xxx", None),
    ("xxx.", None),
    ("xxx.yyy", None),
    ("core.concept.ConceptParts.BODY", ConceptParts.BODY),
])
def test_i_can_decode_enum(text, expected):
    """decode_enum resolves dotted enum paths; anything else maps to None."""
    assert core.utils.decode_enum(text) == expected


def test_encode_concept_key_id():
    """encode_concept builds the __C__..__C__ wire form from tuples/Concepts."""
    assert core.utils.encode_concept(("key", "id")) == "__C__KEY_key__ID_id__C__"
    assert core.utils.encode_concept((None, "id")) == "__C__KEY_00None00__ID_id__C__"
    assert core.utils.encode_concept(("key", None)) == "__C__KEY_key__ID_00None00__C__"
    assert core.utils.encode_concept(("key", "id"), True) == "__C__USE_CONCEPT__KEY_key__ID_id__C__"
    assert core.utils.encode_concept(("k + y", "id")) == "__C__KEY_k000y__ID_id__C__"

    concept = Concept("foo").init_key()
    assert core.utils.encode_concept(concept) == "__C__KEY_foo__ID_00None00__C__"
    concept.metadata.id = "1001"
    assert core.utils.encode_concept(concept) == "__C__KEY_foo__ID_1001__C__"


def test_decode_concept_key_id():
    """decode_concept is the inverse: yields (key, id, use_concept) triples."""
    assert core.utils.decode_concept("__C__KEY_key__ID_id__C__") == ("key", "id", False)
    assert core.utils.decode_concept("__C__KEY_00None00__ID_id__C__") == (None, "id", False)
    assert core.utils.decode_concept("__C__KEY_key__ID_00None00__C__") == ("key", None, False)
    assert core.utils.decode_concept("__C__USE_CONCEPT__KEY_key__ID_id__C__") == ("key", "id", True)