Working on #21 : Working on SyaConceptsParser.py
This commit is contained in:
@@ -1,8 +1,10 @@
|
||||
import inspect
|
||||
from contextlib import contextmanager
|
||||
|
||||
import pytest
|
||||
|
||||
from helpers import GetNextId
|
||||
from parsers.tokenizer import Token
|
||||
from server.authentication import User
|
||||
|
||||
DEFAULT_ONTOLOGY_NAME = "current_test_"
|
||||
@@ -95,3 +97,18 @@ class NewOntology:
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
    """Context-manager exit: undo the ontology set up by this NewOntology block.

    Reverts via the ontology manager using the context/ontology captured on
    enter.  Returns False so any exception raised inside the ``with`` body
    propagates to the caller instead of being swallowed.
    """
    # NOTE(review): assumes revert_ontology is safe to call even when the
    # body raised partway through setup — confirm against OntologyManager.
    self.sheerka.om.revert_ontology(self.context, self.ontology)
    return False
|
||||
|
||||
|
||||
def simple_token_compare(a, b):
    """Value-based token equality: two tokens match when both their
    ``type`` and ``value`` attributes compare equal (identity ignored)."""
    return (a.type, a.value) == (b.type, b.value)
|
||||
|
||||
|
||||
@contextmanager
def comparable_tokens():
    """Temporarily replace ``Token`` equality with value-based comparison.

    Inside the ``with`` block, ``Token.__eq__``/``__ne__`` compare by
    ``type`` and ``value`` (via ``simple_token_compare``) instead of the
    class's own semantics, so token lists can be asserted with ``==``.

    Fix: the originals are now restored in a ``finally`` clause.  The
    previous version restored them after a bare ``yield``, so a failing
    assertion inside the block left ``Token`` permanently monkey-patched
    and silently changed the behavior of every subsequent test.
    """
    original_eq = Token.__eq__
    original_ne = Token.__ne__
    setattr(Token, "__eq__", simple_token_compare)
    setattr(Token, "__ne__", lambda a, b: not simple_token_compare(a, b))
    try:
        yield
    finally:
        # Always undo the patch, even when the body raises.
        setattr(Token, "__eq__", original_eq)
        setattr(Token, "__ne__", original_ne)
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from conftest import NewOntology
|
||||
from conftest import NewOntology, comparable_tokens
|
||||
from evaluators.base_evaluator import MultipleChoices
|
||||
from helpers import get_concept, get_concepts, get_parser_input
|
||||
from helpers import _mt, get_concept, get_concepts, get_parser_input
|
||||
from parsers.SyaConceptsParser import SyaConceptsParser
|
||||
from parsers.tokenizer import Tokenizer
|
||||
|
||||
|
||||
class TestSyaConceptsParser(BaseTest):
|
||||
@@ -13,13 +14,28 @@ class TestSyaConceptsParser(BaseTest):
|
||||
def parser(self):
    """Return a fresh SyaConceptsParser for each test.

    NOTE(review): presumably decorated with ``@pytest.fixture`` — the
    decorator sits above the visible hunk; confirm in the full file.
    """
    return SyaConceptsParser()
|
||||
|
||||
# Each case pairs a concept key with the expected (literal_source, variable_count)
# segments: the literal text between __var__N placeholders and how many
# variables precede/surround it.
@pytest.mark.parametrize("concept_key, expected_list", [
    # No placeholders: one literal segment, zero variables.
    ["a long token name", [("a long token name", 0)]],
    # Only placeholders: empty literal, three variables.
    ["__var__0 __var__1 __var__2", [("", 3)]],
    # Variables followed by a literal suffix.
    ["__var__0 __var__1 prefixed", [(" prefixed", 2)]],
    # Literal prefix followed by variables.
    ["suffixed __var__0 __var__1", [("suffixed ", 0), ["", 2]]],
    # Literal between two variable groups.
    ["__var__0 __var__1 infixed __var__0 __var__1", [(" infixed ", 2), ["", 2]]],
    # Mixed if/then/end template with interleaved variables.
    ["if __var__0 __var__1 then __var__2 end", [("if ", 0), (" then ", 2), (" end", 1)]]
])
def test_i_can_initialize_expected_parameters(self, parser, concept_key, expected_list):
    """_get_expected_tokens should split a concept key into
    (token-list, variable-count) pairs matching the tokenized literals."""
    # Tokenize each expected literal the same way the parser would.
    resolved_expected_list = [(list(Tokenizer(source, yield_eof=False)), nb) for source, nb in expected_list]
    actual = parser._get_expected_tokens(concept_key)

    # Tokens only compare equal by (type, value) inside this context.
    with comparable_tokens():
        assert actual == resolved_expected_list
|
||||
|
||||
def test_i_can_parse_a_simple_case(self, context, parser):
    """End-to-end: register an 'a plus b' concept, parse '1 plus 2', and
    check the parser returns exactly one matching choice with no errors.

    Fix: the visible text carried diff residue — both the old and new
    versions of the ``with NewOntology(...)`` statement and of the
    ``expected`` assignment were present.  Only the new-side lines
    (``NewOntology(context, ...)`` and the ``_mt(...)`` expectation) are
    kept; the stale duplicates are removed.
    """
    with NewOntology(context, "test_i_can_parse_a_simple_case"):
        # Register a single two-variable concept: "<a> plus <b>".
        get_concepts(context, get_concept("a plus b", variables=["a", "b"]), use_sheerka=True)

        pi = get_parser_input("1 plus 2")
        res = parser.parse(context, pi)

        # One match: variables capture the surrounding text including
        # whitespace ("1 " / " 2").
        # NOTE(review): "1001" is presumably the id assigned to the concept
        # registered above — confirm against get_concepts/_mt.
        expected = [_mt("1001", a="1 ", b=" 2")]
        assert res == MultipleChoices([expected])
        assert not parser.error_sink
|
||||
|
||||
Reference in New Issue
Block a user