diff --git a/src/evaluators/RecognizeSimpleConcept.py b/src/evaluators/RecognizeSimpleConcept.py
index fb4d92a..0f68de4 100644
--- a/src/evaluators/RecognizeSimpleConcept.py
+++ b/src/evaluators/RecognizeSimpleConcept.py
@@ -3,7 +3,7 @@ from core.ExecutionContext import ContextActions, ExecutionContext
 from core.ReturnValue import ReturnValue
 from core.concept import Concept
 from evaluators.base_evaluator import EvaluatorEvalResult, EvaluatorMatchResult, NotForMe, OneReturnValueEvaluator
-from parsers.SimpleParserParser import SimpleConceptsParser
+from parsers.SimpleConceptsParser import SimpleConceptsParser
 from parsers.state_machine import MetadataToken
 
 
diff --git a/src/parsers/SimpleParserParser.py b/src/parsers/SimpleConceptsParser.py
similarity index 97%
rename from src/parsers/SimpleParserParser.py
rename to src/parsers/SimpleConceptsParser.py
index 296331b..e123803 100644
--- a/src/parsers/SimpleParserParser.py
+++ b/src/parsers/SimpleConceptsParser.py
@@ -7,7 +7,8 @@ from parsers.tokenizer import Token, TokenKind, Tokenizer
 
 
 class SimpleConceptsParser:
     """"
-    This class to parser concepts with no variable
+    This class is to parse concepts with no parameter
+    ex : def concept I am a new concept
     It parses a sequence of concepts
     """
diff --git a/src/parsers/SyaConceptsParser.py b/src/parsers/SyaConceptsParser.py
new file mode 100644
index 0000000..99b9309
--- /dev/null
+++ b/src/parsers/SyaConceptsParser.py
@@ -0,0 +1,42 @@
+from parsers.state_machine import End, ManageUnrecognized, PrepareReadTokens, ReadConcept, ReadTokens, Start, \
+    StateMachine, StateMachineContext
+from parsers.tokenizer import Token
+
+
+class SyaConceptsParser:
+    """"
+    This class is to parse concepts with parameter
+    ex : def concept a plus b as a + b
+    It parses a sequence of concepts
+    """
+    def __init__(self):
+        tokens_wkf = {
+            Start("start", next_states=["prepare read tokens"]),
+            PrepareReadTokens("prepare read tokens", next_states=["read tokens"]),
+            ReadTokens("read tokens",
+                       next_states=["read tokens", "eof", "concepts found"]),
+            ManageUnrecognized("eof", next_states=["end"]),
+            ManageUnrecognized("concepts found", next_states=["#concept_wkf"]),
+            End("end", next_states=None)
+        }
+
+        concept_wkf = {
+            Start("start", next_states=["read concept"]),
+            ReadConcept("read concept", next_states=["#tokens_wkf"]),
+        }
+
+        self.workflows = {
+            "#tokens_wkf": {t.name: t for t in tokens_wkf},
+            "#concept_wkf": {t.name: t for t in concept_wkf},
+        }
+        self.error_sink = []
+
+    @staticmethod
+    def get_metadata_from_first_token(context, token: Token):
+        pass
+
+    def parse(self, context, parser_input):
+        sm = StateMachine(self.workflows)
+        sm_context = StateMachineContext(context, parser_input, self.get_metadata_from_first_token)
+        sm.run("#tokens_wkf", "start", sm_context)
+        pass
diff --git a/src/parsers/state_machine.py b/src/parsers/state_machine.py
index fcec99e..b92cd95 100644
--- a/src/parsers/state_machine.py
+++ b/src/parsers/state_machine.py
@@ -61,7 +61,7 @@ class ConceptToRecognize:
     Holds information about the concept to recognize
     """
     metadata: ConceptMetadata
-    expected_tokens: list
+    expected_tokens: Any
     resolution_method: Literal["name", "key", "id"]  # which attribute was used to resolve the concept
 
 
diff --git a/tests/helpers.py b/tests/helpers.py
index 053b692..32d1f2a 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -1,3 +1,5 @@
+from typing import Literal
+
 from common.global_symbols import NotInit
 from common.utils import unstr_concept
 from core.ExecutionContext import ExecutionContext
@@ -492,7 +494,12 @@ def _ut(buffer, start=0, end=-1):
     return UnrecognizedToken(buffer, start, end)
 
 
-def _mt(concept_id, start=0, end=-1, resolution_method="id", parser="simple"):
+def _mt(concept_id,
+        start=0,
+        end=-1,
+        resolution_method: Literal["name", "key", "id"] = "id",
+        parser="simple",
+        **kwargs):
     """
     helper to MetadataToken
     :param concept_id:
@@ -505,7 +512,7 @@ def _mt(concept_id, start=0, end=-1, resolution_method="id", parser="simple"):
     :rtype:
     """
     name, _id = unstr_concept(concept_id)
-    if _id is None:
-        return MetadataToken(get_metadata(id=concept_id), start, end, resolution_method, parser)
-    else:
-        return MetadataToken(get_metadata(id=_id, name=name), start, end, resolution_method, parser)
+    variables = [(k, v) for k, v in kwargs.items()] if kwargs else None
+    metadata = get_metadata(id=concept_id, variables=variables) if _id is None \
+        else get_metadata(id=_id, name=name, variables=variables)
+    return MetadataToken(metadata, start, end, resolution_method, parser)
diff --git a/tests/parsers/test_SimpleConceptsParser.py b/tests/parsers/test_SimpleConceptsParser.py
index 1a6d82b..b69a428 100644
--- a/tests/parsers/test_SimpleConceptsParser.py
+++ b/tests/parsers/test_SimpleConceptsParser.py
@@ -4,7 +4,7 @@ from base import BaseTest
 from conftest import NewOntology
 from evaluators.base_evaluator import MultipleChoices
 from helpers import _mt, _ut, get_concepts, get_from, get_metadata, get_parser_input
-from parsers.SimpleParserParser import SimpleConceptsParser
+from parsers.SimpleConceptsParser import SimpleConceptsParser
 
 
 class TestSimpleConceptsParser(BaseTest):
diff --git a/tests/parsers/test_SyaConceptsParser.py b/tests/parsers/test_SyaConceptsParser.py
new file mode 100644
index 0000000..262a7a6
--- /dev/null
+++ b/tests/parsers/test_SyaConceptsParser.py
@@ -0,0 +1,25 @@
+import pytest
+
+from base import BaseTest
+from conftest import NewOntology
+from evaluators.base_evaluator import MultipleChoices
+from helpers import get_concept, get_concepts, get_parser_input
+from parsers.SyaConceptsParser import SyaConceptsParser
+
+
+class TestSyaConceptsParser(BaseTest):
+
+    @pytest.fixture()
+    def parser(self):
+        return SyaConceptsParser()
+
+    def test_i_can_parse_a_simple_case(self, context, parser):
+        with NewOntology("test_i_can_parse_a_simple_case"):
+            get_concepts(context, get_concept("a plus b", variables=["a", "b"]), use_sheerka=True)
+
+            pi = get_parser_input("1 plus 2")
+            res = parser.parse(context, pi)
+
+            expected = []
+            assert res == MultipleChoices([expected])
+            assert not parser.error_sink