Working on #21: Created classes
This commit is contained in:
@@ -3,7 +3,7 @@ from core.ExecutionContext import ContextActions, ExecutionContext
|
||||
from core.ReturnValue import ReturnValue
|
||||
from core.concept import Concept
|
||||
from evaluators.base_evaluator import EvaluatorEvalResult, EvaluatorMatchResult, NotForMe, OneReturnValueEvaluator
|
||||
from parsers.SimpleParserParser import SimpleConceptsParser
|
||||
from parsers.SimpleConceptsParser import SimpleConceptsParser
|
||||
from parsers.state_machine import MetadataToken
|
||||
|
||||
|
||||
|
||||
@@ -7,7 +7,8 @@ from parsers.tokenizer import Token, TokenKind, Tokenizer
|
||||
|
||||
class SimpleConceptsParser:
|
||||
""""
|
||||
This class to parser concepts with no variable
|
||||
This class is to parse concepts with no parameter
|
||||
ex : def concept I am a new concept
|
||||
It parses a sequence of concepts
|
||||
"""
|
||||
|
||||
@@ -0,0 +1,42 @@
|
||||
from parsers.state_machine import End, ManageUnrecognized, PrepareReadTokens, ReadConcept, ReadTokens, Start, \
|
||||
StateMachine, StateMachineContext
|
||||
from parsers.tokenizer import Token
|
||||
|
||||
|
||||
class SyaConceptsParser:
    """
    Parse concepts that take parameters.

    ex : def concept a plus b as a + b

    It parses a sequence of concepts by running a token-reading state
    machine that hands off to a concept-reading workflow when a concept
    boundary is found.
    """
    # NOTE: the original docstring opened with four quotes ("""") so its
    # content began with a stray '"' character; fixed to a plain triple quote.

    def __init__(self):
        # States of the token-reading workflow. Declared as a tuple so the
        # declaration order is preserved (the set literal used previously
        # gave no ordering guarantee; the resulting dict below is identical
        # either way since it is keyed on each state's unique name).
        tokens_wkf = (
            Start("start", next_states=["prepare read tokens"]),
            PrepareReadTokens("prepare read tokens", next_states=["read tokens"]),
            ReadTokens("read tokens", next_states=["read tokens", "eof", "concepts found"]),
            ManageUnrecognized("eof", next_states=["end"]),
            # "#concept_wkf" jumps into the concept-reading workflow below.
            ManageUnrecognized("concepts found", next_states=["#concept_wkf"]),
            End("end", next_states=None),
        )

        # States of the concept-reading workflow; it jumps back into the
        # token workflow once a concept has been read.
        concept_wkf = (
            Start("start", next_states=["read concept"]),
            ReadConcept("read concept", next_states=["#tokens_wkf"]),
        )

        # Workflows indexed by the "#name" used in next_states transitions;
        # each workflow maps state name -> state object.
        self.workflows = {
            "#tokens_wkf": {t.name: t for t in tokens_wkf},
            "#concept_wkf": {t.name: t for t in concept_wkf},
        }
        # Collects parsing errors; inspected by callers/tests
        # (e.g. `assert not parser.error_sink`).
        self.error_sink = []

    @staticmethod
    def get_metadata_from_first_token(context, token: Token):
        # TODO: not implemented yet — currently returns None.
        pass

    def parse(self, context, parser_input):
        """
        Run the state machine over *parser_input*.

        :param context: execution context forwarded to the workflow states
        :param parser_input: tokenized input to parse
        :return: None for now — TODO: return the parse result once the
            state machine produces one
        """
        sm = StateMachine(self.workflows)
        sm_context = StateMachineContext(context, parser_input, self.get_metadata_from_first_token)
        sm.run("#tokens_wkf", "start", sm_context)
|
||||
@@ -61,7 +61,7 @@ class ConceptToRecognize:
|
||||
Holds information about the concept to recognize
|
||||
"""
|
||||
metadata: ConceptMetadata
|
||||
expected_tokens: list
|
||||
expected_tokens: Any
|
||||
resolution_method: Literal["name", "key", "id"] # which attribute was used to resolve the concept
|
||||
|
||||
|
||||
|
||||
+12
-5
@@ -1,3 +1,5 @@
|
||||
from typing import Literal
|
||||
|
||||
from common.global_symbols import NotInit
|
||||
from common.utils import unstr_concept
|
||||
from core.ExecutionContext import ExecutionContext
|
||||
@@ -492,7 +494,12 @@ def _ut(buffer, start=0, end=-1):
|
||||
return UnrecognizedToken(buffer, start, end)
|
||||
|
||||
|
||||
def _mt(concept_id,
        start=0,
        end=-1,
        resolution_method: Literal["name", "key", "id"] = "id",
        parser="simple",
        **kwargs):
    """
    Helper that builds a MetadataToken for *concept_id*.

    :param concept_id: concept identifier; may embed a name — unstr_concept
        splits it into (name, _id), with _id None when only a raw id was given
    :param start: start offset of the token in the input buffer
    :param end: end offset of the token (-1 means "to the end")
    :param resolution_method: which attribute was used to resolve the concept
    :param parser: name of the parser that produced the token
    :param kwargs: optional concept variables, forwarded as (key, value) pairs
    :return: a MetadataToken wrapping the resolved concept metadata
    """
    name, _id = unstr_concept(concept_id)
    # Variables are forwarded as (key, value) pairs; None when none were given.
    variables = [(k, v) for k, v in kwargs.items()] if kwargs else None
    metadata = get_metadata(id=concept_id, variables=variables) if _id is None \
        else get_metadata(id=_id, name=name, variables=variables)
    return MetadataToken(metadata, start, end, resolution_method, parser)
|
||||
|
||||
@@ -4,7 +4,7 @@ from base import BaseTest
|
||||
from conftest import NewOntology
|
||||
from evaluators.base_evaluator import MultipleChoices
|
||||
from helpers import _mt, _ut, get_concepts, get_from, get_metadata, get_parser_input
|
||||
from parsers.SimpleParserParser import SimpleConceptsParser
|
||||
from parsers.SimpleConceptsParser import SimpleConceptsParser
|
||||
|
||||
|
||||
class TestSimpleConceptsParser(BaseTest):
|
||||
|
||||
@@ -0,0 +1,25 @@
|
||||
import pytest
|
||||
|
||||
from base import BaseTest
|
||||
from conftest import NewOntology
|
||||
from evaluators.base_evaluator import MultipleChoices
|
||||
from helpers import get_concept, get_concepts, get_parser_input
|
||||
from parsers.SyaConceptsParser import SyaConceptsParser
|
||||
|
||||
|
||||
class TestSyaConceptsParser(BaseTest):

    @pytest.fixture()
    def parser(self):
        """Provide a fresh SyaConceptsParser instance for each test."""
        return SyaConceptsParser()

    def test_i_can_parse_a_simple_case(self, context, parser):
        """A parameterized concept ("a plus b") is recognized in "1 plus 2"."""
        with NewOntology("test_i_can_parse_a_simple_case"):
            get_concepts(context, get_concept("a plus b", variables=["a", "b"]), use_sheerka=True)

            parser_input = get_parser_input("1 plus 2")
            result = parser.parse(context, parser_input)

            expected_tokens = []
            assert result == MultipleChoices([expected_tokens])
            assert not parser.error_sink
|
||||
Reference in New Issue
Block a user