Tokenizer exceptions are not caught

This commit is contained in:
2019-12-31 18:28:04 +01:00
parent 197b0700fa
commit adcbc6bb2e
12 changed files with 131 additions and 39 deletions
+20 -2
View File
@@ -5,12 +5,11 @@ from core.builtin_concepts import ParserResultConcept, BuiltinConcepts, ReturnVa
from core.sheerka import Sheerka, ExecutionContext
from parsers.ConceptLexerParser import OrderedChoice, StrMatch, ConceptMatch
from parsers.PythonParser import PythonParser, PythonNode
from core.tokenizer import Keywords, Tokenizer
from core.tokenizer import Keywords, Tokenizer, LexerError
from parsers.DefaultParser import DefaultParser, NameNode, SyntaxErrorNode, CannotHandleErrorNode, IsaConceptNode
from parsers.DefaultParser import UnexpectedTokenErrorNode, DefConceptNode
from parsers.BnfParser import BnfParser
from sdp.sheerkaDataProvider import Event
@@ -321,3 +320,22 @@ def test_i_cannot_parse_invalid_entries(text):
assert not res.status
assert isinstance(res.body, ParserResultConcept)
assert isinstance(res.body.body[0], UnexpectedTokenErrorNode)
@pytest.mark.parametrize("text, error_msg, error_text", [
    ("'name", "Missing Trailing quote", "'name"),
    ("foo isa 'name", "Missing Trailing quote", "'name"),
    ("def concept 'name", "Missing Trailing quote", "'name"),
    ("def concept name as 'body", "Missing Trailing quote", "'body"),
    ("def concept name from bnf 'expression", "Missing Trailing quote", "'expression"),
    ("def concept c::", "Concept name not found", ""),
])
def test_i_cannot_parse_when_tokenizer_fails(text, error_msg, error_text):
    """A tokenizer failure must surface as a LexerError node inside the
    parser result rather than propagating as an uncaught exception."""
    result = DefaultParser().parse(get_context(), text)
    # Parsing must report failure, wrapped in the standard result concept.
    assert not result.status
    assert isinstance(result.body, ParserResultConcept)
    # The first body entry carries the lexer's error details.
    lexer_error = result.body.body[0]
    assert isinstance(lexer_error, LexerError)
    assert lexer_error.message == error_msg
    assert lexer_error.text == error_text