Tokenizer exceptions are not caught

This commit is contained in:
2019-12-31 18:28:04 +01:00
parent 197b0700fa
commit adcbc6bb2e
12 changed files with 131 additions and 39 deletions
+20
View File
@@ -416,3 +416,23 @@ def test_eval_does_not_break_valid_result():
assert len(res) == 1
assert res[0].status
assert res[0].body == 3
@pytest.mark.parametrize("text", [
    "'hello",
    '"foo" + "string',
    "c::",
    "c:foo\nbar:",
    "c:foo",
    "def concept 'name",
    "def concept name from bnf 'name"
])
def test_i_can_manage_tokenizer_error(text):
    """Malformed input must yield parser results flagged as failures, not raise."""
    engine = get_sheerka()
    engine.add_in_cache(Concept("foo"))
    results = engine.evaluate_user_input(text)
    assert len(results) > 1
    # Every result produced by a parser must report a failed status.
    parser_results = (r for r in results if r.who.startswith("parsers."))
    for outcome in parser_results:
        assert not outcome.status