Tokenizer exceptions are not caught
This commit is contained in:
@@ -66,11 +66,11 @@ def test_i_can_tokenize_identifiers(text, expected):
|
||||
('"string', "Missing Trailing quote", '"string', 7, 1, 8),
|
||||
('"a" + "string', "Missing Trailing quote", '"string', 13, 1, 14),
|
||||
('"a"\n\n"string', "Missing Trailing quote", '"string', 12, 3, 8),
|
||||
("c::", "Context name not found", "", 2, 1, 3),
|
||||
("c::", "Concept name not found", "", 2, 1, 3),
|
||||
("c:foo\nbar:", "New line is forbidden in concept name", "foo", 5, 1, 6),
|
||||
("c:foo", "Missing ending colon", "foo", 5, 1, 6)
|
||||
])
|
||||
def test_i_can_detect_unfinished_strings(text, message, error_text, index, line, column):
|
||||
def test_i_can_detect_tokenizer_errors(text, message, error_text, index, line, column):
|
||||
with pytest.raises(LexerError) as e:
|
||||
list(Tokenizer(text))
|
||||
assert e.value.message == message
|
||||
|
||||
Reference in New Issue
Block a user