Tokenizer exceptions are not caught

This commit is contained in:
2019-12-31 18:28:04 +01:00
parent 197b0700fa
commit adcbc6bb2e
12 changed files with 131 additions and 39 deletions
+7 -2
View File
@@ -183,6 +183,7 @@ class DefaultParser(BaseParser):
self._current = next(self.lexer_iter)
except StopIteration:
self._current = None
return
def parse(self, context, text):
@@ -195,8 +196,12 @@ class DefaultParser(BaseParser):
self.log_result(context, text, ret)
return ret
self.reset_parser(context, text)
tree = self.parse_statement()
tree = None
try:
self.reset_parser(context, text)
tree = self.parse_statement()
except core.tokenizer.LexerError as e:
self.add_error(e, False)
# If an error is found it must be sent to error_sink
# tree must contain what was recognized