Tokenizer exceptions are not caught
This commit is contained in:
@@ -183,6 +183,7 @@ class DefaultParser(BaseParser):
|
||||
self._current = next(self.lexer_iter)
|
||||
except StopIteration:
|
||||
self._current = None
|
||||
|
||||
return
|
||||
|
||||
def parse(self, context, text):
|
||||
@@ -195,8 +196,12 @@ class DefaultParser(BaseParser):
|
||||
self.log_result(context, text, ret)
|
||||
return ret
|
||||
|
||||
self.reset_parser(context, text)
|
||||
tree = self.parse_statement()
|
||||
tree = None
|
||||
try:
|
||||
self.reset_parser(context, text)
|
||||
tree = self.parse_statement()
|
||||
except core.tokenizer.LexerError as e:
|
||||
self.add_error(e, False)
|
||||
|
||||
# If an error is found it must be sent to error_sink
|
||||
# tree must contain what was recognized
|
||||
|
||||
Reference in New Issue
Block a user