Tokenizer exceptions are not caught

This commit is contained in:
2019-12-31 18:28:04 +01:00
parent 197b0700fa
commit adcbc6bb2e
12 changed files with 131 additions and 39 deletions
+12 -2
View File
@@ -555,7 +555,12 @@ class ConceptLexerParser(BaseParser):
self.text = text
if isinstance(text, str):
self.tokens = list(Tokenizer(text))
try:
self.tokens = list(Tokenizer(text))
except core.tokenizer.LexerError as e:
self.add_error(self.sheerka.new(BuiltinConcepts.ERROR, body=e), False)
return False
else:
self.tokens = list(text)
self.tokens.append(Token(TokenKind.EOF, "", -1, -1, -1)) # make sure to finish with end of file token
@@ -563,6 +568,7 @@ class ConceptLexerParser(BaseParser):
self.token = None
self.pos = -1
self.next_token()
return True
def get_token(self) -> Token:
return self.token
@@ -724,7 +730,11 @@ class ConceptLexerParser(BaseParser):
context.sheerka.new(BuiltinConcepts.IS_EMPTY)
)
self.reset_parser(context, text)
if not self.reset_parser(context, text):
return self.sheerka.ret(
self.name,
False,
context.sheerka.new(BuiltinConcepts.ERROR, body=self.error_sink))
concepts_found = [[]]
unrecognized_tokens = None