Tokenizer exceptions are not caught
This commit is contained in:
@@ -2,7 +2,7 @@ import logging
|
||||
|
||||
from core.builtin_concepts import ReturnValueConcept, BuiltinConcepts
|
||||
from parsers.BaseParser import BaseParser
|
||||
from core.tokenizer import Tokenizer, Keywords, TokenKind
|
||||
from core.tokenizer import Tokenizer, Keywords, TokenKind, LexerError
|
||||
from core.concept import VARIABLE_PREFIX
|
||||
|
||||
|
||||
@@ -27,7 +27,12 @@ class ExactConceptParser(BaseParser):
|
||||
context.log(self.verbose_log, f"Parsing '{text}'", self.name)
|
||||
res = []
|
||||
sheerka = context.sheerka
|
||||
words = self.get_words(text)
|
||||
try:
|
||||
words = self.get_words(text)
|
||||
except LexerError as e:
|
||||
context.log(self.verbose_log, f"Error found in tokenizer {e}", self.name)
|
||||
return sheerka.ret(self.name, False, sheerka.new(BuiltinConcepts.ERROR, body=e))
|
||||
|
||||
if len(words) > self.MAX_WORDS_SIZE:
|
||||
context.log(self.verbose_log, f"Max words reached. Stopping.", self.name)
|
||||
return sheerka.ret(self.name, False, sheerka.new(BuiltinConcepts.CONCEPT_TOO_LONG, body=text))
|
||||
|
||||
Reference in New Issue
Block a user