Tokenizer exceptions are not caught

This commit is contained in:
2019-12-31 18:28:04 +01:00
parent 197b0700fa
commit adcbc6bb2e
12 changed files with 131 additions and 39 deletions
+7 -2
View File
@@ -2,7 +2,7 @@ import logging
from core.builtin_concepts import ReturnValueConcept, BuiltinConcepts
from parsers.BaseParser import BaseParser
from core.tokenizer import Tokenizer, Keywords, TokenKind
from core.tokenizer import Tokenizer, Keywords, TokenKind, LexerError
from core.concept import VARIABLE_PREFIX
@@ -27,7 +27,12 @@ class ExactConceptParser(BaseParser):
context.log(self.verbose_log, f"Parsing '{text}'", self.name)
res = []
sheerka = context.sheerka
words = self.get_words(text)
try:
words = self.get_words(text)
except LexerError as e:
context.log(self.verbose_log, f"Error found in tokenizer {e}", self.name)
return sheerka.ret(self.name, False, sheerka.new(BuiltinConcepts.ERROR, body=e))
if len(words) > self.MAX_WORDS_SIZE:
context.log(self.verbose_log, f"Max words reached. Stopping.", self.name)
return sheerka.ret(self.name, False, sheerka.new(BuiltinConcepts.CONCEPT_TOO_LONG, body=text))