Added basic implementation for where
This commit is contained in:
+27
-17
@@ -266,10 +266,10 @@ class Tokenizer:
|
||||
self.column = 1
|
||||
self.line += 1
|
||||
elif c == "c" and self.i + 1 < self.text_len and self.text[self.i + 1] == ":":
|
||||
concept_name = self.eat_concept_name(self.i + 2, self.line, self.column)
|
||||
yield Token(TokenKind.CONCEPT, concept_name, self.i, self.line, self.column)
|
||||
self.i += len(concept_name) + 3
|
||||
self.column += len(concept_name) + 3
|
||||
name, id, length = self.eat_concept(self.i + 2, self.line, self.column + 2)
|
||||
yield Token(TokenKind.CONCEPT, (name, id), self.i, self.line, self.column)
|
||||
self.i += length + 2
|
||||
self.column += length + 2
|
||||
elif c.isalpha() or c == "_":
|
||||
identifier = self.eat_identifier(self.i)
|
||||
token_type = TokenKind.KEYWORD if identifier in self.KEYWORDS else TokenKind.IDENTIFIER
|
||||
@@ -297,31 +297,41 @@ class Tokenizer:
|
||||
|
||||
yield Token(TokenKind.EOF, "", self.i, self.line, self.column)
|
||||
|
||||
def eat_concept_name(self, start, line, column):
|
||||
result = ""
|
||||
def eat_concept(self, start, line, column):
|
||||
key, id, buffer = None, None, ""
|
||||
i = start
|
||||
end_colon_found = False
|
||||
processing_key = True
|
||||
|
||||
while i < self.text_len:
|
||||
c = self.text[i]
|
||||
|
||||
c = self.text[i]
|
||||
if c == "\n":
|
||||
raise LexerError(f"New line is forbidden in concept name", result, i, line, column + 2 + len(result))
|
||||
raise LexerError(f"New line in concept name", self.text[start:i], i, line, column + i - start)
|
||||
|
||||
if c == ":":
|
||||
end_colon_found = True
|
||||
if processing_key:
|
||||
key = buffer if buffer else None
|
||||
else:
|
||||
id = buffer if buffer else None
|
||||
i += 1 # eat the colon
|
||||
break
|
||||
|
||||
result += c
|
||||
if c == "|":
|
||||
key = buffer if buffer else None
|
||||
buffer = ""
|
||||
processing_key = False
|
||||
i += 1
|
||||
continue
|
||||
|
||||
buffer += c
|
||||
i += 1
|
||||
else:
|
||||
raise LexerError(f"Missing ending colon", self.text[start:i], i, line, column + i - start)
|
||||
|
||||
if not end_colon_found:
|
||||
raise LexerError(f"Missing ending colon", result, i, line, column + 2 + len(result))
|
||||
if (key, id) == (None, None):
|
||||
raise LexerError(f"Concept identifiers not found", "", start, line, column)
|
||||
|
||||
if result == "":
|
||||
raise LexerError(f"Concept name not found", result, start, line, column + 2 + len(result))
|
||||
|
||||
return result
|
||||
return key, id, i - start
|
||||
|
||||
def eat_whitespace(self, start):
|
||||
result = self.text[start]
|
||||
|
||||
Reference in New Issue
Block a user