Added first version of DebugManager. Implemented a draft of the rule engine.
@@ -4,7 +4,7 @@ from core.tokenizer import Tokenizer, Token, TokenKind, LexerError
 
 def test_i_can_tokenize():
     source = "+*-/{}[]() ,;:.?\n\n\r\r\r\nidentifier_0\t \t10.15 10 'string\n' \"another string\"=|&<>c:name:"
-    source += "$£€!_identifier°~_^\\`==#__var__10r/regex\nregex/"
+    source += "$£€!_identifier°~_^\\`==#__var__10r/regex\nregex/r:xxx|1:"
     tokens = list(Tokenizer(source))
     assert tokens[0] == Token(TokenKind.PLUS, "+", 0, 1, 1)
     assert tokens[1] == Token(TokenKind.STAR, "*", 1, 1, 2)
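The equality checks above fix the shape of Token: a kind, a lexeme value, and three integers. Only .type and .value are accessed by name elsewhere in these tests, so the names of the integer fields are an assumption; reading them as start offset, line, and column (1-based) is consistent with every asserted value. A minimal sketch under that assumption:

# Hypothetical reconstruction of the Token shape these assertions rely on;
# only .type and .value are confirmed by the tests, and the integer field
# names (offset, line, column) are inferred from the asserted values.
from dataclasses import dataclass
from enum import Enum, auto
from typing import Any

class TokenKind(Enum):
    PLUS = auto()
    STAR = auto()
    # ... HASH, VAR_DEF, REGEX, RULE, CONCEPT, EOF and the rest omitted

@dataclass(frozen=True)
class Token:
    type: TokenKind
    value: Any      # str for most kinds, a (key, id) tuple for RULE tokens
    offset: int     # absolute character offset into the source
    line: int       # 1-based line number
    column: int     # 1-based column number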
@@ -56,12 +56,15 @@ def test_i_can_tokenize():
     assert tokens[47] == Token(TokenKind.HASH, '#', 111, 6, 54)
     assert tokens[48] == Token(TokenKind.VAR_DEF, '__var__10', 112, 6, 55)
     assert tokens[49] == Token(TokenKind.REGEX, '/regex\nregex/', 121, 6, 64)
+    assert tokens[50] == Token(TokenKind.RULE, ("xxx", "1"), 135, 7, 7)
+
-    assert tokens[50] == Token(TokenKind.EOF, '', 135, 7, 7)
+    assert tokens[51] == Token(TokenKind.EOF, '', 143, 7, 15)
 
 
 @pytest.mark.parametrize("text, expected", [
     ("_ident", True),
     ("__ident", True),
     ("___ident", True),
     ("ident", True),
     ("ident123", True),
     ("ident_123", True),
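The reworked EOF assertion follows directly from the appended rule literal: "r:xxx|1:" is 8 characters, so EOF moves from offset 135 to 143 and from column 7 to 15 on the same line, while the new RULE token takes over the position EOF used to occupy. A quick check of that arithmetic:

# Sanity check of the offset shift asserted in the hunk above.
rule_literal = "r:xxx|1:"
assert len(rule_literal) == 8
assert 135 + len(rule_literal) == 143   # old EOF offset -> new EOF offset
assert 7 + len(rule_literal) == 15      # old EOF column -> new EOF column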
@@ -168,3 +171,29 @@ def test_i_can_parse_concept_token(text, expected):
 
     assert tokens[0].type == TokenKind.CONCEPT
     assert tokens[0].value == expected
+
+
+@pytest.mark.parametrize("text, expected", [
+    ("r:key:", ("key", None)),
+    ("r:key|id:", ("key", "id")),
+    ("r:key|:", ("key", None)),
+    ("r:|id:", (None, "id")),
+    ("r:125:", ("125", None)),
+])
+def test_i_can_parse_rule_token(text, expected):
+    tokens = list(Tokenizer(text))
+
+    assert tokens[0].type == TokenKind.RULE
+    assert tokens[0].value == expected
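The parametrized cases encode a small grammar for rule literals: r:<key>|<id>:, where either part may be empty and an empty part becomes None. The tokenizer's implementation is not part of this diff; a minimal sketch of scanning logic that would satisfy these cases, with the helper name and structure hypothetical:

# Hypothetical helper, not taken from the diff: turns a rule literal such
# as "r:key|id:" into the (key, id) tuple the RULE assertions expect.
def scan_rule_literal(literal: str) -> tuple:
    body = literal[2:-1]                  # strip leading "r:" and trailing ":"
    key, _, rule_id = body.partition("|")
    return (key or None, rule_id or None)

assert scan_rule_literal("r:key|id:") == ("key", "id")
assert scan_rule_literal("r:|id:") == (None, "id")
assert scan_rule_literal("r:125:") == ("125", None)

The hunk then continues with the regex-literal test: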
+@pytest.mark.parametrize("text, expected", [
+    ("r|regex|", "|regex|"),
+    ("r/regex/", "/regex/"),
+    ("r'regex'", "'regex'"),
+    ('r"regex"', '"regex"'),
+])
+def test_i_can_parse_regex_token(text, expected):
+    tokens = list(Tokenizer(text))
+
+    assert tokens[0].type == TokenKind.REGEX
+    assert tokens[0].value == expected
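These cases imply that after the "r" prefix the next character acts as the delimiter, that any of |, /, ', " works, and that the token value keeps both delimiters; the earlier '/regex\nregex/' assertion additionally shows that newlines are allowed inside. A hedged sketch of matching scanning logic (helper name hypothetical):

# Hypothetical helper, not taken from the diff: the character after "r"
# is the delimiter, and the returned value keeps both delimiters.
def scan_regex_literal(literal: str) -> str:
    delimiter = literal[1]                # character right after the "r"
    end = literal.index(delimiter, 2)     # matching closing delimiter
    return literal[1:end + 1]             # value includes both delimiters

assert scan_regex_literal("r|regex|") == "|regex|"
assert scan_regex_literal('r"regex"') == '"regex"'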