Fixed #9: I can parse 'def concept'
This commit is contained in:
@@ -5,7 +5,7 @@ from parsers.tokenizer import LexerError, Token, TokenKind, Tokenizer
|
||||
|
||||
def test_i_can_tokenize():
|
||||
source = "+*-/{}[]() ,;:.?\n\n\r\r\r\nidentifier_0\t \t10.15 10 'string\n' \"another string\"=|&<>c:name:"
|
||||
source += "$£€!_identifier°~_^\\`==#__var__10r/regex\nregex/r:xxx#1:**//%that's"
|
||||
source += "$£€!_identifier°~_^\\`==#__var__10r/regex\nregex/r:xxx#1:**//%"
|
||||
tokens = list(Tokenizer(source))
|
||||
assert tokens[0] == Token(TokenKind.PLUS, "+", 0, 1, 1)
|
||||
assert tokens[1] == Token(TokenKind.STAR, "*", 1, 1, 2)
|
||||
@@ -61,11 +61,8 @@ def test_i_can_tokenize():
|
||||
assert tokens[51] == Token(TokenKind.STARSTAR, "**", 143, 7, 15)
|
||||
assert tokens[52] == Token(TokenKind.SLASHSLASH, "//", 145, 7, 17)
|
||||
assert tokens[53] == Token(TokenKind.PERCENT, "%", 147, 7, 19)
|
||||
assert tokens[54] == Token(TokenKind.IDENTIFIER, "that", 148, 7, 20)
|
||||
assert tokens[55] == Token(TokenKind.QUOTE, "'", 152, 7, 24)
|
||||
assert tokens[56] == Token(TokenKind.IDENTIFIER, "s", 153, 7, 25)
|
||||
|
||||
assert tokens[57] == Token(TokenKind.EOF, '', 154, 7, 26)
|
||||
assert tokens[54] == Token(TokenKind.EOF, '', 148, 7, 20)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, expected", [
|
||||
@@ -209,3 +206,13 @@ def test_i_can_parse_regex_token(text, expected):
|
||||
assert tokens[0].str_value == "r" + expected
|
||||
assert tokens[0].repr_value == "r" + expected
|
||||
assert tokens[0].strip_quote == expected[1:-1]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text, parse_quote, expected", [
    ("a='hello'", False, ["a", "=", "'hello'"]),
    ("a='hello'", True, ["a", "=", "'", "hello", "'"]),
    ("a= 'hello'", True, ["a", "=", " ", "'hello'"]),
])
def test_i_can_choose_to_parse_quote(text, parse_quote, expected):
    """With parse_quote=True the quoted literal is split into quote/content/quote
    tokens; with False it stays a single token. EOF is suppressed via yield_eof=False."""
    tokenizer = Tokenizer(text, parse_quote=parse_quote, yield_eof=False)
    produced = [token.value for token in tokenizer]
    assert produced == expected
|
||||
|
||||
Reference in New Issue
Block a user