@@ -33,12 +33,19 @@ def disallows_uncommon_control_characters():
        "\x07", "Cannot contain the invalid character '\\x07'.", (1, 1)
    )

-# noinspection PyArgumentEqualDefault
def accepts_bom_header():
    token = lex_one("\uFEFF foo")
    assert token == Token(TokenKind.NAME, 2, 5, 1, 3, None, "foo")

-# noinspection PyArgumentEqualDefault
+def tracks_line_breaks():
+    assert lex_one("foo") == Token(TokenKind.NAME, 0, 3, 1, 1, None, "foo")
+    assert lex_one("\nfoo") == Token(TokenKind.NAME, 1, 4, 2, 1, None, "foo")
+    assert lex_one("\rfoo") == Token(TokenKind.NAME, 1, 4, 2, 1, None, "foo")
+    assert lex_one("\r\nfoo") == Token(TokenKind.NAME, 2, 5, 2, 1, None, "foo")
+    assert lex_one("\n\rfoo") == Token(TokenKind.NAME, 2, 5, 3, 1, None, "foo")
+    assert lex_one("\r\r\n\nfoo") == Token(TokenKind.NAME, 4, 7, 4, 1, None, "foo")
+    assert lex_one("\n\n\r\rfoo") == Token(TokenKind.NAME, 4, 7, 5, 1, None, "foo")
+
def records_line_and_column():
    token = lex_one("\n \r\n \r  foo\n")
    assert token == Token(TokenKind.NAME, 8, 11, 4, 3, None, "foo")
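
For reference, lex_one is a small helper defined near the top of this test module. Below is a minimal sketch of what it is assumed to do, using graphql-core's public Lexer/Source API; the positional Token arguments being compared are assumed to be kind, start, end, line, column, prev, value.

# Sketch of the assumed helper, not part of the diff above.
from graphql.language import Lexer, Source


def lex_one(s: str):
    """Return the first token after <SOF> for the given source string."""
    lexer = Lexer(Source(s))
    return lexer.advance()

The new tracks_line_breaks cases pin down the line-counting rules: a lone \n, a lone \r, and the pair \r\n each count as a single line break, while \n\r counts as two, which is why "\r\nfoo" starts on line 2 but "\n\rfoo" starts on line 3.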
@@ -50,7 +57,6 @@ def can_be_stringified_or_pyutils_inspected():
    assert repr(token) == "<Token Name 'foo' 1:1>"
    assert inspect(token) == repr(token)

-# noinspection PyArgumentEqualDefault
def skips_whitespace_and_comments():
    token = lex_one("\n\n    foo\n\n\n")
    assert token == Token(TokenKind.NAME, 6, 9, 3, 5, None, "foo")
@@ -114,6 +120,7 @@ def lexes_empty_string():

# noinspection PyArgumentEqualDefault
def lexes_strings():
+    assert lex_one('""') == Token(TokenKind.STRING, 0, 2, 1, 1, None, "")
    assert lex_one('"simple"') == Token(
        TokenKind.STRING, 0, 8, 1, 1, None, "simple"
    )
@@ -135,6 +142,8 @@ def lexes_strings():

def lex_reports_useful_string_errors():
    assert_syntax_error('"', "Unterminated string.", (1, 2))
+    assert_syntax_error('"""', "Unterminated string.", (1, 4))
+    assert_syntax_error('""""', "Unterminated string.", (1, 5))
    assert_syntax_error('"no end quote', "Unterminated string.", (1, 14))
    assert_syntax_error(
        "'single quotes'",
@@ -175,6 +184,7 @@ def lex_reports_useful_string_errors():

# noinspection PyArgumentEqualDefault
def lexes_block_strings():
+    assert lex_one('""""""') == Token(TokenKind.BLOCK_STRING, 0, 6, 1, 1, None, "")
    assert lex_one('"""simple"""') == Token(
        TokenKind.BLOCK_STRING, 0, 12, 1, 1, None, "simple"
    )
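
Six quotes in a row lex as one empty block string covering offsets 0 to 6 rather than as three empty strings, because the lexer checks for an opening triple quote before falling back to an ordinary string. A quick check of that case, assuming the same public API as in the sketches above:

# Illustration only; assumes graphql-core's public Lexer/Source/TokenKind exports.
from graphql.language import Lexer, Source, TokenKind

token = Lexer(Source('""""""')).advance()
assert token.kind is TokenKind.BLOCK_STRING
assert token.value == ""
assert (token.start, token.end, token.line, token.column) == (0, 6, 1, 1)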
@@ -276,10 +286,22 @@ def lex_reports_useful_number_errors():
    assert_syntax_error(
        "00", "Invalid number, unexpected digit after 0: '0'.", (1, 2)
    )
+    assert_syntax_error(
+        "01", "Invalid number, unexpected digit after 0: '1'.", (1, 2)
+    )
+    assert_syntax_error(
+        "01.23", "Invalid number, unexpected digit after 0: '1'.", (1, 2)
+    )
    assert_syntax_error("+1", "Cannot parse the unexpected character '+'.", (1, 1))
    assert_syntax_error(
        "1.", "Invalid number, expected digit but got: <EOF>.", (1, 3)
    )
+    assert_syntax_error(
+        "1e", "Invalid number, expected digit but got: <EOF>.", (1, 3)
+    )
+    assert_syntax_error(
+        "1E", "Invalid number, expected digit but got: <EOF>.", (1, 3)
+    )
    assert_syntax_error(
        "1.e1", "Invalid number, expected digit but got: 'e'.", (1, 3)
    )
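
The new integer cases pin down two number rules: a leading zero may not be followed by another digit (so "01" and "01.23" both fail at column 2, on the second digit), and an exponent marker must be followed by at least one digit (so "1e" and "1E" fail with <EOF> at column 3, one past the end of the input). The standalone scanner below restates just those checks so the reported columns are easy to trace; it is a simplified sketch for illustration, not graphql-core's implementation.

# Simplified sketch of the number checks exercised above (illustration only).
def scan_number(text: str) -> str:
    pos = 0

    def error(got: str) -> ValueError:
        return ValueError(f"unexpected {got!r} at column {pos + 1}")

    # Integer part: a single 0, or a nonzero digit followed by more digits.
    if pos < len(text) and text[pos] == "0":
        pos += 1
        if pos < len(text) and text[pos].isdigit():
            raise error(text[pos])  # "01", "01.23": no digit may follow a leading 0
    else:
        while pos < len(text) and text[pos].isdigit():
            pos += 1

    # Optional fractional part: "." must be followed by at least one digit.
    if pos < len(text) and text[pos] == ".":
        pos += 1
        if pos == len(text) or not text[pos].isdigit():
            raise error(text[pos] if pos < len(text) else "<EOF>")  # "1."
        while pos < len(text) and text[pos].isdigit():
            pos += 1

    # Optional exponent: "e"/"E", optional sign, then at least one digit.
    if pos < len(text) and text[pos] in "eE":
        pos += 1
        if pos < len(text) and text[pos] in "+-":
            pos += 1
        if pos == len(text) or not text[pos].isdigit():
            raise error(text[pos] if pos < len(text) else "<EOF>")  # "1e", "1E"
        while pos < len(text) and text[pos].isdigit():
            pos += 1

    return text[:pos]

For example, scan_number("01") raises at column 2 and scan_number("1e") raises at column 3, matching the locations asserted above.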
@@ -298,6 +320,15 @@ def lex_reports_useful_number_errors():
    assert_syntax_error(
        "1.0eA", "Invalid number, expected digit but got: 'A'.", (1, 5)
    )
+    assert_syntax_error(
+        "1.2e3e", "Invalid number, expected digit but got: 'e'.", (1, 6)
+    )
+    assert_syntax_error(
+        "1.2e3.4", "Invalid number, expected digit but got: '.'.", (1, 6)
+    )
+    assert_syntax_error(
+        "1.23.4", "Invalid number, expected digit but got: '.'.", (1, 5)
+    )

# noinspection PyArgumentEqualDefault
def lexes_punctuation():
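
The last three additions exercise the rule that a number token may not be immediately followed by a name-start character or a dot; the reported column points at the first offending trailing character (the second 'e' of "1.2e3e" sits at column 6, the second '.' of "1.23.4" at column 5). If this list keeps growing, the same coverage could also be written table-driven. The sketch below uses pytest parametrization together with the assert_syntax_error helper sketched earlier; the surrounding file keeps one plain function per behaviour to mirror graphql-js, so this is only an alternative layout, not the project's style.

# Alternative, table-driven layout (sketch only; relies on the assert_syntax_error sketch above).
from pytest import mark


@mark.parametrize(
    ("text", "message", "location"),
    [
        ("1.2e3e", "Invalid number, expected digit but got: 'e'.", (1, 6)),
        ("1.2e3.4", "Invalid number, expected digit but got: '.'.", (1, 6)),
        ("1.23.4", "Invalid number, expected digit but got: '.'.", (1, 5)),
    ],
)
def test_number_followed_by_invalid_character(text, message, location):
    assert_syntax_error(text, message, location)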