Skip to content

Commit d1705da

Browse files
authored
Update Lib/test/test_tokenize.py
1 parent 19a58c5 commit d1705da

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

Lib/test/test_tokenize.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1174,7 +1174,7 @@ def readline():
 1174 1174
 1175 1175          # skip the initial encoding token and the end tokens
 1176 1176          tokens = list(_tokenize(readline(), encoding='utf-8'))[:-2]
 1177      -       expected_tokens = [TokenInfo(3, '"ЉЊЈЁЂ"', (1, 0), (1, 7), '"ЉЊЈЁЂ"\n')]
      1177 +       expected_tokens = [TokenInfo(3, '"ЉЊЈЁЂ"', (1, 0), (1, 7), '"ЉЊЈЁЂ"')]
 1178 1178          self.assertEqual(tokens, expected_tokens,
 1179 1179                           "bytes not decoded with encoding")
 1180 1180

0 commit comments

Comments (0)