
Commit e9c50b0

Allow 0-prefixed underscore literals

Signed-off-by: Pablo Galindo <[email protected]>
1 parent 2b99bf7 commit e9c50b0

2 files changed, +34 −1 lines changed
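
In practice, the change makes the tokenize module accept 0-prefixed number literals (including ones with underscores) that the compiler itself still rejects. A minimal sketch of the resulting behavior, using the literals asserted in the tests below:

# Minimal sketch of the behavior after this commit: the tokenize module
# emits a single NUMBER token for 0-prefixed underscore literals, while
# compile() still rejects them as invalid decimal literals.
import tokenize
from io import StringIO

for src in ("0_7", "09_99"):
    toks = list(tokenize.generate_tokens(StringIO(src).readline))
    assert toks[0].type == tokenize.NUMBER and toks[0].string == src
    try:
        compile(src, "<demo>", "eval")
    except SyntaxError as exc:
        print(f"{src!r}: NUMBER token ok; compile() raises: {exc.msg}")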

Lib/test/test_tokenize.py (+33)
@@ -284,7 +284,12 @@ def number_token(s):
                 # this won't work with compound complex inputs
                 continue
             self.assertEqual(number_token(lit), lit)
+        # Valid cases with extra underscores in the tokenize module
+        # See gh-105549 for context
+        extra_valid_cases = {"0_7", "09_99"}
         for lit in INVALID_UNDERSCORE_LITERALS:
+            if lit in extra_valid_cases:
+                continue
             try:
                 number_token(lit)
             except TokenError:
@@ -1873,6 +1878,34 @@ def test_indentation_semantics_retained(self):
         self.check_roundtrip(code)


+class InvalidPythonTests(TestCase):
+    def test_number_followed_by_name(self):
+        # See issue gh-105549
+        source = "2sin(x)"
+        expected_tokens = [
+            TokenInfo(type=token.NUMBER, string='2', start=(1, 0), end=(1, 1), line='2sin(x)\n'),
+            TokenInfo(type=token.NAME, string='sin', start=(1, 1), end=(1, 4), line='2sin(x)\n'),
+            TokenInfo(type=token.OP, string='(', start=(1, 4), end=(1, 5), line='2sin(x)\n'),
+            TokenInfo(type=token.NAME, string='x', start=(1, 5), end=(1, 6), line='2sin(x)\n'),
+            TokenInfo(type=token.OP, string=')', start=(1, 6), end=(1, 7), line='2sin(x)\n'),
+            TokenInfo(type=token.NEWLINE, string='', start=(1, 7), end=(1, 8), line='2sin(x)\n'),
+            TokenInfo(type=token.ENDMARKER, string='', start=(2, 0), end=(2, 0), line='')
+        ]
+
+        tokens = list(generate_tokens(StringIO(source).readline))
+        self.assertEqual(tokens, expected_tokens)
+
+    def test_number_starting_with_zero(self):
+        source = "01234"
+        expected_tokens = [
+            TokenInfo(type=token.NUMBER, string='01234', start=(1, 0), end=(1, 5), line='01234\n'),
+            TokenInfo(type=token.NEWLINE, string='', start=(1, 5), end=(1, 6), line='01234\n'),
+            TokenInfo(type=token.ENDMARKER, string='', start=(2, 0), end=(2, 0), line='')
+        ]
+
+        tokens = list(generate_tokens(StringIO(source).readline))
+        self.assertEqual(tokens, expected_tokens)
+
 class CTokenizeTest(TestCase):
     def check_tokenize(self, s, expected):
         # Format the tokens in s in a table format.
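
One motivation for letting these literals through on the tokenize path is round-tripping: tokenize.untokenize() is expected to reconstruct whatever text was tokenized, even when that text is not compilable Python. A small sketch of that property (my example, not part of the commit):

# Sketch (not part of the commit): generate_tokens/untokenize should now
# round-trip a 0-prefixed literal instead of failing mid-stream.
import tokenize
from io import StringIO

src = "01234\n"
toks = list(tokenize.generate_tokens(StringIO(src).readline))
assert tokenize.untokenize(toks) == src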

Parser/tokenizer.c (+1 −1)
@@ -2325,7 +2325,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
            else if (c == 'j' || c == 'J') {
                goto imaginary;
            }
-           else if (nonzero) {
+           else if (nonzero && !tok->tok_extra_tokens) {
                /* Old-style octal: now disallowed. */
                tok_backup(tok, c);
                return MAKE_TOKEN(syntaxerror_known_range(
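
tok_extra_tokens is set when the C tokenizer runs on behalf of the tokenize module. With the added `&& !tok->tok_extra_tokens` guard, the `nonzero` case ("old-style octal", e.g. a leading-zero literal) only raises a syntax error on the compiler path; the tokenize path falls through and emits the digits as a NUMBER token. A hedged sketch of the resulting asymmetry (the 0777 literal is my illustration, not from the commit):

# Sketch: an old-style octal literal now tokenizes as one NUMBER token,
# but the compiler path (tok_extra_tokens unset) still raises SyntaxError.
import tokenize
from io import StringIO

tok = next(tokenize.generate_tokens(StringIO("0777").readline))
print(tok.string)  # '0777'

try:
    compile("0777", "<demo>", "eval")
except SyntaxError as exc:
    print("compiler still rejects it:", exc.msg)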
