Skip to content

Commit 2f8c22f

Browse files
[3.12] gh-105042: Disable unmatched parens syntax error in python tokenize (GH-105061) (#105120)
gh-105042: Disable unmatched parens syntax error in python tokenize (GH-105061). (Cherry picked from commit 70f315c.) Co-authored-by: Lysandros Nikolaou <[email protected]>
1 parent 4729100 commit 2f8c22f

File tree

5 files changed

+49
-34
lines changed

5 files changed

+49
-34
lines changed

Lib/test/inspect_fodder.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -113,3 +113,8 @@ async def asyncf(self):
113113
# after asyncf - line 113
114114
# end of WhichComments - line 114
115115
# after WhichComments - line 115
116+
117+
# Test that getsource works on a line that includes
118+
# a closing parenthesis with the opening paren being in another line
119+
(
120+
); after_closing = lambda: 1

Lib/test/test_inspect.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -557,7 +557,8 @@ def test_getclasses(self):
557557

558558
def test_getfunctions(self):
559559
functions = inspect.getmembers(mod, inspect.isfunction)
560-
self.assertEqual(functions, [('eggs', mod.eggs),
560+
self.assertEqual(functions, [('after_closing', mod.after_closing),
561+
('eggs', mod.eggs),
561562
('lobbest', mod.lobbest),
562563
('spam', mod.spam)])
563564

@@ -641,6 +642,7 @@ def test_getsource(self):
641642
self.assertSourceEqual(git.abuse, 29, 39)
642643
self.assertSourceEqual(mod.StupidGit, 21, 51)
643644
self.assertSourceEqual(mod.lobbest, 75, 76)
645+
self.assertSourceEqual(mod.after_closing, 120, 120)
644646

645647
def test_getsourcefile(self):
646648
self.assertEqual(normcase(inspect.getsourcefile(mod.spam)), modfile)

Lib/test/test_tokenize.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1100,6 +1100,13 @@ def test_newline_after_parenthesized_block_with_comment(self):
11001100
NEWLINE '\\n' (4, 1) (4, 2)
11011101
""")
11021102

1103+
def test_closing_parenthesis_from_different_line(self):
1104+
self.check_tokenize("); x", """\
1105+
OP ')' (1, 0) (1, 1)
1106+
OP ';' (1, 1) (1, 2)
1107+
NAME 'x' (1, 3) (1, 4)
1108+
""")
1109+
11031110
class GenerateTokensTest(TokenizeTest):
11041111
def check_tokenize(self, s, expected):
11051112
# Format the tokens in s in a table format.

Parser/tokenizer.c

Lines changed: 33 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -2496,41 +2496,42 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
24962496
case ')':
24972497
case ']':
24982498
case '}':
2499-
if (!tok->level) {
2500-
if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') {
2501-
return MAKE_TOKEN(syntaxerror(tok, "f-string: single '}' is not allowed"));
2502-
}
2499+
if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') {
2500+
return MAKE_TOKEN(syntaxerror(tok, "f-string: single '}' is not allowed"));
2501+
}
2502+
if (!tok->tok_extra_tokens && !tok->level) {
25032503
return MAKE_TOKEN(syntaxerror(tok, "unmatched '%c'", c));
25042504
}
2505-
tok->level--;
2506-
int opening = tok->parenstack[tok->level];
2507-
if (!((opening == '(' && c == ')') ||
2508-
(opening == '[' && c == ']') ||
2509-
(opening == '{' && c == '}')))
2510-
{
2511-
/* If the opening bracket belongs to an f-string's expression
2512-
part (e.g. f"{)}") and the closing bracket is an arbitrary
2513-
nested expression, then instead of matching a different
2514-
syntactical construct with it; we'll throw an unmatched
2515-
parentheses error. */
2516-
if (INSIDE_FSTRING(tok) && opening == '{') {
2517-
assert(current_tok->curly_bracket_depth >= 0);
2518-
int previous_bracket = current_tok->curly_bracket_depth - 1;
2519-
if (previous_bracket == current_tok->curly_bracket_expr_start_depth) {
2520-
return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c));
2505+
if (tok->level > 0) {
2506+
tok->level--;
2507+
int opening = tok->parenstack[tok->level];
2508+
if (!tok->tok_extra_tokens && !((opening == '(' && c == ')') ||
2509+
(opening == '[' && c == ']') ||
2510+
(opening == '{' && c == '}'))) {
2511+
/* If the opening bracket belongs to an f-string's expression
2512+
part (e.g. f"{)}") and the closing bracket is an arbitrary
2513+
nested expression, then instead of matching a different
2514+
syntactical construct with it; we'll throw an unmatched
2515+
parentheses error. */
2516+
if (INSIDE_FSTRING(tok) && opening == '{') {
2517+
assert(current_tok->curly_bracket_depth >= 0);
2518+
int previous_bracket = current_tok->curly_bracket_depth - 1;
2519+
if (previous_bracket == current_tok->curly_bracket_expr_start_depth) {
2520+
return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c));
2521+
}
2522+
}
2523+
if (tok->parenlinenostack[tok->level] != tok->lineno) {
2524+
return MAKE_TOKEN(syntaxerror(tok,
2525+
"closing parenthesis '%c' does not match "
2526+
"opening parenthesis '%c' on line %d",
2527+
c, opening, tok->parenlinenostack[tok->level]));
2528+
}
2529+
else {
2530+
return MAKE_TOKEN(syntaxerror(tok,
2531+
"closing parenthesis '%c' does not match "
2532+
"opening parenthesis '%c'",
2533+
c, opening));
25212534
}
2522-
}
2523-
if (tok->parenlinenostack[tok->level] != tok->lineno) {
2524-
return MAKE_TOKEN(syntaxerror(tok,
2525-
"closing parenthesis '%c' does not match "
2526-
"opening parenthesis '%c' on line %d",
2527-
c, opening, tok->parenlinenostack[tok->level]));
2528-
}
2529-
else {
2530-
return MAKE_TOKEN(syntaxerror(tok,
2531-
"closing parenthesis '%c' does not match "
2532-
"opening parenthesis '%c'",
2533-
c, opening));
25342535
}
25352536
}
25362537

Python/Python-tokenize.c

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ _tokenizer_error(struct tok_state *tok)
8282
msg = "invalid token";
8383
break;
8484
case E_EOF:
85-
if (tok->level) {
85+
if (tok->level > 0) {
8686
PyErr_Format(PyExc_SyntaxError,
8787
"parenthesis '%c' was never closed",
8888
tok->parenstack[tok->level-1]);

0 commit comments

Comments (0)