Commit dbe8857

bpo-42864: Improve error messages regarding unclosed parentheses
1 parent e5fe509 commit dbe8857

6 files changed, +77 −8 lines changed

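For context, a minimal sketch of the user-visible change (the snippet and its input are illustrative, not taken from the commit, and assume an interpreter built with this change):

    # Previously, an unclosed bracket at end of input reported only
    #     SyntaxError: unexpected EOF while parsing
    # With this commit, the error names the bracket and points at where it was opened.
    try:
        compile("values = [1, 2, 3", "<demo>", "exec")
    except SyntaxError as exc:
        print(exc.msg)     # '[' was never closed
        print(exc.lineno)  # 1 -- the line where the '[' was opened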

Lib/test/test_codeop.py

Lines changed: 0 additions & 1 deletion

@@ -160,7 +160,6 @@ def test_incomplete(self):
         ai("","eval")
         ai("\n","eval")
         ai("(","eval")
-        ai("(\n\n\n","eval")
         ai("(9+","eval")
         ai("9+ \\","eval")
         ai("lambda z: \\","eval")

Lib/test/test_grammar.py

Lines changed: 1 addition & 1 deletion

@@ -260,7 +260,7 @@ def test_eof_error(self):
         for s in samples:
             with self.assertRaises(SyntaxError) as cm:
                 compile(s, "<test>", "exec")
-            self.assertIn("unexpected EOF", str(cm.exception))
+            self.assertIn("was never closed", str(cm.exception))
 
 var_annot_global: int # a global annotated is necessary for test_var_annot
 

Lib/test/test_pdb.py

Lines changed: 2 additions & 2 deletions

@@ -1649,10 +1649,10 @@ def test_errors_in_command(self):
 
         self.assertEqual(stdout.splitlines()[1:], [
             '-> pass',
-            '(Pdb) *** SyntaxError: unexpected EOF while parsing',
+            '(Pdb) *** SyntaxError: \'(\' was never closed',
 
             '(Pdb) ENTERING RECURSIVE DEBUGGER',
-            '*** SyntaxError: unexpected EOF while parsing',
+            '*** SyntaxError: \'(\' was never closed',
             'LEAVING RECURSIVE DEBUGGER',
 
             '(Pdb) ENTERING RECURSIVE DEBUGGER',

Parser/pegen.c

Lines changed: 69 additions & 3 deletions

@@ -324,7 +324,15 @@ tokenizer_error(Parser *p)
             RAISE_SYNTAX_ERROR("EOL while scanning string literal");
             return -1;
         case E_EOF:
-            RAISE_SYNTAX_ERROR("unexpected EOF while parsing");
+            if (p->tok->level) {
+                RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError,
+                                           p->tok->parenlinenostack[p->tok->level-1],
+                                           p->tok->parencolstack[p->tok->level-1],
+                                           "'%c' was never closed",
+                                           p->tok->parenstack[p->tok->level-1]);
+            } else {
+                RAISE_SYNTAX_ERROR("unexpected EOF while parsing");
+            }
             return -1;
         case E_DEDENT:
             RAISE_INDENTATION_ERROR("unindent does not match any outer indentation level");

@@ -1151,6 +1159,53 @@ reset_parser_state(Parser *p)
     p->call_invalid_rules = 1;
 }
 
+int
+_PyPegen_check_tokenizer_errors(Parser *p) {
+    // Tokenize the whole input to see if there are any tokenization
+    // errors such as mismatching parentheses. These will get priority
+    // over generic syntax errors only if the line number of the error is
+    // before the one that we had for the generic error.
+
+    // We don't want to tokenize to the end for interactive input
+    if (p->tok->prompt != NULL) {
+        return 0;
+    }
+
+    const char *start;
+    const char *end;
+    int type;
+
+    Token *current_token = p->known_err_token != NULL ? p->known_err_token : p->tokens[p->fill - 1];
+    Py_ssize_t current_err_line = current_token->lineno;
+
+    // Save the tokenizer buffers to restore them later in case we found nothing
+    struct tok_state saved_tok;
+    memcpy(&saved_tok, p->tok, sizeof(struct tok_state));
+
+    while (1) {
+        type = PyTokenizer_Get(p->tok, &start, &end);
+        if (type == ERRORTOKEN) {
+            if (p->tok->level != 0) {
+                int error_lineno = p->tok->parenlinenostack[p->tok->level-1];
+                int error_col = p->tok->parencolstack[p->tok->level-1];
+                if (current_err_line > error_lineno) {
+                    RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError,
+                                               error_lineno, error_col,
+                                               "'%c' was never closed",
+                                               p->tok->parenstack[p->tok->level-1]);
+                    return -1;
+                }
+            }
+            break;
+        }
+        if (type == ENDMARKER) {
+            break;
+        }
+    }
+    memcpy(p->tok, &saved_tok, sizeof(struct tok_state));
+    return 0;
+}
+
 void *
 _PyPegen_run_parser(Parser *p)
 {

@@ -1164,8 +1219,16 @@ _PyPegen_run_parser(Parser *p)
         if (p->fill == 0) {
             RAISE_SYNTAX_ERROR("error at start before reading any input");
         }
-        else if (p->tok->done == E_EOF) {
-            RAISE_SYNTAX_ERROR("unexpected EOF while parsing");
+        else if (p->tok->done == E_EOF) {
+            if (p->tok->level) {
+                RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError,
+                                           p->tok->parenlinenostack[p->tok->level-1],
+                                           p->tok->parencolstack[p->tok->level-1],
+                                           "'%c' was never closed",
+                                           p->tok->parenstack[p->tok->level-1]);
+            } else {
+                RAISE_SYNTAX_ERROR("unexpected EOF while parsing");
+            }
         }
         else {
             if (p->tokens[p->fill-1]->type == INDENT) {

@@ -1175,6 +1238,9 @@ _PyPegen_run_parser(Parser *p)
                 RAISE_INDENTATION_ERROR("unexpected unindent");
             }
             else {
+                if (_PyPegen_check_tokenizer_errors(p)) {
+                    return NULL;
+                }
                 RAISE_SYNTAX_ERROR("invalid syntax");
             }
         }
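The comments in _PyPegen_check_tokenizer_errors above describe the priority rule: a pending unclosed bracket only overrides the generic "invalid syntax" report when the bracket was opened on an earlier line than the generic error. A rough illustration (the source string is made up for this example and assumes a build with this change):

    # The '(' is opened on line 1 and never closed; the parser's generic
    # error would fall on line 2, so the unclosed-bracket report wins and
    # the location points back at line 1.
    try:
        compile("foo = (1, 2\nbar = 3 3\n", "<demo>", "exec")
    except SyntaxError as exc:
        print(exc.msg, exc.lineno)  # expected: '(' was never closed 1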

Parser/tokenizer.c

Lines changed: 4 additions & 1 deletion

@@ -64,7 +64,6 @@ tok_new(void)
     tok->tabsize = TABSIZE;
     tok->indent = 0;
     tok->indstack[0] = 0;
-
     tok->atbol = 1;
     tok->pendin = 0;
     tok->prompt = tok->nextprompt = NULL;

@@ -1396,6 +1395,9 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
 
     /* Check for EOF and errors now */
     if (c == EOF) {
+        if (tok->level) {
+            return ERRORTOKEN;
+        }
         return tok->done == E_EOF ? ENDMARKER : ERRORTOKEN;
     }
 

@@ -1818,6 +1820,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
         }
         tok->parenstack[tok->level] = c;
         tok->parenlinenostack[tok->level] = tok->lineno;
+        tok->parencolstack[tok->level] = tok->start - tok->line_start;
         tok->level++;
         break;
     case ')':
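The new parencolstack entry written in tok_get records the column of each opening bracket, which is what lets the SyntaxError point at the bracket itself rather than at the end of the file. A small probe (illustrative; exact offset values are not specified by the commit):

    try:
        compile("def f(a,\n      b\n", "<demo>", "exec")
    except SyntaxError as exc:
        print(exc.msg)                 # '(' was never closed
        print(exc.lineno, exc.offset)  # location of the opening '(' on line 1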

Parser/tokenizer.h

Lines changed: 1 addition & 0 deletions

@@ -45,6 +45,7 @@ struct tok_state {
             /* Used to allow free continuations inside them */
     char parenstack[MAXLEVEL];
     int parenlinenostack[MAXLEVEL];
+    int parencolstack[MAXLEVEL];
     PyObject *filename;
     /* Stuff for checking on different tab sizes */
     int altindstack[MAXINDENT];         /* Stack of alternate indents */
