[3.10] bpo-46240: Correct the error for unclosed parentheses when the tokenizer is not finished (GH-30378). (GH-30819)
(cherry picked from commit 70f415fb8b632247e28d87998642317ca7a652ae)
Co-authored-by: Pablo Galindo Salgado <Pablogsal@gmail.com>
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 802dc9a..606e685 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -227,7 +227,7 @@
check('x = "a', 1, 5)
check('lambda x: x = 2', 1, 1)
check('f{a + b + c}', 1, 2)
- check('[file for str(file) in []\n])', 1, 11)
+ check('[file for str(file) in []\n]', 1, 11)
check('a = « hello » « world »', 1, 5)
check('[\nfile\nfor str(file)\nin\n[]\n]', 3, 5)
check('[file for\n str(file) in []]', 2, 2)
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index b5bebb3..7aa93a0 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -1513,6 +1513,9 @@
for paren in "([{":
self._check_error(paren + "1 + 2", f"\\{paren}' was never closed")
+for paren in "([{":
+    self._check_error(f"a = {paren} 1, 2, 3\nb=3", f"\\{paren}' was never closed")
+
for paren in ")]}":
self._check_error(paren + "1 + 2", f"unmatched '\\{paren}'")
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-01-03-23-31-25.bpo-46240.8lGjeK.rst b/Misc/NEWS.d/next/Core and Builtins/2022-01-03-23-31-25.bpo-46240.8lGjeK.rst
new file mode 100644
index 0000000..a7702eb
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2022-01-03-23-31-25.bpo-46240.8lGjeK.rst
@@ -0,0 +1,3 @@
+Correct the error message for unclosed parentheses when the tokenizer
+doesn't reach the end of the source by the time the error is reported.
+Patch by Pablo Galindo.
diff --git a/Parser/pegen.c b/Parser/pegen.c
index f9812c0..26143f5 100644
--- a/Parser/pegen.c
+++ b/Parser/pegen.c
@@ -1342,7 +1342,8 @@
if (PyErr_Occurred()) {
// Prioritize tokenizer errors to custom syntax errors raised
// on the second phase only if the errors come from the parser.
- if (p->tok->done == E_DONE && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
+ int is_tok_ok = (p->tok->done == E_DONE || p->tok->done == E_OK);
+ if (is_tok_ok && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
_PyPegen_check_tokenizer_errors(p);
}
return NULL;