(Merge 3.4) Issue #22599: Enhance tokenize.open() so that it can be called
during Python finalization.
Previously, the module kept a reference to the builtins module, but module
attributes are cleared during Python finalization. Instead, keep a direct
reference to the open() function.
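
For illustration only, a minimal sketch of the pattern (not the tokenize
source itself; read_bytes is a hypothetical helper):

    # Bind the builtin at import time: the alias lives in this module's
    # namespace, so it remains usable even after the attributes of the
    # builtins module have been cleared during interpreter finalization.
    _builtin_open = open

    def read_bytes(filename):
        # Use the captured reference instead of looking up builtins.open,
        # which may already be gone at shutdown.
        with _builtin_open(filename, 'rb') as fp:
            return fp.read()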
This enhancement is not perfect: tokenize.open() can still fail if called very
late during Python finalization. Usually, the function is called by the
linecache module, which is invoked to display a traceback or emit a warning.
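
The call chain is easy to reproduce outside of finalization: linecache reads
source files through tokenize.open() so that PEP 263 encoding declarations
are honoured. A minimal demonstration, assuming a standard CPython layout
where tokenize.__file__ points at the pure-Python source:

    import linecache
    import tokenize

    # linecache opens source files with tokenize.open(), so printing a
    # traceback at shutdown ends up calling tokenize.open() indirectly.
    print(linecache.getline(tokenize.__file__, 1))
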
diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index e3b28bd..6bd6fa6 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -6,6 +6,8 @@
import re
from test.support import run_unittest, Error, captured_output
from test.support import TESTFN, unlink, cpython_only
+from test.script_helper import assert_python_ok
+import textwrap
import traceback
@@ -169,6 +171,37 @@
        # Issue #18960: coding spec should have no effect
do_test("0\n# coding: GBK\n", "h\xe9 ho", 'utf-8', 5)
+ def test_print_traceback_at_exit(self):
+ # Issue #22599: Ensure that it is possible to use the traceback module
+ # to display an exception at Python exit
+ code = textwrap.dedent("""
+ import sys
+ import traceback
+
+ class PrintExceptionAtExit(object):
+ def __init__(self):
+ try:
+ x = 1 / 0
+ except Exception:
+ self.exc_info = sys.exc_info()
+            # self.exc_info[2] (traceback) contains frames:
+ # explicitly clear the reference to self in the current
+ # frame to break a reference cycle
+ self = None
+
+ def __del__(self):
+ traceback.print_exception(*self.exc_info)
+
+ # Keep a reference in the module namespace to call the destructor
+ # when the module is unloaded
+ obj = PrintExceptionAtExit()
+ """)
+ rc, stdout, stderr = assert_python_ok('-c', code)
+ expected = [b'Traceback (most recent call last):',
+ b' File "<string>", line 8, in __init__',
+ b'ZeroDivisionError: division by zero']
+ self.assertEqual(stderr.splitlines(), expected)
+
class TracebackFormatTests(unittest.TestCase):
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 742abd1..51da484 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -24,7 +24,6 @@
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
'Michael Foord')
-import builtins
from codecs import lookup, BOM_UTF8
import collections
from io import TextIOWrapper
@@ -431,11 +430,13 @@
return default, [first, second]
+_builtin_open = open
+
def open(filename):
"""Open a file in read only mode using the encoding detected by
detect_encoding().
"""
- buffer = builtins.open(filename, 'rb')
+ buffer = _builtin_open(filename, 'rb')
encoding, lines = detect_encoding(buffer.readline)
buffer.seek(0)
text = TextIOWrapper(buffer, encoding, line_buffering=True)
@@ -658,7 +659,7 @@
# Tokenize the input
if args.filename:
filename = args.filename
- with builtins.open(filename, 'rb') as f:
+ with _builtin_open(filename, 'rb') as f:
tokens = list(tokenize(f.readline))
else:
filename = "<stdin>"