Issue #23840: tokenize.open() now closes the underlying binary file on error to
fix a resource warning (a reproduction sketch follows the diff).
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index ed4153c..cf18bf9 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -435,11 +435,15 @@
     detect_encoding().
     """
     buffer = _builtin_open(filename, 'rb')
-    encoding, lines = detect_encoding(buffer.readline)
-    buffer.seek(0)
-    text = TextIOWrapper(buffer, encoding, line_buffering=True)
-    text.mode = 'r'
-    return text
+    try:
+        encoding, lines = detect_encoding(buffer.readline)
+        buffer.seek(0)
+        text = TextIOWrapper(buffer, encoding, line_buffering=True)
+        text.mode = 'r'
+        return text
+    except:
+        buffer.close()
+        raise
 
 
 def tokenize(readline):
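
The following is a minimal reproduction sketch, not part of the patch itself; the helper name,
the in-memory buffer, and the use of mock.patch are illustrative assumptions. A coding cookie
naming an unknown encoding makes detect_encoding() raise SyntaxError inside tokenize.open()
before the TextIOWrapper is created; without the try/except added above, the binary buffer is
leaked and a ResourceWarning is emitted when it is garbage-collected. With the fix, the buffer
is closed before the exception propagates.

import tokenize
from io import BytesIO
from unittest import mock

def check_open_closes_buffer_on_error():
    # An in-memory "file" whose coding cookie names an unknown encoding.
    buffer = BytesIO(b'# -*- coding: no-such-encoding -*-\n')
    # Substitute the module-level _builtin_open so tokenize.open() reads
    # from the in-memory buffer instead of touching the filesystem.
    with mock.patch('tokenize._builtin_open', return_value=buffer):
        try:
            tokenize.open('dummy-filename')
        except SyntaxError:
            pass  # expected: unknown coding cookie
    # With the patch applied, the except clause closed the buffer.
    assert buffer.closed, 'binary buffer leaked on error'

if __name__ == '__main__':
    check_open_closes_buffer_on_error()
    print('ok: buffer closed on error')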