Issue #23840: tokenize.open() now closes the temporary binary file on error to
fix a resource warning.
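
For context (not part of the patch), a minimal sketch of the symptom under
Python 3: a bogus coding cookie makes detect_encoding() raise SyntaxError
inside tokenize.open(), and before this change the internal binary buffer
was left open, so its finalizer emitted a ResourceWarning. The scratch file
name below is hypothetical:

    import gc
    import os
    import tokenize
    import warnings

    with open('bad_coding.py', 'w') as f:
        f.write('#coding:xxx\n')          # bogus cookie -> SyntaxError

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always', ResourceWarning)
        try:
            tokenize.open('bad_coding.py')
        except SyntaxError:
            pass
        gc.collect()                      # finalize a leaked buffer, if any

    # Unpatched: one "unclosed file" ResourceWarning; patched: none.
    print([str(w.message) for w in caught])
    os.remove('bad_coding.py')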
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 4a8be3b..9842207 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -646,7 +646,7 @@
                      STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
                      open as tokenize_open, Untokenizer)
 from io import BytesIO
-from unittest import TestCase
+from unittest import TestCase, mock
 import os, sys, glob
 import token
 
@@ -1058,6 +1058,13 @@
             ins = Bunk(lines, path)
             detect_encoding(ins.readline)
 
+    def test_open_error(self):
+        # Issue #23840: open() must close the binary file on error
+        m = BytesIO(b'#coding:xxx')
+        with mock.patch('tokenize._builtin_open', return_value=m):
+            self.assertRaises(SyntaxError, tokenize_open, 'foobar')
+        self.assertTrue(m.closed)
+
 
 class TestTokenize(TestCase):
 
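The test works by substituting the file object: tokenize.open() obtains its
binary stream from tokenize._builtin_open, so patching that name with a
BytesIO lets closed-ness be observed without touching the filesystem. A
standalone rendering of the same check (the filename argument is arbitrary,
since the mock ignores it):

    import io
    import tokenize
    from unittest import mock

    m = io.BytesIO(b'#coding:xxx')    # bogus cookie -> SyntaxError
    with mock.patch('tokenize._builtin_open', return_value=m):
        try:
            tokenize.open('foobar')   # name is never used to hit the disk
        except SyntaxError:
            pass
    assert m.closed                   # holds once this patch is applied
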
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index ed4153c..cf18bf9 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -435,11 +435,15 @@
     detect_encoding().
     """
     buffer = _builtin_open(filename, 'rb')
-    encoding, lines = detect_encoding(buffer.readline)
-    buffer.seek(0)
-    text = TextIOWrapper(buffer, encoding, line_buffering=True)
-    text.mode = 'r'
-    return text
+    try:
+        encoding, lines = detect_encoding(buffer.readline)
+        buffer.seek(0)
+        text = TextIOWrapper(buffer, encoding, line_buffering=True)
+        text.mode = 'r'
+        return text
+    except:
+        buffer.close()
+        raise
 
 
 def tokenize(readline):
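
The fix follows the usual acquire/guard shape: a with-statement would not
work here because the binary buffer must stay open on success (it is wrapped
by TextIOWrapper and returned), so cleanup runs only on the failure path.
The same pattern in isolation, as a sketch with hypothetical names rather
than library code:

    import io

    def open_text(filename, detect):
        f = open(filename, 'rb')                   # acquire
        try:
            encoding, _lines = detect(f.readline)  # may raise
            f.seek(0)
            return io.TextIOWrapper(f, encoding, line_buffering=True)
        except:          # bare except, as in the patch: even
            f.close()    # KeyboardInterrupt must not leak the file
            raise        # re-raise unchanged
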
diff --git a/Misc/NEWS b/Misc/NEWS
index 74c9114..5d93c29 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -59,6 +59,9 @@
 Library
 -------
 
+- Issue #23840: tokenize.open() now closes the temporary binary file on error
+  to fix a resource warning.
+
 - Issue #24257: Fixed segmentation fault in sqlite3.Row constructor with faked
   cursor type.