Issue #25388: Fixed tokenizer hang when processing undecodable source code
with a null byte.
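
For background, the hang was triggered simply by running the interpreter on a source file whose bytes cannot be decoded and which also contains a null byte, which is exactly what the new test below exercises via script_helper. The stand-alone sketch that follows reproduces the same scenario outside the test suite; it is illustrative only (the repro_null_byte name and the use of plain subprocess instead of script_helper are assumptions, not part of the patch). On a fixed interpreter the child process exits promptly with a SyntaxError instead of hanging.

# repro_null_byte.py -- hypothetical stand-alone reproducer, not part of the patch
import os
import subprocess
import sys
import tempfile

def repro_null_byte():
    # Same byte sequence as the new test: a null byte followed by an
    # undecodable \xfd byte, each on its own comment line.
    src = b"#\x00\n#\xfd\n"
    tmpd = tempfile.mkdtemp()
    fn = os.path.join(tmpd, "bad.py")
    try:
        with open(fn, "wb") as fp:
            fp.write(src)
        # Run the file in a child interpreter.  Before the fix the child
        # could hang in the tokenizer; after the fix it exits with a
        # nonzero status and a SyntaxError message on stderr.
        proc = subprocess.Popen([sys.executable, fn],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        out, err = proc.communicate()
        print("exit status: %d" % proc.returncode)
        print("stderr: %r" % err)
        return proc.returncode
    finally:
        # Minimal cleanup; the test itself uses test_support.rmtree().
        if os.path.exists(fn):
            os.unlink(fn)
        os.rmdir(tmpd)

if __name__ == "__main__":
    repro_null_byte()
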
diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py
index cfc6389..c166ff1 100644
--- a/Lib/test/test_compile.py
+++ b/Lib/test/test_compile.py
@@ -3,6 +3,9 @@
import sys
import _ast
from test import test_support
+from test import script_helper
+import os
+import tempfile
import textwrap

class TestSpecifics(unittest.TestCase):
@@ -555,6 +558,19 @@
        ast.body = [_ast.BoolOp()]
        self.assertRaises(TypeError, compile, ast, '<ast>', 'exec')

+    def test_yet_more_evil_still_undecodable(self):
+        # Issue #25388
+        src = b"#\x00\n#\xfd\n"
+        tmpd = tempfile.mkdtemp()
+        try:
+            fn = os.path.join(tmpd, "bad.py")
+            with open(fn, "wb") as fp:
+                fp.write(src)
+            rc, out, err = script_helper.assert_python_failure(fn)
+        finally:
+            test_support.rmtree(tmpd)
+        self.assertIn(b"Non-ASCII", err)
+
class TestStackSize(unittest.TestCase):
    # These tests check that the computed stack size for a code object