Check the unicode identifier directly instead of converting
it to an 8-bit string first.
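
For context, a minimal sketch (not part of the patch) of the behavior the
new test exercises: a __slots__ entry that is not a valid identifier is
rejected with TypeError at class creation time, using the same slot name
as the added test and Python 2 era syntax to match the test file.

    # Illustration only, not part of the patch.
    try:
        class C(object):
            __slots__ = ["foo\u1234bar"]   # not a valid identifier
    except TypeError:
        print "rejected as expected"
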
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index f20f676..fca0061 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -1085,6 +1085,13 @@
         raise TestFailed, "['foo\\0bar'] slots not caught"
     try:
         class C(object):
+            __slots__ = ["foo\u1234bar"]
+    except TypeError:
+        pass
+    else:
+        raise TestFailed, "['foo\\u1234bar'] slots not caught"
+    try:
+        class C(object):
             __slots__ = ["1"]
     except TypeError:
         pass