Check the unicode identifier directly instead of converting
it to an 8-bit string first.
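
For illustration only (not part of the patch), the intended behavior
mirrors the test added below: a slot name containing a character outside
the accepted identifier set is now rejected with a TypeError when the
class is created, along the lines of:

    >>> class C(object):
    ...     __slots__ = ["foo\u1234bar"]
    ...
    Traceback (most recent call last):
      ...
    TypeError: __slots__ must be identifiers
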
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index f20f676..fca0061 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -1085,6 +1085,13 @@
         raise TestFailed, "['foo\\0bar'] slots not caught"
     try:
         class C(object):
+            __slots__ = ["foo\u1234bar"]
+    except TypeError:
+        pass
+    else:
+        raise TestFailed, "['foo\\u1234bar'] slots not caught"
+    try:
+        class C(object):
             __slots__ = ["1"]
     except TypeError:
         pass
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index bc5fad1..b826eb4 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -1561,7 +1561,7 @@
 static int
 valid_identifier(PyObject *s)
 {
-	unsigned char *p;
+	Py_UNICODE *p;
 	Py_ssize_t i, n;
 
 	if (!PyUnicode_Check(s)) {
@@ -1570,14 +1570,14 @@
 			     s->ob_type->tp_name);
 		return 0;
 	}
-	p = (unsigned char *) PyUnicode_AsString(s);
-	n = strlen((char*)p)/*XXX PyString_GET_SIZE(s)*/;
+	p = PyUnicode_AS_UNICODE(s);
+	n = PyUnicode_GET_SIZE(s);
 	/* We must reject an empty name.  As a hack, we bump the
 	   length to 1 so that the loop will balk on the trailing \0. */
 	if (n == 0)
 		n = 1;
 	for (i = 0; i < n; i++, p++) {
-		if (!(i == 0 ? isalpha(*p) : isalnum(*p)) && *p != '_') {
+		if (*p > 255 || (!(i == 0 ? isalpha(*p) : isalnum(*p)) && *p != '_')) {
 			PyErr_SetString(PyExc_TypeError,
 					"__slots__ must be identifiers");
 			return 0;
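
For readers less at home in the C code, the following is a rough Python
sketch of what valid_identifier() checks after this patch; it is
illustrative only and not part of the change. C's isalpha()/isalnum() are
locale-dependent for the 128-255 range and undefined for wider values,
which is why characters above 255 are rejected before the ctype calls.
The helper name _valid_slot_name is made up here, error messages are
abbreviated, and the sketch raises directly where the C function sets the
exception and returns 0.

    def _valid_slot_name(name):
        # Rough equivalent of valid_identifier() in Objects/typeobject.c
        # after this patch (illustration only).
        if not isinstance(name, str):
            raise TypeError("__slots__ items must be strings")
        if not name:
            # The C code handles the empty name by bumping the length to 1
            # so the loop trips over the trailing NUL character.
            raise TypeError("__slots__ must be identifiers")
        for i, ch in enumerate(name):
            # Reject anything outside the 8-bit range before it would reach
            # isalpha()/isalnum(), then require letter/digit/underscore with
            # no leading digit.
            if ord(ch) > 255 or (ch != '_' and
                                 not (ch.isalpha() if i == 0 else ch.isalnum())):
                raise TypeError("__slots__ must be identifiers")
        return True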