Give the threading API PEP 8 names

Switch multiprocessing over to the PEP 8 spellings that threading now provides: current_thread(), is_alive(), get_name()/set_name(), is_daemon()/set_daemon() and notify_all() replace currentThread(), isAlive(), getName()/setName(), isDaemon()/setDaemon() and notifyAll(). On Python 2 the dummy wrappers keep re-exporting the Thread methods through im_func; on Python 3 the attributes are assigned directly.
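A rough sketch of the aliasing idiom the dummy/__init__.py hunk below relies on (the _Legacy and Modern classes are hypothetical stand-ins, not part of this change): on Python 2 an attribute looked up on a class is an unbound method, so its underlying function has to be pulled out via im_func before it can be re-bound under a snake_case name in a subclass, while on Python 3 class attributes are plain functions and can be assigned as-is.

    import sys

    class _Legacy(object):
        # Stand-in for a pre-PEP 8 class such as the old threading.Thread.
        def isAlive(self):
            return True

    class Modern(_Legacy):
        # Re-expose the camelCase method under a PEP 8 name, the same way
        # multiprocessing.dummy aliases the threading.Thread methods below.
        if sys.version_info < (3, 0):
            # Python 2: _Legacy.isAlive is an unbound method; im_func yields
            # the raw function so it can be re-bound under a new name here.
            is_alive = _Legacy.isAlive.im_func
        else:
            # Python 3: class attributes are plain functions, so direct
            # assignment is all that is needed.
            is_alive = _Legacy.isAlive

    assert Modern().is_alive()
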
diff --git a/Lib/multiprocessing/dummy/__init__.py b/Lib/multiprocessing/dummy/__init__.py
index e35ccee..cabf580 100644
--- a/Lib/multiprocessing/dummy/__init__.py
+++ b/Lib/multiprocessing/dummy/__init__.py
@@ -48,24 +48,24 @@
         threading.Thread.start(self)
 
     def get_exitcode(self):
-        if self._start_called and not self.isAlive():
+        if self._start_called and not self.is_alive():
             return 0
         else:
             return None
 
     # XXX
     if sys.version_info < (3, 0):
-        is_alive = threading.Thread.isAlive.im_func
-        get_name = threading.Thread.getName.im_func
-        set_name = threading.Thread.setName.im_func
-        is_daemon = threading.Thread.isDaemon.im_func
-        set_daemon = threading.Thread.setDaemon.im_func
+        is_alive = threading.Thread.is_alive.im_func
+        get_name = threading.Thread.get_name.im_func
+        set_name = threading.Thread.set_name.im_func
+        is_daemon = threading.Thread.is_daemon.im_func
+        set_daemon = threading.Thread.set_daemon.im_func
     else:
-        is_alive = threading.Thread.isAlive
-        get_name = threading.Thread.getName
-        set_name = threading.Thread.setName
-        is_daemon = threading.Thread.isDaemon
-        set_daemon = threading.Thread.setDaemon
+        is_alive = threading.Thread.is_alive
+        get_name = threading.Thread.get_name
+        set_name = threading.Thread.set_name
+        is_daemon = threading.Thread.is_daemon
+        set_daemon = threading.Thread.set_daemon
 
 #
 #
@@ -74,22 +74,22 @@
 class Condition(threading._Condition):
     # XXX
     if sys.version_info < (3, 0):
-        notify_all = threading._Condition.notifyAll.im_func
+        notify_all = threading._Condition.notify_all.im_func
     else:
-        notify_all = threading._Condition.notifyAll
+        notify_all = threading._Condition.notify_all
 
 #
 #
 #
 
 Process = DummyProcess
-current_process = threading.currentThread
+current_process = threading.current_thread
 current_process()._children = weakref.WeakKeyDictionary()
 
 def active_children():
     children = current_process()._children
     for p in list(children):
-        if not p.isAlive():
+        if not p.is_alive():
             children.pop(p, None)
     return list(children)
 
diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py
index 4af2107..908c193 100644
--- a/Lib/multiprocessing/managers.py
+++ b/Lib/multiprocessing/managers.py
@@ -169,7 +169,7 @@
                     except (OSError, IOError):
                         continue
                     t = threading.Thread(target=self.handle_request, args=(c,))
-                    t.setDaemon(True)
+                    t.set_daemon(True)
                     t.start()
             except (KeyboardInterrupt, SystemExit):
                 pass
@@ -216,7 +216,7 @@
         Handle requests from the proxies in a particular process/thread
         '''
         util.debug('starting server thread to service %r',
-                   threading.currentThread().getName())
+                   threading.current_thread().get_name())
 
         recv = conn.recv
         send = conn.send
@@ -266,7 +266,7 @@
 
             except EOFError:
                 util.debug('got EOF -- exiting thread serving %r',
-                           threading.currentThread().getName())
+                           threading.current_thread().get_name())
                 sys.exit(0)
 
             except Exception:
@@ -279,7 +279,7 @@
                     send(('#UNSERIALIZABLE', repr(msg)))
             except Exception, e:
                 util.info('exception in thread serving %r',
-                        threading.currentThread().getName())
+                        threading.current_thread().get_name())
                 util.info(' ... message was %r', msg)
                 util.info(' ... exception was %r', e)
                 conn.close()
@@ -401,7 +401,7 @@
         '''
         Spawn a new thread to serve this connection
         '''
-        threading.currentThread().setName(name)
+        threading.current_thread().set_name(name)
         c.send(('#RETURN', None))
         self.serve_client(c)
 
@@ -715,8 +715,8 @@
     def _connect(self):
         util.debug('making connection to manager')
         name = current_process().get_name()
-        if threading.currentThread().getName() != 'MainThread':
-            name += '|' + threading.currentThread().getName()
+        if threading.current_thread().get_name() != 'MainThread':
+            name += '|' + threading.current_thread().get_name()
         conn = self._Client(self._token.address, authkey=self._authkey)
         dispatch(conn, None, 'accept_connection', (name,))
         self._tls.connection = conn
@@ -729,7 +729,7 @@
             conn = self._tls.connection
         except AttributeError:
             util.debug('thread %r does not own a connection',
-                       threading.currentThread().getName())
+                       threading.current_thread().get_name())
             self._connect()
             conn = self._tls.connection
 
@@ -790,7 +790,7 @@
         # the process owns no more references to objects for this manager
         if not idset and hasattr(tls, 'connection'):
             util.debug('thread %r has no more proxies so closing conn',
-                       threading.currentThread().getName())
+                       threading.current_thread().get_name())
             tls.connection.close()
             del tls.connection
             
@@ -969,13 +969,13 @@
 
 class ConditionProxy(AcquirerProxy):
     # XXX will Condition.notfyAll() name be available in Py3.0?
-    _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notifyAll')
+    _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
     def wait(self, timeout=None):
         return self._callmethod('wait', (timeout,))
     def notify(self):
         return self._callmethod('notify')
     def notify_all(self):
-        return self._callmethod('notifyAll')
+        return self._callmethod('notify_all')
 
 class EventProxy(BaseProxy):
     # XXX will Event.isSet name be available in Py3.0?
diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py
index b455893..0255c86 100644
--- a/Lib/multiprocessing/pool.py
+++ b/Lib/multiprocessing/pool.py
@@ -107,7 +107,7 @@
             target=Pool._handle_tasks,
             args=(self._taskqueue, self._quick_put, self._outqueue, self._pool)
             )
-        self._task_handler.setDaemon(True)
+        self._task_handler.set_daemon(True)
         self._task_handler._state = RUN
         self._task_handler.start()
 
@@ -115,7 +115,7 @@
             target=Pool._handle_results,
             args=(self._outqueue, self._quick_get, self._cache)
             )
-        self._result_handler.setDaemon(True)
+        self._result_handler.set_daemon(True)
         self._result_handler._state = RUN
         self._result_handler.start()
 
@@ -213,7 +213,7 @@
 
     @staticmethod
     def _handle_tasks(taskqueue, put, outqueue, pool):
-        thread = threading.currentThread()
+        thread = threading.current_thread()
 
         for taskseq, set_length in iter(taskqueue.get, None):
             i = -1
@@ -252,7 +252,7 @@
 
     @staticmethod
     def _handle_results(outqueue, get, cache):
-        thread = threading.currentThread()
+        thread = threading.current_thread()
 
         while 1:
             try:
@@ -346,7 +346,7 @@
         # task_handler may be blocked trying to put items on inqueue
         debug('removing tasks from inqueue until task handler finished')
         inqueue._rlock.acquire()
-        while task_handler.isAlive() and inqueue._reader.poll():
+        while task_handler.is_alive() and inqueue._reader.poll():
             inqueue._reader.recv()
             time.sleep(0)
 
@@ -362,7 +362,7 @@
         debug('helping task handler/workers to finish')
         cls._help_stuff_finish(inqueue, task_handler, len(pool))
 
-        assert result_handler.isAlive() or len(cache) == 0
+        assert result_handler.is_alive() or len(cache) == 0
         
         result_handler._state = TERMINATE
         outqueue.put(None)                  # sentinel
@@ -591,6 +591,6 @@
         try:
             inqueue.queue.clear()
             inqueue.queue.extend([None] * size)
-            inqueue.not_empty.notifyAll()
+            inqueue.not_empty.notify_all()
         finally:
             inqueue.not_empty.release()
diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py
index 07d5aa9..78cb362 100644
--- a/Lib/multiprocessing/queues.py
+++ b/Lib/multiprocessing/queues.py
@@ -155,7 +155,7 @@
                   self._wlock, self._writer.close),
             name='QueueFeederThread'
             )
-        self._thread.setDaemon(True)
+        self._thread.set_daemon(True)
 
         debug('doing self._thread.start()')
         self._thread.start()
diff --git a/Lib/multiprocessing/reduction.py b/Lib/multiprocessing/reduction.py
index aa77075..0d6cf4f 100644
--- a/Lib/multiprocessing/reduction.py
+++ b/Lib/multiprocessing/reduction.py
@@ -84,7 +84,7 @@
                 debug('starting listener and thread for sending handles')
                 _listener = Listener(authkey=current_process().get_authkey())
                 t = threading.Thread(target=_serve)
-                t.setDaemon(True)
+                t.set_daemon(True)
                 t.start()
         finally:
             _lock.release()
diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py
index aa09857..d642032 100644
--- a/Lib/multiprocessing/synchronize.py
+++ b/Lib/multiprocessing/synchronize.py
@@ -109,8 +109,8 @@
         try:
             if self._semlock._is_mine():
                 name = current_process().get_name()
-                if threading.currentThread().getName() != 'MainThread':
-                    name += '|' + threading.currentThread().getName()
+                if threading.current_thread().get_name() != 'MainThread':
+                    name += '|' + threading.current_thread().get_name()
             elif self._semlock._get_value() == 1:
                 name = 'None'
             elif self._semlock._count() > 0:
@@ -134,8 +134,8 @@
         try:
             if self._semlock._is_mine():
                 name = current_process().get_name()
-                if threading.currentThread().getName() != 'MainThread':
-                    name += '|' + threading.currentThread().getName()
+                if threading.current_thread().get_name() != 'MainThread':
+                    name += '|' + threading.current_thread().get_name()
                 count = self._semlock._count()
             elif self._semlock._get_value() == 1:
                 name, count = 'None', 0