Branch merge
diff --git a/.hgeol b/.hgeol
index 0b0841b..afb1e6b 100644
--- a/.hgeol
+++ b/.hgeol
@@ -27,9 +27,10 @@
 **.zip  = BIN
 
 Lib/email/test/data/msg_26.txt = BIN
-Lib/test/test_email/data/msg_26.txt = BIN
-Lib/test/sndhdrdata/sndhdr.* = BIN
+Lib/test/cjkencodings/* = BIN
 Lib/test/decimaltestdata/*.decTest = BIN
+Lib/test/sndhdrdata/sndhdr.* = BIN
+Lib/test/test_email/data/msg_26.txt = BIN
 
 # All other files (which presumably are human-editable) are "native".
 # This must be the last rule!
diff --git a/.hgtags b/.hgtags
index 6427e41..c3a2c2c 100644
--- a/.hgtags
+++ b/.hgtags
@@ -85,3 +85,5 @@
 acf3e24dd0d0dfd1e20c907d696d3da965a8f56f v3.2rc2
 18c1f52896501c7ee13b038454a39acb45a87979 v3.2rc3
 a222a015e28d8ae9af3899258dc6c15c3d40add0 v3.2
+8ffac2337a3323323d02153ac919fd1483176652 v3.2.1b1
+cfa9364997c7f2e67b9cbb45c3a5fa3bba4e4999 v3.2.1rc1
diff --git a/Doc/c-api/buffer.rst b/Doc/c-api/buffer.rst
index 5a34bc0..d98ece3 100644
--- a/Doc/c-api/buffer.rst
+++ b/Doc/c-api/buffer.rst
@@ -314,7 +314,7 @@
 .. c:function:: void PyBuffer_FillContiguousStrides(int ndim, Py_ssize_t *shape, Py_ssize_t *strides, Py_ssize_t itemsize, char fortran)
 
    Fill the *strides* array with byte-strides of a contiguous (C-style if
-   *fortran* is ``'C'`` or Fortran-style if *fortran* is ``'F'`` array of the
+   *fortran* is ``'C'`` or Fortran-style if *fortran* is ``'F'``) array of the
    given shape with the given number of bytes per element.
 
 
diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst
index 26e0716..41cdd6b 100644
--- a/Doc/c-api/veryhigh.rst
+++ b/Doc/c-api/veryhigh.rst
@@ -27,18 +27,17 @@
 
 .. c:function:: int Py_Main(int argc, wchar_t **argv)
 
-   The main program for the standard interpreter.  This is made
-   available for programs which embed Python.  The *argc* and *argv*
-   parameters should be prepared exactly as those which are passed to
-   a C program's :c:func:`main` function (converted to wchar_t
-   according to the user's locale).  It is important to note that the
-   argument list may be modified (but the contents of the strings
-   pointed to by the argument list are not). The return value will be
-   the integer passed to the :func:`sys.exit` function, ``1`` if the
-   interpreter exits due to an exception, or ``2`` if the parameter
+   The main program for the standard interpreter.  This is made available for
+   programs which embed Python.  The *argc* and *argv* parameters should be
+   prepared exactly as those which are passed to a C program's :c:func:`main`
+   function (converted to wchar_t according to the user's locale).  It is
+   important to note that the argument list may be modified (but the contents of
+   the strings pointed to by the argument list are not). The return value will
+   be ``0`` if the interpreter exits normally (i.e., without an exception),
+   ``1`` if the interpreter exits due to an exception, or ``2`` if the parameter
    list does not represent a valid Python command line.
 
-   Note that if an otherwise unhandled :exc:`SystemError` is raised, this
+   Note that if an otherwise unhandled :exc:`SystemExit` is raised, this
    function will not return ``1``, but exit the process, as long as
    ``Py_InspectFlag`` is not set.
 
@@ -85,7 +84,7 @@
    there was an error, there is no way to get the exception information. For the
    meaning of *flags*, see below.
 
-   Note that if an otherwise unhandled :exc:`SystemError` is raised, this
+   Note that if an otherwise unhandled :exc:`SystemExit` is raised, this
    function will not return ``-1``, but exit the process, as long as
    ``Py_InspectFlag`` is not set.
 
diff --git a/Doc/distutils/introduction.rst b/Doc/distutils/introduction.rst
index b772b01..8dc604d 100644
--- a/Doc/distutils/introduction.rst
+++ b/Doc/distutils/introduction.rst
@@ -187,7 +187,7 @@
 module distribution
    a collection of Python modules distributed together as a single downloadable
    resource and meant to be installed *en masse*.  Examples of some well-known
-   module distributions are Numeric Python, PyXML, PIL (the Python Imaging
+   module distributions are NumPy, SciPy, PIL (the Python Imaging
    Library), or mxBase.  (This would be called a *package*, except that term is
    already taken in the Python context: a single module distribution may contain
    zero, one, or many Python packages.)
diff --git a/Doc/documenting/style.rst b/Doc/documenting/style.rst
index 9baab12..1fdc139 100644
--- a/Doc/documenting/style.rst
+++ b/Doc/documenting/style.rst
@@ -109,6 +109,110 @@
    The name of the operating system developed at AT&T Bell Labs in the early
    1970s.
 
+Affirmative Tone
+----------------
+
+The documentation focuses on affirmatively stating what the language does and
+how to use it effectively.
+
+Except for certain security risks or segfault risks, the docs should avoid
+wording along the lines of "feature x is dangerous" or "experts only".  These
+kinds of value judgments belong in external blogs and wikis, not in the core
+documentation.
+
+Bad example (creating worry in the mind of a reader):
+
+    Warning: failing to explicitly close a file could result in lost data or
+    excessive resource consumption.  Never rely on reference counting to
+    automatically close a file.
+
+Good example (establishing confident knowledge in the effective use of the language):
+
+    A best practice for using files is to use a try/finally pair to explicitly
+    close a file after it is used.  Alternatively, using a with-statement can
+    achieve the same effect.  This ensures that files are flushed and file
+    descriptor resources are released in a timely manner.
+
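+A minimal sketch of the recommended practice (the file name is illustrative)::
+
+    # Explicit close with try/finally ...
+    f = open('data.txt')
+    try:
+        data = f.read()
+    finally:
+        f.close()
+
+    # ... or, equivalently, with a with-statement.
+    with open('data.txt') as f:
+        data = f.read()
+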
+Economy of Expression
+---------------------
+
+More documentation is not necessarily better documentation.  Err on the side
+of being succinct.
+
+It is an unfortunate fact that making documentation longer can be an impediment
+to understanding and can result in even more ways to misread or misinterpret the
+text.  Long descriptions full of corner cases and caveats can create the
+impression that a function is more complex or harder to use than it actually is.
+
+The documentation for :func:`super` is an example of where a good deal of
+information was condensed into a few short paragraphs.  Discussion of
+:func:`super` could have filled a chapter in a book, but it is often easier to
+grasp a terse description than a lengthy narrative.
+
+
+Code Examples
+-------------
+
+Short code examples can be a useful adjunct to understanding.  Readers can often
+grasp a simple example more quickly than they can digest a formal description in
+prose.
+
+People learn faster with concrete, motivating examples that match the context of
+a typical use case.  For instance, the :func:`str.rpartition` method is better
+demonstrated with an example splitting the domain from a URL than it would be
+with an example of removing the last word from a line of Monty Python dialog.
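+
+For instance, such a URL example might look like::
+
+    >>> url = 'http://www.python.org/download'
+    >>> url.rpartition('/')
+    ('http://www.python.org', '/', 'download')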
+
+The ellipsis for the :attr:`sys.ps2` secondary interpreter prompt should only be
+used sparingly, where it is necessary to clearly differentiate between input
+lines and output lines.  Besides contributing visual clutter, it makes it
+difficult for readers to cut-and-paste examples so they can experiment with
+variations.
+
+Code Equivalents
+----------------
+
+Giving pure Python code equivalents (or approximate equivalents) can be a useful
+adjunct to a prose description.  A documenter should carefully weigh whether the
+code equivalent adds value.
+
+A good example is the code equivalent for :func:`all`.  The short 4-line code
+equivalent is easily digested; it re-emphasizes the early-out behavior; and it
+clarifies the handling of the corner-case where the iterable is empty.  In
+addition, it serves as a model for people wanting to implement a commonly
+requested alternative where :func:`all` would return the specific object
+evaluating to False whenever the function terminates early.
+
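+The equivalent in question is short enough to quote in full (as given in the
+:func:`all` documentation)::
+
+    def all(iterable):
+        for element in iterable:
+            if not element:
+                return False
+        return True
+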
+A more questionable example is the code for :func:`itertools.groupby`.  Its code
+equivalent borders on being too complex to be a quick aid to understanding.
+Despite its complexity, the code equivalent was kept because it serves as a
+model for alternative implementations and because the operation of the "grouper"
+is more easily shown in code than in English prose.
+
+An example of when not to use a code equivalent is for the :func:`oct` function.
+The exact steps in converting a number to octal don't add value for a user
+trying to learn what the function does.
+
+Audience
+--------
+
+The tone of the tutorial (and all the docs) needs to be respectful of the
+reader's intelligence.  Don't presume that the readers are stupid.  Lay out the
+relevant information, show motivating use cases, provide glossary links, and do
+our best to connect the dots, but don't talk down to them or waste their time.
+
+The tutorial is meant for newcomers, many of whom will be using the tutorial to
+evaluate the language as a whole.  The experience needs to be positive and not
+leave the reader with worries that something bad will happen if they make a
+misstep.  The tutorial serves as a guide for intelligent and curious readers,
+saving details for the how-to guides and other sources.
+
+Be careful accepting requests for documentation changes from the rare but vocal
+category of reader who is looking for vindication for one of their programming
+errors ("I made a mistake, therefore the docs must be wrong ...").  Typically,
+the documentation wasn't consulted until after the error was made.  It is
+unfortunate, but typically no documentation edit would have saved the user from
+making false assumptions about the language ("I was surprised by ...").
+
 
 .. _Apple Publications Style Guide: http://developer.apple.com/mac/library/documentation/UserExperience/Conceptual/APStyleGuide/APSG_2009.pdf
 
diff --git a/Doc/howto/sockets.rst b/Doc/howto/sockets.rst
index 3927544..04e9b98 100644
--- a/Doc/howto/sockets.rst
+++ b/Doc/howto/sockets.rst
@@ -43,10 +43,10 @@
 History
 -------
 
-Of the various forms of IPC (*Inter Process Communication*), sockets are by far
-the most popular.  On any given platform, there are likely to be other forms of
-IPC that are faster, but for cross-platform communication, sockets are about the
-only game in town.
+Of the various forms of :abbr:`IPC (Inter Process Communication)`,
+sockets are by far the most popular.  On any given platform, there are
+likely to be other forms of IPC that are faster, but for
+cross-platform communication, sockets are about the only game in town.
 
 They were invented in Berkeley as part of the BSD flavor of Unix. They spread
 like wildfire with the Internet. With good reason --- the combination of sockets
@@ -66,13 +66,14 @@
    # - the normal http port
    s.connect(("www.mcmillan-inc.com", 80))
 
-When the ``connect`` completes, the socket ``s`` can now be used to send in a
-request for the text of this page. The same socket will read the reply, and then
-be destroyed. That's right - destroyed. Client sockets are normally only used
-for one exchange (or a small set of sequential exchanges).
+When the ``connect`` completes, the socket ``s`` can be used to send
+in a request for the text of the page. The same socket will read the
+reply, and then be destroyed. That's right, destroyed. Client sockets
+are normally only used for one exchange (or a small set of sequential
+exchanges).
 
 What happens in the web server is a bit more complex. First, the web server
-creates a "server socket". ::
+creates a "server socket"::
 
    #create an INET, STREAMing socket
    serversocket = socket.socket(
@@ -96,7 +97,7 @@
 queue up as many as 5 connect requests (the normal max) before refusing outside
 connections. If the rest of the code is written properly, that should be plenty.
 
-OK, now we have a "server" socket, listening on port 80. Now we enter the
+Now that we have a "server" socket, listening on port 80, we can enter the
 mainloop of the web server::
 
    while True:
@@ -145,7 +146,7 @@
 
 Now there are two sets of verbs to use for communication. You can use ``send``
 and ``recv``, or you can transform your client socket into a file-like beast and
-use ``read`` and ``write``. The latter is the way Java presents their sockets.
+use ``read`` and ``write``. The latter is the way Java presents its sockets.
 I'm not going to talk about it here, except to warn you that you need to use
 ``flush`` on sockets. These are buffered "files", and a common mistake is to
 ``write`` something, and then ``read`` for a reply. Without a ``flush`` in
@@ -166,11 +167,11 @@
 about that some on the next page.
 
 A protocol like HTTP uses a socket for only one transfer. The client sends a
-request, the reads a reply.  That's it. The socket is discarded. This means that
+request, then reads a reply.  That's it. The socket is discarded. This means that
 a client can detect the end of the reply by receiving 0 bytes.
 
 But if you plan to reuse your socket for further transfers, you need to realize
-that *there is no "EOT" (End of Transfer) on a socket.* I repeat: if a socket
+that *there is no* :abbr:`EOT (End of Transfer)` *on a socket.* I repeat: if a socket
 ``send`` or ``recv`` returns after handling 0 bytes, the connection has been
 broken.  If the connection has *not* been broken, you may wait on a ``recv``
 forever, because the socket will *not* tell you that there's nothing more to
@@ -336,7 +337,7 @@
 
 In C, coding ``select`` is fairly complex. In Python, it's a piece of cake, but
 it's close enough to the C version that if you understand ``select`` in Python,
-you'll have little trouble with it in C. ::
+you'll have little trouble with it in C::
 
    ready_to_read, ready_to_write, in_error = \
                   select.select(
@@ -353,10 +354,9 @@
 thing to do - give it a nice long timeout (say a minute) unless you have good
 reason to do otherwise.
 
-In return, you will get three lists. They have the sockets that are actually
+In return, you will get three lists. They contain the sockets that are actually
 readable, writable and in error. Each of these lists is a subset (possibly
-empty) of the corresponding list you passed in. And if you put a socket in more
-than one input list, it will only be (at most) in one output list.
+empty) of the corresponding list you passed in.
 
 If a socket is in the output readable list, you can be
 as-close-to-certain-as-we-ever-get-in-this-business that a ``recv`` on that
diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst
index 6039a15..f54af32 100644
--- a/Doc/library/collections.rst
+++ b/Doc/library/collections.rst
@@ -971,10 +971,6 @@
    class OrderedCounter(Counter, OrderedDict):
         'Counter that remembers the order elements are first encountered'
 
-        def __init__(self, iterable=None, **kwds):
-            OrderedDict.__init__(self)
-            Counter.__init__(self, iterable, **kwds)
-
         def __repr__(self):
             return '%s(%r)' % (self.__class__.__name__, OrderedDict(self))
 
diff --git a/Doc/library/configparser.rst b/Doc/library/configparser.rst
index 1a88bbd..c84e423 100644
--- a/Doc/library/configparser.rst
+++ b/Doc/library/configparser.rst
@@ -1064,6 +1064,10 @@
       given *section*.  Optional arguments have the same meaning as for the
       :meth:`get` method.
 
+      .. versionchanged:: 3.2
+         Items present in *vars* no longer appear in the result. The previous
+         behaviour mixed actual parser options with variables provided for
+         interpolation.
 
    .. method:: set(section, option, value)
 
diff --git a/Doc/library/faulthandler.rst b/Doc/library/faulthandler.rst
index c0b6625..0c00f8f 100644
--- a/Doc/library/faulthandler.rst
+++ b/Doc/library/faulthandler.rst
@@ -36,21 +36,22 @@
 Dump the traceback
 ------------------
 
-.. function:: dump_traceback(file=sys.stderr, all_threads=False)
+.. function:: dump_traceback(file=sys.stderr, all_threads=True)
 
-   Dump the traceback of the current thread, or of all threads if *all_threads*
-   is ``True``, into *file*.
+   Dump the traceback of all threads, or of the current thread if *all_threads*
+   is ``False``, into *file*.
 
 
 Fault handler state
 -------------------
 
-.. function:: enable(file=sys.stderr, all_threads=False)
+.. function:: enable(file=sys.stderr, all_threads=True)
 
    Enable the fault handler: install handlers for :const:`SIGSEGV`,
    :const:`SIGFPE`, :const:`SIGABRT`, :const:`SIGBUS` and :const:`SIGILL`
-   signals to dump the Python traceback. It dumps the traceback of the current
-   thread, or all threads if *all_threads* is ``True``, into *file*.
+   signals to dump the Python traceback. It dumps the traceback of all
+   threads, or of the current thread if *all_threads* is ``False``, into
+   *file*.
 
 .. function:: disable()
 
@@ -86,11 +87,11 @@
 Dump the traceback on a user signal
 -----------------------------------
 
-.. function:: register(signum, file=sys.stderr, all_threads=False)
+.. function:: register(signum, file=sys.stderr, all_threads=True)
 
    Register a user signal: install a handler for the *signum* signal to dump
-   the traceback of the current thread, or of all threads if *all_threads* is
-   ``True``, into *file*.
+   the traceback of all threads, or of the current thread if *all_threads* is
+   ``False``, into *file*.
 
    Not available on Windows.
 
@@ -123,7 +124,7 @@
     >>> ctypes.string_at(0)
     Fatal Python error: Segmentation fault
 
-    Traceback (most recent call first):
+    Current thread 0x00007fb899f39700:
       File "/home/python/cpython/Lib/ctypes/__init__.py", line 486 in string_at
       File "<stdin>", line 1 in <module>
     Segmentation fault
diff --git a/Doc/library/ftplib.rst b/Doc/library/ftplib.rst
index 8283a45..f4205f4 100644
--- a/Doc/library/ftplib.rst
+++ b/Doc/library/ftplib.rst
@@ -254,13 +254,12 @@
 
    Retrieve a file or directory listing in ASCII transfer mode.  *cmd* should be
    an appropriate ``RETR`` command (see :meth:`retrbinary`) or a command such as
-   ``LIST``, ``NLST`` or ``MLSD`` (usually just the string ``'LIST'``).
+   ``LIST`` or ``NLST`` (usually just the string ``'LIST'``).
    ``LIST`` retrieves a list of files and information about those files.
-   ``NLST`` retrieves a list of file names.  On some servers, ``MLSD`` retrieves
-   a machine readable list of files and information about those files.  The
-   *callback* function is called for each line with a string argument containing
-   the line with the trailing CRLF stripped.  The default *callback* prints the
-   line to ``sys.stdout``.
+   ``NLST`` retrieves a list of file names.
+   The *callback* function is called for each line with a string argument
+   containing the line with the trailing CRLF stripped.  The default *callback*
+   prints the line to ``sys.stdout``.
 
 
 .. method:: FTP.set_pasv(boolean)
@@ -320,6 +319,20 @@
    in :meth:`transfercmd`.
 
 
+.. method:: FTP.mlsd(path="", facts=[])
+
+   List a directory in a standardized format by using the MLSD command
+   (:rfc:`3659`).  If *path* is omitted the current directory is assumed.
+   *facts* is a list of strings representing the type of information desired
+   (e.g. ``["type", "size", "perm"]``).  Return a generator object yielding a
+   tuple of two elements for every file found in path.  The first element is
+   the file name, the second one is a dictionary containing facts about the
+   file name.  The content of this dictionary might be limited by the *facts*
+   argument, but the server is not guaranteed to return all requested facts.
+
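+   A hypothetical session (server name and listing are illustrative)::
+
+      >>> ftp = FTP('ftp.example.org')
+      >>> for name, facts in ftp.mlsd(facts=["type", "size"]):
+      ...     print(name, facts)
+      ...
+      readme.txt {'type': 'file', 'size': '301'}
+      pub {'type': 'dir'}
+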
+   .. versionadded:: 3.3
+
+
 .. method:: FTP.nlst(argument[, ...])
 
    Return a list of file names as returned by the ``NLST`` command.  The
@@ -327,6 +340,8 @@
    directory).  Multiple arguments can be used to pass non-standard options to
    the ``NLST`` command.
 
+   .. deprecated:: 3.3 use :meth:`mlsd` instead.
+
 
 .. method:: FTP.dir(argument[, ...])
 
@@ -337,6 +352,8 @@
    as a *callback* function as for :meth:`retrlines`; the default prints to
    ``sys.stdout``.  This method returns ``None``.
 
+   .. deprecated:: 3.3 use :meth:`mlsd` instead.
+
 
 .. method:: FTP.rename(fromname, toname)
 
diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst
index 3020128..8b7eef5 100644
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -498,11 +498,14 @@
    of the *value* argument, however there is a standard formatting syntax that
    is used by most built-in types: :ref:`formatspec`.
 
-   .. note::
+   The default *format_spec* is an empty string which usually gives the same
+   effect as calling ``str(value)``.
 
-      ``format(value, format_spec)`` merely calls
-      ``value.__format__(format_spec)``.
-
+   A call to ``format(value, format_spec)`` is translated to
+   ``type(value).__format__(format_spec)`` which bypasses the instance
+   dictionary when searching for the value's :meth:`__format__` method.  A
+   :exc:`TypeError` exception is raised if the method is not found or if either
+   the *format_spec* or the return value are not strings.
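+
+   For example::
+
+      >>> format(255, 'x')        # hexadecimal format spec
+      'ff'
+      >>> format(1234567, ',')    # thousands separator
+      '1,234,567'
+      >>> format(3.0)             # empty format_spec, same as str(3.0)
+      '3.0'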
 
 .. function:: frozenset([iterable])
    :noindex:
diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst
index e3a3a10..b30a661 100644
--- a/Doc/library/http.server.rst
+++ b/Doc/library/http.server.rst
@@ -179,19 +179,29 @@
 
    .. method:: send_response(code, message=None)
 
-      Sends a response header and logs the accepted request. The HTTP response
-      line is sent, followed by *Server* and *Date* headers. The values for
-      these two headers are picked up from the :meth:`version_string` and
-      :meth:`date_time_string` methods, respectively.
+      Adds a response header to the headers buffer and logs the accepted
+      request. The HTTP response line is written to the internal buffer,
+      followed by *Server* and *Date* headers. The values for these two headers
+      are picked up from the :meth:`version_string` and
+      :meth:`date_time_string` methods, respectively. If the server does not
+      intend to send any other headers using the :meth:`send_header` method,
+      then :meth:`send_response` should be followed by an :meth:`end_headers`
+      call.
+
+      .. versionchanged:: 3.3
+         Headers are stored in an internal buffer and :meth:`end_headers`
+         needs to be called explicitly.
+
 
    .. method:: send_header(keyword, value)
 
-      Stores the HTTP header to an internal buffer which will be written to the
-      output stream when :meth:`end_headers` method is invoked.
-      *keyword* should specify the header keyword, with *value*
-      specifying its value.
+      Adds the HTTP header to an internal buffer which will be written to the
+      output stream when either :meth:`end_headers` or :meth:`flush_headers` is
+      invoked. *keyword* should specify the header keyword, with *value*
+      specifying its value. Note that, after all :meth:`send_header` calls are
+      done, :meth:`end_headers` must be called to complete the operation.
 
-      .. versionchanged:: 3.2 Storing the headers in an internal buffer
+      .. versionchanged:: 3.2 Headers are stored in an internal buffer.
 
 
    .. method:: send_response_only(code, message=None)
@@ -205,10 +215,19 @@
 
    .. method:: end_headers()
 
-      Write the buffered HTTP headers to the output stream and send a blank
-      line, indicating the end of the HTTP headers in the response.
+      Adds a blank line (indicating the end of the HTTP headers in the
+      response) to the headers buffer and calls :meth:`flush_headers()`.
 
-      .. versionchanged:: 3.2 Writing the buffered headers to the output stream.
+      .. versionchanged:: 3.2
+         The buffered headers are written to the output stream.
+
+   .. method:: flush_headers()
+
+      Sends the buffered headers to the output stream and flushes the internal
+      headers buffer.
+
+      .. versionadded:: 3.3
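+
+      A short sketch of the buffered-header sequence described above (the
+      handler and payload are illustrative)::
+
+         import http.server
+
+         class Handler(http.server.BaseHTTPRequestHandler):
+             def do_GET(self):
+                 self.send_response(200)
+                 self.send_header('Content-Type', 'text/plain')
+                 self.end_headers()      # flush the buffered headers
+                 self.wfile.write(b'Hello, world!')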
 
    .. method:: log_request(code='-', size='-')
 
diff --git a/Doc/library/imaplib.rst b/Doc/library/imaplib.rst
index 1d92fe5..592e4b0 100644
--- a/Doc/library/imaplib.rst
+++ b/Doc/library/imaplib.rst
@@ -64,14 +64,21 @@
 There's also a subclass for secure connections:
 
 
-.. class:: IMAP4_SSL(host='', port=IMAP4_SSL_PORT, keyfile=None, certfile=None)
+.. class:: IMAP4_SSL(host='', port=IMAP4_SSL_PORT, keyfile=None, certfile=None, ssl_context=None)
 
    This is a subclass derived from :class:`IMAP4` that connects over an SSL
    encrypted socket (to use this class you need a socket module that was compiled
    with SSL support).  If *host* is not specified, ``''`` (the local host) is used.
    If *port* is omitted, the standard IMAP4-over-SSL port (993) is used.  *keyfile*
    and *certfile* are also optional - they can contain a PEM formatted private key
-   and certificate chain file for the SSL connection.
+   and certificate chain file for the SSL connection. The *ssl_context*
+   parameter is a :class:`ssl.SSLContext` object which allows bundling SSL
+   configuration options, certificates and private keys into a single
+   (potentially long-lived) structure. Note that the *keyfile*/*certfile*
+   parameters are mutually exclusive with *ssl_context*; a :class:`ValueError`
+   is raised if *keyfile*/*certfile* is provided along with *ssl_context*.
+
+   .. versionchanged:: 3.3
+      *ssl_context* parameter added.
 
 
 The second subclass allows for connections created by a child process:
diff --git a/Doc/library/math.rst b/Doc/library/math.rst
index 98c5b33..d68cf11 100644
--- a/Doc/library/math.rst
+++ b/Doc/library/math.rst
@@ -184,6 +184,14 @@
    result is calculated in a way which is accurate for *x* near zero.
 
 
+.. function:: log2(x)
+
+   Return the base-2 logarithm of *x*. This is usually more accurate than
+   ``log(x, 2)``.
+
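+   For example::
+
+      >>> import math
+      >>> math.log2(1024)
+      10.0
+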
+   .. versionadded:: 3.3
+
+
 .. function:: log10(x)
 
    Return the base-10 logarithm of *x*.  This is usually more accurate
diff --git a/Doc/library/modulefinder.rst b/Doc/library/modulefinder.rst
index d42c6ab..97ace60 100644
--- a/Doc/library/modulefinder.rst
+++ b/Doc/library/modulefinder.rst
@@ -25,8 +25,7 @@
 .. function:: ReplacePackage(oldname, newname)
 
    Allows specifying that the module named *oldname* is in fact the package named
-   *newname*.  The most common usage would be  to handle how the :mod:`_xmlplus`
-   package replaces the :mod:`xml` package.
+   *newname*.
 
 
 .. class:: ModuleFinder(path=None, debug=0, excludes=[], replace_paths=[])
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index eca51dc..6ef6d9d 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -1298,6 +1298,7 @@
           O_NOCTTY
           O_SHLOCK
           O_EXLOCK
+          O_CLOEXEC
 
    These constants are only available on Unix.
 
@@ -1346,7 +1347,26 @@
       Using :func:`access` to check if a user is authorized to e.g. open a file
       before actually doing so using :func:`open` creates a security hole,
       because the user might exploit the short time interval between checking
-      and opening the file to manipulate it.
+      and opening the file to manipulate it. It's preferable to use :term:`EAFP`
+      techniques. For example::
+
+         if os.access("myfile", os.R_OK):
+             with open("myfile") as fp:
+                 return fp.read()
+         return "some default data"
+
+      is better written as::
+
+         try:
+             fp = open("myfile")
+         except IOError as e:
+             if e.errno == errno.EACCES:
+                 return "some default data"
+             # Not a permission error.
+             raise
+         else:
+             with fp:
+                 return fp.read()
 
    .. note::
 
@@ -2284,6 +2304,8 @@
    will be set to *sig*. The Windows version of :func:`kill` additionally takes
    process handles to be killed.
 
+   See also :func:`signal.pthread_kill`.
+
    .. versionadded:: 3.2
       Windows support.
 
diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst
index 9ab12ee..fc54208 100644
--- a/Doc/library/pprint.rst
+++ b/Doc/library/pprint.rst
@@ -189,37 +189,105 @@
 
 .. _pprint-example:
 
-pprint Example
---------------
+Example
+-------
 
-This example demonstrates several uses of the :func:`pprint` function and its
-parameters.
+To demonstrate several uses of the :func:`pprint` function and its parameters,
+let's fetch information about a package from PyPI::
 
+   >>> import json
    >>> import pprint
-   >>> tup = ('spam', ('eggs', ('lumberjack', ('knights', ('ni', ('dead',
-   ... ('parrot', ('fresh fruit',))))))))
-   >>> stuff = ['a' * 10, tup, ['a' * 30, 'b' * 30], ['c' * 20, 'd' * 20]]
-   >>> pprint.pprint(stuff)
-   ['aaaaaaaaaa',
-    ('spam',
-     ('eggs',
-      ('lumberjack',
-       ('knights', ('ni', ('dead', ('parrot', ('fresh fruit',)))))))),
-    ['aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'],
-    ['cccccccccccccccccccc', 'dddddddddddddddddddd']]
-   >>> pprint.pprint(stuff, depth=3)
-   ['aaaaaaaaaa',
-    ('spam', ('eggs', (...))),
-    ['aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'],
-    ['cccccccccccccccccccc', 'dddddddddddddddddddd']]
-   >>> pprint.pprint(stuff, width=60)
-   ['aaaaaaaaaa',
-    ('spam',
-     ('eggs',
-      ('lumberjack',
-       ('knights',
-        ('ni', ('dead', ('parrot', ('fresh fruit',)))))))),
-    ['aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
-     'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'],
-    ['cccccccccccccccccccc', 'dddddddddddddddddddd']]
+   >>> from urllib.request import urlopen
+   >>> with urlopen('http://pypi.python.org/pypi/configparser/json') as url:
+   ...     http_info = url.info()
+   ...     raw_data = url.read().decode(http_info.get_content_charset())
+   >>> package_data = json.loads(raw_data)
+   >>> result = {'headers': http_info.items(), 'body': package_data}
 
+In its basic form, :func:`pprint` shows the whole object::
+
+   >>> pprint.pprint(result)
+   {'body': {'info': {'_pypi_hidden': False,
+                      '_pypi_ordering': 12,
+                      'classifiers': ['Development Status :: 4 - Beta',
+                                      'Intended Audience :: Developers',
+                                      'License :: OSI Approved :: MIT License',
+                                      'Natural Language :: English',
+                                      'Operating System :: OS Independent',
+                                      'Programming Language :: Python',
+                                      'Programming Language :: Python :: 2',
+                                      'Programming Language :: Python :: 2.6',
+                                      'Programming Language :: Python :: 2.7',
+                                      'Topic :: Software Development :: Libraries',
+                                      'Topic :: Software Development :: Libraries :: Python Modules'],
+                      'download_url': 'UNKNOWN',
+                      'home_page': 'http://docs.python.org/py3k/library/configparser.html',
+                      'keywords': 'configparser ini parsing conf cfg configuration file',
+                      'license': 'MIT',
+                      'name': 'configparser',
+                      'package_url': 'http://pypi.python.org/pypi/configparser',
+                      'platform': 'any',
+                      'release_url': 'http://pypi.python.org/pypi/configparser/3.2.0r3',
+                      'requires_python': None,
+                      'stable_version': None,
+                      'summary': 'This library brings the updated configparser from Python 3.2+ to Python 2.6-2.7.',
+                      'version': '3.2.0r3'},
+           'urls': [{'comment_text': '',
+                     'downloads': 47,
+                     'filename': 'configparser-3.2.0r3.tar.gz',
+                     'has_sig': False,
+                     'md5_digest': '8500fd87c61ac0de328fc996fce69b96',
+                     'packagetype': 'sdist',
+                     'python_version': 'source',
+                     'size': 32281,
+                     'upload_time': '2011-05-10T16:28:50',
+                     'url': 'http://pypi.python.org/packages/source/c/configparser/configparser-3.2.0r3.tar.gz'}]},
+   'headers': [('Date', 'Sat, 14 May 2011 12:48:52 GMT'),
+               ('Server', 'Apache/2.2.16 (Debian)'),
+               ('Content-Disposition', 'inline'),
+               ('Connection', 'close'),
+               ('Transfer-Encoding', 'chunked'),
+               ('Content-Type', 'application/json; charset="UTF-8"')]}
+
+The result can be limited to a certain *depth* (ellipsis is used for deeper
+contents)::
+
+   >>> pprint.pprint(result, depth=3)
+   {'body': {'info': {'_pypi_hidden': False,
+                      '_pypi_ordering': 12,
+                      'classifiers': [...],
+                      'download_url': 'UNKNOWN',
+                      'home_page': 'http://docs.python.org/py3k/library/configparser.html',
+                      'keywords': 'configparser ini parsing conf cfg configuration file',
+                      'license': 'MIT',
+                      'name': 'configparser',
+                      'package_url': 'http://pypi.python.org/pypi/configparser',
+                      'platform': 'any',
+                      'release_url': 'http://pypi.python.org/pypi/configparser/3.2.0r3',
+                      'requires_python': None,
+                      'stable_version': None,
+                      'summary': 'This library brings the updated configparser from Python 3.2+ to Python 2.6-2.7.',
+                      'version': '3.2.0r3'},
+           'urls': [{...}]},
+   'headers': [('Date', 'Sat, 14 May 2011 12:48:52 GMT'),
+               ('Server', 'Apache/2.2.16 (Debian)'),
+               ('Content-Disposition', 'inline'),
+               ('Connection', 'close'),
+               ('Transfer-Encoding', 'chunked'),
+               ('Content-Type', 'application/json; charset="UTF-8"')]}
+
+Additionally, maximum *width* can be suggested. If a long object cannot be
+split, the specified width will be exceeded::
+
+   >>> pprint.pprint(result['headers'], width=30)
+   [('Date',
+     'Sat, 14 May 2011 12:48:52 GMT'),
+    ('Server',
+     'Apache/2.2.16 (Debian)'),
+    ('Content-Disposition',
+     'inline'),
+    ('Connection', 'close'),
+    ('Transfer-Encoding',
+     'chunked'),
+    ('Content-Type',
+     'application/json; charset="UTF-8"')]
diff --git a/Doc/library/re.rst b/Doc/library/re.rst
index b1c3804..606825c 100644
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -1301,56 +1301,70 @@
 to combine those into a single master regular expression and to loop over
 successive matches::
 
-    Token = collections.namedtuple('Token', 'typ value line column')
+    import collections
+    import re
+
+    Token = collections.namedtuple('Token', ['typ', 'value', 'line', 'column'])
 
     def tokenize(s):
-        keywords = {'IF', 'THEN', 'FOR', 'NEXT', 'GOSUB', 'RETURN'}
-        tok_spec = [
-            ('NUMBER', r'\d+(\.\d*)?'), # Integer or decimal number
-            ('ASSIGN', r':='),          # Assignment operator
-            ('END', ';'),               # Statement terminator
-            ('ID', r'[A-Za-z]+'),       # Identifiers
-            ('OP', r'[+*\/\-]'),        # Arithmetic operators
-            ('NEWLINE', r'\n'),         # Line endings
-            ('SKIP', r'[ \t]'),         # Skip over spaces and tabs
+        keywords = {'IF', 'THEN', 'ENDIF', 'FOR', 'NEXT', 'GOSUB', 'RETURN'}
+        token_specification = [
+            ('NUMBER',  r'\d+(\.\d*)?'), # Integer or decimal number
+            ('ASSIGN',  r':='),          # Assignment operator
+            ('END',     r';'),           # Statement terminator
+            ('ID',      r'[A-Za-z]+'),   # Identifiers
+            ('OP',      r'[+*\/\-]'),    # Arithmetic operators
+            ('NEWLINE', r'\n'),          # Line endings
+            ('SKIP',    r'[ \t]'),       # Skip over spaces and tabs
         ]
-        tok_re = '|'.join('(?P<%s>%s)' % pair for pair in tok_spec)
-        gettok = re.compile(tok_re).match
+        tok_regex = '|'.join('(?P<%s>%s)' % pair for pair in token_specification)
+        get_token = re.compile(tok_regex).match
         line = 1
         pos = line_start = 0
-        mo = gettok(s)
+        mo = get_token(s)
         while mo is not None:
             typ = mo.lastgroup
             if typ == 'NEWLINE':
                 line_start = pos
                 line += 1
             elif typ != 'SKIP':
+                val = mo.group(typ)
                 if typ == 'ID' and val in keywords:
                     typ = val
-                yield Token(typ, mo.group(typ), line, mo.start()-line_start)
+                yield Token(typ, val, line, mo.start()-line_start)
             pos = mo.end()
-            mo = gettok(s, pos)
+            mo = get_token(s, pos)
         if pos != len(s):
             raise RuntimeError('Unexpected character %r on line %d' % (s[pos], line))
 
-    >>> statements = '''\
-        total := total + price * quantity;
-        tax := price * 0.05;
+    statements = '''
+        IF quantity THEN
+            total := total + price * quantity;
+            tax := price * 0.05;
+        ENDIF;
     '''
-    >>> for token in tokenize(statements):
-    ...     print(token)
-    ...
-    Token(typ='ID', value='total', line=1, column=8)
-    Token(typ='ASSIGN', value=':=', line=1, column=14)
-    Token(typ='ID', value='total', line=1, column=17)
-    Token(typ='OP', value='+', line=1, column=23)
-    Token(typ='ID', value='price', line=1, column=25)
-    Token(typ='OP', value='*', line=1, column=31)
-    Token(typ='ID', value='quantity', line=1, column=33)
-    Token(typ='END', value=';', line=1, column=41)
-    Token(typ='ID', value='tax', line=2, column=9)
-    Token(typ='ASSIGN', value=':=', line=2, column=13)
-    Token(typ='ID', value='price', line=2, column=16)
-    Token(typ='OP', value='*', line=2, column=22)
-    Token(typ='NUMBER', value='0.05', line=2, column=24)
-    Token(typ='END', value=';', line=2, column=28)
+
+    for token in tokenize(statements):
+        print(token)
+
+The tokenizer produces the following output::
+
+    Token(typ='IF', value='IF', line=2, column=5)
+    Token(typ='ID', value='quantity', line=2, column=8)
+    Token(typ='THEN', value='THEN', line=2, column=17)
+    Token(typ='ID', value='total', line=3, column=9)
+    Token(typ='ASSIGN', value=':=', line=3, column=15)
+    Token(typ='ID', value='total', line=3, column=18)
+    Token(typ='OP', value='+', line=3, column=24)
+    Token(typ='ID', value='price', line=3, column=26)
+    Token(typ='OP', value='*', line=3, column=32)
+    Token(typ='ID', value='quantity', line=3, column=34)
+    Token(typ='END', value=';', line=3, column=42)
+    Token(typ='ID', value='tax', line=4, column=9)
+    Token(typ='ASSIGN', value=':=', line=4, column=13)
+    Token(typ='ID', value='price', line=4, column=16)
+    Token(typ='OP', value='*', line=4, column=22)
+    Token(typ='NUMBER', value='0.05', line=4, column=24)
+    Token(typ='END', value=';', line=4, column=28)
+    Token(typ='ENDIF', value='ENDIF', line=5, column=5)
+    Token(typ='END', value=';', line=5, column=10)
diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst
index f318cfa..68a5d2c 100644
--- a/Doc/library/signal.rst
+++ b/Doc/library/signal.rst
@@ -179,6 +179,29 @@
    will then be called.  Returns nothing.  Not on Windows. (See the Unix man page
    :manpage:`signal(2)`.)
 
+   See also :func:`sigwait` and :func:`sigpending`.
+
+
+.. function:: pthread_kill(thread_id, signum)
+
+   Send the signal *signum* to the thread *thread_id*, another thread in the
+   same process as the caller.  The signal is asynchronously directed to the
+   thread.
+
+   *thread_id* can be read from the :attr:`~threading.Thread.ident` attribute
+   of :class:`threading.Thread`.  For example,
+   ``threading.current_thread().ident`` gives the identifier of the current
+   thread.
+
+   If *signum* is 0, then no signal is sent, but error checking is still
+   performed; this can be used to check if a thread is still running.
+
+   Availability: Unix (see the man page :manpage:`pthread_kill(3)` for further
+   information).
+
+   See also :func:`os.kill`.
+
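+   A minimal sketch of the ``signum=0`` check described above (Unix only)::
+
+      import signal
+      import threading
+      import time
+
+      t = threading.Thread(target=time.sleep, args=(1,))
+      t.start()
+      # With signum 0 no signal is delivered, but an error is still
+      # raised if the thread identifier is not valid.
+      signal.pthread_kill(t.ident, 0)
+      t.join()
+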
+   .. versionadded:: 3.3
+
 
 .. function:: pthread_sigmask(how, mask)
 
@@ -206,6 +229,8 @@
    Availability: Unix. See the man page :manpage:`sigprocmask(3)` and
    :manpage:`pthread_sigmask(3)` for further information.
 
+   See also :func:`pause`, :func:`sigpending` and :func:`sigwait`.
+
    .. versionadded:: 3.3
 
 
@@ -237,13 +262,17 @@
 
 .. function:: set_wakeup_fd(fd)
 
-   Set the wakeup fd to *fd*.  When a signal is received, a ``'\0'`` byte is
-   written to the fd.  This can be used by a library to wakeup a poll or select
-   call, allowing the signal to be fully processed.
+   Set the wakeup file descriptor to *fd*.  When a signal is received, the
+   signal number is written as a single byte into the fd.  This can be used by
+   a library to wakeup a poll or select call, allowing the signal to be fully
+   processed.
 
    The old wakeup fd is returned.  *fd* must be non-blocking.  It is up to the
    library to remove any bytes before calling poll or select again.
 
+   For example, use ``struct.unpack('%uB' % len(data), data)`` to decode the
+   list of signal numbers.
+
    When threads are enabled, this function can only be called from the main thread;
    attempting to call it from other threads will cause a :exc:`ValueError`
    exception to be raised.
@@ -283,6 +312,34 @@
    :const:`SIGTERM`. A :exc:`ValueError` will be raised in any other case.
 
 
+.. function:: sigpending()
+
+   Examine the set of signals that are pending for delivery to the calling
+   thread (i.e., the signals which have been raised while blocked).  Return the
+   set of the pending signals.
+
+   Availability: Unix (see the man page :manpage:`sigpending(2)` for further
+   information).
+
+   See also :func:`pause`, :func:`pthread_sigmask` and :func:`sigwait`.
+
+   .. versionadded:: 3.3
+
+
+.. function:: sigwait(sigset)
+
+   Suspend execution of the calling thread until the delivery of one of the
+   signals specified in the signal set *sigset*.  The function accepts the signal
+   (removes it from the pending list of signals), and returns the signal number.
+
+   Availability: Unix (see the man page :manpage:`sigwait(3)` for further
+   information).
+
+   See also :func:`pause`, :func:`pthread_sigmask` and :func:`sigpending`.
+
+   .. versionadded:: 3.3
+
+
 .. _signal-example:
 
 Example
diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst
index cfd5018..b432d3e 100644
--- a/Doc/library/smtplib.rst
+++ b/Doc/library/smtplib.rst
@@ -49,7 +49,7 @@
       Support for the :keyword:`with` statement was added.
 
 
-.. class:: SMTP_SSL(host='', port=0, local_hostname=None, keyfile=None, certfile=None[, timeout])
+.. class:: SMTP_SSL(host='', port=0, local_hostname=None, keyfile=None, certfile=None[, timeout], context=None)
 
    A :class:`SMTP_SSL` instance behaves exactly the same as instances of
    :class:`SMTP`. :class:`SMTP_SSL` should be used for situations where SSL is
@@ -57,11 +57,16 @@
    not appropriate. If *host* is not specified, the local host is used. If
    *port* is zero, the standard SMTP-over-SSL port (465) is used. *keyfile*
    and *certfile* are also optional, and can contain a PEM formatted private key
-   and certificate chain file for the SSL connection. The optional *timeout*
+   and certificate chain file for the SSL connection. The optional *context*
+   parameter can contain an :class:`~ssl.SSLContext` object, and is an
+   alternative to *keyfile* and *certfile*; if it is specified, both *keyfile*
+   and *certfile* must be ``None``.  The optional *timeout*
    parameter specifies a timeout in seconds for blocking operations like the
    connection attempt (if not specified, the global default timeout setting
    will be used).
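+
+   A minimal sketch using *context* (the host name is a placeholder; the
+   protocol choice is illustrative)::
+
+      import smtplib
+      import ssl
+
+      context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+      context.set_default_verify_paths()
+      server = smtplib.SMTP_SSL('mail.example.com', context=context)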
 
+   .. versionchanged:: 3.3
+      *context* was added.
+
 
 .. class:: LMTP(host='', port=LMTP_PORT, local_hostname=None)
 
@@ -256,7 +261,7 @@
       No suitable authentication method was found.
 
 
-.. method:: SMTP.starttls(keyfile=None, certfile=None)
+.. method:: SMTP.starttls(keyfile=None, certfile=None, context=None)
 
    Put the SMTP connection in TLS (Transport Layer Security) mode.  All SMTP
    commands that follow will be encrypted.  You should then call :meth:`ehlo`
@@ -265,6 +270,9 @@
    If *keyfile* and *certfile* are provided, these are passed to the :mod:`socket`
    module's :func:`ssl` function.
 
+   The optional *context* parameter is a :class:`ssl.SSLContext` object; this
+   is an alternative to using a keyfile and a certfile, and if it is specified
+   both *keyfile* and *certfile* should be ``None``.
+
    If there has been no previous ``EHLO`` or ``HELO`` command this session,
    this method tries ESMTP ``EHLO`` first.
 
@@ -277,6 +285,9 @@
    :exc:`RuntimeError`
      SSL/TLS support is not available to your Python interpreter.
 
+   .. versionchanged:: 3.3
+      *context* was added.
+
 
 .. method:: SMTP.sendmail(from_addr, to_addrs, msg, mail_options=[], rcpt_options=[])
 
diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst
index 65533df..7ce7705 100644
--- a/Doc/library/socket.rst
+++ b/Doc/library/socket.rst
@@ -536,6 +536,39 @@
    .. versionadded:: 3.3
 
 
+.. function:: if_nameindex()
+
+   Returns a list of network interface information
+   (index int, name string) tuples.
+   Raises :exc:`socket.error` if the system call fails for any reason.
+
+   Availability: Unix.
+
+   .. versionadded:: 3.3
+
+
+.. function:: if_nametoindex(if_name)
+
+   Returns a network interface index number corresponding to an
+   interface name string.
+   Raises :exc:`socket.error` if no interface with the given name exists.
+
+   Availability: Unix.
+
+   .. versionadded:: 3.3
+
+
+.. function:: if_indextoname(if_index)
+
+   Returns a network interface name string corresponding to an
+   interface index number.
+   Raises :exc:`socket.error` if no interface with the given index exists.
+
+   Availability: Unix.
+
+   .. versionadded:: 3.3
+
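+   For example (interface names and indexes are system-dependent)::
+
+      >>> import socket
+      >>> socket.if_nameindex()
+      [(1, 'lo'), (2, 'eth0')]
+      >>> socket.if_nametoindex('lo')
+      1
+      >>> socket.if_indextoname(1)
+      'lo'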
+
 .. data:: SocketType
 
    This is a Python type object that represents the socket object type. It is the
@@ -662,8 +695,8 @@
 .. method:: socket.listen(backlog)
 
    Listen for connections made to the socket.  The *backlog* argument specifies the
-   maximum number of queued connections and should be at least 1; the maximum value
-   is system-dependent (usually 5).
+   maximum number of queued connections and should be at least 0; the maximum
+   value is system-dependent (usually 5), and the minimum value is forced to 0.
 
 
 .. method:: socket.makefile(mode='r', buffering=None, *, encoding=None, \
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
index a20cabc..a528a03 100644
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -162,6 +162,20 @@
 Random generation
 ^^^^^^^^^^^^^^^^^
 
+.. function:: RAND_bytes(num)
+
+   Returns *num* cryptographically strong pseudo-random bytes.
+
+   .. versionadded:: 3.3
+
+.. function:: RAND_pseudo_bytes(num)
+
+   Returns a pair ``(bytes, is_cryptographic)``: *bytes* are *num* pseudo-random
+   bytes, and *is_cryptographic* is ``True`` if the bytes generated are
+   cryptographically strong.
+
+   .. versionadded:: 3.3
+
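+   For example (the bytes shown are illustrative; output varies per call)::
+
+      >>> import ssl
+      >>> ssl.RAND_bytes(4)
+      b'\xf3\x9aY\xfa'
+      >>> data, is_cryptographic = ssl.RAND_pseudo_bytes(4)
+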
 .. function:: RAND_status()
 
    Returns True if the SSL pseudo-random number generator has been seeded with
@@ -171,7 +185,7 @@
 
 .. function:: RAND_egd(path)
 
-   If you are running an entropy-gathering daemon (EGD) somewhere, and ``path``
+   If you are running an entropy-gathering daemon (EGD) somewhere, and *path*
    is the pathname of a socket connection open to it, this will read 256 bytes
    of randomness from the socket, and add it to the SSL pseudo-random number
    generator to increase the security of generated secret keys.  This is
@@ -182,8 +196,8 @@
 
 .. function:: RAND_add(bytes, entropy)
 
-   Mixes the given ``bytes`` into the SSL pseudo-random number generator.  The
-   parameter ``entropy`` (a float) is a lower bound on the entropy contained in
+   Mixes the given *bytes* into the SSL pseudo-random number generator.  The
+   parameter *entropy* (a float) is a lower bound on the entropy contained in
    string (so you can always use :const:`0.0`).  See :rfc:`1750` for more
    information on sources of entropy.
 
@@ -292,6 +306,9 @@
 
    Selects SSL version 2 as the channel encryption protocol.
 
+   This protocol is not available if OpenSSL is compiled with the
+   OPENSSL_NO_SSL2 flag.
+
    .. warning::
 
       SSL version 2 is insecure.  Its use is highly discouraged.
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index c0c75f7..4a7efe1 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -1047,6 +1047,15 @@
    *start* and *end* are interpreted as in slice notation.  Return ``-1`` if
    *sub* is not found.
 
+   .. note::
+
+      The :meth:`~str.find` method should be used only if you need to know the
+      position of *sub*.  To check if *sub* is a substring or not, use the
+      :keyword:`in` operator::
+
+         >>> 'Py' in 'Python'
+         True
+
 
 .. method:: str.format(*args, **kwargs)
 
diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst
index bb48849..a0d8471 100644
--- a/Doc/library/subprocess.rst
+++ b/Doc/library/subprocess.rst
@@ -219,8 +219,8 @@
    *creationflags*, if given, can be :data:`CREATE_NEW_CONSOLE` or
    :data:`CREATE_NEW_PROCESS_GROUP`. (Windows only)
 
-   Popen objects are supported as context managers via the :keyword:`with` statement,
-   closing any open file descriptors on exit.
+   Popen objects are supported as context managers via the :keyword:`with` statement:
+   on exit, standard file descriptors are closed, and the process is waited for.
    ::
 
       with Popen(["ifconfig"], stdout=PIPE) as proc:
@@ -311,7 +311,7 @@
 
 .. function:: check_output(*popenargs, timeout=None, **kwargs)
 
-   Run command with arguments and return its output as a byte string.
+   Run command with arguments and return its output as a bytes object.
 
    If the exit code was non-zero it raises a :exc:`CalledProcessError`.  The
    :exc:`CalledProcessError` object will have the return code in the
@@ -447,8 +447,9 @@
 
    Interact with process: Send data to stdin.  Read data from stdout and stderr,
    until end-of-file is reached.  Wait for process to terminate.  The optional
-   *input* argument should be a byte string to be sent to the child process, or
-   ``None``, if no data should be sent to the child.
+   *input* argument should be data to be sent to the child process, or
+   ``None``, if no data should be sent to the child.  The type of *input*
+   must be bytes or, if *universal_newlines* was ``True``, a string.
 
    :meth:`communicate` returns a tuple ``(stdoutdata, stderrdata)``.
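+
+   For example, a sketch using the Unix ``tr`` utility::
+
+      >>> from subprocess import Popen, PIPE
+      >>> p = Popen(['tr', 'a-z', 'A-Z'], stdin=PIPE, stdout=PIPE)
+      >>> out, err = p.communicate(input=b'hello')
+      >>> out
+      b'HELLO'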
 
diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
index 2bcf958..dfc2412 100644
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -805,7 +805,7 @@
    Python.
 
    The highest possible limit is platform-dependent.  A user may need to set the
-   limit higher when she has a program that requires deep recursion and a platform
+   limit higher when they have a program that requires deep recursion and a platform
    that supports a higher limit.  This should be done with care, because a too-high
    limit can lead to a crash.
 
diff --git a/Doc/library/test.rst b/Doc/library/test.rst
index 9d02b0a..5656b23 100644
--- a/Doc/library/test.rst
+++ b/Doc/library/test.rst
@@ -218,14 +218,14 @@
 
 .. data:: verbose
 
-   :const:`True` when verbose output is enabled. Should be checked when more
+   ``True`` when verbose output is enabled. Should be checked when more
    detailed information is desired about a running test. *verbose* is set by
    :mod:`test.regrtest`.
 
 
 .. data:: is_jython
 
-   :const:`True` if the running interpreter is Jython.
+   ``True`` if the running interpreter is Jython.
 
 
 .. data:: TESTFN
@@ -233,6 +233,7 @@
    Set to a name that is safe to use as the name of a temporary file.  Any
    temporary file that is created should be closed and unlinked (removed).
 
+
 The :mod:`test.support` module defines the following functions:
 
 
@@ -244,7 +245,7 @@
 
 .. function:: is_resource_enabled(resource)
 
-   Return :const:`True` if *resource* is enabled and available. The list of
+   Return ``True`` if *resource* is enabled and available. The list of
    available resources is only set when :mod:`test.regrtest` is executing the
    tests.
 
@@ -253,7 +254,7 @@
 
    Raise :exc:`ResourceDenied` if *resource* is not available. *msg* is the
    argument to :exc:`ResourceDenied` if it is raised. Always returns
-   :const:`True` if called by a function whose ``__name__`` is ``'__main__'``.
+   ``True`` if called by a function whose ``__name__`` is ``'__main__'``.
    Used when tests are executed by :mod:`test.regrtest`.
 
 
@@ -281,6 +282,15 @@
    This will run all tests defined in the named module.
 
 
+.. function:: run_doctest(module, verbosity=None)
+
+   Run :func:`doctest.testmod` on the given *module*.  Return
+   ``(failure_count, test_count)``.
+
+   If *verbosity* is ``None``, :func:`doctest.testmod` is run with verbosity
+   set to :data:`verbose`.  Otherwise, it is run with verbosity set to
+   ``None``.
+
 .. function:: check_warnings(\*filters, quiet=True)
 
    A convenience wrapper for :func:`warnings.catch_warnings()` that makes it
@@ -291,12 +301,12 @@
 
    ``check_warnings`` accepts 2-tuples of the form ``("message regexp",
    WarningCategory)`` as positional arguments. If one or more *filters* are
-   provided, or if the optional keyword argument *quiet* is :const:`False`,
+   provided, or if the optional keyword argument *quiet* is ``False``,
    it checks to make sure the warnings are as expected:  each specified filter
    must match at least one of the warnings raised by the enclosed code or the
    test fails, and if any warnings are raised that do not match any of the
    specified filters the test fails.  To disable the first of these checks,
-   set *quiet* to :const:`True`.
+   set *quiet* to ``True``.
 
    If no arguments are specified, it defaults to::
 
@@ -311,7 +321,7 @@
    representing the most recent warning can also be accessed directly through
    the recorder object (see example below).  If no warning has been raised,
    then any of the attributes that would otherwise be expected on an object
-   representing a warning will return :const:`None`.
+   representing a warning will return ``None``.
 
    The recorder object also has a :meth:`reset` method, which clears the
    warnings list.
@@ -349,7 +359,7 @@
 
 .. function:: captured_stdout()
 
-   This is a context manager that runs the :keyword:`with` statement body using
+   A context manager that runs the :keyword:`with` statement body using
    a :class:`StringIO.StringIO` object as sys.stdout.  That object can be
    retrieved using the ``as`` clause of the :keyword:`with` statement.
 
@@ -360,6 +370,50 @@
       assert s.getvalue() == "hello"
 
 
+.. function:: temp_cwd(name='tempcwd', quiet=False, path=None)
+
+   A context manager that temporarily changes the current working
+   directory (CWD).
+
+   An existing path may be provided as *path*, in which case this function
+   makes no changes to the file system.
+
+   Otherwise, the new CWD is created in the current directory and named
+   *name*.  If *quiet* is ``False`` and it's not possible to create or
+   change the CWD, an error is raised.  If it's ``True``, only a warning
+   is raised and the original CWD is used.
+
+
+.. function:: temp_umask(umask)
+
+   A context manager that temporarily sets the process umask.
+
+
+.. function:: can_symlink()
+
+   Return ``True`` if the OS supports symbolic links, ``False``
+   otherwise.
+
+
+.. function:: skip_unless_symlink()
+
+   A decorator for running tests that require support for symbolic links.
+
+
+.. function:: run_with_locale(catstr, *locales)
+
+   A decorator for running a function in a different locale, correctly
+   resetting it after it has finished.  *catstr* is the locale category as
+   a string (for example ``"LC_ALL"``).  The *locales* passed will be tried
+   sequentially, and the first valid locale will be used.
+
+
+.. function:: make_bad_fd()
+
+   Create an invalid file descriptor by opening and closing a temporary file,
+   and returning its descriptor.
+
+
 .. function:: import_module(name, deprecated=False)
 
    This function imports and returns the named module. Unlike a normal
@@ -367,7 +421,7 @@
    cannot be imported.
 
    Module and package deprecation messages are suppressed during this import
-   if *deprecated* is :const:`True`.
+   if *deprecated* is ``True``.
 
    .. versionadded:: 3.1
 
@@ -391,7 +445,7 @@
    ``sys.modules`` when the fresh import is complete.
 
    Module and package deprecation messages are suppressed during this import
-   if *deprecated* is :const:`True`.
+   if *deprecated* is ``True``.
 
    This function will raise :exc:`unittest.SkipTest` if the named module
    cannot be imported.
@@ -408,6 +462,48 @@
    .. versionadded:: 3.1
 
 
+.. function:: bind_port(sock, host=HOST)
+
+   Bind the socket to a free port and return the port number.  Relies on
+   ephemeral ports in order to ensure we are using an unbound port.  This is
+   important as many tests may be running simultaneously, especially in a
+   buildbot environment.  This method raises an exception if the
+   ``sock.family`` is :const:`~socket.AF_INET` and ``sock.type`` is
+   :const:`~socket.SOCK_STREAM`, and the socket has
+   :const:`~socket.SO_REUSEADDR` or :const:`~socket.SO_REUSEPORT` set on it.
+   Tests should never set these socket options for TCP/IP sockets.
+   The only case for setting these options is testing multicasting via
+   multiple UDP sockets.
+
+   Additionally, if the :const:`~socket.SO_EXCLUSIVEADDRUSE` socket option is
+   available (i.e. on Windows), it will be set on the socket.  This will
+   prevent anyone else from binding to our host/port for the duration of the
+   test.
+
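A minimal sketch of ``bind_port``::

    import socket
    from test.support import bind_port

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    port = bind_port(sock)   # the OS picks a free ephemeral port
    sock.listen(1)
    # ... connect a client to the test host on `port` ...
    sock.close()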
+
+.. function:: find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM)
+
+   Returns an unused port that should be suitable for binding.  This is
+   achieved by creating a temporary socket with the given *family* and
+   *socktype* (defaulting to :const:`~socket.AF_INET` and
+   :const:`~socket.SOCK_STREAM`) and binding it to the specified host address
+   (defaults to ``0.0.0.0``) with the port set to 0, eliciting an unused
+   ephemeral port from the OS.  The temporary socket is then closed and
+   deleted, and the ephemeral port is returned.
+
+   Either this method or :func:`bind_port` should be used for any tests
+   where a server socket needs to be bound to a particular port for the
+   duration of the test.
+   Which one to use depends on whether the calling code is creating a Python
+   socket, or whether an unused port needs to be provided in a constructor
+   or passed to an external program (e.g. the ``-accept`` argument to
+   openssl's s_server mode).  Always prefer :func:`bind_port` over
+   :func:`find_unused_port` where possible.  Using a hard-coded port is
+   discouraged since it can make multiple instances of the test impossible to
+   run simultaneously, which is a problem for buildbots.
+
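A sketch of the external-program case that ``find_unused_port`` is meant
for (the openssl invocation is illustrative only)::

    from test.support import find_unused_port

    port = find_unused_port()
    # hand the number to a process we cannot bind a socket for, e.g.
    #   openssl s_server -accept <port> ...
    # note the race window before the external program binds the port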
+
 The :mod:`test.support` module defines the following classes:
 
 .. class:: TransientResource(exc, **kwargs)
diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst
index a623f3c..191d5b9 100644
--- a/Doc/library/unittest.rst
+++ b/Doc/library/unittest.rst
@@ -860,10 +860,11 @@
    | <TestCase.assertNotIsInstance>`         |                             |               |
    +-----------------------------------------+-----------------------------+---------------+
 
-   All the assert methods (except :meth:`assertRaises`,
-   :meth:`assertRaisesRegex`, :meth:`assertWarns`, :meth:`assertWarnsRegex`)
-   accept a *msg* argument that, if specified, is used as the error message on
-   failure (see also :data:`longMessage`).
+   All the assert methods accept a *msg* argument that, if specified, is used
+   as the error message on failure (see also :data:`longMessage`).
+   Note that the *msg* keyword argument can be passed to :meth:`assertRaises`,
+   :meth:`assertRaisesRegex`, :meth:`assertWarns`, :meth:`assertWarnsRegex`
+   only when they are used as a context manager.
 
    .. method:: assertEqual(first, second, msg=None)
 
@@ -957,7 +958,7 @@
    +---------------------------------------------------------+--------------------------------------+------------+
 
    .. method:: assertRaises(exception, callable, *args, **kwds)
-               assertRaises(exception)
+               assertRaises(exception, msg=None)
 
       Test that an exception is raised when *callable* is called with any
       positional or keyword arguments that are also passed to
@@ -966,12 +967,16 @@
       To catch any of a group of exceptions, a tuple containing the exception
       classes may be passed as *exception*.
 
-      If only the *exception* argument is given, returns a context manager so
-      that the code under test can be written inline rather than as a function::
+      If only the *exception* and possibly the *msg* arguments are given,
+      returns a context manager so that the code under test can be written
+      inline rather than as a function::
 
          with self.assertRaises(SomeException):
              do_something()
 
+      When used as a context manager, :meth:`assertRaises` accepts the
+      additional keyword argument *msg*.
+
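For example (a sketch to be run inside a test method)::

    with self.assertRaises(ZeroDivisionError, msg='division did not fail'):
        1 / 0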
       The context manager will store the caught exception object in its
       :attr:`exception` attribute.  This can be useful if the intention
       is to perform additional checks on the exception raised::
@@ -988,9 +993,12 @@
       .. versionchanged:: 3.2
          Added the :attr:`exception` attribute.
 
+      .. versionchanged:: 3.3
+         Added the *msg* keyword argument when used as a context manager.
+
 
    .. method:: assertRaisesRegex(exception, regex, callable, *args, **kwds)
-               assertRaisesRegex(exception, regex)
+               assertRaisesRegex(exception, regex, msg=None)
 
       Like :meth:`assertRaises` but also tests that *regex* matches
       on the string representation of the raised exception.  *regex* may be
@@ -1007,12 +1015,16 @@
 
       .. versionadded:: 3.1
          under the name ``assertRaisesRegexp``.
+
       .. versionchanged:: 3.2
          Renamed to :meth:`assertRaisesRegex`.
 
+      .. versionchanged:: 3.3
+         Added the *msg* keyword argument when used as a context manager.
+
 
    .. method:: assertWarns(warning, callable, *args, **kwds)
-               assertWarns(warning)
+               assertWarns(warning, msg=None)
 
       Test that a warning is triggered when *callable* is called with any
       positional or keyword arguments that are also passed to
@@ -1021,12 +1033,16 @@
       To catch any of a group of warnings, a tuple containing the warning
       classes may be passed as *warnings*.
 
-      If only the *warning* argument is given, returns a context manager so
-      that the code under test can be written inline rather than as a function::
+      If only the *warning* and possibly the *msg* arguments are given,
+      returns a context manager so that the code under test can be written
+      inline rather than as a function::
 
          with self.assertWarns(SomeWarning):
              do_something()
 
+      When used as a context manager, :meth:`assertWarns` accepts the
+      additional keyword argument *msg*.
+
       The context manager will store the caught warning object in its
       :attr:`warning` attribute, and the source line which triggered the
       warnings in the :attr:`filename` and :attr:`lineno` attributes.
@@ -1044,9 +1060,12 @@
 
       .. versionadded:: 3.2
 
+      .. versionchanged:: 3.3
+         Added the *msg* keyword argument when used as a context manager.
+
 
    .. method:: assertWarnsRegex(warning, regex, callable, *args, **kwds)
-               assertWarnsRegex(warning, regex)
+               assertWarnsRegex(warning, regex, msg=None)
 
       Like :meth:`assertWarns` but also tests that *regex* matches on the
       message of the triggered warning.  *regex* may be a regular expression
@@ -1064,6 +1083,8 @@
 
       .. versionadded:: 3.2
 
+      .. versionchanged:: 3.3
+         Added the *msg* keyword argument when used as a context manager.
 
 
    There are also other methods used to perform more specific checks, such as:
diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst
index 3e24956..3c02333 100644
--- a/Doc/library/urllib.request.rst
+++ b/Doc/library/urllib.request.rst
@@ -240,10 +240,11 @@
 
 .. class:: HTTPBasicAuthHandler(password_mgr=None)
 
-   Handle authentication with the remote host. *password_mgr*, if given, should be
-   something that is compatible with :class:`HTTPPasswordMgr`; refer to section
-   :ref:`http-password-mgr` for information on the interface that must be
-   supported.
+   Handle authentication with the remote host. *password_mgr*, if given, should
+   be something that is compatible with :class:`HTTPPasswordMgr`; refer to
+   section :ref:`http-password-mgr` for information on the interface that must
+   be supported. HTTPBasicAuthHandler will raise a :exc:`ValueError` when
+   presented with an unsupported authentication scheme.
 
 
 .. class:: ProxyBasicAuthHandler(password_mgr=None)
@@ -265,10 +266,19 @@
 
 .. class:: HTTPDigestAuthHandler(password_mgr=None)
 
-   Handle authentication with the remote host. *password_mgr*, if given, should be
-   something that is compatible with :class:`HTTPPasswordMgr`; refer to section
-   :ref:`http-password-mgr` for information on the interface that must be
-   supported.
+   Handle authentication with the remote host. *password_mgr*, if given, should
+   be something that is compatible with :class:`HTTPPasswordMgr`; refer to
+   section :ref:`http-password-mgr` for information on the interface that must
+   be supported. When both a digest authentication handler and a basic
+   authentication handler are added, digest authentication is always tried
+   first. If the server answers the digest authentication attempt with another
+   40x response, the request is passed on to the basic authentication handler.
+   This handler will raise a :exc:`ValueError` when presented with an
+   authentication scheme other than Digest or Basic.
+
+   .. versionchanged:: 3.3
+      Raise :exc:`ValueError` on an unsupported authentication scheme.
+
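   A sketch of installing both handlers (the URL and credentials are
   placeholders)::

      import urllib.request

      password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
      password_mgr.add_password(None, 'http://example.com/', 'user', 'secret')
      opener = urllib.request.build_opener(
          urllib.request.HTTPDigestAuthHandler(password_mgr),
          urllib.request.HTTPBasicAuthHandler(password_mgr))
      urllib.request.install_opener(opener)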
 
 
 .. class:: ProxyDigestAuthHandler(password_mgr=None)
diff --git a/Doc/library/wsgiref.rst b/Doc/library/wsgiref.rst
index 3969ea4..1fd3451 100644
--- a/Doc/library/wsgiref.rst
+++ b/Doc/library/wsgiref.rst
@@ -122,8 +122,8 @@
       def simple_app(environ, start_response):
           setup_testing_defaults(environ)
 
-          status = b'200 OK'
-          headers = [(b'Content-type', b'text/plain; charset=utf-8')]
+          status = '200 OK'
+          headers = [('Content-type', 'text/plain; charset=utf-8')]
 
           start_response(status, headers)
 
@@ -414,8 +414,8 @@
       # Our callable object which is intentionally not compliant to the
       # standard, so the validator is going to break
       def simple_app(environ, start_response):
-          status = b'200 OK' # HTTP Status
-          headers = [(b'Content-type', b'text/plain')] # HTTP Headers
+          status = '200 OK' # HTTP Status
+          headers = [('Content-type', 'text/plain')] # HTTP Headers
           start_response(status, headers)
 
           # This is going to break because we need to return a list, and
@@ -754,8 +754,8 @@
    # is a dictionary containing CGI-style environment variables and the
    # second variable is the callable object (see PEP 333).
    def hello_world_app(environ, start_response):
-       status = b'200 OK' # HTTP Status
-       headers = [(b'Content-type', b'text/plain; charset=utf-8')] # HTTP Headers
+       status = '200 OK' # HTTP Status
+       headers = [('Content-type', 'text/plain; charset=utf-8')] # HTTP Headers
        start_response(status, headers)
 
        # The returned object is going to be printed
diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst
index 070967b..ab5476d 100644
--- a/Doc/library/xml.dom.minidom.rst
+++ b/Doc/library/xml.dom.minidom.rst
@@ -60,12 +60,8 @@
 You can also create a :class:`Document` by calling a method on a "DOM
 Implementation" object.  You can get this object either by calling the
 :func:`getDOMImplementation` function in the :mod:`xml.dom` package or the
-:mod:`xml.dom.minidom` module. Using the implementation from the
-:mod:`xml.dom.minidom` module will always return a :class:`Document` instance
-from the minidom implementation, while the version from :mod:`xml.dom` may
-provide an alternate implementation (this is likely if you have the `PyXML
-package <http://pyxml.sourceforge.net/>`_ installed).  Once you have a
-:class:`Document`, you can add child nodes to it to populate the DOM::
+:mod:`xml.dom.minidom` module.  Once you have a :class:`Document`, you
+can add child nodes to it to populate the DOM::
 
    from xml.dom.minidom import getDOMImplementation
 
diff --git a/Doc/library/xml.dom.rst b/Doc/library/xml.dom.rst
index c1786a6..297fc0c 100644
--- a/Doc/library/xml.dom.rst
+++ b/Doc/library/xml.dom.rst
@@ -30,13 +30,6 @@
 their terminology.  The Python mapping of the API is substantially based on the
 DOM Level 2 recommendation.
 
-.. XXX PyXML is dead...
-.. The mapping of the Level 3 specification, currently
-   only available in draft form, is being developed by the `Python XML Special
-   Interest Group <http://www.python.org/sigs/xml-sig/>`_ as part of the `PyXML
-   package <http://pyxml.sourceforge.net/>`_.  Refer to the documentation bundled
-   with that package for information on the current state of DOM Level 3 support.
-
 .. What if your needs are somewhere between SAX and the DOM?  Perhaps
    you cannot afford to load the entire tree in memory but you find the
    SAX model somewhat cumbersome and low-level.  There is also a module
diff --git a/Doc/tools/sphinxext/susp-ignored.csv b/Doc/tools/sphinxext/susp-ignored.csv
index ef98091..6dde323 100644
--- a/Doc/tools/sphinxext/susp-ignored.csv
+++ b/Doc/tools/sphinxext/susp-ignored.csv
@@ -386,3 +386,112 @@
 whatsnew/3.2,,:affe,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]',"
 whatsnew/3.2,,:deaf,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]',"
 whatsnew/3.2,,:feed,"netloc='[dead:beef:cafe:5417:affe:8FA3:deaf:feed]',"
+documenting/markup,33,.. sectionauthor:,.. sectionauthor:: Guido van Rossum <guido@python.org>
+documenting/markup,42,:mod,:mod:`parrot` -- Dead parrot access
+documenting/markup,42,`,:mod:`parrot` -- Dead parrot access
+documenting/markup,42,.. module:,.. module:: parrot
+documenting/markup,42,:platform,":platform: Unix, Windows"
+documenting/markup,42,:synopsis,:synopsis: Analyze and reanimate dead parrots.
+documenting/markup,42,.. moduleauthor:,.. moduleauthor:: Eric Cleese <eric@python.invalid>
+documenting/markup,42,.. moduleauthor:,.. moduleauthor:: John Idle <john@python.invalid>
+documenting/markup,88,:noindex,:noindex:
+documenting/markup,95,.. function:,.. function:: spam(eggs)
+documenting/markup,95,:noindex,:noindex:
+documenting/markup,101,.. method:,.. method:: FileInput.input(...)
+documenting/markup,121,:function,c:function
+documenting/markup,121,.. c:,".. c:function:: PyObject* PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems)"
+documenting/markup,121,::,".. c:function:: PyObject* PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems)"
+documenting/markup,131,:member,c:member
+documenting/markup,131,.. c:,.. c:member:: PyObject* PyTypeObject.tp_bases
+documenting/markup,131,::,.. c:member:: PyObject* PyTypeObject.tp_bases
+documenting/markup,139,:macro,c:macro
+documenting/markup,143,:type,c:type
+documenting/markup,150,:var,c:var
+documenting/markup,150,.. cvar:,.. cvar:: PyObject* PyClass_Type
+documenting/markup,179,.. function:,".. function:: repeat([repeat=3[, number=1000000]])"
+documenting/markup,210,.. decorator:,.. decorator:: removename
+documenting/markup,210,.. decorator:,.. decorator:: setnewname(name)
+documenting/markup,210,:func,:func:
+documenting/markup,237,.. class:,.. class:: Spam
+documenting/markup,237,.. data:,.. data:: ham
+documenting/markup,237,.. data:,.. data:: Spam.eggs
+documenting/markup,250,:meth,:meth:
+documenting/markup,263,.. cmdoption:,.. cmdoption:: -m <module>
+documenting/markup,281,.. describe:,.. describe:: opcode
+documenting/markup,310,.. highlightlang:,.. highlightlang:: c
+documenting/markup,330,.. literalinclude:,.. literalinclude:: example.py
+documenting/markup,345,:rolename,:rolename:`content`
+documenting/markup,345,`,:rolename:`content`
+documenting/markup,350,:role,:role:`title <target>`
+documenting/markup,350,`,:role:`title <target>`
+documenting/markup,356,:meth,:meth:`~Queue.Queue.get`
+documenting/markup,356,`,:meth:`~Queue.Queue.get`
+documenting/markup,404,:func,:func:`filter`
+documenting/markup,404,`,:func:`filter`
+documenting/markup,404,:func,:func:`foo.filter`
+documenting/markup,404,`,:func:`foo.filter`
+documenting/markup,410,:func,:func:`open`
+documenting/markup,410,`,:func:`open`
+documenting/markup,410,:func,:func:`.open`
+documenting/markup,410,`,:func:`.open`
+documenting/markup,426,:data,c:data
+documenting/markup,430,:func,c:func
+documenting/markup,434,:macro,c:macro
+documenting/markup,438,:type,c:type
+documenting/markup,443,:member,c:member
+documenting/markup,493,:file,... is installed in :file:`/usr/lib/python2.{x}/site-packages` ...
+documenting/markup,493,`,... is installed in :file:`/usr/lib/python2.{x}/site-packages` ...
+documenting/markup,512,:kbd,:kbd:`C-x C-f`
+documenting/markup,512,`,:kbd:`C-x C-f`
+documenting/markup,512,:kbd,:kbd:`Control-x Control-f`
+documenting/markup,512,`,:kbd:`Control-x Control-f`
+documenting/markup,526,:mailheader,:mailheader:`Content-Type`
+documenting/markup,526,`,:mailheader:`Content-Type`
+documenting/markup,535,:manpage,:manpage:`ls(1)`
+documenting/markup,535,`,:manpage:`ls(1)`
+documenting/markup,551,:menuselection,:menuselection:`Start --> Programs`
+documenting/markup,551,`,:menuselection:`Start --> Programs`
+documenting/markup,566,`,``code``
+documenting/markup,585,:file,:file:
+documenting/markup,585,`,``code``
+documenting/markup,615,:ref,:ref:`label-name`
+documenting/markup,615,`,:ref:`label-name`
+documenting/markup,619,:ref,"It refers to the section itself, see :ref:`my-reference-label`."
+documenting/markup,619,`,"It refers to the section itself, see :ref:`my-reference-label`."
+documenting/markup,628,:ref,:ref:
+documenting/markup,630,:ref,:ref:`link text <reference-label>`
+documenting/markup,630,`,:ref:`link text <reference-label>`
+documenting/markup,651,.. note:,.. note::
+documenting/markup,678,.. versionadded:,.. versionadded:: 3.1
+documenting/markup,703,::,.. impl-detail::
+documenting/markup,703,::,.. impl-detail:: This shortly mentions an implementation detail.
+documenting/markup,723,.. seealso:,.. seealso::
+documenting/markup,723,:mod,Module :mod:`zipfile`
+documenting/markup,723,`,Module :mod:`zipfile`
+documenting/markup,723,:mod,Documentation of the :mod:`zipfile` standard module.
+documenting/markup,723,`,Documentation of the :mod:`zipfile` standard module.
+documenting/markup,723,`,"`GNU tar manual, Basic Tar Format <http://link>`_"
+documenting/markup,737,.. centered:,.. centered::
+documenting/markup,782,.. toctree:,.. toctree::
+documenting/markup,782,:maxdepth,:maxdepth: 2
+documenting/markup,798,.. index:,.. index::
+documenting/markup,828,.. index:,".. index:: BNF, grammar, syntax, notation"
+documenting/markup,859,`,"unaryneg ::= ""-"" `integer`"
+documenting/markup,864,.. productionlist:,.. productionlist::
+documenting/markup,864,`,"try1_stmt: ""try"" "":"" `suite`"
+documenting/markup,864,`,": (""except"" [`expression` ["","" `target`]] "":"" `suite`)+"
+documenting/markup,864,`,": [""else"" "":"" `suite`]"
+documenting/markup,864,`,": [""finally"" "":"" `suite`]"
+documenting/markup,864,`,"try2_stmt: ""try"" "":"" `suite`"
+documenting/markup,864,`,": ""finally"" "":"" `suite`"
+library/pprint,209,::,"'classifiers': ['Development Status :: 4 - Beta',"
+library/pprint,209,::,"'Intended Audience :: Developers',"
+library/pprint,209,::,"'License :: OSI Approved :: MIT License',"
+library/pprint,209,::,"'Natural Language :: English',"
+library/pprint,209,::,"'Operating System :: OS Independent',"
+library/pprint,209,::,"'Programming Language :: Python',"
+library/pprint,209,::,"'Programming Language :: Python :: 2',"
+library/pprint,209,::,"'Programming Language :: Python :: 2.6',"
+library/pprint,209,::,"'Programming Language :: Python :: 2.7',"
+library/pprint,209,::,"'Topic :: Software Development :: Libraries',"
+library/pprint,209,::,"'Topic :: Software Development :: Libraries :: Python Modules'],"
diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst
index 0ab4fc8..529665f 100644
--- a/Doc/whatsnew/3.3.rst
+++ b/Doc/whatsnew/3.3.rst
@@ -76,6 +76,33 @@
  * :envvar:`PYTHONFAULTHANDLER`
  * :option:`-X` ``faulthandler``
 
+
+math
+----
+
+The :mod:`math` module has a new function:
+
+  * :func:`~math.log2`: return the base-2 logarithm of *x*
+    (written by Mark Dickinson in :issue:`11888`).
+
+
+nntplib
+-------
+
+The :class:`nntplib.NNTP` class now supports the context manager protocol to
+unconditionally consume :exc:`socket.error` exceptions and to close the NNTP
+connection when done::
+
+  >>> from nntplib import NNTP
+  >>> with NNTP('news.gmane.org') as n:
+  ...     n.group('gmane.comp.python.committers')
+  ...
+  ('211 1454 1 1454 gmane.comp.python.committers', '1454', '1', '1454', 'gmane.comp.python.committers')
+  >>>
+
+(Contributed by Giampaolo Rodolà in :issue:`9795`)
+
+
 os
 --
 
@@ -96,21 +123,6 @@
 
   (Patch submitted by Giampaolo Rodolà in :issue:`10784`.)
 
-nntplib
--------
-
-The :class:`nntplib.NNTP` class now supports the context manager protocol to
-unconditionally consume :exc:`socket.error` exceptions and to close the NNTP
-connection when done::
-
-  >>> from nntplib import NNTP
-  >>> with nntplib.NNTP('news.gmane.org') as n:
-  ...     n.group('gmane.comp.python.committers')
-  ...
-  ('211 1454 1 1454 gmane.comp.python.committers', '1454', '1', '1454', 'gmane.comp.python.committers')
-  >>>
-
-(Contributed by Giampaolo Rodolà in :issue:`9795`)
 
 sys
 ---
@@ -120,13 +132,34 @@
 
   (:issue:`11223`)
 
+
 signal
 ------
 
-* The :mod:`signal` module has a new :func:`~signal.pthread_sigmask` function
-  to fetch and/or change the signal mask of the calling thread.
+* The :mod:`signal` module has new functions:
 
-  (Contributed by Jean-Paul Calderone in :issue:`8407`)
+  * :func:`~signal.pthread_sigmask`: fetch and/or change the signal mask of the
+    calling thread (contributed by Jean-Paul Calderone in :issue:`8407`);
+  * :func:`~signal.pthread_kill`: send a signal to a thread;
+  * :func:`~signal.sigpending`: examine pending signals;
+  * :func:`~signal.sigwait`: wait for a signal.
+
+* The signal handler writes the signal number as a single byte, instead of
+  a nul byte, into the wakeup file descriptor, so it is possible to wait for
+  more than one signal and know which signals were raised.
+
+* :func:`signal.signal` and :func:`signal.siginterrupt` raise an
+  :exc:`OSError` instead of a :exc:`RuntimeError`: :exc:`OSError` has an
+  ``errno`` attribute.
+
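A sketch of the new thread-mask function (POSIX only)::

    import signal

    old_mask = signal.pthread_sigmask(signal.SIG_BLOCK, {signal.SIGINT})
    try:
        pass  # section that must not be interrupted by SIGINT
    finally:
        signal.pthread_sigmask(signal.SIG_SETMASK, old_mask)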
+
+ssl
+---
+
+The :mod:`ssl` module has new functions:
+
+  * :func:`~ssl.RAND_bytes`: generate cryptographically strong
+    pseudo-random bytes.
+  * :func:`~ssl.RAND_pseudo_bytes`: generate pseudo-random bytes.
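
A short sketch of both functions (:func:`~ssl.RAND_pseudo_bytes` also
reports whether the bytes are cryptographically strong)::

    import ssl

    strong = ssl.RAND_bytes(16)
    weak, is_strong = ssl.RAND_pseudo_bytes(16)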
 
 
 Optimizations
diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py
index 53e3dd7..4b447ac 100644
--- a/Lib/collections/__init__.py
+++ b/Lib/collections/__init__.py
@@ -887,7 +887,7 @@
     def pop(self, i=-1): return self.data.pop(i)
     def remove(self, item): self.data.remove(item)
     def clear(self): self.data.clear()
-    def copy(self): return self.data.copy()
+    def copy(self): return self.__class__(self)
     def count(self, item): return self.data.count(item)
     def index(self, item, *args): return self.data.index(item, *args)
     def reverse(self): self.data.reverse()
diff --git a/Lib/decimal.py b/Lib/decimal.py
index f5277c5..1b11b10 100644
--- a/Lib/decimal.py
+++ b/Lib/decimal.py
@@ -1871,6 +1871,7 @@
         """
 
         other = _convert_other(other, raiseit=True)
+        third = _convert_other(third, raiseit=True)
 
         # compute product; raise InvalidOperation if either operand is
         # a signaling NaN or if the product is zero times infinity.
@@ -1900,7 +1901,6 @@
                                        str(int(self._int) * int(other._int)),
                                        self._exp + other._exp)
 
-        third = _convert_other(third, raiseit=True)
         return product.__add__(third, context)
 
     def _power_modulo(self, other, modulo, context=None):
diff --git a/Lib/distutils/command/build_scripts.py b/Lib/distutils/command/build_scripts.py
index 8b08bfe..31be793 100644
--- a/Lib/distutils/command/build_scripts.py
+++ b/Lib/distutils/command/build_scripts.py
@@ -11,9 +11,10 @@
 from distutils.dep_util import newer
 from distutils.util import convert_path, Mixin2to3
 from distutils import log
+import tokenize
 
 # check if Python is called on the first line with this expression
-first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
+first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$')
 
 class build_scripts(Command):
 
@@ -74,12 +75,14 @@
             # that way, we'll get accurate feedback if we can read the
             # script.
             try:
-                f = open(script, "r")
+                f = open(script, "rb")
             except IOError:
                 if not self.dry_run:
                     raise
                 f = None
             else:
+                encoding, lines = tokenize.detect_encoding(f.readline)
+                f.seek(0)
                 first_line = f.readline()
                 if not first_line:
                     self.warn("%s is an empty file (skipping)" % script)
@@ -88,27 +91,46 @@
                 match = first_line_re.match(first_line)
                 if match:
                     adjust = True
-                    post_interp = match.group(1) or ''
+                    post_interp = match.group(1) or b''
 
             if adjust:
                 log.info("copying and adjusting %s -> %s", script,
                          self.build_dir)
                 updated_files.append(outfile)
                 if not self.dry_run:
-                    outf = open(outfile, "w")
                     if not sysconfig.python_build:
-                        outf.write("#!%s%s\n" %
-                                   (self.executable,
-                                    post_interp))
+                        executable = self.executable
                     else:
-                        outf.write("#!%s%s\n" %
-                                   (os.path.join(
+                        executable = os.path.join(
                             sysconfig.get_config_var("BINDIR"),
                            "python%s%s" % (sysconfig.get_config_var("VERSION"),
-                                           sysconfig.get_config_var("EXE"))),
-                                    post_interp))
-                    outf.writelines(f.readlines())
-                    outf.close()
+                                           sysconfig.get_config_var("EXE")))
+                    executable = os.fsencode(executable)
+                    shebang = b"#!" + executable + post_interp + b"\n"
+                    # The Python parser starts reading a script using UTF-8
+                    # until it finds a #coding:xxx cookie. The shebang has to
+                    # be the first line of the file, so the #coding:xxx
+                    # cookie cannot come before it. Hence the shebang has to
+                    # be decodable from UTF-8.
+                    try:
+                        shebang.decode('utf-8')
+                    except UnicodeDecodeError:
+                        raise ValueError(
+                            "The shebang ({!r}) is not decodable "
+                            "from utf-8".format(shebang))
+                    # If the script is encoded to a custom encoding (use a
+                    # #coding:xxx cookie), the shebang has to be decodable from
+                    # the script encoding too.
+                    try:
+                        shebang.decode(encoding)
+                    except UnicodeDecodeError:
+                        raise ValueError(
+                            "The shebang ({!r}) is not decodable "
+                            "from the script encoding ({})"
+                            .format(shebang, encoding))
+                    with open(outfile, "wb") as outf:
+                        outf.write(shebang)
+                        outf.writelines(f.readlines())
                 if f:
                     f.close()
             else:
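
A condensed sketch of the two decodability checks above, assuming ``script``
is the input path and ``shebang`` the bytes about to be written::

    import tokenize

    with open(script, 'rb') as f:
        encoding, _ = tokenize.detect_encoding(f.readline)
    shebang.decode('utf-8')    # must be valid UTF-8 for the parser
    shebang.decode(encoding)   # and valid in the script's own encoding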
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 897b7d6..06bbc01 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -428,7 +428,7 @@
         cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
         if cur_target == '':
             cur_target = cfg_target
-            os.putenv('MACOSX_DEPLOYMENT_TARGET', cfg_target)
+            os.environ['MACOSX_DEPLOYMENT_TARGET'] = cfg_target
         elif [int(x) for x in cfg_target.split('.')] > [int(x) for x in cur_target.split('.')]:
             my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: now "%s" but "%s" during configure'
                 % (cur_target, cfg_target))
diff --git a/Lib/distutils/tests/test_build_ext.py b/Lib/distutils/tests/test_build_ext.py
index dcba75f..a5b9700 100644
--- a/Lib/distutils/tests/test_build_ext.py
+++ b/Lib/distutils/tests/test_build_ext.py
@@ -2,6 +2,7 @@
 import os
 import shutil
 from io import StringIO
+import textwrap
 
 from distutils.core import Distribution
 from distutils.command.build_ext import build_ext
@@ -34,7 +35,9 @@
         self.tmp_dir = self.mkdtemp()
         self.sys_path = sys.path, sys.path[:]
         sys.path.append(self.tmp_dir)
-        shutil.copy(_get_source_filename(), self.tmp_dir)
+        filename = _get_source_filename()
+        if os.path.exists(filename):
+            shutil.copy(filename, self.tmp_dir)
         if sys.version > "2.6":
             import site
             self.old_user_base = site.USER_BASE
@@ -64,6 +67,8 @@
     def test_build_ext(self):
         global ALREADY_TESTED
         xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
+        if not os.path.exists(xx_c):
+            return
         xx_ext = Extension('xx', [xx_c])
         dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
         dist.package_dir = self.tmp_dir
@@ -419,6 +424,67 @@
         wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
         self.assertEqual(wanted, path)
 
+
+    @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
+    def test_deployment_target(self):
+        self._try_compile_deployment_target()
+
+        orig_environ = os.environ
+        os.environ = orig_environ.copy()
+        self.addCleanup(setattr, os, 'environ', orig_environ)
+
+        os.environ['MACOSX_DEPLOYMENT_TARGET']='10.1'
+        self._try_compile_deployment_target()
+
+
+    def _try_compile_deployment_target(self):
+        deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')
+
+        with open(deptarget_c, 'w') as fp:
+            fp.write(textwrap.dedent('''\
+                #include <AvailabilityMacros.h>
+
+                int dummy;
+
+                #if TARGET != MAC_OS_X_VERSION_MIN_REQUIRED
+                #error "Unexpected target"
+                #endif
+
+            '''))
+
+        target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+        target = tuple(map(int, target.split('.')))
+        target = '%02d%01d0' % target
+
+        deptarget_ext = Extension(
+            'deptarget',
+            [deptarget_c],
+            extra_compile_args=['-DTARGET=%s'%(target,)],
+        )
+        dist = Distribution({
+            'name': 'deptarget',
+            'ext_modules': [deptarget_ext]
+        })
+        dist.package_dir = self.tmp_dir
+        cmd = build_ext(dist)
+        cmd.build_lib = self.tmp_dir
+        cmd.build_temp = self.tmp_dir
+
+        try:
+            old_stdout = sys.stdout
+            if not support.verbose:
+                # silence compiler output
+                sys.stdout = StringIO()
+            try:
+                cmd.ensure_finalized()
+                cmd.run()
+            finally:
+                sys.stdout = old_stdout
+
+        except CompileError:
+            self.fail("Wrong deployment target during compilation")
+
+
 def test_suite():
     src = _get_source_filename()
     if not os.path.exists(src):
diff --git a/Lib/distutils/tests/test_build_py.py b/Lib/distutils/tests/test_build_py.py
index da3232c..00a57cc 100644
--- a/Lib/distutils/tests/test_build_py.py
+++ b/Lib/distutils/tests/test_build_py.py
@@ -58,7 +58,8 @@
         pkgdest = os.path.join(destination, "pkg")
         files = os.listdir(pkgdest)
         self.assertTrue("__init__.py" in files)
-        self.assertTrue("__init__.pyc" in files)
+        if not sys.dont_write_bytecode:
+            self.assertTrue("__init__.pyc" in files)
         self.assertTrue("README.txt" in files)
 
     def test_empty_package_dir (self):
diff --git a/Lib/distutils/tests/test_util.py b/Lib/distutils/tests/test_util.py
index 8ff5ae2..1a06d4c 100644
--- a/Lib/distutils/tests/test_util.py
+++ b/Lib/distutils/tests/test_util.py
@@ -92,7 +92,7 @@
                    ('Darwin Kernel Version 8.11.1: '
                     'Wed Oct 10 18:23:28 PDT 2007; '
                     'root:xnu-792.25.20~1/RELEASE_I386'), 'i386'))
-        os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
+        get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
 
         get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
                                        '-fwrapv -O3 -Wall -Wstrict-prototypes')
@@ -105,7 +105,7 @@
             sys.maxsize = cursize
 
         # macbook with fat binaries (fat, universal or fat64)
-        os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.4'
+        get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4'
         get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot '
                                        '/Developer/SDKs/MacOSX10.4u.sdk  '
                                        '-fno-strict-aliasing -fno-common '
@@ -113,6 +113,10 @@
 
         self.assertEqual(get_platform(), 'macosx-10.4-fat')
 
+        os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.1'
+        self.assertEqual(get_platform(), 'macosx-10.4-fat')
+
+
         get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot '
                                        '/Developer/SDKs/MacOSX10.4u.sdk  '
                                        '-fno-strict-aliasing -fno-common '
@@ -147,6 +151,7 @@
 
             self.assertEqual(get_platform(), 'macosx-10.4-%s'%(arch,))
 
+
         # linux debian sarge
         os.name = 'posix'
         sys.version = ('2.3.5 (#1, Jul  4 2007, 17:28:59) '
diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py
index ce3cd6c..d6f89d6 100644
--- a/Lib/distutils/util.py
+++ b/Lib/distutils/util.py
@@ -96,9 +96,7 @@
         from distutils.sysconfig import get_config_vars
         cfgvars = get_config_vars()
 
-        macver = os.environ.get('MACOSX_DEPLOYMENT_TARGET')
-        if not macver:
-            macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
+        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
 
         if 1:
             # Always calculate the release of the running machine,
diff --git a/Lib/ftplib.py b/Lib/ftplib.py
index e836b72..d15a135 100644
--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -247,12 +247,13 @@
         This does not follow the procedure from the RFC to send Telnet
         IP and Synch; that doesn't seem to work with the servers I've
         tried.  Instead, just send the ABOR command as OOB data.'''
-        line = 'ABOR' + CRLF
+        line = b'ABOR' + B_CRLF
         if self.debugging > 1: print('*put urgent*', self.sanitize(line))
         self.sock.sendall(line, MSG_OOB)
         resp = self.getmultiline()
         if resp[:3] not in {'426', '225', '226'}:
             raise error_proto(resp)
+        return resp
 
     def sendcmd(self, cmd):
         '''Send a command and return the response.'''
@@ -426,7 +427,7 @@
         """Retrieve data in line mode.  A new port is created for you.
 
         Args:
-          cmd: A RETR, LIST, NLST, or MLSD command.
+          cmd: A RETR, LIST, or NLST command.
           callback: An optional single parameter callable that is called
                     for each line with the trailing CRLF stripped.
                     [default: print_line()]
@@ -527,6 +528,34 @@
                 cmd = cmd + (' ' + arg)
         self.retrlines(cmd, func)
 
+    def mlsd(self, path="", facts=[]):
+        '''List a directory in a standardized format by using MLSD
+        command (RFC-3659). If path is omitted the current directory
+        is assumed. "facts" is a list of strings representing the type
+        of information desired (e.g. ["type", "size", "perm"]).
+
+        Return a generator object yielding a tuple of two elements
+        for every file found in path.
+        The first element is the file name, the second one is a dictionary
+        containing a variable number of "facts" depending on the server
+        and whether the "facts" argument has been provided.
+        '''
+        if facts:
+            self.sendcmd("OPTS MLST " + ";".join(facts) + ";")
+        if path:
+            cmd = "MLSD %s" % path
+        else:
+            cmd = "MLSD"
+        lines = []
+        self.retrlines(cmd, lines.append)
+        for line in lines:
+            facts_found, _, name = line.rstrip(CRLF).partition(' ')
+            entry = {}
+            for fact in facts_found[:-1].split(";"):
+                key, _, value = fact.partition("=")
+                entry[key.lower()] = value
+            yield (name, entry)
+
     def rename(self, fromname, toname):
         '''Rename a file.'''
         resp = self.sendcmd('RNFR ' + fromname)
@@ -788,6 +817,15 @@
                 conn.close()
             return self.voidresp()
 
+        def abort(self):
+            # overridden as we can't pass MSG_OOB flag to sendall()
+            line = b'ABOR' + B_CRLF
+            self.sock.sendall(line)
+            resp = self.getmultiline()
+            if resp[:3] not in {'426', '225', '226'}:
+                raise error_proto(resp)
+            return resp
+
     __all__.append('FTP_TLS')
     all_errors = (Error, IOError, EOFError, ssl.SSLError)
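
A usage sketch of the new ``mlsd()`` method (host and path are placeholders;
the server must support RFC 3659)::

    from ftplib import FTP

    ftp = FTP('ftp.example.com')
    ftp.login()
    for name, facts in ftp.mlsd('/pub', facts=['type', 'size']):
        print(name, facts.get('type'), facts.get('size'))
    ftp.quit()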
 
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index 0d7e325..9108095 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -64,26 +64,29 @@
 
 
 def __get_builtin_constructor(name):
-    if name in ('SHA1', 'sha1'):
-        import _sha1
-        return _sha1.sha1
-    elif name in ('MD5', 'md5'):
-        import _md5
-        return _md5.md5
-    elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'):
-        import _sha256
-        bs = name[3:]
-        if bs == '256':
-            return _sha256.sha256
-        elif bs == '224':
-            return _sha256.sha224
-    elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'):
-        import _sha512
-        bs = name[3:]
-        if bs == '512':
-            return _sha512.sha512
-        elif bs == '384':
-            return _sha512.sha384
+    try:
+        if name in ('SHA1', 'sha1'):
+            import _sha1
+            return _sha1.sha1
+        elif name in ('MD5', 'md5'):
+            import _md5
+            return _md5.md5
+        elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'):
+            import _sha256
+            bs = name[3:]
+            if bs == '256':
+                return _sha256.sha256
+            elif bs == '224':
+                return _sha256.sha224
+        elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'):
+            import _sha512
+            bs = name[3:]
+            if bs == '512':
+                return _sha512.sha512
+            elif bs == '384':
+                return _sha512.sha384
+    except ImportError:
+        pass  # no extension module, this hash is unsupported.
 
     raise ValueError('unsupported hash type %s' % name)
 
diff --git a/Lib/http/server.py b/Lib/http/server.py
index 6aacbbd..1d193f8 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -355,6 +355,7 @@
 
         """
         self.send_response_only(100)
+        self.flush_headers()
         return True
 
     def handle_one_request(self):
@@ -432,7 +433,8 @@
             self.wfile.write(content.encode('UTF-8', 'replace'))
 
     def send_response(self, code, message=None):
-        """Send the response header and log the response code.
+        """Add the response header to the headers buffer and log the
+        response code.
 
         Also send two standard headers with the server software
         version and the current date.
@@ -451,11 +453,14 @@
             else:
                 message = ''
         if self.request_version != 'HTTP/0.9':
-            self.wfile.write(("%s %d %s\r\n" %
-                              (self.protocol_version, code, message)).encode('latin-1', 'strict'))
+            if not hasattr(self, '_headers_buffer'):
+                self._headers_buffer = []
+            self._headers_buffer.append(("%s %d %s\r\n" %
+                    (self.protocol_version, code, message)).encode(
+                        'latin-1', 'strict'))
 
     def send_header(self, keyword, value):
-        """Send a MIME header."""
+        """Send a MIME header to the headers buffer."""
         if self.request_version != 'HTTP/0.9':
             if not hasattr(self, '_headers_buffer'):
                 self._headers_buffer = []
@@ -472,6 +477,10 @@
         """Send the blank line ending the MIME headers."""
         if self.request_version != 'HTTP/0.9':
             self._headers_buffer.append(b"\r\n")
+            self.flush_headers()
+
+    def flush_headers(self):
+        if hasattr(self, '_headers_buffer'):
             self.wfile.write(b"".join(self._headers_buffer))
             self._headers_buffer = []
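
A sketch of the buffered pattern these changes establish: the status line
and headers accumulate in ``_headers_buffer`` and reach the wire only on
``end_headers()`` (or an explicit ``flush_headers()``)::

    from http.server import BaseHTTPRequestHandler

    class Handler(BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_response(200)                         # buffered
            self.send_header('Content-Type', 'text/plain')  # buffered
            self.end_headers()                              # flushes buffer
            self.wfile.write(b'hello\n')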
 
@@ -1081,6 +1090,7 @@
             env.setdefault(k, "")
 
         self.send_response(200, "Script output follows")
+        self.flush_headers()
 
         decoded_query = query.replace('+', ' ')
 
diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py
index 381bb00..3f5d556 100644
--- a/Lib/idlelib/IOBinding.py
+++ b/Lib/idlelib/IOBinding.py
@@ -309,17 +309,20 @@
             return "yes"
         message = "Do you want to save %s before closing?" % (
             self.filename or "this untitled document")
-        m = tkMessageBox.Message(
-            title="Save On Close",
-            message=message,
-            icon=tkMessageBox.QUESTION,
-            type=tkMessageBox.YESNOCANCEL,
-            master=self.text)
-        reply = m.show()
-        if reply == "yes":
+        confirm = tkMessageBox.askyesnocancel(
+                  title="Save On Close",
+                  message=message,
+                  default=tkMessageBox.YES,
+                  master=self.text)
+        if confirm:
+            reply = "yes"
             self.save(None)
             if not self.get_saved():
                 reply = "cancel"
+        elif confirm is None:
+            reply = "cancel"
+        else:
+            reply = "no"
         self.text.focus_set()
         return reply
 
@@ -328,7 +331,7 @@
             self.save_as(event)
         else:
             if self.writefile(self.filename):
-                self.set_saved(1)
+                self.set_saved(True)
                 try:
                     self.editwin.store_file_breaks()
                 except AttributeError:  # may be a PyShell
@@ -420,15 +423,12 @@
             self.text.insert("end-1c", "\n")
 
     def print_window(self, event):
-        m = tkMessageBox.Message(
-            title="Print",
-            message="Print to Default Printer",
-            icon=tkMessageBox.QUESTION,
-            type=tkMessageBox.OKCANCEL,
-            default=tkMessageBox.OK,
-            master=self.text)
-        reply = m.show()
-        if reply != tkMessageBox.OK:
+        confirm = tkMessageBox.askokcancel(
+                  title="Print",
+                  message="Print to Default Printer",
+                  default=tkMessageBox.OK,
+                  master=self.text)
+        if not confirm:
             self.text.focus_set()
             return "break"
         tempfilename = None
@@ -443,8 +443,8 @@
             if not self.writefile(tempfilename):
                 os.unlink(tempfilename)
                 return "break"
-        platform=os.name
-        printPlatform=1
+        platform = os.name
+        printPlatform = True
         if platform == 'posix': #posix platform
             command = idleConf.GetOption('main','General',
                                          'print-command-posix')
@@ -452,7 +452,7 @@
         elif platform == 'nt': #win32 platform
             command = idleConf.GetOption('main','General','print-command-win')
         else: #no printing for this platform
-            printPlatform=0
+            printPlatform = False
         if printPlatform:  #we can try to print for this platform
             command = command % filename
             pipe = os.popen(command, "r")
@@ -466,7 +466,7 @@
                 output = "Printing command: %s\n" % repr(command) + output
                 tkMessageBox.showerror("Print status", output, master=self.text)
         else:  #no printing for this platform
-            message="Printing is not enabled for this platform: %s" % platform
+            message = "Printing is not enabled for this platform: %s" % platform
             tkMessageBox.showinfo("Print status", message, master=self.text)
         if tempfilename:
             os.unlink(tempfilename)
diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt
index deeb4a6..0a4f98e 100644
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -1,18 +1,25 @@
-What's New in IDLE 3.1.4?
+What's New in IDLE 3.2.1?
 =========================
 
-*Release date: XX-XXX-XX*
+*Release date: 15-May-11*
+
+- Issue #6378: Further adjust idle.bat to start associated Python
+
+- Issue #11896: Save on Close failed despite selecting "Yes" in dialog.
+
+- Issue #1028: Ctrl-space binding to show completions was causing IDLE to exit.
+  Tk < 8.5 was sending invalid Unicode null; replaced with valid null.
 
 - <Home> toggle failing on Tk 8.5, causing IDLE exits and strange selection
   behavior. Issue 4676.  Improve selection extension behaviour.
-- <Home> toggle non-functional when NumLock set on Windows.  Issue 3851.
 
+- <Home> toggle non-functional when NumLock set on Windows.  Issue 3851.
 
 
 What's New in IDLE 3.1b1?
 =========================
 
-*Release date: 27-Jun-09*
+*Release date: 06-May-09*
 
 - Use of 'filter' in keybindingDialog.py was causing custom key assignment to
   fail.  Patch 5707 amaury.forgeotdarc.
@@ -58,7 +65,7 @@
   extract port from command line when warnings are present.
 
 - Tk 8.5 Text widget requires 'wordprocessor' tabstyle attr to handle
-  mixed space/tab properly. Issue 5120, patch by Guilherme Polo.
+  mixed space/tab properly. Issue 5129, patch by Guilherme Polo.
 
 - Issue #3549: On MacOS the preferences menu was not present
 
diff --git a/Lib/idlelib/ScriptBinding.py b/Lib/idlelib/ScriptBinding.py
index c42b29d..90972b5 100644
--- a/Lib/idlelib/ScriptBinding.py
+++ b/Lib/idlelib/ScriptBinding.py
@@ -27,6 +27,7 @@
 from idlelib import PyShell, IOBinding
 
 from idlelib.configHandler import idleConf
+from idlelib import macosxSupport
 
 indent_message = """Error: Inconsistent indentation detected!
 
@@ -52,6 +53,9 @@
         self.flist = self.editwin.flist
         self.root = self.editwin.root
 
+        if macosxSupport.runningAsOSXApp():
+            self.editwin.text_frame.bind('<<run-module-event-2>>', self._run_module_event)
+
     def check_module_event(self, event):
         filename = self.getfilename()
         if not filename:
@@ -116,14 +120,27 @@
             shell.set_warning_stream(saved_stream)
 
     def run_module_event(self, event):
+        if macosxSupport.runningAsOSXApp():
+            # Tk-Cocoa in MacOSX is broken until at least
+            # Tk 8.5.9, and without this rather
+            # crude workaround IDLE would hang when a user
+            # tries to run a module using the keyboard shortcut
+            # (the menu item works fine).
+            self.editwin.text_frame.after(200,
+                lambda: self.editwin.text_frame.event_generate('<<run-module-event-2>>'))
+            return 'break'
+        else:
+            return self._run_module_event(event)
+
+    def _run_module_event(self, event):
         """Run the module after setting up the environment.
 
         First check the syntax.  If OK, make sure the shell is active and
         then transfer the arguments, set the run environment's working
         directory to the directory of the module being executed and also
         add that directory to its sys.path if not already included.
-
         """
+
         filename = self.getfilename()
         if not filename:
             return 'break'
@@ -174,9 +191,9 @@
             if autosave and filename:
                 self.editwin.io.save(None)
             else:
-                reply = self.ask_save_dialog()
+                confirm = self.ask_save_dialog()
                 self.editwin.text.focus_set()
-                if reply == "ok":
+                if confirm:
                     self.editwin.io.save(None)
                     filename = self.editwin.io.filename
                 else:
@@ -185,13 +202,11 @@
 
     def ask_save_dialog(self):
         msg = "Source Must Be Saved\n" + 5*' ' + "OK to Save?"
-        mb = tkMessageBox.Message(title="Save Before Run or Check",
-                                  message=msg,
-                                  icon=tkMessageBox.QUESTION,
-                                  type=tkMessageBox.OKCANCEL,
-                                  default=tkMessageBox.OK,
-                                  master=self.editwin.text)
-        return mb.show()
+        confirm = tkMessageBox.askokcancel(title="Save Before Run or Check",
+                                           message=msg,
+                                           default=tkMessageBox.OK,
+                                           master=self.editwin.text)
+        return confirm
 
     def errorbox(self, title, message):
         # XXX This should really be a function of EditorWindow...
diff --git a/Lib/idlelib/idle.bat b/Lib/idlelib/idle.bat
index cc653dc..e77b96e 100755
--- a/Lib/idlelib/idle.bat
+++ b/Lib/idlelib/idle.bat
@@ -1,4 +1,4 @@
 @echo off
 rem Start IDLE using the appropriate Python interpreter
 set CURRDIR=%~dp0
-start "%CURRDIR%..\..\pythonw.exe" "%CURRDIR%idle.pyw" %1 %2 %3 %4 %5 %6 %7 %8 %9
+start "IDLE" "%CURRDIR%..\..\pythonw.exe" "%CURRDIR%idle.pyw" %1 %2 %3 %4 %5 %6 %7 %8 %9
diff --git a/Lib/imaplib.py b/Lib/imaplib.py
index 1022e77..142e27b 100644
--- a/Lib/imaplib.py
+++ b/Lib/imaplib.py
@@ -1177,25 +1177,40 @@
 
         """IMAP4 client class over SSL connection
 
-        Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile]]]])
+        Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile[, ssl_context]]]]])
 
                 host - host's name (default: localhost);
-                port - port number (default: standard IMAP4 SSL port).
+                port - port number (default: standard IMAP4 SSL port);
                 keyfile - PEM formatted file that contains your private key (default: None);
                 certfile - PEM formatted certificate chain file (default: None);
+                ssl_context - a SSLContext object that contains your certificate chain
+                              and private key (default: None)
+                Note: if ssl_context is provided, the keyfile and certfile
+                parameters must not be set; otherwise ValueError is raised.
 
         for more documentation see the docstring of the parent class IMAP4.
         """
 
 
-        def __init__(self, host = '', port = IMAP4_SSL_PORT, keyfile = None, certfile = None):
+        def __init__(self, host='', port=IMAP4_SSL_PORT, keyfile=None, certfile=None, ssl_context=None):
+            if ssl_context is not None and keyfile is not None:
+                raise ValueError("ssl_context and keyfile arguments are mutually "
+                                 "exclusive")
+            if ssl_context is not None and certfile is not None:
+                raise ValueError("ssl_context and certfile arguments are mutually "
+                                 "exclusive")
+
             self.keyfile = keyfile
             self.certfile = certfile
+            self.ssl_context = ssl_context
             IMAP4.__init__(self, host, port)
 
         def _create_socket(self):
             sock = IMAP4._create_socket(self)
-            return ssl.wrap_socket(sock, self.keyfile, self.certfile)
+            if self.ssl_context:
+                return self.ssl_context.wrap_socket(sock)
+            else:
+                return ssl.wrap_socket(sock, self.keyfile, self.certfile)
 
         def open(self, host='', port=IMAP4_SSL_PORT):
             """Setup connection to remote server on "host:port".
diff --git a/Lib/importlib/test/frozen/test_loader.py b/Lib/importlib/test/frozen/test_loader.py
index c05e22c..b685ef5 100644
--- a/Lib/importlib/test/frozen/test_loader.py
+++ b/Lib/importlib/test/frozen/test_loader.py
@@ -3,20 +3,21 @@
 import unittest
 from .. import abc
 from .. import util
-
+from test.support import captured_stdout
 
 class LoaderTests(abc.LoaderTests):
 
     def test_module(self):
-        with util.uncache('__hello__'):
+        with util.uncache('__hello__'), captured_stdout() as stdout:
             module = machinery.FrozenImporter.load_module('__hello__')
             check = {'__name__': '__hello__', '__file__': '<frozen>',
                     '__package__': '', '__loader__': machinery.FrozenImporter}
             for attr, value in check.items():
                 self.assertEqual(getattr(module, attr), value)
+            self.assertEqual(stdout.getvalue(), 'Hello world!\n')
 
     def test_package(self):
-        with util.uncache('__phello__'):
+        with util.uncache('__phello__'),  captured_stdout() as stdout:
             module = machinery.FrozenImporter.load_module('__phello__')
             check = {'__name__': '__phello__', '__file__': '<frozen>',
                      '__package__': '__phello__', '__path__': ['__phello__'],
@@ -26,9 +27,11 @@
                 self.assertEqual(attr_value, value,
                                  "for __phello__.%s, %r != %r" %
                                  (attr, attr_value, value))
+            self.assertEqual(stdout.getvalue(), 'Hello world!\n')
 
     def test_lacking_parent(self):
-        with util.uncache('__phello__', '__phello__.spam'):
+        with util.uncache('__phello__', '__phello__.spam'), \
+             captured_stdout() as stdout:
             module = machinery.FrozenImporter.load_module('__phello__.spam')
             check = {'__name__': '__phello__.spam', '__file__': '<frozen>',
                     '__package__': '__phello__',
@@ -38,12 +41,15 @@
                 self.assertEqual(attr_value, value,
                                  "for __phello__.spam.%s, %r != %r" %
                                  (attr, attr_value, value))
+            self.assertEqual(stdout.getvalue(), 'Hello world!\n')
 
     def test_module_reuse(self):
-        with util.uncache('__hello__'):
+        with util.uncache('__hello__'), captured_stdout() as stdout:
             module1 = machinery.FrozenImporter.load_module('__hello__')
             module2 = machinery.FrozenImporter.load_module('__hello__')
             self.assertTrue(module1 is module2)
+            self.assertEqual(stdout.getvalue(),
+                             'Hello world!\nHello world!\n')
 
     def test_state_after_failure(self):
         # No way to trigger an error in a frozen module.
@@ -62,10 +68,12 @@
     def test_get_code(self):
         # Make sure that the code object is good.
         name = '__hello__'
-        code = machinery.FrozenImporter.get_code(name)
-        mod = imp.new_module(name)
-        exec(code, mod.__dict__)
-        self.assertTrue(hasattr(mod, 'initialized'))
+        with captured_stdout() as stdout:
+            code = machinery.FrozenImporter.get_code(name)
+            mod = imp.new_module(name)
+            exec(code, mod.__dict__)
+            self.assertTrue(hasattr(mod, 'initialized'))
+            self.assertEqual(stdout.getvalue(), 'Hello world!\n')
 
     def test_get_source(self):
         # Should always return None.
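
The pattern added throughout this test file is worth isolating: test.support.captured_stdout() swaps sys.stdout for an io.StringIO, so the frozen modules' "Hello world!" side effect can be asserted rather than leaking into the test run's output. A minimal sketch of the idiom, using only the standard library:

    import unittest
    from test.support import captured_stdout

    class CapturePattern(unittest.TestCase):
        def test_print_is_captured(self):
            with captured_stdout() as stdout:
                print('Hello world!')
            # stdout is an io.StringIO holding everything written
            self.assertEqual(stdout.getvalue(), 'Hello world!\n')

    if __name__ == '__main__':
        unittest.main()
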
diff --git a/Lib/json/decoder.py b/Lib/json/decoder.py
index a19cf0f..3174e31 100644
--- a/Lib/json/decoder.py
+++ b/Lib/json/decoder.py
@@ -5,7 +5,7 @@
 import sys
 import struct
 
-from json.scanner import make_scanner
+from json import scanner
 try:
     from _json import scanstring as c_scanstring
 except ImportError:
@@ -340,7 +340,7 @@
         self.parse_array = JSONArray
         self.parse_string = scanstring
         self.memo = {}
-        self.scan_once = make_scanner(self)
+        self.scan_once = scanner.make_scanner(self)
 
 
     def decode(self, s, _w=WHITESPACE.match):
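
The switch from importing make_scanner directly to going through the scanner module is not cosmetic: looking the function up as a module attribute at JSONDecoder construction time means that a later rebinding of json.scanner.make_scanner (as the json tests do when exercising the pure-Python scanner against the C one) is seen by newly created decoders. A hedged illustration:

    import json.decoder
    import json.scanner

    original = json.scanner.make_scanner

    def traced_make_scanner(context):
        print('building a scanner')
        return original(context)

    # Rebinding the module attribute now affects decoders created later.
    json.scanner.make_scanner = traced_make_scanner
    try:
        json.decoder.JSONDecoder()   # prints: building a scanner
    finally:
        json.scanner.make_scanner = original
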
diff --git a/Lib/locale.py b/Lib/locale.py
index 7b987be..0f6fe53 100644
--- a/Lib/locale.py
+++ b/Lib/locale.py
@@ -643,7 +643,7 @@
     'tactis':                       'TACTIS',
     'euc_jp':                       'eucJP',
     'euc_kr':                       'eucKR',
-    'utf_8':                        'UTF8',
+    'utf_8':                        'UTF-8',
     'koi8_r':                       'KOI8-R',
     'koi8_u':                       'KOI8-U',
     # XXX This list is still incomplete. If you know more
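
The alias fix is visible through locale.normalize(), which consults this table to map a codeset spelling onto its canonical form; 'UTF-8' (with the hyphen) is the spelling C libraries generally accept in setlocale(). A quick check, assuming a POSIX-style locale database:

    import locale

    print(locale.normalize('en_US.utf8'))
    # expected: 'en_US.UTF-8' (previously 'en_US.UTF8')
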
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index 0086808..f17db0e 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -446,8 +446,12 @@
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         if hasattr(s, 'settimeout'):
             s.settimeout(timeout)
-        s.connect((self.host, self.port))
-        return s
+        try:
+            s.connect((self.host, self.port))
+            return s
+        except socket.error:
+            s.close()
+            raise
 
     def createSocket(self):
         """
@@ -752,8 +756,7 @@
         """
         Closes the socket.
         """
-        if self.unixsocket:
-            self.socket.close()
+        self.socket.close()
         logging.Handler.close(self)
 
     def mapPriority(self, levelName):
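
Both hunks apply the same resource-safety rule: an object that owns an OS resource must release it on every path, including the failure path. The makeSocket() change can be read as this generic idiom, closing the socket if connect() raises before re-raising:

    import socket

    def connect_or_close(host, port, timeout=1.0):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(timeout)
        try:
            s.connect((host, port))
            return s
        except socket.error:
            s.close()      # don't leak the descriptor on failure
            raise
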
diff --git a/Lib/mailbox.py b/Lib/mailbox.py
index ace1709..0e4f99b 100644
--- a/Lib/mailbox.py
+++ b/Lib/mailbox.py
@@ -258,19 +258,24 @@
     def __init__(self, dirname, factory=None, create=True):
         """Initialize a Maildir instance."""
         Mailbox.__init__(self, dirname, factory, create)
+        self._paths = {
+            'tmp': os.path.join(self._path, 'tmp'),
+            'new': os.path.join(self._path, 'new'),
+            'cur': os.path.join(self._path, 'cur'),
+            }
         if not os.path.exists(self._path):
             if create:
                 os.mkdir(self._path, 0o700)
-                os.mkdir(os.path.join(self._path, 'tmp'), 0o700)
-                os.mkdir(os.path.join(self._path, 'new'), 0o700)
-                os.mkdir(os.path.join(self._path, 'cur'), 0o700)
+                for path in self._paths.values():
+                    os.mkdir(path, 0o700)
             else:
                 raise NoSuchMailboxError(self._path)
         self._toc = {}
-        self._last_read = None          # Records last time we read cur/new
-        # NOTE: we manually invalidate _last_read each time we do any
-        # modifications ourselves, otherwise we might get tripped up by
-        # bogus mtime behaviour on some systems (see issue #6896).
+        self._toc_mtimes = {}
+        for subdir in ('cur', 'new'):
+            self._toc_mtimes[subdir] = os.path.getmtime(self._paths[subdir])
+        self._last_read = time.time()  # Records last time we read cur/new
+        self._skewfactor = 0.1         # Adjust if os/fs clocks are skewing
 
     def add(self, message):
         """Add message and return assigned key."""
@@ -307,15 +312,11 @@
                 raise
         if isinstance(message, MaildirMessage):
             os.utime(dest, (os.path.getatime(dest), message.get_date()))
-        # Invalidate cached toc
-        self._last_read = None
         return uniq
 
     def remove(self, key):
         """Remove the keyed message; raise KeyError if it doesn't exist."""
         os.remove(os.path.join(self._path, self._lookup(key)))
-        # Invalidate cached toc (only on success)
-        self._last_read = None
 
     def discard(self, key):
         """If the keyed message exists, remove it."""
@@ -350,8 +351,6 @@
         if isinstance(message, MaildirMessage):
             os.utime(new_path, (os.path.getatime(new_path),
                                 message.get_date()))
-        # Invalidate cached toc
-        self._last_read = None
 
     def get_message(self, key):
         """Return a Message representation or raise a KeyError."""
@@ -407,8 +406,8 @@
     def flush(self):
         """Write any pending changes to disk."""
         # Maildir changes are always written immediately, so there's nothing
-        # to do except invalidate our cached toc.
-        self._last_read = None
+        # to do.
+        pass
 
     def lock(self):
         """Lock the mailbox."""
@@ -506,36 +505,39 @@
 
     def _refresh(self):
         """Update table of contents mapping."""
-        if self._last_read is not None:
-            for subdir in ('new', 'cur'):
-                mtime = os.path.getmtime(os.path.join(self._path, subdir))
-                if mtime > self._last_read:
-                    break
-            else:
+        # If it has been less than two seconds since the last _refresh() call,
+        # we have to unconditionally re-read the mailbox just in case it has
+        # been modified, because os.path.getmtime() has a 2 sec resolution in
+        # the most common worst case (FAT) and a 1 sec resolution typically.
+        # This results in a few unnecessary re-reads when _refresh() is called
+        # multiple times in that interval, but once the clock ticks over, we
+        # will only re-read as needed.  Because the filesystem might be
+        # served by an independent system with its own clock, we record and
+        # compare with the mtimes from the filesystem.  Because the other
+        # system's clock might be skewing relative to our clock, we add an
+        # extra delta to our wait.  The default is one tenth of a second, but
+        # is an instance variable and so can be adjusted if dealing with a
+        # particularly skewed or irregular system.
+        if time.time() - self._last_read > 2 + self._skewfactor:
+            refresh = False
+            for subdir in self._toc_mtimes:
+                mtime = os.path.getmtime(self._paths[subdir])
+                if mtime > self._toc_mtimes[subdir]:
+                    refresh = True
+                self._toc_mtimes[subdir] = mtime
+            if not refresh:
                 return
-
-        # We record the current time - 1sec so that, if _refresh() is called
-        # again in the same second, we will always re-read the mailbox
-        # just in case it's been modified.  (os.path.mtime() only has
-        # 1sec resolution.)  This results in a few unnecessary re-reads
-        # when _refresh() is called multiple times in the same second,
-        # but once the clock ticks over, we will only re-read as needed.
-        now = time.time() - 1
-
+        # Refresh toc
         self._toc = {}
-        def update_dir (subdir):
-            path = os.path.join(self._path, subdir)
+        for subdir in self._toc_mtimes:
+            path = self._paths[subdir]
             for entry in os.listdir(path):
                 p = os.path.join(path, entry)
                 if os.path.isdir(p):
                     continue
                 uniq = entry.split(self.colon)[0]
                 self._toc[uniq] = os.path.join(subdir, entry)
-
-        update_dir('new')
-        update_dir('cur')
-
-        self._last_read = now
+        self._last_read = time.time()
 
     def _lookup(self, key):
         """Use TOC to return subpath for given key, or raise a KeyError."""
diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py
index 646a785..f033ba9 100644
--- a/Lib/modulefinder.py
+++ b/Lib/modulefinder.py
@@ -35,9 +35,10 @@
 
 replacePackageMap = {}
 
-# This ReplacePackage mechanism allows modulefinder to work around the
-# way the _xmlplus package injects itself under the name "xml" into
-# sys.modules at runtime by calling ReplacePackage("_xmlplus", "xml")
+# This ReplacePackage mechanism allows modulefinder to work around
+# situations in which a package injects itself under the name
+# of another package into sys.modules at runtime by calling
+# ReplacePackage("real_package_name", "faked_package_name")
 # before running ModuleFinder.
 
 def ReplacePackage(oldname, newname):
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
index d6627e5..415e210 100644
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -34,19 +34,27 @@
 
 __all__ = [ 'Client', 'Listener', 'Pipe' ]
 
+import io
 import os
 import sys
+import pickle
+import select
 import socket
+import struct
 import errno
 import time
 import tempfile
 import itertools
 
 import _multiprocessing
-from multiprocessing import current_process, AuthenticationError
+from multiprocessing import current_process, AuthenticationError, BufferTooShort
 from multiprocessing.util import get_temp_dir, Finalize, sub_debug, debug
-from multiprocessing.forking import duplicate, close
-
+try:
+    from _multiprocessing import win32
+except ImportError:
+    if sys.platform == 'win32':
+        raise
+    win32 = None
 
 #
 #
@@ -111,6 +119,281 @@
         raise ValueError('address type of %r unrecognized' % address)
 
 #
+# Connection classes
+#
+
+class _ConnectionBase:
+    _handle = None
+
+    def __init__(self, handle, readable=True, writable=True):
+        handle = handle.__index__()
+        if handle < 0:
+            raise ValueError("invalid handle")
+        if not readable and not writable:
+            raise ValueError(
+                "at least one of `readable` and `writable` must be True")
+        self._handle = handle
+        self._readable = readable
+        self._writable = writable
+
+    def __del__(self):
+        if self._handle is not None:
+            self._close()
+
+    def _check_closed(self):
+        if self._handle is None:
+            raise IOError("handle is closed")
+
+    def _check_readable(self):
+        if not self._readable:
+            raise IOError("connection is write-only")
+
+    def _check_writable(self):
+        if not self._writable:
+            raise IOError("connection is read-only")
+
+    def _bad_message_length(self):
+        if self._writable:
+            self._readable = False
+        else:
+            self.close()
+        raise IOError("bad message length")
+
+    @property
+    def closed(self):
+        """True if the connection is closed"""
+        return self._handle is None
+
+    @property
+    def readable(self):
+        """True if the connection is readable"""
+        return self._readable
+
+    @property
+    def writable(self):
+        """True if the connection is writable"""
+        return self._writable
+
+    def fileno(self):
+        """File descriptor or handle of the connection"""
+        self._check_closed()
+        return self._handle
+
+    def close(self):
+        """Close the connection"""
+        if self._handle is not None:
+            try:
+                self._close()
+            finally:
+                self._handle = None
+
+    def send_bytes(self, buf, offset=0, size=None):
+        """Send the bytes data from a bytes-like object"""
+        self._check_closed()
+        self._check_writable()
+        m = memoryview(buf)
+        # HACK for byte-indexing of non-bytewise buffers (e.g. array.array)
+        if m.itemsize > 1:
+            m = memoryview(bytes(m))
+        n = len(m)
+        if offset < 0:
+            raise ValueError("offset is negative")
+        if n < offset:
+            raise ValueError("buffer length < offset")
+        if size is None:
+            size = n - offset
+        elif size < 0:
+            raise ValueError("size is negative")
+        elif offset + size > n:
+            raise ValueError("buffer length < offset + size")
+        self._send_bytes(m[offset:offset + size])
+
+    def send(self, obj):
+        """Send a (picklable) object"""
+        self._check_closed()
+        self._check_writable()
+        buf = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
+        self._send_bytes(memoryview(buf))
+
+    def recv_bytes(self, maxlength=None):
+        """
+        Receive bytes data as a bytes object.
+        """
+        self._check_closed()
+        self._check_readable()
+        if maxlength is not None and maxlength < 0:
+            raise ValueError("negative maxlength")
+        buf = self._recv_bytes(maxlength)
+        if buf is None:
+            self._bad_message_length()
+        return buf.getvalue()
+
+    def recv_bytes_into(self, buf, offset=0):
+        """
+        Receive bytes data into a writeable buffer-like object.
+        Return the number of bytes read.
+        """
+        self._check_closed()
+        self._check_readable()
+        with memoryview(buf) as m:
+            # Get bytesize of arbitrary buffer
+            itemsize = m.itemsize
+            bytesize = itemsize * len(m)
+            if offset < 0:
+                raise ValueError("negative offset")
+            elif offset > bytesize:
+                raise ValueError("offset too large")
+            result = self._recv_bytes()
+            size = result.tell()
+            if bytesize < offset + size:
+                raise BufferTooShort(result.getvalue())
+            # Message can fit in dest
+            result.seek(0)
+            result.readinto(m[offset // itemsize :
+                              (offset + size) // itemsize])
+            return size
+
+    def recv(self):
+        """Receive a (picklable) object"""
+        self._check_closed()
+        self._check_readable()
+        buf = self._recv_bytes()
+        return pickle.loads(buf.getbuffer())
+
+    def poll(self, timeout=0.0):
+        """Whether there is any input available to be read"""
+        self._check_closed()
+        self._check_readable()
+        if timeout < 0.0:
+            timeout = None
+        return self._poll(timeout)
+
+
+if win32:
+
+    class PipeConnection(_ConnectionBase):
+        """
+        Connection class based on a Windows named pipe.
+        """
+
+        def _close(self):
+            win32.CloseHandle(self._handle)
+
+        def _send_bytes(self, buf):
+            nwritten = win32.WriteFile(self._handle, buf)
+            assert nwritten == len(buf)
+
+        def _recv_bytes(self, maxsize=None):
+            buf = io.BytesIO()
+            bufsize = 512
+            if maxsize is not None:
+                bufsize = min(bufsize, maxsize)
+            try:
+                firstchunk, complete = win32.ReadFile(self._handle, bufsize)
+            except IOError as e:
+                if e.errno == win32.ERROR_BROKEN_PIPE:
+                    raise EOFError
+                raise
+            lenfirstchunk = len(firstchunk)
+            buf.write(firstchunk)
+            if complete:
+                return buf
+            navail, nleft = win32.PeekNamedPipe(self._handle)
+            if maxsize is not None and lenfirstchunk + nleft > maxsize:
+                return None
+            lastchunk, complete = win32.ReadFile(self._handle, nleft)
+            assert complete
+            buf.write(lastchunk)
+            return buf
+
+        def _poll(self, timeout):
+            navail, nleft = win32.PeekNamedPipe(self._handle)
+            if navail > 0:
+                return True
+            elif timeout == 0.0:
+                return False
+            # Setup a polling loop (translated straight from old
+            # pipe_connection.c)
+            if timeout < 0.0:
+                deadline = None
+            else:
+                deadline = time.time() + timeout
+            delay = 0.001
+            max_delay = 0.02
+            while True:
+                time.sleep(delay)
+                navail, nleft = win32.PeekNamedPipe(self._handle)
+                if navail > 0:
+                    return True
+                if deadline and time.time() > deadline:
+                    return False
+                if delay < max_delay:
+                    delay += 0.001
+
+
+class Connection(_ConnectionBase):
+    """
+    Connection class based on an arbitrary file descriptor (Unix only), or
+    a socket handle (Windows).
+    """
+
+    if win32:
+        def _close(self):
+            win32.closesocket(self._handle)
+        _write = win32.send
+        _read = win32.recv
+    else:
+        def _close(self):
+            os.close(self._handle)
+        _write = os.write
+        _read = os.read
+
+    def _send(self, buf, write=_write):
+        remaining = len(buf)
+        while True:
+            n = write(self._handle, buf)
+            remaining -= n
+            if remaining == 0:
+                break
+            buf = buf[n:]
+
+    def _recv(self, size, read=_read):
+        buf = io.BytesIO()
+        remaining = size
+        while remaining > 0:
+            chunk = read(self._handle, remaining)
+            n = len(chunk)
+            if n == 0:
+                if remaining == size:
+                    raise EOFError
+                else:
+                    raise IOError("got end of file during message")
+            buf.write(chunk)
+            remaining -= n
+        return buf
+
+    def _send_bytes(self, buf):
+        # For wire compatibility with 3.2 and lower
+        n = len(buf)
+        self._send(struct.pack("=i", n))
+        # The condition is necessary to avoid "broken pipe" errors
+        # when sending a 0-length buffer if the other end closed the pipe.
+        if n > 0:
+            self._send(buf)
+
+    def _recv_bytes(self, maxsize=None):
+        buf = self._recv(4)
+        size, = struct.unpack("=i", buf.getvalue())
+        if maxsize is not None and size > maxsize:
+            return None
+        return self._recv(size)
+
+    def _poll(self, timeout):
+        r = select.select([self._handle], [], [], timeout)[0]
+        return bool(r)
+
+
+#
 # Public functions
 #
 
@@ -186,21 +469,17 @@
         '''
         if duplex:
             s1, s2 = socket.socketpair()
-            c1 = _multiprocessing.Connection(os.dup(s1.fileno()))
-            c2 = _multiprocessing.Connection(os.dup(s2.fileno()))
-            s1.close()
-            s2.close()
+            c1 = Connection(s1.detach())
+            c2 = Connection(s2.detach())
         else:
             fd1, fd2 = os.pipe()
-            c1 = _multiprocessing.Connection(fd1, writable=False)
-            c2 = _multiprocessing.Connection(fd2, readable=False)
+            c1 = Connection(fd1, writable=False)
+            c2 = Connection(fd2, readable=False)
 
         return c1, c2
 
 else:
 
-    from _multiprocessing import win32
-
     def Pipe(duplex=True):
         '''
         Returns pair of connection objects at either end of a pipe
@@ -234,8 +513,8 @@
             if e.args[0] != win32.ERROR_PIPE_CONNECTED:
                 raise
 
-        c1 = _multiprocessing.PipeConnection(h1, writable=duplex)
-        c2 = _multiprocessing.PipeConnection(h2, readable=duplex)
+        c1 = PipeConnection(h1, writable=duplex)
+        c2 = PipeConnection(h2, readable=duplex)
 
         return c1, c2
 
@@ -266,7 +545,7 @@
     def accept(self):
         s, self._last_accepted = self._socket.accept()
         fd = duplicate(s.fileno())
-        conn = _multiprocessing.Connection(fd)
+        conn = Connection(fd)
         s.close()
         return conn
 
@@ -298,7 +577,7 @@
             raise
 
         fd = duplicate(s.fileno())
-    conn = _multiprocessing.Connection(fd)
+    conn = Connection(fd)
     return conn
 
 #
@@ -345,7 +624,7 @@
             except WindowsError as e:
                 if e.args[0] != win32.ERROR_PIPE_CONNECTED:
                     raise
-            return _multiprocessing.PipeConnection(handle)
+            return PipeConnection(handle)
 
         @staticmethod
         def _finalize_pipe_listener(queue, address):
@@ -377,7 +656,7 @@
         win32.SetNamedPipeHandleState(
             h, win32.PIPE_READMODE_MESSAGE, None, None
             )
-        return _multiprocessing.PipeConnection(h)
+        return PipeConnection(h)
 
 #
 # Authentication stuff
@@ -451,3 +730,7 @@
     global xmlrpclib
     import xmlrpc.client as xmlrpclib
     return ConnectionWrapper(Client(*args, **kwds), _xml_dumps, _xml_loads)
+
+
+# Late import because of circular import
+from multiprocessing.forking import duplicate, close
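
The byte-level protocol implemented by Connection._send_bytes()/_recv_bytes() above is a classic length-prefixed framing: a native-endian 32-bit size, then the payload (the in-code comment notes this is kept for wire compatibility with 3.2 and lower). The framing alone, as a hedged sketch:

    import struct

    def frame(payload):
        # 4-byte native-endian length prefix, then the payload
        return struct.pack("=i", len(payload)) + payload

    def unframe(data):
        size, = struct.unpack("=i", data[:4])
        return data[4:4 + size]

    assert unframe(frame(b'spam')) == b'spam'
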
diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py
index cc7c326..3d95557 100644
--- a/Lib/multiprocessing/forking.py
+++ b/Lib/multiprocessing/forking.py
@@ -183,7 +183,7 @@
     import time
 
     from pickle import dump, load, HIGHEST_PROTOCOL
-    from _multiprocessing import win32, Connection, PipeConnection
+    from _multiprocessing import win32
     from .util import Finalize
 
     def dump(obj, file, protocol=None):
@@ -411,6 +411,9 @@
     # Make (Pipe)Connection picklable
     #
 
+    # Late import because of circular import
+    from .connection import Connection, PipeConnection
+
     def reduce_connection(conn):
         if not Popen.thread_is_spawning():
             raise RuntimeError(
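
Both "late import" comments are the standard remedy for a circular dependency: connection.py needs forking and forking needs connection, so each defers its import until the other module has finished defining its names. A runnable toy of the idea (the fabricated module "a" stands in for the already-initialized partner module):

    import sys
    import types

    # Fabricate module "a" as if a.py had finished executing.
    a = types.ModuleType('a')
    def _helper():
        return 'from a'
    a.helper = _helper
    sys.modules['a'] = a

    def uses_helper():
        from a import helper   # resolved at call time, after "a" exists
        return helper()

    print(uses_helper())       # -> from a
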
diff --git a/Lib/multiprocessing/reduction.py b/Lib/multiprocessing/reduction.py
index 6e5e5bc..b32c725 100644
--- a/Lib/multiprocessing/reduction.py
+++ b/Lib/multiprocessing/reduction.py
@@ -44,7 +44,7 @@
 from multiprocessing import current_process
 from multiprocessing.forking import Popen, duplicate, close, ForkingPickler
 from multiprocessing.util import register_after_fork, debug, sub_debug
-from multiprocessing.connection import Client, Listener
+from multiprocessing.connection import Client, Listener, Connection
 
 
 #
@@ -159,7 +159,7 @@
     return new_handle
 
 #
-# Register `_multiprocessing.Connection` with `ForkingPickler`
+# Register `Connection` with `ForkingPickler`
 #
 
 def reduce_connection(conn):
@@ -168,11 +168,11 @@
 
 def rebuild_connection(reduced_handle, readable, writable):
     handle = rebuild_handle(reduced_handle)
-    return _multiprocessing.Connection(
+    return Connection(
         handle, readable=readable, writable=writable
         )
 
-ForkingPickler.register(_multiprocessing.Connection, reduce_connection)
+ForkingPickler.register(Connection, reduce_connection)
 
 #
 # Register `socket.socket` with `ForkingPickler`
@@ -201,6 +201,7 @@
 #
 
 if sys.platform == 'win32':
+    from multiprocessing.connection import PipeConnection
 
     def reduce_pipe_connection(conn):
         rh = reduce_handle(conn.fileno())
@@ -208,8 +209,8 @@
 
     def rebuild_pipe_connection(reduced_handle, readable, writable):
         handle = rebuild_handle(reduced_handle)
-        return _multiprocessing.PipeConnection(
+        return PipeConnection(
             handle, readable=readable, writable=writable
             )
 
-    ForkingPickler.register(_multiprocessing.PipeConnection, reduce_pipe_connection)
+    ForkingPickler.register(PipeConnection, reduce_pipe_connection)
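
ForkingPickler.register() follows the same reduce/rebuild contract as the standard copyreg module: a reducer returns a rebuild callable plus the arguments needed to recreate the object on the receiving side. The shape of the pattern, shown with copyreg and a toy handle class (illustrative names only):

    import copyreg
    import pickle

    class Handle:
        def __init__(self, fd, readable=True, writable=True):
            self.fd, self.readable, self.writable = fd, readable, writable

    def rebuild_handle(fd, readable, writable):
        return Handle(fd, readable, writable)

    def reduce_handle(h):
        # (rebuild callable, args) -- same contract ForkingPickler uses
        return rebuild_handle, (h.fd, h.readable, h.writable)

    copyreg.pickle(Handle, reduce_handle)
    h = pickle.loads(pickle.dumps(Handle(7, writable=False)))
    print(h.fd, h.readable, h.writable)   # -> 7 True False
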
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index 419342d..ec8a7ab 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -654,7 +654,7 @@
     # Non-Windows operating systems fake this method with an XP
     # approximation.
     def _getfinalpathname(f):
-        return abspath(f)
+        return normcase(abspath(f))
 
 def samefile(f1, f2):
     "Test whether two pathnames reference the same actual file"
diff --git a/Lib/packaging/__init__.py b/Lib/packaging/__init__.py
new file mode 100644
index 0000000..93b6117
--- /dev/null
+++ b/Lib/packaging/__init__.py
@@ -0,0 +1,17 @@
+"""Support for packaging, distribution and installation of Python projects.
+
+Third-party tools can use parts of packaging as building blocks
+without causing the other modules to be imported:
+
+    import packaging.version
+    import packaging.metadata
+    import packaging.pypi.simple
+    import packaging.tests.pypi_server
+"""
+
+from logging import getLogger
+
+__all__ = ['__version__', 'logger']
+
+__version__ = "1.0a3"
+logger = getLogger('packaging')
diff --git a/Lib/packaging/_trove.py b/Lib/packaging/_trove.py
new file mode 100644
index 0000000..9a8719c
--- /dev/null
+++ b/Lib/packaging/_trove.py
@@ -0,0 +1,552 @@
+"""Temporary helper for create."""
+
+# XXX get the list from PyPI and cache it instead of hardcoding
+
+# XXX see if it would be more useful to store it as another structure
+# than a list of strings
+
+all_classifiers = [
+'Development Status :: 1 - Planning',
+'Development Status :: 2 - Pre-Alpha',
+'Development Status :: 3 - Alpha',
+'Development Status :: 4 - Beta',
+'Development Status :: 5 - Production/Stable',
+'Development Status :: 6 - Mature',
+'Development Status :: 7 - Inactive',
+'Environment :: Console',
+'Environment :: Console :: Curses',
+'Environment :: Console :: Framebuffer',
+'Environment :: Console :: Newt',
+'Environment :: Console :: svgalib',
+"Environment :: Handhelds/PDA's",
+'Environment :: MacOS X',
+'Environment :: MacOS X :: Aqua',
+'Environment :: MacOS X :: Carbon',
+'Environment :: MacOS X :: Cocoa',
+'Environment :: No Input/Output (Daemon)',
+'Environment :: Other Environment',
+'Environment :: Plugins',
+'Environment :: Web Environment',
+'Environment :: Web Environment :: Buffet',
+'Environment :: Web Environment :: Mozilla',
+'Environment :: Web Environment :: ToscaWidgets',
+'Environment :: Win32 (MS Windows)',
+'Environment :: X11 Applications',
+'Environment :: X11 Applications :: Gnome',
+'Environment :: X11 Applications :: GTK',
+'Environment :: X11 Applications :: KDE',
+'Environment :: X11 Applications :: Qt',
+'Framework :: BFG',
+'Framework :: Buildout',
+'Framework :: Chandler',
+'Framework :: CubicWeb',
+'Framework :: Django',
+'Framework :: IDLE',
+'Framework :: Paste',
+'Framework :: Plone',
+'Framework :: Pylons',
+'Framework :: Setuptools Plugin',
+'Framework :: Trac',
+'Framework :: TurboGears',
+'Framework :: TurboGears :: Applications',
+'Framework :: TurboGears :: Widgets',
+'Framework :: Twisted',
+'Framework :: ZODB',
+'Framework :: Zope2',
+'Framework :: Zope3',
+'Intended Audience :: Customer Service',
+'Intended Audience :: Developers',
+'Intended Audience :: Education',
+'Intended Audience :: End Users/Desktop',
+'Intended Audience :: Financial and Insurance Industry',
+'Intended Audience :: Healthcare Industry',
+'Intended Audience :: Information Technology',
+'Intended Audience :: Legal Industry',
+'Intended Audience :: Manufacturing',
+'Intended Audience :: Other Audience',
+'Intended Audience :: Religion',
+'Intended Audience :: Science/Research',
+'Intended Audience :: System Administrators',
+'Intended Audience :: Telecommunications Industry',
+'License :: Aladdin Free Public License (AFPL)',
+'License :: DFSG approved',
+'License :: Eiffel Forum License (EFL)',
+'License :: Free For Educational Use',
+'License :: Free For Home Use',
+'License :: Free for non-commercial use',
+'License :: Freely Distributable',
+'License :: Free To Use But Restricted',
+'License :: Freeware',
+'License :: Netscape Public License (NPL)',
+'License :: Nokia Open Source License (NOKOS)',
+'License :: OSI Approved',
+'License :: OSI Approved :: Academic Free License (AFL)',
+'License :: OSI Approved :: Apache Software License',
+'License :: OSI Approved :: Apple Public Source License',
+'License :: OSI Approved :: Artistic License',
+'License :: OSI Approved :: Attribution Assurance License',
+'License :: OSI Approved :: BSD License',
+'License :: OSI Approved :: Common Public License',
+'License :: OSI Approved :: Eiffel Forum License',
+'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)',
+'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)',
+'License :: OSI Approved :: GNU Affero General Public License v3',
+'License :: OSI Approved :: GNU Free Documentation License (FDL)',
+'License :: OSI Approved :: GNU General Public License (GPL)',
+'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
+'License :: OSI Approved :: IBM Public License',
+'License :: OSI Approved :: Intel Open Source License',
+'License :: OSI Approved :: ISC License (ISCL)',
+'License :: OSI Approved :: Jabber Open Source License',
+'License :: OSI Approved :: MIT License',
+'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)',
+'License :: OSI Approved :: Motosoto License',
+'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)',
+'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)',
+'License :: OSI Approved :: Nethack General Public License',
+'License :: OSI Approved :: Nokia Open Source License',
+'License :: OSI Approved :: Open Group Test Suite License',
+'License :: OSI Approved :: Python License (CNRI Python License)',
+'License :: OSI Approved :: Python Software Foundation License',
+'License :: OSI Approved :: Qt Public License (QPL)',
+'License :: OSI Approved :: Ricoh Source Code Public License',
+'License :: OSI Approved :: Sleepycat License',
+'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)',
+'License :: OSI Approved :: Sun Public License',
+'License :: OSI Approved :: University of Illinois/NCSA Open Source License',
+'License :: OSI Approved :: Vovida Software License 1.0',
+'License :: OSI Approved :: W3C License',
+'License :: OSI Approved :: X.Net License',
+'License :: OSI Approved :: zlib/libpng License',
+'License :: OSI Approved :: Zope Public License',
+'License :: Other/Proprietary License',
+'License :: Public Domain',
+'License :: Repoze Public License',
+'Natural Language :: Afrikaans',
+'Natural Language :: Arabic',
+'Natural Language :: Bengali',
+'Natural Language :: Bosnian',
+'Natural Language :: Bulgarian',
+'Natural Language :: Catalan',
+'Natural Language :: Chinese (Simplified)',
+'Natural Language :: Chinese (Traditional)',
+'Natural Language :: Croatian',
+'Natural Language :: Czech',
+'Natural Language :: Danish',
+'Natural Language :: Dutch',
+'Natural Language :: English',
+'Natural Language :: Esperanto',
+'Natural Language :: Finnish',
+'Natural Language :: French',
+'Natural Language :: German',
+'Natural Language :: Greek',
+'Natural Language :: Hebrew',
+'Natural Language :: Hindi',
+'Natural Language :: Hungarian',
+'Natural Language :: Icelandic',
+'Natural Language :: Indonesian',
+'Natural Language :: Italian',
+'Natural Language :: Japanese',
+'Natural Language :: Javanese',
+'Natural Language :: Korean',
+'Natural Language :: Latin',
+'Natural Language :: Latvian',
+'Natural Language :: Macedonian',
+'Natural Language :: Malay',
+'Natural Language :: Marathi',
+'Natural Language :: Norwegian',
+'Natural Language :: Panjabi',
+'Natural Language :: Persian',
+'Natural Language :: Polish',
+'Natural Language :: Portuguese',
+'Natural Language :: Portuguese (Brazilian)',
+'Natural Language :: Romanian',
+'Natural Language :: Russian',
+'Natural Language :: Serbian',
+'Natural Language :: Slovak',
+'Natural Language :: Slovenian',
+'Natural Language :: Spanish',
+'Natural Language :: Swedish',
+'Natural Language :: Tamil',
+'Natural Language :: Telugu',
+'Natural Language :: Thai',
+'Natural Language :: Turkish',
+'Natural Language :: Ukranian',
+'Natural Language :: Urdu',
+'Natural Language :: Vietnamese',
+'Operating System :: BeOS',
+'Operating System :: MacOS',
+'Operating System :: MacOS :: MacOS 9',
+'Operating System :: MacOS :: MacOS X',
+'Operating System :: Microsoft',
+'Operating System :: Microsoft :: MS-DOS',
+'Operating System :: Microsoft :: Windows',
+'Operating System :: Microsoft :: Windows :: Windows 3.1 or Earlier',
+'Operating System :: Microsoft :: Windows :: Windows 95/98/2000',
+'Operating System :: Microsoft :: Windows :: Windows CE',
+'Operating System :: Microsoft :: Windows :: Windows NT/2000',
+'Operating System :: OS/2',
+'Operating System :: OS Independent',
+'Operating System :: Other OS',
+'Operating System :: PalmOS',
+'Operating System :: PDA Systems',
+'Operating System :: POSIX',
+'Operating System :: POSIX :: AIX',
+'Operating System :: POSIX :: BSD',
+'Operating System :: POSIX :: BSD :: BSD/OS',
+'Operating System :: POSIX :: BSD :: FreeBSD',
+'Operating System :: POSIX :: BSD :: NetBSD',
+'Operating System :: POSIX :: BSD :: OpenBSD',
+'Operating System :: POSIX :: GNU Hurd',
+'Operating System :: POSIX :: HP-UX',
+'Operating System :: POSIX :: IRIX',
+'Operating System :: POSIX :: Linux',
+'Operating System :: POSIX :: Other',
+'Operating System :: POSIX :: SCO',
+'Operating System :: POSIX :: SunOS/Solaris',
+'Operating System :: Unix',
+'Programming Language :: Ada',
+'Programming Language :: APL',
+'Programming Language :: ASP',
+'Programming Language :: Assembly',
+'Programming Language :: Awk',
+'Programming Language :: Basic',
+'Programming Language :: C',
+'Programming Language :: C#',
+'Programming Language :: C++',
+'Programming Language :: Cold Fusion',
+'Programming Language :: Cython',
+'Programming Language :: Delphi/Kylix',
+'Programming Language :: Dylan',
+'Programming Language :: Eiffel',
+'Programming Language :: Emacs-Lisp',
+'Programming Language :: Erlang',
+'Programming Language :: Euler',
+'Programming Language :: Euphoria',
+'Programming Language :: Forth',
+'Programming Language :: Fortran',
+'Programming Language :: Haskell',
+'Programming Language :: Java',
+'Programming Language :: JavaScript',
+'Programming Language :: Lisp',
+'Programming Language :: Logo',
+'Programming Language :: ML',
+'Programming Language :: Modula',
+'Programming Language :: Objective C',
+'Programming Language :: Object Pascal',
+'Programming Language :: OCaml',
+'Programming Language :: Other',
+'Programming Language :: Other Scripting Engines',
+'Programming Language :: Pascal',
+'Programming Language :: Perl',
+'Programming Language :: PHP',
+'Programming Language :: Pike',
+'Programming Language :: Pliant',
+'Programming Language :: PL/SQL',
+'Programming Language :: PROGRESS',
+'Programming Language :: Prolog',
+'Programming Language :: Python',
+'Programming Language :: Python :: 2',
+'Programming Language :: Python :: 2.3',
+'Programming Language :: Python :: 2.4',
+'Programming Language :: Python :: 2.5',
+'Programming Language :: Python :: 2.6',
+'Programming Language :: Python :: 2.7',
+'Programming Language :: Python :: 3',
+'Programming Language :: Python :: 3.0',
+'Programming Language :: Python :: 3.1',
+'Programming Language :: Python :: 3.2',
+'Programming Language :: REBOL',
+'Programming Language :: Rexx',
+'Programming Language :: Ruby',
+'Programming Language :: Scheme',
+'Programming Language :: Simula',
+'Programming Language :: Smalltalk',
+'Programming Language :: SQL',
+'Programming Language :: Tcl',
+'Programming Language :: Unix Shell',
+'Programming Language :: Visual Basic',
+'Programming Language :: XBasic',
+'Programming Language :: YACC',
+'Programming Language :: Zope',
+'Topic :: Adaptive Technologies',
+'Topic :: Artistic Software',
+'Topic :: Communications',
+'Topic :: Communications :: BBS',
+'Topic :: Communications :: Chat',
+'Topic :: Communications :: Chat :: AOL Instant Messenger',
+'Topic :: Communications :: Chat :: ICQ',
+'Topic :: Communications :: Chat :: Internet Relay Chat',
+'Topic :: Communications :: Chat :: Unix Talk',
+'Topic :: Communications :: Conferencing',
+'Topic :: Communications :: Email',
+'Topic :: Communications :: Email :: Address Book',
+'Topic :: Communications :: Email :: Email Clients (MUA)',
+'Topic :: Communications :: Email :: Filters',
+'Topic :: Communications :: Email :: Mailing List Servers',
+'Topic :: Communications :: Email :: Mail Transport Agents',
+'Topic :: Communications :: Email :: Post-Office',
+'Topic :: Communications :: Email :: Post-Office :: IMAP',
+'Topic :: Communications :: Email :: Post-Office :: POP3',
+'Topic :: Communications :: Fax',
+'Topic :: Communications :: FIDO',
+'Topic :: Communications :: File Sharing',
+'Topic :: Communications :: File Sharing :: Gnutella',
+'Topic :: Communications :: File Sharing :: Napster',
+'Topic :: Communications :: Ham Radio',
+'Topic :: Communications :: Internet Phone',
+'Topic :: Communications :: Telephony',
+'Topic :: Communications :: Usenet News',
+'Topic :: Database',
+'Topic :: Database :: Database Engines/Servers',
+'Topic :: Database :: Front-Ends',
+'Topic :: Desktop Environment',
+'Topic :: Desktop Environment :: File Managers',
+'Topic :: Desktop Environment :: Gnome',
+'Topic :: Desktop Environment :: GNUstep',
+'Topic :: Desktop Environment :: K Desktop Environment (KDE)',
+'Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes',
+'Topic :: Desktop Environment :: PicoGUI',
+'Topic :: Desktop Environment :: PicoGUI :: Applications',
+'Topic :: Desktop Environment :: PicoGUI :: Themes',
+'Topic :: Desktop Environment :: Screen Savers',
+'Topic :: Desktop Environment :: Window Managers',
+'Topic :: Desktop Environment :: Window Managers :: Afterstep',
+'Topic :: Desktop Environment :: Window Managers :: Afterstep :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Applets',
+'Topic :: Desktop Environment :: Window Managers :: Blackbox',
+'Topic :: Desktop Environment :: Window Managers :: Blackbox :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: CTWM',
+'Topic :: Desktop Environment :: Window Managers :: CTWM :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Epplets',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR15',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR16',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR17',
+'Topic :: Desktop Environment :: Window Managers :: Fluxbox',
+'Topic :: Desktop Environment :: Window Managers :: Fluxbox :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: FVWM',
+'Topic :: Desktop Environment :: Window Managers :: FVWM :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: IceWM',
+'Topic :: Desktop Environment :: Window Managers :: IceWM :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: MetaCity',
+'Topic :: Desktop Environment :: Window Managers :: MetaCity :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Oroborus',
+'Topic :: Desktop Environment :: Window Managers :: Oroborus :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Sawfish',
+'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes 0.30',
+'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes pre-0.30',
+'Topic :: Desktop Environment :: Window Managers :: Waimea',
+'Topic :: Desktop Environment :: Window Managers :: Waimea :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Window Maker',
+'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Applets',
+'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: XFCE',
+'Topic :: Desktop Environment :: Window Managers :: XFCE :: Themes',
+'Topic :: Documentation',
+'Topic :: Education',
+'Topic :: Education :: Computer Aided Instruction (CAI)',
+'Topic :: Education :: Testing',
+'Topic :: Games/Entertainment',
+'Topic :: Games/Entertainment :: Arcade',
+'Topic :: Games/Entertainment :: Board Games',
+'Topic :: Games/Entertainment :: First Person Shooters',
+'Topic :: Games/Entertainment :: Fortune Cookies',
+'Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)',
+'Topic :: Games/Entertainment :: Puzzle Games',
+'Topic :: Games/Entertainment :: Real Time Strategy',
+'Topic :: Games/Entertainment :: Role-Playing',
+'Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games',
+'Topic :: Games/Entertainment :: Simulation',
+'Topic :: Games/Entertainment :: Turn Based Strategy',
+'Topic :: Home Automation',
+'Topic :: Internet',
+'Topic :: Internet :: File Transfer Protocol (FTP)',
+'Topic :: Internet :: Finger',
+'Topic :: Internet :: Log Analysis',
+'Topic :: Internet :: Name Service (DNS)',
+'Topic :: Internet :: Proxy Servers',
+'Topic :: Internet :: WAP',
+'Topic :: Internet :: WWW/HTTP',
+'Topic :: Internet :: WWW/HTTP :: Browsers',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters',
+'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
+'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
+'Topic :: Internet :: WWW/HTTP :: Site Management',
+'Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking',
+'Topic :: Internet :: WWW/HTTP :: WSGI',
+'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
+'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
+'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
+'Topic :: Internet :: Z39.50',
+'Topic :: Multimedia',
+'Topic :: Multimedia :: Graphics',
+'Topic :: Multimedia :: Graphics :: 3D Modeling',
+'Topic :: Multimedia :: Graphics :: 3D Rendering',
+'Topic :: Multimedia :: Graphics :: Capture',
+'Topic :: Multimedia :: Graphics :: Capture :: Digital Camera',
+'Topic :: Multimedia :: Graphics :: Capture :: Scanners',
+'Topic :: Multimedia :: Graphics :: Capture :: Screen Capture',
+'Topic :: Multimedia :: Graphics :: Editors',
+'Topic :: Multimedia :: Graphics :: Editors :: Raster-Based',
+'Topic :: Multimedia :: Graphics :: Editors :: Vector-Based',
+'Topic :: Multimedia :: Graphics :: Graphics Conversion',
+'Topic :: Multimedia :: Graphics :: Presentation',
+'Topic :: Multimedia :: Graphics :: Viewers',
+'Topic :: Multimedia :: Sound/Audio',
+'Topic :: Multimedia :: Sound/Audio :: Analysis',
+'Topic :: Multimedia :: Sound/Audio :: Capture/Recording',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Writing',
+'Topic :: Multimedia :: Sound/Audio :: Conversion',
+'Topic :: Multimedia :: Sound/Audio :: Editors',
+'Topic :: Multimedia :: Sound/Audio :: MIDI',
+'Topic :: Multimedia :: Sound/Audio :: Mixers',
+'Topic :: Multimedia :: Sound/Audio :: Players',
+'Topic :: Multimedia :: Sound/Audio :: Players :: MP3',
+'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis',
+'Topic :: Multimedia :: Sound/Audio :: Speech',
+'Topic :: Multimedia :: Video',
+'Topic :: Multimedia :: Video :: Capture',
+'Topic :: Multimedia :: Video :: Conversion',
+'Topic :: Multimedia :: Video :: Display',
+'Topic :: Multimedia :: Video :: Non-Linear Editor',
+'Topic :: Office/Business',
+'Topic :: Office/Business :: Financial',
+'Topic :: Office/Business :: Financial :: Accounting',
+'Topic :: Office/Business :: Financial :: Investment',
+'Topic :: Office/Business :: Financial :: Point-Of-Sale',
+'Topic :: Office/Business :: Financial :: Spreadsheet',
+'Topic :: Office/Business :: Groupware',
+'Topic :: Office/Business :: News/Diary',
+'Topic :: Office/Business :: Office Suites',
+'Topic :: Office/Business :: Scheduling',
+'Topic :: Other/Nonlisted Topic',
+'Topic :: Printing',
+'Topic :: Religion',
+'Topic :: Scientific/Engineering',
+'Topic :: Scientific/Engineering :: Artificial Intelligence',
+'Topic :: Scientific/Engineering :: Astronomy',
+'Topic :: Scientific/Engineering :: Atmospheric Science',
+'Topic :: Scientific/Engineering :: Bio-Informatics',
+'Topic :: Scientific/Engineering :: Chemistry',
+'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)',
+'Topic :: Scientific/Engineering :: GIS',
+'Topic :: Scientific/Engineering :: Human Machine Interfaces',
+'Topic :: Scientific/Engineering :: Image Recognition',
+'Topic :: Scientific/Engineering :: Information Analysis',
+'Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator',
+'Topic :: Scientific/Engineering :: Mathematics',
+'Topic :: Scientific/Engineering :: Medical Science Apps.',
+'Topic :: Scientific/Engineering :: Physics',
+'Topic :: Scientific/Engineering :: Visualization',
+'Topic :: Security',
+'Topic :: Security :: Cryptography',
+'Topic :: Sociology',
+'Topic :: Sociology :: Genealogy',
+'Topic :: Sociology :: History',
+'Topic :: Software Development',
+'Topic :: Software Development :: Assemblers',
+'Topic :: Software Development :: Bug Tracking',
+'Topic :: Software Development :: Build Tools',
+'Topic :: Software Development :: Code Generators',
+'Topic :: Software Development :: Compilers',
+'Topic :: Software Development :: Debuggers',
+'Topic :: Software Development :: Disassemblers',
+'Topic :: Software Development :: Documentation',
+'Topic :: Software Development :: Embedded Systems',
+'Topic :: Software Development :: Internationalization',
+'Topic :: Software Development :: Interpreters',
+'Topic :: Software Development :: Libraries',
+'Topic :: Software Development :: Libraries :: Application Frameworks',
+'Topic :: Software Development :: Libraries :: Java Libraries',
+'Topic :: Software Development :: Libraries :: Perl Modules',
+'Topic :: Software Development :: Libraries :: PHP Classes',
+'Topic :: Software Development :: Libraries :: Pike Modules',
+'Topic :: Software Development :: Libraries :: pygame',
+'Topic :: Software Development :: Libraries :: Python Modules',
+'Topic :: Software Development :: Libraries :: Ruby Modules',
+'Topic :: Software Development :: Libraries :: Tcl Extensions',
+'Topic :: Software Development :: Localization',
+'Topic :: Software Development :: Object Brokering',
+'Topic :: Software Development :: Object Brokering :: CORBA',
+'Topic :: Software Development :: Pre-processors',
+'Topic :: Software Development :: Quality Assurance',
+'Topic :: Software Development :: Testing',
+'Topic :: Software Development :: Testing :: Traffic Generation',
+'Topic :: Software Development :: User Interfaces',
+'Topic :: Software Development :: Version Control',
+'Topic :: Software Development :: Version Control :: CVS',
+'Topic :: Software Development :: Version Control :: RCS',
+'Topic :: Software Development :: Version Control :: SCCS',
+'Topic :: Software Development :: Widget Sets',
+'Topic :: System',
+'Topic :: System :: Archiving',
+'Topic :: System :: Archiving :: Backup',
+'Topic :: System :: Archiving :: Compression',
+'Topic :: System :: Archiving :: Mirroring',
+'Topic :: System :: Archiving :: Packaging',
+'Topic :: System :: Benchmark',
+'Topic :: System :: Boot',
+'Topic :: System :: Boot :: Init',
+'Topic :: System :: Clustering',
+'Topic :: System :: Console Fonts',
+'Topic :: System :: Distributed Computing',
+'Topic :: System :: Emulators',
+'Topic :: System :: Filesystems',
+'Topic :: System :: Hardware',
+'Topic :: System :: Hardware :: Hardware Drivers',
+'Topic :: System :: Hardware :: Mainframes',
+'Topic :: System :: Hardware :: Symmetric Multi-processing',
+'Topic :: System :: Installation/Setup',
+'Topic :: System :: Logging',
+'Topic :: System :: Monitoring',
+'Topic :: System :: Networking',
+'Topic :: System :: Networking :: Firewalls',
+'Topic :: System :: Networking :: Monitoring',
+'Topic :: System :: Networking :: Monitoring :: Hardware Watchdog',
+'Topic :: System :: Networking :: Time Synchronization',
+'Topic :: System :: Operating System',
+'Topic :: System :: Operating System Kernels',
+'Topic :: System :: Operating System Kernels :: BSD',
+'Topic :: System :: Operating System Kernels :: GNU Hurd',
+'Topic :: System :: Operating System Kernels :: Linux',
+'Topic :: System :: Power (UPS)',
+'Topic :: System :: Recovery Tools',
+'Topic :: System :: Shells',
+'Topic :: System :: Software Distribution',
+'Topic :: System :: Systems Administration',
+'Topic :: System :: Systems Administration :: Authentication/Directory',
+'Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP',
+'Topic :: System :: Systems Administration :: Authentication/Directory :: NIS',
+'Topic :: System :: System Shells',
+'Topic :: Terminals',
+'Topic :: Terminals :: Serial',
+'Topic :: Terminals :: Telnet',
+'Topic :: Terminals :: Terminal Emulators/X Terminals',
+'Topic :: Text Editors',
+'Topic :: Text Editors :: Documentation',
+'Topic :: Text Editors :: Emacs',
+'Topic :: Text Editors :: Integrated Development Environments (IDE)',
+'Topic :: Text Editors :: Text Processing',
+'Topic :: Text Editors :: Word Processors',
+'Topic :: Text Processing',
+'Topic :: Text Processing :: Filters',
+'Topic :: Text Processing :: Fonts',
+'Topic :: Text Processing :: General',
+'Topic :: Text Processing :: Indexing',
+'Topic :: Text Processing :: Linguistic',
+'Topic :: Text Processing :: Markup',
+'Topic :: Text Processing :: Markup :: HTML',
+'Topic :: Text Processing :: Markup :: LaTeX',
+'Topic :: Text Processing :: Markup :: SGML',
+'Topic :: Text Processing :: Markup :: VRML',
+'Topic :: Text Processing :: Markup :: XML',
+'Topic :: Utilities',
+]
diff --git a/Lib/packaging/command/__init__.py b/Lib/packaging/command/__init__.py
new file mode 100644
index 0000000..6a37850
--- /dev/null
+++ b/Lib/packaging/command/__init__.py
@@ -0,0 +1,56 @@
+"""Subpackage containing all standard commands."""
+
+from packaging.errors import PackagingModuleError
+from packaging.util import resolve_name
+
+__all__ = ['get_command_names', 'set_command', 'get_command_class',
+           'STANDARD_COMMANDS']
+
+_COMMANDS = {
+    'check': 'packaging.command.check.check',
+    'test': 'packaging.command.test.test',
+    'build': 'packaging.command.build.build',
+    'build_py': 'packaging.command.build_py.build_py',
+    'build_ext': 'packaging.command.build_ext.build_ext',
+    'build_clib': 'packaging.command.build_clib.build_clib',
+    'build_scripts': 'packaging.command.build_scripts.build_scripts',
+    'clean': 'packaging.command.clean.clean',
+    'install_dist': 'packaging.command.install_dist.install_dist',
+    'install_lib': 'packaging.command.install_lib.install_lib',
+    'install_headers': 'packaging.command.install_headers.install_headers',
+    'install_scripts': 'packaging.command.install_scripts.install_scripts',
+    'install_data': 'packaging.command.install_data.install_data',
+    'install_distinfo':
+        'packaging.command.install_distinfo.install_distinfo',
+    'sdist': 'packaging.command.sdist.sdist',
+    'bdist': 'packaging.command.bdist.bdist',
+    'bdist_dumb': 'packaging.command.bdist_dumb.bdist_dumb',
+    'bdist_wininst': 'packaging.command.bdist_wininst.bdist_wininst',
+    'register': 'packaging.command.register.register',
+    'upload': 'packaging.command.upload.upload',
+    'upload_docs': 'packaging.command.upload_docs.upload_docs'}
+
+STANDARD_COMMANDS = set(_COMMANDS)
+
+
+def get_command_names():
+    """Return registered commands"""
+    return sorted(_COMMANDS)
+
+
+def set_command(location):
+    cls = resolve_name(location)
+    # XXX we want to do the duck-type checking here
+    _COMMANDS[cls.get_command_name()] = cls
+
+
+def get_command_class(name):
+    """Return the registered command"""
+    try:
+        cls = _COMMANDS[name]
+        if isinstance(cls, str):
+            cls = resolve_name(cls)
+            _COMMANDS[name] = cls
+        return cls
+    except KeyError:
+        raise PackagingModuleError("Invalid command %s" % name)
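
get_command_class() implements a lazy registry: entries start out as dotted-name strings and are resolved to classes (then cached back into the dict) only on first use, so importing packaging.command does not drag in every command module. A minimal sketch of the same mechanism, with a simplified resolve_name() (the real one in packaging.util handles nested attributes):

    from importlib import import_module

    def resolve_name(name):
        module_name, _, attr = name.rpartition('.')
        return getattr(import_module(module_name), attr)

    _REGISTRY = {'dumps': 'json.dumps'}      # strings until first use

    def get(name):
        obj = _REGISTRY[name]
        if isinstance(obj, str):
            obj = resolve_name(obj)          # import lazily...
            _REGISTRY[name] = obj            # ...and cache the object
        return obj

    print(get('dumps')({'a': 1}))            # -> {"a": 1}
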
diff --git a/Lib/packaging/command/bdist.py b/Lib/packaging/command/bdist.py
new file mode 100644
index 0000000..4338a97
--- /dev/null
+++ b/Lib/packaging/command/bdist.py
@@ -0,0 +1,141 @@
+"""Create a built (binary) distribution.
+
+If a --formats option was given on the command line, this command will
+call the corresponding bdist_* commands; if the option was absent, a
+bdist_* command depending on the current platform will be called.
+"""
+
+import os
+
+from packaging import util
+from packaging.command.cmd import Command
+from packaging.errors import PackagingPlatformError, PackagingOptionError
+
+
+def show_formats():
+    """Print list of available formats (arguments to "--format" option).
+    """
+    from packaging.fancy_getopt import FancyGetopt
+    formats = []
+    for format in bdist.format_commands:
+        formats.append(("formats=" + format, None,
+                        bdist.format_command[format][1]))
+    pretty_printer = FancyGetopt(formats)
+    pretty_printer.print_help("List of available distribution formats:")
+
+
+class bdist(Command):
+
+    description = "create a built (binary) distribution"
+
+    user_options = [('bdist-base=', 'b',
+                     "temporary directory for creating built distributions"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % util.get_platform()),
+                    ('formats=', None,
+                     "formats for distribution (comma-separated list)"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in "
+                     "[default: dist]"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('owner=', 'u',
+                     "Owner name used when creating a tar file"
+                     " [default: current user]"),
+                    ('group=', 'g',
+                     "Group name used when creating a tar file"
+                     " [default: current group]"),
+                   ]
+
+    boolean_options = ['skip-build']
+
+    help_options = [
+        ('help-formats', None,
+         "lists available distribution formats", show_formats),
+        ]
+
+    # This is of course very simplistic.  The various UNIX family operating
+    # systems have their specific formats, but they are out of scope for us;
+    # bdist_dumb is, well, dumb; it's more a building block for other
+    # packaging tools than a real end-user binary format.
+    default_format = {'posix': 'gztar',
+                      'nt': 'zip',
+                      'os2': 'zip'}
+
+    # Establish the preferred order (for the --help-formats option).
+    format_commands = ['gztar', 'bztar', 'ztar', 'tar',
+                       'wininst', 'zip', 'msi']
+
+    # And the real information.
+    format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file"),
+                      'bztar': ('bdist_dumb', "bzip2'ed tar file"),
+                      'ztar':  ('bdist_dumb', "compressed tar file"),
+                      'tar':   ('bdist_dumb', "tar file"),
+                      'wininst': ('bdist_wininst',
+                                  "Windows executable installer"),
+                      'zip':   ('bdist_dumb', "ZIP file"),
+                      'msi':   ('bdist_msi',  "Microsoft Installer")
+                      }
+
+
+    def initialize_options(self):
+        self.bdist_base = None
+        self.plat_name = None
+        self.formats = None
+        self.dist_dir = None
+        self.skip_build = False
+        self.group = None
+        self.owner = None
+
+    def finalize_options(self):
+        # have to finalize 'plat_name' before 'bdist_base'
+        if self.plat_name is None:
+            if self.skip_build:
+                self.plat_name = util.get_platform()
+            else:
+                self.plat_name = self.get_finalized_command('build').plat_name
+
+        # 'bdist_base' -- parent of per-built-distribution-format
+        # temporary directories (eg. we'll probably have
+        # "build/bdist.<plat>/dumb", etc.)
+        if self.bdist_base is None:
+            build_base = self.get_finalized_command('build').build_base
+            self.bdist_base = os.path.join(build_base,
+                                           'bdist.' + self.plat_name)
+
+        self.ensure_string_list('formats')
+        if self.formats is None:
+            try:
+                self.formats = [self.default_format[os.name]]
+            except KeyError:
+                raise PackagingPlatformError(
+                    "don't know how to create built distributions "
+                    "on platform %s" % os.name)
+
+        if self.dist_dir is None:
+            self.dist_dir = "dist"
+
+    def run(self):
+        # Figure out which sub-commands we need to run.
+        commands = []
+        for format in self.formats:
+            try:
+                commands.append(self.format_command[format][0])
+            except KeyError:
+                raise PackagingOptionError("invalid format '%s'" % format)
+
+        # Reinitialize and run each command.
+        for i, cmd_name in enumerate(commands):
+            sub_cmd = self.get_reinitialized_command(cmd_name)
+
+            # passing the owner and group names for tar archiving
+            if cmd_name == 'bdist_dumb':
+                sub_cmd.owner = self.owner
+                sub_cmd.group = self.group
+
+            # If we're going to need to run this command again, tell it to
+            # keep its temporary files around so subsequent runs go faster.
+            if cmd_name in commands[i+1:]:
+                sub_cmd.keep_temp = True
+            self.run_command(cmd_name)
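
Reviewer note: run() above is a table-driven dispatch -- each requested
format is mapped through format_command to the sub-command that builds it,
and keep_temp is set whenever the same sub-command will run again for a
later format.  A minimal standalone sketch of the lookup (the trimmed-down
table and the plan() helper are illustrative, not part of the patch):

    format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file"),
                      'zip':   ('bdist_dumb', "ZIP file"),
                      'msi':   ('bdist_msi',  "Microsoft Installer")}

    def plan(formats):
        # map formats to commands, rejecting unknown names up front
        try:
            return [format_command[f][0] for f in formats]
        except KeyError as e:
            raise ValueError("invalid format %r" % e.args[0])

    print(plan(['gztar', 'msi']))   # -> ['bdist_dumb', 'bdist_msi']
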
diff --git a/Lib/packaging/command/bdist_dumb.py b/Lib/packaging/command/bdist_dumb.py
new file mode 100644
index 0000000..f74b720
--- /dev/null
+++ b/Lib/packaging/command/bdist_dumb.py
@@ -0,0 +1,137 @@
+"""Create a "dumb" built distribution.
+
+A dumb distribution is just an archive meant to be unpacked under
+sys.prefix or sys.exec_prefix.
+"""
+
+import os
+
+from shutil import rmtree
+from sysconfig import get_python_version
+from packaging.util import get_platform
+from packaging.command.cmd import Command
+from packaging.errors import PackagingPlatformError
+from packaging import logger
+
+class bdist_dumb(Command):
+
+    description = 'create a "dumb" built distribution'
+
+    user_options = [('bdist-dir=', 'd',
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('format=', 'f',
+                     "archive format to create (tar, ztar, gztar, zip)"),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('relative', None,
+                     "build the archive using relative paths"
+                     "(default: false)"),
+                    ('owner=', 'u',
+                     "Owner name used when creating a tar file"
+                     " [default: current user]"),
+                    ('group=', 'g',
+                     "Group name used when creating a tar file"
+                     " [default: current group]"),
+                   ]
+
+    boolean_options = ['keep-temp', 'skip-build', 'relative']
+
+    default_format = {'posix': 'gztar',
+                      'nt': 'zip',
+                      'os2': 'zip'}
+
+    def initialize_options(self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.format = None
+        self.keep_temp = False
+        self.dist_dir = None
+        self.skip_build = False
+        self.relative = False
+        self.owner = None
+        self.group = None
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'dumb')
+
+        if self.format is None:
+            try:
+                self.format = self.default_format[os.name]
+            except KeyError:
+                raise PackagingPlatformError(
+                    "don't know how to create dumb built distributions "
+                    "on platform %s" % os.name)
+
+        self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
+
+    def run(self):
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.get_reinitialized_command('install_dist',
+                                                 reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        logger.info("installing to %s", self.bdist_dir)
+        self.run_command('install_dist')
+
+        # And make an archive relative to the root of the
+        # pseudo-installation tree.
+        archive_basename = "%s.%s" % (self.distribution.get_fullname(),
+                                      self.plat_name)
+
+        # OS/2 objects to any ":" characters in a filename (such as when
+        # a timestamp is used in a version) so change them to hyphens.
+        if os.name == "os2":
+            archive_basename = archive_basename.replace(":", "-")
+
+        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            if (self.distribution.has_ext_modules() and
+                (install.install_base != install.install_platbase)):
+                raise PackagingPlatformError(
+                    "can't make a dumb built distribution where base and "
+                    "platbase are different (%r, %r)" %
+                    (install.install_base, install.install_platbase))
+            else:
+                archive_root = os.path.join(
+                    self.bdist_dir,
+                    self._ensure_relative(install.install_base))
+
+        # Make the archive
+        filename = self.make_archive(pseudoinstall_root,
+                                     self.format, root_dir=archive_root,
+                                     owner=self.owner, group=self.group)
+        if self.distribution.has_ext_modules():
+            pyversion = get_python_version()
+        else:
+            pyversion = 'any'
+        self.distribution.dist_files.append(('bdist_dumb', pyversion,
+                                             filename))
+
+        if not self.keep_temp:
+            if self.dry_run:
+                logger.info('removing %s', self.bdist_dir)
+            else:
+                rmtree(self.bdist_dir)
+
+    def _ensure_relative(self, path):
+        # copied from dir_util, which packaging no longer provides
+        drive, path = os.path.splitdrive(path)
+        if path[0:1] == os.sep:
+            path = drive + path[1:]
+        return path
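
Reviewer note: _ensure_relative() strips the leading separator (keeping any
drive letter) so the installed tree can be re-rooted under bdist_dir.  A
standalone sketch of the same transformation, with illustrative inputs:

    import os

    def ensure_relative(path):
        # mirror of bdist_dumb._ensure_relative, for experimentation
        drive, path = os.path.splitdrive(path)
        if path[0:1] == os.sep:
            path = drive + path[1:]
        return path

    print(ensure_relative('/usr/local'))                # 'usr/local' on POSIX
    print(ensure_relative('c:' + os.sep + 'Python32'))  # 'c:Python32' on Windows
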
diff --git a/Lib/packaging/command/bdist_msi.py b/Lib/packaging/command/bdist_msi.py
new file mode 100644
index 0000000..493f8b3
--- /dev/null
+++ b/Lib/packaging/command/bdist_msi.py
@@ -0,0 +1,740 @@
+"""Create a Microsoft Installer (.msi) binary distribution."""
+
+# Copyright (C) 2005, 2006 Martin von Löwis
+# Licensed to PSF under a Contributor Agreement.
+
+import sys
+import os
+import msilib
+
+from sysconfig import get_python_version
+from shutil import rmtree
+from packaging.command.cmd import Command
+from packaging.version import NormalizedVersion
+from packaging.errors import PackagingOptionError
+from packaging import logger as log
+from packaging.util import get_platform
+from msilib import schema, sequence, text
+from msilib import Directory, Feature, Dialog, add_data
+
+class MSIVersion(NormalizedVersion):
+    """
+    MSI ProductVersion must be strictly numeric.
+    MSIVersion disallows prerelease and postrelease versions.
+    """
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if not self.is_final:
+            raise ValueError("ProductVersion must be strictly numeric")
+
+class PyDialog(Dialog):
+    """Dialog class with a fixed layout: controls at the top, then a ruler,
+    then a list of buttons: back, next, cancel. Optionally a bitmap at the
+    left."""
+    def __init__(self, *args, **kw):
+        """Dialog(database, name, x, y, w, h, attributes, title, first,
+        default, cancel, bitmap=true)"""
+        Dialog.__init__(self, *args)
+        ruler = self.h - 36
+        #if kw.get("bitmap", True):
+        #    self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
+        self.line("BottomLine", 0, ruler, self.w, 0)
+
+    def title(self, title):
+        "Set the title text of the dialog at the top."
+        # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
+        # text, in VerdanaBold10
+        self.text("Title", 15, 10, 320, 60, 0x30003,
+                  r"{\VerdanaBold10}%s" % title)
+
+    def back(self, title, next, name="Back", active=1):
+        """Add a back button with a given title, the tab-next button,
+        its name in the Control table, possibly initially disabled.
+
+        Return the button, so that events can be associated"""
+        if active:
+            flags = 3 # Visible|Enabled
+        else:
+            flags = 1 # Visible
+        return self.pushbutton(name, 180, self.h-27, 56, 17, flags, title, next)
+
+    def cancel(self, title, next, name="Cancel", active=1):
+        """Add a cancel button with a given title, the tab-next button,
+        its name in the Control table, possibly initially disabled.
+
+        Return the button, so that events can be associated"""
+        if active:
+            flags = 3 # Visible|Enabled
+        else:
+            flags = 1 # Visible
+        return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
+
+    def next(self, title, next, name="Next", active=1):
+        """Add a Next button with a given title, the tab-next button,
+        its name in the Control table, possibly initially disabled.
+
+        Return the button, so that events can be associated"""
+        if active:
+            flags = 3 # Visible|Enabled
+        else:
+            flags = 1 # Visible
+        return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
+
+    def xbutton(self, name, title, next, xpos):
+        """Add a button with a given title, the tab-next button,
+        its name in the Control table, giving its x position; the
+        y-position is aligned with the other buttons.
+
+        Return the button, so that events can be associated"""
+        return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
+
+class bdist_msi(Command):
+
+    description = "create a Microsoft Installer (.msi) binary distribution"
+
+    user_options = [('bdist-dir=', None,
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('target-version=', None,
+                     "require a specific python version" +
+                     " on the target system"),
+                    ('no-target-compile', 'c',
+                     "do not compile .py to .pyc on the target system"),
+                    ('no-target-optimize', 'o',
+                     "do not compile .py to .pyo (optimized)"
+                     "on the target system"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('install-script=', None,
+                     "basename of installation script to be run after"
+                     "installation or before deinstallation"),
+                    ('pre-install-script=', None,
+                     "Fully qualified filename of a script to be run before "
+                     "any files are installed.  This script need not be in the "
+                     "distribution"),
+                   ]
+
+    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
+                       'skip-build']
+
+    all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
+                    '2.5', '2.6', '2.7', '2.8', '2.9',
+                    '3.0', '3.1', '3.2', '3.3', '3.4',
+                    '3.5', '3.6', '3.7', '3.8', '3.9']
+    other_version = 'X'
+
+    def initialize_options(self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.keep_temp = False
+        self.no_target_compile = False
+        self.no_target_optimize = False
+        self.target_version = None
+        self.dist_dir = None
+        self.skip_build = False
+        self.install_script = None
+        self.pre_install_script = None
+        self.versions = None
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'msi')
+        short_version = get_python_version()
+        if (not self.target_version) and self.distribution.has_ext_modules():
+            self.target_version = short_version
+        if self.target_version:
+            self.versions = [self.target_version]
+            if not self.skip_build and self.distribution.has_ext_modules()\
+               and self.target_version != short_version:
+                raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \
+                      " option must be specified" % (short_version,))
+        else:
+            self.versions = list(self.all_versions)
+
+        self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
+
+        if self.pre_install_script:
+            raise PackagingOptionError("the pre-install-script feature is not yet implemented")
+
+        if self.install_script:
+            for script in self.distribution.scripts:
+                if self.install_script == os.path.basename(script):
+                    break
+            else:
+                raise PackagingOptionError("install_script '%s' not found in scripts" % \
+                      self.install_script)
+        self.install_script_key = None
+
+    def run(self):
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.get_reinitialized_command('install_dist',
+                                                 reinit_subcommands=True)
+        install.prefix = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        install_lib = self.get_reinitialized_command('install_lib')
+        # we do not want to include pyc or pyo files
+        install_lib.compile = False
+        install_lib.optimize = 0
+
+        if self.distribution.has_ext_modules():
+            # If we are building an installer for a Python version other
+            # than the one we are currently running, then we need to ensure
+            # our build_lib reflects the other Python version rather than ours.
+            # Note that for target_version!=sys.version, we must have skipped the
+            # build step, so there is no issue with enforcing the build of this
+            # version.
+            target_version = self.target_version
+            if not target_version:
+                assert self.skip_build, "Should have already checked this"
+                target_version = sys.version[0:3]
+            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+            build = self.get_finalized_command('build')
+            build.build_lib = os.path.join(build.build_base,
+                                           'lib' + plat_specifier)
+
+        log.info("installing to %s", self.bdist_dir)
+        install.ensure_finalized()
+
+        # avoid warning of 'install_lib' about installing
+        # into a directory not in sys.path
+        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+        install.run()
+
+        del sys.path[0]
+
+        self.mkpath(self.dist_dir)
+        fullname = self.distribution.get_fullname()
+        installer_name = self.get_installer_filename(fullname)
+        installer_name = os.path.abspath(installer_name)
+        if os.path.exists(installer_name):
+            os.unlink(installer_name)
+
+        metadata = self.distribution.metadata
+        author = metadata.author
+        if not author:
+            author = metadata.maintainer
+        if not author:
+            author = "UNKNOWN"
+        version = MSIVersion(metadata.get_version())
+        # Prefix ProductName with Python x.y, so that
+        # it sorts together with the other Python packages
+        # in Add-Remove-Programs (ARP)
+        fullname = self.distribution.get_fullname()
+        if self.target_version:
+            product_name = "Python %s %s" % (self.target_version, fullname)
+        else:
+            product_name = "Python %s" % (fullname)
+        self.db = msilib.init_database(installer_name, schema,
+                product_name, msilib.gen_uuid(),
+                str(version), author)
+        msilib.add_tables(self.db, sequence)
+        # msilib.add_data only accepts int, str or Binary fields
+        props = [('DistVersion', str(version))]
+        email = metadata.author_email or metadata.maintainer_email
+        if email:
+            props.append(("ARPCONTACT", email))
+        if metadata.url:
+            props.append(("ARPURLINFOABOUT", metadata.url))
+        if props:
+            add_data(self.db, 'Property', props)
+
+        self.add_find_python()
+        self.add_files()
+        self.add_scripts()
+        self.add_ui()
+        self.db.Commit()
+
+        if hasattr(self.distribution, 'dist_files'):
+            # record the installer's path, as the other bdist_* commands do
+            tup = 'bdist_msi', self.target_version or 'any', installer_name
+            self.distribution.dist_files.append(tup)
+
+        if not self.keep_temp:
+            log.info("removing temporary build directory %s", self.bdist_dir)
+            if not self.dry_run:
+                rmtree(self.bdist_dir)
+
+    def add_files(self):
+        db = self.db
+        cab = msilib.CAB("distfiles")
+        rootdir = os.path.abspath(self.bdist_dir)
+
+        root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
+        f = Feature(db, "Python", "Python", "Everything",
+                    0, 1, directory="TARGETDIR")
+
+        items = [(f, root, '')]
+        for version in self.versions + [self.other_version]:
+            target = "TARGETDIR" + version
+            name = default = "Python" + version
+            desc = "Everything"
+            if version is self.other_version:
+                title = "Python from another location"
+                level = 2
+            else:
+                title = "Python %s from registry" % version
+                level = 1
+            f = Feature(db, name, title, desc, 1, level, directory=target)
+            dir = Directory(db, cab, root, rootdir, target, default)
+            items.append((f, dir, version))
+        db.Commit()
+
+        seen = {}
+        for feature, dir, version in items:
+            todo = [dir]
+            while todo:
+                dir = todo.pop()
+                for file in os.listdir(dir.absolute):
+                    afile = os.path.join(dir.absolute, file)
+                    if os.path.isdir(afile):
+                        short = "%s|%s" % (dir.make_short(file), file)
+                        default = file + version
+                        newdir = Directory(db, cab, dir, file, default, short)
+                        todo.append(newdir)
+                    else:
+                        if not dir.component:
+                            dir.start_component(dir.logical, feature, 0)
+                        if afile not in seen:
+                            key = seen[afile] = dir.add_file(file)
+                            if file == self.install_script:
+                                if self.install_script_key:
+                                    raise PackagingOptionError(
+                                          "Multiple files with name %s" % file)
+                                self.install_script_key = '[#%s]' % key
+                        else:
+                            key = seen[afile]
+                            add_data(self.db, "DuplicateFile",
+                                [(key + version, dir.component, key, None, dir.logical)])
+            db.Commit()
+        cab.commit(db)
+
+    def add_find_python(self):
+        """Adds code to the installer to compute the location of Python.
+
+        Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
+        registry for each version of Python.
+
+        Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
+        else from PYTHON.MACHINE.X.Y.
+
+        Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
+
+        start = 402
+        for ver in self.versions:
+            install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
+            machine_reg = "python.machine." + ver
+            user_reg = "python.user." + ver
+            machine_prop = "PYTHON.MACHINE." + ver
+            user_prop = "PYTHON.USER." + ver
+            machine_action = "PythonFromMachine" + ver
+            user_action = "PythonFromUser" + ver
+            exe_action = "PythonExe" + ver
+            target_dir_prop = "TARGETDIR" + ver
+            exe_prop = "PYTHON" + ver
+            if msilib.Win64:
+                # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
+                Type = 2+16
+            else:
+                Type = 2
+            add_data(self.db, "RegLocator",
+                    [(machine_reg, 2, install_path, None, Type),
+                     (user_reg, 1, install_path, None, Type)])
+            add_data(self.db, "AppSearch",
+                    [(machine_prop, machine_reg),
+                     (user_prop, user_reg)])
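+            # CustomAction type 51 below is "set property from formatted
+            # text"; the +256 bit (first-sequence) prevents each action
+            # from running a second time when both the UI and the execute
+            # sequences are processed.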
+            add_data(self.db, "CustomAction",
+                    [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
+                     (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
+                     (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
+                    ])
+            add_data(self.db, "InstallExecuteSequence",
+                    [(machine_action, machine_prop, start),
+                     (user_action, user_prop, start + 1),
+                     (exe_action, None, start + 2),
+                    ])
+            add_data(self.db, "InstallUISequence",
+                    [(machine_action, machine_prop, start),
+                     (user_action, user_prop, start + 1),
+                     (exe_action, None, start + 2),
+                    ])
+            add_data(self.db, "Condition",
+                    [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
+            start += 4
+            assert start < 500
+
+    def add_scripts(self):
+        if self.install_script:
+            start = 6800
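+            # CustomAction type 50 runs the executable named by the source
+            # property (PYTHONX.Y) with the target as its argument, i.e.
+            # the [#file] key of the install script; the "&PythonX.Y=3"
+            # condition fires only when that feature is selected for local
+            # installation (3 == INSTALLSTATE_LOCAL).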
+            for ver in self.versions + [self.other_version]:
+                install_action = "install_script." + ver
+                exe_prop = "PYTHON" + ver
+                add_data(self.db, "CustomAction",
+                        [(install_action, 50, exe_prop, self.install_script_key)])
+                add_data(self.db, "InstallExecuteSequence",
+                        [(install_action, "&Python%s=3" % ver, start)])
+                start += 1
+        # XXX pre-install scripts are currently refused in finalize_options()
+        #     but if this feature is completed, it will also need to add
+        #     entries for each version as the above code does
+        if self.pre_install_script:
+            scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
+            with open(scriptfn, "w") as f:
+                # The batch file will be executed with [PYTHON], so that %1
+                # is the path to the Python interpreter; %0 will be the path
+                # of the batch file.
+                # rem ="""
+                # %1 %0
+                # exit
+                # """
+                # <actual script>
+                f.write('rem ="""\n%1 %0\nexit\n"""\n')
+                with open(self.pre_install_script) as fp:
+                    f.write(fp.read())
+            add_data(self.db, "Binary",
+                     [("PreInstall", msilib.Binary(scriptfn)),
+                     ])
+            add_data(self.db, "CustomAction",
+                     [("PreInstall", 2, "PreInstall", None),
+                     ])
+            add_data(self.db, "InstallExecuteSequence",
+                     [("PreInstall", "NOT Installed", 450),
+                     ])
+
+    def add_ui(self):
+        db = self.db
+        x = y = 50
+        w = 370
+        h = 300
+        title = "[ProductName] Setup"
+
+        # see "Dialog Style Bits"
+        modal = 3      # visible | modal
+        modeless = 1   # visible
+
+        # UI customization properties
+        add_data(db, "Property",
+                 # See "DefaultUIFont Property"
+                 [("DefaultUIFont", "DlgFont8"),
+                  # See "ErrorDialog Style Bit"
+                  ("ErrorDialog", "ErrorDlg"),
+                  ("Progress1", "Install"),   # modified in maintenance type dlg
+                  ("Progress2", "installs"),
+                  ("MaintenanceForm_Action", "Repair"),
+                  # possible values: ALL, JUSTME
+                  ("WhichUsers", "ALL")
+                 ])
+
+        # Fonts, see "TextStyle Table"
+        add_data(db, "TextStyle",
+                 [("DlgFont8", "Tahoma", 9, None, 0),
+                  ("DlgFontBold8", "Tahoma", 8, None, 1), #bold
+                  ("VerdanaBold10", "Verdana", 10, None, 1),
+                  ("VerdanaRed9", "Verdana", 9, 255, 0),
+                 ])
+
+        # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
+        # Numbers indicate sequence; see sequence.py for how these actions integrate
+        add_data(db, "InstallUISequence",
+                 [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
+                  ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
+                  # In the user interface, assume all-users installation if privileged.
+                  ("SelectFeaturesDlg", "Not Installed", 1230),
+                  # XXX no support for resume installations yet
+                  #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
+                  ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
+                  ("ProgressDlg", None, 1280)])
+
+        add_data(db, 'ActionText', text.ActionText)
+        add_data(db, 'UIText', text.UIText)
+        #####################################################################
+        # Standard dialogs: FatalError, UserExit, ExitDialog
+        fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
+                     "Finish", "Finish", "Finish")
+        fatal.title("[ProductName] Installer ended prematurely")
+        fatal.back("< Back", "Finish", active = 0)
+        fatal.cancel("Cancel", "Back", active = 0)
+        fatal.text("Description1", 15, 70, 320, 80, 0x30003,
+                   "[ProductName] setup ended prematurely because of an error.  Your system has not been modified.  To install this program at a later time, please run the installation again.")
+        fatal.text("Description2", 15, 155, 320, 20, 0x30003,
+                   "Click the Finish button to exit the Installer.")
+        c=fatal.next("Finish", "Cancel", name="Finish")
+        c.event("EndDialog", "Exit")
+
+        user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
+                     "Finish", "Finish", "Finish")
+        user_exit.title("[ProductName] Installer was interrupted")
+        user_exit.back("< Back", "Finish", active = 0)
+        user_exit.cancel("Cancel", "Back", active = 0)
+        user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
+                   "[ProductName] setup was interrupted.  Your system has not been modified.  "
+                   "To install this program at a later time, please run the installation again.")
+        user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
+                   "Click the Finish button to exit the Installer.")
+        c = user_exit.next("Finish", "Cancel", name="Finish")
+        c.event("EndDialog", "Exit")
+
+        exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
+                             "Finish", "Finish", "Finish")
+        exit_dialog.title("Completing the [ProductName] Installer")
+        exit_dialog.back("< Back", "Finish", active = 0)
+        exit_dialog.cancel("Cancel", "Back", active = 0)
+        exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
+                   "Click the Finish button to exit the Installer.")
+        c = exit_dialog.next("Finish", "Cancel", name="Finish")
+        c.event("EndDialog", "Return")
+
+        #####################################################################
+        # Required dialog: FilesInUse, ErrorDlg
+        inuse = PyDialog(db, "FilesInUse",
+                         x, y, w, h,
+                         19,                # KeepModeless|Modal|Visible
+                         title,
+                         "Retry", "Retry", "Retry", bitmap=False)
+        inuse.text("Title", 15, 6, 200, 15, 0x30003,
+                   r"{\DlgFontBold8}Files in Use")
+        inuse.text("Description", 20, 23, 280, 20, 0x30003,
+               "Some files that need to be updated are currently in use.")
+        inuse.text("Text", 20, 55, 330, 50, 3,
+                   "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
+        inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
+                      None, None, None)
+        c=inuse.back("Exit", "Ignore", name="Exit")
+        c.event("EndDialog", "Exit")
+        c=inuse.next("Ignore", "Retry", name="Ignore")
+        c.event("EndDialog", "Ignore")
+        c=inuse.cancel("Retry", "Exit", name="Retry")
+        c.event("EndDialog","Retry")
+
+        # See "Error Dialog". See "ICE20" for the required names of the controls.
+        error = Dialog(db, "ErrorDlg",
+                       50, 10, 330, 101,
+                       65543,       # Error|Minimize|Modal|Visible
+                       title,
+                       "ErrorText", None, None)
+        error.text("ErrorText", 50,9,280,48,3, "")
+        #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
+        error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
+        error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
+        error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
+        error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
+        error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
+        error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
+        error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
+
+        #####################################################################
+        # Global "Query Cancel" dialog
+        cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
+                        "No", "No", "No")
+        cancel.text("Text", 48, 15, 194, 30, 3,
+                    "Are you sure you want to cancel [ProductName] installation?")
+        #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
+        #               "py.ico", None, None)
+        c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
+        c.event("EndDialog", "Exit")
+
+        c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
+        c.event("EndDialog", "Return")
+
+        #####################################################################
+        # Global "Wait for costing" dialog
+        costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
+                         "Return", "Return", "Return")
+        costing.text("Text", 48, 15, 194, 30, 3,
+                     "Please wait while the installer finishes determining your disk space requirements.")
+        c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
+        c.event("EndDialog", "Exit")
+
+        #####################################################################
+        # Preparation dialog: no user input except cancellation
+        prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
+                        "Cancel", "Cancel", "Cancel")
+        prep.text("Description", 15, 70, 320, 40, 0x30003,
+                  "Please wait while the Installer prepares to guide you through the installation.")
+        prep.title("Welcome to the [ProductName] Installer")
+        c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
+        c.mapping("ActionText", "Text")
+        c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
+        c.mapping("ActionData", "Text")
+        prep.back("Back", None, active=0)
+        prep.next("Next", None, active=0)
+        c=prep.cancel("Cancel", None)
+        c.event("SpawnDialog", "CancelDlg")
+
+        #####################################################################
+        # Feature (Python directory) selection
+        seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
+                        "Next", "Next", "Cancel")
+        seldlg.title("Select Python Installations")
+
+        seldlg.text("Hint", 15, 30, 300, 20, 3,
+                    "Select the Python locations where %s should be installed."
+                    % self.distribution.get_fullname())
+
+        seldlg.back("< Back", None, active=0)
+        c = seldlg.next("Next >", "Cancel")
+        order = 1
+        c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
+        for version in self.versions + [self.other_version]:
+            order += 1
+            c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
+                    "FEATURE_SELECTED AND &Python%s=3" % version,
+                    ordering=order)
+        c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
+        c.event("EndDialog", "Return", ordering=order + 2)
+        c = seldlg.cancel("Cancel", "Features")
+        c.event("SpawnDialog", "CancelDlg")
+
+        c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
+                           "FEATURE", None, "PathEdit", None)
+        c.event("[FEATURE_SELECTED]", "1")
+        ver = self.other_version
+        install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
+        dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
+
+        c = seldlg.text("Other", 15, 200, 300, 15, 3,
+                        "Provide an alternate Python location")
+        c.condition("Enable", install_other_cond)
+        c.condition("Show", install_other_cond)
+        c.condition("Disable", dont_install_other_cond)
+        c.condition("Hide", dont_install_other_cond)
+
+        c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
+                           "TARGETDIR" + ver, None, "Next", None)
+        c.condition("Enable", install_other_cond)
+        c.condition("Show", install_other_cond)
+        c.condition("Disable", dont_install_other_cond)
+        c.condition("Hide", dont_install_other_cond)
+
+        #####################################################################
+        # Disk cost
+        cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
+                        "OK", "OK", "OK", bitmap=False)
+        cost.text("Title", 15, 6, 200, 15, 0x30003,
+                  "{\DlgFontBold8}Disk Space Requirements")
+        cost.text("Description", 20, 20, 280, 20, 0x30003,
+                  "The disk space required for the installation of the selected features.")
+        cost.text("Text", 20, 53, 330, 60, 3,
+                  "The highlighted volumes (if any) do not have enough disk space "
+              "available for the currently selected features.  You can either "
+              "remove some files from the highlighted volumes, or choose to "
+              "install less features onto local drive(s), or select different "
+              "destination drive(s).")
+        cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
+                     None, "{120}{70}{70}{70}{70}", None, None)
+        cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
+
+        #####################################################################
+        # WhichUsers Dialog. Only available on NT, and for privileged users.
+        # This must be run before FindRelatedProducts, because that will
+        # take into account whether the previous installation was per-user
+        # or per-machine. We currently don't support going back to this
+        # dialog after "Next" was selected; to support this, we would need to
+        # find how to reset the ALLUSERS property, and how to re-run
+        # FindRelatedProducts.
+        # On Windows9x, the ALLUSERS property is ignored on the command line
+        # and in the Property table, but, according to the documentation, the
+        # installer fails if a dialog attempts to set ALLUSERS.
+        whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
+                            "AdminInstall", "Next", "Cancel")
+        whichusers.title("Select whether to install [ProductName] for all users of this computer.")
+        # A radio group with two options: allusers, justme
+        g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
+                                  "WhichUsers", "", "Next")
+        g.add("ALL", 0, 5, 150, 20, "Install for all users")
+        g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
+
+        whichusers.back("Back", None, active=0)
+
+        c = whichusers.next("Next >", "Cancel")
+        c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
+        c.event("EndDialog", "Return", ordering = 2)
+
+        c = whichusers.cancel("Cancel", "AdminInstall")
+        c.event("SpawnDialog", "CancelDlg")
+
+        #####################################################################
+        # Installation Progress dialog (modeless)
+        progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
+                            "Cancel", "Cancel", "Cancel", bitmap=False)
+        progress.text("Title", 20, 15, 200, 15, 0x30003,
+                      "{\DlgFontBold8}[Progress1] [ProductName]")
+        progress.text("Text", 35, 65, 300, 30, 3,
+                      "Please wait while the Installer [Progress2] [ProductName]. "
+                      "This may take several minutes.")
+        progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
+
+        c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
+        c.mapping("ActionText", "Text")
+
+        #c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
+        #c.mapping("ActionData", "Text")
+
+        c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
+                           None, "Progress done", None, None)
+        c.mapping("SetProgress", "Progress")
+
+        progress.back("< Back", "Next", active=False)
+        progress.next("Next >", "Cancel", active=False)
+        progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
+
+        ###################################################################
+        # Maintenance type: repair/uninstall
+        maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
+                         "Next", "Next", "Cancel")
+        maint.title("Welcome to the [ProductName] Setup Wizard")
+        maint.text("BodyText", 15, 63, 330, 42, 3,
+                   "Select whether you want to repair or remove [ProductName].")
+        g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
+                            "MaintenanceForm_Action", "", "Next")
+        #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
+        g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
+        g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
+
+        maint.back("< Back", None, active=False)
+        c=maint.next("Finish", "Cancel")
+        # Change installation: Change progress dialog to "Change", then ask
+        # for feature selection
+        #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
+        #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
+
+        # Reinstall: Change progress dialog to "Repair", then invoke reinstall
+        # Also set list of reinstalled features to "ALL"
+        c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
+        c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
+        c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
+        c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
+
+        # Uninstall: Change progress to "Remove", then invoke uninstall
+        # Also set list of removed features to "ALL"
+        c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
+        c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
+        c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
+        c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
+
+        # Close dialog when maintenance action scheduled
+        c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
+        #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
+
+        maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
+
+    def get_installer_filename(self, fullname):
+        # Factored out to allow overriding in subclasses
+        if self.target_version:
+            base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
+                                            self.target_version)
+        else:
+            base_name = "%s.%s.msi" % (fullname, self.plat_name)
+        installer_name = os.path.join(self.dist_dir, base_name)
+        return installer_name
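
Reviewer note: MSIVersion at the top of this file exists because MSI's
ProductVersion field must be strictly numeric, so any version that
NormalizedVersion does not consider final is refused.  A quick usage sketch
(assuming Lib/packaging is importable as packaging; values illustrative):

    from packaging.command.bdist_msi import MSIVersion

    MSIVersion('1.0.2')   # accepted: a plain final release
    MSIVersion('1.0a1')   # raises ValueError: a prerelease is not final
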
diff --git a/Lib/packaging/command/bdist_wininst.py b/Lib/packaging/command/bdist_wininst.py
new file mode 100644
index 0000000..dbb74ea
--- /dev/null
+++ b/Lib/packaging/command/bdist_wininst.py
@@ -0,0 +1,342 @@
+"""Create an executable installer for Windows."""
+
+# FIXME synchronize bytes/str use with same file in distutils
+
+import sys
+import os
+
+from shutil import rmtree
+from sysconfig import get_python_version
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError, PackagingPlatformError
+from packaging import logger
+from packaging.util import get_platform
+
+
+class bdist_wininst(Command):
+
+    description = "create an executable installer for Windows"
+
+    user_options = [('bdist-dir=', None,
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('target-version=', None,
+                     "require a specific python version" +
+                     " on the target system"),
+                    ('no-target-compile', 'c',
+                     "do not compile .py to .pyc on the target system"),
+                    ('no-target-optimize', 'o',
+                     "do not compile .py to .pyo (optimized)"
+                     "on the target system"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('bitmap=', 'b',
+                     "bitmap to use for the installer instead of "
+                     "the python-powered logo"),
+                    ('title=', 't',
+                     "title to display on the installer background "
+                     "instead of the default"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('install-script=', None,
+                     "basename of installation script to be run after"
+                     "installation or before deinstallation"),
+                    ('pre-install-script=', None,
+                     "Fully qualified filename of a script to be run before "
+                     "any files are installed.  This script need not be in the "
+                     "distribution"),
+                    ('user-access-control=', None,
+                     "specify Vista's UAC handling - 'none'/default=no "
+                     "handling, 'auto'=use UAC if target Python installed for "
+                     "all users, 'force'=always use UAC"),
+                   ]
+
+    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
+                       'skip-build']
+
+    def initialize_options(self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.keep_temp = False
+        self.no_target_compile = False
+        self.no_target_optimize = False
+        self.target_version = None
+        self.dist_dir = None
+        self.bitmap = None
+        self.title = None
+        self.skip_build = False
+        self.install_script = None
+        self.pre_install_script = None
+        self.user_access_control = None
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            if self.skip_build and self.plat_name:
+                # If build is skipped and plat_name is overridden, bdist will
+                # not see the correct 'plat_name' - so set that up manually.
+                bdist = self.distribution.get_command_obj('bdist')
+                bdist.plat_name = self.plat_name
+                # next the command will be initialized using that name
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'wininst')
+        if not self.target_version:
+            self.target_version = ""
+        if not self.skip_build and self.distribution.has_ext_modules():
+            short_version = get_python_version()
+            if self.target_version and self.target_version != short_version:
+                raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \
+                      " option must be specified" % (short_version,))
+            self.target_version = short_version
+
+        self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
+
+        if self.install_script:
+            for script in self.distribution.scripts:
+                if self.install_script == os.path.basename(script):
+                    break
+            else:
+                raise PackagingOptionError("install_script '%s' not found in scripts" % \
+                      self.install_script)
+
+    def run(self):
+        if (sys.platform != "win32" and
+            (self.distribution.has_ext_modules() or
+             self.distribution.has_c_libraries())):
+            raise PackagingPlatformError(
+                "distribution contains extensions and/or C libraries; "
+                "must be compiled on a Windows 32 platform")
+
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.get_reinitialized_command('install_dist',
+                                                 reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+        install.plat_name = self.plat_name
+
+        install_lib = self.get_reinitialized_command('install_lib')
+        # we do not want to include pyc or pyo files
+        install_lib.compile = False
+        install_lib.optimize = 0
+
+        if self.distribution.has_ext_modules():
+            # If we are building an installer for a Python version other
+            # than the one we are currently running, then we need to ensure
+            # our build_lib reflects the other Python version rather than ours.
+            # Note that for target_version!=sys.version, we must have skipped the
+            # build step, so there is no issue with enforcing the build of this
+            # version.
+            target_version = self.target_version
+            if not target_version:
+                assert self.skip_build, "Should have already checked this"
+                target_version = sys.version[0:3]
+            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+            build = self.get_finalized_command('build')
+            build.build_lib = os.path.join(build.build_base,
+                                           'lib' + plat_specifier)
+
+        # Use a custom scheme for the zip-file, because we have to decide
+        # at installation time which scheme to use.
+        for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
+            value = key.upper()
+            if key == 'headers':
+                value = value + '/Include/$dist_name'
+            setattr(install, 'install_' + key, value)
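+        # e.g. install.install_purelib becomes 'PURELIB' and
+        # install.install_headers 'HEADERS/Include/$dist_name'; the wininst
+        # stub substitutes the real scheme at install time.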
+
+        logger.info("installing to %s", self.bdist_dir)
+        install.ensure_finalized()
+
+        # avoid warning of 'install_lib' about installing
+        # into a directory not in sys.path
+        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+        install.run()
+
+        del sys.path[0]
+
+        # And make an archive relative to the root of the
+        # pseudo-installation tree.
+        from tempfile import NamedTemporaryFile
+        archive_basename = NamedTemporaryFile().name
+        fullname = self.distribution.get_fullname()
+        arcname = self.make_archive(archive_basename, "zip",
+                                    root_dir=self.bdist_dir)
+        # create an exe containing the zip-file
+        self.create_exe(arcname, fullname, self.bitmap)
+        if self.distribution.has_ext_modules():
+            pyversion = get_python_version()
+        else:
+            pyversion = 'any'
+        self.distribution.dist_files.append(('bdist_wininst', pyversion,
+                                             self.get_installer_filename(fullname)))
+        # remove the zip-file again
+        logger.debug("removing temporary file '%s'", arcname)
+        os.remove(arcname)
+
+        if not self.keep_temp:
+            if self.dry_run:
+                logger.info('removing %s', self.bdist_dir)
+            else:
+                rmtree(self.bdist_dir)
+
+    def get_inidata(self):
+        # Return data describing the installation.
+
+        lines = []
+        metadata = self.distribution.metadata
+
+        # Write the [metadata] section.
+        lines.append("[metadata]")
+
+        # 'info' will be displayed in the installer's dialog box,
+        # describing the items to be installed.
+        info = (metadata.long_description or '') + '\n'
+
+        # Escape newline characters
+        def escape(s):
+            return s.replace("\n", "\\n")
+
+        for name in ["author", "author_email", "description", "maintainer",
+                     "maintainer_email", "name", "url", "version"]:
+            data = getattr(metadata, name, "")
+            if data:
+                info = info + ("\n    %s: %s" % \
+                               (name.capitalize(), escape(data)))
+                lines.append("%s=%s" % (name, escape(data)))
+
+        # The [setup] section contains entries controlling
+        # the installer runtime.
+        lines.append("\n[Setup]")
+        if self.install_script:
+            lines.append("install_script=%s" % self.install_script)
+        lines.append("info=%s" % escape(info))
+        lines.append("target_compile=%d" % (not self.no_target_compile))
+        lines.append("target_optimize=%d" % (not self.no_target_optimize))
+        if self.target_version:
+            lines.append("target_version=%s" % self.target_version)
+        if self.user_access_control:
+            lines.append("user_access_control=%s" % self.user_access_control)
+
+        title = self.title or self.distribution.get_fullname()
+        lines.append("title=%s" % escape(title))
+        import time
+        import packaging
+        build_info = "Built %s with packaging-%s" % \
+                     (time.ctime(time.time()), packaging.__version__)
+        lines.append("build_info=%s" % build_info)
+        return "\n".join(lines)
+
+    def create_exe(self, arcname, fullname, bitmap=None):
+        import struct
+
+        self.mkpath(self.dist_dir)
+
+        cfgdata = self.get_inidata()
+
+        installer_name = self.get_installer_filename(fullname)
+        logger.info("creating %s", installer_name)
+
+        if bitmap:
+            with open(bitmap, "rb") as fp:
+                bitmapdata = fp.read()
+            bitmaplen = len(bitmapdata)
+        else:
+            bitmaplen = 0
+
+        with open(installer_name, "wb") as file:
+            file.write(self.get_exe_bytes())
+            if bitmap:
+                file.write(bitmapdata)
+
+            # Convert cfgdata from str to bytes, mbcs encoded
+            if isinstance(cfgdata, str):
+                cfgdata = cfgdata.encode("mbcs")
+
+            # Append the pre-install script; everything from here on is
+            # bytes, so the script is read in binary mode too
+            cfgdata = cfgdata + b"\0"
+            if self.pre_install_script:
+                with open(self.pre_install_script, "rb") as fp:
+                    script_data = fp.read()
+                cfgdata = cfgdata + script_data + b"\n\0"
+            else:
+                # empty pre-install script
+                cfgdata = cfgdata + b"\0"
+            file.write(cfgdata)
+
+            # The 'magic number' 0x1234567B is used to make sure that the
+            # binary layout of 'cfgdata' is what the wininst.exe binary
+            # expects.  If the layout changes, increment that number, make
+            # the corresponding changes to the wininst.exe sources, and
+            # recompile them.
+            header = struct.pack("<iii",
+                                 0x1234567B,       # tag
+                                 len(cfgdata),     # length
+                                 bitmaplen,        # number of bytes in bitmap
+                                 )
+            file.write(header)
+            with open(arcname, "rb") as fp:
+                file.write(fp.read())
+
+    def get_installer_filename(self, fullname):
+        # Factored out to allow overriding in subclasses
+        if self.target_version:
+            # if we create an installer for a specific python version,
+            # it's better to include this in the name
+            installer_name = os.path.join(self.dist_dir,
+                                          "%s.%s-py%s.exe" %
+                                           (fullname, self.plat_name, self.target_version))
+        else:
+            installer_name = os.path.join(self.dist_dir,
+                                          "%s.%s.exe" % (fullname, self.plat_name))
+        return installer_name
+
+    def get_exe_bytes(self):
+        from packaging.compiler.msvccompiler import get_build_version
+        # If a target-version other than the current version has been
+        # specified, then using the MSVC version from *this* build is no good.
+        # Without actually finding and executing the target version and parsing
+        # its sys.version, we just hard-code our knowledge of old versions.
+        # NOTE: Possible alternative is to allow "--target-version" to
+        # specify a Python executable rather than a simple version string.
+        # We can then execute this program to obtain any info we need, such
+        # as the real sys.version string for the build.
+        cur_version = get_python_version()
+        if self.target_version and self.target_version != cur_version:
+            # If the target version is *later* than us, then we assume they
+            # use what we use
+            # string compares seem wrong, but are what sysconfig.py itself uses
+            if self.target_version > cur_version:
+                bv = get_build_version()
+            else:
+                if self.target_version < "2.4":
+                    bv = 6.0
+                else:
+                    bv = 7.1
+        else:
+            # for current version - use authoritative check.
+            bv = get_build_version()
+
+        # wininst-x.y.exe is in the same directory as this file
+        directory = os.path.dirname(__file__)
+        # we must use a wininst-x.y.exe built with the same C compiler
+        # used for python.  XXX What about mingw, borland, and so on?
+
+        # if plat_name starts with "win" but is not "win32"
+        # we want to strip "win" and leave the rest (e.g. -amd64)
+        # for all other cases, we don't want any suffix
+        if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
+            sfix = self.plat_name[3:]
+        else:
+            sfix = ''
+
+        filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, sfix))
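+        # e.g. wininst-9.0.exe for win32, wininst-9.0-amd64.exe for
+        # win-amd64 (illustrative values of bv and sfix)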
+        with open(filename, "rb") as fp:
+            return fp.read()
diff --git a/Lib/packaging/command/build.py b/Lib/packaging/command/build.py
new file mode 100644
index 0000000..6580fd1
--- /dev/null
+++ b/Lib/packaging/command/build.py
@@ -0,0 +1,151 @@
+"""Main build command, which calls the other build_* commands."""
+
+import sys
+import os
+
+from packaging.util import get_platform
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError
+from packaging.compiler import show_compilers
+
+
+class build(Command):
+
+    description = "build everything needed to install"
+
+    user_options = [
+        ('build-base=', 'b',
+         "base directory for build library"),
+        ('build-purelib=', None,
+         "build directory for platform-neutral distributions"),
+        ('build-platlib=', None,
+         "build directory for platform-specific distributions"),
+        ('build-lib=', None,
+         "build directory for all distribution (defaults to either " +
+         "build-purelib or build-platlib"),
+        ('build-scripts=', None,
+         "build directory for scripts"),
+        ('build-temp=', 't',
+         "temporary build directory"),
+        ('plat-name=', 'p',
+         "platform name to build for, if supported "
+         "(default: %s)" % get_platform()),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ('debug', 'g',
+         "compile extensions and libraries with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('executable=', 'e',
+         "specify final destination interpreter path (build.py)"),
+        ('use-2to3', None,
+         "use 2to3 to make source python 3.x compatible"),
+        ('convert-2to3-doctests', None,
+         "use 2to3 to convert doctests in seperate text files"),
+        ('use-2to3-fixers', None,
+         "list additional fixers opted for during 2to3 conversion"),
+        ]
+
+    boolean_options = ['debug', 'force']
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options(self):
+        self.build_base = 'build'
+        # these are decided only after 'build_base' has its final value
+        # (unless overridden by the user or client)
+        self.build_purelib = None
+        self.build_platlib = None
+        self.build_lib = None
+        self.build_temp = None
+        self.build_scripts = None
+        self.compiler = None
+        self.plat_name = None
+        self.debug = None
+        self.force = False
+        self.executable = None
+        self.use_2to3 = False
+        self.convert_2to3_doctests = None
+        self.use_2to3_fixers = None
+
+    def finalize_options(self):
+        if self.plat_name is None:
+            self.plat_name = get_platform()
+        else:
+            # plat-name only supported for windows (other platforms are
+            # supported via ./configure flags, if at all).  Avoid misleading
+            # other platforms.
+            if os.name != 'nt':
+                raise PackagingOptionError(
+                            "--plat-name only supported on Windows (try "
+                            "using './configure --help' on your platform)")
+
+        plat_specifier = ".%s-%s" % (self.plat_name, sys.version[0:3])
+
+        # Make it so a normal build and a --with-pydebug build don't
+        # share the same build directories. Doing so confuses the build
+        # process for C modules
+        if hasattr(sys, 'gettotalrefcount'):
+            plat_specifier += '-pydebug'
+
+        # 'build_purelib' and 'build_platlib' just default to 'lib' and
+        # 'lib.<plat>' under the base build directory.  We only use one of
+        # them for a given distribution, though --
+        if self.build_purelib is None:
+            self.build_purelib = os.path.join(self.build_base, 'lib')
+        if self.build_platlib is None:
+            self.build_platlib = os.path.join(self.build_base,
+                                              'lib' + plat_specifier)
+
+        # 'build_lib' is the actual directory that we will use for this
+        # particular module distribution -- if user didn't supply it, pick
+        # one of 'build_purelib' or 'build_platlib'.
+        if self.build_lib is None:
+            if self.distribution.ext_modules:
+                self.build_lib = self.build_platlib
+            else:
+                self.build_lib = self.build_purelib
+
+        # 'build_temp' -- temporary directory for compiler turds,
+        # "build/temp.<plat>"
+        if self.build_temp is None:
+            self.build_temp = os.path.join(self.build_base,
+                                           'temp' + plat_specifier)
+        if self.build_scripts is None:
+            self.build_scripts = os.path.join(self.build_base,
+                                              'scripts-' + sys.version[0:3])
+
+        if self.executable is None:
+            self.executable = os.path.normpath(sys.executable)
+
+    def run(self):
+        # Run all relevant sub-commands.  This will be some subset of:
+        #  - build_py      - pure Python modules
+        #  - build_clib    - standalone C libraries
+        #  - build_ext     - Python extension modules
+        #  - build_scripts - Python scripts
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+    # -- Predicates for the sub-command list ---------------------------
+
+    def has_pure_modules(self):
+        return self.distribution.has_pure_modules()
+
+    def has_c_libraries(self):
+        return self.distribution.has_c_libraries()
+
+    def has_ext_modules(self):
+        return self.distribution.has_ext_modules()
+
+    def has_scripts(self):
+        return self.distribution.has_scripts()
+
+    sub_commands = [('build_py', has_pure_modules),
+                    ('build_clib', has_c_libraries),
+                    ('build_ext', has_ext_modules),
+                    ('build_scripts', has_scripts),
+                   ]
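+
+# Illustrative invocation, assuming the pysetup front end and a hypothetical
+# project:
+#   $ pysetup run build --plat-name=win-amd64
+# The build command then dispatches to build_py, build_clib, build_ext and
+# build_scripts according to the sub_commands predicates above.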
diff --git a/Lib/packaging/command/build_clib.py b/Lib/packaging/command/build_clib.py
new file mode 100644
index 0000000..4a24996
--- /dev/null
+++ b/Lib/packaging/command/build_clib.py
@@ -0,0 +1,198 @@
+"""Build C/C++ libraries.
+
+This command is useful to build libraries that are included in the
+distribution and needed by extension modules.
+"""
+
+# XXX this module has *lots* of code ripped-off quite transparently from
+# build_ext.py -- not surprisingly really, as the work required to build
+# a static library from a collection of C source files is not really all
+# that different from what's required to build a shared object file from
+# a collection of C source files.  Nevertheless, I haven't done the
+# necessary refactoring to account for the overlap in code between the
+# two modules, mainly because a number of subtle details changed in the
+# cut 'n paste.  Sigh.
+
+import os
+from packaging.command.cmd import Command
+from packaging.errors import PackagingSetupError
+from packaging.compiler import customize_compiler
+from packaging import logger
+
+
+def show_compilers():
+    from packaging.compiler import show_compilers
+    show_compilers()
+
+
+class build_clib(Command):
+
+    description = "build C/C++ libraries used by extension modules"
+
+    user_options = [
+        ('build-clib=', 'b',
+         "directory to build C/C++ libraries to"),
+        ('build-temp=', 't',
+         "directory to put temporary build by-products"),
+        ('debug', 'g',
+         "compile with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ]
+
+    boolean_options = ['debug', 'force']
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options(self):
+        self.build_clib = None
+        self.build_temp = None
+
+        # List of libraries to build
+        self.libraries = None
+
+        # Compilation options for all libraries
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.debug = None
+        self.force = False
+        self.compiler = None
+
+    def finalize_options(self):
+        # This might be confusing: both build-clib and build-temp default
+        # to build-temp as defined by the "build" command.  This is because
+        # I think that C libraries are really just temporary build
+        # by-products, at least from the point of view of building Python
+        # extensions -- but I want to keep my options open.
+        self.set_undefined_options('build',
+                                   ('build_temp', 'build_clib'),
+                                   ('build_temp', 'build_temp'),
+                                   'compiler', 'debug', 'force')
+
+        self.libraries = self.distribution.libraries
+        if self.libraries:
+            self.check_library_list(self.libraries)
+
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        if isinstance(self.include_dirs, str):
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        # XXX same as for build_ext -- what about 'self.define' and
+        # 'self.undef' ?
+
+    def run(self):
+        if not self.libraries:
+            return
+
+        # Yech -- this is cut 'n pasted from build_ext.py!
+        from packaging.compiler import new_compiler
+        self.compiler = new_compiler(compiler=self.compiler,
+                                     dry_run=self.dry_run,
+                                     force=self.force)
+        customize_compiler(self.compiler)
+
+        if self.include_dirs is not None:
+            self.compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for name, value in self.define:
+                self.compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                self.compiler.undefine_macro(macro)
+
+        self.build_libraries(self.libraries)
+
+    def check_library_list(self, libraries):
+        """Ensure that the list of libraries is valid.
+
+        `libraries` is presumably provided as a command option 'libraries'.
+        This method checks that it is a list of 2-tuples, where the tuples
+        are (library_name, build_info_dict).
+
+        Raise PackagingSetupError if the structure is invalid anywhere;
+        just return otherwise.
+        """
+        if not isinstance(libraries, list):
+            raise PackagingSetupError("'libraries' option must be a list of tuples")
+
+        for lib in libraries:
+            if not isinstance(lib, tuple) or len(lib) != 2:
+                raise PackagingSetupError("each element of 'libraries' must be a 2-tuple")
+
+            name, build_info = lib
+
+            if not isinstance(name, str):
+                raise PackagingSetupError("first element of each tuple in 'libraries' " + \
+                      "must be a string (the library name)")
+            if '/' in name or (os.sep != '/' and os.sep in name):
+                raise PackagingSetupError(("bad library name '%s': " +
+                       "may not contain directory separators") % \
+                      lib[0])
+
+            if not isinstance(build_info, dict):
+                raise PackagingSetupError("second element of each tuple in 'libraries' " + \
+                      "must be a dictionary (build info)")
+
+    def get_library_names(self):
+        # Assume the library list is valid -- 'check_library_list()' is
+        # called from 'finalize_options()', so it should be!
+        if not self.libraries:
+            return None
+
+        lib_names = []
+        for lib_name, build_info in self.libraries:
+            lib_names.append(lib_name)
+        return lib_names
+
+    def get_source_files(self):
+        self.check_library_list(self.libraries)
+        filenames = []
+        for lib_name, build_info in self.libraries:
+            sources = build_info.get('sources')
+            if sources is None or not isinstance(sources, (list, tuple)):
+                raise PackagingSetupError(("in 'libraries' option (library '%s'), "
+                       "'sources' must be present and must be "
+                       "a list of source filenames") % lib_name)
+
+            filenames.extend(sources)
+        return filenames
+
+    def build_libraries(self, libraries):
+        for lib_name, build_info in libraries:
+            sources = build_info.get('sources')
+            if sources is None or not isinstance(sources, (list, tuple)):
+                raise PackagingSetupError(("in 'libraries' option (library '%s'), " +
+                       "'sources' must be present and must be " +
+                       "a list of source filenames") % lib_name)
+            sources = list(sources)
+
+            logger.info("building '%s' library", lib_name)
+
+            # First, compile the source code to object files in the library
+            # directory.  (This should probably change to putting object
+            # files in a temporary build directory.)
+            macros = build_info.get('macros')
+            include_dirs = build_info.get('include_dirs')
+            objects = self.compiler.compile(sources,
+                                            output_dir=self.build_temp,
+                                            macros=macros,
+                                            include_dirs=include_dirs,
+                                            debug=self.debug)
+
+            # Now "link" the object files together into a static library.
+            # (On Unix at least, this isn't really linking -- it just
+            # builds an archive.  Whatever.)
+            self.compiler.create_static_lib(objects, lib_name,
+                                            output_dir=self.build_clib,
+                                            debug=self.debug)
diff --git a/Lib/packaging/command/build_ext.py b/Lib/packaging/command/build_ext.py
new file mode 100644
index 0000000..403e5fd
--- /dev/null
+++ b/Lib/packaging/command/build_ext.py
@@ -0,0 +1,666 @@
+"""Build extension modules."""
+
+# FIXME Is this module limited to C extensions or do C++ extensions work too?
+# The docstring of this module said that C++ was not supported, but other
+# comments contradict that.
+
+import os
+import re
+import sys
+import logging
+import sysconfig
+
+from packaging.util import get_platform
+from packaging.command.cmd import Command
+from packaging.errors import (CCompilerError, CompileError, PackagingError,
+                               PackagingPlatformError, PackagingSetupError)
+from packaging.compiler import customize_compiler, show_compilers
+from packaging.util import newer_group
+from packaging.compiler.extension import Extension
+from packaging import logger
+
+import site
+HAS_USER_SITE = True
+
+if os.name == 'nt':
+    from packaging.compiler.msvccompiler import get_build_version
+    MSVC_VERSION = int(get_build_version())
+
+# An extension name is just a dot-separated list of Python NAMEs (ie.
+# the same as a fully-qualified module name).
+extension_name_re = re.compile(
+    r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
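+# e.g. it matches 'spam' and 'spam.eggs' but not '3spam' or 'spam-eggs'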
+
+
+class build_ext(Command):
+
+    description = "build C/C++ extension modules (compile/link to build directory)"
+
+    # XXX thoughts on how to deal with complex command-line options like
+    # these, i.e. how to make it so fancy_getopt can suck them off the
+    # command line and make it look like setup.py defined the appropriate
+    # lists of tuples of what-have-you.
+    #   - each command needs a callback to process its command-line options
+    #   - Command.__init__() needs access to its share of the whole
+    #     command line (must ultimately come from
+    #     Distribution.parse_command_line())
+    #   - it then calls the current command class' option-parsing
+    #     callback to deal with weird options like -D, which have to
+    #     parse the option text and churn out some custom data
+    #     structure
+    #   - that data structure (in this case, a list of 2-tuples)
+    #     will then be present in the command object by the time
+    #     we get to finalize_options() (i.e. the constructor
+    #     takes care of both command-line and client options
+    #     in between initialize_options() and finalize_options())
+
+    sep_by = " (separated by '%s')" % os.pathsep
+    user_options = [
+        ('build-lib=', 'b',
+         "directory for compiled extension modules"),
+        ('build-temp=', 't',
+         "directory for temporary files (build by-products)"),
+        ('plat-name=', 'p',
+         "platform name to cross-compile for, if supported "
+         "(default: %s)" % get_platform()),
+        ('inplace', 'i',
+         "ignore build-lib and put compiled extensions into the source " +
+         "directory alongside your pure Python modules"),
+        ('include-dirs=', 'I',
+         "list of directories to search for header files" + sep_by),
+        ('define=', 'D',
+         "C preprocessor macros to define"),
+        ('undef=', 'U',
+         "C preprocessor macros to undefine"),
+        ('libraries=', 'l',
+         "external C libraries to link with"),
+        ('library-dirs=', 'L',
+         "directories to search for external C libraries" + sep_by),
+        ('rpath=', 'R',
+         "directories to search for shared C libraries at runtime"),
+        ('link-objects=', 'O',
+         "extra explicit link objects to include in the link"),
+        ('debug', 'g',
+         "compile/link with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ('swig-opts=', None,
+         "list of SWIG command-line options"),
+        ('swig=', None,
+         "path to the SWIG executable"),
+        ]
+
+    boolean_options = ['inplace', 'debug', 'force']
+
+    if HAS_USER_SITE:
+        user_options.append(('user', None,
+                             "add user include, library and rpath"))
+        boolean_options.append('user')
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options(self):
+        self.extensions = None
+        self.build_lib = None
+        self.plat_name = None
+        self.build_temp = None
+        self.inplace = False
+        self.package = None
+
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.libraries = None
+        self.library_dirs = None
+        self.rpath = None
+        self.link_objects = None
+        self.debug = None
+        self.force = None
+        self.compiler = None
+        self.swig = None
+        self.swig_opts = None
+        if HAS_USER_SITE:
+            self.user = None
+
+    def finalize_options(self):
+        self.set_undefined_options('build',
+                                   'build_lib', 'build_temp', 'compiler',
+                                   'debug', 'force', 'plat_name')
+
+        if self.package is None:
+            self.package = self.distribution.ext_package
+
+        # Ensure that the list of extensions is valid, i.e. it is a list of
+        # Extension objects.
+        self.extensions = self.distribution.ext_modules
+        if self.extensions:
+            if not isinstance(self.extensions, (list, tuple)):
+                type_name = (self.extensions is None and 'None'
+                            or type(self.extensions).__name__)
+                raise PackagingSetupError(
+                    "'ext_modules' must be a sequence of Extension instances,"
+                    " not %s" % (type_name,))
+            for i, ext in enumerate(self.extensions):
+                if isinstance(ext, Extension):
+                    continue                # OK! (assume type-checking done
+                                            # by Extension constructor)
+                type_name = (ext is None and 'None' or type(ext).__name__)
+                raise PackagingSetupError(
+                    "'ext_modules' item %d must be an Extension instance,"
+                    " not %s" % (i, type_name))
+
+        # Make sure Python's include directories (for Python.h, pyconfig.h,
+        # etc.) are in the include search path.
+        py_include = sysconfig.get_path('include')
+        plat_py_include = sysconfig.get_path('platinclude')
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        if isinstance(self.include_dirs, str):
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        # Put the Python "system" include dir at the end, so that
+        # any local include dirs take precedence.
+        self.include_dirs.append(py_include)
+        if plat_py_include != py_include:
+            self.include_dirs.append(plat_py_include)
+
+        if isinstance(self.libraries, str):
+            self.libraries = [self.libraries]
+
+        # Life is easier if we're not forever checking for None, so
+        # simplify these options to empty lists if unset
+        if self.libraries is None:
+            self.libraries = []
+        if self.library_dirs is None:
+            self.library_dirs = []
+        elif isinstance(self.library_dirs, str):
+            self.library_dirs = self.library_dirs.split(os.pathsep)
+
+        if self.rpath is None:
+            self.rpath = []
+        elif isinstance(self.rpath, str):
+            self.rpath = self.rpath.split(os.pathsep)
+
+        # for extensions under Windows, use different directories
+        # for Release and Debug builds; also, Python's library directory
+        # must be appended to library_dirs
+        if os.name == 'nt':
+            # the 'libs' directory is for binary installs - we assume that
+            # must be the *native* platform.  But we don't really support
+            # cross-compiling via a binary install anyway, so we let it go.
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+            if self.debug:
+                self.build_temp = os.path.join(self.build_temp, "Debug")
+            else:
+                self.build_temp = os.path.join(self.build_temp, "Release")
+
+            # Append the source distribution include and library directories;
+            # this allows packaging on Windows to work in the source tree
+            self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
+            if MSVC_VERSION == 9:
+                # Use the .lib files for the correct architecture
+                if self.plat_name == 'win32':
+                    suffix = ''
+                else:
+                    # win-amd64 or win-ia64
+                    suffix = self.plat_name[4:]
+                new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
+                if suffix:
+                    new_lib = os.path.join(new_lib, suffix)
+                self.library_dirs.append(new_lib)
+
+            elif MSVC_VERSION == 8:
+                self.library_dirs.append(os.path.join(sys.exec_prefix,
+                                         'PC', 'VS8.0'))
+            elif MSVC_VERSION == 7:
+                self.library_dirs.append(os.path.join(sys.exec_prefix,
+                                         'PC', 'VS7.1'))
+            else:
+                self.library_dirs.append(os.path.join(sys.exec_prefix,
+                                         'PC', 'VC6'))
+
+        # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
+        # import libraries in its "Config" subdirectory
+        if os.name == 'os2':
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
+
+        # for extensions under Cygwin and AtheOS, Python's library
+        # directory must be appended to library_dirs
+        if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
+            if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+                # building third party extensions
+                self.library_dirs.append(os.path.join(sys.prefix, "lib",
+                                  "python" + sysconfig.get_python_version(),
+                                                      "config"))
+            else:
+                # building python standard extensions
+                self.library_dirs.append(os.curdir)
+
+        # for extensions under Linux or Solaris with a shared Python library,
+        # Python's library directory must be appended to library_dirs
+        if ((sys.platform.startswith('linux') or sys.platform.startswith('gnu')
+             or sys.platform.startswith('sunos'))
+            and sysconfig.get_config_var('Py_ENABLE_SHARED')):
+            if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+                # building third party extensions
+                self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
+            else:
+                # building python standard extensions
+                self.library_dirs.append(os.curdir)
+
+        # The argument parsing will result in self.define being a string, but
+        # it has to be a list of 2-tuples.  All the preprocessor symbols
+        # specified by the 'define' option will be set to '1'.  Multiple
+        # symbols can be separated with commas.
+
+        if self.define:
+            defines = self.define.split(',')
+            self.define = [(symbol, '1') for symbol in defines]
+
+        # The option for macros to undefine is also a string from the
+        # option parsing, but has to be a list.  Multiple symbols can also
+        # be separated with commas here.
+        if self.undef:
+            self.undef = self.undef.split(',')
+
+        if self.swig_opts is None:
+            self.swig_opts = []
+        else:
+            self.swig_opts = self.swig_opts.split(' ')
+
+        # Finally add the user include and library directories if requested
+        if HAS_USER_SITE and self.user:
+            user_include = os.path.join(site.USER_BASE, "include")
+            user_lib = os.path.join(site.USER_BASE, "lib")
+            if os.path.isdir(user_include):
+                self.include_dirs.append(user_include)
+            if os.path.isdir(user_lib):
+                self.library_dirs.append(user_lib)
+                self.rpath.append(user_lib)
+
+    def run(self):
+        from packaging.compiler import new_compiler
+
+        # 'self.extensions', as supplied by setup.py, is a list of
+        # Extension instances.  See the documentation for Extension (in
+        # distutils.extension) for details.
+        if not self.extensions:
+            return
+
+        # If we were asked to build any C/C++ libraries, make sure that the
+        # directory where we put them is in the library search path for
+        # linking extensions.
+        if self.distribution.has_c_libraries():
+            build_clib = self.get_finalized_command('build_clib')
+            self.libraries.extend(build_clib.get_library_names() or [])
+            self.library_dirs.append(build_clib.build_clib)
+
+        # Temporary kludge until we remove the verbose arguments and use
+        # logging everywhere
+        verbose = logger.getEffectiveLevel() <= logging.DEBUG
+
+        # Setup the CCompiler object that we'll use to do all the
+        # compiling and linking
+        self.compiler_obj = new_compiler(compiler=self.compiler,
+                                         verbose=verbose,
+                                         dry_run=self.dry_run,
+                                         force=self.force)
+
+        customize_compiler(self.compiler_obj)
+        # If we are cross-compiling, init the compiler now (if we are not
+        # cross-compiling, init would not hurt, but people may rely on
+        # late initialization of compiler even if they shouldn't...)
+        if os.name == 'nt' and self.plat_name != get_platform():
+            self.compiler_obj.initialize(self.plat_name)
+
+        # And make sure that any compile/link-related options (which might
+        # come from the command line or from the setup script) are set in
+        # that CCompiler object -- that way, they automatically apply to
+        # all compiling and linking done here.
+        if self.include_dirs is not None:
+            self.compiler_obj.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for name, value in self.define:
+                self.compiler_obj.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                self.compiler_obj.undefine_macro(macro)
+        if self.libraries is not None:
+            self.compiler_obj.set_libraries(self.libraries)
+        if self.library_dirs is not None:
+            self.compiler_obj.set_library_dirs(self.library_dirs)
+        if self.rpath is not None:
+            self.compiler_obj.set_runtime_library_dirs(self.rpath)
+        if self.link_objects is not None:
+            self.compiler_obj.set_link_objects(self.link_objects)
+
+        # Now actually compile and link everything.
+        self.build_extensions()
+
+    def get_source_files(self):
+        filenames = []
+
+        # Wouldn't it be neat if we knew the names of header files too...
+        for ext in self.extensions:
+            filenames.extend(ext.sources)
+
+        return filenames
+
+    def get_outputs(self):
+        # And build the list of output (built) filenames.  Note that this
+        # ignores the 'inplace' flag, and assumes everything goes in the
+        # "build" tree.
+        outputs = []
+        for ext in self.extensions:
+            outputs.append(self.get_ext_fullpath(ext.name))
+        return outputs
+
+    def build_extensions(self):
+        for ext in self.extensions:
+            try:
+                self.build_extension(ext)
+            except (CCompilerError, PackagingError, CompileError) as e:
+                if not ext.optional:
+                    raise
+                logger.warning('%s: building extension %r failed: %s',
+                               self.get_command_name(), ext.name, e)
+
+    def build_extension(self, ext):
+        sources = ext.sources
+        if sources is None or not isinstance(sources, (list, tuple)):
+            raise PackagingSetupError(("in 'ext_modules' option (extension '%s'), " +
+                   "'sources' must be present and must be " +
+                   "a list of source filenames") % ext.name)
+        sources = list(sources)
+
+        ext_path = self.get_ext_fullpath(ext.name)
+        depends = sources + ext.depends
+        if not (self.force or newer_group(depends, ext_path, 'newer')):
+            logger.debug("skipping '%s' extension (up-to-date)", ext.name)
+            return
+        else:
+            logger.info("building '%s' extension", ext.name)
+
+        # First, scan the sources for SWIG definition files (.i), run
+        # SWIG on 'em to create .c files, and modify the sources list
+        # accordingly.
+        sources = self.swig_sources(sources, ext)
+
+        # Next, compile the source code to object files.
+
+        # XXX not honouring 'define_macros' or 'undef_macros' -- the
+        # CCompiler API needs to change to accommodate this, and I
+        # want to do one thing at a time!
+
+        # Two possible sources for extra compiler arguments:
+        #   - 'extra_compile_args' in Extension object
+        #   - CFLAGS environment variable (not particularly
+        #     elegant, but people seem to expect it and I
+        #     guess it's useful)
+        # The environment variable should take precedence, and
+        # any sensible compiler will give precedence to later
+        # command-line args.  Hence we combine them in order:
+        extra_args = ext.extra_compile_args or []
+
+        macros = ext.define_macros[:]
+        for undef in ext.undef_macros:
+            macros.append((undef,))
+
+        objects = self.compiler_obj.compile(sources,
+                                            output_dir=self.build_temp,
+                                            macros=macros,
+                                            include_dirs=ext.include_dirs,
+                                            debug=self.debug,
+                                            extra_postargs=extra_args,
+                                            depends=ext.depends)
+
+        # XXX -- this is a Vile HACK!
+        #
+        # The setup.py script for Python on Unix needs to be able to
+        # get this list so it can perform all the clean up needed to
+        # avoid keeping object files around when cleaning out a failed
+        # build of an extension module.  Since Packaging does not
+        # track dependencies, we have to get rid of intermediates to
+        # ensure all the intermediates will be properly re-built.
+        #
+        self._built_objects = objects[:]
+
+        # Now link the object files together into a "shared object" --
+        # of course, first we have to figure out all the other things
+        # that go into the mix.
+        if ext.extra_objects:
+            objects.extend(ext.extra_objects)
+        extra_args = ext.extra_link_args or []
+
+        # Detect target language, if not provided
+        language = ext.language or self.compiler_obj.detect_language(sources)
+
+        self.compiler_obj.link_shared_object(
+            objects, ext_path,
+            libraries=self.get_libraries(ext),
+            library_dirs=ext.library_dirs,
+            runtime_library_dirs=ext.runtime_library_dirs,
+            extra_postargs=extra_args,
+            export_symbols=self.get_export_symbols(ext),
+            debug=self.debug,
+            build_temp=self.build_temp,
+            target_lang=language)
+
+    def swig_sources(self, sources, extension):
+        """Walk the list of source files in 'sources', looking for SWIG
+        interface (.i) files.  Run SWIG on all that are found, and
+        return a modified 'sources' list with SWIG source files replaced
+        by the generated C (or C++) files.
+        """
+        new_sources = []
+        swig_sources = []
+        swig_targets = {}
+
+        # XXX this drops generated C/C++ files into the source tree, which
+        # is fine for developers who want to distribute the generated
+        # source -- but there should be an option to put SWIG output in
+        # the temp dir.
+
+        if ('-c++' in self.swig_opts or '-c++' in extension.swig_opts):
+            target_ext = '.cpp'
+        else:
+            target_ext = '.c'
+
+        for source in sources:
+            base, ext = os.path.splitext(source)
+            if ext == ".i":             # SWIG interface file
+                new_sources.append(base + '_wrap' + target_ext)
+                swig_sources.append(source)
+                swig_targets[source] = new_sources[-1]
+            else:
+                new_sources.append(source)
+
+        if not swig_sources:
+            return new_sources
+
+        swig = self.swig or self.find_swig()
+        swig_cmd = [swig, "-python"]
+        swig_cmd.extend(self.swig_opts)
+
+        # Do not override commandline arguments
+        if not self.swig_opts:
+            for o in extension.swig_opts:
+                swig_cmd.append(o)
+
+        for source in swig_sources:
+            target = swig_targets[source]
+            logger.info("swigging %s to %s", source, target)
+            self.spawn(swig_cmd + ["-o", target, source])
+
+        return new_sources
+
+    def find_swig(self):
+        """Return the name of the SWIG executable.  On Unix, this is
+        just "swig" -- it should be in the PATH.  Tries a bit harder on
+        Windows.
+        """
+
+        if os.name == "posix":
+            return "swig"
+        elif os.name == "nt":
+
+            # Look for SWIG in its standard installation directory on
+            # Windows (or so I presume!).  If we find it there, great;
+            # if not, act like Unix and assume it's in the PATH.
+            for vers in ("1.3", "1.2", "1.1"):
+                fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+                if os.path.isfile(fn):
+                    return fn
+            return "swig.exe"
+
+        elif os.name == "os2":
+            # assume swig available in the PATH.
+            return "swig.exe"
+
+        else:
+            raise PackagingPlatformError(("I don't know how to find (much less run) SWIG "
+                   "on platform '%s'") % os.name)
+
+    # -- Name generators -----------------------------------------------
+    # (extension names, filenames, whatever)
+    def get_ext_fullpath(self, ext_name):
+        """Returns the path of the filename for a given extension.
+
+        The file is located in `build_lib` or directly in the package
+        (inplace option).
+        """
+        fullname = self.get_ext_fullname(ext_name)
+        modpath = fullname.split('.')
+        filename = self.get_ext_filename(modpath[-1])
+
+        if not self.inplace:
+            # no further work needed; return
+            #   build_dir/package/path/filename
+            filename = os.path.join(*(modpath[:-1] + [filename]))
+            return os.path.join(self.build_lib, filename)
+
+        # the inplace option requires us to find the package directory;
+        # we use the build_py command for that
+        package = '.'.join(modpath[0:-1])
+        build_py = self.get_finalized_command('build_py')
+        package_dir = os.path.abspath(build_py.get_package_dir(package))
+
+        # returning
+        #   package_dir/filename
+        return os.path.join(package_dir, filename)
+
+    def get_ext_fullname(self, ext_name):
+        """Returns the fullname of a given extension name.
+
+        Adds the `package.` prefix"""
+        if self.package is None:
+            return ext_name
+        else:
+            return self.package + '.' + ext_name
+
+    def get_ext_filename(self, ext_name):
+        r"""Convert the name of an extension (eg. "foo.bar") into the name
+        of the file from which it will be loaded (eg. "foo/bar.so", or
+        "foo\bar.pyd").
+        """
+        ext_path = ext_name.split('.')
+        # OS/2 has an 8 character module (extension) limit :-(
+        if os.name == "os2":
+            ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
+        # extensions in debug mode are named 'module_d.pyd' under Windows
+        so_ext = sysconfig.get_config_var('SO')
+        if os.name == 'nt' and self.debug:
+            return os.path.join(*ext_path) + '_d' + so_ext
+        return os.path.join(*ext_path) + so_ext
+
+    def get_export_symbols(self, ext):
+        """Return the list of symbols that a shared extension has to
+        export.  This either uses 'ext.export_symbols' or, if it's not
+        provided, "init" + module_name.  Only relevant on Windows, where
+        the .pyd file (DLL) must export the module "init" function.
+        """
+        initfunc_name = "PyInit_" + ext.name.split('.')[-1]
+        if initfunc_name not in ext.export_symbols:
+            ext.export_symbols.append(initfunc_name)
+        return ext.export_symbols
+
+    def get_libraries(self, ext):
+        """Return the list of libraries to link against when building a
+        shared extension.  On most platforms, this is just 'ext.libraries';
+        on Windows and OS/2, we add the Python library (eg. python32.dll).
+        """
+        # The python library is always needed on Windows.  For MSVC, this
+        # is redundant, since the library is mentioned in a pragma in
+        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
+        # to need it mentioned explicitly, though, so that's what we do.
+        # Append '_d' to the python import library on debug builds.
+        if sys.platform == "win32":
+            from packaging.compiler.msvccompiler import MSVCCompiler
+            if not isinstance(self.compiler_obj, MSVCCompiler):
+                template = "python%d%d"
+                if self.debug:
+                    template = template + '_d'
+                pythonlib = (template %
+                       (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
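+                # e.g. 'python32' for a 3.2 build, or 'python32_d' with
+                # --debug (illustrative version)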
+                # don't extend ext.libraries, it may be shared with other
+                # extensions, it is a reference to the original list
+                return ext.libraries + [pythonlib]
+            else:
+                return ext.libraries
+        elif sys.platform == "os2emx":
+            # EMX/GCC requires the python library explicitly, and I
+            # believe VACPP does as well (though not confirmed) - AIM Apr01
+            template = "python%d%d"
+            # debug versions of the main DLL aren't supported, at least
+            # not at this time - AIM Apr01
+            #if self.debug:
+            #    template = template + '_d'
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib]
+        elif sys.platform[:6] == "cygwin":
+            template = "python%d.%d"
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib]
+        elif sys.platform[:6] == "atheos":
+            template = "python%d.%d"
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # Get SHLIBS from Makefile
+            extra = []
+            for lib in sysconfig.get_config_var('SHLIBS').split():
+                if lib.startswith('-l'):
+                    extra.append(lib[2:])
+                else:
+                    extra.append(lib)
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib, "m"] + extra
+
+        elif sys.platform == 'darwin':
+            # Don't use the default code below
+            return ext.libraries
+
+        else:
+            if sysconfig.get_config_var('Py_ENABLE_SHARED'):
+                pythonlib = 'python{}.{}{}'.format(
+                    sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff,
+                    sys.abiflags)
+                return ext.libraries + [pythonlib]
+            else:
+                return ext.libraries
diff --git a/Lib/packaging/command/build_py.py b/Lib/packaging/command/build_py.py
new file mode 100644
index 0000000..360f4c9
--- /dev/null
+++ b/Lib/packaging/command/build_py.py
@@ -0,0 +1,410 @@
+"""Build pure Python modules (just copy to build directory)."""
+
+import os
+import sys
+from glob import glob
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError, PackagingFileError
+from packaging.util import convert_path
+from packaging.compat import Mixin2to3
+
+# marking public APIs
+__all__ = ['build_py']
+
+class build_py(Command, Mixin2to3):
+
+    description = "build pure Python modules (copy to build directory)"
+
+    user_options = [
+        ('build-lib=', 'd', "directory to build (copy) to"),
+        ('compile', 'c', "compile .py to .pyc"),
+        ('no-compile', None, "don't compile .py files [default]"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+        ('use-2to3', None,
+         "use 2to3 to make source python 3.x compatible"),
+        ('convert-2to3-doctests', None,
+         "use 2to3 to convert doctests in seperate text files"),
+        ('use-2to3-fixers', None,
+         "list additional fixers opted for during 2to3 conversion"),
+        ]
+
+    boolean_options = ['compile', 'force']
+    negative_opt = {'no-compile': 'compile'}
+
+    def initialize_options(self):
+        self.build_lib = None
+        self.py_modules = None
+        self.package = None
+        self.package_data = None
+        self.package_dir = None
+        self.compile = False
+        self.optimize = 0
+        self.force = None
+        self._updated_files = []
+        self._doctests_2to3 = []
+        self.use_2to3 = False
+        self.convert_2to3_doctests = None
+        self.use_2to3_fixers = None
+
+    def finalize_options(self):
+        self.set_undefined_options('build',
+                                   'use_2to3', 'use_2to3_fixers',
+                                   'convert_2to3_doctests', 'build_lib',
+                                   'force')
+
+        # Get the distribution options that are aliases for build_py
+        # options -- list of packages and list of modules.
+        self.packages = self.distribution.packages
+        self.py_modules = self.distribution.py_modules
+        self.package_data = self.distribution.package_data
+        self.package_dir = None
+        if self.distribution.package_dir is not None:
+            self.package_dir = convert_path(self.distribution.package_dir)
+        self.data_files = self.get_data_files()
+
+        # Ick, copied straight from install_lib.py (fancy_getopt needs a
+        # type system!  Hell, *everything* needs a type system!!!)
+        if not isinstance(self.optimize, int):
+            try:
+                self.optimize = int(self.optimize)
+                assert 0 <= self.optimize <= 2
+            except (ValueError, AssertionError):
+                raise PackagingOptionError("optimize must be 0, 1, or 2")
+
+    def run(self):
+        # XXX copy_file by default preserves atime and mtime.  IMHO this is
+        # the right thing to do, but perhaps it should be an option -- in
+        # particular, a site administrator might want installed files to
+        # reflect the time of installation rather than the last
+        # modification time before the installed release.
+
+        # XXX copy_file by default preserves mode, which appears to be the
+        # wrong thing to do: if a file is read-only in the working
+        # directory, we want it to be installed read/write so that the next
+        # installation of the same module distribution can overwrite it
+        # without problems.  (This might be a Unix-specific issue.)  Thus
+        # we turn off 'preserve_mode' when copying to the build directory,
+        # since the build directory is supposed to be exactly what the
+        # installation will look like (ie. we preserve mode when
+        # installing).
+
+        # Two options control which modules will be installed: 'packages'
+        # and 'py_modules'.  The former lets us work with whole packages, not
+        # specifying individual modules at all; the latter is for
+        # specifying modules one-at-a-time.
+
+        if self.py_modules:
+            self.build_modules()
+        if self.packages:
+            self.build_packages()
+            self.build_package_data()
+
+        if self.use_2to3 and self._updated_files:
+            self.run_2to3(self._updated_files, self._doctests_2to3,
+                          self.use_2to3_fixers)
+
+        self.byte_compile(self.get_outputs(include_bytecode=False))
+
+    # -- Top-level worker functions ------------------------------------
+
+    def get_data_files(self):
+        """Generate list of '(package,src_dir,build_dir,filenames)' tuples.
+
+        Helper function for `finalize_options()`.
+        """
+        data = []
+        if not self.packages:
+            return data
+        for package in self.packages:
+            # Locate package source directory
+            src_dir = self.get_package_dir(package)
+
+            # Compute package build directory
+            build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+            # Length of path to strip from found files
+            plen = 0
+            if src_dir:
+                plen = len(src_dir)+1
+
+            # Strip directory from globbed filenames
+            filenames = [
+                file[plen:] for file in self.find_data_files(package, src_dir)
+                ]
+            data.append((package, src_dir, build_dir, filenames))
+        return data
+
+    def find_data_files(self, package, src_dir):
+        """Return filenames for package's data files in 'src_dir'.
+
+        Helper function for `get_data_files()`.
+        """
+        globs = (self.package_data.get('', [])
+                 + self.package_data.get(package, []))
+        files = []
+        for pattern in globs:
+            # Each pattern has to be converted to a platform-specific path
+            filelist = glob(os.path.join(src_dir, convert_path(pattern)))
+            # Files that match more than one pattern are only added once
+            files.extend(fn for fn in filelist if fn not in files)
+        return files
+
+    def build_package_data(self):
+        """Copy data files into build directory.
+
+        Helper function for `run()`.
+        """
+        # FIXME add tests for this method
+        for package, src_dir, build_dir, filenames in self.data_files:
+            for filename in filenames:
+                target = os.path.join(build_dir, filename)
+                srcfile = os.path.join(src_dir, filename)
+                self.mkpath(os.path.dirname(target))
+                outf, copied = self.copy_file(srcfile, target,
+                                              preserve_mode=False)
+                if copied and srcfile in self.distribution.convert_2to3_doctests:
+                    self._doctests_2to3.append(outf)
+
+    # XXX - this should be moved to the Distribution class as it is not
+    # only needed for build_py. It also has no dependencies on this class.
+    def get_package_dir(self, package):
+        """Return the directory, relative to the top of the source
+           distribution, where package 'package' should be found
+           (at least according to the 'package_dir' option, if any)."""
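+        # e.g. 'foo.bar' -> 'foo/bar', or '<package_dir>/foo/bar' when the
+        # package_dir option is set (illustrative POSIX paths)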
+
+        path = package.split('.')
+        if self.package_dir is not None:
+            path.insert(0, self.package_dir)
+
+        if len(path) > 0:
+            return os.path.join(*path)
+
+        return ''
+
+    def check_package(self, package, package_dir):
+        """Helper function for `find_package_modules()` and `find_modules()'.
+        """
+        # Empty dir name means current directory, which we can probably
+        # assume exists.  Also, os.path.exists and isdir don't know about
+        # my "empty string means current dir" convention, so we have to
+        # circumvent them.
+        if package_dir != "":
+            if not os.path.exists(package_dir):
+                raise PackagingFileError(
+                      "package directory '%s' does not exist" % package_dir)
+            if not os.path.isdir(package_dir):
+                raise PackagingFileError(
+                       "supposed package directory '%s' exists, "
+                       "but is not a directory" % package_dir)
+
+        # Require __init__.py for all but the "root package"
+        if package:
+            init_py = os.path.join(package_dir, "__init__.py")
+            if os.path.isfile(init_py):
+                return init_py
+            else:
+                logger.warning(("package init file '%s' not found " +
+                                "(or not a regular file)"), init_py)
+
+        # Either not in a package at all (__init__.py not expected), or
+        # __init__.py doesn't exist -- so don't return the filename.
+        return None
+
+    def check_module(self, module, module_file):
+        if not os.path.isfile(module_file):
+            logger.warning("file %s (for module %s) not found",
+                           module_file, module)
+            return False
+        else:
+            return True
+
+    def find_package_modules(self, package, package_dir):
+        self.check_package(package, package_dir)
+        module_files = glob(os.path.join(package_dir, "*.py"))
+        modules = []
+        if self.distribution.script_name is not None:
+            setup_script = os.path.abspath(self.distribution.script_name)
+        else:
+            setup_script = None
+
+        for f in module_files:
+            abs_f = os.path.abspath(f)
+            if abs_f != setup_script:
+                module = os.path.splitext(os.path.basename(f))[0]
+                modules.append((package, module, f))
+            else:
+                logger.debug("excluding %s", setup_script)
+        return modules
+
+    def find_modules(self):
+        """Finds individually-specified Python modules, ie. those listed by
+        module name in 'self.py_modules'.  Returns a list of tuples (package,
+        module_base, filename): 'package' is a tuple of the path through
+        package-space to the module; 'module_base' is the bare (no
+        packages, no dots) module name, and 'filename' is the path to the
+        ".py" file (relative to the distribution root) that implements the
+        module.
+        """
+        # Map package names to tuples of useful info about the package:
+        #    (package_dir, checked)
+        # package_dir - the directory where we'll find source files for
+        #   this package
+        # checked - true if we have checked that the package directory
+        #   is valid (exists, contains __init__.py, ... ?)
+        packages = {}
+
+        # List of (package, module, filename) tuples to return
+        modules = []
+
+        # We treat modules-in-packages almost the same as toplevel modules,
+        # just the "package" for a toplevel is empty (either an empty
+        # string or empty list, depending on context).  Differences:
+        #   - don't check for __init__.py in directory for empty package
+        for module in self.py_modules:
+            path = module.split('.')
+            package = '.'.join(path[0:-1])
+            module_base = path[-1]
+
+            try:
+                package_dir, checked = packages[package]
+            except KeyError:
+                package_dir = self.get_package_dir(package)
+                checked = False
+
+            if not checked:
+                init_py = self.check_package(package, package_dir)
+                packages[package] = (package_dir, True)
+                if init_py:
+                    modules.append((package, "__init__", init_py))
+
+            # XXX perhaps we should also check for just .pyc files
+            # (so greedy closed-source bastards can distribute Python
+            # modules too)
+            module_file = os.path.join(package_dir, module_base + ".py")
+            if not self.check_module(module, module_file):
+                continue
+
+            modules.append((package, module_base, module_file))
+
+        return modules
+
+    def find_all_modules(self):
+        """Compute the list of all modules that will be built, whether
+        they are specified one-module-at-a-time ('self.py_modules') or
+        by whole packages ('self.packages').  Return a list of tuples
+        (package, module, module_file), just like 'find_modules()' and
+        'find_package_modules()' do."""
+        modules = []
+        if self.py_modules:
+            modules.extend(self.find_modules())
+        if self.packages:
+            for package in self.packages:
+                package_dir = self.get_package_dir(package)
+                m = self.find_package_modules(package, package_dir)
+                modules.extend(m)
+        return modules
+
+    def get_source_files(self):
+        sources = [module[-1] for module in self.find_all_modules()]
+        sources += [
+            os.path.join(src_dir, filename)
+            for package, src_dir, build_dir, filenames in self.data_files
+            for filename in filenames]
+        return sources
+
+    def get_module_outfile(self, build_dir, package, module):
+        outfile_path = [build_dir] + list(package) + [module + ".py"]
+        return os.path.join(*outfile_path)
+
+    def get_outputs(self, include_bytecode=True):
+        modules = self.find_all_modules()
+        outputs = []
+        for package, module, module_file in modules:
+            package = package.split('.')
+            filename = self.get_module_outfile(self.build_lib, package, module)
+            outputs.append(filename)
+            if include_bytecode:
+                if self.compile:
+                    outputs.append(filename + "c")
+                if self.optimize > 0:
+                    outputs.append(filename + "o")
+
+        outputs += [
+            os.path.join(build_dir, filename)
+            for package, src_dir, build_dir, filenames in self.data_files
+            for filename in filenames]
+
+        return outputs
+
+    def build_module(self, module, module_file, package):
+        if isinstance(package, str):
+            package = package.split('.')
+        elif not isinstance(package, (list, tuple)):
+            raise TypeError(
+                  "'package' must be a string (dot-separated), list, or tuple")
+
+        # Now put the module source file into the "build" area -- this is
+        # easy, we just copy it somewhere under self.build_lib (the build
+        # directory for Python source).
+        outfile = self.get_module_outfile(self.build_lib, package, module)
+        dir = os.path.dirname(outfile)
+        self.mkpath(dir)
+        return self.copy_file(module_file, outfile, preserve_mode=False)
+
+    def build_modules(self):
+        modules = self.find_modules()
+        for package, module, module_file in modules:
+
+            # Now "build" the module -- ie. copy the source file to
+            # self.build_lib (the build directory for Python source).
+            # (Actually, it gets copied to the directory for this package
+            # under self.build_lib.)
+            self.build_module(module, module_file, package)
+
+    def build_packages(self):
+        for package in self.packages:
+
+            # Get list of (package, module, module_file) tuples based on
+            # scanning the package directory.  'package' is only included
+            # in the tuple so that 'find_modules()' and
+            # 'find_package_tuples()' have a consistent interface; it's
+            # ignored here (apart from a sanity check).  Also, 'module' is
+            # the *unqualified* module name (ie. no dots, no package -- we
+            # already know its package!), and 'module_file' is the path to
+            # the .py file, relative to the current directory
+            # (ie. including 'package_dir').
+            package_dir = self.get_package_dir(package)
+            modules = self.find_package_modules(package, package_dir)
+
+            # Now loop over the modules we found, "building" each one (just
+            # copy it to self.build_lib).
+            for package_, module, module_file in modules:
+                assert package == package_
+                self.build_module(module, module_file, package)
+
+    def byte_compile(self, files):
+        if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode:
+            logger.warning('%s: byte-compiling is disabled, skipping.',
+                           self.get_command_name())
+            return
+
+        from packaging.util import byte_compile
+        prefix = self.build_lib
+        if prefix[-1] != os.sep:
+            prefix = prefix + os.sep
+
+        # XXX this code is essentially the same as the 'byte_compile()'
+        # method of the "install_lib" command, except for the determination
+        # of the 'prefix' string.  Hmmm.
+
+        if self.compile:
+            byte_compile(files, optimize=0,
+                         force=self.force, prefix=prefix, dry_run=self.dry_run)
+        if self.optimize > 0:
+            byte_compile(files, optimize=self.optimize,
+                         force=self.force, prefix=prefix, dry_run=self.dry_run)
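For illustration (not part of the patch), the path logic used by get_module_outfile and get_outputs above boils down to joining the build directory, the package path and the module name, with "c" or "o" appended for byte-compiled variants; the values below are hypothetical:

    import os

    def get_module_outfile(build_dir, package, module):
        # same joining rule as build_py.get_module_outfile above
        return os.path.join(*([build_dir] + list(package) + [module + ".py"]))

    print(get_module_outfile("build/lib", ["spam", "ham"], "eggs"))
    # build/lib/spam/ham/eggs.py -- with --compile the .pyc output is the
    # same path plus "c", and with -O the .pyo output is the path plus "o"
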
diff --git a/Lib/packaging/command/build_scripts.py b/Lib/packaging/command/build_scripts.py
new file mode 100644
index 0000000..fe14e0a
--- /dev/null
+++ b/Lib/packaging/command/build_scripts.py
@@ -0,0 +1,154 @@
+"""Build scripts (copy to build dir and fix up shebang line)."""
+
+import os
+import re
+import sysconfig
+import tokenize
+
+from packaging.command.cmd import Command
+from packaging.util import convert_path, newer
+from packaging import logger
+from packaging.compat import Mixin2to3
+
+
+# check if Python is called on the first line with this expression
+first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$')
+
+class build_scripts(Command, Mixin2to3):
+
+    description = "build scripts (copy and fix up shebang line)"
+
+    user_options = [
+        ('build-dir=', 'd', "directory to build (copy) to"),
+        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+        ('executable=', 'e', "specify final destination interpreter path"),
+        ]
+
+    boolean_options = ['force']
+
+
+    def initialize_options(self):
+        self.build_dir = None
+        self.scripts = None
+        self.force = None
+        self.executable = None
+        self.outfiles = None
+        self.use_2to3 = False
+        self.convert_2to3_doctests = None
+        self.use_2to3_fixers = None
+
+    def finalize_options(self):
+        self.set_undefined_options('build',
+                                   ('build_scripts', 'build_dir'),
+                                   'use_2to3', 'use_2to3_fixers',
+                                   'convert_2to3_doctests', 'force',
+                                   'executable')
+        self.scripts = self.distribution.scripts
+
+    def get_source_files(self):
+        return self.scripts
+
+    def run(self):
+        if not self.scripts:
+            return
+        copied_files = self.copy_scripts()
+        if self.use_2to3 and copied_files:
+            self._run_2to3(copied_files, fixers=self.use_2to3_fixers)
+
+    def copy_scripts(self):
+        """Copy each script listed in 'self.scripts'; if it's marked as a
+        Python script in the Unix way (first line matches 'first_line_re',
+        ie. starts with "#!" and contains "python"), then adjust the first
+        line to refer to the current Python interpreter as we copy.
+        """
+        self.mkpath(self.build_dir)
+        outfiles = []
+        for script in self.scripts:
+            adjust = False
+            script = convert_path(script)
+            outfile = os.path.join(self.build_dir, os.path.basename(script))
+            outfiles.append(outfile)
+
+            if not self.force and not newer(script, outfile):
+                logger.debug("not copying %s (up-to-date)", script)
+                continue
+
+            # Always open the file, but ignore failures in dry-run mode --
+            # that way, we'll get accurate feedback if we can read the
+            # script.
+            try:
+                f = open(script, "rb")
+            except IOError:
+                if not self.dry_run:
+                    raise
+                f = None
+            else:
+                encoding, lines = tokenize.detect_encoding(f.readline)
+                f.seek(0)
+                first_line = f.readline()
+                if not first_line:
+                    logger.warning('%s: %s is an empty file (skipping)',
+                                   self.get_command_name(), script)
+                    continue
+
+                match = first_line_re.match(first_line)
+                if match:
+                    adjust = True
+                    post_interp = match.group(1) or b''
+
+            if adjust:
+                logger.info("copying and adjusting %s -> %s", script,
+                         self.build_dir)
+                if not self.dry_run:
+                    if not sysconfig.is_python_build():
+                        executable = self.executable
+                    else:
+                        executable = os.path.join(
+                            sysconfig.get_config_var("BINDIR"),
+                           "python%s%s" % (sysconfig.get_config_var("VERSION"),
+                                           sysconfig.get_config_var("EXE")))
+                    executable = os.fsencode(executable)
+                    shebang = b"#!" + executable + post_interp + b"\n"
+                    # The Python parser reads a script as UTF-8 until it
+                    # finds a #coding:xxx cookie.  The shebang has to be
+                    # the first line of the file, so the cookie cannot
+                    # come before it; the shebang must therefore be
+                    # decodable from UTF-8.
+                    try:
+                        shebang.decode('utf-8')
+                    except UnicodeDecodeError:
+                        raise ValueError(
+                            "The shebang ({!r}) is not decodable "
+                            "from utf-8".format(shebang))
+                    # If the script uses a custom encoding (declared with a
+                    # #coding:xxx cookie), the shebang has to be decodable
+                    # from that encoding too.
+                    try:
+                        shebang.decode(encoding)
+                    except UnicodeDecodeError:
+                        raise ValueError(
+                            "The shebang ({!r}) is not decodable "
+                            "from the script encoding ({})"
+                            .format(shebang, encoding))
+                    with open(outfile, "wb") as outf:
+                        outf.write(shebang)
+                        outf.writelines(f.readlines())
+                if f:
+                    f.close()
+            else:
+                if f:
+                    f.close()
+                self.copy_file(script, outfile)
+
+        if os.name == 'posix':
+            for file in outfiles:
+                if self.dry_run:
+                    logger.info("changing mode of %s", file)
+                else:
+                    oldmode = os.stat(file).st_mode & 0o7777
+                    newmode = (oldmode | 0o555) & 0o7777
+                    if newmode != oldmode:
+                        logger.info("changing mode of %s from %o to %o",
+                                 file, oldmode, newmode)
+                        os.chmod(file, newmode)
+        return outfiles
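A minimal sketch of how first_line_re drives the shebang rewriting above; the interpreter path and script lines are made up:

    import re

    first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$')

    for line in (b"#!/usr/bin/python3.2 -u", b"#!/bin/sh"):
        match = first_line_re.match(line)
        if match:
            # interpreter options after the path (b" -u" here) are kept
            post_interp = match.group(1) or b''
            print(b"#!" + b"/opt/python/bin/python3" + post_interp)
        else:
            print(b"no adjustment, copied verbatim:", line)
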
diff --git a/Lib/packaging/command/check.py b/Lib/packaging/command/check.py
new file mode 100644
index 0000000..94c4a97
--- /dev/null
+++ b/Lib/packaging/command/check.py
@@ -0,0 +1,88 @@
+"""Check PEP compliance of metadata."""
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingSetupError
+from packaging.util import resolve_name
+
+class check(Command):
+
+    description = "check PEP compliance of metadata"
+
+    user_options = [('metadata', 'm', 'verify metadata'),
+                    ('all', 'a',
+                     'run extended set of checks'),
+                    ('strict', 's',
+                     'exit with an error if a check fails')]
+
+    boolean_options = ['metadata', 'all', 'strict']
+
+    def initialize_options(self):
+        """Sets default values for options."""
+        self.all = False
+        self.metadata = True
+        self.strict = False
+        self._warnings = []
+
+    def finalize_options(self):
+        pass
+
+    def warn(self, msg, *args):
+        """Wrapper around logging that also remembers messages."""
+        # XXX we could use a special handler for this, but would need to test
+        # if it works even if the logger has a too high level
+        self._warnings.append((msg, args))
+        return logger.warning(self.get_command_name() + ': ' + msg, *args)
+
+    def run(self):
+        """Runs the command."""
+        # perform the various tests
+        if self.metadata:
+            self.check_metadata()
+        if self.all:
+            self.check_restructuredtext()
+            self.check_hooks_resolvable()
+
+        # let's raise an error in strict mode, if we have at least
+        # one warning
+        if self.strict and self._warnings:
+            msg = '\n'.join(msg % args for msg, args in self._warnings)
+            raise PackagingSetupError(msg)
+
+    def check_metadata(self):
+        """Ensures that all required elements of metadata are supplied.
+
+        name, version, URL, author
+
+        Warns if any are missing.
+        """
+        missing, warnings = self.distribution.metadata.check(strict=True)
+        if missing:
+            self.warn('missing required metadata: %s', ', '.join(missing))
+        for warning in warnings:
+            self.warn(warning)
+
+    def check_restructuredtext(self):
+        """Checks if the long string fields are reST-compliant."""
+        missing, warnings = self.distribution.metadata.check(
+            restructuredtext=True)
+        if self.distribution.metadata.docutils_support:
+            for warning in warnings:
+                line = warning[-1].get('line')
+                if line is None:
+                    warning = warning[1]
+                else:
+                    warning = '%s (line %s)' % (warning[1], line)
+                self.warn(warning)
+        elif self.strict:
+            raise PackagingSetupError('The docutils package is needed.')
+
+    def check_hooks_resolvable(self):
+        for options in self.distribution.command_options.values():
+            for hook_kind in ("pre_hook", "post_hook"):
+                if hook_kind not in options:
+                    continue
+                for hook_name in options[hook_kind][1].values():
+                    try:
+                        resolve_name(hook_name)
+                    except ImportError:
+                        self.warn('name %r cannot be resolved', hook_name)
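To make the strict-mode behaviour of run() concrete, here is roughly what happens when two warnings have been recorded (the messages are hypothetical):

    # what check.run() does when self.strict is true and warnings exist
    warnings = [('missing required metadata: %s', ('name, version',)),
                ('name %r cannot be resolved', ('mypkg.hooks.pre',))]
    msg = '\n'.join(m % args for m, args in warnings)
    # raise PackagingSetupError(msg) would then abort the command
    print(msg)
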
diff --git a/Lib/packaging/command/clean.py b/Lib/packaging/command/clean.py
new file mode 100644
index 0000000..4f60f4e
--- /dev/null
+++ b/Lib/packaging/command/clean.py
@@ -0,0 +1,76 @@
+"""Clean up temporary files created by the build command."""
+
+# Contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>
+
+import os
+from shutil import rmtree
+from packaging.command.cmd import Command
+from packaging import logger
+
+class clean(Command):
+
+    description = "clean up temporary files from 'build' command"
+    user_options = [
+        ('build-base=', 'b',
+         "base build directory (default: 'build.build-base')"),
+        ('build-lib=', None,
+         "build directory for all modules (default: 'build.build-lib')"),
+        ('build-temp=', 't',
+         "temporary build directory (default: 'build.build-temp')"),
+        ('build-scripts=', None,
+         "build directory for scripts (default: 'build.build-scripts')"),
+        ('bdist-base=', None,
+         "temporary directory for built distributions"),
+        ('all', 'a',
+         "remove all build output, not just temporary by-products")
+    ]
+
+    boolean_options = ['all']
+
+    def initialize_options(self):
+        self.build_base = None
+        self.build_lib = None
+        self.build_temp = None
+        self.build_scripts = None
+        self.bdist_base = None
+        self.all = None
+
+    def finalize_options(self):
+        self.set_undefined_options('build', 'build_base', 'build_lib',
+                                   'build_scripts', 'build_temp')
+        self.set_undefined_options('bdist', 'bdist_base')
+
+    def run(self):
+        # remove the build/temp.<plat> directory (unless it's already
+        # gone)
+        if os.path.exists(self.build_temp):
+            if self.dry_run:
+                logger.info('removing %s', self.build_temp)
+            else:
+                rmtree(self.build_temp)
+        else:
+            logger.debug("'%s' does not exist -- can't clean it",
+                         self.build_temp)
+
+        if self.all:
+            # remove build directories
+            for directory in (self.build_lib,
+                              self.bdist_base,
+                              self.build_scripts):
+                if os.path.exists(directory):
+                    if self.dry_run:
+                        logger.info('removing %s', directory)
+                    else:
+                        rmtree(directory)
+                else:
+                    logger.warning("'%s' does not exist -- can't clean it",
+                                   directory)
+
+        # just for the heck of it, try to remove the base build directory:
+        # we might have emptied it right now, but if not we don't care
+        if not self.dry_run:
+            try:
+                os.rmdir(self.build_base)
+                logger.info("removing '%s'", self.build_base)
+            except OSError:
+                pass
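The dry-run handling in run() above follows a small pattern worth spelling out; a standalone sketch with a hypothetical path:

    import os
    from shutil import rmtree

    def remove_tree(path, dry_run):
        # log-only in dry-run mode, rmtree otherwise (as clean.run() does)
        if not os.path.exists(path):
            return
        if dry_run:
            print('removing %s' % path)
        else:
            rmtree(path)

    remove_tree('build/temp.linux-x86_64-3.3', dry_run=True)
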
diff --git a/Lib/packaging/command/cmd.py b/Lib/packaging/command/cmd.py
new file mode 100644
index 0000000..fa56aa6
--- /dev/null
+++ b/Lib/packaging/command/cmd.py
@@ -0,0 +1,440 @@
+"""Base class for commands."""
+
+import os
+import re
+from shutil import copyfile, move, make_archive
+from packaging import util
+from packaging import logger
+from packaging.errors import PackagingOptionError
+
+
+class Command:
+    """Abstract base class for defining command classes, the "worker bees"
+    of the Packaging.  A useful analogy for command classes is to think of
+    them as subroutines with local variables called "options".  The options
+    are "declared" in 'initialize_options()' and "defined" (given their
+    final values, aka "finalized") in 'finalize_options()', both of which
+    must be defined by every command class.  The distinction between the
+    two is necessary because option values might come from the outside
+    world (command line, config file, ...), and any options dependent on
+    other options must be computed *after* these outside influences have
+    been processed -- hence 'finalize_options()'.  The "body" of the
+    subroutine, where it does all its work based on the values of its
+    options, is the 'run()' method, which must also be implemented by every
+    command class.
+    """
+
+    # 'sub_commands' formalizes the notion of a "family" of commands,
+    # eg. "install_dist" as the parent with sub-commands "install_lib",
+    # "install_headers", etc.  The parent of a family of commands
+    # defines 'sub_commands' as a class attribute; it's a list of
+    #    (command_name : string, predicate : unbound_method | string | None)
+    # tuples, where 'predicate' is a method of the parent command that
+    # determines whether the corresponding command is applicable in the
+    # current situation.  (E.g. "install_headers" is only applicable if
+    # we have any C header files to install.)  If 'predicate' is None,
+    # that command is always applicable.
+    #
+    # 'sub_commands' is usually defined at the *end* of a class, because
+    # predicates can be unbound methods, so they must already have been
+    # defined.  The canonical example is the "install_dist" command.
+    sub_commands = []
+
+    # Pre and post command hooks are run just before or just after the command
+    # itself. They are simple functions that receive the command instance. They
+    # are specified as callable objects or dotted strings (for lazy loading).
+    pre_hook = None
+    post_hook = None
+
+    # -- Creation/initialization methods -------------------------------
+
+    def __init__(self, dist):
+        """Create and initialize a new Command object.  Most importantly,
+        invokes the 'initialize_options()' method, which is the real
+        initializer and depends on the actual command being instantiated.
+        """
+        # late import because of mutual dependence between these classes
+        from packaging.dist import Distribution
+
+        if not isinstance(dist, Distribution):
+            raise TypeError("dist must be a Distribution instance")
+        if self.__class__ is Command:
+            raise RuntimeError("Command is an abstract class")
+
+        self.distribution = dist
+        self.initialize_options()
+
+        # Per-command versions of the global flags, so that the user can
+        # customize Packaging's behaviour command-by-command and let some
+        # commands fall back on the Distribution's behaviour.  None means
+        # "not defined, check self.distribution's copy", while 0 or 1 mean
+        # false and true (duh).  Note that this means figuring out the real
+        # value of each flag is a touch complicated -- hence "self._dry_run"
+        # will be handled by a property, below.
+        # XXX This needs to be fixed. [I changed it to a property--does that
+        #     "fix" it?]
+        self._dry_run = None
+
+        # Some commands define a 'self.force' option to ignore file
+        # timestamps, but methods defined *here* assume that
+        # 'self.force' exists for all commands.  So define it here
+        # just to be safe.
+        self.force = None
+
+        # The 'help' flag is just used for command line parsing, so
+        # none of that complicated bureaucracy is needed.
+        self.help = False
+
+        # 'finalized' records whether or not 'finalize_options()' has been
+        # called.  'finalize_options()' itself should not pay attention to
+        # this flag: it is the business of 'ensure_finalized()', which
+        # always calls 'finalize_options()', to respect/update it.
+        self.finalized = False
+
+    # XXX A more explicit way to customize dry_run would be better.
+    @property
+    def dry_run(self):
+        if self._dry_run is None:
+            return self.distribution.dry_run
+        else:
+            return self._dry_run
+
+    def ensure_finalized(self):
+        if not self.finalized:
+            self.finalize_options()
+        self.finalized = True
+
+    # Subclasses must define:
+    #   initialize_options()
+    #     provide default values for all options; may be customized by
+    #     setup script, by options from config file(s), or by command-line
+    #     options
+    #   finalize_options()
+    #     decide on the final values for all options; this is called
+    #     after all possible intervention from the outside world
+    #     (command line, option file, etc.) has been processed
+    #   run()
+    #     run the command: do whatever it is we're here to do,
+    #     controlled by the command's various option values
+
+    def initialize_options(self):
+        """Set default values for all the options that this command
+        supports.  Note that these defaults may be overridden by other
+        commands, by the setup script, by config files, or by the
+        command line.  Thus, this is not the place to code dependencies
+        between options; generally, 'initialize_options()' implementations
+        are just a bunch of "self.foo = None" assignments.
+
+        This method must be implemented by all command classes.
+        """
+        raise RuntimeError(
+            "abstract method -- subclass %s must override" % self.__class__)
+
+    def finalize_options(self):
+        """Set final values for all the options that this command supports.
+        This is always called as late as possible, ie.  after any option
+        assignments from the command line or from other commands have been
+        done.  Thus, this is the place to code option dependencies: if
+        'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
+        long as 'foo' still has the same value it was assigned in
+        'initialize_options()'.
+
+        This method must be implemented by all command classes.
+        """
+        raise RuntimeError(
+            "abstract method -- subclass %s must override" % self.__class__)
+
+    def dump_options(self, header=None, indent=""):
+        if header is None:
+            header = "command options for '%s':" % self.get_command_name()
+        logger.info(indent + header)
+        indent = indent + "  "
+        negative_opt = getattr(self, 'negative_opt', ())
+        for option, _, _ in self.user_options:
+            if option in negative_opt:
+                continue
+            option = option.replace('-', '_')
+            if option[-1] == "=":
+                option = option[:-1]
+            value = getattr(self, option)
+            logger.info(indent + "%s = %s", option, value)
+
+    def run(self):
+        """A command's raison d'etre: carry out the action it exists to
+        perform, controlled by the options initialized in
+        'initialize_options()', customized by other commands, the setup
+        script, the command line and config files, and finalized in
+        'finalize_options()'.  All terminal output and filesystem
+        interaction should be done by 'run()'.
+
+        This method must be implemented by all command classes.
+        """
+        raise RuntimeError(
+            "abstract method -- subclass %s must override" % self.__class__)
+
+    # -- External interface --------------------------------------------
+    # (called by outsiders)
+
+    def get_source_files(self):
+        """Return the list of files that are used as inputs to this command,
+        i.e. the files used to generate the output files.  The result is used
+        by the `sdist` command in determining the set of default files.
+
+        Command classes should implement this method if they operate on files
+        from the source tree.
+        """
+        return []
+
+    def get_outputs(self):
+        """Return the list of files that would be produced if this command
+        were actually run.  Not affected by the "dry-run" flag or whether
+        any other commands have been run.
+
+        Command classes should implement this method if they produce any
+        output files that get consumed by another command.  e.g., `build_ext`
+        returns the list of built extension modules, but not any temporary
+        files used in the compilation process.
+        """
+        return []
+
+    # -- Option validation methods -------------------------------------
+    # (these are very handy in writing the 'finalize_options()' method)
+    #
+    # NB. the general philosophy here is to ensure that a particular option
+    # value meets certain type and value constraints.  If not, we try to
+    # force it into conformance (eg. if we expect a list but have a string,
+    # split the string on comma and/or whitespace).  If we can't force the
+    # option into conformance, raise PackagingOptionError.  Thus, command
+    # classes need do nothing more than (eg.)
+    #   self.ensure_string_list('foo')
+    # and they can be guaranteed that thereafter, self.foo will be
+    # a list of strings.
+
+    def _ensure_stringlike(self, option, what, default=None):
+        val = getattr(self, option)
+        if val is None:
+            setattr(self, option, default)
+            return default
+        elif not isinstance(val, str):
+            raise PackagingOptionError("'%s' must be a %s (got `%s`)" %
+                                       (option, what, val))
+        return val
+
+    def ensure_string(self, option, default=None):
+        """Ensure that 'option' is a string; if not defined, set it to
+        'default'.
+        """
+        self._ensure_stringlike(option, "string", default)
+
+    def ensure_string_list(self, option):
+        r"""Ensure that 'option' is a list of strings.  If 'option' is
+        currently a string, we split it either on /,\s*/ or /\s+/, so
+        "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
+        ["foo", "bar", "baz"].
+        """
+        val = getattr(self, option)
+        if val is None:
+            return
+        elif isinstance(val, str):
+            setattr(self, option, re.split(r',\s*|\s+', val))
+        else:
+            if isinstance(val, list):
+                # checks if all elements are str
+                ok = True
+                for element in val:
+                    if not isinstance(element, str):
+                        ok = False
+                        break
+            else:
+                ok = False
+
+            if not ok:
+                raise PackagingOptionError(
+                    "'%s' must be a list of strings (got %r)" % (option, val))
+
+    def _ensure_tested_string(self, option, tester,
+                              what, error_fmt, default=None):
+        val = self._ensure_stringlike(option, what, default)
+        if val is not None and not tester(val):
+            raise PackagingOptionError(
+                ("error in '%s' option: " + error_fmt) % (option, val))
+
+    def ensure_filename(self, option):
+        """Ensure that 'option' is the name of an existing file."""
+        self._ensure_tested_string(option, os.path.isfile,
+                                   "filename",
+                                   "'%s' does not exist or is not a file")
+
+    def ensure_dirname(self, option):
+        self._ensure_tested_string(option, os.path.isdir,
+                                   "directory name",
+                                   "'%s' does not exist or is not a directory")
+
+    # -- Convenience methods for commands ------------------------------
+
+    @classmethod
+    def get_command_name(cls):
+        if hasattr(cls, 'command_name'):
+            return cls.command_name
+        else:
+            return cls.__name__
+
+    def set_undefined_options(self, src_cmd, *options):
+        """Set values of undefined options from another command.
+
+        Undefined options are options set to None, which is the convention
+        used to indicate that an option has not been changed between
+        'initialize_options()' and 'finalize_options()'.  This method is
+        usually called from 'finalize_options()' for options that depend on
+        some other command rather than another option of the same command,
+        typically subcommands.
+
+        The 'src_cmd' argument is the other command from which option values
+        will be taken (a command object will be created for it if necessary);
+        the remaining positional arguments are strings that give the name of
+        the option to set. If the name is different on the source and target
+        command, you can pass a tuple with '(name_on_source, name_on_dest)' so
+        that 'self.name_on_dest' will be set from 'src_cmd.name_on_source'.
+        """
+        src_cmd_obj = self.distribution.get_command_obj(src_cmd)
+        src_cmd_obj.ensure_finalized()
+        for obj in options:
+            if isinstance(obj, tuple):
+                src_option, dst_option = obj
+            else:
+                src_option, dst_option = obj, obj
+            if getattr(self, dst_option) is None:
+                setattr(self, dst_option,
+                        getattr(src_cmd_obj, src_option))
+
+    def get_finalized_command(self, command, create=True):
+        """Wrapper around Distribution's 'get_command_obj()' method: find
+        (create if necessary and 'create' is true) the command object for
+        'command', call its 'ensure_finalized()' method, and return the
+        finalized command object.
+        """
+        cmd_obj = self.distribution.get_command_obj(command, create)
+        cmd_obj.ensure_finalized()
+        return cmd_obj
+
+    def get_reinitialized_command(self, command, reinit_subcommands=False):
+        return self.distribution.get_reinitialized_command(
+            command, reinit_subcommands)
+
+    def run_command(self, command):
+        """Run some other command: uses the 'run_command()' method of
+        Distribution, which creates and finalizes the command object if
+        necessary and then invokes its 'run()' method.
+        """
+        self.distribution.run_command(command)
+
+    def get_sub_commands(self):
+        """Determine the sub-commands that are relevant in the current
+        distribution (ie., that need to be run).  This is based on the
+        'sub_commands' class attribute: each tuple in that list may include
+        a method that we call to determine if the subcommand needs to be
+        run for the current distribution.  Return a list of command names.
+        """
+        commands = []
+        for sub_command in self.sub_commands:
+            if len(sub_command) == 2:
+                cmd_name, method = sub_command
+                if method is None or method(self):
+                    commands.append(cmd_name)
+            else:
+                commands.append(sub_command)
+        return commands
+
+    # -- External world manipulation -----------------------------------
+
+    def execute(self, func, args, msg=None, level=1):
+        util.execute(func, args, msg, dry_run=self.dry_run)
+
+    def mkpath(self, name, mode=0o777, dry_run=None, verbose=0):
+        if dry_run is None:
+            dry_run = self.dry_run
+        name = os.path.normpath(name)
+        if os.path.isdir(name) or name == '':
+            return
+        if dry_run:
+            head = ''
+            for part in name.split(os.sep):
+                logger.info("created directory %s%s", head, part)
+                head += part + os.sep
+            return
+        os.makedirs(name, mode)
+
+    def copy_file(self, infile, outfile,
+                  preserve_mode=True, preserve_times=True, link=None, level=1):
+        """Copy a file respecting verbose, dry-run and force flags.  (The
+        former two default to whatever is in the Distribution object, and
+        the latter defaults to false for commands that don't define it.)"""
+        if self.dry_run:
+            # in dry-run mode, pretend the copy happened but don't touch
+            # the filesystem
+            return
+        if os.path.isdir(outfile):
+            outfile = os.path.join(outfile, os.path.split(infile)[-1])
+        copyfile(infile, outfile)
+        return outfile, None  # XXX
+
+    def copy_tree(self, infile, outfile, preserve_mode=True,
+                  preserve_times=True, preserve_symlinks=False, level=1):
+        """Copy an entire directory tree respecting verbose, dry-run,
+        and force flags.
+        """
+        if self.dry_run:
+            return  # see if we want to display something
+
+        return util.copy_tree(infile, outfile, preserve_mode, preserve_times,
+                              preserve_symlinks, not self.force,
+                              dry_run=self.dry_run)
+
+    def move_file(self, src, dst, level=1):
+        """Move a file respecting the dry-run flag."""
+        if self.dry_run:
+            return  # XXX log ?
+        return move(src, dst)
+
+    def spawn(self, cmd, search_path=True, level=1):
+        """Spawn an external command respecting dry-run flag."""
+        from packaging.util import spawn
+        spawn(cmd, search_path, dry_run=self.dry_run)
+
+    def make_archive(self, base_name, format, root_dir=None, base_dir=None,
+                     owner=None, group=None):
+        return make_archive(base_name, format, root_dir,
+                            base_dir, dry_run=self.dry_run,
+                            owner=owner, group=group)
+
+    def make_file(self, infiles, outfile, func, args,
+                  exec_msg=None, skip_msg=None, level=1):
+        """Special case of 'execute()' for operations that process one or
+        more input files and generate one output file.  Works just like
+        'execute()', except the operation is skipped and a different
+        message printed if 'outfile' already exists and is newer than all
+        files listed in 'infiles'.  If the command defined 'self.force',
+        and it is true, then the command is unconditionally run -- does no
+        timestamp checks.
+        """
+        if skip_msg is None:
+            skip_msg = "skipping %s (inputs unchanged)" % outfile
+
+        # Allow 'infiles' to be a single string
+        if isinstance(infiles, str):
+            infiles = (infiles,)
+        elif not isinstance(infiles, (list, tuple)):
+            raise TypeError(
+                "'infiles' must be a string, or a list or tuple of strings")
+
+        if exec_msg is None:
+            exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))
+
+        # If 'outfile' must be regenerated (either because it doesn't
+        # exist, is out-of-date, or the 'force' flag is true) then
+        # perform the action that presumably regenerates it
+        if self.force or util.newer_group(infiles, outfile):
+            self.execute(func, args, exec_msg, level)
+
+        # Otherwise, print the "skip" message
+        else:
+            logger.debug(skip_msg)
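As a usage sketch, a hypothetical command subclass showing the initialize/finalize split and set_undefined_options described in the docstrings above (the 'banner' command is invented for illustration):

    from packaging import logger
    from packaging.command.cmd import Command

    class banner(Command):

        description = "print the build directory (illustration only)"

        user_options = [('build-dir=', 'd', "directory the build uses")]

        def initialize_options(self):
            # "declare" the option; None means "not set yet"
            self.build_dir = None

        def finalize_options(self):
            # inherit build_lib from the 'build' command if still undefined
            self.set_undefined_options('build', ('build_lib', 'build_dir'))

        def run(self):
            logger.info("building into %s", self.build_dir)
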
diff --git a/Lib/packaging/command/command_template b/Lib/packaging/command/command_template
new file mode 100644
index 0000000..a12d32b
--- /dev/null
+++ b/Lib/packaging/command/command_template
@@ -0,0 +1,35 @@
+"""Do X and Y."""
+
+from packaging import logger
+from packaging.command.cmd import Command
+
+
+class x(Command):
+
+    # Brief (40-50 characters) description of the command
+    description = ""
+
+    # List of option tuples: long name, short name (None if no short
+    # name), and help string.
+    user_options = [
+        ('', '',  # long option, short option (one letter) or None
+         ""),  # help text
+        ]
+
+    def initialize_options(self):
+        self. = None
+        self. = None
+        self. = None
+
+    def finalize_options(self):
+        if self.x is None:
+            self.x = ...
+
+    def run(self):
+        ...
+        logger.info(...)
+
+        if not self.dry_run:
+            ...
+
+        self.execute(..., ...)  # Command.execute honors dry_run itself
diff --git a/Lib/packaging/command/config.py b/Lib/packaging/command/config.py
new file mode 100644
index 0000000..264c139
--- /dev/null
+++ b/Lib/packaging/command/config.py
@@ -0,0 +1,349 @@
+"""Prepare the build.
+
+This module provides config, a (mostly) empty command class
+that exists mainly to be sub-classed by specific module distributions and
+applications.  The idea is that while every "config" command is different,
+at least they're all named the same, and users always see "config" in the
+list of standard commands.  Also, this is a good place to put common
+configure-like tasks: "try to compile this C code", or "figure out where
+this header file lives".
+"""
+
+import os
+import re
+
+from packaging.command.cmd import Command
+from packaging.errors import PackagingExecError
+from packaging.compiler import customize_compiler
+from packaging import logger
+
+LANG_EXT = {'c': '.c', 'c++': '.cxx'}
+
+class config(Command):
+
+    description = "prepare the build"
+
+    user_options = [
+        ('compiler=', None,
+         "specify the compiler type"),
+        ('cc=', None,
+         "specify the compiler executable"),
+        ('include-dirs=', 'I',
+         "list of directories to search for header files"),
+        ('define=', 'D',
+         "C preprocessor macros to define"),
+        ('undef=', 'U',
+         "C preprocessor macros to undefine"),
+        ('libraries=', 'l',
+         "external C libraries to link with"),
+        ('library-dirs=', 'L',
+         "directories to search for external C libraries"),
+
+        ('noisy', None,
+         "show every action (compile, link, run, ...) taken"),
+        ('dump-source', None,
+         "dump generated source files before attempting to compile them"),
+        ]
+
+
+    # The three standard command methods: since the "config" command
+    # does nothing by default, these are empty.
+
+    def initialize_options(self):
+        self.compiler = None
+        self.cc = None
+        self.include_dirs = None
+        self.libraries = None
+        self.library_dirs = None
+
+        # maximal output for now
+        self.noisy = True
+        self.dump_source = True
+
+        # list of temporary files generated along-the-way that we have
+        # to clean at some point
+        self.temp_files = []
+
+    def finalize_options(self):
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        elif isinstance(self.include_dirs, str):
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        if self.libraries is None:
+            self.libraries = []
+        elif isinstance(self.libraries, str):
+            self.libraries = [self.libraries]
+
+        if self.library_dirs is None:
+            self.library_dirs = []
+        elif isinstance(self.library_dirs, str):
+            self.library_dirs = self.library_dirs.split(os.pathsep)
+
+    def run(self):
+        pass
+
+
+    # Utility methods for actual "config" commands.  The interfaces are
+    # loosely based on Autoconf macros of similar names.  Sub-classes
+    # may use these freely.
+
+    def _check_compiler(self):
+        """Check that 'self.compiler' really is a CCompiler object;
+        if not, make it one.
+        """
+        # We do this late, and only on-demand, because this is an expensive
+        # import.
+        from packaging.compiler.ccompiler import CCompiler
+        from packaging.compiler import new_compiler
+        if not isinstance(self.compiler, CCompiler):
+            self.compiler = new_compiler(compiler=self.compiler,
+                                         dry_run=self.dry_run, force=True)
+            customize_compiler(self.compiler)
+            if self.include_dirs:
+                self.compiler.set_include_dirs(self.include_dirs)
+            if self.libraries:
+                self.compiler.set_libraries(self.libraries)
+            if self.library_dirs:
+                self.compiler.set_library_dirs(self.library_dirs)
+
+
+    def _gen_temp_sourcefile(self, body, headers, lang):
+        filename = "_configtest" + LANG_EXT[lang]
+        with open(filename, "w") as file:
+            if headers:
+                for header in headers:
+                    file.write("#include <%s>\n" % header)
+                file.write("\n")
+            file.write(body)
+            if body[-1] != "\n":
+                file.write("\n")
+        return filename
+
+    def _preprocess(self, body, headers, include_dirs, lang):
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        out = "_configtest.i"
+        self.temp_files.extend((src, out))
+        self.compiler.preprocess(src, out, include_dirs=include_dirs)
+        return src, out
+
+    def _compile(self, body, headers, include_dirs, lang):
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        if self.dump_source:
+            dump_file(src, "compiling '%s':" % src)
+        obj = self.compiler.object_filenames([src])[0]
+        self.temp_files.extend((src, obj))
+        self.compiler.compile([src], include_dirs=include_dirs)
+        return src, obj
+
+    def _link(self, body, headers, include_dirs, libraries, library_dirs,
+              lang):
+        src, obj = self._compile(body, headers, include_dirs, lang)
+        prog = os.path.splitext(os.path.basename(src))[0]
+        self.compiler.link_executable([obj], prog,
+                                      libraries=libraries,
+                                      library_dirs=library_dirs,
+                                      target_lang=lang)
+
+        if self.compiler.exe_extension is not None:
+            prog = prog + self.compiler.exe_extension
+        self.temp_files.append(prog)
+
+        return src, obj, prog
+
+    def _clean(self, *filenames):
+        if not filenames:
+            filenames = self.temp_files
+            self.temp_files = []
+        logger.info("removing: %s", ' '.join(filenames))
+        for filename in filenames:
+            try:
+                os.remove(filename)
+            except OSError:
+                pass
+
+
+    # XXX these ignore the dry-run flag: what to do, what to do? even if
+    # you want a dry-run build, you still need some sort of configuration
+    # info.  My inclination is to make it up to the real config command to
+    # consult 'dry_run', and assume a default (minimal) configuration if
+    # true.  The problem with trying to do it here is that you'd have to
+    # return either true or false from all the 'try' methods, neither of
+    # which is correct.
+
+    # XXX need access to the header search path and maybe default macros.
+
+    def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
+        """Construct a source file from 'body' (a string containing lines
+        of C/C++ code) and 'headers' (a list of header files to include)
+        and run it through the preprocessor.  Return true if the
+        preprocessor succeeded, false if there were any errors.
+        ('body' probably isn't of much use, but what the heck.)
+        """
+        from packaging.compiler.ccompiler import CompileError
+        self._check_compiler()
+        ok = True
+        try:
+            self._preprocess(body, headers, include_dirs, lang)
+        except CompileError:
+            ok = False
+
+        self._clean()
+        return ok
+
+    def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
+                   lang="c"):
+        """Construct a source file (just like 'try_cpp()'), run it through
+        the preprocessor, and return true if any line of the output matches
+        'pattern'.  'pattern' should either be a compiled regex object or a
+        string containing a regex.  If both 'body' and 'headers' are None,
+        preprocesses an empty file -- which can be useful to determine the
+        symbols the preprocessor and compiler set by default.
+        """
+        self._check_compiler()
+        src, out = self._preprocess(body, headers, include_dirs, lang)
+
+        if isinstance(pattern, str):
+            pattern = re.compile(pattern)
+
+        with open(out) as file:
+            match = False
+            for line in file:
+                if pattern.search(line):
+                    match = True
+                    break
+
+        self._clean()
+        return match
+
+    def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
+        """Try to compile a source file built from 'body' and 'headers'.
+        Return true on success, false otherwise.
+        """
+        from packaging.compiler.ccompiler import CompileError
+        self._check_compiler()
+        try:
+            self._compile(body, headers, include_dirs, lang)
+            ok = True
+        except CompileError:
+            ok = False
+
+        logger.info("success!" if ok else "failure.")
+        self._clean()
+        return ok
+
+    def try_link(self, body, headers=None, include_dirs=None, libraries=None,
+                 library_dirs=None, lang="c"):
+        """Try to compile and link a source file, built from 'body' and
+        'headers', to executable form.  Return true on success, false
+        otherwise.
+        """
+        from packaging.compiler.ccompiler import CompileError, LinkError
+        self._check_compiler()
+        try:
+            self._link(body, headers, include_dirs,
+                       libraries, library_dirs, lang)
+            ok = True
+        except (CompileError, LinkError):
+            ok = False
+
+        logger.info("success!" if ok else "failure.")
+        self._clean()
+        return ok
+
+    def try_run(self, body, headers=None, include_dirs=None, libraries=None,
+                library_dirs=None, lang="c"):
+        """Try to compile, link to an executable, and run a program
+        built from 'body' and 'headers'.  Return true on success, false
+        otherwise.
+        """
+        from packaging.compiler.ccompiler import CompileError, LinkError
+        self._check_compiler()
+        try:
+            src, obj, exe = self._link(body, headers, include_dirs,
+                                       libraries, library_dirs, lang)
+            self.spawn([exe])
+            ok = True
+        except (CompileError, LinkError, PackagingExecError):
+            ok = False
+
+        logger.info("success!" if ok else "failure.")
+        self._clean()
+        return ok
+
+
+    # -- High-level methods --------------------------------------------
+    # (these are the ones that are actually likely to be useful
+    # when implementing a real-world config command!)
+
+    def check_func(self, func, headers=None, include_dirs=None,
+                   libraries=None, library_dirs=None, decl=False, call=False):
+        """Determine if function 'func' is available by constructing a
+        source file that refers to 'func', and compiles and links it.
+        If everything succeeds, returns true; otherwise returns false.
+
+        The constructed source file starts out by including the header
+        files listed in 'headers'.  If 'decl' is true, it then declares
+        'func' (as "int func()"); you probably shouldn't supply 'headers'
+        and set 'decl' true in the same call, or you might get errors about
+        conflicting declarations for 'func'.  Finally, the constructed
+        'main()' function either references 'func' or (if 'call' is true)
+        calls it.  'libraries' and 'library_dirs' are used when
+        linking.
+        """
+
+        self._check_compiler()
+        body = []
+        if decl:
+            body.append("int %s ();" % func)
+        body.append("int main () {")
+        if call:
+            body.append("  %s();" % func)
+        else:
+            body.append("  %s;" % func)
+        body.append("}")
+        body = "\n".join(body) + "\n"
+
+        return self.try_link(body, headers, include_dirs,
+                             libraries, library_dirs)
+
+    def check_lib(self, library, library_dirs=None, headers=None,
+                  include_dirs=None, other_libraries=[]):
+        """Determine if 'library' is available to be linked against,
+        without actually checking that any particular symbols are provided
+        by it.  'headers' will be used in constructing the source file to
+        be compiled, but the only effect of this is to check if all the
+        header files listed are available.  Any libraries listed in
+        'other_libraries' will be included in the link, in case 'library'
+        has symbols that depend on other libraries.
+        """
+        self._check_compiler()
+        return self.try_link("int main (void) { }",
+                             headers, include_dirs,
+                             [library] + other_libraries, library_dirs)
+
+    def check_header(self, header, include_dirs=None, library_dirs=None,
+                     lang="c"):
+        """Determine if the system header file named by 'header_file'
+        exists and can be found by the preprocessor; return true if so,
+        false otherwise.
+        """
+        return self.try_cpp(body="/* No body */", headers=[header],
+                            include_dirs=include_dirs)
+
+
+def dump_file(filename, head=None):
+    """Dumps a file content into log.info.
+
+    If head is not None, will be dumped before the file content.
+    """
+    if head is None:
+        logger.info(filename)
+    else:
+        logger.info(head)
+    with open(filename) as file:
+        logger.info(file.read())
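A sketch of how a project-specific subclass might use the high-level probes above (the zlib checks are hypothetical):

    from packaging import logger
    from packaging.command.config import config

    class my_config(config):
        # hypothetical: probe for zlib before deciding what to build

        def run(self):
            if not self.check_header('zlib.h'):
                logger.warning('zlib.h not found, compression disabled')
            elif not self.check_lib('z'):
                logger.warning('cannot link with -lz, compression disabled')
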
diff --git a/Lib/packaging/command/install_data.py b/Lib/packaging/command/install_data.py
new file mode 100644
index 0000000..9ca6279
--- /dev/null
+++ b/Lib/packaging/command/install_data.py
@@ -0,0 +1,79 @@
+"""Install platform-independent data files."""
+
+# Contributed by Bastian Kleineidam
+
+import os
+from shutil import Error
+from sysconfig import get_paths, format_value
+from packaging import logger
+from packaging.util import convert_path
+from packaging.command.cmd import Command
+
+
+class install_data(Command):
+
+    description = "install platform-independent data files"
+
+    user_options = [
+        ('install-dir=', 'd',
+         "base directory for installing data files "
+         "(default: installation base dir)"),
+        ('root=', None,
+         "install everything relative to this alternate root directory"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ]
+
+    boolean_options = ['force']
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.outfiles = []
+        self.data_files_out = []
+        self.root = None
+        self.force = False
+        self.data_files = self.distribution.data_files
+        self.warn_dir = True
+
+    def finalize_options(self):
+        self.set_undefined_options('install_dist',
+                                   ('install_data', 'install_dir'),
+                                   'root', 'force')
+
+    def run(self):
+        self.mkpath(self.install_dir)
+        for source, destination in self.data_files.items():
+            destination = convert_path(self.expand_categories(destination))
+            dir_dest = os.path.abspath(os.path.dirname(destination))
+
+            self.mkpath(dir_dest)
+            try:
+                out = self.copy_file(source, dir_dest)[0]
+            except Error as e:
+                logger.warning('%s: %s', self.get_command_name(), e)
+                out = destination
+
+            self.outfiles.append(out)
+            self.data_files_out.append((source, destination))
+
+    def expand_categories(self, path_with_categories):
+        local_vars = get_paths()
+        local_vars['distribution.name'] = self.distribution.metadata['Name']
+        expanded_path = format_value(path_with_categories, local_vars)
+        expanded_path = format_value(expanded_path, local_vars)
+        if '{' in expanded_path and '}' in expanded_path:
+            logger.warning(
+                '%s: unable to expand %s, some categories may be missing',
+                self.get_command_name(), path_with_categories)
+        return expanded_path
+
+    def get_source_files(self):
+        return list(self.data_files)
+
+    def get_inputs(self):
+        return list(self.data_files)
+
+    def get_outputs(self):
+        return self.outfiles
+
+    def get_resources_out(self):
+        return self.data_files_out
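To illustrate expand_categories, assuming the format_value helper this patch imports from sysconfig substitutes {category} placeholders from the given mapping (the project name and path below are made up):

    from sysconfig import get_paths, format_value

    local_vars = get_paths()                  # maps 'data', 'purelib', etc.
    local_vars['distribution.name'] = 'spam'  # hypothetical project name

    # a data_files destination like this expands to a concrete path,
    # e.g. under /usr/local/share depending on the active install scheme
    print(format_value('{data}/{distribution.name}/spam.cfg', local_vars))
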
diff --git a/Lib/packaging/command/install_dist.py b/Lib/packaging/command/install_dist.py
new file mode 100644
index 0000000..dfe6df2
--- /dev/null
+++ b/Lib/packaging/command/install_dist.py
@@ -0,0 +1,625 @@
+"""Main install command, which calls the other install_* commands."""
+
+import sys
+import os
+
+import sysconfig
+from sysconfig import get_config_vars, get_paths, get_path, get_config_var
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingPlatformError
+from packaging.util import write_file
+from packaging.util import convert_path, change_root, get_platform
+from packaging.errors import PackagingOptionError
+
+
+HAS_USER_SITE = True
+
+
+class install_dist(Command):
+
+    description = "install everything from build directory"
+
+    user_options = [
+        # Select installation scheme and set base director(y|ies)
+        ('prefix=', None,
+         "installation prefix"),
+        ('exec-prefix=', None,
+         "(Unix only) prefix for platform-specific files"),
+        ('home=', None,
+         "(Unix only) home directory to install under"),
+
+        # Or just set the base director(y|ies)
+        ('install-base=', None,
+         "base installation directory (instead of --prefix or --home)"),
+        ('install-platbase=', None,
+         "base installation directory for platform-specific files " +
+         "(instead of --exec-prefix or --home)"),
+        ('root=', None,
+         "install everything relative to this alternate root directory"),
+
+        # Or explicitly set the installation scheme
+        ('install-purelib=', None,
+         "installation directory for pure Python module distributions"),
+        ('install-platlib=', None,
+         "installation directory for non-pure module distributions"),
+        ('install-lib=', None,
+         "installation directory for all module distributions " +
+         "(overrides --install-purelib and --install-platlib)"),
+
+        ('install-headers=', None,
+         "installation directory for C/C++ headers"),
+        ('install-scripts=', None,
+         "installation directory for Python scripts"),
+        ('install-data=', None,
+         "installation directory for data files"),
+
+        # Byte-compilation options -- see install_lib.py for details, as
+        # these are duplicated from there (but only install_lib does
+        # anything with them).
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        ('optimize=', 'O',
+         'also compile with optimization: -O1 for "python -O", '
+         '-O2 for "python -OO", and -O0 to disable [default: -O0]'),
+
+        # Miscellaneous control options
+        ('force', 'f',
+         "force installation (overwrite any existing files)"),
+        ('skip-build', None,
+         "skip rebuilding everything (for testing/debugging)"),
+
+        # Where to install documentation (eventually!)
+        #('doc-format=', None, "format of documentation to generate"),
+        #('install-man=', None, "directory for Unix man pages"),
+        #('install-html=', None, "directory for HTML documentation"),
+        #('install-info=', None, "directory for GNU info files"),
+
+        # XXX use a name that makes clear this is the old format
+        ('record=', None,
+         "filename in which to record a list of installed files "
+         "(not PEP 376-compliant)"),
+        ('resources=', None,
+         "data files mapping"),
+
+        # .dist-info related arguments, read by install_dist_info
+        ('no-distinfo', None,
+         "do not create a .dist-info directory"),
+        ('installer=', None,
+         "the name of the installer"),
+        ('requested', None,
+         "generate a REQUESTED file (i.e."),
+        ('no-requested', None,
+         "do not generate a REQUESTED file"),
+        ('no-record', None,
+         "do not generate a RECORD file"),
+        ]
+
+    boolean_options = ['compile', 'force', 'skip-build', 'no-distinfo',
+                       'requested', 'no-record']
+
+    if HAS_USER_SITE:
+        user_options.append(
+            ('user', None,
+             "install in user site-packages directory [%s]" %
+             get_path('purelib', '%s_user' % os.name)))
+
+        boolean_options.append('user')
+
+    negative_opt = {'no-compile': 'compile', 'no-requested': 'requested'}
+
+    def initialize_options(self):
+        # High-level options: these select both an installation base
+        # and scheme.
+        self.prefix = None
+        self.exec_prefix = None
+        self.home = None
+        if HAS_USER_SITE:
+            self.user = False
+
+        # These select only the installation base; it's up to the user to
+        # specify the installation scheme (currently, that means supplying
+        # the --install-{platlib,purelib,scripts,data} options).
+        self.install_base = None
+        self.install_platbase = None
+        self.root = None
+
+        # These options are the actual installation directories; if not
+        # supplied by the user, they are filled in using the installation
+        # scheme implied by prefix/exec-prefix/home and the contents of
+        # that installation scheme.
+        self.install_purelib = None     # for pure module distributions
+        self.install_platlib = None     # non-pure (dists w/ extensions)
+        self.install_headers = None     # for C/C++ headers
+        self.install_lib = None         # set to either purelib or platlib
+        self.install_scripts = None
+        self.install_data = None
+        if HAS_USER_SITE:
+            self.install_userbase = get_config_var('userbase')
+            self.install_usersite = get_path('purelib', '%s_user' % os.name)
+
+        self.compile = None
+        self.optimize = None
+
+        # These two are for putting non-packagized distributions into their
+        # own directory and creating a .pth file if it makes sense.
+        # 'extra_path' comes from the setup file; 'install_path_file' can
+        # be turned off if it makes no sense to install a .pth file.  (But
+        # better to install it uselessly than to guess wrong and not
+        # install it when it's necessary and would be used!)  Currently,
+        # 'install_path_file' is always true unless some outsider meddles
+        # with it.
+        self.extra_path = None
+        self.install_path_file = True
+
+        # 'force' forces installation, even if target files are not
+        # out-of-date.  'skip_build' skips running the "build" command,
+        # handy if you know it's not necessary.  'warn_dir' (which is *not*
+        # a user option, it's just there so the bdist_* commands can turn
+        # it off) determines whether we warn about installing to a
+        # directory not in sys.path.
+        self.force = False
+        self.skip_build = False
+        self.warn_dir = True
+
+        # These are only here as a conduit from the 'build' command to the
+        # 'install_*' commands that do the real work.  ('build_base' isn't
+        # actually used anywhere, but it might be useful in future.)  They
+        # are not user options, because if the user told the install
+        # command where the build directory is, that wouldn't affect the
+        # build command.
+        self.build_base = None
+        self.build_lib = None
+
+        # Not defined yet because we don't know anything about
+        # documentation yet.
+        #self.install_man = None
+        #self.install_html = None
+        #self.install_info = None
+
+        self.record = None
+        self.resources = None
+
+        # .dist-info related options
+        self.no_distinfo = None
+        self.installer = None
+        self.requested = None
+        self.no_record = None
+        self.no_resources = None
+
+    # -- Option finalizing methods -------------------------------------
+    # (This is rather more involved than for most commands,
+    # because this is where the policy for installing third-
+    # party Python modules on various platforms given a wide
+    # array of user input is decided.  Yes, it's quite complex!)
+
+    def finalize_options(self):
+        # This method (and its pliant slaves, like 'finalize_unix()',
+        # 'finalize_other()', and 'select_scheme()') is where the default
+        # installation directories for modules, extension modules, and
+        # anything else we care to install from a Python module
+        # distribution are determined.  Thus, this code makes a pretty
+        # important policy statement about how third-party stuff is added
+        # to a Python installation!  Note that the actual work of
+        # installation is done by the relatively simple 'install_*'
+        # commands; they just take their orders from the installation
+        # directory options determined here.
+
+        # Check for errors/inconsistencies in the options; first, stuff
+        # that's wrong on any platform.
+
+        if ((self.prefix or self.exec_prefix or self.home) and
+            (self.install_base or self.install_platbase)):
+            raise PackagingOptionError(
+                "must supply either prefix/exec-prefix/home or "
+                "install-base/install-platbase -- not both")
+
+        if self.home and (self.prefix or self.exec_prefix):
+            raise PackagingOptionError(
+                "must supply either home or prefix/exec-prefix -- not both")
+
+        if HAS_USER_SITE and self.user and (
+                self.prefix or self.exec_prefix or self.home or
+                self.install_base or self.install_platbase):
+            raise PackagingOptionError(
+                "can't combine user with prefix/exec_prefix/home or "
+                "install_base/install_platbase")
+
+        # Next, stuff that's wrong (or dubious) only on certain platforms.
+        if os.name != "posix":
+            if self.exec_prefix:
+                logger.warning(
+                    '%s: exec-prefix option ignored on this platform',
+                    self.get_command_name())
+                self.exec_prefix = None
+
+        # Now the interesting logic -- so interesting that we farm it out
+        # to other methods.  The goal of these methods is to set the final
+        # values for the install_{lib,scripts,data,...}  options, using as
+        # input a heady brew of prefix, exec_prefix, home, install_base,
+        # install_platbase, user-supplied versions of
+        # install_{purelib,platlib,lib,scripts,data,...}, and the
+        # INSTALL_SCHEME dictionary above.  Phew!
+
+        self.dump_dirs("pre-finalize_{unix,other}")
+
+        if os.name == 'posix':
+            self.finalize_unix()
+        else:
+            self.finalize_other()
+
+        self.dump_dirs("post-finalize_{unix,other}()")
+
+        # Expand configuration variables, tilde, etc. in self.install_base
+        # and self.install_platbase -- that way, we can use $base or
+        # $platbase in the other installation directories and not worry
+        # about needing recursive variable expansion (shudder).
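+        # For example, a user-supplied --install-data value of
+        # '$base/share/spam' (hypothetical) would become
+        # '/usr/local/share/spam' once install_base is expanded to
+        # '/usr/local'.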
+
+        py_version = sys.version.split()[0]
+        prefix, exec_prefix, srcdir, projectbase = get_config_vars(
+            'prefix', 'exec_prefix', 'srcdir', 'projectbase')
+
+        metadata = self.distribution.metadata
+        self.config_vars = {
+            'dist_name': metadata['Name'],
+            'dist_version': metadata['Version'],
+            'dist_fullname': metadata.get_fullname(),
+            'py_version': py_version,
+            'py_version_short': py_version[:3],    # e.g. '3.2'
+            'py_version_nodot': py_version[:3:2],  # e.g. '32'
+            'sys_prefix': prefix,
+            'prefix': prefix,
+            'sys_exec_prefix': exec_prefix,
+            'exec_prefix': exec_prefix,
+            'srcdir': srcdir,
+            'projectbase': projectbase,
+            }
+
+        if HAS_USER_SITE:
+            self.config_vars['userbase'] = self.install_userbase
+            self.config_vars['usersite'] = self.install_usersite
+
+        self.expand_basedirs()
+
+        self.dump_dirs("post-expand_basedirs()")
+
+        # Now define config vars for the base directories so we can expand
+        # everything else.
+        self.config_vars['base'] = self.install_base
+        self.config_vars['platbase'] = self.install_platbase
+
+        # Expand "~" and configuration variables in the installation
+        # directories.
+        self.expand_dirs()
+
+        self.dump_dirs("post-expand_dirs()")
+
+        # Create directories in the home dir:
+        if HAS_USER_SITE and self.user:
+            self.create_home_path()
+
+        # Pick the actual directory to install all modules to: either
+        # install_purelib or install_platlib, depending on whether this
+        # module distribution is pure or not.  Of course, if the user
+        # already specified install_lib, use their selection.
+        if self.install_lib is None:
+            if self.distribution.ext_modules:  # has extensions: non-pure
+                self.install_lib = self.install_platlib
+            else:
+                self.install_lib = self.install_purelib
+
+        # Convert directories from Unix /-separated syntax to the local
+        # convention.
+        self.convert_paths('lib', 'purelib', 'platlib',
+                           'scripts', 'data', 'headers')
+        if HAS_USER_SITE:
+            self.convert_paths('userbase', 'usersite')
+
+        # Well, we're not actually fully completely finalized yet: we still
+        # have to deal with 'extra_path', which is the hack for allowing
+        # non-packagized module distributions (hello, Numerical Python!) to
+        # get their own directories.
+        self.handle_extra_path()
+        self.install_libbase = self.install_lib  # needed for .pth file
+        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
+
+        # If a new root directory was supplied, make all the installation
+        # dirs relative to it.
+        if self.root is not None:
+            self.change_roots('libbase', 'lib', 'purelib', 'platlib',
+                              'scripts', 'data', 'headers')
+
+        self.dump_dirs("after prepending root")
+
+        # Find out the build directories, i.e. where to install from.
+        self.set_undefined_options('build', 'build_base', 'build_lib')
+
+        # Punt on doc directories for now -- after all, we're punting on
+        # documentation completely!
+
+        if self.no_distinfo is None:
+            self.no_distinfo = False
+
+    def finalize_unix(self):
+        """Finalize options for posix platforms."""
+        if self.install_base is not None or self.install_platbase is not None:
+            if ((self.install_lib is None and
+                 self.install_purelib is None and
+                 self.install_platlib is None) or
+                self.install_headers is None or
+                self.install_scripts is None or
+                self.install_data is None):
+                raise PackagingOptionError(
+                    "install-base or install-platbase supplied, but "
+                    "installation scheme is incomplete")
+            return
+
+        if HAS_USER_SITE and self.user:
+            if self.install_userbase is None:
+                raise PackagingPlatformError(
+                    "user base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme("posix_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("posix_home")
+        else:
+            if self.prefix is None:
+                if self.exec_prefix is not None:
+                    raise PackagingOptionError(
+                        "must not supply exec-prefix without prefix")
+
+                self.prefix = os.path.normpath(sys.prefix)
+                self.exec_prefix = os.path.normpath(sys.exec_prefix)
+
+            else:
+                if self.exec_prefix is None:
+                    self.exec_prefix = self.prefix
+
+            self.install_base = self.prefix
+            self.install_platbase = self.exec_prefix
+            self.select_scheme("posix_prefix")
+
+    def finalize_other(self):
+        """Finalize options for non-posix platforms"""
+        if HAS_USER_SITE and self.user:
+            if self.install_userbase is None:
+                raise PackagingPlatformError(
+                    "user base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme(os.name + "_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("posix_home")
+        else:
+            if self.prefix is None:
+                self.prefix = os.path.normpath(sys.prefix)
+
+            self.install_base = self.install_platbase = self.prefix
+            try:
+                self.select_scheme(os.name)
+            except KeyError:
+                raise PackagingPlatformError(
+                    "no support for installation on '%s'" % os.name)
+
+    def dump_dirs(self, msg):
+        """Dump the list of user options."""
+        logger.debug(msg + ":")
+        for opt in self.user_options:
+            opt_name = opt[0]
+            if opt_name[-1] == "=":
+                opt_name = opt_name[0:-1]
+            if opt_name in self.negative_opt:
+                opt_name = self.negative_opt[opt_name]
+                opt_name = opt_name.replace('-', '_')
+                val = not getattr(self, opt_name)
+            else:
+                opt_name = opt_name.replace('-', '_')
+                val = getattr(self, opt_name)
+            logger.debug("  %s: %s", opt_name, val)
+
+    def select_scheme(self, name):
+        """Set the install directories by applying the install schemes."""
+        # it's the caller's problem if they supply a bad name!
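+        # get_paths(..., expand=False) returns the raw path templates; for
+        # the 'posix_prefix' scheme, for instance, the 'purelib' entry is
+        # something like '{base}/lib/python{py_version_short}/site-packages'.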
+        scheme = get_paths(name, expand=False)
+        for key, value in scheme.items():
+            if key == 'platinclude':
+                key = 'headers'
+                value = os.path.join(value, self.distribution.metadata['Name'])
+            attrname = 'install_' + key
+            if hasattr(self, attrname):
+                if getattr(self, attrname) is None:
+                    setattr(self, attrname, value)
+
+    def _expand_attrs(self, attrs):
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                if os.name == 'posix' or os.name == 'nt':
+                    val = os.path.expanduser(val)
+                # XXX see if we want to push this work into sysconfig
+                val = sysconfig._subst_vars(val, self.config_vars)
+                setattr(self, attr, val)
+
+    def expand_basedirs(self):
+        """Call `os.path.expanduser` on install_{base,platbase} and root."""
+        self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+    def expand_dirs(self):
+        """Call `os.path.expanduser` on install dirs."""
+        self._expand_attrs(['install_purelib', 'install_platlib',
+                            'install_lib', 'install_headers',
+                            'install_scripts', 'install_data'])
+
+    def convert_paths(self, *names):
+        """Call `convert_path` over `names`."""
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, convert_path(getattr(self, attr)))
+
+    def handle_extra_path(self):
+        """Set `path_file` and `extra_dirs` using `extra_path`."""
+        if self.extra_path is None:
+            self.extra_path = self.distribution.extra_path
+
+        if self.extra_path is not None:
+            if isinstance(self.extra_path, str):
+                self.extra_path = self.extra_path.split(',')
+
+            if len(self.extra_path) == 1:
+                path_file = extra_dirs = self.extra_path[0]
+            elif len(self.extra_path) == 2:
+                path_file, extra_dirs = self.extra_path
+            else:
+                raise PackagingOptionError(
+                    "'extra_path' option must be a list, tuple, or "
+                    "comma-separated string with 1 or 2 elements")
+
+            # convert to local form in case Unix notation used (as it
+            # should be in setup scripts)
+            extra_dirs = convert_path(extra_dirs)
+        else:
+            path_file = None
+            extra_dirs = ''
+
+        # XXX should we warn if path_file and not extra_dirs? (in which
+        # case the path file would be harmless but pointless)
+        self.path_file = path_file
+        self.extra_dirs = extra_dirs
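+        # For example, extra_path = 'Numeric' (one element) installs the
+        # modules under <install_lib>/Numeric and writes a Numeric.pth file
+        # into install_libbase so the extra directory still ends up on
+        # sys.path.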
+
+    def change_roots(self, *names):
+        """Change the install direcories pointed by name using root."""
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, change_root(self.root, getattr(self, attr)))
+
+    def create_home_path(self):
+        """Create directories under ~."""
+        if HAS_USER_SITE and not self.user:
+            return
+        home = convert_path(os.path.expanduser("~"))
+        for name, path in self.config_vars.items():
+            if path.startswith(home) and not os.path.isdir(path):
+                os.makedirs(path, 0o700)
+
+    # -- Command execution methods -------------------------------------
+
+    def run(self):
+        """Runs the command."""
+        # Obviously have to build before we can install
+        if not self.skip_build:
+            self.run_command('build')
+            # If we built for any other platform, we can't install.
+            build_plat = self.distribution.get_command_obj('build').plat_name
+            # check warn_dir - it is a clue that the 'install_dist' is happening
+            # internally, and not to sys.path, so we don't check the platform
+            # matches what we are running.
+            if self.warn_dir and build_plat != get_platform():
+                raise PackagingPlatformError("Can't install when "
+                                             "cross-compiling")
+
+        # Run all sub-commands (at least those that need to be run)
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        if self.path_file:
+            self.create_path_file()
+
+        # write list of installed files, if requested.
+        if self.record:
+            outputs = self.get_outputs()
+            if self.root:               # strip any package prefix
+                root_len = len(self.root)
+                outputs = [output[root_len:] for output in outputs]
+            self.execute(write_file,
+                         (self.record, outputs),
+                         "writing list of installed files to '%s'" %
+                         self.record)
+
+        normpath, normcase = os.path.normpath, os.path.normcase
+        sys_path = [normcase(normpath(p)) for p in sys.path]
+        install_lib = normcase(normpath(self.install_lib))
+        if (self.warn_dir and
+            not (self.path_file and self.install_path_file) and
+            install_lib not in sys_path):
+            logger.debug(("modules installed to '%s', which is not in "
+                          "Python's module search path (sys.path) -- "
+                          "you'll have to change the search path yourself"),
+                         self.install_lib)
+
+    def create_path_file(self):
+        """Creates the .pth file"""
+        filename = os.path.join(self.install_libbase,
+                                self.path_file + ".pth")
+        if self.install_path_file:
+            self.execute(write_file,
+                         (filename, [self.extra_dirs]),
+                         "creating %s" % filename)
+        else:
+            logger.warning('%s: path file %r not created',
+                           self.get_command_name(), filename)
+
+    # -- Reporting methods ---------------------------------------------
+
+    def get_outputs(self):
+        """Assembles the outputs of all the sub-commands."""
+        outputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            # Add the contents of cmd.get_outputs(), ensuring
+            # that outputs doesn't contain duplicate entries
+            for filename in cmd.get_outputs():
+                if filename not in outputs:
+                    outputs.append(filename)
+
+        if self.path_file and self.install_path_file:
+            outputs.append(os.path.join(self.install_libbase,
+                                        self.path_file + ".pth"))
+
+        return outputs
+
+    def get_inputs(self):
+        """Returns the inputs of all the sub-commands"""
+        # XXX gee, this looks familiar ;-(
+        inputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            inputs.extend(cmd.get_inputs())
+
+        return inputs
+
+    # -- Predicates for sub-command list -------------------------------
+
+    def has_lib(self):
+        """Returns true if the current distribution has any Python
+        modules to install."""
+        return (self.distribution.has_pure_modules() or
+                self.distribution.has_ext_modules())
+
+    def has_headers(self):
+        """Returns true if the current distribution has any headers to
+        install."""
+        return self.distribution.has_headers()
+
+    def has_scripts(self):
+        """Returns true if the current distribution has any scripts to.
+        install."""
+        return self.distribution.has_scripts()
+
+    def has_data(self):
+        """Returns true if the current distribution has any data to.
+        install."""
+        return self.distribution.has_data_files()
+
+    # 'sub_commands': a list of commands this command might have to run to
+    # get its work done.  See cmd.py for more info.
+    sub_commands = [('install_lib', has_lib),
+                    ('install_headers', has_headers),
+                    ('install_scripts', has_scripts),
+                    ('install_data', has_data),
+                    # keep install_distinfo last, as it needs the record
+                    # with files to be completely generated
+                    ('install_distinfo', lambda self: not self.no_distinfo),
+                   ]
diff --git a/Lib/packaging/command/install_distinfo.py b/Lib/packaging/command/install_distinfo.py
new file mode 100644
index 0000000..3390a1f
--- /dev/null
+++ b/Lib/packaging/command/install_distinfo.py
@@ -0,0 +1,175 @@
+"""Create the PEP 376-compliant .dist-info directory."""
+
+# Forked from the former install_egg_info command by Josip Djolonga
+
+import csv
+import os
+import re
+import hashlib
+
+from packaging.command.cmd import Command
+from packaging import logger
+from shutil import rmtree
+
+
+class install_distinfo(Command):
+
+    description = 'create a .dist-info directory for the distribution'
+
+    user_options = [
+        ('distinfo-dir=', None,
+         "directory where the the .dist-info directory will be installed"),
+        ('installer=', None,
+         "the name of the installer"),
+        ('requested', None,
+         "generate a REQUESTED file"),
+        ('no-requested', None,
+         "do not generate a REQUESTED file"),
+        ('no-record', None,
+         "do not generate a RECORD file"),
+        ('no-resources', None,
+         "do not generate a RESSOURCES list installed file")
+    ]
+
+    boolean_options = ['requested', 'no-record', 'no-resources']
+
+    negative_opt = {'no-requested': 'requested'}
+
+    def initialize_options(self):
+        self.distinfo_dir = None
+        self.installer = None
+        self.requested = None
+        self.no_record = None
+        self.no_resources = None
+
+    def finalize_options(self):
+        self.set_undefined_options('install_dist',
+                                   'installer', 'requested', 'no_record')
+
+        self.set_undefined_options('install_lib',
+                                   ('install_dir', 'distinfo_dir'))
+
+        if self.installer is None:
+            # FIXME distutils or packaging?
+            # + document default in the option help text above and in install
+            self.installer = 'distutils'
+        if self.requested is None:
+            self.requested = True
+        if self.no_record is None:
+            self.no_record = False
+        if self.no_resources is None:
+            self.no_resources = False
+
+        metadata = self.distribution.metadata
+
+        basename = "%s-%s.dist-info" % (
+            to_filename(safe_name(metadata['Name'])),
+            to_filename(safe_version(metadata['Version'])))
+
+        self.distinfo_dir = os.path.join(self.distinfo_dir, basename)
+        self.outputs = []
+
+    def run(self):
+        # FIXME dry-run should be used at a finer level, so that people get
+        # useful logging output and can have an idea of what the command would
+        # have done
+        if not self.dry_run:
+            target = self.distinfo_dir
+
+            if os.path.isdir(target) and not os.path.islink(target):
+                rmtree(target)
+            elif os.path.exists(target):
+                self.execute(os.unlink, (self.distinfo_dir,),
+                             "removing " + target)
+
+            self.execute(os.makedirs, (target,), "creating " + target)
+
+            metadata_path = os.path.join(self.distinfo_dir, 'METADATA')
+            logger.info('creating %s', metadata_path)
+            self.distribution.metadata.write(metadata_path)
+            self.outputs.append(metadata_path)
+
+            installer_path = os.path.join(self.distinfo_dir, 'INSTALLER')
+            logger.info('creating %s', installer_path)
+            with open(installer_path, 'w') as f:
+                f.write(self.installer)
+            self.outputs.append(installer_path)
+
+            if self.requested:
+                requested_path = os.path.join(self.distinfo_dir, 'REQUESTED')
+                logger.info('creating %s', requested_path)
+                open(requested_path, 'wb').close()
+                self.outputs.append(requested_path)
+
+            if not self.no_resources:
+                install_data = self.get_finalized_command('install_data')
+                if install_data.get_resources_out() != []:
+                    resources_path = os.path.join(self.distinfo_dir,
+                                                  'RESOURCES')
+                    logger.info('creating %s', resources_path)
+                    # csv.writer needs a text-mode file in Python 3
+                    with open(resources_path, 'w', encoding='utf-8') as f:
+                        writer = csv.writer(f, delimiter=',',
+                                            lineterminator='\n',
+                                            quotechar='"')
+                        for row in install_data.get_resources_out():
+                            writer.writerow(row)
+
+                    self.outputs.append(resources_path)
+
+            if not self.no_record:
+                record_path = os.path.join(self.distinfo_dir, 'RECORD')
+                logger.info('creating %s', record_path)
+                with open(record_path, 'w', encoding='utf-8') as f:
+                    writer = csv.writer(f, delimiter=',',
+                                        lineterminator='\n',
+                                        quotechar='"')
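+                    # each row is "path,md5,size"; for an empty file a row
+                    # would look like (hypothetical path):
+                    # /usr/lib/python3.2/site-packages/spam.py,d41d8cd98f00b204e9800998ecf8427e,0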
+
+                    install = self.get_finalized_command('install_dist')
+
+                    for fpath in install.get_outputs():
+                        if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
+                            # do not put size and md5 hash, as in PEP-376
+                            writer.writerow((fpath, '', ''))
+                        else:
+                            size = os.path.getsize(fpath)
+                            with open(fpath, 'rb') as fp:
+                                digest = hashlib.md5(fp.read())
+                            md5sum = digest.hexdigest()
+                            writer.writerow((fpath, md5sum, size))
+
+                    # add the RECORD file itself
+                    writer.writerow((record_path, '', ''))
+                    self.outputs.append(record_path)
+
+    def get_outputs(self):
+        return self.outputs
+
+
+# The following functions are taken from setuptools' pkg_resources module.
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
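+
+    For example, safe_name('spam & eggs') gives 'spam-eggs'.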
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """Convert an arbitrary string to a standard version string
+
+    Spaces become dots, and all other non-alphanumeric characters become
+    dashes, with runs of multiple dashes condensed to a single dash.
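+
+    For example, safe_version('2.2 beta') gives '2.2.beta'.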
+    """
+    version = version.replace(' ', '.')
+    return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
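+
+    For example, to_filename('spam-eggs') gives 'spam_eggs'.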
+    """
+    return name.replace('-', '_')
diff --git a/Lib/packaging/command/install_headers.py b/Lib/packaging/command/install_headers.py
new file mode 100644
index 0000000..e043d6b
--- /dev/null
+++ b/Lib/packaging/command/install_headers.py
@@ -0,0 +1,43 @@
+"""Install C/C++ header files to the Python include directory."""
+
+from packaging.command.cmd import Command
+
+
+# XXX force is never used
+class install_headers(Command):
+
+    description = "install C/C++ header files"
+
+    user_options = [('install-dir=', 'd',
+                     "directory to install header files to"),
+                    ('force', 'f',
+                     "force installation (overwrite existing files)"),
+                   ]
+
+    boolean_options = ['force']
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.force = False
+        self.outfiles = []
+
+    def finalize_options(self):
+        self.set_undefined_options('install_dist',
+                                   ('install_headers', 'install_dir'),
+                                   'force')
+
+    def run(self):
+        headers = self.distribution.headers
+        if not headers:
+            return
+
+        self.mkpath(self.install_dir)
+        for header in headers:
+            out = self.copy_file(header, self.install_dir)[0]
+            self.outfiles.append(out)
+
+    def get_inputs(self):
+        return self.distribution.headers or []
+
+    def get_outputs(self):
+        return self.outfiles
diff --git a/Lib/packaging/command/install_lib.py b/Lib/packaging/command/install_lib.py
new file mode 100644
index 0000000..5ff9cee
--- /dev/null
+++ b/Lib/packaging/command/install_lib.py
@@ -0,0 +1,222 @@
+"""Install all modules (extensions and pure Python)."""
+
+import os
+import sys
+import logging
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError
+
+
+# Extension for Python source files.
+if hasattr(os, 'extsep'):
+    PYTHON_SOURCE_EXTENSION = os.extsep + "py"
+else:
+    PYTHON_SOURCE_EXTENSION = ".py"
+
+class install_lib(Command):
+
+    description = "install all modules (extensions and pure Python)"
+
+    # The byte-compilation options are a tad confusing.  Here are the
+    # possible scenarios:
+    #   1) no compilation at all (--no-compile --no-optimize)
+    #   2) compile .pyc only (--compile --no-optimize; default)
+    #   3) compile .pyc and "level 1" .pyo (--compile --optimize)
+    #   4) compile "level 1" .pyo only (--no-compile --optimize)
+    #   5) compile .pyc and "level 2" .pyo (--compile --optimize-more)
+    #   6) compile "level 2" .pyo only (--no-compile --optimize-more)
+    #
+    # The UI for this is two options, 'compile' and 'optimize'.
+    # 'compile' is strictly boolean, and only decides whether to
+    # generate .pyc files.  'optimize' is three-way (0, 1, or 2), and
+    # decides both whether to generate .pyo files and what level of
+    # optimization to use.
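+    #
+    # For instance, a hypothetical invocation such as
+    #   pysetup run install_lib --no-compile --optimize=1
+    # corresponds to scenario 4: only "level 1" .pyo files are generated.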
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+        ('build-dir=', 'b', "build directory (where to install from)"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+        ('skip-build', None, "skip the build steps"),
+        ]
+
+    boolean_options = ['force', 'compile', 'skip-build']
+    negative_opt = {'no-compile' : 'compile'}
+
+    def initialize_options(self):
+        # let the 'install_dist' command dictate our installation directory
+        self.install_dir = None
+        self.build_dir = None
+        self.force = False
+        self.compile = None
+        self.optimize = None
+        self.skip_build = None
+
+    def finalize_options(self):
+        # Get all the information we need to install pure Python modules
+        # from the umbrella 'install_dist' command -- build (source) directory,
+        # install (target) directory, and whether to compile .py files.
+        self.set_undefined_options('install_dist',
+                                   ('build_lib', 'build_dir'),
+                                   ('install_lib', 'install_dir'),
+                                   'force', 'compile', 'optimize', 'skip_build')
+
+        if self.compile is None:
+            self.compile = True
+        if self.optimize is None:
+            self.optimize = 0
+
+        if not isinstance(self.optimize, int):
+            try:
+                self.optimize = int(self.optimize)
+            except ValueError:
+                raise PackagingOptionError("optimize must be 0, 1, or 2")
+        if self.optimize not in (0, 1, 2):
+            raise PackagingOptionError("optimize must be 0, 1, or 2")
+
+    def run(self):
+        # Make sure we have built everything we need first
+        self.build()
+
+        # Install everything: simply dump the entire contents of the build
+        # directory to the installation directory (that's the beauty of
+        # having a build directory!)
+        outfiles = self.install()
+
+        # (Optionally) compile .py to .pyc
+        if outfiles is not None and self.distribution.has_pure_modules():
+            self.byte_compile(outfiles)
+
+    # -- Top-level worker functions ------------------------------------
+    # (called from 'run()')
+
+    def build(self):
+        if not self.skip_build:
+            if self.distribution.has_pure_modules():
+                self.run_command('build_py')
+            if self.distribution.has_ext_modules():
+                self.run_command('build_ext')
+
+    def install(self):
+        if os.path.isdir(self.build_dir):
+            outfiles = self.copy_tree(self.build_dir, self.install_dir)
+        else:
+            logger.warning(
+                '%s: %r does not exist -- no Python modules to install',
+                self.get_command_name(), self.build_dir)
+            return
+        return outfiles
+
+    def byte_compile(self, files):
+        if sys.dont_write_bytecode:
+            # XXX do we want this?  a Python running without bytecode
+            # doesn't mean that the *dists should not contain bytecode
+            # --or does it?
+            logger.warning('%s: byte-compiling is disabled, skipping.',
+                           self.get_command_name())
+            return
+
+        from packaging.util import byte_compile
+
+        # Get the "--root" directory supplied to the "install_dist" command,
+        # and use it as a prefix to strip off the purported filename
+        # encoded in bytecode files.  This is far from complete, but it
+        # should at least generate usable bytecode in RPM distributions.
+        install_root = self.get_finalized_command('install_dist').root
+
+        # Temporary kludge until we remove the verbose arguments and use
+        # logging everywhere
+        verbose = logger.getEffectiveLevel() <= logging.DEBUG
+
+        if self.compile:
+            byte_compile(files, optimize=0,
+                         force=self.force, prefix=install_root,
+                         dry_run=self.dry_run)
+        if self.optimize > 0:
+            byte_compile(files, optimize=self.optimize,
+                         force=self.force, prefix=install_root,
+                         verbose=verbose,
+                         dry_run=self.dry_run)
+
+
+    # -- Utility methods -----------------------------------------------
+
+    def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
+        if not has_any:
+            return []
+
+        build_cmd = self.get_finalized_command(build_cmd)
+        build_files = build_cmd.get_outputs()
+        build_dir = getattr(build_cmd, cmd_option)
+
+        prefix_len = len(build_dir) + len(os.sep)
+        outputs = []
+        for file in build_files:
+            outputs.append(os.path.join(output_dir, file[prefix_len:]))
+
+        return outputs
+
+    def _bytecode_filenames(self, py_filenames):
+        bytecode_files = []
+        for py_file in py_filenames:
+            # Since build_py handles package data installation, the
+            # list of outputs can contain more than just .py files.
+            # Make sure we only report bytecode for the .py files.
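+            # e.g. 'spam.py' is reported as 'spam.pyc' and, when
+            # optimizing, 'spam.pyo'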
+            ext = os.path.splitext(os.path.normcase(py_file))[1]
+            if ext != PYTHON_SOURCE_EXTENSION:
+                continue
+            if self.compile:
+                bytecode_files.append(py_file + "c")
+            if self.optimize > 0:
+                bytecode_files.append(py_file + "o")
+
+        return bytecode_files
+
+
+    # -- External interface --------------------------------------------
+    # (called by outsiders)
+
+    def get_outputs(self):
+        """Return the list of files that would be installed if this command
+        were actually run.  Not affected by the "dry-run" flag or whether
+        modules have actually been built yet.
+        """
+        pure_outputs = \
+            self._mutate_outputs(self.distribution.has_pure_modules(),
+                                 'build_py', 'build_lib',
+                                 self.install_dir)
+        if self.compile:
+            bytecode_outputs = self._bytecode_filenames(pure_outputs)
+        else:
+            bytecode_outputs = []
+
+        ext_outputs = \
+            self._mutate_outputs(self.distribution.has_ext_modules(),
+                                 'build_ext', 'build_lib',
+                                 self.install_dir)
+
+        return pure_outputs + bytecode_outputs + ext_outputs
+
+    def get_inputs(self):
+        """Get the list of files that are input to this command, ie. the
+        files that get installed as they are named in the build tree.
+        The files in this list correspond one-to-one to the output
+        filenames returned by 'get_outputs()'.
+        """
+        inputs = []
+
+        if self.distribution.has_pure_modules():
+            build_py = self.get_finalized_command('build_py')
+            inputs.extend(build_py.get_outputs())
+
+        if self.distribution.has_ext_modules():
+            build_ext = self.get_finalized_command('build_ext')
+            inputs.extend(build_ext.get_outputs())
+
+        return inputs
diff --git a/Lib/packaging/command/install_scripts.py b/Lib/packaging/command/install_scripts.py
new file mode 100644
index 0000000..cfacbe2
--- /dev/null
+++ b/Lib/packaging/command/install_scripts.py
@@ -0,0 +1,59 @@
+"""Install scripts."""
+
+# Contributed by Bastian Kleineidam
+
+import os
+from packaging.command.cmd import Command
+from packaging import logger
+
+class install_scripts(Command):
+
+    description = "install scripts (Python or otherwise)"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install scripts to"),
+        ('build-dir=', 'b', "build directory (where to install from)"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ('skip-build', None, "skip the build steps"),
+    ]
+
+    boolean_options = ['force', 'skip-build']
+
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.force = False
+        self.build_dir = None
+        self.skip_build = None
+
+    def finalize_options(self):
+        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
+        self.set_undefined_options('install_dist',
+                                   ('install_scripts', 'install_dir'),
+                                   'force', 'skip_build')
+
+    def run(self):
+        if not self.skip_build:
+            self.run_command('build_scripts')
+
+        if not os.path.exists(self.build_dir):
+            self.outfiles = []
+            return
+
+        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
+        if os.name == 'posix':
+            # Set the executable bits (owner, group, and world) on
+            # all the scripts we just installed.
+            for file in self.get_outputs():
+                if self.dry_run:
+                    logger.info("changing mode of %s", file)
+                else:
+                    mode = (os.stat(file).st_mode | 0o555) & 0o7777
+                    logger.info("changing mode of %s to %o", file, mode)
+                    os.chmod(file, mode)
+
+    def get_inputs(self):
+        return self.distribution.scripts or []
+
+    def get_outputs(self):
+        return self.outfiles or []
diff --git a/Lib/packaging/command/register.py b/Lib/packaging/command/register.py
new file mode 100644
index 0000000..962afdc
--- /dev/null
+++ b/Lib/packaging/command/register.py
@@ -0,0 +1,282 @@
+"""Register a release with a project index."""
+
+# Contributed by Richard Jones
+
+import io
+import getpass
+import urllib.error
+import urllib.parse
+import urllib.request
+
+from packaging import logger
+from packaging.util import (read_pypirc, generate_pypirc, DEFAULT_REPOSITORY,
+                            DEFAULT_REALM, get_pypirc_path)
+from packaging.command.cmd import Command
+
+class register(Command):
+
+    description = "register a release with PyPI"
+    user_options = [
+        ('repository=', 'r',
+         "repository URL [default: %s]" % DEFAULT_REPOSITORY),
+        ('show-response', None,
+         "display full response text from server"),
+        ('list-classifiers', None,
+         "list valid Trove classifiers"),
+        ('strict', None,
+         "stop the registration if the metadata is not fully compliant")
+        ]
+
+    boolean_options = ['show-response', 'list-classifiers', 'strict']
+
+    def initialize_options(self):
+        self.repository = None
+        self.realm = None
+        self.show_response = False
+        self.list_classifiers = False
+        self.strict = False
+
+    def finalize_options(self):
+        if self.repository is None:
+            self.repository = DEFAULT_REPOSITORY
+        if self.realm is None:
+            self.realm = DEFAULT_REALM
+
+    def run(self):
+        self._set_config()
+
+        # Check the package metadata
+        check = self.distribution.get_command_obj('check')
+        if check.strict != self.strict and not check.all:
+            # If check was already run but with different options,
+            # re-run it
+            check.strict = self.strict
+            check.all = True
+            self.distribution.have_run.pop('check', None)
+            self.run_command('check')
+
+        if self.dry_run:
+            self.verify_metadata()
+        elif self.list_classifiers:
+            self.classifiers()
+        else:
+            self.send_metadata()
+
+    def _set_config(self):
+        ''' Read the configuration file and set attributes.
+        '''
+        config = read_pypirc(self.repository, self.realm)
+        if config != {}:
+            self.username = config['username']
+            self.password = config['password']
+            self.repository = config['repository']
+            self.realm = config['realm']
+            self.has_config = True
+        else:
+            if self.repository not in ('pypi', DEFAULT_REPOSITORY):
+                raise ValueError('%s not found in .pypirc' % self.repository)
+            if self.repository == 'pypi':
+                self.repository = DEFAULT_REPOSITORY
+            self.has_config = False
+
+    def classifiers(self):
+        ''' Fetch the list of classifiers from the server.
+        '''
+        url = self.repository + '?:action=list_classifiers'
+        response = urllib.request.urlopen(url)
+        logger.info(response.read())
+
+    def verify_metadata(self):
+        ''' Send the metadata to the package index server to be checked.
+        '''
+        # send the info to the server and report the result
+        code, result = self.post_to_server(self.build_post_data('verify'))
+        logger.info('server response (%s): %s', code, result)
+
+    def send_metadata(self):
+        ''' Send the metadata to the package index server.
+
+            Well, do the following:
+            1. figure who the user is, and then
+            2. send the data as a Basic auth'ed POST.
+
+            First we try to read the username/password from $HOME/.pypirc,
+            which is a ConfigParser-formatted file with a section
+            [distutils] containing username and password entries (both
+            in clear text). Eg:
+
+                [distutils]
+                index-servers =
+                    pypi
+
+                [pypi]
+                username: fred
+                password: sekrit
+
+            Otherwise, to figure who the user is, we offer the user three
+            choices:
+
+             1. use existing login,
+             2. register as a new user, or
+             3. set the password to a random string and email the user.
+
+        '''
+        # TODO factor registration out into another method
+        # TODO use print to print, not logging
+
+        # see if we can short-cut and get the username/password from the
+        # config
+        if self.has_config:
+            choice = '1'
+            username = self.username
+            password = self.password
+        else:
+            choice = 'x'
+            username = password = ''
+
+        # get the user's login info
+        choices = '1 2 3 4'.split()
+        while choice not in choices:
+            logger.info('''\
+We need to know who you are, so please choose either:
+ 1. use your existing login,
+ 2. register as a new user,
+ 3. have the server generate a new password for you (and email it to you), or
+ 4. quit
+Your selection [default 1]: ''')
+
+            choice = input()
+            if not choice:
+                choice = '1'
+            elif choice not in choices:
+                print('Please choose one of the four options!')
+
+        if choice == '1':
+            # get the username and password
+            while not username:
+                username = input('Username: ')
+            while not password:
+                password = getpass.getpass('Password: ')
+
+            # set up the authentication
+            auth = urllib.request.HTTPPasswordMgr()
+            host = urllib.parse.urlparse(self.repository)[1]
+            auth.add_password(self.realm, host, username, password)
+            # send the info to the server and report the result
+            code, result = self.post_to_server(self.build_post_data('submit'),
+                auth)
+            logger.info('Server response (%s): %s', code, result)
+
+            # possibly save the login
+            if code == 200:
+                if self.has_config:
+                    # sharing the password in the distribution instance
+                    # so the upload command can reuse it
+                    self.distribution.password = password
+                else:
+                    logger.info(
+                        'I can store your PyPI login so future submissions '
+                        'will be faster.\n(the login will be stored in %s)',
+                        get_pypirc_path())
+                    choice = 'X'
+                    while choice.lower() not in 'yn':
+                        choice = input('Save your login (y/N)?')
+                        if not choice:
+                            choice = 'n'
+                    if choice.lower() == 'y':
+                        generate_pypirc(username, password)
+
+        elif choice == '2':
+            data = {':action': 'user'}
+            data['name'] = data['password'] = data['email'] = ''
+            data['confirm'] = None
+            while not data['name']:
+                data['name'] = input('Username: ')
+            while data['password'] != data['confirm']:
+                while not data['password']:
+                    data['password'] = getpass.getpass('Password: ')
+                while not data['confirm']:
+                    data['confirm'] = getpass.getpass(' Confirm: ')
+                if data['password'] != data['confirm']:
+                    data['password'] = ''
+                    data['confirm'] = None
+                    print("Password and confirm don't match!")
+            while not data['email']:
+                data['email'] = input('   EMail: ')
+            code, result = self.post_to_server(data)
+            if code != 200:
+                logger.info('server response (%s): %s', code, result)
+            else:
+                logger.info('you will receive an email shortly; follow the '
+                            'instructions in it to complete registration.')
+        elif choice == '3':
+            data = {':action': 'password_reset'}
+            data['email'] = ''
+            while not data['email']:
+                data['email'] = input('Your email address: ')
+            code, result = self.post_to_server(data)
+            logger.info('server response (%s): %s', code, result)
+
+    def build_post_data(self, action):
+        # figure the data to send - the metadata plus some additional
+        # information used by the package server
+        data = self.distribution.metadata.todict()
+        data[':action'] = action
+        return data
+
+    # XXX to be refactored with upload.upload_file
+    def post_to_server(self, data, auth=None):
+        ''' Post a query to the server, and return a string response.
+        '''
+        if 'name' in data:
+            logger.info('Registering %s to %s', data['name'], self.repository)
+        # Build up the MIME payload for the urllib2 POST data
+        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+        sep_boundary = '\n--' + boundary
+        end_boundary = sep_boundary + '--'
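+        # the assembled payload looks schematically like:
+        #
+        #   --<boundary>
+        #   Content-Disposition: form-data; name=":action"
+        #
+        #   submit
+        #   --<boundary>--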
+        body = io.StringIO()
+        for key, value in data.items():
+            # handle multiple entries for the same name
+            if not isinstance(value, (tuple, list)):
+                value = [value]
+
+            for item in value:
+                body.write(sep_boundary)
+                body.write('\nContent-Disposition: form-data; name="%s"' % key)
+                body.write("\n\n")
+                body.write(item)
+                if item and item[-1] == '\r':
+                    body.write('\n')  # write an extra newline (lurve Macs)
+        body.write(end_boundary)
+        body.write("\n")
+        # encode to bytes so the request body matches the declared charset
+        # and Content-length counts bytes, not characters
+        body = body.getvalue().encode('utf-8')
+
+        # build the Request
+        headers = {
+            'Content-type': ('multipart/form-data; boundary=%s; charset=utf-8'
+                             % boundary),
+            'Content-length': str(len(body))
+        }
+        req = urllib.request.Request(self.repository, body, headers)
+
+        # handle HTTP and include the Basic Auth handler
+        opener = urllib.request.build_opener(
+            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
+        )
+        data = ''
+        try:
+            result = opener.open(req)
+        except urllib.error.HTTPError as e:
+            if self.show_response:
+                data = e.fp.read()
+            result = e.code, e.msg
+        except urllib.error.URLError as e:
+            result = 500, str(e)
+        else:
+            if self.show_response:
+                data = result.read()
+            result = 200, 'OK'
+        if self.show_response:
+            dashes = '-' * 75
+            logger.info('%s%s%s', dashes, data, dashes)
+
+        return result
diff --git a/Lib/packaging/command/sdist.py b/Lib/packaging/command/sdist.py
new file mode 100644
index 0000000..a28019b
--- /dev/null
+++ b/Lib/packaging/command/sdist.py
@@ -0,0 +1,375 @@
+"""Create a source distribution."""
+
+import os
+import sys
+import re
+from io import StringIO
+from glob import glob
+from shutil import get_archive_formats, rmtree
+
+from packaging import logger
+from packaging.util import resolve_name
+from packaging.errors import (PackagingPlatformError, PackagingOptionError,
+                              PackagingModuleError, PackagingFileError)
+from packaging.command import get_command_names
+from packaging.command.cmd import Command
+from packaging.manifest import Manifest
+
+
+def show_formats():
+    """Print all possible values for the 'formats' option (used by
+    the "--help-formats" command-line option).
+    """
+    from packaging.fancy_getopt import FancyGetopt
+    formats = sorted(('formats=' + name, None, desc)
+                     for name, desc in get_archive_formats())
+    FancyGetopt(formats).print_help(
+        "List of available source distribution formats:")
+
+# a backslash followed by some whitespace + EOL
+_COLLAPSE_PATTERN = re.compile(r'\\[ \t]*\n', re.M)
+# a comment line or a whitespace-only line
+_COMMENTED_LINE = re.compile(r'^#.*\n$|^[ \t]*\n$', re.M)
+
+
+class sdist(Command):
+
+    description = "create a source distribution (tarball, zip file, etc.)"
+
+    user_options = [
+        ('manifest=', 'm',
+         "name of manifest file [default: MANIFEST]"),
+        ('use-defaults', None,
+         "include the default file set in the manifest "
+         "[default; disable with --no-defaults]"),
+        ('no-defaults', None,
+         "don't include the default file set"),
+        ('prune', None,
+         "specifically exclude files/directories that should not be "
+         "distributed (build tree, RCS/CVS dirs, etc.) "
+         "[default; disable with --no-prune]"),
+        ('no-prune', None,
+         "don't automatically exclude anything"),
+        ('manifest-only', 'o',
+         "just regenerate the manifest and then stop"),
+        ('formats=', None,
+         "formats for source distribution (comma-separated list)"),
+        ('keep-temp', 'k',
+         "keep the distribution tree around after creating "
+         "archive file(s)"),
+        ('dist-dir=', 'd',
+         "directory to put the source distribution archive(s) in "
+         "[default: dist]"),
+        ('metadata-check', None,
+         "ensure that all required elements of metadata "
+         "are supplied; warn if any are missing [default]"),
+        ('owner=', 'u',
+         "Owner name used when creating a tar file [default: current user]"),
+        ('group=', 'g',
+         "Group name used when creating a tar file [default: current group]"),
+        ('manifest-builders=', None,
+         "manifest builders (comma-separated list)"),
+        ]
+
+    boolean_options = ['use-defaults', 'prune',
+                       'manifest-only', 'keep-temp', 'metadata-check']
+
+    help_options = [
+        ('help-formats', None,
+         "list available distribution formats", show_formats),
+        ]
+
+    negative_opt = {'no-defaults': 'use-defaults',
+                    'no-prune': 'prune'}
+
+    default_format = {'posix': 'gztar',
+                      'nt': 'zip'}
+
+    def initialize_options(self):
+        self.manifest = None
+        # 'use_defaults': if true, we will include the default file set
+        # in the manifest
+        self.use_defaults = True
+        self.prune = True
+        self.manifest_only = False
+        self.formats = None
+        self.keep_temp = False
+        self.dist_dir = None
+
+        self.archive_files = None
+        self.metadata_check = True
+        self.owner = None
+        self.group = None
+        self.filelist = None
+        self.manifest_builders = None
+
+    def _check_archive_formats(self, formats):
+        supported_formats = [name for name, desc in get_archive_formats()]
+        for format in formats:
+            if format not in supported_formats:
+                return format
+        return None
+
+    def finalize_options(self):
+        if self.manifest is None:
+            self.manifest = "MANIFEST"
+
+        self.ensure_string_list('formats')
+        if self.formats is None:
+            try:
+                self.formats = [self.default_format[os.name]]
+            except KeyError:
+                raise PackagingPlatformError(
+                    "don't know how to create source distributions "
+                    "on platform %s" % os.name)
+
+        bad_format = self._check_archive_formats(self.formats)
+        if bad_format:
+            raise PackagingOptionError(
+                "unknown archive format '%s'" % bad_format)
+
+        if self.dist_dir is None:
+            self.dist_dir = "dist"
+
+        if self.filelist is None:
+            self.filelist = Manifest()
+
+        if self.manifest_builders is None:
+            self.manifest_builders = []
+        else:
+            if isinstance(self.manifest_builders, str):
+                self.manifest_builders = self.manifest_builders.split(',')
+            builders = []
+            for builder in self.manifest_builders:
+                builder = builder.strip()
+                if builder == '':
+                    continue
+                try:
+                    builder = resolve_name(builder)
+                except ImportError as e:
+                    raise PackagingModuleError(e)
+
+                builders.append(builder)
+
+            self.manifest_builders = builders
+
+    def run(self):
+        # 'filelist' contains the list of files that will make up the
+        # manifest
+        self.filelist.clear()
+
+        # Check the package metadata
+        if self.metadata_check:
+            self.run_command('check')
+
+        # Do whatever it takes to get the list of files to process
+        # (process the manifest template, read an existing manifest,
+        # whatever).  File list is accumulated in 'self.filelist'.
+        self.get_file_list()
+
+        # If user just wanted us to regenerate the manifest, stop now.
+        if self.manifest_only:
+            return
+
+        # Otherwise, go ahead and create the source distribution tarball,
+        # or zipfile, or whatever.
+        self.make_distribution()
+
+    def get_file_list(self):
+        """Figure out the list of files to include in the source
+        distribution, and put it in 'self.filelist'.  This might involve
+        reading the manifest template (and writing the manifest), or just
+        reading the manifest, or just using the default file set -- it all
+        depends on the user's options.
+        """
+        template_exists = len(self.distribution.extra_files) > 0
+        if not template_exists:
+            logger.warning('%s: using default file list',
+                           self.get_command_name())
+        self.filelist.findall()
+
+        if self.use_defaults:
+            self.add_defaults()
+        if template_exists:
+            template = '\n'.join(self.distribution.extra_files)
+            self.filelist.read_template(StringIO(template))
+
+        # call manifest builders, if any.
+        for builder in self.manifest_builders:
+            builder(self.distribution, self.filelist)
+
+        if self.prune:
+            self.prune_file_list()
+
+        self.filelist.write(self.manifest)
+
+    def add_defaults(self):
+        """Add all the default files to self.filelist:
+          - README or README.txt
+          - test/test*.py
+          - all pure Python modules mentioned in setup script
+          - all files pointed by package_data (build_py)
+          - all files defined in data_files.
+          - all files defined as scripts.
+          - all C sources listed as part of extensions or C libraries
+            in the setup script (doesn't catch C headers!)
+        Warns if README (or README.txt) is missing; everything
+        else is optional.
+        """
+        standards = [('README', 'README.txt')]
+        for fn in standards:
+            if isinstance(fn, tuple):
+                alts = fn
+                got_it = False
+                for alt in alts:
+                    if os.path.exists(alt):
+                        got_it = True
+                        self.filelist.append(alt)
+                        break
+
+                if not got_it:
+                    logger.warning(
+                        '%s: standard file not found: should have one of %s',
+                        self.get_command_name(), ', '.join(alts))
+            else:
+                if os.path.exists(fn):
+                    self.filelist.append(fn)
+                else:
+                    logger.warning('%s: standard file %r not found',
+                                   self.get_command_name(), fn)
+
+        optional = ['test/test*.py', 'setup.cfg']
+        for pattern in optional:
+            files = [f for f in glob(pattern) if os.path.isfile(f)]
+            if files:
+                self.filelist.extend(files)
+
+        for cmd_name in get_command_names():
+            try:
+                cmd_obj = self.get_finalized_command(cmd_name)
+            except PackagingOptionError:
+                pass
+            else:
+                self.filelist.extend(cmd_obj.get_source_files())
+
+    def prune_file_list(self):
+        """Prune off branches that might slip into the file list as created
+        by 'read_template()', but really don't belong there:
+          * the build tree (typically "build")
+          * the release tree itself (only an issue if we ran "sdist"
+            previously with --keep-temp, or it aborted)
+          * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
+        """
+        build = self.get_finalized_command('build')
+        base_dir = self.distribution.get_fullname()
+
+        self.filelist.exclude_pattern(None, prefix=build.build_base)
+        self.filelist.exclude_pattern(None, prefix=base_dir)
+
+        # pruning out vcs directories
+        # both separators are used under win32
+        if sys.platform == 'win32':
+            seps = r'/|\\'
+        else:
+            seps = '/'
+
+        vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
+                    '_darcs']
+        vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
+        self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)
+
+    def make_release_tree(self, base_dir, files):
+        """Create the directory tree that will become the source
+        distribution archive.  All directories implied by the filenames in
+        'files' are created under 'base_dir', and then we hard link or copy
+        (if hard linking is unavailable) those files into place.
+        Essentially, this duplicates the developer's source tree, but in a
+        directory named after the distribution, containing only the files
+        to be distributed.
+        """
+        # Create all the directories under 'base_dir' necessary to
+        # put 'files' there; the 'mkpath()' is just so we don't die
+        # if the manifest happens to be empty.
+        self.mkpath(base_dir)
+        self.create_tree(base_dir, files, dry_run=self.dry_run)
+
+        # And walk over the list of files, either making a hard link (if
+        # os.link exists) to each one that doesn't already exist in its
+        # corresponding location under 'base_dir', or copying each file
+        # that's out-of-date in 'base_dir'.  (Usually, all files will be
+        # out-of-date, because by default we blow away 'base_dir' when
+        # we're done making the distribution archives.)
+
+        if hasattr(os, 'link'):        # can make hard links on this system
+            link = 'hard'
+            msg = "making hard links in %s..." % base_dir
+        else:                           # nope, have to copy
+            link = None
+            msg = "copying files to %s..." % base_dir
+
+        if not files:
+            logger.warning("no files to distribute -- empty manifest?")
+        else:
+            logger.info(msg)
+
+        for file in self.distribution.metadata.requires_files:
+            if file not in files:
+                msg = "'%s' must be included explicitly in 'extra_files'" \
+                        % file
+                raise PackagingFileError(msg)
+
+        for file in files:
+            if not os.path.isfile(file):
+                logger.warning("'%s' not a regular file -- skipping", file)
+            else:
+                dest = os.path.join(base_dir, file)
+                self.copy_file(file, dest, link=link)
+
+        self.distribution.metadata.write(os.path.join(base_dir, 'PKG-INFO'))
+
+    def make_distribution(self):
+        """Create the source distribution(s).  First, we create the release
+        tree with 'make_release_tree()'; then, we create all required
+        archive files (according to 'self.formats') from the release tree.
+        Finally, we clean up by blowing away the release tree (unless
+        'self.keep_temp' is true).  The list of archive files created is
+        stored so it can be retrieved later by 'get_archive_files()'.
+        """
+        # Don't warn about missing metadata here -- should be (and is!)
+        # done elsewhere.
+        base_dir = self.distribution.get_fullname()
+        base_name = os.path.join(self.dist_dir, base_dir)
+
+        self.make_release_tree(base_dir, self.filelist.files)
+        archive_files = []              # remember names of files we create
+        # the tar archive must be created last, to avoid it being
+        # overwritten and then removed
+        if 'tar' in self.formats:
+            self.formats.append(self.formats.pop(self.formats.index('tar')))
+
+        for fmt in self.formats:
+            file = self.make_archive(base_name, fmt, base_dir=base_dir,
+                                     owner=self.owner, group=self.group)
+            archive_files.append(file)
+            self.distribution.dist_files.append(('sdist', '', file))
+
+        self.archive_files = archive_files
+
+        if not self.keep_temp:
+            if self.dry_run:
+                logger.info('removing %s', base_dir)
+            else:
+                rmtree(base_dir)
+
+    def get_archive_files(self):
+        """Return the list of archive files created when the command
+        was run, or None if the command hasn't run yet.
+        """
+        return self.archive_files
+
+    def create_tree(self, base_dir, files, mode=0o777, verbose=1,
+                    dry_run=False):
+        need_dir = set()
+        for file in files:
+            need_dir.add(os.path.join(base_dir, os.path.dirname(file)))
+
+        # Now create them
+        for dir in sorted(need_dir):
+            self.mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
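
Aside (not part of the patch): the VCS-pruning regex built in
prune_file_list() can be exercised on its own.  A small sketch with
POSIX-style separators, assuming the manifest applies the pattern with
re.search, as distutils-style file lists do:

    import re

    seps = '/'  # on win32 the command uses r'/|\\' instead
    vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
                '_darcs']
    vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)

    for path in ['src/spam.py', '.hg/store/data', 'pkg/.svn/entries']:
        pruned = re.search(vcs_ptrn, path) is not None
        print(path, '->', 'pruned' if pruned else 'kept')

'src/spam.py' is kept; the two paths containing VCS directories match the
pattern and are pruned.
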
diff --git a/Lib/packaging/command/test.py b/Lib/packaging/command/test.py
new file mode 100644
index 0000000..7f9015b
--- /dev/null
+++ b/Lib/packaging/command/test.py
@@ -0,0 +1,81 @@
+"""Run the project's test suite."""
+
+import os
+import sys
+import logging
+import unittest
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.database import get_distribution
+from packaging.errors import PackagingOptionError
+from packaging.util import resolve_name
+
+
+class test(Command):
+
+    description = "run the project's test suite"
+
+    user_options = [
+        ('suite=', 's',
+         "test suite to run (for example: 'some_module.test_suite')"),
+        ('runner=', None,
+         "test runner to be called"),
+        ('tests-require=', None,
+         "list of distributions required to run the test suite"),
+    ]
+
+    def initialize_options(self):
+        self.suite = None
+        self.runner = None
+        self.tests_require = []
+
+    def finalize_options(self):
+        self.build_lib = self.get_finalized_command("build").build_lib
+        for requirement in self.tests_require:
+            if get_distribution(requirement) is None:
+                logger.warning("test dependency %s is not installed, "
+                               "tests may fail", requirement)
+        if (not self.suite and not self.runner and
+            self.get_ut_with_discovery() is None):
+            raise PackagingOptionError(
+                "no test discovery available, please give a 'suite' or "
+                "'runner' option or install unittest2")
+
+    def get_ut_with_discovery(self):
+        if hasattr(unittest.TestLoader, "discover"):
+            return unittest
+        else:
+            try:
+                import unittest2
+                return unittest2
+            except ImportError:
+                return None
+
+    def run(self):
+        prev_syspath = sys.path[:]
+        try:
+            # build release
+            build = self.get_reinitialized_command('build')
+            self.run_command('build')
+            sys.path.insert(0, build.build_lib)
+
+            # Temporary kludge until we remove the verbose arguments and use
+            # logging everywhere
+            logger = logging.getLogger('packaging')
+            verbose = logger.getEffectiveLevel() <= logging.DEBUG
+            verbosity = verbose + 1
+
+            # run the tests
+            if self.runner:
+                resolve_name(self.runner)()
+            elif self.suite:
+                runner = unittest.TextTestRunner(verbosity=verbosity)
+                runner.run(resolve_name(self.suite)())
+            elif self.get_ut_with_discovery():
+                ut = self.get_ut_with_discovery()
+                test_suite = ut.TestLoader().discover(os.curdir)
+                runner = ut.TextTestRunner(verbosity=verbosity)
+                runner.run(test_suite)
+        finally:
+            sys.path[:] = prev_syspath
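
Aside (not part of the patch): the 'runner' and 'suite' options above are
dotted names resolved with resolve_name().  A rough standalone equivalent of
that resolution (a sketch only; packaging's real implementation differs in
details):

    import importlib

    def resolve_dotted(name):
        """Resolve 'pkg.module.attr' to the object it names."""
        parts = name.split('.')
        # try the longest importable prefix first, then walk attributes
        for i in range(len(parts), 0, -1):
            try:
                obj = importlib.import_module('.'.join(parts[:i]))
            except ImportError:
                continue
            for attr in parts[i:]:
                obj = getattr(obj, attr)
            return obj
        raise ImportError(name)

    print(resolve_dotted('os.path.join'))  # <function join ...>
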
diff --git a/Lib/packaging/command/upload.py b/Lib/packaging/command/upload.py
new file mode 100644
index 0000000..df265c9
--- /dev/null
+++ b/Lib/packaging/command/upload.py
@@ -0,0 +1,201 @@
+"""Upload a distribution to a project index."""
+
+import os
+import socket
+import logging
+import platform
+import urllib.parse
+from io import BytesIO
+from base64 import standard_b64encode
+from hashlib import md5
+from urllib.error import HTTPError
+from urllib.request import urlopen, Request
+
+from packaging import logger
+from packaging.errors import PackagingOptionError
+from packaging.util import (spawn, read_pypirc, DEFAULT_REPOSITORY,
+                            DEFAULT_REALM)
+from packaging.command.cmd import Command
+
+
+class upload(Command):
+
+    description = "upload distribution to PyPI"
+
+    user_options = [
+        ('repository=', 'r',
+         "repository URL [default: %s]" % DEFAULT_REPOSITORY),
+        ('show-response', None,
+         "display full response text from server"),
+        ('sign', 's',
+         "sign files to upload using gpg"),
+        ('identity=', 'i',
+         "GPG identity used to sign files"),
+        ('upload-docs', None,
+         "upload documentation too"),
+        ]
+
+    boolean_options = ['show-response', 'sign']
+
+    def initialize_options(self):
+        self.repository = None
+        self.realm = None
+        self.show_response = False
+        self.username = ''
+        self.password = ''
+        self.sign = False
+        self.identity = None
+        self.upload_docs = False
+
+    def finalize_options(self):
+        if self.repository is None:
+            self.repository = DEFAULT_REPOSITORY
+        if self.realm is None:
+            self.realm = DEFAULT_REALM
+        if self.identity and not self.sign:
+            raise PackagingOptionError(
+                "Must use --sign for --identity to have meaning")
+        config = read_pypirc(self.repository, self.realm)
+        if config != {}:
+            self.username = config['username']
+            self.password = config['password']
+            self.repository = config['repository']
+            self.realm = config['realm']
+
+        # getting the password from the distribution
+        # if previously set by the register command
+        if not self.password and self.distribution.password:
+            self.password = self.distribution.password
+
+    def run(self):
+        if not self.distribution.dist_files:
+            raise PackagingOptionError(
+                "No dist file created in earlier command")
+        for command, pyversion, filename in self.distribution.dist_files:
+            self.upload_file(command, pyversion, filename)
+        if self.upload_docs:
+            upload_docs = self.get_finalized_command("upload_docs")
+            upload_docs.repository = self.repository
+            upload_docs.username = self.username
+            upload_docs.password = self.password
+            upload_docs.run()
+
+    # XXX to be refactored with register.post_to_server
+    def upload_file(self, command, pyversion, filename):
+        # Make sure the repository URL is compliant
+        scheme, netloc, url, params, query, fragments = \
+            urllib.parse.urlparse(self.repository)
+        if params or query or fragments:
+            raise AssertionError("Incompatible url %s" % self.repository)
+
+        if scheme not in ('http', 'https'):
+            raise AssertionError("unsupported scheme " + scheme)
+
+        # Sign if requested
+        if self.sign:
+            gpg_args = ["gpg", "--detach-sign", "-a", filename]
+            if self.identity:
+                gpg_args[2:2] = ["--local-user", self.identity]
+            spawn(gpg_args, dry_run=self.dry_run)
+
+        # Fill in the data - send all the metadata in case we need to
+        # register a new release
+        with open(filename, 'rb') as f:
+            content = f.read()
+
+        data = self.distribution.metadata.todict()
+
+        # extra upload info
+        data[':action'] = 'file_upload'
+        # note: 'protcol_version' (sic) matches the field name distutils
+        # has always sent to the index
+        data['protcol_version'] = '1'
+        data['content'] = (os.path.basename(filename), content)
+        data['filetype'] = command
+        data['pyversion'] = pyversion
+        data['md5_digest'] = md5(content).hexdigest()
+
+        if command == 'bdist_dumb':
+            data['comment'] = 'built for %s' % platform.platform(terse=True)
+
+        if self.sign:
+            # read the signature as bytes so it can be written to the
+            # BytesIO body below
+            with open(filename + '.asc', 'rb') as fp:
+                sig = fp.read()
+            data['gpg_signature'] = (
+                os.path.basename(filename) + ".asc", sig)
+
+        # set up the authentication
+        # The exact encoding of the authentication string is debated;
+        # in any case PyPI only accepts ASCII for both username and password.
+        user_pass = (self.username + ":" + self.password).encode('ascii')
+        auth = b"Basic " + standard_b64encode(user_pass)
+
+        # Build up the MIME payload for the POST data
+        boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+        sep_boundary = b'\n--' + boundary
+        end_boundary = sep_boundary + b'--'
+        body = BytesIO()
+
+        file_fields = ('content', 'gpg_signature')
+
+        for key, value in data.items():
+            # handle multiple entries for the same name
+            if not isinstance(value, tuple):
+                value = [value]
+
+            content_dispo = '\nContent-Disposition: form-data; name="%s"' % key
+
+            if key in file_fields:
+                filename_, content = value
+                filename_ = ';filename="%s"' % filename_
+                body.write(sep_boundary)
+                body.write(content_dispo.encode('utf-8'))
+                body.write(filename_.encode('utf-8'))
+                body.write(b"\n\n")
+                body.write(content)
+            else:
+                for item in value:
+                    item = str(item).encode('utf-8')
+                    body.write(sep_boundary)
+                    body.write(content_dispo.encode('utf-8'))
+                    body.write(b"\n\n")
+                    body.write(item)
+                    if item and item.endswith(b'\r'):
+                        # write an extra newline (lurve Macs)
+                        body.write(b'\n')
+
+        body.write(end_boundary)
+        body.write(b"\n")
+        body = body.getvalue()
+
+        logger.info("Submitting %s to %s", filename, self.repository)
+
+        # build the Request
+        headers = {
+            'Content-type': ('multipart/form-data; boundary=%s' %
+                             boundary.decode('ascii')),
+            'Content-length': str(len(body)),
+            'Authorization': auth,
+        }
+
+        request = Request(self.repository, data=body,
+                          headers=headers)
+        # send the data
+        try:
+            result = urlopen(request)
+            status = result.code
+            reason = result.msg
+        except socket.error as e:
+            logger.error(e)
+            return
+        except HTTPError as e:
+            status = e.code
+            reason = e.msg
+
+        if status == 200:
+            logger.info('Server response (%s): %s', status, reason)
+        else:
+            logger.error('Upload failed (%s): %s', status, reason)
+
+        if self.show_response and logger.isEnabledFor(logging.INFO):
+            sep = '-' * 75
+            logger.info('%s\n%s\n%s', sep, result.read().decode(), sep)
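
Aside (not part of the patch): the authentication and digest values built in
upload_file() can be reproduced standalone.  A sketch with hypothetical
credentials and content:

    from base64 import standard_b64encode
    from hashlib import md5

    username, password = 'alice', 'hunter2'  # hypothetical credentials
    content = b'fake sdist bytes'            # stand-in for the archive

    user_pass = (username + ':' + password).encode('ascii')
    auth = b'Basic ' + standard_b64encode(user_pass)
    digest = md5(content).hexdigest()

    print(auth)    # b'Basic YWxpY2U6aHVudGVyMg=='
    print(digest)  # hex string sent as the 'md5_digest' field
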
diff --git a/Lib/packaging/command/upload_docs.py b/Lib/packaging/command/upload_docs.py
new file mode 100644
index 0000000..47e6217
--- /dev/null
+++ b/Lib/packaging/command/upload_docs.py
@@ -0,0 +1,172 @@
+"""Upload HTML documentation to a project index."""
+
+import os
+import base64
+import socket
+import zipfile
+import logging
+import http.client
+import urllib.parse
+from io import BytesIO
+
+from packaging import logger
+from packaging.util import read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM
+from packaging.errors import PackagingFileError
+from packaging.command.cmd import Command
+
+
+def zip_dir(directory):
+    """Compresses recursively contents of directory into a BytesIO object"""
+    destination = BytesIO()
+    with zipfile.ZipFile(destination, "w") as zip_file:
+        for root, dirs, files in os.walk(directory):
+            for name in files:
+                full = os.path.join(root, name)
+                relative = root[len(directory):].lstrip(os.path.sep)
+                dest = os.path.join(relative, name)
+                zip_file.write(full, dest)
+    return destination
+
+
+# grabbed from
+#    http://code.activestate.com/recipes/
+#    146306-http-client-to-post-using-multipartform-data/
+# TODO factor this out for use by install and command/upload
+
+def encode_multipart(fields, files, boundary=None):
+    """
+    *fields* is a sequence of (name: str, value: str) elements for regular
+    form fields, *files* is a sequence of (name: str, filename: str, value:
+    bytes) elements for data to be uploaded as files.
+
+    Returns (content_type: bytes, body: bytes) ready for http.client.HTTP.
+    """
+    if boundary is None:
+        boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+    elif not isinstance(boundary, bytes):
+        raise TypeError('boundary is not bytes but %r' % type(boundary))
+
+    parts = []
+    for key, value in fields:
+        parts.extend((
+            b'--' + boundary,
+            ('Content-Disposition: form-data; name="%s"' %
+             key).encode('utf-8'),
+            b'',
+            value.encode('utf-8')))
+
+    for key, filename, value in files:
+        parts.extend((
+            b'--' + boundary,
+            ('Content-Disposition: form-data; name="%s"; filename="%s"' %
+             (key, filename)).encode('utf-8'),
+            b'',
+            value))
+    parts.append(b'--' + boundary + b'--')
+    parts.append(b'')
+
+    body = b'\r\n'.join(parts)
+
+    content_type = b'multipart/form-data; boundary=' + boundary
+    return content_type, body
+
+
+class upload_docs(Command):
+
+    description = "upload HTML documentation to PyPI"
+
+    user_options = [
+        ('repository=', 'r',
+         "repository URL [default: %s]" % DEFAULT_REPOSITORY),
+        ('show-response', None,
+         "display full response text from server"),
+        ('upload-dir=', None,
+         "directory to upload"),
+        ]
+
+    def initialize_options(self):
+        self.repository = None
+        self.realm = None
+        self.show_response = False
+        self.upload_dir = None
+        self.username = ''
+        self.password = ''
+
+    def finalize_options(self):
+        if self.repository is None:
+            self.repository = DEFAULT_REPOSITORY
+        if self.realm is None:
+            self.realm = DEFAULT_REALM
+        if self.upload_dir is None:
+            build = self.get_finalized_command('build')
+            self.upload_dir = os.path.join(build.build_base, "docs")
+            if not os.path.isdir(self.upload_dir):
+                self.upload_dir = os.path.join(build.build_base, "doc")
+        logger.info('Using upload directory %s', self.upload_dir)
+        self.verify_upload_dir(self.upload_dir)
+        config = read_pypirc(self.repository, self.realm)
+        if config != {}:
+            self.username = config['username']
+            self.password = config['password']
+            self.repository = config['repository']
+            self.realm = config['realm']
+
+    def verify_upload_dir(self, upload_dir):
+        self.ensure_dirname('upload_dir')
+        index_location = os.path.join(upload_dir, "index.html")
+        if not os.path.exists(index_location):
+            mesg = "No 'index.html found in docs directory (%s)"
+            raise PackagingFileError(mesg % upload_dir)
+
+    def run(self):
+        name = self.distribution.metadata['Name']
+        version = self.distribution.metadata['Version']
+        zip_file = zip_dir(self.upload_dir)
+
+        fields = [(':action', 'doc_upload'),
+                  ('name', name), ('version', version)]
+        files = [('content', name, zip_file.getvalue())]
+        content_type, body = encode_multipart(fields, files)
+
+        credentials = self.username + ':' + self.password
+        auth = b"Basic " + base64.encodebytes(credentials.encode()).strip()
+
+        logger.info("Submitting documentation to %s", self.repository)
+
+        scheme, netloc, url, params, query, fragments = urllib.parse.urlparse(
+            self.repository)
+        if scheme == "http":
+            conn = http.client.HTTPConnection(netloc)
+        elif scheme == "https":
+            conn = http.client.HTTPSConnection(netloc)
+        else:
+            raise AssertionError("unsupported scheme %r" % scheme)
+
+        try:
+            conn.connect()
+            conn.putrequest("POST", url)
+            conn.putheader('Content-type', content_type)
+            conn.putheader('Content-length', str(len(body)))
+            conn.putheader('Authorization', auth)
+            conn.endheaders()
+            conn.send(body)
+
+        except socket.error as e:
+            logger.error(e)
+            return
+
+        r = conn.getresponse()
+
+        if r.status == 200:
+            logger.info('Server response (%s): %s', r.status, r.reason)
+        elif r.status == 301:
+            location = r.getheader('Location')
+            if location is None:
+                location = 'http://packages.python.org/%s/' % name
+            logger.info('Upload successful. Visit %s', location)
+        else:
+            logger.error('Upload failed (%s): %s', r.status, r.reason)
+
+        if self.show_response and logger.isEnabledFor(logging.INFO):
+            sep = '-' * 75
+            logger.info('%s\n%s\n%s', sep, r.read().decode('utf-8'), sep)
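
Aside (not part of the patch): encode_multipart() above is self-contained,
so it can be exercised directly.  A small usage sketch with made-up values
(the 22-byte blob is an empty zip's end-of-central-directory record):

    # assumes the module is importable as packaging.command.upload_docs
    from packaging.command.upload_docs import encode_multipart

    fields = [(':action', 'doc_upload'), ('name', 'spam'),
              ('version', '1.0')]
    files = [('content', 'spam-docs.zip', b'PK\x05\x06' + b'\x00' * 18)]

    content_type, body = encode_multipart(fields, files)
    print(content_type)  # b'multipart/form-data; boundary=...'
    print(len(body), 'body bytes')
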
diff --git a/Lib/packaging/command/wininst-10.0-amd64.exe b/Lib/packaging/command/wininst-10.0-amd64.exe
new file mode 100644
index 0000000..11f98cd
--- /dev/null
+++ b/Lib/packaging/command/wininst-10.0-amd64.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-10.0.exe b/Lib/packaging/command/wininst-10.0.exe
new file mode 100644
index 0000000..8ac6e19
--- /dev/null
+++ b/Lib/packaging/command/wininst-10.0.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-6.0.exe b/Lib/packaging/command/wininst-6.0.exe
new file mode 100644
index 0000000..f57c855
--- /dev/null
+++ b/Lib/packaging/command/wininst-6.0.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-7.1.exe b/Lib/packaging/command/wininst-7.1.exe
new file mode 100644
index 0000000..1433bc1
--- /dev/null
+++ b/Lib/packaging/command/wininst-7.1.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-8.0.exe b/Lib/packaging/command/wininst-8.0.exe
new file mode 100644
index 0000000..7403bfa
--- /dev/null
+++ b/Lib/packaging/command/wininst-8.0.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-9.0-amd64.exe b/Lib/packaging/command/wininst-9.0-amd64.exe
new file mode 100644
index 0000000..11d8011
--- /dev/null
+++ b/Lib/packaging/command/wininst-9.0-amd64.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-9.0.exe b/Lib/packaging/command/wininst-9.0.exe
new file mode 100644
index 0000000..dadb31d
--- /dev/null
+++ b/Lib/packaging/command/wininst-9.0.exe
Binary files differ
diff --git a/Lib/packaging/compat.py b/Lib/packaging/compat.py
new file mode 100644
index 0000000..a82efd3
--- /dev/null
+++ b/Lib/packaging/compat.py
@@ -0,0 +1,57 @@
+"""Compatibility helpers.
+
+This module provides classes, variables and imports which are used to
+support packaging across Python 2.x and 3.x.
+"""
+
+from packaging import logger
+
+
+# XXX Having two classes with the same name is not a good thing.
+# XXX 2to3-related code should move from util to this module
+
+# TODO Move common code here: PY3 (bool indicating if we're on 3.x), any, etc.
+
+try:
+    from packaging.util import Mixin2to3 as _Mixin2to3
+    _CONVERT = True
+    _KLASS = _Mixin2to3
+except ImportError:
+    _CONVERT = False
+    _KLASS = object
+
+__all__ = ['Mixin2to3']
+
+
+class Mixin2to3(_KLASS):
+    """ The base class which can be used for refactoring. When run under
+    Python 3.0, the run_2to3 method provided by Mixin2to3 is overridden.
+    When run on Python 2.x, it merely creates a class which overrides run_2to3,
+    yet does nothing in particular with it.
+    """
+    if _CONVERT:
+
+        def _run_2to3(self, files, doctests=[], fixers=[]):
+            """ Takes a list of files and doctests, and performs conversion
+            on those.
+              - First, the files which contain the code(`files`) are converted.
+              - Second, the doctests in `files` are converted.
+              - Thirdly, the doctests in `doctests` are converted.
+            """
+            if fixers:
+                self.fixer_names = fixers
+
+            logger.info('converting Python code')
+            _KLASS.run_2to3(self, files)
+
+            logger.info('converting doctests in Python files')
+            _KLASS.run_2to3(self, files, doctests_only=True)
+
+            if doctests:
+                logger.info('converting doctests in text files')
+                _KLASS.run_2to3(self, doctests, doctests_only=True)
+    else:
+        # If run on Python 2.x, there is nothing to do.
+
+        def _run_2to3(self, files, doctests=[], fixers=[]):
+            pass
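
Aside (not part of the patch): the try/except import with a fallback base
class used in this module is a general idiom for optional dependencies.  A
minimal sketch with a hypothetical optional module:

    try:
        from some_optional_lib import Helper as _Helper  # hypothetical
        _HAVE_HELPER = True
    except ImportError:
        _HAVE_HELPER = False
        _Helper = object

    class Converter(_Helper):
        if _HAVE_HELPER:
            def convert(self, files):
                # delegate to the real implementation
                return _Helper.convert_all(self, files)
        else:
            def convert(self, files):
                # optional dependency missing: nothing to do
                pass
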
diff --git a/Lib/packaging/compiler/__init__.py b/Lib/packaging/compiler/__init__.py
new file mode 100644
index 0000000..34315aa
--- /dev/null
+++ b/Lib/packaging/compiler/__init__.py
@@ -0,0 +1,282 @@
+"""Compiler abstraction model used by packaging.
+
+An abstract base class is defined in the ccompiler submodule, and
+concrete implementations suitable for various platforms are defined in
+the other submodules.  The extension module is also placed in this
+package.
+
+In general, code should not instantiate compiler classes directly but
+use the new_compiler and customize_compiler functions provided in this
+module.
+
+The compiler system has a registration API: get_default_compiler,
+set_compiler, show_compilers.
+"""
+
+import os
+import sys
+import re
+import sysconfig
+
+from packaging.util import resolve_name
+from packaging.errors import PackagingPlatformError
+from packaging import logger
+
+def customize_compiler(compiler):
+    """Do any platform-specific customization of a CCompiler instance.
+
+    Mainly needed on Unix, so we can plug in the information that
+    varies across Unices and is stored in Python's Makefile.
+    """
+    if compiler.name == "unix":
+        cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags = (
+            sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
+                                      'CCSHARED', 'LDSHARED', 'SO', 'AR',
+                                      'ARFLAGS'))
+
+        if 'CC' in os.environ:
+            cc = os.environ['CC']
+        if 'CXX' in os.environ:
+            cxx = os.environ['CXX']
+        if 'LDSHARED' in os.environ:
+            ldshared = os.environ['LDSHARED']
+        if 'CPP' in os.environ:
+            cpp = os.environ['CPP']
+        else:
+            cpp = cc + " -E"           # not always
+        if 'LDFLAGS' in os.environ:
+            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
+        if 'CFLAGS' in os.environ:
+            cflags = opt + ' ' + os.environ['CFLAGS']
+            ldshared = ldshared + ' ' + os.environ['CFLAGS']
+        if 'CPPFLAGS' in os.environ:
+            cpp = cpp + ' ' + os.environ['CPPFLAGS']
+            cflags = cflags + ' ' + os.environ['CPPFLAGS']
+            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
+        if 'AR' in os.environ:
+            ar = os.environ['AR']
+        if 'ARFLAGS' in os.environ:
+            archiver = ar + ' ' + os.environ['ARFLAGS']
+        else:
+            if ar_flags is not None:
+                archiver = ar + ' ' + ar_flags
+            else:
+                # fall back to a reasonable default value rather than
+                # digging ARFLAGS out of the Makefile
+                archiver = ar + ' rc'
+
+        cc_cmd = cc + ' ' + cflags
+        compiler.set_executables(
+            preprocessor=cpp,
+            compiler=cc_cmd,
+            compiler_so=cc_cmd + ' ' + ccshared,
+            compiler_cxx=cxx,
+            linker_so=ldshared,
+            linker_exe=cc,
+            archiver=archiver)
+
+        compiler.shared_lib_extension = so_ext
+
+
+# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
+# type for that platform. Keys are interpreted as re match
+# patterns. Order is important; platform mappings are preferred over
+# OS names.
+_default_compilers = (
+
+    # Platform string mappings
+
+    # on a cygwin built python we can use gcc like an ordinary UNIXish
+    # compiler
+    ('cygwin.*', 'unix'),
+    ('os2emx', 'emx'),
+
+    # OS name mappings
+    ('posix', 'unix'),
+    ('nt', 'msvc'),
+
+    )
+
+def get_default_compiler(osname=None, platform=None):
+    """Determine the default compiler to use for the given platform.
+
+    osname should be one of the standard Python OS names (i.e. the
+    ones returned by os.name) and platform the common value
+    returned by sys.platform for the platform in question.
+
+    If the parameters are not given, they default to os.name and
+    sys.platform.
+    """
+    if osname is None:
+        osname = os.name
+    if platform is None:
+        platform = sys.platform
+    for pattern, compiler in _default_compilers:
+        if re.match(pattern, platform) is not None or \
+           re.match(pattern, osname) is not None:
+            return compiler
+    # Defaults to Unix compiler
+    return 'unix'
+
+
+# compiler mapping
+# XXX useful to expose them? (i.e. get_compiler_names)
+_COMPILERS = {
+    'unix': 'packaging.compiler.unixccompiler.UnixCCompiler',
+    'msvc': 'packaging.compiler.msvccompiler.MSVCCompiler',
+    'cygwin': 'packaging.compiler.cygwinccompiler.CygwinCCompiler',
+    'mingw32': 'packaging.compiler.cygwinccompiler.Mingw32CCompiler',
+    'bcpp': 'packaging.compiler.bcppcompiler.BCPPCompiler',
+}
+
+def set_compiler(location):
+    """Add or change a compiler"""
+    cls = resolve_name(location)
+    # XXX we want to check the class here
+    _COMPILERS[cls.name] = cls
+
+
+def show_compilers():
+    """Print list of available compilers (used by the "--help-compiler"
+    options to "build", "build_ext", "build_clib").
+    """
+    from packaging.fancy_getopt import FancyGetopt
+    compilers = []
+
+    for name, cls in _COMPILERS.items():
+        if isinstance(cls, str):
+            cls = resolve_name(cls)
+            _COMPILERS[name] = cls
+
+        compilers.append(("compiler=" + name, None, cls.description))
+
+    compilers.sort()
+    pretty_printer = FancyGetopt(compilers)
+    pretty_printer.print_help("List of available compilers:")
+
+
+def new_compiler(plat=None, compiler=None, verbose=0, dry_run=False,
+                 force=False):
+    """Generate an instance of some CCompiler subclass for the supplied
+    platform/compiler combination.  'plat' defaults to 'os.name'
+    (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
+    for that platform.  Currently only 'posix' and 'nt' are supported, and
+    the default compilers are "traditional Unix interface" (UnixCCompiler
+    class) and Visual C++ (MSVCCompiler class).  Note that it's perfectly
+    possible to ask for a Unix compiler object under Windows, and a
+    Microsoft compiler object under Unix -- if you supply a value for
+    'compiler', 'plat' is ignored.
+    """
+    if plat is None:
+        plat = os.name
+
+    try:
+        if compiler is None:
+            compiler = get_default_compiler(plat)
+
+        cls = _COMPILERS[compiler]
+    except KeyError:
+        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
+        if compiler is not None:
+            msg = msg + " with '%s' compiler" % compiler
+        raise PackagingPlatformError(msg)
+
+    if isinstance(cls, str):
+        cls = resolve_name(cls)
+        _COMPILERS[compiler] = cls
+
+
+    # with classes that expect verbose to be the first positional
+    # argument.
+    return cls(None, dry_run, force)
+
+
+def gen_preprocess_options(macros, include_dirs):
+    """Generate C pre-processor options (-D, -U, -I) as used by at least
+    two types of compilers: the typical Unix compiler and Visual C++.
+    'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
+    means undefine (-U) macro 'name', and (name,value) means define (-D)
+    macro 'name' to 'value'.  'include_dirs' is just a list of directory
+    names to be added to the header file search path (-I).  Returns a list
+    of command-line options suitable for either Unix compilers or Visual
+    C++.
+    """
+    # XXX it would be nice (mainly aesthetic, and so we don't generate
+    # stupid-looking command lines) to go over 'macros' and eliminate
+    # redundant definitions/undefinitions (ie. ensure that only the
+    # latest mention of a particular macro winds up on the command
+    # line).  I don't think it's essential, though, since most (all?)
+    # Unix C compilers only pay attention to the latest -D or -U
+    # mention of a macro on their command line.  Similar situation for
+    # 'include_dirs'.  I'm punting on both for now.  Anyways, weeding out
+    # redundancies like this should probably be the province of
+    # CCompiler, since the data structures used are inherited from it
+    # and therefore common to all CCompiler classes.
+
+    pp_opts = []
+    for macro in macros:
+
+        if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
+            raise TypeError(
+                "bad macro definition '%s': each element of 'macros' "
+                "list must be a 1- or 2-tuple" % macro)
+
+        if len(macro) == 1:        # undefine this macro
+            pp_opts.append("-U%s" % macro[0])
+        elif len(macro) == 2:
+            if macro[1] is None:    # define with no explicit value
+                pp_opts.append("-D%s" % macro[0])
+            else:
+                # XXX *don't* need to be clever about quoting the
+                # macro value here, because we're going to avoid the
+                # shell at all costs when we spawn the command!
+                pp_opts.append("-D%s=%s" % macro)
+
+    for dir in include_dirs:
+        pp_opts.append("-I%s" % dir)
+
+    return pp_opts
+
+
+def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
+    """Generate linker options for searching library directories and
+    linking with specific libraries.
+
+    'libraries' and 'library_dirs' are, respectively, lists of library names
+    (not filenames!) and search directories.  Returns a list of command-line
+    options suitable for use with some compiler (depending on the two format
+    strings passed in).
+    """
+    lib_opts = []
+
+    for dir in library_dirs:
+        lib_opts.append(compiler.library_dir_option(dir))
+
+    for dir in runtime_library_dirs:
+        opt = compiler.runtime_library_dir_option(dir)
+        if isinstance(opt, list):
+            lib_opts.extend(opt)
+        else:
+            lib_opts.append(opt)
+
+    # XXX it's important that we *not* remove redundant library mentions!
+    # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
+    # resolve all symbols.  I just hope we never have to say "-lfoo obj.o
+    # -lbar" to get things to work -- that's certainly a possibility, but a
+    # pretty nasty way to arrange your C code.
+
+    for lib in libraries:
+        lib_dir, lib_name = os.path.split(lib)
+        if lib_dir != '':
+            lib_file = compiler.find_library_file([lib_dir], lib_name)
+            if lib_file is not None:
+                lib_opts.append(lib_file)
+            else:
+                logger.warning("no library file corresponding to "
+                              "'%s' found (skipping)" % lib)
+        else:
+            lib_opts.append(compiler.library_option(lib))
+
+    return lib_opts
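
Aside (not part of the patch): gen_preprocess_options() maps macro tuples
and include directories to -D/-U/-I flags.  Expected behaviour, assuming the
module is importable as packaging.compiler:

    from packaging.compiler import gen_preprocess_options

    macros = [('NDEBUG', None),      # 2-tuple, value None: -DNDEBUG
              ('VERSION', '"1.0"'),  # 2-tuple with value: -DVERSION="1.0"
              ('TRACE',)]            # 1-tuple means undefine: -UTRACE
    include_dirs = ['include', '/usr/local/include']

    print(gen_preprocess_options(macros, include_dirs))
    # ['-DNDEBUG', '-DVERSION="1.0"', '-UTRACE',
    #  '-Iinclude', '-I/usr/local/include']
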
diff --git a/Lib/packaging/compiler/bcppcompiler.py b/Lib/packaging/compiler/bcppcompiler.py
new file mode 100644
index 0000000..63b6d8b
--- /dev/null
+++ b/Lib/packaging/compiler/bcppcompiler.py
@@ -0,0 +1,356 @@
+"""CCompiler implementation for the Borland C++ compiler."""
+
+# This implementation by Lyle Johnson, based on the original msvccompiler.py
+# module and using the directions originally published by Gordon Williams.
+
+# XXX looks like there's a LOT of overlap between these two classes:
+# someone should sit down and factor out the common code as
+# WindowsCCompiler!  --GPW
+
+import os
+
+from packaging.errors import (PackagingExecError, CompileError, LibError,
+                              LinkError, UnknownFileError)
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_preprocess_options
+from packaging.file_util import write_file
+from packaging.dep_util import newer
+from packaging import logger
+
+
+class BCPPCompiler(CCompiler):
+    """Concrete class that implements an interface to the Borland C/C++
+    compiler, as defined by the CCompiler abstract class.
+    """
+
+    name = 'bcpp'
+    description = 'Borland C++ Compiler'
+
+    # Just set this so CCompiler's constructor doesn't barf.  We currently
+    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+    # as it really isn't necessary for this sort of single-compiler class.
+    # Would be nice to have a consistent interface with UnixCCompiler,
+    # though, so it's worth thinking about.
+    executables = {}
+
+    # Private class data (need to distinguish C from C++ source for compiler)
+    _c_extensions = ['.c']
+    _cpp_extensions = ['.cc', '.cpp', '.cxx']
+
+    # Needed for the filename generation methods provided by the
+    # base class, CCompiler.
+    src_extensions = _c_extensions + _cpp_extensions
+    obj_extension = '.obj'
+    static_lib_extension = '.lib'
+    shared_lib_extension = '.dll'
+    static_lib_format = shared_lib_format = '%s%s'
+    exe_extension = '.exe'
+
+
+    def __init__(self, verbose=0, dry_run=False, force=False):
+        CCompiler.__init__(self, verbose, dry_run, force)
+
+        # These executables are assumed to all be in the path.
+        # Borland doesn't seem to use any special registry settings to
+        # indicate their installation locations.
+
+        self.cc = "bcc32.exe"
+        self.linker = "ilink32.exe"
+        self.lib = "tlib.exe"
+
+        self.preprocess_options = None
+        self.compile_options = ['/tWM', '/O2', '/q', '/g0']
+        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']
+
+        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
+        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
+        self.ldflags_static = []
+        self.ldflags_exe = ['/Gn', '/q', '/x']
+        self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r']
+
+
+    # -- Worker methods ------------------------------------------------
+
+    def compile(self, sources,
+                output_dir=None, macros=None, include_dirs=None, debug=False,
+                extra_preargs=None, extra_postargs=None, depends=None):
+
+        macros, objects, extra_postargs, pp_opts, build = \
+                self._setup_compile(output_dir, macros, include_dirs, sources,
+                                    depends, extra_postargs)
+        compile_opts = extra_preargs or []
+        compile_opts.append('-c')
+        if debug:
+            compile_opts.extend(self.compile_options_debug)
+        else:
+            compile_opts.extend(self.compile_options)
+
+        for obj in objects:
+            try:
+                src, ext = build[obj]
+            except KeyError:
+                continue
+            # XXX why do the normpath here?
+            src = os.path.normpath(src)
+            obj = os.path.normpath(obj)
+            # XXX _setup_compile() did a mkpath() too but before the normpath.
+            # Is it possible to skip the normpath?
+            self.mkpath(os.path.dirname(obj))
+
+            if ext == '.res':
+                # This is already a binary file -- skip it.
+                continue # the 'for' loop
+            if ext == '.rc':
+                # This needs to be compiled to a .res file -- do it now.
+                try:
+                    self.spawn(["brcc32", "-fo", obj, src])
+                except PackagingExecError as msg:
+                    raise CompileError(msg)
+                continue # the 'for' loop
+
+            # The next two are both for the real compiler.
+            if ext in self._c_extensions:
+                input_opt = ""
+            elif ext in self._cpp_extensions:
+                input_opt = "-P"
+            else:
+                # Unknown file type -- no extra options.  The compiler
+                # will probably fail, but let it just in case this is a
+                # file the compiler recognizes even if we don't.
+                input_opt = ""
+
+            output_opt = "-o" + obj
+
+            # Compiler command line syntax is: "bcc32 [options] file(s)".
+            # Note that the source file names must appear at the end of
+            # the command line.
+            try:
+                self.spawn([self.cc] + compile_opts + pp_opts +
+                           [input_opt, output_opt] +
+                           extra_postargs + [src])
+            except PackagingExecError as msg:
+                raise CompileError(msg)
+
+        return objects
+
+
+    def create_static_lib(self, objects, output_libname, output_dir=None,
+                          debug=False, target_lang=None):
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        output_filename = \
+            self.library_filename(output_libname, output_dir=output_dir)
+
+        if self._need_link(objects, output_filename):
+            lib_args = [output_filename, '/u'] + objects
+            if debug:
+                pass                    # XXX what goes here?
+            try:
+                self.spawn([self.lib] + lib_args)
+            except PackagingExecError as msg:
+                raise LibError(msg)
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+
+    def link(self, target_desc, objects, output_filename, output_dir=None,
+             libraries=None, library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=False, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+
+        # XXX this ignores 'build_temp'!  should follow the lead of
+        # msvccompiler.py
+
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        libraries, library_dirs, runtime_library_dirs = \
+            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+        if runtime_library_dirs:
+            logger.warning("don't know what to do with "
+                           "'runtime_library_dirs': %r", runtime_library_dirs)
+
+        if output_dir is not None:
+            output_filename = os.path.join(output_dir, output_filename)
+
+        if self._need_link(objects, output_filename):
+
+            # Figure out linker args based on type of target.
+            if target_desc == CCompiler.EXECUTABLE:
+                startup_obj = 'c0w32'
+                if debug:
+                    ld_args = self.ldflags_exe_debug[:]
+                else:
+                    ld_args = self.ldflags_exe[:]
+            else:
+                startup_obj = 'c0d32'
+                if debug:
+                    ld_args = self.ldflags_shared_debug[:]
+                else:
+                    ld_args = self.ldflags_shared[:]
+
+
+            # Create a temporary exports file for use by the linker
+            if export_symbols is None:
+                def_file = ''
+            else:
+                head, tail = os.path.split(output_filename)
+                modname, ext = os.path.splitext(tail)
+                temp_dir = os.path.dirname(objects[0]) # preserve tree structure
+                def_file = os.path.join(temp_dir, '%s.def' % modname)
+                contents = ['EXPORTS']
+                for sym in (export_symbols or []):
+                    contents.append('  %s=_%s' % (sym, sym))
+                self.execute(write_file, (def_file, contents),
+                             "writing %s" % def_file)
+
+            # Borland C++ has problems with '/' in paths
+            objects2 = [os.path.normpath(o) for o in objects]
+            # split objects in .obj and .res files
+            # Borland C++ needs them at different positions in the command line
+            objects = [startup_obj]
+            resources = []
+            for file in objects2:
+                base, ext = os.path.splitext(os.path.normcase(file))
+                if ext == '.res':
+                    resources.append(file)
+                else:
+                    objects.append(file)
+
+
+            for ldir in library_dirs:
+                ld_args.append("/L%s" % os.path.normpath(ldir))
+            ld_args.append("/L.")  # we sometimes use relative paths
+
+            # list of object files
+            ld_args.extend(objects)
+
+            # XXX the command line syntax for Borland C++ is a bit wonky;
+            # certain filenames are jammed together in one big string, but
+            # comma-delimited.  This doesn't mesh too well with the
+            # Unix-centric attitude (with a DOS/Windows quoting hack) of
+            # 'spawn()', so constructing the argument list is a bit
+            # awkward.  Note that doing the obvious thing and jamming all
+            # the filenames and commas into one argument would be wrong,
+            # because 'spawn()' would quote any filenames with spaces in
+            # them.  Arghghh!  Apparently it works fine as coded...
+
+            # name of dll/exe file
+            ld_args.extend((',', output_filename))
+            # no map file and start libraries
+            ld_args.append(',,')
+
+            for lib in libraries:
+                # see if we find it and if there is a bcpp specific lib
+                # (xxx_bcpp.lib)
+                libfile = self.find_library_file(library_dirs, lib, debug)
+                if libfile is None:
+                    ld_args.append(lib)
+                    # probably a BCPP internal library -- don't warn
+                else:
+                    # full name which prefers bcpp_xxx.lib over xxx.lib
+                    ld_args.append(libfile)
+
+            # some default libraries
+            ld_args.append('import32')
+            ld_args.append('cw32mt')
+
+            # def file for export symbols
+            ld_args.extend((',', def_file))
+            # add resource files
+            ld_args.append(',')
+            ld_args.extend(resources)
+
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+
+            self.mkpath(os.path.dirname(output_filename))
+            try:
+                self.spawn([self.linker] + ld_args)
+            except PackagingExecError as msg:
+                raise LinkError(msg)
+
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    # -- Miscellaneous methods -----------------------------------------
+
+
+    def find_library_file(self, dirs, lib, debug=False):
+        # List of effective library names to try, in order of preference:
+        # xxx_bcpp.lib is better than xxx.lib
+        # and xxx_d.lib is better than xxx.lib if debug is set
+        #
+        # The "_bcpp" suffix is to handle a Python installation for people
+        # with multiple compilers (primarily Packaging hackers, I suspect
+        # ;-).  The idea is they'd have one static library for each
+        # compiler they care about, since (almost?) every Windows compiler
+        # seems to have a different format for static libraries.
+        if debug:
+            dlib = (lib + "_d")
+            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
+        else:
+            try_names = (lib + "_bcpp", lib)
+
+        for dir in dirs:
+            for name in try_names:
+                libfile = os.path.join(dir, self.library_filename(name))
+                if os.path.exists(libfile):
+                    return libfile
+        # Oops, didn't find it in *any* of 'dirs'
+        return None
+
+    # overwrite the one from CCompiler to support rc and res-files
+    def object_filenames(self, source_filenames, strip_dir=False,
+                         output_dir=''):
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+            base, ext = os.path.splitext(os.path.normcase(src_name))
+            if ext not in (self.src_extensions + ['.rc', '.res']):
+                raise UnknownFileError("unknown file type '%s' (from '%s')" %
+                                       (ext, src_name))
+            if strip_dir:
+                base = os.path.basename(base)
+            if ext == '.res':
+                # these can go unchanged
+                obj_names.append(os.path.join(output_dir, base + ext))
+            elif ext == '.rc':
+                # these need to be compiled to .res-files
+                obj_names.append(os.path.join(output_dir, base + '.res'))
+            else:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.obj_extension))
+        return obj_names
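+    # A sketch of the resulting mapping, assuming the usual '.obj'
+    # obj_extension and hypothetical filenames:
+    #   ['foo.c', 'bar.rc', 'baz.res']
+    #     -> ['foo.obj', 'bar.res', 'baz.res']
+    # '.rc' sources are renamed to the '.res' files the resource compiler
+    # will produce, '.res' files pass through unchanged, and everything
+    # else gets obj_extension.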
+
+
+    def preprocess(self, source, output_file=None, macros=None,
+                   include_dirs=None, extra_preargs=None,
+                   extra_postargs=None):
+        _, macros, include_dirs = \
+            self._fix_compile_args(None, macros, include_dirs)
+        pp_opts = gen_preprocess_options(macros, include_dirs)
+        pp_args = ['cpp32.exe'] + pp_opts
+        if output_file is not None:
+            pp_args.append('-o' + output_file)
+        if extra_preargs:
+            pp_args[:0] = extra_preargs
+        if extra_postargs:
+            pp_args.extend(extra_postargs)
+        pp_args.append(source)
+
+        # We need to preprocess: either we're being forced to, or the
+        # source file is newer than the target (or the target doesn't
+        # exist).
+        if self.force or output_file is None or newer(source, output_file):
+            if output_file:
+                self.mkpath(os.path.dirname(output_file))
+            try:
+                self.spawn(pp_args)
+            except PackagingExecError as msg:
+                raise CompileError(msg)
diff --git a/Lib/packaging/compiler/ccompiler.py b/Lib/packaging/compiler/ccompiler.py
new file mode 100644
index 0000000..ef806a2
--- /dev/null
+++ b/Lib/packaging/compiler/ccompiler.py
@@ -0,0 +1,865 @@
+"""Abstract base class for compilers.
+
+This modules contains CCompiler, an abstract base class that defines the
+interface for the compiler abstraction model used by packaging.
+"""
+
+import os
+import sys
+from shutil import move
+from packaging import logger
+from packaging.util import split_quoted, execute, newer_group, spawn
+from packaging.errors import (CompileError, LinkError, UnknownFileError)
+from packaging.compiler import gen_preprocess_options
+
+
+class CCompiler:
+    """Abstract base class to define the interface that must be implemented
+    by real compiler classes.  Also has some utility methods used by
+    several compiler classes.
+
+    The basic idea behind a compiler abstraction class is that each
+    instance can be used for all the compile/link steps in building a
+    single project.  Thus, attributes common to all of those compile and
+    link steps -- include directories, macros to define, libraries to link
+    against, etc. -- are attributes of the compiler instance.  To allow for
+    variability in how individual files are treated, most of those
+    attributes may be varied on a per-compilation or per-link basis.
+    """
+
+    # 'name' is a class attribute that identifies this class.  It
+    # keeps code that wants to know what kind of compiler it's dealing with
+    # from having to import all possible compiler classes just to do an
+    # 'isinstance'.
+    name = None
+    description = None
+
+    # XXX things not handled by this compiler abstraction model:
+    #   * client can't provide additional options for a compiler,
+    #     e.g. warning, optimization, debugging flags.  Perhaps this
+    #     should be the domain of concrete compiler abstraction classes
+    #     (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
+    #     class should have methods for the common ones.
+    #   * can't completely override the include or library search
+    #     path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
+    #     I'm not sure how widely supported this is even by Unix
+    #     compilers, much less on other platforms.  And I'm even less
+    #     sure how useful it is; maybe for cross-compiling, but
+    #     support for that is a ways off.  (And anyways, cross
+    #     compilers probably have a dedicated binary with the
+    #     right paths compiled in.  I hope.)
+    #   * can't do really freaky things with the library list/library
+    #     dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
+    #     different versions of libfoo.a in different locations.  I
+    #     think this is useless without the ability to null out the
+    #     library search path anyways.
+
+
+    # Subclasses that rely on the standard filename generation methods
+    # implemented below should override these; see the comment near
+    # those methods ('object_filenames()' et al.) for details:
+    src_extensions = None               # list of strings
+    obj_extension = None                # string
+    static_lib_extension = None
+    shared_lib_extension = None         # string
+    static_lib_format = None            # format string
+    shared_lib_format = None            # prob. same as static_lib_format
+    exe_extension = None                # string
+
+    # Default language settings. language_map is used to detect a source
+    # file or Extension target language, checking source filenames.
+    # language_order is used to detect the language precedence, when deciding
+    # what language to use when mixing source types. For example, if some
+    # extension has two files with ".c" extension, and one with ".cpp", it
+    # is still linked as c++.
+    language_map = {".c": "c",
+                    ".cc": "c++",
+                    ".cpp": "c++",
+                    ".cxx": "c++",
+                    ".m": "objc",
+                   }
+    language_order = ["c++", "objc", "c"]
+
+    def __init__(self, verbose=0, dry_run=False, force=False):
+        self.dry_run = dry_run
+        self.force = force
+        self.verbose = verbose
+
+        # 'output_dir': a common output directory for object, library,
+        # shared object, and shared library files
+        self.output_dir = None
+
+        # 'macros': a list of macro definitions (or undefinitions).  A
+        # macro definition is a 2-tuple (name, value), where the value is
+        # either a string or None (no explicit value).  A macro
+        # undefinition is a 1-tuple (name,).
+        self.macros = []
+
+        # 'include_dirs': a list of directories to search for include files
+        self.include_dirs = []
+
+        # 'libraries': a list of libraries to include in any link
+        # (library names, not filenames: eg. "foo" not "libfoo.a")
+        self.libraries = []
+
+        # 'library_dirs': a list of directories to search for libraries
+        self.library_dirs = []
+
+        # 'runtime_library_dirs': a list of directories to search for
+        # shared libraries/objects at runtime
+        self.runtime_library_dirs = []
+
+        # 'objects': a list of object files (or similar, such as explicitly
+        # named library files) to include on any link
+        self.objects = []
+
+        for key, value in self.executables.items():
+            self.set_executable(key, value)
+
+    def set_executables(self, **args):
+        """Define the executables (and options for them) that will be run
+        to perform the various stages of compilation.  The exact set of
+        executables that may be specified here depends on the compiler
+        class (via the 'executables' class attribute), but most will have:
+          compiler      the C/C++ compiler
+          linker_so     linker used to create shared objects and libraries
+          linker_exe    linker used to create binary executables
+          archiver      static library creator
+
+        On platforms with a command line (Unix, DOS/Windows), each of these
+        is a string that will be split into executable name and (optional)
+        list of arguments.  (Splitting the string is done similarly to how
+        Unix shells operate: words are delimited by spaces, but quotes and
+        backslashes can override this.  See
+        'packaging.util.split_quoted()'.)
+        """
+
+        # Note that some CCompiler implementation classes will define class
+        # attributes 'cpp', 'cc', etc. with hard-coded executable names;
+        # this is appropriate when a compiler class is for exactly one
+        # compiler/OS combination (eg. MSVCCompiler).  Other compiler
+        # classes (UnixCCompiler, in particular) are driven by information
+        # discovered at run-time, since there are many different ways to do
+        # basically the same things with Unix C compilers.
+
+        for key, value in args.items():
+            if key not in self.executables:
+                raise ValueError("unknown executable '%s' for class %s" %
+                                 (key, self.__class__.__name__))
+            self.set_executable(key, value)
+
+    def set_executable(self, key, value):
+        if isinstance(value, str):
+            setattr(self, key, split_quoted(value))
+        else:
+            setattr(self, key, value)
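+    # For illustration (hypothetical values): a string is split like a
+    # shell command line,
+    #
+    #   self.set_executable('linker_so', 'gcc -shared -g')
+    #   # now self.linker_so == ['gcc', '-shared', '-g']
+    #
+    # while a list or other non-string value is stored unchanged.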
+
+    def _find_macro(self, name):
+        for i, defn in enumerate(self.macros):
+            if defn[0] == name:
+                return i
+        return None
+
+    def _check_macro_definitions(self, definitions):
+        """Ensures that every element of 'definitions' is a valid macro
+        definition, ie. either a (name, value) 2-tuple or a (name,) 1-tuple.  Do
+        nothing if all definitions are OK, raise TypeError otherwise.
+        """
+        for defn in definitions:
+            if not (isinstance(defn, tuple) and
+                    (len(defn) == 1 or
+                     (len(defn) == 2 and
+                      (isinstance(defn[1], str) or defn[1] is None))) and
+                    isinstance(defn[0], str)):
+                raise TypeError(("invalid macro definition '%s': " % defn) +
+                                "must be tuple (string,), (string, string), "
+                                "or (string, None)")
+
+
+    # -- Bookkeeping methods -------------------------------------------
+
+    def define_macro(self, name, value=None):
+        """Define a preprocessor macro for all compilations driven by this
+        compiler object.  The optional parameter 'value' should be a
+        string; if it is not supplied, then the macro will be defined
+        without an explicit value and the exact outcome depends on the
+        compiler used (XXX true? does ANSI say anything about this?)
+        """
+        # Delete from the list of macro definitions/undefinitions if
+        # already there (so that this one will take precedence).
+        i = self._find_macro(name)
+        if i is not None:
+            del self.macros[i]
+
+        defn = (name, value)
+        self.macros.append(defn)
+
+    def undefine_macro(self, name):
+        """Undefine a preprocessor macro for all compilations driven by
+        this compiler object.  If the same macro is defined by
+        'define_macro()' and undefined by 'undefine_macro()' the last call
+        takes precedence (including multiple redefinitions or
+        undefinitions).  If the macro is redefined/undefined on a
+        per-compilation basis (ie. in the call to 'compile()'), then that
+        takes precedence.
+        """
+        # Delete from the list of macro definitions/undefinitions if
+        # already there (so that this one will take precedence).
+        i = self._find_macro(name)
+        if i is not None:
+            del self.macros[i]
+
+        undefn = (name,)
+        self.macros.append(undefn)
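+    # Taken together, the later call wins.  For example, on a hypothetical
+    # compiler instance 'cc':
+    #
+    #   cc.define_macro('VERSION', '"1.0"')   # macros: [('VERSION', '"1.0"')]
+    #   cc.undefine_macro('VERSION')          # macros: [('VERSION',)]
+    #
+    # so the generated preprocessor options will undefine VERSION rather
+    # than define it.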
+
+    def add_include_dir(self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        header files.  The compiler is instructed to search directories in
+        the order in which they are supplied by successive calls to
+        'add_include_dir()'.
+        """
+        self.include_dirs.append(dir)
+
+    def set_include_dirs(self, dirs):
+        """Set the list of directories that will be searched to 'dirs' (a
+        list of strings).  Overrides any preceding calls to
+        'add_include_dir()'; subsequent calls to 'add_include_dir()' add
+        to the list passed to 'set_include_dirs()'.  This does not affect
+        any list of standard include directories that the compiler may
+        search by default.
+        """
+        self.include_dirs = dirs[:]
+
+    def add_library(self, libname):
+        """Add 'libname' to the list of libraries that will be included in
+        all links driven by this compiler object.  Note that 'libname'
+        should *not* be the name of a file containing a library, but the
+        name of the library itself: the actual filename will be inferred by
+        the linker, the compiler, or the compiler class (depending on the
+        platform).
+
+        The linker will be instructed to link against libraries in the
+        order they were supplied to 'add_library()' and/or
+        'set_libraries()'.  It is perfectly valid to duplicate library
+        names; the linker will be instructed to link against libraries as
+        many times as they are mentioned.
+        """
+        self.libraries.append(libname)
+
+    def set_libraries(self, libnames):
+        """Set the list of libraries to be included in all links driven by
+        this compiler object to 'libnames' (a list of strings).  This does
+        not affect any standard system libraries that the linker may
+        include by default.
+        """
+        self.libraries = libnames[:]
+
+
+    def add_library_dir(self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        libraries specified to 'add_library()' and 'set_libraries()'.  The
+        linker will be instructed to search for libraries in the order they
+        are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
+        """
+        self.library_dirs.append(dir)
+
+    def set_library_dirs(self, dirs):
+        """Set the list of library search directories to 'dirs' (a list of
+        strings).  This does not affect any standard library search path
+        that the linker may search by default.
+        """
+        self.library_dirs = dirs[:]
+
+    def add_runtime_library_dir(self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        shared libraries at runtime.
+        """
+        self.runtime_library_dirs.append(dir)
+
+    def set_runtime_library_dirs(self, dirs):
+        """Set the list of directories to search for shared libraries at
+        runtime to 'dirs' (a list of strings).  This does not affect any
+        standard search path that the runtime linker may search by
+        default.
+        """
+        self.runtime_library_dirs = dirs[:]
+
+    def add_link_object(self, object):
+        """Add 'object' to the list of object files (or analogues, such as
+        explicitly named library files or the output of "resource
+        compilers") to be included in every link driven by this compiler
+        object.
+        """
+        self.objects.append(object)
+
+    def set_link_objects(self, objects):
+        """Set the list of object files (or analogues) to be included in
+        every link to 'objects'.  This does not affect any standard object
+        files that the linker may include by default (such as system
+        libraries).
+        """
+        self.objects = objects[:]
+
+
+    # -- Private utility methods --------------------------------------
+    # (here for the convenience of subclasses)
+
+    # Helper method to prep compiler in subclass compile() methods
+    def _setup_compile(self, outdir, macros, incdirs, sources, depends,
+                       extra):
+        """Process arguments and decide which source files to compile."""
+        if outdir is None:
+            outdir = self.output_dir
+        elif not isinstance(outdir, str):
+            raise TypeError("'output_dir' must be a string or None")
+
+        if macros is None:
+            macros = self.macros
+        elif isinstance(macros, list):
+            macros = macros + (self.macros or [])
+        else:
+            raise TypeError("'macros' (if supplied) must be a list of tuples")
+
+        if incdirs is None:
+            incdirs = self.include_dirs
+        elif isinstance(incdirs, (list, tuple)):
+            incdirs = list(incdirs) + (self.include_dirs or [])
+        else:
+            raise TypeError(
+                "'include_dirs' (if supplied) must be a list of strings")
+
+        if extra is None:
+            extra = []
+
+        # Get the list of expected output (object) files
+        objects = self.object_filenames(sources,
+                                        strip_dir=False,
+                                        output_dir=outdir)
+        assert len(objects) == len(sources)
+
+        pp_opts = gen_preprocess_options(macros, incdirs)
+
+        build = {}
+        for obj, src in zip(objects, sources):
+            ext = os.path.splitext(src)[1]
+            self.mkpath(os.path.dirname(obj))
+            build[obj] = (src, ext)
+
+        return macros, objects, extra, pp_opts, build
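+    # For example (hypothetical inputs): sources == ['src/foo.c'] with
+    # outdir == 'build' yields objects == ['build/src/foo.o'] on a
+    # Unix-style subclass, and build == {'build/src/foo.o':
+    # ('src/foo.c', '.c')}, which compile() then walks object by object.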
+
+    def _get_cc_args(self, pp_opts, debug, before):
+        # works for unixccompiler, emxccompiler, cygwinccompiler
+        cc_args = pp_opts + ['-c']
+        if debug:
+            cc_args[:0] = ['-g']
+        if before:
+            cc_args[:0] = before
+        return cc_args
+
+    def _fix_compile_args(self, output_dir, macros, include_dirs):
+        """Typecheck and fix-up some of the arguments to the 'compile()'
+        method, and return fixed-up values.  Specifically: if 'output_dir'
+        is None, replaces it with 'self.output_dir'; ensures that 'macros'
+        is a list, and augments it with 'self.macros'; ensures that
+        'include_dirs' is a list, and augments it with 'self.include_dirs'.
+        Guarantees that the returned values are of the correct type,
+        i.e. for 'output_dir' either string or None, and for 'macros' and
+        'include_dirs' either list or None.
+        """
+        if output_dir is None:
+            output_dir = self.output_dir
+        elif not isinstance(output_dir, str):
+            raise TypeError("'output_dir' must be a string or None")
+
+        if macros is None:
+            macros = self.macros
+        elif isinstance(macros, list):
+            macros = macros + (self.macros or [])
+        else:
+            raise TypeError("'macros' (if supplied) must be a list of tuples")
+
+        if include_dirs is None:
+            include_dirs = self.include_dirs
+        elif isinstance(include_dirs, (list, tuple)):
+            include_dirs = list(include_dirs) + (self.include_dirs or [])
+        else:
+            raise TypeError(
+                "'include_dirs' (if supplied) must be a list of strings")
+
+        return output_dir, macros, include_dirs
+
+    def _fix_object_args(self, objects, output_dir):
+        """Typecheck and fix up some arguments supplied to various methods.
+        Specifically: ensure that 'objects' is a list; if output_dir is
+        None, replace with self.output_dir.  Return fixed versions of
+        'objects' and 'output_dir'.
+        """
+        if not isinstance(objects, (list, tuple)):
+            raise TypeError("'objects' must be a list or tuple of strings")
+        objects = list(objects)
+
+        if output_dir is None:
+            output_dir = self.output_dir
+        elif not isinstance(output_dir, str):
+            raise TypeError("'output_dir' must be a string or None")
+
+        return objects, output_dir
+
+    def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+        """Typecheck and fix up some of the arguments supplied to the
+        'link_*' methods.  Specifically: ensure that all arguments are
+        lists, and augment them with their permanent versions
+        (eg. 'self.libraries' augments 'libraries').  Return a tuple with
+        fixed versions of all arguments.
+        """
+        if libraries is None:
+            libraries = self.libraries
+        elif isinstance(libraries, (list, tuple)):
+            libraries = list(libraries) + (self.libraries or [])
+        else:
+            raise TypeError(
+                "'libraries' (if supplied) must be a list of strings")
+
+        if library_dirs is None:
+            library_dirs = self.library_dirs
+        elif isinstance(library_dirs, (list, tuple)):
+            library_dirs = list(library_dirs) + (self.library_dirs or [])
+        else:
+            raise TypeError(
+                "'library_dirs' (if supplied) must be a list of strings")
+
+        if runtime_library_dirs is None:
+            runtime_library_dirs = self.runtime_library_dirs
+        elif isinstance(runtime_library_dirs, (list, tuple)):
+            runtime_library_dirs = (list(runtime_library_dirs) +
+                                    (self.runtime_library_dirs or []))
+        else:
+            raise TypeError("'runtime_library_dirs' (if supplied) "
+                            "must be a list of strings")
+
+        return libraries, library_dirs, runtime_library_dirs
+
+    def _need_link(self, objects, output_file):
+        """Return true if we need to relink the files listed in 'objects'
+        to recreate 'output_file'.
+        """
+        if self.force:
+            return True
+        else:
+            if self.dry_run:
+                newer = newer_group(objects, output_file, missing='newer')
+            else:
+                newer = newer_group(objects, output_file)
+            return newer
+
+    def detect_language(self, sources):
+        """Detect the language of a given file, or list of files. Uses
+        language_map, and language_order to do the job.
+        """
+        if not isinstance(sources, list):
+            sources = [sources]
+        lang = None
+        index = len(self.language_order)
+        for source in sources:
+            base, ext = os.path.splitext(source)
+            extlang = self.language_map.get(ext)
+            try:
+                extindex = self.language_order.index(extlang)
+                if extindex < index:
+                    lang = extlang
+                    index = extindex
+            except ValueError:
+                pass
+        return lang
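+    # For example, detect_language(['a.c', 'b.c', 'c.cpp']) returns 'c++',
+    # since "c++" precedes "c" in language_order; a lone 'a.c' gives 'c',
+    # and files with unmapped extensions leave the result at None.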
+
+    # -- Worker methods ------------------------------------------------
+    # (must be implemented by subclasses)
+
+    def preprocess(self, source, output_file=None, macros=None,
+                   include_dirs=None, extra_preargs=None, extra_postargs=None):
+        """Preprocess a single C/C++ source file, named in 'source'.
+        Output will be written to file named 'output_file', or stdout if
+        'output_file' not supplied.  'macros' is a list of macro
+        definitions as for 'compile()', which will augment the macros set
+        with 'define_macro()' and 'undefine_macro()'.  'include_dirs' is a
+        list of directory names that will be added to the default list.
+
+        Raises PreprocessError on failure.
+        """
+        pass
+
+    def compile(self, sources, output_dir=None, macros=None,
+                include_dirs=None, debug=False, extra_preargs=None,
+                extra_postargs=None, depends=None):
+        """Compile one or more source files.
+
+        'sources' must be a list of filenames, most likely C/C++
+        files, but in reality anything that can be handled by a
+        particular compiler and compiler class (eg. MSVCCompiler can
+        handle resource files in 'sources').  Return a list of object
+        filenames, one per source filename in 'sources'.  Depending on
+        the implementation, not all source files will necessarily be
+        compiled, but all corresponding object filenames will be
+        returned.
+
+        If 'output_dir' is given, object files will be put under it, while
+        retaining their original path component.  That is, "foo/bar.c"
+        normally compiles to "foo/bar.o" (for a Unix implementation); if
+        'output_dir' is "build", then it would compile to
+        "build/foo/bar.o".
+
+        'macros', if given, must be a list of macro definitions.  A macro
+        definition is either a (name, value) 2-tuple or a (name,) 1-tuple.
+        The former defines a macro; if the value is None, the macro is
+        defined without an explicit value.  The 1-tuple case undefines a
+        macro.  Later definitions/redefinitions/undefinitions take
+        precedence.
+
+        'include_dirs', if given, must be a list of strings, the
+        directories to add to the default include file search path for this
+        compilation only.
+
+        'debug' is a boolean; if true, the compiler will be instructed to
+        output debug symbols in (or alongside) the object file(s).
+
+        'extra_preargs' and 'extra_postargs' are implementation-dependent.
+        On platforms that have the notion of a command line (e.g. Unix,
+        DOS/Windows), they are most likely lists of strings: extra
+        command-line arguments to prepend/append to the compiler command
+        line.  On other platforms, consult the implementation class
+        documentation.  In any event, they are intended as an escape hatch
+        for those occasions when the abstract compiler framework doesn't
+        cut the mustard.
+
+        'depends', if given, is a list of filenames that all targets
+        depend on.  If a source file is older than any file in
+        depends, then the source file will be recompiled.  This
+        supports dependency tracking, but only at a coarse
+        granularity.
+
+        Raises CompileError on failure.
+        """
+        # A concrete compiler class can either override this method
+        # entirely or implement _compile().
+
+        macros, objects, extra_postargs, pp_opts, build = \
+                self._setup_compile(output_dir, macros, include_dirs, sources,
+                                    depends, extra_postargs)
+        cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
+
+        for obj in objects:
+            try:
+                src, ext = build[obj]
+            except KeyError:
+                continue
+            self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
+
+        # Return *all* object filenames, not just the ones we just built.
+        return objects
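+    # A minimal usage sketch (hypothetical paths, Unix-style subclass):
+    #
+    #   objects = cc.compile(['src/spam.c'], output_dir='build',
+    #                        macros=[('NDEBUG', None)],
+    #                        include_dirs=['include'])
+    #   # objects == ['build/src/spam.o']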
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        """Compile 'src' to product 'obj'."""
+
+        # A concrete compiler class that does not override compile()
+        # should implement _compile().
+        pass
+
+    def create_static_lib(self, objects, output_libname, output_dir=None,
+                          debug=False, target_lang=None):
+        """Link a bunch of stuff together to create a static library file.
+        The "bunch of stuff" consists of the list of object files supplied
+        as 'objects', the extra object files supplied to
+        'add_link_object()' and/or 'set_link_objects()', the libraries
+        supplied to 'add_library()' and/or 'set_libraries()', and the
+        libraries supplied as 'libraries' (if any).
+
+        'output_libname' should be a library name, not a filename; the
+        filename will be inferred from the library name.  'output_dir' is
+        the directory where the library file will be put.
+
+        'debug' is a boolean; if true, debugging information will be
+        included in the library (note that on most platforms, it is the
+        compile step where this matters: the 'debug' flag is included here
+        just for consistency).
+
+        'target_lang' is the target language for which the given objects
+        are being compiled. This allows specific linkage time treatment of
+        certain languages.
+
+        Raises LibError on failure.
+        """
+        pass
+
+    # values for target_desc parameter in link()
+    SHARED_OBJECT = "shared_object"
+    SHARED_LIBRARY = "shared_library"
+    EXECUTABLE = "executable"
+
+    def link(self, target_desc, objects, output_filename, output_dir=None,
+             libraries=None, library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=False, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+        """Link a bunch of stuff together to create an executable or
+        shared library file.
+
+        The "bunch of stuff" consists of the list of object files supplied
+        as 'objects'.  'output_filename' should be a filename.  If
+        'output_dir' is supplied, 'output_filename' is relative to it
+        (i.e. 'output_filename' can provide directory components if
+        needed).
+
+        'libraries' is a list of libraries to link against.  These are
+        library names, not filenames, since they're translated into
+        filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
+        on Unix and "foo.lib" on DOS/Windows).  However, they can include a
+        directory component, which means the linker will look in that
+        specific directory rather than searching all the normal locations.
+
+        'library_dirs', if supplied, should be a list of directories to
+        search for libraries that were specified as bare library names
+        (ie. no directory component).  These are on top of the system
+        default and those supplied to 'add_library_dir()' and/or
+        'set_library_dirs()'.  'runtime_library_dirs' is a list of
+        directories that will be embedded into the shared library and used
+        to search for other shared libraries that *it* depends on at
+        run-time.  (This may only be relevant on Unix.)
+
+        'export_symbols' is a list of symbols that the shared library will
+        export.  (This appears to be relevant only on Windows.)
+
+        'debug' is as for 'compile()' and 'create_static_lib()', with the
+        slight distinction that it actually matters on most platforms (as
+        opposed to 'create_static_lib()', which includes a 'debug' flag
+        mostly for form's sake).
+
+        'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
+        of course that they supply command-line arguments for the
+        particular linker being used).
+
+        'target_lang' is the target language for which the given objects
+        are being compiled. This allows specific linkage time treatment of
+        certain languages.
+
+        Raises LinkError on failure.
+        """
+        raise NotImplementedError
+
+
+    # Old 'link_*()' methods, rewritten to use the new 'link()' method.
+
+    def link_shared_lib(self, objects, output_libname, output_dir=None,
+                        libraries=None, library_dirs=None,
+                        runtime_library_dirs=None, export_symbols=None,
+                        debug=False, extra_preargs=None, extra_postargs=None,
+                        build_temp=None, target_lang=None):
+        self.link(CCompiler.SHARED_LIBRARY, objects,
+                  self.library_filename(output_libname, lib_type='shared'),
+                  output_dir,
+                  libraries, library_dirs, runtime_library_dirs,
+                  export_symbols, debug,
+                  extra_preargs, extra_postargs, build_temp, target_lang)
+
+    def link_shared_object(self, objects, output_filename, output_dir=None,
+                           libraries=None, library_dirs=None,
+                           runtime_library_dirs=None, export_symbols=None,
+                           debug=False, extra_preargs=None, extra_postargs=None,
+                           build_temp=None, target_lang=None):
+        self.link(CCompiler.SHARED_OBJECT, objects,
+                  output_filename, output_dir,
+                  libraries, library_dirs, runtime_library_dirs,
+                  export_symbols, debug,
+                  extra_preargs, extra_postargs, build_temp, target_lang)
+
+    def link_executable(self, objects, output_progname, output_dir=None,
+                        libraries=None, library_dirs=None,
+                        runtime_library_dirs=None, debug=False,
+                        extra_preargs=None, extra_postargs=None,
+                        target_lang=None):
+        self.link(CCompiler.EXECUTABLE, objects,
+                  self.executable_filename(output_progname), output_dir,
+                  libraries, library_dirs, runtime_library_dirs, None,
+                  debug, extra_preargs, extra_postargs, None, target_lang)
+
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function; there is
+    # no appropriate default implementation so subclasses should
+    # implement all of these.
+
+    def library_dir_option(self, dir):
+        """Return the compiler option to add 'dir' to the list of
+        directories searched for libraries.
+        """
+        raise NotImplementedError
+
+    def runtime_library_dir_option(self, dir):
+        """Return the compiler option to add 'dir' to the list of
+        directories searched for runtime libraries.
+        """
+        raise NotImplementedError
+
+    def library_option(self, lib):
+        """Return the compiler option to add 'dir' to the list of libraries
+        linked into the shared library or executable.
+        """
+        raise NotImplementedError
+
+    def has_function(self, funcname, includes=None, include_dirs=None,
+                     libraries=None, library_dirs=None):
+        """Return a boolean indicating whether funcname is supported on
+        the current platform.  The optional arguments can be used to
+        augment the compilation environment.
+        """
+
+        # this can't be included at module scope because it tries to
+        # import math which might not be available at that point - maybe
+        # the necessary logic should just be inlined?
+        import tempfile
+        if includes is None:
+            includes = []
+        if include_dirs is None:
+            include_dirs = []
+        if libraries is None:
+            libraries = []
+        if library_dirs is None:
+            library_dirs = []
+        fd, fname = tempfile.mkstemp(".c", funcname, text=True)
+        with os.fdopen(fd, "w") as f:
+            for incl in includes:
+                f.write("""#include "%s"\n""" % incl)
+            f.write("""\
+main (int argc, char **argv) {
+    %s();
+}
+""" % funcname)
+        try:
+            objects = self.compile([fname], include_dirs=include_dirs)
+        except CompileError:
+            return False
+
+        try:
+            self.link_executable(objects, "a.out",
+                                 libraries=libraries,
+                                 library_dirs=library_dirs)
+        except (LinkError, TypeError):
+            return False
+        return True
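+    # For example (hypothetical check): testing for a libm function with
+    #
+    #   cc.has_function('sin', includes=['math.h'], libraries=['m'])
+    #
+    # only verifies that the generated stub compiles and links; the
+    # resulting executable is never actually run.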
+
+    def find_library_file(self, dirs, lib, debug=False):
+        """Search the specified list of directories for a static or shared
+        library file 'lib' and return the full path to that file.  If
+        'debug' is true, look for a debugging version (if that makes sense on
+        the current platform).  Return None if 'lib' wasn't found in any of
+        the specified directories.
+        """
+        raise NotImplementedError
+
+    # -- Filename generation methods -----------------------------------
+
+    # The default implementation of the filename generating methods are
+    # prejudiced towards the Unix/DOS/Windows view of the world:
+    #   * object files are named by replacing the source file extension
+    #     (eg. .c/.cpp -> .o/.obj)
+    #   * library files (shared or static) are named by plugging the
+    #     library name and extension into a format string, eg.
+    #     "lib%s.%s" % (lib_name, ".a") for Unix static libraries
+    #   * executables are named by appending an extension (possibly
+    #     empty) to the program name: eg. progname + ".exe" for
+    #     Windows
+    #
+    # To reduce redundant code, these methods expect to find
+    # several attributes in the current object (presumably defined
+    # as class attributes):
+    #   * src_extensions -
+    #     list of C/C++ source file extensions, eg. ['.c', '.cpp']
+    #   * obj_extension -
+    #     object file extension, eg. '.o' or '.obj'
+    #   * static_lib_extension -
+    #     extension for static library files, eg. '.a' or '.lib'
+    #   * shared_lib_extension -
+    #     extension for shared library/object files, eg. '.so', '.dll'
+    #   * static_lib_format -
+    #     format string for generating static library filenames,
+    #     eg. 'lib%s.%s' or '%s.%s'
+    #   * shared_lib_format
+    #     format string for generating shared library filenames
+    #     (probably same as static_lib_format, since the extension
+    #     is one of the intended parameters to the format string)
+    #   * exe_extension -
+    #     extension for executable files, eg. '' or '.exe'
+
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            base, ext = os.path.splitext(src_name)
+            base = os.path.splitdrive(base)[1]  # Chop off the drive
+            base = base[os.path.isabs(base):]  # If abs, chop off leading /
+            if ext not in self.src_extensions:
+                raise UnknownFileError("unknown file type '%s' (from '%s')" %
+                                       (ext, src_name))
+            if strip_dir:
+                base = os.path.basename(base)
+            obj_names.append(os.path.join(output_dir,
+                                          base + self.obj_extension))
+        return obj_names
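+    # For example (Unix-style subclass, hypothetical paths):
+    #   object_filenames(['foo/bar.c'], output_dir='build')
+    #     -> ['build/foo/bar.o']
+    # Drive letters and leading separators are chopped first, so even an
+    # absolute source path lands under 'output_dir'.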
+
+    def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
+        assert output_dir is not None
+        if strip_dir:
+            basename = os.path.basename(basename)
+        return os.path.join(output_dir, basename + self.shared_lib_extension)
+
+    def executable_filename(self, basename, strip_dir=False, output_dir=''):
+        assert output_dir is not None
+        if strip_dir:
+            basename = os.path.basename(basename)
+        return os.path.join(output_dir, basename + (self.exe_extension or ''))
+
+    def library_filename(self, libname, lib_type='static',     # or 'shared'
+                         strip_dir=False, output_dir=''):
+        assert output_dir is not None
+        if lib_type not in ("static", "shared", "dylib"):
+            raise ValueError(
+                "'lib_type' must be 'static', 'shared' or 'dylib'")
+        fmt = getattr(self, lib_type + "_lib_format")
+        ext = getattr(self, lib_type + "_lib_extension")
+
+        dir, base = os.path.split(libname)
+        filename = fmt % (base, ext)
+        if strip_dir:
+            dir = ''
+
+        return os.path.join(output_dir, dir, filename)
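+    # For example, with the Unix-style values static_lib_format ==
+    # 'lib%s%s' and static_lib_extension == '.a':
+    #   library_filename('foo')      -> 'libfoo.a'
+    #   library_filename('sub/foo')  -> 'sub/libfoo.a'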
+
+
+    # -- Utility methods -----------------------------------------------
+
+    def execute(self, func, args, msg=None, level=1):
+        execute(func, args, msg, self.dry_run)
+
+    def spawn(self, cmd):
+        spawn(cmd, dry_run=self.dry_run)
+
+    def move_file(self, src, dst):
+        logger.info("moving %r to %r", src, dst)
+        if self.dry_run:
+            return
+        return move(src, dst)
+
+    def mkpath(self, name, mode=0o777):
+        name = os.path.normpath(name)
+        if os.path.isdir(name) or name == '':
+            return
+        if self.dry_run:
+            head = ''
+            for part in name.split(os.sep):
+                logger.info("created directory %s%s", head, part)
+                head += part + os.sep
+            return
+        os.makedirs(name, mode)
diff --git a/Lib/packaging/compiler/cygwinccompiler.py b/Lib/packaging/compiler/cygwinccompiler.py
new file mode 100644
index 0000000..7bfa611
--- /dev/null
+++ b/Lib/packaging/compiler/cygwinccompiler.py
@@ -0,0 +1,355 @@
+"""CCompiler implementations for Cygwin and mingw32 versions of GCC.
+
+This module contains the CygwinCCompiler class, a subclass of
+UnixCCompiler that handles the Cygwin port of the GNU C compiler to
+Windows, and the Mingw32CCompiler class which handles the mingw32 port
+of GCC (same as cygwin in no-cygwin mode).
+"""
+
+# problems:
+#
+# * if you use an MSVC-compiled Python version (1.5.2)
+#   1. you have to insert a __GNUC__ section in its config.h
+#   2. you have to generate an import library for its dll
+#      - create a def-file for python??.dll
+#      - create an import library using
+#             dlltool --dllname python15.dll --def python15.def \
+#                       --output-lib libpython15.a
+#
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+#
+# * We put export_symbols in a def-file, and don't use
+#   --export-all-symbols because it didn't work reliably in some
+#   tested configurations.  And because other Windows compilers also
+#   need their symbols specified, this is no serious problem.
+#
+# tested configurations:
+#
+# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
+#   (after patching python's config.h and for C++ some other include files)
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
+#   (ld doesn't support -shared, so we use dllwrap)
+# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
+#   - its dllwrap doesn't work, there is a bug in binutils 2.10.90
+#     see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
+#   - using gcc -mdll instead of dllwrap doesn't work without -static because
+#     it tries to link against dlls instead of their import libraries (if
+#     it finds the dll first).
+#     By specifying -static we force ld to link against the import libraries;
+#     this is the Windows standard, and the dlls normally do not contain the
+#     necessary symbols.
+#   *** only the version of June 2000 shows these problems
+# * cygwin gcc 3.2/ld 2.13.90 works
+#   (ld supports -shared)
+# * mingw gcc 3.2/ld 2.13 works
+#   (ld supports -shared)
+
+
+import os
+import sys
+import copy
+
+from packaging import logger
+from packaging.compiler.unixccompiler import UnixCCompiler
+from packaging.util import write_file
+from packaging.errors import PackagingExecError, CompileError, UnknownFileError
+from packaging.util import get_compiler_versions
+import sysconfig
+
+
+def get_msvcr():
+    """Include the appropriate MSVC runtime library if Python was built
+    with MSVC 7.0 or later.
+    """
+    msc_pos = sys.version.find('MSC v.')
+    if msc_pos != -1:
+        msc_ver = sys.version[msc_pos+6:msc_pos+10]
+        if msc_ver == '1300':
+            # MSVC 7.0
+            return ['msvcr70']
+        elif msc_ver == '1310':
+            # MSVC 7.1
+            return ['msvcr71']
+        elif msc_ver == '1400':
+            # VS2005 / MSVC 8.0
+            return ['msvcr80']
+        elif msc_ver == '1500':
+            # VS2008 / MSVC 9.0
+            return ['msvcr90']
+        else:
+            raise ValueError("Unknown MS Compiler version %s " % msc_ver)
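+
+# For example, a sys.version containing '[MSC v.1500 32 bit (Intel)]'
+# yields ['msvcr90'], while a build without 'MSC v.' in sys.version
+# (e.g. a GCC build) falls through and returns None.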
+
+
+class CygwinCCompiler(UnixCCompiler):
+    """ Handles the Cygwin port of the GNU C compiler to Windows.
+    """
+    name = 'cygwin'
+    description = 'Cygwin port of GNU C Compiler for Win32'
+    obj_extension = ".o"
+    static_lib_extension = ".a"
+    shared_lib_extension = ".dll"
+    static_lib_format = "lib%s%s"
+    shared_lib_format = "%s%s"
+    exe_extension = ".exe"
+
+    def __init__(self, verbose=0, dry_run=False, force=False):
+
+        UnixCCompiler.__init__(self, verbose, dry_run, force)
+
+        status, details = check_config_h()
+        logger.debug("Python's GCC status: %s (details: %s)", status, details)
+        if status is not CONFIG_H_OK:
+            self.warn(
+                "Python's pyconfig.h doesn't seem to support your compiler. "
+                "Reason: %s. "
+                "Compiling may fail because of undefined preprocessor macros."
+                % details)
+
+        self.gcc_version, self.ld_version, self.dllwrap_version = \
+            get_compiler_versions()
+        logger.debug(self.name + ": gcc %s, ld %s, dllwrap %s\n",
+                     self.gcc_version,
+                     self.ld_version,
+                     self.dllwrap_version)
+
+        # ld_version >= "2.10.90" and < "2.13" should also be able to use
+        # gcc -mdll instead of dllwrap
+        # Older dllwraps had their own version numbers; newer ones use the
+        # same numbering as the rest of binutils (including ld)
+        # dllwrap 2.10.90 is buggy
+        if self.ld_version >= "2.10.90":
+            self.linker_dll = "gcc"
+        else:
+            self.linker_dll = "dllwrap"
+
+        # ld_version >= "2.13" supports -shared, so use it instead of
+        # -mdll -static
+        if self.ld_version >= "2.13":
+            shared_option = "-shared"
+        else:
+            shared_option = "-mdll -static"
+
+        # Hard-code GCC because that's what this is all about.
+        # XXX optimization, warnings etc. should be customizable.
+        self.set_executables(compiler='gcc -mcygwin -O -Wall',
+                             compiler_so='gcc -mcygwin -mdll -O -Wall',
+                             compiler_cxx='g++ -mcygwin -O -Wall',
+                             linker_exe='gcc -mcygwin',
+                             linker_so=('%s -mcygwin %s' %
+                                        (self.linker_dll, shared_option)))
+
+        # cygwin and mingw32 need different sets of libraries
+        if self.gcc_version == "2.91.57":
+            # cygwin shouldn't need msvcrt, but without it the dlls will
+            # crash (gcc version 2.91.57) -- perhaps something about
+            # initialization
+            self.dll_libraries = ["msvcrt"]
+            self.warn(
+                "Consider upgrading to a newer version of gcc")
+        else:
+            # Include the appropriate MSVC runtime library if Python was built
+            # with MSVC 7.0 or later.
+            self.dll_libraries = get_msvcr()
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        """Compile the source by spawning GCC and windres if needed."""
+        if ext == '.rc' or ext == '.res':
+            # gcc needs '.res' and '.rc' compiled to object files !!!
+            try:
+                self.spawn(["windres", "-i", src, "-o", obj])
+            except PackagingExecError as msg:
+                raise CompileError(msg)
+        else: # for other files use the C-compiler
+            try:
+                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+                           extra_postargs)
+            except PackagingExecError as msg:
+                raise CompileError(msg)
+
+    def link(self, target_desc, objects, output_filename, output_dir=None,
+             libraries=None, library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=False, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+        """Link the objects."""
+        # use separate copies, so we can modify the lists
+        extra_preargs = copy.copy(extra_preargs or [])
+        libraries = copy.copy(libraries or [])
+        objects = copy.copy(objects or [])
+
+        # Additional libraries
+        libraries.extend(self.dll_libraries)
+
+        # handle export symbols by creating a def-file
+        # with executables this only works with gcc/ld as linker
+        if ((export_symbols is not None) and
+            (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+            # (The linker doesn't do anything if output is up-to-date.
+            # So it would probably be better to check if we really need this,
+            # but for this we had to insert some unchanged parts of
+            # UnixCCompiler, and this is not what we want.)
+
+            # we want to put some files in the same directory as the
+            # object files are, build_temp doesn't help much
+            # where are the object files
+            temp_dir = os.path.dirname(objects[0])
+            # name of dll to give the helper files the same base name
+            dll_name, dll_extension = os.path.splitext(
+                os.path.basename(output_filename))
+
+            # generate the filenames for these files
+            def_file = os.path.join(temp_dir, dll_name + ".def")
+            lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")
+
+            # Generate .def file
+            contents = [
+                "LIBRARY %s" % os.path.basename(output_filename),
+                "EXPORTS"]
+            for sym in export_symbols:
+                contents.append(sym)
+            self.execute(write_file, (def_file, contents),
+                         "writing %s" % def_file)
+
+            # next, add options for the def-file and for creating import libraries
+
+            # dllwrap uses different options than gcc/ld
+            if self.linker_dll == "dllwrap":
+                extra_preargs.extend(("--output-lib", lib_file))
+                # for dllwrap we have to use a special option
+                extra_preargs.extend(("--def", def_file))
+            # we use gcc/ld here and can be sure ld is >= 2.9.10
+            else:
+                # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
+                #extra_preargs.extend(("-Wl,--out-implib,%s" % lib_file))
+                # for gcc/ld the def-file is specified like any other object file
+                objects.append(def_file)
+
+        #end: if ((export_symbols is not None) and
+        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+
+        # Anyone who wants symbols and a many times larger output file
+        # should explicitly switch the debug mode on;
+        # otherwise we let dllwrap/ld strip the output file
+        # (On my machine: 10KB < stripped_file < ??100KB
+        #   unstripped_file = stripped_file + XXX KB
+        #  ( XXX=254 for a typical python extension))
+        if not debug:
+            extra_preargs.append("-s")
+
+        UnixCCompiler.link(self, target_desc, objects, output_filename,
+                           output_dir, libraries, library_dirs,
+                           runtime_library_dirs,
+                           None, # export_symbols, we do this in our def-file
+                           debug, extra_preargs, extra_postargs, build_temp,
+                           target_lang)
+
+    # -- Miscellaneous methods -----------------------------------------
+
+    def object_filenames(self, source_filenames, strip_dir=False,
+                         output_dir=''):
+        """Adds supports for rc and res files."""
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+            base, ext = os.path.splitext(os.path.normcase(src_name))
+            if ext not in (self.src_extensions + ['.rc', '.res']):
+                raise UnknownFileError("unknown file type '%s' (from '%s')" %
+                                       (ext, src_name))
+            if strip_dir:
+                base = os.path.basename(base)
+            if ext in ('.res', '.rc'):
+                # these need to be compiled to object files
+                obj_names.append(os.path.join(output_dir,
+                                              base + ext + self.obj_extension))
+            else:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.obj_extension))
+        return obj_names
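+    # For example (hypothetical filenames):
+    #   ['foo.c', 'app.rc'] -> ['foo.o', 'app.rc.o']
+    # Unlike bcppcompiler, resource files keep their extension and simply
+    # gain obj_extension, since windres emits ordinary object files.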
+
+# the same as cygwin plus some additional parameters
+class Mingw32CCompiler(CygwinCCompiler):
+    """ Handles the Mingw32 port of the GNU C compiler to Windows.
+    """
+    name = 'mingw32'
+    description = 'MinGW32 compiler'
+
+    def __init__(self, verbose=0, dry_run=False, force=False):
+
+        CygwinCCompiler.__init__(self, verbose, dry_run, force)
+
+        # ld_version >= "2.13" supports -shared, so use it instead of
+        # -mdll -static
+        if self.ld_version >= "2.13":
+            shared_option = "-shared"
+        else:
+            shared_option = "-mdll -static"
+
+        # A real mingw32 doesn't need to specify a different entry point,
+        # but cygwin 2.91.57 in no-cygwin-mode needs it.
+        if self.gcc_version <= "2.91.57":
+            entry_point = '--entry _DllMain@12'
+        else:
+            entry_point = ''
+
+        self.set_executables(compiler='gcc -mno-cygwin -O -Wall',
+                             compiler_so='gcc -mno-cygwin -mdll -O -Wall',
+                             compiler_cxx='g++ -mno-cygwin -O -Wall',
+                             linker_exe='gcc -mno-cygwin',
+                             linker_so='%s -mno-cygwin %s %s'
+                                        % (self.linker_dll, shared_option,
+                                           entry_point))
+        # Maybe we should also append -mthreads, but then the finished
+        # dlls need another dll (mingwm10.dll, see the Mingw32 docs)
+        # (-mthreads: Support thread-safe exception handling on `Mingw32')
+
+        # no additional libraries needed
+        self.dll_libraries = []
+
+        # Include the appropriate MSVC runtime library if Python was built
+        # with MSVC 7.0 or later.
+        self.dll_libraries = get_msvcr()
+
+# Because these compilers aren't configured in Python's pyconfig.h file by
+# default, we should at least warn the user if they are using an unmodified
+# version.
+
+CONFIG_H_OK = "ok"
+CONFIG_H_NOTOK = "not ok"
+CONFIG_H_UNCERTAIN = "uncertain"
+
+def check_config_h():
+    """Check if the current Python installation appears amenable to building
+    extensions with GCC.
+
+    Returns a tuple (status, details), where 'status' is one of the following
+    constants:
+
+    - CONFIG_H_OK: all is well, go ahead and compile
+    - CONFIG_H_NOTOK: doesn't look good
+    - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h
+
+    'details' is a human-readable string explaining the situation.
+
+    Note there are two ways to conclude "OK": either 'sys.version' contains
+    the string "GCC" (implying that this Python was built with GCC), or the
+    installed "pyconfig.h" contains the string "__GNUC__".
+    """
+
+    # XXX since this function also checks sys.version, it's not strictly a
+    # "pyconfig.h" check -- should probably be renamed...
+    # if sys.version contains GCC then python was compiled with GCC, and the
+    # pyconfig.h file should be OK
+    if "GCC" in sys.version:
+        return CONFIG_H_OK, "sys.version mentions 'GCC'"
+
+    # let's see if __GNUC__ is mentioned in python.h
+    fn = sysconfig.get_config_h_filename()
+    try:
+        with open(fn) as config_h:
+            if "__GNUC__" in config_h.read():
+                return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
+            else:
+                return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
+    except IOError as exc:
+        return (CONFIG_H_UNCERTAIN,
+                "couldn't read '%s': %s" % (fn, exc.strerror))
diff --git a/Lib/packaging/compiler/extension.py b/Lib/packaging/compiler/extension.py
new file mode 100644
index 0000000..66f6e9a
--- /dev/null
+++ b/Lib/packaging/compiler/extension.py
@@ -0,0 +1,121 @@
+"""Class representing C/C++ extension modules."""
+
+from packaging import logger
+
+# This class is really only used by the "build_ext" command, so it might
+# make sense to put it in packaging.command.build_ext.  However, that
+# module is already big enough, and I want to make this class a bit more
+# complex to simplify some common cases ("foo" module in "foo.c") and do
+# better error-checking ("foo.c" actually exists).
+#
+# Also, putting this in build_ext.py means every setup script would have
+# to import that large-ish module (indirectly) just to do anything.
+
+
+class Extension:
+    """Just a collection of attributes that describes an extension
+    module and everything needed to build it (hopefully in a portable
+    way, but there are hooks that let you be as unportable as you need).
+
+    Instance attributes:
+      name : string
+        the full name of the extension, including any packages -- i.e.
+        *not* a filename or pathname, but a Python dotted name
+      sources : [string]
+        list of source filenames, relative to the distribution root
+        (where the setup script lives), in Unix form (slash-separated)
+        for portability.  Source files may be C, C++, SWIG (.i),
+        platform-specific resource files, or whatever else is recognized
+        by the "build_ext" command as source for a Python extension.
+      include_dirs : [string]
+        list of directories to search for C/C++ header files (in Unix
+        form for portability)
+      define_macros : [(name : string, value : string|None)]
+        list of macros to define; each macro is defined using a 2-tuple,
+        where 'value' is either the string to define it to or None to
+        define it without a particular value (equivalent of "#define
+        FOO" in source or -DFOO on Unix C compiler command line)
+      undef_macros : [string]
+        list of macros to undefine explicitly
+      library_dirs : [string]
+        list of directories to search for C/C++ libraries at link time
+      libraries : [string]
+        list of library names (not filenames or paths) to link against
+      runtime_library_dirs : [string]
+        list of directories to search for C/C++ libraries at run time
+        (for shared extensions, this is when the extension is loaded)
+      extra_objects : [string]
+        list of extra files to link with (eg. object files not implied
+        by 'sources', static library that must be explicitly specified,
+        binary resource files, etc.)
+      extra_compile_args : [string]
+        any extra platform- and compiler-specific information to use
+        when compiling the source files in 'sources'.  For platforms and
+        compilers where "command line" makes sense, this is typically a
+        list of command-line arguments, but for other platforms it could
+        be anything.
+      extra_link_args : [string]
+        any extra platform- and compiler-specific information to use
+        when linking object files together to create the extension (or
+        to create a new static Python interpreter).  Similar
+        interpretation as for 'extra_compile_args'.
+      export_symbols : [string]
+        list of symbols to be exported from a shared extension.  Not
+        used on all platforms, and not generally necessary for Python
+        extensions, which typically export exactly one symbol:
+        "PyInit_" + extension_name.
+      swig_opts : [string]
+        any extra options to pass to SWIG if a source file has the .i
+        extension.
+      depends : [string]
+        list of files that the extension depends on
+      language : string
+        extension language (e.g. "c", "c++", "objc"). Will be detected
+        from the source extensions if not provided.
+      optional : boolean
+        specifies that a build failure in the extension should not abort the
+        build process, but simply not install the failing extension.
+    """
+
+    # **kwargs are allowed so that a warning is emitted instead of an
+    # exception
+    def __init__(self, name, sources, include_dirs=None, define_macros=None,
+                 undef_macros=None, library_dirs=None, libraries=None,
+                 runtime_library_dirs=None, extra_objects=None,
+                 extra_compile_args=None, extra_link_args=None,
+                 export_symbols=None, swig_opts=None, depends=None,
+                 language=None, optional=None, **kw):
+        if not isinstance(name, str):
+            raise AssertionError("'name' must be a string")
+
+        if not isinstance(sources, list):
+            raise AssertionError("'sources' must be a list of strings")
+
+        for v in sources:
+            if not isinstance(v, str):
+                raise AssertionError("'sources' must be a list of strings")
+
+        self.name = name
+        self.sources = sources
+        self.include_dirs = include_dirs or []
+        self.define_macros = define_macros or []
+        self.undef_macros = undef_macros or []
+        self.library_dirs = library_dirs or []
+        self.libraries = libraries or []
+        self.runtime_library_dirs = runtime_library_dirs or []
+        self.extra_objects = extra_objects or []
+        self.extra_compile_args = extra_compile_args or []
+        self.extra_link_args = extra_link_args or []
+        self.export_symbols = export_symbols or []
+        self.swig_opts = swig_opts or []
+        self.depends = depends or []
+        self.language = language
+        self.optional = optional
+
+        # If there are unknown keyword options, warn about them
+        if len(kw) > 0:
+            options = [repr(option) for option in kw]
+            options = ', '.join(sorted(options))
+            logger.warning(
+                'unknown arguments given to Extension: %s', options)
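+
+# A minimal, hedged example of constructing an Extension (all names are
+# illustrative, not taken from any real project):
+#
+#   ext = Extension('pkg._speedups', ['src/speedups.c'],
+#                   define_macros=[('NDEBUG', None)],
+#                   libraries=['m'],
+#                   optional=True)
+#
+# An unknown keyword, e.g. Extension('x', [], foo=1), only logs a warning
+# instead of raising, per the **kw handling above.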
diff --git a/Lib/packaging/compiler/msvc9compiler.py b/Lib/packaging/compiler/msvc9compiler.py
new file mode 100644
index 0000000..43fc5fa
--- /dev/null
+++ b/Lib/packaging/compiler/msvc9compiler.py
@@ -0,0 +1,720 @@
+"""CCompiler implementation for the Microsoft Visual Studio 2008 compiler.
+
+The MSVCCompiler class is compatible with VS 2005 and VS 2008.  Legacy
+support for older versions of VS is in the msvccompiler module.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+#   finding DevStudio (through the registry)
+# ported to VS2005 and VS 2008 by Christian Heimes
+import os
+import subprocess
+import sys
+import re
+
+from packaging.errors import (PackagingExecError, PackagingPlatformError,
+                              CompileError, LibError, LinkError)
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_lib_options
+from packaging import logger
+from packaging.util import get_platform
+
+import winreg
+
+RegOpenKeyEx = winreg.OpenKeyEx
+RegEnumKey = winreg.EnumKey
+RegEnumValue = winreg.EnumValue
+RegError = winreg.error
+
+HKEYS = (winreg.HKEY_USERS,
+         winreg.HKEY_CURRENT_USER,
+         winreg.HKEY_LOCAL_MACHINE,
+         winreg.HKEY_CLASSES_ROOT)
+
+VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
+WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
+NET_BASE = r"Software\Microsoft\.NETFramework"
+
+# A map keyed by get_platform() return values to values accepted by
+# 'vcvarsall.bat'.  Note a cross-compile may combine these (e.g. 'x86_amd64'
+# is the param to cross-compile on x86 targeting amd64).
+PLAT_TO_VCVARS = {
+    'win32' : 'x86',
+    'win-amd64' : 'amd64',
+    'win-ia64' : 'ia64',
+}
+
+
+class Reg:
+    """Helper class to read values from the registry
+    """
+
+    @classmethod
+    def get_value(cls, path, key):
+        for base in HKEYS:
+            d = cls.read_values(base, path)
+            if d and key in d:
+                return d[key]
+        raise KeyError(key)
+
+    @classmethod
+    def read_keys(cls, base, key):
+        """Return list of registry keys."""
+        try:
+            handle = RegOpenKeyEx(base, key)
+        except RegError:
+            return None
+        L = []
+        i = 0
+        while True:
+            try:
+                k = RegEnumKey(handle, i)
+            except RegError:
+                break
+            L.append(k)
+            i += 1
+        return L
+
+    @classmethod
+    def read_values(cls, base, key):
+        """Return dict of registry keys and values.
+
+        All names are converted to lowercase.
+        """
+        try:
+            handle = RegOpenKeyEx(base, key)
+        except RegError:
+            return None
+        d = {}
+        i = 0
+        while True:
+            try:
+                name, value, type = RegEnumValue(handle, i)
+            except RegError:
+                break
+            name = name.lower()
+            d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
+            i += 1
+        return d
+
+    @staticmethod
+    def convert_mbcs(s):
+        dec = getattr(s, "decode", None)
+        if dec is not None:
+            try:
+                s = dec("mbcs")
+            except UnicodeError:
+                pass
+        return s
+
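+# Hedged usage sketch: Reg.get_value() walks every root in HKEYS, e.g.
+#
+#   productdir = Reg.get_value(
+#       r"Software\Microsoft\VisualStudio\9.0\Setup\VC", "productdir")
+#
+# and raises KeyError when no hive has the value.
+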
+class MacroExpander:
+
+    def __init__(self, version):
+        self.macros = {}
+        self.vsbase = VS_BASE % version
+        self.load_macros(version)
+
+    def set_macro(self, macro, path, key):
+        self.macros["$(%s)" % macro] = Reg.get_value(path, key)
+
+    def load_macros(self, version):
+        self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
+        self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
+        self.set_macro("FrameworkDir", NET_BASE, "installroot")
+        try:
+            if version >= 8.0:
+                self.set_macro("FrameworkSDKDir", NET_BASE,
+                               "sdkinstallrootv2.0")
+            else:
+                raise KeyError("sdkinstallrootv2.0")
+        except KeyError:
+            raise PackagingPlatformError(
+            """Python was built with Visual Studio 2008;
+extensions must be built with a compiler that can generate compatible binaries.
+Visual Studio 2008 was not found on this system. If you have Cygwin installed,
+you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
+
+        if version >= 9.0:
+            self.set_macro("FrameworkVersion", self.vsbase, "clr version")
+            self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
+        else:
+            p = r"Software\Microsoft\NET Framework Setup\Product"
+            for base in HKEYS:
+                try:
+                    h = RegOpenKeyEx(base, p)
+                except RegError:
+                    continue
+                key = RegEnumKey(h, 0)
+                d = Reg.read_values(base, r"%s\%s" % (p, key))
+                self.macros["$(FrameworkVersion)"] = d["version"]
+
+    def sub(self, s):
+        for k, v in self.macros.items():
+            s = s.replace(k, v)
+        return s
+
+def get_build_version():
+    """Return the version of MSVC that was used to build Python.
+
+    For Python 2.3 and up, the version number is included in
+    sys.version.  For earlier versions, assume the compiler is MSVC 6.
+    """
+    prefix = "MSC v."
+    i = sys.version.find(prefix)
+    if i == -1:
+        return 6
+    i = i + len(prefix)
+    s, rest = sys.version[i:].split(" ", 1)
+    majorVersion = int(s[:-2]) - 6
+    minorVersion = int(s[2:3]) / 10.0
+    # I don't think paths are affected by minor version in version 6
+    if majorVersion == 6:
+        minorVersion = 0
+    if majorVersion >= 6:
+        return majorVersion + minorVersion
+    # else we don't know what version of the compiler this is
+    return None
+
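+# Worked example (hedged, using a representative sys.version fragment):
+# for "MSC v.1500 64 bit (AMD64)", s == "1500", so majorVersion is
+# int("15") - 6 == 9 and minorVersion is int("0") / 10.0 == 0.0; the
+# function returns 9.0, i.e. Visual Studio 2008.
+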
+def normalize_and_reduce_paths(paths):
+    """Return a list of normalized paths with duplicates removed.
+
+    The current order of paths is maintained.
+    """
+    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
+    reduced_paths = []
+    for p in paths:
+        np = os.path.normpath(p)
+        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
+        if np not in reduced_paths:
+            reduced_paths.append(np)
+    return reduced_paths
+
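+# e.g. normalize_and_reduce_paths(['C:\\a\\', 'C:\\a', 'C:\\b']) returns
+# ['C:\\a', 'C:\\b'] -- '/a' and '/a/' collapse, first-seen order wins.
+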
+def removeDuplicates(variable):
+    """Remove duplicate values of an environment variable.
+    """
+    oldList = variable.split(os.pathsep)
+    newList = []
+    for i in oldList:
+        if i not in newList:
+            newList.append(i)
+    newVariable = os.pathsep.join(newList)
+    return newVariable
+
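+# e.g. with os.pathsep == ';', removeDuplicates('a;b;a') returns 'a;b'.
+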
+def find_vcvarsall(version):
+    """Find the vcvarsall.bat file
+
+    At first it tries to find the productdir of VS 2008 in the registry. If
+    that fails it falls back to the VS90COMNTOOLS env var.
+    """
+    vsbase = VS_BASE % version
+    try:
+        productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
+                                   "productdir")
+    except KeyError:
+        logger.debug("Unable to find productdir in registry")
+        productdir = None
+
+    if not productdir or not os.path.isdir(productdir):
+        toolskey = "VS%0.f0COMNTOOLS" % version
+        toolsdir = os.environ.get(toolskey, None)
+
+        if toolsdir and os.path.isdir(toolsdir):
+            productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
+            productdir = os.path.abspath(productdir)
+            if not os.path.isdir(productdir):
+                logger.debug("%s is not a valid directory", productdir)
+                return None
+        else:
+            logger.debug("env var %s is not set or invalid", toolskey)
+    if not productdir:
+        logger.debug("no productdir found")
+        return None
+    vcvarsall = os.path.join(productdir, "vcvarsall.bat")
+    if os.path.isfile(vcvarsall):
+        return vcvarsall
+    logger.debug("unable to find vcvarsall.bat")
+    return None
+
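+# Hedged example: find_vcvarsall(9.0) first asks the registry for the VS
+# 9.0 "productdir" and, failing that, derives the VC directory from the
+# VS90COMNTOOLS environment variable; the result is a full path to
+# vcvarsall.bat, or None.
+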
+def query_vcvarsall(version, arch="x86"):
+    """Launch vcvarsall.bat and read the settings from its environment
+    """
+    vcvarsall = find_vcvarsall(version)
+    interesting = set(("include", "lib", "libpath", "path"))
+    result = {}
+
+    if vcvarsall is None:
+        raise PackagingPlatformError("Unable to find vcvarsall.bat")
+    logger.debug("calling 'vcvarsall.bat %s' (version=%s)", arch, version)
+    popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+
+    stdout, stderr = popen.communicate()
+    if popen.wait() != 0:
+        raise PackagingPlatformError(stderr.decode("mbcs"))
+
+    stdout = stdout.decode("mbcs")
+    for line in stdout.split("\n"):
+        line = Reg.convert_mbcs(line)
+        if '=' not in line:
+            continue
+        line = line.strip()
+        key, value = line.split('=', 1)
+        key = key.lower()
+        if key in interesting:
+            if value.endswith(os.pathsep):
+                value = value[:-1]
+            result[key] = removeDuplicates(value)
+
+    if len(result) != len(interesting):
+        raise ValueError(str(list(result)))
+
+    return result
+
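+# Hedged example: query_vcvarsall(9.0, 'x86_amd64') returns a dict with
+# exactly the keys 'include', 'lib', 'libpath' and 'path', each mapped to
+# a deduplicated os.pathsep-joined string taken from the environment the
+# batch file sets up; a missing key raises ValueError.
+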
+# More globals
+VERSION = get_build_version()
+if VERSION < 8.0:
+    raise PackagingPlatformError("VC %0.1f is not supported by this module"
+                                 % VERSION)
+# MACROS = MacroExpander(VERSION)
+
+class MSVCCompiler(CCompiler):
+    """Concrete class that implements an interface to Microsoft Visual C++,
+       as defined by the CCompiler abstract class."""
+
+    name = 'msvc'
+    description = 'Microsoft Visual C++'
+
+    # Just set this so CCompiler's constructor doesn't barf.  We currently
+    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+    # as it really isn't necessary for this sort of single-compiler class.
+    # Would be nice to have a consistent interface with UnixCCompiler,
+    # though, so it's worth thinking about.
+    executables = {}
+
+    # Private class data (need to distinguish C from C++ source for compiler)
+    _c_extensions = ['.c']
+    _cpp_extensions = ['.cc', '.cpp', '.cxx']
+    _rc_extensions = ['.rc']
+    _mc_extensions = ['.mc']
+
+    # Needed for the filename generation methods provided by the
+    # base class, CCompiler.
+    src_extensions = (_c_extensions + _cpp_extensions +
+                      _rc_extensions + _mc_extensions)
+    res_extension = '.res'
+    obj_extension = '.obj'
+    static_lib_extension = '.lib'
+    shared_lib_extension = '.dll'
+    static_lib_format = shared_lib_format = '%s%s'
+    exe_extension = '.exe'
+
+    def __init__(self, verbose=0, dry_run=False, force=False):
+        CCompiler.__init__(self, verbose, dry_run, force)
+        self.__version = VERSION
+        self.__root = r"Software\Microsoft\VisualStudio"
+        # self.__macros = MACROS
+        self.__paths = []
+        # target platform (.plat_name is consistent with 'bdist')
+        self.plat_name = None
+        self.__arch = None # deprecated name
+        self.initialized = False
+
+    def initialize(self, plat_name=None):
+        # multi-init means we would need to check platform same each time...
+        assert not self.initialized, "don't init multiple times"
+        if plat_name is None:
+            plat_name = get_platform()
+        # sanity check for platforms to prevent obscure errors later.
+        ok_plats = 'win32', 'win-amd64', 'win-ia64'
+        if plat_name not in ok_plats:
+            raise PackagingPlatformError("--plat-name must be one of %s" %
+                                         (ok_plats,))
+
+        if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
+            # Assume that the SDK set up everything alright; don't try to be
+            # smarter
+            self.cc = "cl.exe"
+            self.linker = "link.exe"
+            self.lib = "lib.exe"
+            self.rc = "rc.exe"
+            self.mc = "mc.exe"
+        else:
+            # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
+            # to cross compile, you use 'x86_amd64'.
+            # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
+            # compile use 'x86' (ie, it runs the x86 compiler directly)
+            # No idea how itanium handles this, if at all.
+            if plat_name == get_platform() or plat_name == 'win32':
+                # native build or cross-compile to win32
+                plat_spec = PLAT_TO_VCVARS[plat_name]
+            else:
+                # cross compile from win32 -> some 64bit
+                plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \
+                            PLAT_TO_VCVARS[plat_name]
+
+            vc_env = query_vcvarsall(VERSION, plat_spec)
+
+            # take care to only use strings in the environment.
+            self.__paths = vc_env['path'].split(os.pathsep)
+            os.environ['lib'] = vc_env['lib']
+            os.environ['include'] = vc_env['include']
+
+            if len(self.__paths) == 0:
+                raise PackagingPlatformError("Python was built with VC %0.1f, "
+                       "and extensions need to be built with the same "
+                       "version of the compiler, but it isn't installed."
+                       % VERSION)
+
+            self.cc = self.find_exe("cl.exe")
+            self.linker = self.find_exe("link.exe")
+            self.lib = self.find_exe("lib.exe")
+            self.rc = self.find_exe("rc.exe")   # resource compiler
+            self.mc = self.find_exe("mc.exe")   # message compiler
+            #self.set_path_env_var('lib')
+            #self.set_path_env_var('include')
+
+        # extend the MSVC path with the current path
+        try:
+            for p in os.environ['path'].split(';'):
+                self.__paths.append(p)
+        except KeyError:
+            pass
+        self.__paths = normalize_and_reduce_paths(self.__paths)
+        os.environ['path'] = ";".join(self.__paths)
+
+        self.preprocess_options = None
+        if self.__arch == "x86":
+            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3',
+                                    '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
+                                          '/Z7', '/D_DEBUG']
+        else:
+            # Win64
+            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GS-',
+                                    '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
+                                          '/Z7', '/D_DEBUG']
+
+        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
+        if self.__version >= 7:
+            self.ldflags_shared_debug = [
+                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG', '/pdb:None'
+                ]
+        self.ldflags_static = ['/nologo']
+
+        self.initialized = True
+
+    # -- Worker methods ------------------------------------------------
+
+    def object_filenames(self,
+                         source_filenames,
+                         strip_dir=False,
+                         output_dir=''):
+        # Copied from ccompiler.py, extended to return .res as 'object'-file
+        # for .rc input file
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            base, ext = os.path.splitext(src_name)
+            base = os.path.splitdrive(base)[1] # Chop off the drive
+            base = base[os.path.isabs(base):]  # If abs, chop off leading /
+            if ext not in self.src_extensions:
+                # Better to raise an exception instead of silently continuing
+                # and later complain about sources and targets having
+                # different lengths
+                raise CompileError("Don't know how to compile %s" % src_name)
+            if strip_dir:
+                base = os.path.basename(base)
+            if ext in self._rc_extensions:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.res_extension))
+            elif ext in self._mc_extensions:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.res_extension))
+            else:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.obj_extension))
+        return obj_names
+
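+    # Illustrative mapping (hedged):
+    #
+    #   object_filenames(['foo.c', 'bar.rc', 'msg.mc'])
+    #   ->  ['foo.obj', 'bar.res', 'msg.res']
+    #
+    # i.e. resource and message sources compile to .res, all other sources
+    # to .obj.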
+
+    def compile(self, sources,
+                output_dir=None, macros=None, include_dirs=None, debug=False,
+                extra_preargs=None, extra_postargs=None, depends=None):
+
+        if not self.initialized:
+            self.initialize()
+        compile_info = self._setup_compile(output_dir, macros, include_dirs,
+                                           sources, depends, extra_postargs)
+        macros, objects, extra_postargs, pp_opts, build = compile_info
+
+        compile_opts = extra_preargs or []
+        compile_opts.append('/c')
+        if debug:
+            compile_opts.extend(self.compile_options_debug)
+        else:
+            compile_opts.extend(self.compile_options)
+
+        for obj in objects:
+            try:
+                src, ext = build[obj]
+            except KeyError:
+                continue
+            if debug:
+                # pass the full pathname to MSVC in debug mode,
+                # this allows the debugger to find the source file
+                # without asking the user to browse for it
+                src = os.path.abspath(src)
+
+            if ext in self._c_extensions:
+                input_opt = "/Tc" + src
+            elif ext in self._cpp_extensions:
+                input_opt = "/Tp" + src
+            elif ext in self._rc_extensions:
+                # compile .RC to .RES file
+                input_opt = src
+                output_opt = "/fo" + obj
+                try:
+                    self.spawn([self.rc] + pp_opts +
+                               [output_opt] + [input_opt])
+                except PackagingExecError as msg:
+                    raise CompileError(msg)
+                continue
+            elif ext in self._mc_extensions:
+                # Compile .MC to .RC file to .RES file.
+                #   * '-h dir' specifies the directory for the
+                #     generated include file
+                #   * '-r dir' specifies the target directory of the
+                #     generated RC file and the binary message resource
+                #     it includes
+                #
+                # For now (since there are no options to change this),
+                # we use the source-directory for the include file and
+                # the build directory for the RC file and message
+                # resources. This works at least for win32all.
+                h_dir = os.path.dirname(src)
+                rc_dir = os.path.dirname(obj)
+                try:
+                    # first compile .MC to .RC and .H file
+                    self.spawn([self.mc] +
+                               ['-h', h_dir, '-r', rc_dir] + [src])
+                    base, _ = os.path.splitext(os.path.basename(src))
+                    rc_file = os.path.join(rc_dir, base + '.rc')
+                    # then compile .RC to .RES file
+                    self.spawn([self.rc] +
+                               ["/fo" + obj] + [rc_file])
+
+                except PackagingExecError as msg:
+                    raise CompileError(msg)
+                continue
+            else:
+                # how to handle this file?
+                raise CompileError("Don't know how to compile %s to %s"
+                                   % (src, obj))
+
+            output_opt = "/Fo" + obj
+            try:
+                self.spawn([self.cc] + compile_opts + pp_opts +
+                           [input_opt, output_opt] +
+                           extra_postargs)
+            except PackagingExecError as msg:
+                raise CompileError(msg)
+
+        return objects
+
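+    # Hedged illustration of what the loop above spawns (paths are
+    # examples only):
+    #
+    #   cl.exe /c /nologo /Ox /MD /W3 /DNDEBUG <pp_opts> \
+    #       /Tcfoo.c /Fobuild\foo.obj
+    #   rc.exe <pp_opts> /fobuild\bar.res bar.rc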
+
+    def create_static_lib(self,
+                          objects,
+                          output_libname,
+                          output_dir=None,
+                          debug=False,
+                          target_lang=None):
+
+        if not self.initialized:
+            self.initialize()
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        output_filename = self.library_filename(output_libname,
+                                                output_dir=output_dir)
+
+        if self._need_link(objects, output_filename):
+            lib_args = objects + ['/OUT:' + output_filename]
+            if debug:
+                pass # XXX what goes here?
+            try:
+                self.spawn([self.lib] + lib_args)
+            except PackagingExecError as msg:
+                raise LibError(msg)
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+
+    def link(self, target_desc, objects, output_filename, output_dir=None,
+             libraries=None, library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=False, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+        if not self.initialized:
+            self.initialize()
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        fixed_args = self._fix_lib_args(libraries, library_dirs,
+                                        runtime_library_dirs)
+        libraries, library_dirs, runtime_library_dirs = fixed_args
+
+        if runtime_library_dirs:
+            self.warn("don't know what to do with 'runtime_library_dirs': "
+                      + str(runtime_library_dirs))
+
+        lib_opts = gen_lib_options(self,
+                                   library_dirs, runtime_library_dirs,
+                                   libraries)
+        if output_dir is not None:
+            output_filename = os.path.join(output_dir, output_filename)
+
+        if self._need_link(objects, output_filename):
+            if target_desc == CCompiler.EXECUTABLE:
+                if debug:
+                    ldflags = self.ldflags_shared_debug[1:]
+                else:
+                    ldflags = self.ldflags_shared[1:]
+            else:
+                if debug:
+                    ldflags = self.ldflags_shared_debug
+                else:
+                    ldflags = self.ldflags_shared
+
+            export_opts = []
+            for sym in (export_symbols or []):
+                export_opts.append("/EXPORT:" + sym)
+
+            ld_args = (ldflags + lib_opts + export_opts +
+                       objects + ['/OUT:' + output_filename])
+
+            # The MSVC linker generates .lib and .exp files, which cannot be
+            # suppressed by any linker switches. The .lib files may even be
+            # needed! Make sure they are generated in the temporary build
+            # directory. Since they have different names for debug and release
+            # builds, they can go into the same directory.
+            build_temp = os.path.dirname(objects[0])
+            if export_symbols is not None:
+                dll_name, dll_ext = os.path.splitext(
+                    os.path.basename(output_filename))
+                implib_file = os.path.join(
+                    build_temp,
+                    self.library_filename(dll_name))
+                ld_args.append('/IMPLIB:' + implib_file)
+
+            # Embedded manifests are recommended - see MSDN article titled
+            # "How to: Embed a Manifest Inside a C/C++ Application"
+            # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
+            # Ask the linker to generate the manifest in the temp dir, so
+            # we can embed it later.
+            temp_manifest = os.path.join(
+                    build_temp,
+                    os.path.basename(output_filename) + ".manifest")
+            ld_args.append('/MANIFESTFILE:' + temp_manifest)
+
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+
+            self.mkpath(os.path.dirname(output_filename))
+            try:
+                self.spawn([self.linker] + ld_args)
+            except PackagingExecError as msg:
+                raise LinkError(msg)
+
+            # embed the manifest
+            # XXX - this is somewhat fragile - if mt.exe fails, packaging
+            # will still consider the DLL up-to-date, but it will not have a
+            # manifest.  Maybe we should link to a temp file?  OTOH, that
+            # implies a build environment error that shouldn't go undetected.
+            if target_desc == CCompiler.EXECUTABLE:
+                mfid = 1
+            else:
+                mfid = 2
+                self._remove_visual_c_ref(temp_manifest)
+            out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
+            try:
+                self.spawn(['mt.exe', '-nologo', '-manifest',
+                            temp_manifest, out_arg])
+            except PackagingExecError as msg:
+                raise LinkError(msg)
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    def _remove_visual_c_ref(self, manifest_file):
+        try:
+            # Remove references to the Visual C runtime, so they will
+            # fall through to the Visual C dependency of Python.exe.
+            # This way, when installed for a restricted user (e.g.
+            # runtimes are not in WinSxS folder, but in Python's own
+            # folder), the runtimes do not need to be in every folder
+            # with .pyd's.
+            with open(manifest_file) as manifest_f:
+                manifest_buf = manifest_f.read()
+            pattern = re.compile(
+                r"""<assemblyIdentity.*?name=("|')Microsoft\."""\
+                r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
+                re.DOTALL)
+            manifest_buf = re.sub(pattern, "", manifest_buf)
+            pattern = "<dependentAssembly>\s*</dependentAssembly>"
+            manifest_buf = re.sub(pattern, "", manifest_buf)
+            with open(manifest_file, 'w') as manifest_f:
+                manifest_f.write(manifest_buf)
+        except IOError:
+            pass
+
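+    # Hedged example: the two substitutions above would reduce a manifest
+    # fragment such as
+    #
+    #   <dependentAssembly>
+    #     <assemblyIdentity type="win32" name="Microsoft.VC90.CRT" .../>
+    #   </dependentAssembly>
+    #
+    # to nothing, so the extension DLL inherits python.exe's CRT binding.
+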
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options() function, in
+    # ccompiler.py.
+
+    def library_dir_option(self, dir):
+        return "/LIBPATH:" + dir
+
+    def runtime_library_dir_option(self, dir):
+        raise PackagingPlatformError(
+              "don't know how to set runtime library search path for MSVC++")
+
+    def library_option(self, lib):
+        return self.library_filename(lib)
+
+
+    def find_library_file(self, dirs, lib, debug=False):
+        # Prefer a debugging library if found (and requested), but deal
+        # with it if we don't have one.
+        if debug:
+            try_names = [lib + "_d", lib]
+        else:
+            try_names = [lib]
+        for dir in dirs:
+            for name in try_names:
+                libfile = os.path.join(dir, self.library_filename(name))
+                if os.path.exists(libfile):
+                    return libfile
+        # Oops, didn't find it in *any* of 'dirs'
+        return None
+
+    # Helper methods for using the MSVC registry settings
+
+    def find_exe(self, exe):
+        """Return path to an MSVC executable program.
+
+        Tries to find the program in several places: first, one of the
+        MSVC program search paths from the registry; next, the directories
+        in the PATH environment variable.  If any of those work, return an
+        absolute path that is known to exist.  If none of them work, just
+        return the original program name, 'exe'.
+        """
+        for p in self.__paths:
+            fn = os.path.join(os.path.abspath(p), exe)
+            if os.path.isfile(fn):
+                return fn
+
+        # didn't find it; try existing path
+        for p in os.environ['Path'].split(';'):
+            fn = os.path.join(os.path.abspath(p),exe)
+            if os.path.isfile(fn):
+                return fn
+
+        return exe
diff --git a/Lib/packaging/compiler/msvccompiler.py b/Lib/packaging/compiler/msvccompiler.py
new file mode 100644
index 0000000..97f76bb
--- /dev/null
+++ b/Lib/packaging/compiler/msvccompiler.py
@@ -0,0 +1,636 @@
+"""CCompiler implementation for old Microsoft Visual Studio compilers.
+
+For a compiler compatible with VS 2005 and 2008, use msvc9compiler.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+#   finding DevStudio (through the registry)
+
+
+import sys
+import os
+
+from packaging.errors import (PackagingExecError, PackagingPlatformError,
+                              CompileError, LibError, LinkError)
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_lib_options
+from packaging import logger
+
+_can_read_reg = False
+try:
+    import winreg
+
+    _can_read_reg = True
+    hkey_mod = winreg
+
+    RegOpenKeyEx = winreg.OpenKeyEx
+    RegEnumKey = winreg.EnumKey
+    RegEnumValue = winreg.EnumValue
+    RegError = winreg.error
+
+except ImportError:
+    try:
+        import win32api
+        import win32con
+        _can_read_reg = True
+        hkey_mod = win32con
+
+        RegOpenKeyEx = win32api.RegOpenKeyEx
+        RegEnumKey = win32api.RegEnumKey
+        RegEnumValue = win32api.RegEnumValue
+        RegError = win32api.error
+
+    except ImportError:
+        logger.warning(
+            "can't read registry to find the necessary compiler setting;\n"
+            "make sure that Python modules _winreg, win32api or win32con "
+            "are installed.")
+
+if _can_read_reg:
+    HKEYS = (hkey_mod.HKEY_USERS,
+             hkey_mod.HKEY_CURRENT_USER,
+             hkey_mod.HKEY_LOCAL_MACHINE,
+             hkey_mod.HKEY_CLASSES_ROOT)
+
+
+def read_keys(base, key):
+    """Return list of registry keys."""
+
+    try:
+        handle = RegOpenKeyEx(base, key)
+    except RegError:
+        return None
+    L = []
+    i = 0
+    while True:
+        try:
+            k = RegEnumKey(handle, i)
+        except RegError:
+            break
+        L.append(k)
+        i = i + 1
+    return L
+
+
+def read_values(base, key):
+    """Return dict of registry keys and values.
+
+    All names are converted to lowercase.
+    """
+    try:
+        handle = RegOpenKeyEx(base, key)
+    except RegError:
+        return None
+    d = {}
+    i = 0
+    while True:
+        try:
+            name, value, type = RegEnumValue(handle, i)
+        except RegError:
+            break
+        name = name.lower()
+        d[convert_mbcs(name)] = convert_mbcs(value)
+        i = i + 1
+    return d
+
+
+def convert_mbcs(s):
+    enc = getattr(s, "encode", None)
+    if enc is not None:
+        try:
+            s = enc("mbcs")
+        except UnicodeError:
+            pass
+    return s
+
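+# Hedged example: read_values(HKEY_LOCAL_MACHINE,
+# r"Software\Microsoft\.NETFramework") might return something like
+# {'installroot': 'C:\\WINDOWS\\Microsoft.NET\\Framework\\'}; names are
+# lowercased and None signals an unreadable key.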
+
+class MacroExpander:
+
+    def __init__(self, version):
+        self.macros = {}
+        self.load_macros(version)
+
+    def set_macro(self, macro, path, key):
+        for base in HKEYS:
+            d = read_values(base, path)
+            if d:
+                self.macros["$(%s)" % macro] = d[key]
+                break
+
+    def load_macros(self, version):
+        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
+        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
+        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
+        net = r"Software\Microsoft\.NETFramework"
+        self.set_macro("FrameworkDir", net, "installroot")
+        try:
+            if version > 7.0:
+                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
+            else:
+                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
+        except KeyError:
+            raise PackagingPlatformError(
+"""Python was built with Visual Studio 2003; extensions must be built with
+a compiler that can generate compatible binaries. Visual Studio 2003 was
+not found on this system. If you have Cygwin installed, you can try
+compiling with MingW32, by passing "-c mingw32" to setup.py.""")
+        # XXX update this comment for setup.cfg
+
+        p = r"Software\Microsoft\NET Framework Setup\Product"
+        for base in HKEYS:
+            try:
+                h = RegOpenKeyEx(base, p)
+            except RegError:
+                continue
+            key = RegEnumKey(h, 0)
+            d = read_values(base, r"%s\%s" % (p, key))
+            self.macros["$(FrameworkVersion)"] = d["version"]
+
+    def sub(self, s):
+        for k, v in self.macros.items():
+            s = s.replace(k, v)
+        return s
+
+
+def get_build_version():
+    """Return the version of MSVC that was used to build Python.
+
+    For Python 2.3 and up, the version number is included in
+    sys.version.  For earlier versions, assume the compiler is MSVC 6.
+    """
+
+    prefix = "MSC v."
+    i = sys.version.find(prefix)
+    if i == -1:
+        return 6
+    i = i + len(prefix)
+    s, rest = sys.version[i:].split(" ", 1)
+    majorVersion = int(s[:-2]) - 6
+    minorVersion = int(s[2:3]) / 10.0
+    # I don't think paths are affected by minor version in version 6
+    if majorVersion == 6:
+        minorVersion = 0
+    if majorVersion >= 6:
+        return majorVersion + minorVersion
+    # else we don't know what version of the compiler this is
+    return None
+
+
+def get_build_architecture():
+    """Return the processor architecture.
+
+    Possible results are "Intel", "Itanium", or "AMD64".
+    """
+
+    prefix = " bit ("
+    i = sys.version.find(prefix)
+    if i == -1:
+        return "Intel"
+    j = sys.version.find(")", i)
+    return sys.version[i+len(prefix):j]
+
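+# Worked example (hedged): with sys.version containing "64 bit (AMD64)",
+# the slice between " bit (" and the closing ")" yields "AMD64"; when the
+# marker is absent, a 32-bit "Intel" build is assumed.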
+
+def normalize_and_reduce_paths(paths):
+    """Return a list of normalized paths with duplicates removed.
+
+    The current order of paths is maintained.
+    """
+    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
+    reduced_paths = []
+    for p in paths:
+        np = os.path.normpath(p)
+        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
+        if np not in reduced_paths:
+            reduced_paths.append(np)
+    return reduced_paths
+
+
+class MSVCCompiler(CCompiler):
+    """Concrete class that implements an interface to Microsoft Visual C++,
+       as defined by the CCompiler abstract class."""
+
+    name = 'msvc'
+    description = "Microsoft Visual C++"
+
+    # Just set this so CCompiler's constructor doesn't barf.  We currently
+    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+    # as it really isn't necessary for this sort of single-compiler class.
+    # Would be nice to have a consistent interface with UnixCCompiler,
+    # though, so it's worth thinking about.
+    executables = {}
+
+    # Private class data (need to distinguish C from C++ source for compiler)
+    _c_extensions = ['.c']
+    _cpp_extensions = ['.cc', '.cpp', '.cxx']
+    _rc_extensions = ['.rc']
+    _mc_extensions = ['.mc']
+
+    # Needed for the filename generation methods provided by the
+    # base class, CCompiler.
+    src_extensions = (_c_extensions + _cpp_extensions +
+                      _rc_extensions + _mc_extensions)
+    res_extension = '.res'
+    obj_extension = '.obj'
+    static_lib_extension = '.lib'
+    shared_lib_extension = '.dll'
+    static_lib_format = shared_lib_format = '%s%s'
+    exe_extension = '.exe'
+
+    def __init__(self, verbose=0, dry_run=False, force=False):
+        CCompiler.__init__(self, verbose, dry_run, force)
+        self.__version = get_build_version()
+        self.__arch = get_build_architecture()
+        if self.__arch == "Intel":
+            # x86
+            if self.__version >= 7:
+                self.__root = r"Software\Microsoft\VisualStudio"
+                self.__macros = MacroExpander(self.__version)
+            else:
+                self.__root = r"Software\Microsoft\Devstudio"
+            self.__product = "Visual Studio version %s" % self.__version
+        else:
+            # Win64. Assume this was built with the platform SDK
+            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)
+
+        self.initialized = False
+
+    def initialize(self):
+        self.__paths = []
+        if ("DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and
+            self.find_exe("cl.exe")):
+            # Assume that the SDK set up everything alright; don't try to be
+            # smarter
+            self.cc = "cl.exe"
+            self.linker = "link.exe"
+            self.lib = "lib.exe"
+            self.rc = "rc.exe"
+            self.mc = "mc.exe"
+        else:
+            self.__paths = self.get_msvc_paths("path")
+
+            if len(self.__paths) == 0:
+                raise PackagingPlatformError("Python was built with %s "
+                    "and extensions need to be built with the same "
+                    "version of the compiler, but it isn't installed." %
+                    self.__product)
+
+            self.cc = self.find_exe("cl.exe")
+            self.linker = self.find_exe("link.exe")
+            self.lib = self.find_exe("lib.exe")
+            self.rc = self.find_exe("rc.exe")   # resource compiler
+            self.mc = self.find_exe("mc.exe")   # message compiler
+            self.set_path_env_var('lib')
+            self.set_path_env_var('include')
+
+        # extend the MSVC path with the current path
+        try:
+            for p in os.environ['path'].split(';'):
+                self.__paths.append(p)
+        except KeyError:
+            pass
+        self.__paths = normalize_and_reduce_paths(self.__paths)
+        os.environ['path'] = ';'.join(self.__paths)
+
+        self.preprocess_options = None
+        if self.__arch == "Intel":
+            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GX',
+                                    '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
+                                          '/Z7', '/D_DEBUG']
+        else:
+            # Win64
+            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GS-',
+                                    '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
+                                          '/Z7', '/D_DEBUG']
+
+        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
+        if self.__version >= 7:
+            self.ldflags_shared_debug = [
+                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
+                ]
+        else:
+            self.ldflags_shared_debug = [
+                '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
+                ]
+        self.ldflags_static = ['/nologo']
+
+        self.initialized = True
+
+    # -- Worker methods ------------------------------------------------
+
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
+        # Copied from ccompiler.py, extended to return .res as 'object'-file
+        # for .rc input file
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            base, ext = os.path.splitext(src_name)
+            base = os.path.splitdrive(base)[1]  # Chop off the drive
+            base = base[os.path.isabs(base):]  # If abs, chop off leading /
+            if ext not in self.src_extensions:
+                # Better to raise an exception instead of silently continuing
+                # and later complain about sources and targets having
+                # different lengths
+                raise CompileError("Don't know how to compile %s" % src_name)
+            if strip_dir:
+                base = os.path.basename(base)
+            if ext in self._rc_extensions:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.res_extension))
+            elif ext in self._mc_extensions:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.res_extension))
+            else:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.obj_extension))
+        return obj_names
+
+    def compile(self, sources,
+                output_dir=None, macros=None, include_dirs=None, debug=False,
+                extra_preargs=None, extra_postargs=None, depends=None):
+
+        if not self.initialized:
+            self.initialize()
+        macros, objects, extra_postargs, pp_opts, build = \
+                self._setup_compile(output_dir, macros, include_dirs, sources,
+                                    depends, extra_postargs)
+
+        compile_opts = extra_preargs or []
+        compile_opts.append('/c')
+        if debug:
+            compile_opts.extend(self.compile_options_debug)
+        else:
+            compile_opts.extend(self.compile_options)
+
+        for obj in objects:
+            try:
+                src, ext = build[obj]
+            except KeyError:
+                continue
+            if debug:
+                # pass the full pathname to MSVC in debug mode,
+                # this allows the debugger to find the source file
+                # without asking the user to browse for it
+                src = os.path.abspath(src)
+
+            if ext in self._c_extensions:
+                input_opt = "/Tc" + src
+            elif ext in self._cpp_extensions:
+                input_opt = "/Tp" + src
+            elif ext in self._rc_extensions:
+                # compile .RC to .RES file
+                input_opt = src
+                output_opt = "/fo" + obj
+                try:
+                    self.spawn([self.rc] + pp_opts +
+                               [output_opt] + [input_opt])
+                except PackagingExecError as msg:
+                    raise CompileError(msg)
+                continue
+            elif ext in self._mc_extensions:
+
+                # Compile .MC to .RC file to .RES file.
+                #   * '-h dir' specifies the directory for the
+                #     generated include file
+                #   * '-r dir' specifies the target directory of the
+                #     generated RC file and the binary message resource
+                #     it includes
+                #
+                # For now (since there are no options to change this),
+                # we use the source-directory for the include file and
+                # the build directory for the RC file and message
+                # resources. This works at least for win32all.
+
+                h_dir = os.path.dirname(src)
+                rc_dir = os.path.dirname(obj)
+                try:
+                    # first compile .MC to .RC and .H file
+                    self.spawn([self.mc] +
+                               ['-h', h_dir, '-r', rc_dir] + [src])
+                    base, _ = os.path.splitext(os.path.basename(src))
+                    rc_file = os.path.join(rc_dir, base + '.rc')
+                    # then compile .RC to .RES file
+                    self.spawn([self.rc] +
+                                ["/fo" + obj] + [rc_file])
+
+                except PackagingExecError as msg:
+                    raise CompileError(msg)
+                continue
+            else:
+                # how to handle this file?
+                raise CompileError(
+                    "Don't know how to compile %s to %s" %
+                    (src, obj))
+
+            output_opt = "/Fo" + obj
+            try:
+                self.spawn([self.cc] + compile_opts + pp_opts +
+                           [input_opt, output_opt] +
+                           extra_postargs)
+            except PackagingExecError as msg:
+                raise CompileError(msg)
+
+        return objects
+
+    def create_static_lib(self, objects, output_libname, output_dir=None,
+                          debug=False, target_lang=None):
+        if not self.initialized:
+            self.initialize()
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        output_filename = \
+            self.library_filename(output_libname, output_dir=output_dir)
+
+        if self._need_link(objects, output_filename):
+            lib_args = objects + ['/OUT:' + output_filename]
+            if debug:
+                pass                    # XXX what goes here?
+            try:
+                self.spawn([self.lib] + lib_args)
+            except PackagingExecError as msg:
+                raise LibError(msg)
+
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    def link(self, target_desc, objects, output_filename, output_dir=None,
+             libraries=None, library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=False, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+
+        if not self.initialized:
+            self.initialize()
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        libraries, library_dirs, runtime_library_dirs = \
+            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+        if runtime_library_dirs:
+            self.warn("don't know what to do with 'runtime_library_dirs': %s"
+                      % (runtime_library_dirs,))
+
+        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
+                                   libraries)
+        if output_dir is not None:
+            output_filename = os.path.join(output_dir, output_filename)
+
+        if self._need_link(objects, output_filename):
+
+            if target_desc == CCompiler.EXECUTABLE:
+                if debug:
+                    ldflags = self.ldflags_shared_debug[1:]
+                else:
+                    ldflags = self.ldflags_shared[1:]
+            else:
+                if debug:
+                    ldflags = self.ldflags_shared_debug
+                else:
+                    ldflags = self.ldflags_shared
+
+            export_opts = []
+            for sym in (export_symbols or []):
+                export_opts.append("/EXPORT:" + sym)
+
+            ld_args = (ldflags + lib_opts + export_opts +
+                       objects + ['/OUT:' + output_filename])
+
+            # The MSVC linker generates .lib and .exp files, which cannot be
+            # suppressed by any linker switches. The .lib files may even be
+            # needed! Make sure they are generated in the temporary build
+            # directory. Since they have different names for debug and release
+            # builds, they can go into the same directory.
+            if export_symbols is not None:
+                dll_name, dll_ext = os.path.splitext(
+                    os.path.basename(output_filename))
+                implib_file = os.path.join(
+                    os.path.dirname(objects[0]),
+                    self.library_filename(dll_name))
+                ld_args.append('/IMPLIB:' + implib_file)
+
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+
+            self.mkpath(os.path.dirname(output_filename))
+            try:
+                self.spawn([self.linker] + ld_args)
+            except PackagingExecError as msg:
+                raise LinkError(msg)
+
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options() function, in
+    # ccompiler.py.
+
+    def library_dir_option(self, dir):
+        return "/LIBPATH:" + dir
+
+    def runtime_library_dir_option(self, dir):
+        raise PackagingPlatformError("don't know how to set runtime library search path for MSVC++")
+
+    def library_option(self, lib):
+        return self.library_filename(lib)
+
+    def find_library_file(self, dirs, lib, debug=False):
+        # Prefer a debugging library if found (and requested), but deal
+        # with it if we don't have one.
+        if debug:
+            try_names = [lib + "_d", lib]
+        else:
+            try_names = [lib]
+        for dir in dirs:
+            for name in try_names:
+                libfile = os.path.join(dir, self.library_filename(name))
+                if os.path.exists(libfile):
+                    return libfile
+        # Oops, didn't find it in *any* of 'dirs'
+        return None
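+
+    # For illustration (hypothetical paths):
+    #   self.find_library_file([r'C:\libs'], 'spam', debug=True)
+    # looks for C:\libs\spam_d.lib before C:\libs\spam.lib and returns
+    # the first one that exists, or None.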
+
+    # Helper methods for using the MSVC registry settings
+
+    def find_exe(self, exe):
+        """Return path to an MSVC executable program.
+
+        Tries to find the program in several places: first, one of the
+        MSVC program search paths from the registry; next, the directories
+        in the PATH environment variable.  If any of those work, return an
+        absolute path that is known to exist.  If none of them work, just
+        return the original program name, 'exe'.
+        """
+
+        for p in self.__paths:
+            fn = os.path.join(os.path.abspath(p), exe)
+            if os.path.isfile(fn):
+                return fn
+
+        # didn't find it; try the directories on the PATH
+        for p in os.environ['Path'].split(';'):
+            fn = os.path.join(os.path.abspath(p), exe)
+            if os.path.isfile(fn):
+                return fn
+
+        return exe
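+
+    # For illustration: self.find_exe('link.exe') returns an absolute
+    # path if the linker is found in one of the registry or PATH
+    # directories, and the bare string 'link.exe' otherwise.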
+
+    def get_msvc_paths(self, path, platform='x86'):
+        """Get a list of devstudio directories (include, lib or path).
+
+        Return a list of strings.  The list will be empty if the registry
+        cannot be accessed or the appropriate registry keys are not found.
+        """
+
+        if not _can_read_reg:
+            return []
+
+        path = path + " dirs"
+        if self.__version >= 7:
+            key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
+                   % (self.__root, self.__version))
+        else:
+            key = (r"%s\6.0\Build System\Components\Platforms"
+                   r"\Win32 (%s)\Directories" % (self.__root, platform))
+
+        for base in HKEYS:
+            d = read_values(base, key)
+            if d:
+                if self.__version >= 7:
+                    return self.__macros.sub(d[path]).split(";")
+                else:
+                    return d[path].split(";")
+        # MSVC 6 seems to create the registry entries we need only when
+        # the GUI is run.
+        if self.__version == 6:
+            for base in HKEYS:
+                if read_values(base, r"%s\6.0" % self.__root) is not None:
+                    self.warn("It seems you have Visual Studio 6 installed, "
+                        "but the expected registry settings are not present.\n"
+                        "You must at least run the Visual Studio GUI once "
+                        "so that these entries are created.")
+                    break
+        return []
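+
+    # For illustration (hypothetical registry contents): on an MSVC 7.1
+    # install, self.get_msvc_paths('library') reads the "library dirs"
+    # value under the VC_OBJECTS_PLATFORM_INFO\Win32\Directories key and
+    # returns it split on ';', e.g. ['C:\\VC\\lib', 'C:\\SDK\\lib'].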
+
+    def set_path_env_var(self, name):
+        """Set environment variable 'name' to an MSVC path type value.
+
+        This is equivalent to a SET command prior to execution of spawned
+        commands.
+        """
+
+        if name == "lib":
+            p = self.get_msvc_paths("library")
+        else:
+            p = self.get_msvc_paths(name)
+        if p:
+            os.environ[name] = ';'.join(p)
+
+
+if get_build_version() >= 8.0:
+    logger.debug("importing new compiler from distutils.msvc9compiler")
+    OldMSVCCompiler = MSVCCompiler
+    from packaging.compiler.msvc9compiler import MSVCCompiler
+    # get_build_architecture is not really relevant now that we support
+    # cross-compilation
+    from packaging.compiler.msvc9compiler import MacroExpander
diff --git a/Lib/packaging/compiler/unixccompiler.py b/Lib/packaging/compiler/unixccompiler.py
new file mode 100644
index 0000000..8c24c0f
--- /dev/null
+++ b/Lib/packaging/compiler/unixccompiler.py
@@ -0,0 +1,339 @@
+"""CCompiler implementation for Unix compilers.
+
+This module contains the UnixCCompiler class, a subclass of CCompiler
+that handles the "typical" Unix-style command-line C compiler:
+  * macros defined with -Dname[=value]
+  * macros undefined with -Uname
+  * include search directories specified with -Idir
+  * libraries specified with -llib
+  * library search directories specified with -Ldir
+  * compile handled by 'cc' (or similar) executable with -c option:
+    compiles .c to .o
+  * link static library handled by 'ar' command (possibly with 'ranlib')
+  * link shared library handled by 'cc -shared'
+"""
+
+import os, sys
+
+from packaging.util import newer
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_preprocess_options, gen_lib_options
+from packaging.errors import (PackagingExecError, CompileError,
+                               LibError, LinkError)
+from packaging import logger
+import sysconfig
+
+
+# XXX Things not currently handled:
+#   * optimization/debug/warning flags; we just use whatever's in Python's
+#     Makefile and live with it.  Is this adequate?  If not, we might
+#     have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
+#     SunCCompiler, and I suspect down that road lies madness.
+#   * even if we don't know a warning flag from an optimization flag,
+#     we need some way for outsiders to feed preprocessor/compiler/linker
+#     flags in to us -- eg. a sysadmin might want to mandate certain flags
+#     via a site config file, or a user might want to set something for
+#     compiling this module distribution only via the setup.py command
+#     line, whatever.  As long as these options come from something on the
+#     current system, they can be as system-dependent as they like, and we
+#     should just happily stuff them into the preprocessor/compiler/linker
+#     options and carry on.
+
+def _darwin_compiler_fixup(compiler_so, cc_args):
+    """
+    This function will strip '-isysroot PATH' and '-arch ARCH' from the
+    compile flags if the user has specified one of them in
+    extra_compile_flags.
+
+    This is needed because '-arch ARCH' adds another architecture to the
+    build without providing a way to remove one.  Furthermore, GCC will
+    barf if multiple '-isysroot' arguments are present.
+    """
+    stripArch = stripSysroot = False
+
+    compiler_so = list(compiler_so)
+    kernel_version = os.uname()[2]  # e.g. '8.4.3'
+    major_version = int(kernel_version.split('.')[0])
+
+    if major_version < 8:
+        # OS X before 10.4.0: -arch and -isysroot are not supported at
+        # all.
+        stripArch = stripSysroot = True
+    else:
+        stripArch = '-arch' in cc_args
+        stripSysroot = '-isysroot' in cc_args
+
+    if stripArch or 'ARCHFLAGS' in os.environ:
+        while True:
+            try:
+                index = compiler_so.index('-arch')
+                # Strip this argument and the next one:
+                del compiler_so[index:index+2]
+            except ValueError:
+                break
+
+    if 'ARCHFLAGS' in os.environ and not stripArch:
+        # The user specified different -arch flags in the environment;
+        # see also sysconfig
+        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
+
+    if stripSysroot:
+        try:
+            index = compiler_so.index('-isysroot')
+            # Strip this argument and the next one:
+            del compiler_so[index:index+2]
+        except ValueError:
+            pass
+
+    # Check that the SDK used during compilation actually exists.  The
+    # universal build requires a universal SDK, and not all users have
+    # one installed by default.
+    sysroot = None
+    if '-isysroot' in cc_args:
+        idx = cc_args.index('-isysroot')
+        sysroot = cc_args[idx+1]
+    elif '-isysroot' in compiler_so:
+        idx = compiler_so.index('-isysroot')
+        sysroot = compiler_so[idx+1]
+
+    if sysroot and not os.path.isdir(sysroot):
+        logger.warning(
+            "compiling with an SDK that doesn't seem to exist: %r;\n"
+            "please check your Xcode installation", sysroot)
+
+    return compiler_so
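+
+# A rough sketch of the effect (hypothetical flags): if the user passed
+# cc_args = ['-arch', 'ppc', '-isysroot', '/SDKs/MacOSX10.4u.sdk'] and
+# compiler_so = ['cc', '-arch', 'i386', '-isysroot', '/old/sdk'], the
+# '-arch i386' and '-isysroot /old/sdk' pairs are stripped from
+# compiler_so, leaving the user-supplied architecture and SDK in charge.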
+
+class UnixCCompiler(CCompiler):
+
+    name = 'unix'
+    description = 'Standard UNIX-style compiler'
+
+    # These are used by CCompiler in two places: the constructor sets
+    # instance attributes 'preprocessor', 'compiler', etc. from them, and
+    # 'set_executable()' allows any of these to be set.  The defaults here
+    # are pretty generic; they will probably have to be set by an outsider
+    # (eg. using information discovered by the sysconfig about building
+    # Python extensions).
+    executables = {'preprocessor' : None,
+                   'compiler'     : ["cc"],
+                   'compiler_so'  : ["cc"],
+                   'compiler_cxx' : ["cc"],
+                   'linker_so'    : ["cc", "-shared"],
+                   'linker_exe'   : ["cc"],
+                   'archiver'     : ["ar", "-cr"],
+                   'ranlib'       : None,
+                  }
+
+    if sys.platform[:6] == "darwin":
+        executables['ranlib'] = ["ranlib"]
+
+    # Needed for the filename generation methods provided by the base
+    # class, CCompiler.  NB. whoever instantiates/uses a particular
+    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
+    # reasonable common default here, but it's not necessarily used on all
+    # Unices!
+
+    src_extensions = [".c", ".C", ".cc", ".cxx", ".cpp", ".m"]
+    obj_extension = ".o"
+    static_lib_extension = ".a"
+    shared_lib_extension = ".so"
+    dylib_lib_extension = ".dylib"
+    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
+    if sys.platform == "cygwin":
+        exe_extension = ".exe"
+
+    def preprocess(self, source,
+                   output_file=None, macros=None, include_dirs=None,
+                   extra_preargs=None, extra_postargs=None):
+        ignore, macros, include_dirs = \
+            self._fix_compile_args(None, macros, include_dirs)
+        pp_opts = gen_preprocess_options(macros, include_dirs)
+        pp_args = self.preprocessor + pp_opts
+        if output_file:
+            pp_args.extend(('-o', output_file))
+        if extra_preargs:
+            pp_args[:0] = extra_preargs
+        if extra_postargs:
+            pp_args.extend(extra_postargs)
+        pp_args.append(source)
+
+        # We need to preprocess: either we're being forced to, or we're
+        # generating output to stdout, or there's a target output file and
+        # the source file is newer than the target (or the target doesn't
+        # exist).
+        if self.force or output_file is None or newer(source, output_file):
+            if output_file:
+                self.mkpath(os.path.dirname(output_file))
+            try:
+                self.spawn(pp_args)
+            except PackagingExecError as msg:
+                raise CompileError(msg)
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        compiler_so = self.compiler_so
+        if sys.platform == 'darwin':
+            compiler_so = _darwin_compiler_fixup(compiler_so,
+                                                 cc_args + extra_postargs)
+        try:
+            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
+                       extra_postargs)
+        except PackagingExecError as msg:
+            raise CompileError(msg)
+
+    def create_static_lib(self, objects, output_libname,
+                          output_dir=None, debug=False, target_lang=None):
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+
+        output_filename = \
+            self.library_filename(output_libname, output_dir=output_dir)
+
+        if self._need_link(objects, output_filename):
+            self.mkpath(os.path.dirname(output_filename))
+            self.spawn(self.archiver +
+                       [output_filename] +
+                       objects + self.objects)
+
+            # Not many Unices require ranlib anymore -- SunOS 4.x is, I
+            # think, the only major Unix that does.  Maybe we need some
+            # platform intelligence here to skip ranlib if it's not
+            # needed -- or maybe Python's configure script took care of
+            # it for us, hence the check for leading colon.
+            if self.ranlib:
+                try:
+                    self.spawn(self.ranlib + [output_filename])
+                except PackagingExecError as msg:
+                    raise LibError(msg)
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    def link(self, target_desc, objects,
+             output_filename, output_dir=None, libraries=None,
+             library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=False, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        libraries, library_dirs, runtime_library_dirs = \
+            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
+                                   libraries)
+        if output_dir is not None and not isinstance(output_dir, str):
+            raise TypeError("'output_dir' must be a string or None")
+        if output_dir is not None:
+            output_filename = os.path.join(output_dir, output_filename)
+
+        if self._need_link(objects, output_filename):
+            ld_args = (objects + self.objects +
+                       lib_opts + ['-o', output_filename])
+            if debug:
+                ld_args[:0] = ['-g']
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+            self.mkpath(os.path.dirname(output_filename))
+            try:
+                if target_desc == CCompiler.EXECUTABLE:
+                    linker = self.linker_exe[:]
+                else:
+                    linker = self.linker_so[:]
+                if target_lang == "c++" and self.compiler_cxx:
+                    # skip over environment variable settings if /usr/bin/env
+                    # is used to set up the linker's environment.
+                    # This is needed on OSX. Note: this assumes that the
+                    # normal and C++ compiler have the same environment
+                    # settings.
+                    i = 0
+                    if os.path.basename(linker[0]) == "env":
+                        i = 1
+                        while '=' in linker[i]:
+                            i = i + 1
+
+                    linker[i] = self.compiler_cxx[i]
+
+                if sys.platform == 'darwin':
+                    linker = _darwin_compiler_fixup(linker, ld_args)
+
+                self.spawn(linker + ld_args)
+            except PackagingExecError as msg:
+                raise LinkError(msg)
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function in
+    # ccompiler.py.
+
+    def library_dir_option(self, dir):
+        return "-L" + dir
+
+    def _is_gcc(self, compiler_name):
+        return "gcc" in compiler_name or "g++" in compiler_name
+
+    def runtime_library_dir_option(self, dir):
+        # XXX Hackish, at the very least.  See Python bug #445902:
+        # http://sourceforge.net/tracker/index.php
+        #   ?func=detail&aid=445902&group_id=5470&atid=105470
+        # Linkers on different platforms need different options to
+        # specify that directories need to be added to the list of
+        # directories searched for dependencies when a dynamic library
+        # is sought.  GCC on GNU systems (Linux, FreeBSD, ...) has to
+        # be told to pass the -R option through to the linker, whereas
+        # other compilers and gcc on other systems just know this.
+        # Other compilers may need something slightly different.  At
+        # this time, there's no way to determine this information from
+        # the configuration data stored in the Python installation, so
+        # we use this hack.
+
+        compiler = os.path.basename(sysconfig.get_config_var("CC"))
+        if sys.platform[:6] == "darwin":
+            # MacOSX's linker doesn't understand the -R flag at all
+            return "-L" + dir
+        elif sys.platform[:5] == "hp-ux":
+            if self._is_gcc(compiler):
+                return ["-Wl,+s", "-L" + dir]
+            return ["+s", "-L" + dir]
+        elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
+            return ["-rpath", dir]
+        elif self._is_gcc(compiler):
+            # gcc on non-GNU systems does not need -Wl, but can
+            # use it anyway.  Since distutils has always passed in
+            # -Wl whenever gcc was used in the past it is probably
+            # safest to keep doing so.
+            if sysconfig.get_config_var("GNULD") == "yes":
+                # GNU ld needs an extra option to get a RUNPATH
+                # instead of just an RPATH.
+                return "-Wl,--enable-new-dtags,-R" + dir
+            else:
+                return "-Wl,-R" + dir
+        elif sys.platform[:3] == "aix":
+            return "-blibpath:" + dir
+        else:
+            # No idea how --enable-new-dtags would be passed on to
+            # ld if this system were using GNU ld.  Don't know if a
+            # system like this even exists.
+            return "-R" + dir
+
+    def library_option(self, lib):
+        return "-l" + lib
+
+    def find_library_file(self, dirs, lib, debug=False):
+        shared_f = self.library_filename(lib, lib_type='shared')
+        dylib_f = self.library_filename(lib, lib_type='dylib')
+        static_f = self.library_filename(lib, lib_type='static')
+
+        for dir in dirs:
+            shared = os.path.join(dir, shared_f)
+            dylib = os.path.join(dir, dylib_f)
+            static = os.path.join(dir, static_f)
+            # We're second-guessing the linker here, with not much hard
+            # data to go on: GCC seems to prefer the shared library, so I'm
+            # assuming that *all* Unix C compilers do.  And of course I'm
+            # ignoring even GCC's "-static" option.  So sue me.
+            if os.path.exists(dylib):
+                return dylib
+            elif os.path.exists(shared):
+                return shared
+            elif os.path.exists(static):
+                return static
+
+        # Oops, didn't find it in *any* of 'dirs'
+        return None
diff --git a/Lib/packaging/config.py b/Lib/packaging/config.py
new file mode 100644
index 0000000..6df2bab
--- /dev/null
+++ b/Lib/packaging/config.py
@@ -0,0 +1,362 @@
+"""Utilities to find and read config files used by packaging."""
+
+import os
+import sys
+import logging
+
+from shlex import split
+from configparser import RawConfigParser
+from packaging import logger
+from packaging.errors import PackagingOptionError
+from packaging.compiler.extension import Extension
+from packaging.util import check_environ, iglob, resolve_name, strtobool
+from packaging.compiler import set_compiler
+from packaging.command import set_command
+from packaging.markers import interpret
+
+
+def _pop_values(values_dct, key):
+    """Remove values from the dictionary and convert them as a list"""
+    vals_str = values_dct.pop(key, '')
+    if not vals_str:
+        return
+    fields = []
+    # the line separator is \n for setup.cfg files
+    for field in vals_str.split('\n'):
+        tmp_vals = field.split('--')
+        if len(tmp_vals) == 2 and not interpret(tmp_vals[1]):
+            continue
+        fields.append(tmp_vals[0])
+    # Get bash options like `gcc -print-file-name=libgcc.a` XXX bash options?
+    vals = split(' '.join(fields))
+    if vals:
+        return vals
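+
+# For illustration, a hypothetical setup.cfg extension section such as
+#   sources = spam.c
+#             spamstub.c -- sys.platform == 'win32'
+# yields ['spam.c'] on non-Windows systems: the text after '--' is an
+# environment marker evaluated with packaging.markers.interpret().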
+
+
+def _rel_path(base, path):
+    # Return *path* relative to *base*, '/'-separated and with no
+    # leading '/'.
+    base = base.replace(os.path.sep, '/')
+    path = path.replace(os.path.sep, '/')
+    assert path.startswith(base)
+    return path[len(base):].lstrip('/')
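+
+# e.g. _rel_path('/srv/proj', '/srv/proj/data/a.txt') == 'data/a.txt'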
+
+
+def get_resources_dests(resources_root, rules):
+    """Find destinations for resources files"""
+    destinations = {}
+    for base, suffix, dest in rules:
+        prefix = os.path.join(resources_root, base)
+        for abs_base in iglob(prefix):
+            abs_glob = os.path.join(abs_base, suffix)
+            for abs_path in iglob(abs_glob):
+                resource_file = _rel_path(resources_root, abs_path)
+                if dest is None:  # remove the entry if it was here
+                    destinations.pop(resource_file, None)
+                else:
+                    rel_path = _rel_path(abs_base, abs_path)
+                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
+                    destinations[resource_file] = rel_dest + '/' + rel_path
+    return destinations
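+
+# For illustration (hypothetical layout): with a rule
+# ('doc', '*.txt', '{doc}') and a file doc/intro.txt under
+# resources_root, the mapping {'doc/intro.txt': '{doc}/intro.txt'} is
+# returned; a rule whose dest is None removes matching entries instead.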
+
+
+class Config:
+    """Reads configuration files and work with the Distribution instance
+    """
+    def __init__(self, dist):
+        self.dist = dist
+        self.setup_hook = None
+
+    def run_hook(self, config):
+        if self.setup_hook is None:
+            return
+        # the hook gets only the config
+        self.setup_hook(config)
+
+    def find_config_files(self):
+        """Find as many configuration files as should be processed for this
+        platform, and return a list of filenames in the order in which they
+        should be parsed.  The filenames returned are guaranteed to exist
+        (modulo nasty race conditions).
+
+        There are three possible config files: packaging.cfg in the
+        Packaging installation directory (i.e. where the top-level
+        Packaging __init__.py file lives); a file in the user's home
+        directory named .pydistutils.cfg on Unix or pydistutils.cfg
+        on Windows/Mac; and setup.cfg in the current directory.
+
+        The file in the user's home directory can be disabled with the
+        --no-user-cfg option.
+        """
+        files = []
+        check_environ()
+
+        # Where to look for the system-wide Packaging config file
+        sys_dir = os.path.dirname(sys.modules['packaging'].__file__)
+
+        # Look for the system config file
+        sys_file = os.path.join(sys_dir, "packaging.cfg")
+        if os.path.isfile(sys_file):
+            files.append(sys_file)
+
+        # What to call the per-user config file
+        if os.name == 'posix':
+            user_filename = ".pydistutils.cfg"
+        else:
+            user_filename = "pydistutils.cfg"
+
+        # And look for the user config file
+        if self.dist.want_user_cfg:
+            user_file = os.path.join(os.path.expanduser('~'), user_filename)
+            if os.path.isfile(user_file):
+                files.append(user_file)
+
+        # All platforms support local setup.cfg
+        local_file = "setup.cfg"
+        if os.path.isfile(local_file):
+            files.append(local_file)
+
+        if logger.isEnabledFor(logging.DEBUG):
+            logger.debug("using config files: %s", ', '.join(files))
+        return files
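+
+    # For illustration (hypothetical result): on a Unix box this may
+    # return ['/usr/lib/python3.3/packaging/packaging.cfg',
+    #         '/home/user/.pydistutils.cfg', 'setup.cfg'].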
+
+    def _convert_metadata(self, name, value):
+        # converts a value found in setup.cfg into a valid metadata
+        # XXX
+        return value
+
+    def _multiline(self, value):
+        value = [v for v in
+                 [v.strip() for v in value.split('\n')]
+                 if v != '']
+        return value
+
+    def _read_setup_cfg(self, parser, cfg_filename):
+        cfg_directory = os.path.dirname(os.path.abspath(cfg_filename))
+        content = {}
+        for section in parser.sections():
+            content[section] = dict(parser.items(section))
+
+        # global:setup_hook is called *first*
+        if 'global' in content:
+            if 'setup_hook' in content['global']:
+                setup_hook = content['global']['setup_hook']
+                try:
+                    self.setup_hook = resolve_name(setup_hook)
+                except ImportError as e:
+                    logger.warning('could not import setup_hook: %s',
+                            e.args[0])
+                else:
+                    self.run_hook(content)
+
+        metadata = self.dist.metadata
+
+        # setting the metadata values
+        if 'metadata' in content:
+            for key, value in content['metadata'].items():
+                key = key.replace('_', '-')
+                if metadata.is_multi_field(key):
+                    value = self._multiline(value)
+
+                if key == 'project-url':
+                    value = [(label.strip(), url.strip())
+                             for label, url in
+                             [v.split(',') for v in value]]
+
+                if key == 'description-file':
+                    if 'description' in content['metadata']:
+                        msg = ("description and description-file' are "
+                               "mutually exclusive")
+                        raise PackagingOptionError(msg)
+
+                    if isinstance(value, list):
+                        filenames = value
+                    else:
+                        filenames = value.split()
+
+                    # concatenate the contents of each file
+                    value = ''
+                    for filename in filenames:
+                        # will raise if file not found
+                        with open(filename) as description_file:
+                            value += description_file.read().strip() + '\n'
+                        # add filename as a required file
+                        if filename not in metadata.requires_files:
+                            metadata.requires_files.append(filename)
+                    value = value.strip()
+                    key = 'description'
+
+                if metadata.is_metadata_field(key):
+                    metadata[key] = self._convert_metadata(key, value)
+
+        if 'files' in content:
+            files = content['files']
+            self.dist.package_dir = files.pop('packages_root', None)
+
+            files = dict((key, self._multiline(value)) for key, value in
+                         files.items())
+
+            self.dist.packages = []
+
+            packages = files.get('packages', [])
+            if isinstance(packages, str):
+                packages = [packages]
+
+            for package in packages:
+                if ':' in package:
+                    dir_, package = package.split(':')
+                    self.dist.package_dir[package] = dir_
+                self.dist.packages.append(package)
+
+            self.dist.py_modules = files.get('modules', [])
+            if isinstance(self.dist.py_modules, str):
+                self.dist.py_modules = [self.dist.py_modules]
+            self.dist.scripts = files.get('scripts', [])
+            if isinstance(self.dist.scripts, str):
+                self.dist.scripts = [self.dist.scripts]
+
+            self.dist.package_data = {}
+            for data in files.get('package_data', []):
+                data = data.split('=')
+                if len(data) != 2:
+                    continue  # XXX error should never pass silently
+                key, value = data
+                self.dist.package_data[key.strip()] = value.strip()
+
+            self.dist.data_files = []
+            for data in files.get('data_files', []):
+                data = data.split('=')
+                if len(data) != 2:
+                    continue
+                key, value = data
+                values = [v.strip() for v in value.split(',')]
+                self.dist.data_files.append((key, values))
+
+            # manifest template
+            self.dist.extra_files = files.get('extra_files', [])
+
+            resources = []
+            for rule in files.get('resources', []):
+                glob, destination = rule.split('=', 1)
+                rich_glob = glob.strip().split(' ', 1)
+                if len(rich_glob) == 2:
+                    prefix, suffix = rich_glob
+                else:
+                    assert len(rich_glob) == 1
+                    prefix = ''
+                    suffix = glob
+                destination = destination.strip()
+                if destination == '<exclude>':
+                    destination = None
+                resources.append(
+                    (prefix.strip(), suffix.strip(), destination))
+
+            if resources:
+                self.dist.data_files = get_resources_dests(
+                    cfg_directory, resources)
+
+        ext_modules = self.dist.ext_modules
+        for section_key in content:
+            labels = section_key.split('=')
+            if len(labels) == 2 and labels[0] == 'extension':
+                # labels[1] is not used for now, but could later express
+                # a build dependency between extensions
+                values_dct = content[section_key]
+                ext_modules.append(Extension(
+                    values_dct.pop('name'),
+                    _pop_values(values_dct, 'sources'),
+                    _pop_values(values_dct, 'include_dirs'),
+                    _pop_values(values_dct, 'define_macros'),
+                    _pop_values(values_dct, 'undef_macros'),
+                    _pop_values(values_dct, 'library_dirs'),
+                    _pop_values(values_dct, 'libraries'),
+                    _pop_values(values_dct, 'runtime_library_dirs'),
+                    _pop_values(values_dct, 'extra_objects'),
+                    _pop_values(values_dct, 'extra_compile_args'),
+                    _pop_values(values_dct, 'extra_link_args'),
+                    _pop_values(values_dct, 'export_symbols'),
+                    _pop_values(values_dct, 'swig_opts'),
+                    _pop_values(values_dct, 'depends'),
+                    values_dct.pop('language', None),
+                    values_dct.pop('optional', None),
+                    **values_dct))
+
+    def parse_config_files(self, filenames=None):
+        if filenames is None:
+            filenames = self.find_config_files()
+
+        logger.debug("Distribution.parse_config_files():")
+
+        parser = RawConfigParser()
+
+        for filename in filenames:
+            logger.debug("  reading %s", filename)
+            parser.read(filename, encoding='utf-8')
+
+            if os.path.split(filename)[-1] == 'setup.cfg':
+                self._read_setup_cfg(parser, filename)
+
+            for section in parser.sections():
+                if section == 'global':
+                    if parser.has_option('global', 'compilers'):
+                        self._load_compilers(parser.get('global', 'compilers'))
+
+                    if parser.has_option('global', 'commands'):
+                        self._load_commands(parser.get('global', 'commands'))
+
+                options = parser.options(section)
+                opt_dict = self.dist.get_option_dict(section)
+
+                for opt in options:
+                    if opt == '__name__':
+                        continue
+                    val = parser.get(section, opt)
+                    opt = opt.replace('-', '_')
+
+                    if opt == 'sub_commands':
+                        val = self._multiline(val)
+                        if isinstance(val, str):
+                            val = [val]
+
+                    # Hooks use a suffix system to prevent being overridden
+                    # by a config file processed later (i.e. a hook set in
+                    # the user config file cannot be replaced by a hook
+                    # set in a project config file, unless they have the
+                    # same suffix).
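+                    # For example, "pre_hook.a = pkg.mod.func" set in the
+                    # user file survives a project file that defines
+                    # "pre_hook.b"; only another "pre_hook.a" would
+                    # replace it.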
+                    if (opt.startswith("pre_hook.") or
+                        opt.startswith("post_hook.")):
+                        hook_type, alias = opt.split(".")
+                        hook_dict = opt_dict.setdefault(
+                            hook_type, (filename, {}))[1]
+                        hook_dict[alias] = val
+                    else:
+                        opt_dict[opt] = filename, val
+
+            # Make the RawConfigParser forget everything (so we retain
+            # the original filenames that options come from)
+            parser.__init__()
+
+        # If there was a "global" section in the config file, use it
+        # to set Distribution options.
+        if 'global' in self.dist.command_options:
+            for opt, (src, val) in self.dist.command_options['global'].items():
+                alias = self.dist.negative_opt.get(opt)
+                try:
+                    if alias:
+                        setattr(self.dist, alias, not strtobool(val))
+                    elif opt == 'dry_run':  # FIXME ugh!
+                        setattr(self.dist, opt, strtobool(val))
+                    else:
+                        setattr(self.dist, opt, val)
+                except ValueError as msg:
+                    raise PackagingOptionError(msg)
+
+    def _load_compilers(self, compilers):
+        compilers = self._multiline(compilers)
+        if isinstance(compilers, str):
+            compilers = [compilers]
+        for compiler in compilers:
+            set_compiler(compiler.strip())
+
+    def _load_commands(self, commands):
+        commands = self._multiline(commands)
+        if isinstance(commands, str):
+            commands = [commands]
+        for command in commands:
+            set_command(command.strip())
diff --git a/Lib/packaging/create.py b/Lib/packaging/create.py
new file mode 100644
index 0000000..b96aef0
--- /dev/null
+++ b/Lib/packaging/create.py
@@ -0,0 +1,689 @@
+#!/usr/bin/env python
+"""Interactive helper used to create a setup.cfg file.
+
+This script will generate a packaging configuration file by looking at
+the current directory and asking the user questions.  It is intended to
+be called as
+
+  pysetup create
+
+or
+
+  python3.3 -m packaging.create
+"""
+
+#  Original code by Sean Reifschneider <jafo@tummy.com>
+
+#  Original TODO list:
+#  Look for a license file and automatically add the category.
+#  When a .c file is found during the walk, can we add it as an extension?
+#  Ask if there is a maintainer different from the author
+#  Ask for the platform (can we detect this via "import win32" or something?)
+#  Ask for the dependencies.
+#  Ask for the Requires-Dist
+#  Ask for the Provides-Dist
+#  Ask for a description
+#  Detect scripts (not sure how.  #! outside of package?)
+
+import os
+import imp
+import sys
+import glob
+import re
+import shutil
+import sysconfig
+import tokenize
+from configparser import RawConfigParser
+from textwrap import dedent
+from hashlib import md5
+from functools import cmp_to_key
+# imported with an underscore as it should eventually be replaced by a
+# dict form or another structure
+from packaging._trove import all_classifiers as _CLASSIFIERS_LIST
+from packaging.version import is_valid_version
+
+_FILENAME = 'setup.cfg'
+_DEFAULT_CFG = '.pypkgcreate'
+
+_helptext = {
+    'name': '''
+The name of the program to be packaged, usually a single word composed
+of lower-case characters such as "python", "sqlalchemy", or "CherryPy".
+''',
+    'version': '''
+Version number of the software, typically 2 or 3 numbers separated by dots
+such as "1.00", "0.6", or "3.02.01".  "0.1.0" is recommended for initial
+development.
+''',
+    'summary': '''
+A one-line summary of what this project is or does, typically a sentence 80
+characters or less in length.
+''',
+    'author': '''
+The full name of the author (typically you).
+''',
+    'author_email': '''
+E-mail address of the project author (typically you).
+''',
+    'do_classifier': '''
+Trove classifiers are optional identifiers that allow you to specify the
+intended audience by saying things like "Beta software with a text UI
+for Linux under the PSF license".  However, this can be a somewhat involved
+process.
+''',
+    'packages': '''
+You can provide a package name contained in your project.
+''',
+    'modules': '''
+You can provide a python module contained in your project.
+''',
+    'extra_files': '''
+You can provide extra files/dirs contained in your project.
+It has to follow the template syntax. XXX add help here.
+''',
+
+    'home_page': '''
+The home page for the project, typically starting with "http://".
+''',
+    'trove_license': '''
+Optionally you can specify a license.  Type a string that identifies a common
+license, and then you can select a list of license specifiers.
+''',
+    'trove_generic': '''
+Optionally, you can set other trove identifiers for things such as the
+human language, programming language, user interface, etc...
+''',
+    'setup.py found': '''
+The setup.py script will be executed to retrieve the metadata.
+A wizard will be run if you answer "n",
+''',
+}
+
+PROJECT_MATURITY = ['Development Status :: 1 - Planning',
+                    'Development Status :: 2 - Pre-Alpha',
+                    'Development Status :: 3 - Alpha',
+                    'Development Status :: 4 - Beta',
+                    'Development Status :: 5 - Production/Stable',
+                    'Development Status :: 6 - Mature',
+                    'Development Status :: 7 - Inactive']
+
+# XXX everything needs docstrings and tests (both low-level tests of various
+# methods and functional tests of running the script)
+
+
+def load_setup():
+    """run the setup script (i.e the setup.py file)
+
+    This function load the setup file in all cases (even if it have already
+    been loaded before, because we are monkey patching its setup function with
+    a particular one"""
+    with open("setup.py", "rb") as f:
+        encoding, lines = tokenize.detect_encoding(f.readline)
+    with open("setup.py", encoding=encoding) as f:
+        imp.load_module("setup", f, "setup.py", (".py", "r", imp.PY_SOURCE))
+
+
+def ask_yn(question, default=None, helptext=None):
+    question += ' (y/n)'
+    while True:
+        answer = ask(question, default, helptext, required=True)
+        if answer and answer[0].lower() in 'yn':
+            return answer[0].lower()
+
+        print('\nERROR: You must select "Y" or "N".\n')
+
+
+def ask(question, default=None, helptext=None, required=True,
+        lengthy=False, multiline=False):
+    prompt = '%s: ' % (question,)
+    if default:
+        prompt = '%s [%s]: ' % (question, default)
+        if default and len(question) + len(default) > 70:
+            prompt = '%s\n    [%s]: ' % (question, default)
+    if lengthy or multiline:
+        prompt += '\n   > '
+
+    if not helptext:
+        helptext = 'No additional help available.'
+
+    helptext = helptext.strip("\n")
+
+    while True:
+        sys.stdout.write(prompt)
+        sys.stdout.flush()
+
+        line = sys.stdin.readline().strip()
+        if line == '?':
+            print('=' * 70)
+            print(helptext)
+            print('=' * 70)
+            continue
+        if default and not line:
+            return default
+        if not line and required:
+            print('*' * 70)
+            print('This value cannot be empty.')
+            print('===========================')
+            if helptext:
+                print(helptext)
+            print('*' * 70)
+            continue
+        return line
+
+
+def convert_yn_to_bool(yn, yes=True, no=False):
+    """Convert a y/yes or n/no to a boolean value."""
+    if yn.lower().startswith('y'):
+        return yes
+    else:
+        return no
+
+
+def _build_classifiers_dict(classifiers):
+    d = {}
+    for key in classifiers:
+        subDict = d
+        for subkey in key.split(' :: '):
+            if not subkey in subDict:
+                subDict[subkey] = {}
+            subDict = subDict[subkey]
+    return d
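+
+# For illustration: _build_classifiers_dict(['A :: B', 'A :: C'])
+# returns {'A': {'B': {}, 'C': {}}} -- a trie keyed on ' :: ' segments
+# that walk_classifiers() later traverses interactively.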
+
+CLASSIFIERS = _build_classifiers_dict(_CLASSIFIERS_LIST)
+
+
+def _build_licences(classifiers):
+    res = []
+    for index, item in enumerate(classifiers):
+        if not item.startswith('License :: '):
+            continue
+        res.append((index, item.split(' :: ')[-1].lower()))
+    return res
+
+LICENCES = _build_licences(_CLASSIFIERS_LIST)
+
+
+class MainProgram:
+    """Make a project setup configuration file (setup.cfg)."""
+
+    def __init__(self):
+        self.configparser = None
+        self.classifiers = set()
+        self.data = {'name': '',
+                     'version': '1.0.0',
+                     'classifier': self.classifiers,
+                     'packages': [],
+                     'modules': [],
+                     'platform': [],
+                     'resources': [],
+                     'extra_files': [],
+                     'scripts': [],
+                     }
+        self._load_defaults()
+
+    def __call__(self):
+        setupcfg_defined = False
+        if self.has_setup_py() and self._prompt_user_for_conversion():
+            setupcfg_defined = self.convert_py_to_cfg()
+        if not setupcfg_defined:
+            self.define_cfg_values()
+        self._write_cfg()
+
+    def has_setup_py(self):
+        """Test for the existance of a setup.py file."""
+        return os.path.exists('setup.py')
+
+    def define_cfg_values(self):
+        self.inspect()
+        self.query_user()
+
+    def _lookup_option(self, key):
+        if not self.configparser.has_option('DEFAULT', key):
+            return None
+        return self.configparser.get('DEFAULT', key)
+
+    def _load_defaults(self):
+        # Load default values from a user configuration file
+        self.configparser = RawConfigParser()
+        # TODO replace with section in distutils config file
+        default_cfg = os.path.expanduser(os.path.join('~', _DEFAULT_CFG))
+        self.configparser.read(default_cfg)
+        self.data['author'] = self._lookup_option('author')
+        self.data['author_email'] = self._lookup_option('author_email')
+
+    def _prompt_user_for_conversion(self):
+        # Prompt the user about whether they would like to use the setup.py
+        # conversion utility to generate a setup.cfg or generate the setup.cfg
+        # from scratch
+        answer = ask_yn(('A legacy setup.py has been found.\n'
+                         'Would you like to convert it to a setup.cfg?'),
+                        default="y",
+                        helptext=_helptext['setup.py found'])
+        return convert_yn_to_bool(answer)
+
+    def _dotted_packages(self, data):
+        packages = sorted(data)
+        modified_pkgs = []
+        for pkg in packages:
+            pkg = pkg.lstrip('./')
+            pkg = pkg.replace('/', '.')
+            modified_pkgs.append(pkg)
+        return modified_pkgs
+
+    def _write_cfg(self):
+        if os.path.exists(_FILENAME):
+            if os.path.exists('%s.old' % _FILENAME):
+                print("ERROR: %(name)s.old backup exists, please check that "
+                      "current %(name)s is correct and remove %(name)s.old" %
+                      {'name': _FILENAME})
+                return
+            shutil.move(_FILENAME, '%s.old' % _FILENAME)
+
+        with open(_FILENAME, 'w', encoding='utf-8') as fp:
+            fp.write('[metadata]\n')
+            # simple string entries
+            for name in ('name', 'version', 'summary', 'download_url'):
+                fp.write('%s = %s\n' % (name, self.data.get(name, 'UNKNOWN')))
+            # optional string entries
+            if 'keywords' in self.data and self.data['keywords']:
+                fp.write('keywords = %s\n' % ' '.join(self.data['keywords']))
+            for name in ('home_page', 'author', 'author_email',
+                         'maintainer', 'maintainer_email', 'description-file'):
+                if name in self.data and self.data[name]:
+                    fp.write('%s = %s\n' % (name, self.data[name]))
+            if 'description' in self.data:
+                fp.write(
+                    'description = %s\n'
+                    % '\n       |'.join(self.data['description'].split('\n')))
+            # multiple use string entries
+            for name in ('platform', 'supported-platform', 'classifier',
+                         'requires-dist', 'provides-dist', 'obsoletes-dist',
+                         'requires-external'):
+                if not(name in self.data and self.data[name]):
+                    continue
+                fp.write('%s = ' % name)
+                fp.write(''.join('    %s\n' % val
+                                 for val in self.data[name]).lstrip())
+            fp.write('\n[files]\n')
+            for name in ('packages', 'modules', 'scripts',
+                         'package_data', 'extra_files'):
+                if not(name in self.data and self.data[name]):
+                    continue
+                fp.write('%s = %s\n'
+                         % (name, '\n    '.join(self.data[name]).strip()))
+            fp.write('\nresources =\n')
+            for src, dest in self.data['resources']:
+                fp.write('    %s = %s\n' % (src, dest))
+            fp.write('\n')
+
+        os.chmod(_FILENAME, 0o644)
+        print('Wrote "%s".' % _FILENAME)
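+
+    # A minimal generated file looks roughly like (hypothetical values):
+    #   [metadata]
+    #   name = spam
+    #   version = 0.1.0
+    #   summary = An example summary
+    #
+    #   [files]
+    #   packages = spam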
+
+    def convert_py_to_cfg(self):
+        """Generate a setup.cfg from an existing setup.py.
+
+        It only exports the distutils metadata (setuptools specific metadata
+        is not currently supported).
+        """
+        data = self.data
+
+        def setup_mock(**attrs):
+            """Mock the setup(**attrs) in order to retrieve metadata."""
+            # use the distutils v1 processing to correctly parse metadata.
+            # XXX we could also use the setuptools distribution ???
+            from distutils.dist import Distribution
+            dist = Distribution(attrs)
+            dist.parse_config_files()
+
+            # 1. retrieve metadata fields that are quite similar in
+            # PEP 314 and PEP 345
+            labels = (('name',) * 2,
+                      ('version',) * 2,
+                      ('author',) * 2,
+                      ('author_email',) * 2,
+                      ('maintainer',) * 2,
+                      ('maintainer_email',) * 2,
+                      ('description', 'summary'),
+                      ('long_description', 'description'),
+                      ('url', 'home_page'),
+                      ('platforms', 'platform'),
+                      # backport only for 2.5+
+                      ('provides', 'provides-dist'),
+                      ('obsoletes', 'obsoletes-dist'),
+                      ('requires', 'requires-dist'))
+
+            get = lambda lab: getattr(dist.metadata, lab.replace('-', '_'))
+            data.update((new, get(old)) for old, new in labels if get(old))
+
+            # 2. retrieve data that requires special processing
+            data['classifier'].update(dist.get_classifiers() or [])
+            data['scripts'].extend(dist.scripts or [])
+            data['packages'].extend(dist.packages or [])
+            data['modules'].extend(dist.py_modules or [])
+            # 2.1 data_files -> resources
+            if dist.data_files:
+                if len(dist.data_files) < 2 or \
+                   isinstance(dist.data_files[1], str):
+                    dist.data_files = [('', dist.data_files)]
+                # add tokens in the destination paths
+                vars = {'distribution.name': data['name']}
+                path_tokens = list(sysconfig.get_paths(vars=vars).items())
+
+                def length_comparison(x, y):
+                    len_x = len(x[1])
+                    len_y = len(y[1])
+                    if len_x == len_y:
+                        return 0
+                    elif len_x < len_y:
+                        return -1
+                    else:
+                        return 1
+
+                # sort tokens to use the longest one first
+                path_tokens.sort(key=cmp_to_key(length_comparison))
+                for dest, srcs in (dist.data_files or []):
+                    dest = os.path.join(sys.prefix, dest)
+                    dest = dest.replace(os.path.sep, '/')
+                    for tok, path in path_tokens:
+                        path = path.replace(os.path.sep, '/')
+                        if not dest.startswith(path):
+                            continue
+
+                        dest = ('{%s}' % tok) + dest[len(path):]
+                        files = [('/ '.join(src.rsplit('/', 1)), dest)
+                                    for src in srcs]
+                        data['resources'].extend(files)
+
+            # 2.2 package_data -> extra_files
+            package_dirs = dist.package_dir or {}
+            for package, extras in (dist.package_data or {}).items():
+                package_dir = package_dirs.get(package, package)
+                for file_ in extras:
+                    if package_dir:
+                        file_ = package_dir + '/' + file_
+                    data['extra_files'].append(file_)
+
+            # Use a README file if its content matches the description
+            # (compared with all whitespace removed, case-insensitively)
+            if "description" in data:
+                ref = md5(re.sub(r'\s', '',
+                                 self.data['description']).lower().encode())
+                ref = ref.digest()
+                for readme in glob.glob('README*'):
+                    with open(readme, encoding='utf-8') as fp:
+                        contents = fp.read()
+                    contents = re.sub(r'\s', '', contents.lower()).encode()
+                    val = md5(contents).digest()
+                    if val == ref:
+                        del data['description']
+                        data['description-file'] = readme
+                        break
+
+        # apply monkey patch to distutils (v1) and setuptools (if needed)
+        # (abort the feature if distutils v1 has been killed)
+        try:
+            from distutils import core
+            core.setup  # make sure it's not d2 masquerading as d1
+        except (ImportError, AttributeError):
+            return
+        saved_setups = [(core, core.setup)]
+        core.setup = setup_mock
+        try:
+            import setuptools
+        except ImportError:
+            pass
+        else:
+            saved_setups.append((setuptools, setuptools.setup))
+            setuptools.setup = setup_mock
+        # get metadata by executing the setup.py with the patched setup(...)
+        success = False  # for python < 2.4
+        try:
+            load_setup()
+            success = True
+        finally:  # revert monkey patches
+            for patched_module, original_setup in saved_setups:
+                patched_module.setup = original_setup
+        if not self.data:
+            raise ValueError('Unable to load metadata from setup.py')
+        return success
+
+    def inspect(self):
+        """Inspect the current working diretory for a name and version.
+
+        This information is harvested in where the directory is named
+        like [name]-[version].
+        """
+        dir_name = os.path.basename(os.getcwd())
+        self.data['name'] = dir_name
+        match = re.match(r'(.*)-(\d.+)', dir_name)
+        if match:
+            self.data['name'] = match.group(1)
+            self.data['version'] = match.group(2)
+            # TODO needs testing!
+            if not is_valid_version(self.data['version']):
+                msg = "Invalid version discovered: %s" % self.data['version']
+                raise RuntimeError(msg)
+
+    def query_user(self):
+        self.data['name'] = ask('Project name', self.data['name'],
+              _helptext['name'])
+
+        self.data['version'] = ask('Current version number',
+              self.data.get('version'), _helptext['version'])
+        self.data['summary'] = ask('Package summary',
+              self.data.get('summary'), _helptext['summary'],
+              lengthy=True)
+        self.data['author'] = ask('Author name',
+              self.data.get('author'), _helptext['author'])
+        self.data['author_email'] = ask('Author e-mail address',
+              self.data.get('author_email'), _helptext['author_email'])
+        self.data['home_page'] = ask('Project Home Page',
+              self.data.get('home_page'), _helptext['home_page'],
+              required=False)
+
+        if ask_yn('Do you want me to automatically build the file list '
+              'with everything I can find in the current directory? '
+              'If you say no, you will have to define the files manually.') == 'y':
+            self._find_files()
+        else:
+            while ask_yn('Do you want to add a single module?'
+                        ' (you will be able to add full packages next)',
+                    helptext=_helptext['modules']) == 'y':
+                self._set_multi('Module name', 'modules')
+
+            while ask_yn('Do you want to add a package?',
+                    helptext=_helptext['packages']) == 'y':
+                self._set_multi('Package name', 'packages')
+
+            while ask_yn('Do you want to add an extra file?',
+                        helptext=_helptext['extra_files']) == 'y':
+                self._set_multi('Extra file/dir name', 'extra_files')
+
+        if ask_yn('Do you want to set Trove classifiers?',
+                  helptext=_helptext['do_classifier']) == 'y':
+            self.set_classifier()
+
+    def _find_files(self):
+        # we are looking for python modules and packages,
+        # other stuff are added as regular files
+        pkgs = self.data['packages']
+        modules = self.data['modules']
+        extra_files = self.data['extra_files']
+
+        def is_package(path):
+            return os.path.exists(os.path.join(path, '__init__.py'))
+
+        curdir = os.getcwd()
+        scanned = []
+        _pref = ['lib', 'include', 'dist', 'build', '.', '~']
+        _suf = ['.pyc']
+
+        def to_skip(path):
+            path = relative(path)
+
+            for pref in _pref:
+                if path.startswith(pref):
+                    return True
+
+            for suf in _suf:
+                if path.endswith(suf):
+                    return True
+
+            return False
+
+        def relative(path):
+            return path[len(curdir) + 1:]
+
+        def dotted(path):
+            res = relative(path).replace(os.path.sep, '.')
+            if res.endswith('.py'):
+                res = res[:-len('.py')]
+            return res
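+
+        # e.g. with curdir == '/src/proj' (hypothetical),
+        # dotted('/src/proj/pkg/util.py') == 'pkg.util'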
+
+        # first pass: packages
+        for root, dirs, files in os.walk(curdir):
+            if to_skip(root):
+                continue
+            for dir_ in sorted(dirs):
+                if to_skip(dir_):
+                    continue
+                fullpath = os.path.join(root, dir_)
+                dotted_name = dotted(fullpath)
+                if is_package(fullpath) and dotted_name not in pkgs:
+                    pkgs.append(dotted_name)
+                    scanned.append(fullpath)
+
+        # modules and extra files
+        for root, dirs, files in os.walk(curdir):
+            if to_skip(root):
+                continue
+
+            if any(root.startswith(path) for path in scanned):
+                continue
+
+            for file in sorted(files):
+                fullpath = os.path.join(root, file)
+                if to_skip(fullpath):
+                    continue
+                # single module?
+                if os.path.splitext(file)[-1] == '.py':
+                    modules.append(dotted(fullpath))
+                else:
+                    extra_files.append(relative(fullpath))
+
+    def _set_multi(self, question, name):
+        existing_values = self.data[name]
+        value = ask(question, helptext=_helptext[name]).strip()
+        if value not in existing_values:
+            existing_values.append(value)
+
+    def set_classifier(self):
+        self.set_maturity_status(self.classifiers)
+        self.set_license(self.classifiers)
+        self.set_other_classifier(self.classifiers)
+
+    def set_other_classifier(self, classifiers):
+        if ask_yn('Do you want to set other trove identifiers?', 'n',
+                  _helptext['trove_generic']) != 'y':
+            return
+        self.walk_classifiers(classifiers, [CLASSIFIERS], '')
+
+    def walk_classifiers(self, classifiers, trovepath, desc):
+        trove = trovepath[-1]
+
+        if not trove:
+            return
+
+        for key in sorted(trove):
+            if len(trove[key]) == 0:
+                if ask_yn('Add "%s"' % desc[4:] + ' :: ' + key, 'n') == 'y':
+                    classifiers.add(desc[4:] + ' :: ' + key)
+                continue
+
+            if ask_yn('Do you want to set items under\n   "%s" (%d sub-items)'
+                      % (key, len(trove[key])), 'n',
+                      _helptext['trove_generic']) == 'y':
+                self.walk_classifiers(classifiers, trovepath + [trove[key]],
+                                      desc + ' :: ' + key)
+
+    def set_license(self, classifiers):
+        while True:
+            license = ask('What license do you use',
+                          helptext=_helptext['trove_license'], required=False)
+            if not license:
+                return
+
+            license_words = license.lower().split(' ')
+            found_list = []
+
+            for index, licence in LICENCES:
+                for word in license_words:
+                    if word in licence:
+                        found_list.append(index)
+                        break
+
+            if len(found_list) == 0:
+                print('ERROR: Could not find a matching license for "%s"' %
+                      license)
+                continue
+
+            question = 'Matching licenses:\n\n'
+
+            for index, list_index in enumerate(found_list):
+                question += '   %s) %s\n' % (index + 1,
+                                             _CLASSIFIERS_LIST[list_index])
+
+            question += ('\nType the number of the license you wish to use or '
+                         '? to try again:')
+            choice = ask(question, required=False)
+
+            if choice == '?':
+                continue
+            if choice == '':
+                return
+
+            try:
+                index = found_list[int(choice) - 1]
+            except (ValueError, IndexError):
+                print("ERROR: Invalid selection, type a number from the list "
+                      "above.")
+                continue
+
+            classifiers.add(_CLASSIFIERS_LIST[index])
+            return
+
+    def set_maturity_status(self, classifiers):
+        maturity_name = lambda mat: mat.split('- ')[-1]
+        maturity_question = '''\
+            Please select the project status:
+
+            %s
+
+            Status''' % '\n'.join('%s - %s' % (i, maturity_name(n))
+                                  for i, n in enumerate(PROJECT_MATURITY))
+        while True:
+            choice = ask(dedent(maturity_question), required=False)
+
+            if choice:
+                try:
+                    choice = int(choice) - 1
+                    key = PROJECT_MATURITY[choice]
+                    classifiers.add(key)
+                    return
+                except (IndexError, ValueError):
+                    print("ERROR: Invalid selection, type a single digit "
+                          "number.")
+
+
+def main():
+    """Main entry point."""
+    program = MainProgram()
+    # # uncomment when implemented
+    # if not program.load_existing_setup_script():
+    #     program.inspect_directory()
+    #     program.query_user()
+    #     program.update_config_file()
+    # program.write_setup_script()
+    # packaging.util.cfg_to_args()
+    program()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/Lib/packaging/database.py b/Lib/packaging/database.py
new file mode 100644
index 0000000..b107148
--- /dev/null
+++ b/Lib/packaging/database.py
@@ -0,0 +1,629 @@
+"""PEP 376 implementation."""
+
+import io
+import os
+import re
+import csv
+import sys
+import zipimport
+from hashlib import md5
+from packaging import logger
+from packaging.errors import PackagingError
+from packaging.version import suggest_normalized_version, VersionPredicate
+from packaging.metadata import Metadata
+
+
+__all__ = [
+    'Distribution', 'EggInfoDistribution', 'distinfo_dirname',
+    'get_distributions', 'get_distribution', 'get_file_users',
+    'provides_distribution', 'obsoletes_distribution',
+    'enable_cache', 'disable_cache', 'clear_cache',
+]
+
+
+# TODO update docs
+
+DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES')
+
+# Cache
+_cache_name = {}  # maps names to Distribution instances
+_cache_name_egg = {}  # maps names to EggInfoDistribution instances
+_cache_path = {}  # maps paths to Distribution instances
+_cache_path_egg = {}  # maps paths to EggInfoDistribution instances
+_cache_generated = False  # indicates if .dist-info distributions are cached
+_cache_generated_egg = False  # indicates if .dist-info and .egg are cached
+_cache_enabled = True
+
+
+def enable_cache():
+    """
+    Enables the internal cache.
+
+    Note that this function will not clear the cache; for that
+    functionality, see :func:`clear_cache`.
+    """
+    global _cache_enabled
+
+    _cache_enabled = True
+
+
+def disable_cache():
+    """
+    Disables the internal cache.
+
+    Note that this function will not clear the cache; for that
+    functionality, see :func:`clear_cache`.
+    """
+    global _cache_enabled
+
+    _cache_enabled = False
+
+
+def clear_cache():
+    """ Clears the internal cache. """
+    global _cache_name, _cache_name_egg, _cache_path, _cache_path_egg, \
+        _cache_generated, _cache_generated_egg
+
+    _cache_name = {}
+    _cache_name_egg = {}
+    _cache_path = {}
+    _cache_path_egg = {}
+    _cache_generated = False
+    _cache_generated_egg = False
+
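+# Example (illustrative): code that installs or removes distributions at
+# runtime can call clear_cache() afterwards so that later queries rescan
+# sys.path instead of returning stale entries.
+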
+
+def _yield_distributions(include_dist, include_egg, paths=sys.path):
+    """
+    Yield .dist-info and .egg(-info) distributions, based on the arguments.
+
+    :parameter include_dist: yield .dist-info distributions
+    :parameter include_egg: yield .egg(-info) distributions
+    """
+    for path in paths:
+        realpath = os.path.realpath(path)
+        if not os.path.isdir(realpath):
+            continue
+        for dir_ in os.listdir(realpath):
+            dist_path = os.path.join(realpath, dir_)
+            if include_dist and dir_.endswith('.dist-info'):
+                yield Distribution(dist_path)
+            elif include_egg and (dir_.endswith('.egg-info') or
+                                  dir_.endswith('.egg')):
+                yield EggInfoDistribution(dist_path)
+                yield EggInfoDistribution(dist_path)
+
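+# Illustrative use of this module-internal helper (path is hypothetical):
+#
+#   for dist in _yield_distributions(True, False,
+#                                    paths=['/some/site-packages']):
+#       print(dist.name)
+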
+
+def _generate_cache(use_egg_info=False, paths=sys.path):
+    global _cache_generated, _cache_generated_egg
+
+    if _cache_generated_egg or (_cache_generated and not use_egg_info):
+        return
+    else:
+        gen_dist = not _cache_generated
+        gen_egg = use_egg_info
+
+        for dist in _yield_distributions(gen_dist, gen_egg, paths):
+            if isinstance(dist, Distribution):
+                _cache_path[dist.path] = dist
+                _cache_name.setdefault(dist.name, []).append(dist)
+            else:
+                _cache_path_egg[dist.path] = dist
+                _cache_name_egg.setdefault(dist.name, []).append(dist)
+
+        if gen_dist:
+            _cache_generated = True
+        if gen_egg:
+            _cache_generated_egg = True
+
+
+class Distribution:
+    """Created with the *path* of the ``.dist-info`` directory provided to the
+    constructor. It reads the metadata contained in ``METADATA`` when it is
+    instantiated."""
+
+    name = ''
+    """The name of the distribution."""
+
+    version = ''
+    """The version of the distribution."""
+
+    metadata = None
+    """A :class:`packaging.metadata.Metadata` instance loaded with
+    the distribution's ``METADATA`` file."""
+
+    requested = False
+    """A boolean that indicates whether the ``REQUESTED`` metadata file is
+    present (in other words, whether the package was installed by user
+    request or it was installed as a dependency)."""
+
+    def __init__(self, path):
+        if _cache_enabled and path in _cache_path:
+            self.metadata = _cache_path[path].metadata
+        else:
+            metadata_path = os.path.join(path, 'METADATA')
+            self.metadata = Metadata(path=metadata_path)
+
+        self.name = self.metadata['Name']
+        self.version = self.metadata['Version']
+        self.path = path
+
+        if _cache_enabled and path not in _cache_path:
+            _cache_path[path] = self
+
+    def __repr__(self):
+        return '<Distribution %r %s at %r>' % (
+            self.name, self.version, self.path)
+
+    def _get_records(self, local=False):
+        with self.get_distinfo_file('RECORD') as record:
+            record_reader = csv.reader(record, delimiter=',',
+                                       lineterminator='\n')
+            # a RECORD row holds (path, checksum, size), but the last two
+            # fields may be missing; pad short rows with None so the
+            # unpacking below always gets three values
+            for row in record_reader:
+                path, checksum, size = (row[:] +
+                                        [None for i in range(len(row), 3)])
+                if local:
+                    path = path.replace('/', os.sep)
+                    path = os.path.join(sys.prefix, path)
+                yield path, checksum, size
+
+    def get_resource_path(self, relative_path):
+        with self.get_distinfo_file('RESOURCES') as resources_file:
+            resources_reader = csv.reader(resources_file, delimiter=',',
+                                           lineterminator='\n')
+            for relative, destination in resources_reader:
+                if relative == relative_path:
+                    return destination
+        raise KeyError(
+            'no resource file with relative path %r is installed' %
+            relative_path)
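+
+    # Example (illustrative values): if the RESOURCES file maps
+    # 'data/cfg.ini' to '/etc/proj/cfg.ini', then
+    # get_resource_path('data/cfg.ini') returns '/etc/proj/cfg.ini'.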
+
+    def list_installed_files(self, local=False):
+        """
+        Iterates over the ``RECORD`` entries and returns a tuple
+        ``(path, md5, size)`` for each line. If *local* is ``True``,
+        the returned path is transformed into a local absolute path.
+        Otherwise the raw value from RECORD is returned.
+
+        A local absolute path is an absolute path in which occurrences of
+        ``'/'`` have been replaced by the system separator given by ``os.sep``.
+
+        :parameter local: flag to say if the path should be returned as a
+                          local absolute path
+
+        :type local: boolean
+        :returns: iterator of (path, md5, size)
+        """
+        return self._get_records(local)
+
+    def uses(self, path):
+        """
+        Returns ``True`` if path is listed in ``RECORD``. *path* can be a local
+        absolute path or a relative ``'/'``-separated path.
+
+        :rtype: boolean
+        """
+        for p, checksum, size in self._get_records():
+            local_absolute = os.path.join(sys.prefix, p)
+            if path == p or path == local_absolute:
+                return True
+        return False
+
+    def get_distinfo_file(self, path, binary=False):
+        """
+        Returns a file located under the ``.dist-info`` directory. Returns a
+        ``file`` instance for the file pointed to by *path*.
+
+        :parameter path: a ``'/'``-separated path relative to the
+                         ``.dist-info`` directory or an absolute path;
+                         If *path* is an absolute path and doesn't start
+                         with the ``.dist-info`` directory path,
+                         a :class:`PackagingError` is raised
+        :type path: string
+        :parameter binary: If *binary* is ``True``, opens the file in read-only
+                           binary mode (``rb``), otherwise opens it in
+                           read-only mode (``r``).
+        :rtype: file object
+        """
+        open_flags = 'r'
+        if binary:
+            open_flags += 'b'
+
+        # a path containing a separator is treated as absolute
+        # XXX use relpath, add tests
+        if os.sep in path:
+            distinfo_dirname, path = path.split(os.sep)[-2:]
+            if distinfo_dirname != self.path.split(os.sep)[-1]:
+                raise PackagingError(
+                    'dist-info file %r does not belong to the %r %s '
+                    'distribution' % (path, self.name, self.version))
+
+        # The file must be relative
+        if path not in DIST_FILES:
+            raise PackagingError('invalid path for a dist-info file: %r' %
+                                 path)
+
+        path = os.path.join(self.path, path)
+        return open(path, open_flags)
+
+    def list_distinfo_files(self, local=False):
+        """
+        Iterates over the ``RECORD`` entries and returns paths for each line if
+        the path is pointing to a file located in the ``.dist-info`` directory
+        or one of its subdirectories.
+
+        :parameter local: If *local* is ``True``, each returned path is
+                          transformed into a local absolute path. Otherwise the
+                          raw value from ``RECORD`` is returned.
+        :type local: boolean
+        :returns: iterator of paths
+        """
+        for path, checksum, size in self._get_records(local):
+            yield path
+
+    def __eq__(self, other):
+        return isinstance(other, Distribution) and self.path == other.path
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    __hash__ = object.__hash__
+
+
+class EggInfoDistribution:
+    """Created with the *path* of the ``.egg-info`` directory or file provided
+    to the constructor. It reads the metadata contained in the file itself, or
+    if the given path happens to be a directory, the metadata is read from the
+    file ``PKG-INFO`` under that directory."""
+
+    name = ''
+    """The name of the distribution."""
+
+    version = ''
+    """The version of the distribution."""
+
+    metadata = None
+    """A :class:`packaging.metadata.Metadata` instance loaded with
+    the distribution's ``METADATA`` file."""
+
+    _REQUIREMENT = re.compile(
+        r'(?P<name>[-A-Za-z0-9_.]+)\s*'
+        r'(?P<first>(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)?\s*'
+        r'(?P<rest>(?:\s*,\s*(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)*)\s*'
+        r'(?P<extras>\[.*\])?')
+
+    def __init__(self, path):
+        self.path = path
+        if _cache_enabled and path in _cache_path_egg:
+            self.metadata = _cache_path_egg[path].metadata
+            self.name = self.metadata['Name']
+            self.version = self.metadata['Version']
+            return
+
+        # reused from Distribute's pkg_resources
+        def yield_lines(strs):
+            """Yield non-empty/non-comment lines of a ``basestring``
+            or sequence"""
+            if isinstance(strs, str):
+                for s in strs.splitlines():
+                    s = s.strip()
+                    # skip blank lines/comments
+                    if s and not s.startswith('#'):
+                        yield s
+            else:
+                for ss in strs:
+                    for s in yield_lines(ss):
+                        yield s
+
+        requires = None
+
+        if path.endswith('.egg'):
+            if os.path.isdir(path):
+                meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+                self.metadata = Metadata(path=meta_path)
+                try:
+                    req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
+                    with open(req_path, 'r') as fp:
+                        requires = fp.read()
+                except IOError:
+                    requires = None
+            else:
+                # FIXME handle the case where zipfile is not available
+                zipf = zipimport.zipimporter(path)
+                fileobj = io.StringIO(
+                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
+                self.metadata = Metadata(fileobj=fileobj)
+                try:
+                    # get_data returns bytes; decode so yield_lines sees text
+                    requires = zipf.get_data(
+                        'EGG-INFO/requires.txt').decode('utf8')
+                except IOError:
+                    requires = None
+            self.name = self.metadata['Name']
+            self.version = self.metadata['Version']
+
+        elif path.endswith('.egg-info'):
+            if os.path.isdir(path):
+                # read requires.txt before path is rebound to the
+                # PKG-INFO file inside the directory
+                try:
+                    req_path = os.path.join(path, 'requires.txt')
+                    with open(req_path, 'r') as fp:
+                        requires = fp.read()
+                except IOError:
+                    requires = None
+                path = os.path.join(path, 'PKG-INFO')
+            self.metadata = Metadata(path=path)
+            self.name = self.metadata['Name']
+            self.version = self.metadata['Version']
+
+        else:
+            raise ValueError('path must end with .egg-info or .egg, got %r' %
+                             path)
+
+        if requires is not None:
+            if self.metadata['Metadata-Version'] == '1.1':
+                # we can't have 1.1 metadata *and* Setuptools requires
+                for field in ('Obsoletes', 'Requires', 'Provides'):
+                    del self.metadata[field]
+
+        reqs = []
+
+        if requires is not None:
+            for line in yield_lines(requires):
+                if line.startswith('['):
+                    logger.warning(
+                        'extensions in requires.txt are not supported '
+                        '(used by %r %s)', self.name, self.version)
+                    break
+                else:
+                    match = self._REQUIREMENT.match(line.strip())
+                    if not match:
+                        # this happens when we encounter extras; since they
+                        # are written at the end of the file we just exit
+                        break
+                    else:
+                        if match.group('extras'):
+                            logger.warning(
+                                'extra requirements are not supported '
+                                '(used by %r %s)', self.name, self.version)
+                        name = match.group('name')
+                        version = None
+                        if match.group('first'):
+                            version = match.group('first')
+                            if match.group('rest'):
+                                version += match.group('rest')
+                            version = version.replace(' ', '')  # trim spaces
+                        if version is None:
+                            reqs.append(name)
+                        else:
+                            reqs.append('%s (%s)' % (name, version))
+
+            if len(reqs) > 0:
+                self.metadata['Requires-Dist'] += reqs
+
+        if _cache_enabled:
+            _cache_path_egg[self.path] = self
+
+    def __repr__(self):
+        return '<EggInfoDistribution %r %s at %r>' % (
+            self.name, self.version, self.path)
+
+    def list_installed_files(self, local=False):
+
+        def _md5(path):
+            with open(path, 'rb') as f:
+                content = f.read()
+            return md5(content).hexdigest()
+
+        def _size(path):
+            return os.stat(path).st_size
+
+        path = self.path
+        if local:
+            path = path.replace('/', os.sep)
+
+        # XXX What about scripts and data files?
+        if os.path.isfile(path):
+            return [(path, _md5(path), _size(path))]
+        else:
+            files = []
+            for root, dirs, files_ in os.walk(path):
+                for item in files_:
+                    item = os.path.join(root, item)
+                    files.append((item, _md5(item), _size(item)))
+            return files
+
+    def uses(self, path):
+        return False
+
+    def __eq__(self, other):
+        return (isinstance(other, EggInfoDistribution) and
+                self.path == other.path)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    __hash__ = object.__hash__
+
+
+def distinfo_dirname(name, version):
+    """
+    The *name* and *version* parameters are converted into their
+    filename-escaped form, i.e. any ``'-'`` characters are replaced
+    with ``'_'`` other than the one in ``'dist-info'`` and the one
+    separating the name from the version number.
+
+    :parameter name: is converted to a standard distribution name by replacing
+                     any runs of non-alphanumeric characters with a single
+                     ``'-'``.
+    :type name: string
+    :parameter version: is converted to a standard version string. Spaces
+                        become dots, and all other non-alphanumeric characters
+                        (except dots) become dashes, with runs of multiple
+                        dashes condensed to a single dash.
+    :type version: string
+    :returns: directory name
+    :rtype: string"""
+    file_extension = '.dist-info'
+    name = name.replace('-', '_')
+    normalized_version = suggest_normalized_version(version)
+    # Because this is a lookup procedure, something will be returned even if
+    #   it is a version that cannot be normalized
+    if normalized_version is None:
+        # Unable to achieve normality?
+        normalized_version = version
+    return '-'.join([name, normalized_version]) + file_extension
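+
+# Examples (cf. PEP 376): distinfo_dirname('python-ldap', '2.5') returns
+# 'python_ldap-2.5.dist-info', and distinfo_dirname('python-ldap', '2.5 1')
+# returns 'python_ldap-2.5.1.dist-info'.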
+
+
+def get_distributions(use_egg_info=False, paths=sys.path):
+    """
+    Provides an iterator that looks for ``.dist-info`` directories in
+    ``sys.path`` and returns :class:`Distribution` instances for each one of
+    them. If the parameter *use_egg_info* is ``True``, then the ``.egg-info``
+    files and directories are iterated as well.
+
+    :rtype: iterator of :class:`Distribution` and :class:`EggInfoDistribution`
+            instances
+    """
+    if not _cache_enabled:
+        for dist in _yield_distributions(True, use_egg_info, paths):
+            yield dist
+    else:
+        _generate_cache(use_egg_info, paths)
+
+        for dist in _cache_path.values():
+            yield dist
+
+        if use_egg_info:
+            for dist in _cache_path_egg.values():
+                yield dist
+
+
+def get_distribution(name, use_egg_info=False, paths=None):
+    """
+    Scans all elements in ``sys.path`` and looks for all directories
+    ending with ``.dist-info``. Returns a :class:`Distribution`
+    corresponding to the ``.dist-info`` directory that contains the
+    ``METADATA`` that matches *name* for the *name* metadata field.
+    If no distribution exists with the given *name* and the parameter
+    *use_egg_info* is set to ``True``, then all files and directories ending
+    with ``.egg-info`` are scanned. An :class:`EggInfoDistribution` instance is
+    returned if one is found that has metadata that matches *name* for the
+    *name* metadata field.
+
+    This function only returns the first result found, as no more than one
+    value is expected. If the directory is not found, ``None`` is returned.
+
+    :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None
+    """
+    if paths is None:
+        paths = sys.path
+
+    if not _cache_enabled:
+        for dist in _yield_distributions(True, use_egg_info, paths):
+            if dist.name == name:
+                return dist
+    else:
+        _generate_cache(use_egg_info, paths)
+
+        if name in _cache_name:
+            return _cache_name[name][0]
+        elif use_egg_info and name in _cache_name_egg:
+            return _cache_name_egg[name][0]
+        else:
+            return None
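+
+# Illustrative lookup (hypothetical project name):
+#
+#   dist = get_distribution('towel-stuff', use_egg_info=True)
+#   if dist is not None:
+#       print(dist.name, dist.version)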
+
+
+def obsoletes_distribution(name, version=None, use_egg_info=False):
+    """
+    Iterates over all distributions to find which distributions obsolete
+    *name*.
+
+    If a *version* is provided, it will be used to filter the results.
+    If the argument *use_egg_info* is set to ``True``, then ``.egg-info``
+    distributions will be considered as well.
+
+    :parameter name: the name of the obsoleted distribution to look for
+    :type name: string
+    :type version: string
+    """
+    for dist in get_distributions(use_egg_info):
+        obsoleted = (dist.metadata['Obsoletes-Dist'] +
+                     dist.metadata['Obsoletes'])
+        for obs in obsoleted:
+            o_components = obs.split(' ', 1)
+            if len(o_components) == 1 or version is None:
+                if name == o_components[0]:
+                    yield dist
+                    break
+            else:
+                try:
+                    predicate = VersionPredicate(obs)
+                except ValueError:
+                    raise PackagingError(
+                        'distribution %r has ill-formed obsoletes field: '
+                        '%r' % (dist.name, obs))
+                if name == o_components[0] and predicate.match(version):
+                    yield dist
+                    break
+
+
+def provides_distribution(name, version=None, use_egg_info=False):
+    """
+    Iterates over all distributions to find which distributions provide
+    *name*. If a *version* is provided, it will be used to filter the
+    results. Scans all elements in ``sys.path`` and looks for all
+    directories ending with ``.dist-info``, yielding a
+    :class:`Distribution` for each one whose ``METADATA`` provides *name*.
+    If the argument *use_egg_info* is set to ``True``, then all files and
+    directories ending with ``.egg-info`` are considered as well, yielding
+    :class:`EggInfoDistribution` instances for matches.
+
+    All matching distributions found are yielded.
+
+    :parameter version: a version specifier that indicates the version
+                        required, conforming to the format in ``PEP-345``
+
+    :type name: string
+    :type version: string
+    """
+    predicate = None
+    if version is not None:
+        try:
+            predicate = VersionPredicate(name + ' (' + version + ')')
+        except ValueError:
+            raise PackagingError('invalid name or version: %r, %r' %
+                                 (name, version))
+
+    for dist in get_distributions(use_egg_info):
+        provided = dist.metadata['Provides-Dist'] + dist.metadata['Provides']
+
+        for p in provided:
+            p_components = p.rsplit(' ', 1)
+            if len(p_components) == 1 or predicate is None:
+                if name == p_components[0]:
+                    yield dist
+                    break
+            else:
+                p_name, p_ver = p_components
+                if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')':
+                    raise PackagingError(
+                        'distribution %r has invalid Provides field: %r' %
+                        (dist.name, p))
+                p_ver = p_ver[1:-1]  # trim off the parentheses
+                if p_name == name and predicate.match(p_ver):
+                    yield dist
+                    break
+
+
+def get_file_users(path):
+    """
+    Iterates over all distributions to find out which distributions use
+    *path*.
+
+    :parameter path: can be a local absolute path or a relative
+                     ``'/'``-separated path.
+    :type path: string
+    :rtype: iterator of :class:`Distribution` instances
+    """
+    for dist in get_distributions():
+        if dist.uses(path):
+            yield dist
diff --git a/Lib/packaging/depgraph.py b/Lib/packaging/depgraph.py
new file mode 100644
index 0000000..48ea3d9
--- /dev/null
+++ b/Lib/packaging/depgraph.py
@@ -0,0 +1,270 @@
+"""Class and functions dealing with dependencies between distributions.
+
+This module provides a DependencyGraph class to represent the
+dependencies between distributions.  Auxiliary functions can generate a
+graph, find reverse dependencies, and print a graph in DOT format.
+"""
+
+import sys
+
+from io import StringIO
+from packaging.errors import PackagingError
+from packaging.version import VersionPredicate, IrrationalVersionError
+
+__all__ = ['DependencyGraph', 'generate_graph', 'dependent_dists',
+           'graph_to_dot']
+
+
+class DependencyGraph:
+    """
+    Represents a dependency graph between distributions.
+
+    The dependency relationships are stored in an ``adjacency_list`` that maps
+    distributions to a list of ``(other, label)`` tuples where ``other``
+    is a distribution and the edge is labeled with ``label`` (i.e. the version
+    specifier, if such was provided). Also, for more efficient traversal, for
+    every distribution ``x``, a list of predecessors is kept in
+    ``reverse_list[x]``. An edge from distribution ``a`` to
+    distribution ``b`` means that ``a`` depends on ``b``. If any missing
+    dependencies are found, they are stored in ``missing``, which is a
+    dictionary that maps distributions to a list of requirements that were not
+    provided by any other distributions.
+    """
+
+    def __init__(self):
+        self.adjacency_list = {}
+        self.reverse_list = {}
+        self.missing = {}
+
+    def add_distribution(self, distribution):
+        """Add the *distribution* to the graph.
+
+        :type distribution: :class:`packaging.database.Distribution` or
+                            :class:`packaging.database.EggInfoDistribution`
+        """
+        self.adjacency_list[distribution] = []
+        self.reverse_list[distribution] = []
+        self.missing[distribution] = []
+
+    def add_edge(self, x, y, label=None):
+        """Add an edge from distribution *x* to distribution *y* with the given
+        *label*.
+
+        :type x: :class:`packaging.database.Distribution` or
+                 :class:`packaging.database.EggInfoDistribution`
+        :type y: :class:`packaging.database.Distribution` or
+                 :class:`packaging.database.EggInfoDistribution`
+        :type label: ``str`` or ``None``
+        """
+        self.adjacency_list[x].append((y, label))
+        # multiple edges are allowed, so be careful
+        if x not in self.reverse_list[y]:
+            self.reverse_list[y].append(x)
+
+    def add_missing(self, distribution, requirement):
+        """
+        Add a missing *requirement* for the given *distribution*.
+
+        :type distribution: :class:`packaging.database.Distribution` or
+                            :class:`packaging.database.EggInfoDistribution`
+        :type requirement: ``str``
+        """
+        self.missing[distribution].append(requirement)
+
+    def _repr_dist(self, dist):
+        return '%s %s' % (dist.name, dist.metadata['Version'])
+
+    def repr_node(self, dist, level=1):
+        """Prints only a subgraph"""
+        output = []
+        output.append(self._repr_dist(dist))
+        for other, label in self.adjacency_list[dist]:
+            dist_repr = self._repr_dist(other)
+            if label is not None:
+                dist_repr = '%s [%s]' % (dist_repr, label)
+            output.append('    ' * level + dist_repr)
+            suboutput = self.repr_node(other, level + 1)
+            subs = suboutput.split('\n')
+            output.extend(subs[1:])
+        return '\n'.join(output)
+
+    def __repr__(self):
+        """Representation of the graph"""
+        output = []
+        for dist, adjs in self.adjacency_list.items():
+            output.append(self.repr_node(dist))
+        return '\n'.join(output)
+
+
+def graph_to_dot(graph, f, skip_disconnected=True):
+    """Writes a DOT output for the graph to the provided file *f*.
+
+    If *skip_disconnected* is set to ``True``, then all distributions
+    that are not dependent on any other distribution are skipped.
+
+    :type f: a file-like object that supports ``write``
+    :type skip_disconnected: ``bool``
+    """
+    disconnected = []
+
+    f.write("digraph dependencies {\n")
+    for dist, adjs in graph.adjacency_list.items():
+        if len(adjs) == 0 and not skip_disconnected:
+            disconnected.append(dist)
+        for other, label in adjs:
+            if label is not None:
+                f.write('"%s" -> "%s" [label="%s"]\n' %
+                        (dist.name, other.name, label))
+            else:
+                f.write('"%s" -> "%s"\n' % (dist.name, other.name))
+    if not skip_disconnected and len(disconnected) > 0:
+        f.write('subgraph disconnected {\n')
+        f.write('label = "Disconnected"\n')
+        f.write('bgcolor = red\n')
+
+        for dist in disconnected:
+            f.write('"%s"' % dist.name)
+            f.write('\n')
+        f.write('}\n')
+    f.write('}\n')
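+
+# Illustrative usage, writing the graph of every installed distribution:
+#
+#   from packaging.database import get_distributions
+#   graph = generate_graph(list(get_distributions()))
+#   with open('depgraph.dot', 'w') as f:
+#       graph_to_dot(graph, f, skip_disconnected=False)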
+
+
+def generate_graph(dists):
+    """Generates a dependency graph from the given distributions.
+
+    :parameter dists: a list of distributions
+    :type dists: list of :class:`packaging.database.Distribution` and
+                 :class:`packaging.database.EggInfoDistribution` instances
+    :rtype: a :class:`DependencyGraph` instance
+    """
+    graph = DependencyGraph()
+    provided = {}  # maps names to lists of (version, dist) tuples
+
+    # first, build the graph and find out the provides
+    for dist in dists:
+        graph.add_distribution(dist)
+        provides = (dist.metadata['Provides-Dist'] +
+                    dist.metadata['Provides'] +
+                    ['%s (%s)' % (dist.name, dist.metadata['Version'])])
+
+        for p in provides:
+            comps = p.strip().rsplit(" ", 1)
+            name = comps[0]
+            version = None
+            if len(comps) == 2:
+                version = comps[1]
+                if len(version) < 3 or version[0] != '(' or version[-1] != ')':
+                    raise PackagingError(
+                        'distribution %r has ill-formed provides field: '
+                        '%r' % (dist.name, p))
+                version = version[1:-1]  # trim off the parentheses
+            provided.setdefault(name, []).append((version, dist))
+
+    # now make the edges
+    for dist in dists:
+        requires = dist.metadata['Requires-Dist'] + dist.metadata['Requires']
+        for req in requires:
+            try:
+                predicate = VersionPredicate(req)
+            except IrrationalVersionError:
+                # XXX compat-mode: fall back to matching on the bare name
+                # when the version specifier cannot be parsed
+                name = req.split()[0]
+                predicate = VersionPredicate(name)
+
+            name = predicate.name
+
+            if name not in provided:
+                graph.add_missing(dist, req)
+            else:
+                matched = False
+                for version, provider in provided[name]:
+                    try:
+                        match = predicate.match(version)
+                    except IrrationalVersionError:
+                        # XXX small compat-mode: accept the provider when
+                        # its version is a single unparsable token
+                        match = len(version.split(' ')) == 1
+
+                    if match:
+                        graph.add_edge(dist, provider, req)
+                        matched = True
+                        break
+                if not matched:
+                    graph.add_missing(dist, req)
+    return graph
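+
+# Example with illustrative names: if dist A declares
+# 'Requires-Dist: B (>=1.0)' and dist B declares 'Provides-Dist: B (1.2)',
+# generate_graph([A, B]) adds an edge A -> B labeled 'B (>=1.0)'; if no
+# distribution provides B, the requirement lands in graph.missing[A] instead.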
+
+
+def dependent_dists(dists, dist):
+    """Recursively generate a list of distributions from *dists* that are
+    dependent on *dist*.
+
+    :param dists: a list of distributions
+    :param dist: a distribution, member of *dists*, whose dependent
+                 distributions we are looking for
+    """
+    if dist not in dists:
+        raise ValueError('The given distribution is not a member of the list')
+    graph = generate_graph(dists)
+
+    dep = [dist]  # dependent distributions
+    fringe = graph.reverse_list[dist]  # list of nodes we should inspect
+
+    while fringe:
+        node = fringe.pop()
+        dep.append(node)
+        for prev in graph.reverse_list[node]:
+            if prev not in dep:
+                fringe.append(prev)
+
+    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
+    return dep
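+
+# Example with illustrative names: for dists = [A, B, C], where B requires A
+# and C requires B, dependent_dists(dists, A) returns [B, C]: every
+# distribution that depends on A directly or transitively.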
+
+
+def main():
+    from packaging.database import get_distributions
+    tempout = StringIO()
+    try:
+        old = sys.stderr
+        sys.stderr = tempout
+        try:
+            dists = list(get_distributions(use_egg_info=True))
+            graph = generate_graph(dists)
+        finally:
+            sys.stderr = old
+    except Exception as e:
+        tempout.seek(0)
+        tempout = tempout.read()
+        print('Could not generate the graph\n%s\n%s\n' % (tempout, e))
+        sys.exit(1)
+
+    for dist, reqs in graph.missing.items():
+        if len(reqs) > 0:
+            print("Warning: Missing dependencies for %s:" % dist.name,
+                  ", ".join(reqs))
+    # XXX replace with argparse
+    if len(sys.argv) == 1:
+        print('Dependency graph:')
+        print('    ' + repr(graph).replace('\n', '\n    '))
+        sys.exit(0)
+    elif len(sys.argv) > 1 and sys.argv[1] in ('-d', '--dot'):
+        if len(sys.argv) > 2:
+            filename = sys.argv[2]
+        else:
+            filename = 'depgraph.dot'
+
+        with open(filename, 'w') as f:
+            graph_to_dot(graph, f, True)
+        tempout.seek(0)
+        tempout = tempout.read()
+        print(tempout)
+        print('Dot file written at "%s"' % filename)
+        sys.exit(0)
+    else:
+        print('Supported option: -d [filename]')
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/Lib/packaging/dist.py b/Lib/packaging/dist.py
new file mode 100644
index 0000000..6065e78
--- /dev/null
+++ b/Lib/packaging/dist.py
@@ -0,0 +1,819 @@
+"""Class representing the distribution being built/installed/etc."""
+
+import os
+import re
+
+from packaging.errors import (PackagingOptionError, PackagingArgError,
+                              PackagingModuleError, PackagingClassError)
+from packaging.fancy_getopt import FancyGetopt
+from packaging.util import strtobool, resolve_name
+from packaging import logger
+from packaging.metadata import Metadata
+from packaging.config import Config
+from packaging.command import get_command_class, STANDARD_COMMANDS
+
+# Regex to define acceptable Packaging command names.  This is not *quite*
+# the same as a Python NAME -- I don't allow leading underscores.  The fact
+# that they're very similar is no coincidence; the default naming scheme is
+# to look for a Python module named after the command.
+command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
+
+USAGE = """\
+usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
+   or: %(script)s --help [cmd1 cmd2 ...]
+   or: %(script)s --help-commands
+   or: %(script)s cmd --help
+"""
+
+
+def gen_usage(script_name):
+    script = os.path.basename(script_name)
+    return USAGE % {'script': script}
+
+
+class Distribution:
+    """The core of the Packaging.  Most of the work hiding behind 'setup'
+    is really done within a Distribution instance, which farms the work out
+    to the Packaging commands specified on the command line.
+
+    Setup scripts will almost never instantiate Distribution directly,
+    unless the 'setup()' function is totally inadequate to their needs.
+    However, it is conceivable that a setup script might wish to subclass
+    Distribution for some specialized purpose, and then pass the subclass
+    to 'setup()' as the 'distclass' keyword argument.  If so, it is
+    necessary to respect the expectations that 'setup' has of Distribution.
+    See the code for 'setup()', in run.py, for details.
+    """
+
+    # 'global_options' describes the command-line options that may be
+    # supplied to the setup script prior to any actual commands.
+    # Eg. "./setup.py -n" or "./setup.py --dry-run" both take advantage of
+    # these global options.  This list should be kept to a bare minimum,
+    # since every global option is also valid as a command option -- and we
+    # don't want to pollute the commands with too many options that they
+    # have minimal control over.
+    global_options = [
+        ('dry-run', 'n', "don't actually do anything"),
+        ('help', 'h', "show detailed help message"),
+        ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
+    ]
+
+    # 'common_usage' is a short (2-3 line) string describing the common
+    # usage of the setup script.
+    common_usage = """\
+Common commands: (see '--help-commands' for more)
+
+  setup.py build      will build the package underneath 'build/'
+  setup.py install    will install the package
+"""
+
+    # options that are not propagated to the commands
+    display_options = [
+        ('help-commands', None,
+         "list all available commands"),
+        ('name', None,
+         "print package name"),
+        ('version', 'V',
+         "print package version"),
+        ('fullname', None,
+         "print <package name>-<version>"),
+        ('author', None,
+         "print the author's name"),
+        ('author-email', None,
+         "print the author's email address"),
+        ('maintainer', None,
+         "print the maintainer's name"),
+        ('maintainer-email', None,
+         "print the maintainer's email address"),
+        ('contact', None,
+         "print the maintainer's name if known, else the author's"),
+        ('contact-email', None,
+         "print the maintainer's email address if known, else the author's"),
+        ('url', None,
+         "print the URL for this package"),
+        ('license', None,
+         "print the license of the package"),
+        ('licence', None,
+         "alias for --license"),
+        ('description', None,
+         "print the package description"),
+        ('long-description', None,
+         "print the long package description"),
+        ('platforms', None,
+         "print the list of platforms"),
+        ('classifier', None,
+         "print the list of classifiers"),
+        ('keywords', None,
+         "print the list of keywords"),
+        ('provides', None,
+         "print the list of packages/modules provided"),
+        ('requires', None,
+         "print the list of packages/modules required"),
+        ('obsoletes', None,
+         "print the list of packages/modules made obsolete"),
+        ('use-2to3', None,
+         "use 2to3 to make source python 3.x compatible"),
+        ('convert-2to3-doctests', None,
+         "use 2to3 to convert doctests in seperate text files"),
+        ]
+    display_option_names = [x[0].replace('-', '_') for x in display_options]
+
+    # negative options are options that exclude other options
+    negative_opt = {}
+
+    # -- Creation/initialization methods -------------------------------
+    def __init__(self, attrs=None):
+        """Construct a new Distribution instance: initialize all the
+        attributes of a Distribution, and then use 'attrs' (a dictionary
+        mapping attribute names to values) to assign some of those
+        attributes their "real" values.  (Any attributes not mentioned in
+        'attrs' will be assigned to some null value: 0, None, an empty list
+        or dictionary, etc.)  Most importantly, initialize the
+        'command_obj' attribute to the empty dictionary; this will be
+        filled in with real command objects by 'parse_command_line()'.
+        """
+
+        # Default values for our command-line options
+        self.dry_run = False
+        self.help = False
+        for attr in self.display_option_names:
+            setattr(self, attr, False)
+
+        # Store the configuration
+        self.config = Config(self)
+
+        # Store the distribution metadata (name, version, author, and so
+        # forth) in a separate object -- we're getting to have enough
+        # information here (and enough command-line options) that it's
+        # worth it.
+        self.metadata = Metadata()
+
+        # 'cmdclass' maps command names to class objects, so we
+        # can 1) quickly figure out which class to instantiate when
+        # we need to create a new command object, and 2) have a way
+        # for the setup script to override command classes
+        self.cmdclass = {}
+
+        # 'script_name' and 'script_args' are usually set to sys.argv[0]
+        # and sys.argv[1:], but they can be overridden when the caller is
+        # not necessarily a setup script run from the command line.
+        self.script_name = None
+        self.script_args = None
+
+        # 'command_options' is where we store command options between
+        # parsing them (from config files, the command line, etc.) and when
+        # they are actually needed -- ie. when the command in question is
+        # instantiated.  It is a dictionary of dictionaries of 2-tuples:
+        #   command_options = { command_name : { option : (source, value) } }
+        self.command_options = {}
+
+        # 'dist_files' is the list of (command, pyversion, file) that
+        # have been created by any dist commands run so far. This is
+        # filled regardless of whether the run is dry or not. pyversion
+        # gives sysconfig.get_python_version() if the dist file is
+        # specific to a Python version, 'any' if it is good for all
+        # Python versions on the target platform, and '' for a source
+        # file. pyversion should not be used to specify minimum or
+        # maximum required Python versions; use the metainfo for that
+        # instead.
+        self.dist_files = []
+
+        # These options are really the business of various commands, rather
+        # than of the Distribution itself.  We provide aliases for them in
+        # Distribution as a convenience to the developer.
+        self.packages = []
+        self.package_data = {}
+        self.package_dir = None
+        self.py_modules = []
+        self.libraries = []
+        self.headers = []
+        self.ext_modules = []
+        self.ext_package = None
+        self.include_dirs = []
+        self.extra_path = None
+        self.scripts = []
+        self.data_files = {}
+        self.password = ''
+        self.use_2to3 = False
+        self.convert_2to3_doctests = []
+        self.extra_files = []
+
+        # And now initialize bookkeeping stuff that can't be supplied by
+        # the caller at all.  'command_obj' maps command names to
+        # Command instances -- that's how we enforce that every command
+        # class is a singleton.
+        self.command_obj = {}
+
+        # 'have_run' maps command names to boolean values; it keeps track
+        # of whether we have actually run a particular command, to make it
+        # cheap to "run" a command whenever we think we might need to -- if
+        # it's already been done, no need for expensive filesystem
+        # operations, we just check the 'have_run' dictionary and carry on.
+        # It's only safe to query 'have_run' for a command class that has
+        # been instantiated -- a false value will be inserted when the
+        # command object is created, and replaced with a true value when
+        # the command is successfully run.  Thus it's probably best to use
+        # '.get()' rather than a straight lookup.
+        self.have_run = {}
+
+        # Now we'll use the attrs dictionary (ultimately, keyword args from
+        # the setup script) to possibly override any or all of these
+        # distribution options.
+
+        if attrs is not None:
+            # Pull out the set of command options and work on them
+            # specifically.  Note that this order guarantees that aliased
+            # command options will override any supplied redundantly
+            # through the general options dictionary.
+            options = attrs.get('options')
+            if options is not None:
+                del attrs['options']
+                for command, cmd_options in options.items():
+                    opt_dict = self.get_option_dict(command)
+                    for opt, val in cmd_options.items():
+                        opt_dict[opt] = ("setup script", val)
+
+            # Now work on the rest of the attributes.  Any attribute that's
+            # not already defined is invalid!
+            for key, val in attrs.items():
+                if self.metadata.is_metadata_field(key):
+                    self.metadata[key] = val
+                elif hasattr(self, key):
+                    setattr(self, key, val)
+                else:
+                    logger.warning(
+                        'unknown argument given to Distribution: %r', key)
+
+        # no-user-cfg is handled before other command line args
+        # because other args override the config files, and this
+        # one is needed before we can load the config files.
+        # If attrs['script_args'] wasn't passed, assume false.
+        #
+        # This also makes sure we only look at the global options
+        self.want_user_cfg = True
+
+        if self.script_args is not None:
+            for arg in self.script_args:
+                if not arg.startswith('-'):
+                    break
+                if arg == '--no-user-cfg':
+                    self.want_user_cfg = False
+                    break
+
+        self.finalize_options()
+
+    def get_option_dict(self, command):
+        """Get the option dictionary for a given command.  If that
+        command's option dictionary hasn't been created yet, then create it
+        and return the new dictionary; otherwise, return the existing
+        option dictionary.
+        """
+        d = self.command_options.get(command)
+        if d is None:
+            d = self.command_options[command] = {}
+        return d
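+
+    # Example (illustrative): after parsing 'setup.py build --force',
+    # get_option_dict('build') would contain
+    # {'force': ('command line', 1)}.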
+
+    def get_fullname(self):
+        return self.metadata.get_fullname()
+
+    def dump_option_dicts(self, header=None, commands=None, indent=""):
+        from pprint import pformat
+
+        if commands is None:             # dump all command option dicts
+            commands = sorted(self.command_options)
+
+        if header is not None:
+            logger.info(indent + header)
+            indent = indent + "  "
+
+        if not commands:
+            logger.info(indent + "no commands known yet")
+            return
+
+        for cmd_name in commands:
+            opt_dict = self.command_options.get(cmd_name)
+            if opt_dict is None:
+                logger.info(indent + "no option dict for %r command",
+                            cmd_name)
+            else:
+                logger.info(indent + "option dict for %r command:", cmd_name)
+                out = pformat(opt_dict)
+                for line in out.split('\n'):
+                    logger.info(indent + "  " + line)
+
+    # -- Config file finding/parsing methods ---------------------------
+    # XXX to be removed
+    def parse_config_files(self, filenames=None):
+        return self.config.parse_config_files(filenames)
+
+    def find_config_files(self):
+        return self.config.find_config_files()
+
+    # -- Command-line parsing methods ----------------------------------
+
+    def parse_command_line(self):
+        """Parse the setup script's command line, taken from the
+        'script_args' instance attribute (which defaults to 'sys.argv[1:]'
+        -- see 'setup()' in run.py).  This list is first processed for
+        "global options" -- options that set attributes of the Distribution
+        instance.  Then, it is alternately scanned for Packaging commands
+        and options for that command.  Each new command terminates the
+        options for the previous command.  The allowed options for a
+        command are determined by the 'user_options' attribute of the
+        command class -- thus, we have to be able to load command classes
+        in order to parse the command line.  Any error in that 'options'
+        attribute raises PackagingGetoptError; any error on the
+        command line raises PackagingArgError.  If no Packaging commands
+        were found on the command line, raises PackagingArgError.  Return
+        true if command line was successfully parsed and we should carry
+        on with executing commands; false if no errors but we shouldn't
+        execute commands (currently, this only happens if user asks for
+        help).
+        """
+        toplevel_options = self._get_toplevel_options()
+
+        # We have to parse the command line a bit at a time -- global
+        # options, then the first command, then its options, and so on --
+        # because each command will be handled by a different class, and
+        # the options that are valid for a particular class aren't known
+        # until we have loaded the command class, which doesn't happen
+        # until we know what the command is.
+
+        self.commands = []
+        parser = FancyGetopt(toplevel_options + self.display_options)
+        parser.set_negative_aliases(self.negative_opt)
+        parser.set_aliases({'licence': 'license'})
+        args = parser.getopt(args=self.script_args, object=self)
+        option_order = parser.get_option_order()
+
+        # for display options we return immediately
+        if self.handle_display_options(option_order):
+            return
+
+        while args:
+            args = self._parse_command_opts(parser, args)
+            if args is None:            # user asked for help (and got it)
+                return
+
+        # Handle the cases of --help as a "global" option, i.e.
+        # "setup.py --help" and "setup.py --help command ...".  For the
+        # former, we show global options (--dry-run, etc.)
+        # and display-only options (--name, --version, etc.); for the
+        # latter, we omit the display-only options and show help for
+        # each command listed on the command line.
+        if self.help:
+            self._show_help(parser,
+                            display_options=len(self.commands) == 0,
+                            commands=self.commands)
+            return
+
+        return 1
+
+    def _get_toplevel_options(self):
+        """Return the non-display options recognized at the top level.
+
+        This includes options that are recognized *only* at the top
+        level as well as options recognized for commands.
+        """
+        return self.global_options
+
+    def _parse_command_opts(self, parser, args):
+        """Parse the command-line options for a single command.
+        'parser' must be a FancyGetopt instance; 'args' must be the list
+        of arguments, starting with the current command (whose options
+        we are about to parse).  Returns a new version of 'args' with
+        the next command at the front of the list; will be the empty
+        list if there are no more commands on the command line.  Returns
+        None if the user asked for help on this command.
+        """
+        # Pull the current command from the head of the command line
+        command = args[0]
+        if not command_re.match(command):
+            raise SystemExit("invalid command name %r" % command)
+        self.commands.append(command)
+
+        # Dig up the command class that implements this command, so we
+        # 1) know that it's a valid command, and 2) know which options
+        # it takes.
+        try:
+            cmd_class = get_command_class(command)
+        except PackagingModuleError as msg:
+            raise PackagingArgError(msg)
+
+        # XXX We want to push this into packaging.command
+        #
+        # Require that the command class be derived from Command -- want
+        # to be sure that the basic "command" interface is implemented.
+        for meth in ('initialize_options', 'finalize_options', 'run'):
+            if hasattr(cmd_class, meth):
+                continue
+            raise PackagingClassError(
+                'command %r must implement %r' % (cmd_class, meth))
+
+        # Also make sure that the command object provides a list of its
+        # known options.
+        if not (hasattr(cmd_class, 'user_options') and
+                isinstance(cmd_class.user_options, list)):
+            raise PackagingClassError(
+                "command class %s must provide "
+                "'user_options' attribute (a list of tuples)" % cmd_class)
+
+        # If the command class has a list of negative alias options,
+        # merge it in with the global negative aliases.
+        negative_opt = self.negative_opt
+        if hasattr(cmd_class, 'negative_opt'):
+            negative_opt = negative_opt.copy()
+            negative_opt.update(cmd_class.negative_opt)
+
+        # Check for help_options in command class.  They have a different
+        # format (tuple of four) so we need to preprocess them here.
+        if (hasattr(cmd_class, 'help_options') and
+            isinstance(cmd_class.help_options, list)):
+            help_options = cmd_class.help_options[:]
+        else:
+            help_options = []
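+        # each help_options entry is a 4-tuple: (long option, short option,
+        # description, callable), e.g. an illustrative
+        # ('help-formats', None, 'list available formats', show_formats)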
+
+        # All commands support the global options too, just by adding
+        # in 'global_options'.
+        parser.set_option_table(self.global_options +
+                                cmd_class.user_options +
+                                help_options)
+        parser.set_negative_aliases(negative_opt)
+        args, opts = parser.getopt(args[1:])
+        if hasattr(opts, 'help') and opts.help:
+            self._show_help(parser, display_options=False,
+                            commands=[cmd_class])
+            return
+
+        if (hasattr(cmd_class, 'help_options') and
+            isinstance(cmd_class.help_options, list)):
+            help_option_found = False
+            for help_option, short, desc, func in cmd_class.help_options:
+                if hasattr(opts, help_option.replace('-', '_')):
+                    help_option_found = True
+                    if callable(func):
+                        func()
+                    else:
+                        raise PackagingClassError(
+                            "invalid help function %r for help option %r: "
+                            "must be a callable object (function, etc.)"
+                            % (func, help_option))
+
+            if help_option_found:
+                return
+
+        # Put the options from the command line into their official
+        # holding pen, the 'command_options' dictionary.
+        opt_dict = self.get_option_dict(command)
+        for name, value in vars(opts).items():
+            opt_dict[name] = ("command line", value)
+
+        return args
+
+    def finalize_options(self):
+        """Set final values for all the options on the Distribution
+        instance, analogous to the .finalize_options() method of Command
+        objects.
+        """
+        if getattr(self, 'convert_2to3_doctests', None):
+            self.convert_2to3_doctests = [os.path.join(p)
+                                for p in self.convert_2to3_doctests]
+        else:
+            self.convert_2to3_doctests = []
+
+    def _show_help(self, parser, global_options=True, display_options=True,
+                   commands=[]):
+        """Show help for the setup script command line in the form of
+        several lists of command-line options.  'parser' should be a
+        FancyGetopt instance; do not expect it to be returned in the
+        same state, as its option table will be reset to make it
+        generate the correct help text.
+
+        If 'global_options' is true, lists the global options:
+        --dry-run, etc.  If 'display_options' is true, lists
+        the "display-only" options: --name, --version, etc.  Finally,
+        lists per-command help for every command name or command class
+        in 'commands'.
+        """
+        # late import because of mutual dependence between these modules
+        from packaging.command.cmd import Command
+
+        if global_options:
+            if display_options:
+                options = self._get_toplevel_options()
+            else:
+                options = self.global_options
+            parser.set_option_table(options)
+            parser.print_help(self.common_usage + "\nGlobal options:")
+            print('')
+
+        if display_options:
+            parser.set_option_table(self.display_options)
+            parser.print_help(
+                "Information display options (just display " +
+                "information, ignore any commands)")
+            print('')
+
+        for command in commands:
+            if isinstance(command, type) and issubclass(command, Command):
+                cls = command
+            else:
+                cls = get_command_class(command)
+            if (hasattr(cls, 'help_options') and
+                isinstance(cls.help_options, list)):
+                parser.set_option_table(cls.user_options + cls.help_options)
+            else:
+                parser.set_option_table(cls.user_options)
+            parser.print_help("Options for %r command:" % cls.__name__)
+            print('')
+
+        print(gen_usage(self.script_name))
+
+    def handle_display_options(self, option_order):
+        """If there were any non-global "display-only" options
+        (--help-commands or the metadata display options) on the command
+        line, display the requested info and return true; else return
+        false.
+        """
+        # User just wants a list of commands -- we'll print it out and stop
+        # processing now (ie. if they ran "setup --help-commands foo bar",
+        # we ignore "foo bar").
+        if self.help_commands:
+            self.print_commands()
+            print('')
+            print(gen_usage(self.script_name))
+            return 1
+
+        # If user supplied any of the "display metadata" options, then
+        # display that metadata in the order in which the user supplied the
+        # metadata options.
+        any_display_options = False
+        is_display_option = {option[0] for option in self.display_options}
+
+        for opt, val in option_order:
+            if val and opt in is_display_option:
+                opt = opt.replace('-', '_')
+                value = self.metadata[opt]
+                if opt in ('keywords', 'platform'):
+                    print(','.join(value))
+                elif opt in ('classifier', 'provides', 'requires',
+                             'obsoletes'):
+                    print('\n'.join(value))
+                else:
+                    print(value)
+                any_display_options = True
+
+        return any_display_options
+
+    def print_command_list(self, commands, header, max_length):
+        """Print a subset of the list of all commands -- used by
+        'print_commands()'.
+        """
+        print(header + ":")
+
+        for cmd in commands:
+            cls = self.cmdclass.get(cmd) or get_command_class(cmd)
+            description = getattr(cls, 'description',
+                                  '(no description available)')
+
+            print("  %-*s  %s" % (max_length, cmd, description))
+
+    def _get_command_groups(self):
+        """Helper function to retrieve all the command class names divided
+        into standard commands (listed in
+        packaging.command.STANDARD_COMMANDS) and extra commands (given in
+        self.cmdclass and not standard commands).
+        """
+        extra_commands = [cmd for cmd in self.cmdclass
+                          if cmd not in STANDARD_COMMANDS]
+        return STANDARD_COMMANDS, extra_commands
+
+    def print_commands(self):
+        """Print out a help message listing all available commands with a
+        description of each.  The list is divided into standard commands
+        (listed in packaging.command.STANDARD_COMMANDS) and extra commands
+        (given in self.cmdclass and not standard commands).  The
+        descriptions come from the command class attribute
+        'description'.
+        """
+        std_commands, extra_commands = self._get_command_groups()
+        max_length = 0
+        for cmd in (std_commands + extra_commands):
+            if len(cmd) > max_length:
+                max_length = len(cmd)
+
+        self.print_command_list(std_commands,
+                                "Standard commands",
+                                max_length)
+        if extra_commands:
+            print()
+            self.print_command_list(extra_commands,
+                                    "Extra commands",
+                                    max_length)
+
+    # -- Command class/object methods ----------------------------------
+
+    def get_command_obj(self, command, create=True):
+        """Return the command object for 'command'.  Normally this object
+        is cached on a previous call to 'get_command_obj()'; if no command
+        object for 'command' is in the cache, then we either create and
+        return it (if 'create' is true) or return None.
+        """
+        cmd_obj = self.command_obj.get(command)
+        if not cmd_obj and create:
+            logger.debug("Distribution.get_command_obj(): " \
+                         "creating %r command object", command)
+
+            cls = get_command_class(command)
+            cmd_obj = self.command_obj[command] = cls(self)
+            self.have_run[command] = 0
+
+            # Set any options that were supplied in config files
+            # or on the command line.  (NB. support for error
+            # reporting is lame here: any errors aren't reported
+            # until 'finalize_options()' is called, which means
+            # we won't report the source of the error.)
+            options = self.command_options.get(command)
+            if options:
+                self._set_command_options(cmd_obj, options)
+
+        return cmd_obj
+
+    def _set_command_options(self, command_obj, option_dict=None):
+        """Set the options for 'command_obj' from 'option_dict'.  Basically
+        this means copying elements of a dictionary ('option_dict') to
+        attributes of an instance ('command').
+
+        'command_obj' must be a Command instance.  If 'option_dict' is not
+        supplied, uses the standard option dictionary for this command
+        (from 'self.command_options').
+        """
+        command_name = command_obj.get_command_name()
+        if option_dict is None:
+            option_dict = self.get_option_dict(command_name)
+
+        logger.debug("  setting options for %r command:", command_name)
+
+        for option, (source, value) in option_dict.items():
+            logger.debug("    %s = %s (from %s)", option, value, source)
+            try:
+                bool_opts = [x.replace('-', '_')
+                             for x in command_obj.boolean_options]
+            except AttributeError:
+                bool_opts = []
+            try:
+                neg_opt = command_obj.negative_opt
+            except AttributeError:
+                neg_opt = {}
+
+            try:
+                is_string = isinstance(value, str)
+                if option in neg_opt and is_string:
+                    setattr(command_obj, neg_opt[option], not strtobool(value))
+                elif option in bool_opts and is_string:
+                    setattr(command_obj, option, strtobool(value))
+                elif hasattr(command_obj, option):
+                    setattr(command_obj, option, value)
+                else:
+                    raise PackagingOptionError(
+                        "error in %s: command %r has no such option %r" %
+                        (source, command_name, option))
+            except ValueError as msg:
+                raise PackagingOptionError(msg)
+
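+    # A sketch of the 'option_dict' shape consumed by '_set_command_options()'
+    # (the option names and sources below are hypothetical): each entry maps
+    # an option name to a (source, value) pair, eg.
+    #
+    #   {'verbose': ('command line', '1'),
+    #    'force': ('setup.cfg', 'true')}
+    #
+    # String values for boolean and negative options are run through
+    # strtobool() before being set on the command object.
+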
+    def get_reinitialized_command(self, command, reinit_subcommands=False):
+        """Reinitializes a command to the state it was in when first
+        returned by 'get_command_obj()': ie., initialized but not yet
+        finalized.  This provides the opportunity to sneak option
+        values in programmatically, overriding or supplementing
+        user-supplied values from the config files and command line.
+        You'll have to re-finalize the command object (by calling
+        'finalize_options()' or 'ensure_finalized()') before using it for
+        real.
+
+        'command' should be a command name (string) or command object.  If
+        'reinit_subcommands' is true, also reinitializes the command's
+        sub-commands, as declared by the 'sub_commands' class attribute (if
+        it has one).  See the "install_dist" command for an example.  Only
+        reinitializes the sub-commands that actually matter, ie. those
+        whose test predicates return true.
+
+        Returns the reinitialized command object.
+        """
+        from packaging.command.cmd import Command
+        if not isinstance(command, Command):
+            command_name = command
+            command = self.get_command_obj(command_name)
+        else:
+            command_name = command.get_command_name()
+
+        if not command.finalized:
+            return command
+        command.initialize_options()
+        self.have_run[command_name] = 0
+        command.finalized = False
+        self._set_command_options(command)
+
+        if reinit_subcommands:
+            for sub in command.get_sub_commands():
+                self.get_reinitialized_command(sub, reinit_subcommands)
+
+        return command
+
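+    # A typical use, sketched (the command and option names are hypothetical):
+    #
+    #   build = dist.get_reinitialized_command('build')
+    #   build.build_base = 'alt-build'
+    #   build.ensure_finalized()
+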
+    # -- Methods that operate on the Distribution ----------------------
+
+    def run_commands(self):
+        """Run each command that was seen on the setup script command line.
+        Uses the list of commands found and cache of command objects
+        created by 'get_command_obj()'.
+        """
+        for cmd in self.commands:
+            self.run_command(cmd)
+
+    # -- Methods that operate on its Commands --------------------------
+
+    def run_command(self, command, options=None):
+        """Do whatever it takes to run a command (including nothing at all,
+        if the command has already been run).  Specifically: if we have
+        already created and run the command named by 'command', return
+        silently without doing anything.  If the command named by 'command'
+        doesn't even have a command object yet, create one.  Then invoke
+        'run()' on that command object (or an existing one).
+        """
+        # Already been here, done that? then return silently.
+        if self.have_run.get(command):
+            return
+
+        if options is not None:
+            self.command_options[command] = options
+
+        cmd_obj = self.get_command_obj(command)
+        cmd_obj.ensure_finalized()
+        self.run_command_hooks(cmd_obj, 'pre_hook')
+        logger.info("running %s", command)
+        cmd_obj.run()
+        self.run_command_hooks(cmd_obj, 'post_hook')
+        self.have_run[command] = 1
+
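+    # A sketch of programmatic invocation with extra options (the command
+    # name and option are hypothetical; values use the same (source, value)
+    # pairs as 'command_options'):
+    #
+    #   dist.run_command('sdist', {'formats': ('command line', 'zip')})
+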
+    def run_command_hooks(self, cmd_obj, hook_kind):
+        """Run hooks registered for that command and phase.
+
+        *cmd_obj* is a finalized command object; *hook_kind* is either
+        'pre_hook' or 'post_hook'.
+        """
+        if hook_kind not in ('pre_hook', 'post_hook'):
+            raise ValueError('invalid hook kind: %r' % hook_kind)
+
+        hooks = getattr(cmd_obj, hook_kind, None)
+
+        if hooks is None:
+            return
+
+        for hook in hooks.values():
+            if isinstance(hook, str):
+                try:
+                    hook_obj = resolve_name(hook)
+                except ImportError as e:
+                    raise PackagingModuleError(e)
+            else:
+                hook_obj = hook
+
+            if not callable(hook_obj):
+                raise PackagingOptionError('hook %r is not callable' % hook)
+
+            logger.info('running %s %s for command %s',
+                        hook_kind, hook, cmd_obj.get_command_name())
+            hook_obj(cmd_obj)
+
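+    # Hooks are plain callables stored in dicts on the command object; a
+    # hypothetical registration might look like:
+    #
+    #   cmd.pre_hook = {'check-version': 'mypkg.hooks.check_version'}
+    #
+    # String values are resolved with resolve_name() and then called with
+    # the command object as their only argument.
+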
+    # -- Distribution query methods ------------------------------------
+    def has_pure_modules(self):
+        return len(self.packages or self.py_modules or []) > 0
+
+    def has_ext_modules(self):
+        return self.ext_modules and len(self.ext_modules) > 0
+
+    def has_c_libraries(self):
+        return self.libraries and len(self.libraries) > 0
+
+    def has_modules(self):
+        return self.has_pure_modules() or self.has_ext_modules()
+
+    def has_headers(self):
+        return self.headers and len(self.headers) > 0
+
+    def has_scripts(self):
+        return self.scripts and len(self.scripts) > 0
+
+    def has_data_files(self):
+        return self.data_files and len(self.data_files) > 0
+
+    def is_pure(self):
+        return (self.has_pure_modules() and
+                not self.has_ext_modules() and
+                not self.has_c_libraries())
diff --git a/Lib/packaging/errors.py b/Lib/packaging/errors.py
new file mode 100644
index 0000000..8924a2d
--- /dev/null
+++ b/Lib/packaging/errors.py
@@ -0,0 +1,142 @@
+"""Exceptions used throughout the package.
+
+Submodules of packaging may raise exceptions defined in this module as
+well as standard exceptions; in particular, SystemExit is usually raised
+for errors that are obviously the end-user's fault (e.g. bad
+command-line arguments).
+"""
+
+
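+# A short usage sketch (the command name is hypothetical):
+#
+#   from packaging.errors import PackagingError
+#   try:
+#       dist.run_command('build')
+#   except PackagingError as exc:
+#       raise SystemExit('error: %s' % exc)
+
+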
+class PackagingError(Exception):
+    """The root of all Packaging evil."""
+
+
+class PackagingModuleError(PackagingError):
+    """Unable to load an expected module, or to find an expected class
+    within some module (in particular, command modules and classes)."""
+
+
+class PackagingClassError(PackagingError):
+    """Some command class (or possibly distribution class, if anyone
+    feels a need to subclass Distribution) is found not to be holding
+    up its end of the bargain, ie. implementing some part of the
+    "command "interface."""
+
+
+class PackagingGetoptError(PackagingError):
+    """The option table provided to 'fancy_getopt()' is bogus."""
+
+
+class PackagingArgError(PackagingError):
+    """Raised by fancy_getopt in response to getopt.error -- ie. an
+    error in the command line usage."""
+
+
+class PackagingFileError(PackagingError):
+    """Any problems in the filesystem: expected file not found, etc.
+    Typically this is for problems that we detect before IOError or
+    OSError could be raised."""
+
+
+class PackagingOptionError(PackagingError):
+    """Syntactic/semantic errors in command options, such as use of
+    mutually conflicting options, or inconsistent options,
+    badly-spelled values, etc.  No distinction is made between option
+    values originating in the setup script, the command line, config
+    files, or what-have-you -- but if we *know* something originated in
+    the setup script, we'll raise PackagingSetupError instead."""
+
+
+class PackagingSetupError(PackagingError):
+    """For errors that can be definitely blamed on the setup script,
+    such as invalid keyword arguments to 'setup()'."""
+
+
+class PackagingPlatformError(PackagingError):
+    """We don't know how to do something on the current platform (but
+    we do know how to do it on some platform) -- eg. trying to compile
+    C files on a platform not supported by a CCompiler subclass."""
+
+
+class PackagingExecError(PackagingError):
+    """Any problems executing an external program (such as the C
+    compiler, when compiling C files)."""
+
+
+class PackagingInternalError(PackagingError):
+    """Internal inconsistencies or impossibilities (obviously, this
+    should never be seen if the code is working!)."""
+
+
+class PackagingTemplateError(PackagingError):
+    """Syntax error in a file list template."""
+
+
+class PackagingByteCompileError(PackagingError):
+    """Byte compile error."""
+
+
+class PackagingPyPIError(PackagingError):
+    """Any problem occuring during using the indexes."""
+
+
+# Exception classes used by the CCompiler implementation classes
+class CCompilerError(Exception):
+    """Some compile/link operation failed."""
+
+
+class PreprocessError(CCompilerError):
+    """Failure to preprocess one or more C/C++ files."""
+
+
+class CompileError(CCompilerError):
+    """Failure to compile one or more C/C++ source files."""
+
+
+class LibError(CCompilerError):
+    """Failure to create a static library from one or more C/C++ object
+    files."""
+
+
+class LinkError(CCompilerError):
+    """Failure to link one or more C/C++ object files into an executable
+    or shared library file."""
+
+
+class UnknownFileError(CCompilerError):
+    """Attempt to process an unknown file type."""
+
+
+class MetadataMissingError(PackagingError):
+    """A required metadata is missing"""
+
+
+class MetadataConflictError(PackagingError):
+    """Attempt to read or write metadata fields that are conflictual."""
+
+
+class MetadataUnrecognizedVersionError(PackagingError):
+    """Unknown metadata version number."""
+
+
+class IrrationalVersionError(Exception):
+    """This is an irrational version."""
+
+
+class HugeMajorVersionNumError(IrrationalVersionError):
+    """An irrational version because the major version number is huge
+    (often because a year or date was used).
+
+    See `error_on_huge_major_num` option in `NormalizedVersion` for details.
+    This guard can be disabled by setting that option False.
+    """
+
+
+class InstallationException(Exception):
+    """Base exception for installation scripts"""
+
+
+class InstallationConflict(InstallationException):
+    """Raised when a conflict is detected"""
diff --git a/Lib/packaging/fancy_getopt.py b/Lib/packaging/fancy_getopt.py
new file mode 100644
index 0000000..0490864
--- /dev/null
+++ b/Lib/packaging/fancy_getopt.py
@@ -0,0 +1,451 @@
+"""Command line parsing machinery.
+
+The FancyGetopt class is a wrapper around the getopt module that
+provides the following additional features:
+  * short and long options are tied together
+  * options have help strings, so fancy_getopt could potentially
+    create a complete usage summary
+  * options set attributes of a passed-in object.
+
+It is used under the hood by the command classes.  Do not use directly.
+"""
+
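+# A short usage sketch (the option names are hypothetical):
+#
+#   parser = FancyGetopt([('verbose', 'v', 'run verbosely', 1),
+#                         ('name=', 'n', 'project name')])
+#   args, opts = parser.getopt(['-v', '--name', 'spam', 'build'])
+#   # args == ['build'], opts.verbose == 1, opts.name == 'spam'
+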
+import getopt
+import re
+import sys
+import string
+import textwrap
+
+from packaging.errors import PackagingGetoptError, PackagingArgError
+
+# Much like command_re in packaging.core, this is close to but not quite
+# the same as a Python NAME -- except, in the spirit of most GNU
+# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
+# The similarities to NAME are again not a coincidence...
+longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
+longopt_re = re.compile(r'^%s$' % longopt_pat)
+
+# For recognizing "negative alias" options, eg. "quiet=!verbose"
+neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))
+
+
+class FancyGetopt:
+    """Wrapper around the standard 'getopt()' module that provides some
+    handy extra functionality:
+      * short and long options are tied together
+      * options have help strings, and help text can be assembled
+        from them
+      * options set attributes of a passed-in object
+      * boolean options can have "negative aliases" -- eg. if
+        --quiet is the "negative alias" of --verbose, then "--quiet"
+        on the command line sets 'verbose' to false
+    """
+
+    def __init__(self, option_table=None):
+
+        # The option table is (currently) a list of tuples.  The
+        # tuples may have three or four values:
+        #   (long_option, short_option, help_string [, repeatable])
+        # if an option takes an argument, its long_option should have '='
+        # appended; short_option should just be a single character, no ':'
+        # in any case.  If a long_option doesn't have a corresponding
+        # short_option, short_option should be None.  All option tuples
+        # must have long options.
+        self.option_table = option_table
+
+        # 'option_index' maps long option names to entries in the option
+        # table (ie. those 3-tuples).
+        self.option_index = {}
+        if self.option_table:
+            self._build_index()
+
+        # 'alias' records (duh) alias options; {'foo': 'bar'} means
+        # --foo is an alias for --bar
+        self.alias = {}
+
+        # 'negative_alias' keeps track of options that are the boolean
+        # opposite of some other option
+        self.negative_alias = {}
+
+        # These keep track of the information in the option table.  We
+        # don't actually populate these structures until we're ready to
+        # parse the command line, since the 'option_table' passed in here
+        # isn't necessarily the final word.
+        self.short_opts = []
+        self.long_opts = []
+        self.short2long = {}
+        self.attr_name = {}
+        self.takes_arg = {}
+
+        # And 'option_order' is filled up in 'getopt()'; it records the
+        # original order of options (and their values) on the command line,
+        # but expands short options, converts aliases, etc.
+        self.option_order = []
+
+    def _build_index(self):
+        self.option_index.clear()
+        for option in self.option_table:
+            self.option_index[option[0]] = option
+
+    def set_option_table(self, option_table):
+        self.option_table = option_table
+        self._build_index()
+
+    def add_option(self, long_option, short_option=None, help_string=None):
+        if long_option in self.option_index:
+            raise PackagingGetoptError(
+                  "option conflict: already an option '%s'" % long_option)
+        else:
+            option = (long_option, short_option, help_string)
+            self.option_table.append(option)
+            self.option_index[long_option] = option
+
+    def has_option(self, long_option):
+        """Return true if the option table for this parser has an
+        option with long name 'long_option'."""
+        return long_option in self.option_index
+
+    def _check_alias_dict(self, aliases, what):
+        assert isinstance(aliases, dict)
+        for alias, opt in aliases.items():
+            if alias not in self.option_index:
+                raise PackagingGetoptError(
+                      ("invalid %s '%s': "
+                       "option '%s' not defined") % (what, alias, alias))
+            if opt not in self.option_index:
+                raise PackagingGetoptError(
+                      ("invalid %s '%s': "
+                       "aliased option '%s' not defined") % (what, alias, opt))
+
+    def set_aliases(self, alias):
+        """Set the aliases for this option parser."""
+        self._check_alias_dict(alias, "alias")
+        self.alias = alias
+
+    def set_negative_aliases(self, negative_alias):
+        """Set the negative aliases for this option parser.
+        'negative_alias' should be a dictionary mapping option names to
+        option names, both the key and value must already be defined
+        in the option table."""
+        self._check_alias_dict(negative_alias, "negative alias")
+        self.negative_alias = negative_alias
+
+    def _grok_option_table(self):
+        """Populate the various data structures that keep tabs on the
+        option table.  Called by 'getopt()' before it can do anything
+        worthwhile.
+        """
+        self.long_opts = []
+        self.short_opts = []
+        self.short2long.clear()
+        self.repeat = {}
+
+        for option in self.option_table:
+            if len(option) == 3:
+                integer, short, help = option
+                repeat = 0
+            elif len(option) == 4:
+                integer, short, help, repeat = option
+            else:
+                # the option table is part of the code, so simply
+                # assert that it is correct
+                raise ValueError("invalid option tuple: %r" % option)
+
+            # Type- and value-check the option names
+            if not isinstance(integer, str) or len(integer) < 2:
+                raise PackagingGetoptError(
+                      ("invalid long option '%s': "
+                       "must be a string of length >= 2") % integer)
+
+            if (not ((short is None) or
+                     (isinstance(short, str) and len(short) == 1))):
+                raise PackagingGetoptError(
+                      ("invalid short option '%s': "
+                       "must be a single character or None") % short)
+
+            self.repeat[integer] = repeat
+            self.long_opts.append(integer)
+
+            if integer[-1] == '=':             # option takes an argument?
+                if short:
+                    short = short + ':'
+                integer = integer[0:-1]
+                self.takes_arg[integer] = 1
+            else:
+
+                # Is option is a "negative alias" for some other option (eg.
+                # "quiet" == "!verbose")?
+                alias_to = self.negative_alias.get(integer)
+                if alias_to is not None:
+                    if self.takes_arg[alias_to]:
+                        raise PackagingGetoptError(
+                              ("invalid negative alias '%s': "
+                               "aliased option '%s' takes a value") % \
+                               (integer, alias_to))
+
+                    self.long_opts[-1] = integer   # XXX redundant?!
+                    self.takes_arg[integer] = 0
+
+                else:
+                    self.takes_arg[integer] = 0
+
+            # If this is an alias option, make sure its "takes arg" flag is
+            # the same as the option it's aliased to.
+            alias_to = self.alias.get(integer)
+            if alias_to is not None:
+                if self.takes_arg[integer] != self.takes_arg[alias_to]:
+                    raise PackagingGetoptError(
+                          ("invalid alias '%s': inconsistent with "
+                           "aliased option '%s' (one of them takes a value, "
+                           "the other doesn't") % (integer, alias_to))
+
+            # Now enforce some bondage on the long option name, so we can
+            # later translate it to an attribute name on some object.  Have
+            # to do this a bit late to make sure we've removed any trailing
+            # '='.
+            if not longopt_re.match(integer):
+                raise PackagingGetoptError(
+                      ("invalid long option name '%s' " +
+                       "(must be letters, numbers, hyphens only") % integer)
+
+            self.attr_name[integer] = integer.replace('-', '_')
+            if short:
+                self.short_opts.append(short)
+                self.short2long[short[0]] = integer
+
+    def getopt(self, args=None, object=None):
+        """Parse command-line options in args. Store as attributes on object.
+
+        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
+        'object' is None or not supplied, creates a new OptionDummy
+        object, stores option values there, and returns a tuple (args,
+        object).  If 'object' is supplied, it is modified in place and
+        'getopt()' just returns 'args'; in both cases, the returned
+        'args' is a modified copy of the passed-in 'args' list, which
+        is left untouched.
+        """
+        if args is None:
+            args = sys.argv[1:]
+        if object is None:
+            object = OptionDummy()
+            created_object = 1
+        else:
+            created_object = 0
+
+        self._grok_option_table()
+
+        short_opts = ' '.join(self.short_opts)
+
+        try:
+            opts, args = getopt.getopt(args, short_opts, self.long_opts)
+        except getopt.error as msg:
+            raise PackagingArgError(msg)
+
+        for opt, val in opts:
+            if len(opt) == 2 and opt[0] == '-':   # it's a short option
+                opt = self.short2long[opt[1]]
+            else:
+                assert len(opt) > 2 and opt[:2] == '--'
+                opt = opt[2:]
+
+            alias = self.alias.get(opt)
+            if alias:
+                opt = alias
+
+            if not self.takes_arg[opt]:     # boolean option?
+                assert val == '', "boolean option can't have value"
+                alias = self.negative_alias.get(opt)
+                if alias:
+                    opt = alias
+                    val = 0
+                else:
+                    val = 1
+
+            attr = self.attr_name[opt]
+            # The only repeating option at the moment is 'verbose'.
+            # It has a negative option -q quiet, which should set verbose = 0.
+            if val and self.repeat.get(attr) is not None:
+                val = getattr(object, attr, 0) + 1
+            setattr(object, attr, val)
+            self.option_order.append((opt, val))
+
+        # for opts
+        if created_object:
+            return args, object
+        else:
+            return args
+
+    def get_option_order(self):
+        """Returns the list of (option, value) tuples processed by the
+        previous run of 'getopt()'.  Raises RuntimeError if
+        'getopt()' hasn't been called yet.
+        """
+        if self.option_order is None:
+            raise RuntimeError("'getopt()' hasn't been called yet")
+        return self.option_order
+
+    def generate_help(self, header=None):
+        """Generate help text (a list of strings, one per suggested line of
+        output) from the option table for this FancyGetopt object.
+        """
+        # Blithely assume the option table is good: probably wouldn't call
+        # 'generate_help()' unless you've already called 'getopt()'.
+
+        # First pass: determine maximum length of long option names
+        max_opt = 0
+        for option in self.option_table:
+            integer = option[0]
+            short = option[1]
+            l = len(integer)
+            if integer[-1] == '=':
+                l = l - 1
+            if short is not None:
+                l = l + 5                   # " (-x)" where short == 'x'
+            if l > max_opt:
+                max_opt = l
+
+        opt_width = max_opt + 2 + 2 + 2     # room for indent + dashes + gutter
+
+        # Typical help block looks like this:
+        #   --foo       controls foonabulation
+        # Help block for longest option looks like this:
+        #   --flimflam  set the flim-flam level
+        # and with wrapped text:
+        #   --flimflam  set the flim-flam level (must be between
+        #               0 and 100, except on Tuesdays)
+        # Options with short names will have the short name shown (but
+        # it doesn't contribute to max_opt):
+        #   --foo (-f)  controls foonabulation
+        # If adding the short option would make the left column too wide,
+        # we push the explanation off to the next line
+        #   --flimflam (-l)
+        #               set the flim-flam level
+        # Important parameters:
+        #   - 2 spaces before option block start lines
+        #   - 2 dashes for each long option name
+        #   - min. 2 spaces between option and explanation (gutter)
+        #   - 5 characters (incl. space) for short option name
+
+        # Now generate lines of help text.  (If 80 columns were good enough
+        # for Jesus, then 78 columns are good enough for me!)
+        line_width = 78
+        text_width = line_width - opt_width
+        big_indent = ' ' * opt_width
+        if header:
+            lines = [header]
+        else:
+            lines = ['Option summary:']
+
+        for option in self.option_table:
+            integer, short, help = option[:3]
+            text = textwrap.wrap(help, text_width)
+
+            # Case 1: no short option at all (makes life easy)
+            if short is None:
+                if text:
+                    lines.append("  --%-*s  %s" % (max_opt, integer, text[0]))
+                else:
+                    lines.append("  --%-*s  " % (max_opt, integer))
+
+            # Case 2: we have a short option, so we have to include it
+            # just after the long option
+            else:
+                opt_names = "%s (-%s)" % (integer, short)
+                if text:
+                    lines.append("  --%-*s  %s" %
+                                 (max_opt, opt_names, text[0]))
+                else:
+                    lines.append("  --%-*s" % opt_names)
+
+            for l in text[1:]:
+                lines.append(big_indent + l)
+
+        return lines
+
+    def print_help(self, header=None, file=None):
+        if file is None:
+            file = sys.stdout
+        for line in self.generate_help(header):
+            file.write(line + "\n")
+
+
+def fancy_getopt(options, negative_opt, object, args):
+    parser = FancyGetopt(options)
+    parser.set_negative_aliases(negative_opt)
+    return parser.getopt(args, object)
+
+
+WS_TRANS = str.maketrans(string.whitespace, ' ' * len(string.whitespace))
+
+
+def wrap_text(text, width):
+    """Split *text* into lines of no more than *width* characters each.
+
+    *text* is a str and *width* an int.  Returns a list of str.
+    """
+
+    if text is None:
+        return []
+    if len(text) <= width:
+        return [text]
+
+    text = text.expandtabs()
+    text = text.translate(WS_TRANS)
+
+    chunks = re.split(r'( +|-+)', text)
+    chunks = [_f for _f in chunks if _f]      # ' - ' results in empty strings
+    lines = []
+
+    while chunks:
+
+        cur_line = []                   # list of chunks (to-be-joined)
+        cur_len = 0                     # length of current line
+
+        while chunks:
+            l = len(chunks[0])
+            if cur_len + l <= width:    # can squeeze (at least) this chunk in
+                cur_line.append(chunks[0])
+                del chunks[0]
+                cur_len = cur_len + l
+            else:                       # this line is full
+                # drop last chunk if all space
+                if cur_line and cur_line[-1][0] == ' ':
+                    del cur_line[-1]
+                break
+
+        if chunks:                      # any chunks left to process?
+
+            # if the current line is still empty, then we had a single
+            # chunk that's too big to fit on a line -- so we break
+            # down and break it up at the line width
+            if cur_len == 0:
+                cur_line.append(chunks[0][0:width])
+                chunks[0] = chunks[0][width:]
+
+            # all-whitespace chunks at the end of a line can be discarded
+            # (and we know from the re.split above that if a chunk has
+            # *any* whitespace, it is *all* whitespace)
+            if chunks[0][0] == ' ':
+                del chunks[0]
+
+        # and store this line in the list-of-all-lines -- as a single
+        # string, of course!
+        lines.append(''.join(cur_line))
+
+    # while chunks
+
+    return lines
+
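+# A quick illustration of the wrapping behaviour:
+#
+#   >>> wrap_text("hello world, welcome to fancy getopt", 15)
+#   ['hello world,', 'welcome to', 'fancy getopt']
+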
+
+class OptionDummy:
+    """Dummy class just used as a place to hold command-line option
+    values as instance attributes."""
+
+    def __init__(self, options=[]):
+        """Create a new OptionDummy instance.  The attributes listed in
+        'options' will be initialized to None."""
+        for opt in options:
+            setattr(self, opt, None)
diff --git a/Lib/packaging/install.py b/Lib/packaging/install.py
new file mode 100644
index 0000000..92657ea
--- /dev/null
+++ b/Lib/packaging/install.py
@@ -0,0 +1,481 @@
+"""Building blocks for installers.
+
+When used as a script, this module installs a release and its
+dependencies, using information obtained from an index (e.g. PyPI).
+
+This is a higher-level module built on packaging.database and
+packaging.pypi.
+"""
+
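+# Typical high-level use (the project name is hypothetical):
+#
+#   from packaging.install import install
+#   install('FooBar')
+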
+import os
+import sys
+import stat
+import errno
+import shutil
+import logging
+import tempfile
+from sysconfig import get_config_var
+
+from packaging import logger
+from packaging.dist import Distribution
+from packaging.util import (_is_archive_file, ask, get_install_method,
+                            egginfo_to_distinfo)
+from packaging.pypi import wrapper
+from packaging.version import get_version_predicate
+from packaging.database import get_distributions, get_distribution
+from packaging.depgraph import generate_graph
+
+from packaging.errors import (PackagingError, InstallationException,
+                              InstallationConflict, CCompilerError)
+from packaging.pypi.errors import ProjectNotFound, ReleaseNotFound
+
+__all__ = ['install_dists', 'install_from_infos', 'get_infos', 'remove',
+           'install', 'install_local_project']
+
+
+def _move_files(files, destination):
+    """Move the list of files in the destination folder, keeping the same
+    structure.
+
+    Return a list of tuple (old, new) emplacement of files
+
+    :param files: a list of files to move.
+    :param destination: the destination directory to put on the files.
+                        if not defined, create a new one, using mkdtemp
+    """
+    if not destination:
+        destination = tempfile.mkdtemp()
+
+    for old in files:
+        filename = os.path.split(old)[-1]
+        new = os.path.join(destination, filename)
+        # create intermediate directories if they do not exist yet
+        try:
+            os.makedirs(os.path.dirname(new))
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+        os.rename(old, new)
+        yield old, new
+
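+# Note that _move_files() is a generator: files are only moved as the
+# (old, new) pairs are consumed, eg. (the path is hypothetical):
+#
+#   moved = list(_move_files(files, '/tmp/staging'))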
+
+def _run_distutils_install(path):
+    # backward compat: using setuptools or plain-distutils
+    cmd = '%s setup.py install --record=%s'
+    record_file = os.path.join(path, 'RECORD')
+    os.system(cmd % (sys.executable, record_file))
+    if not os.path.exists(record_file):
+        raise ValueError('failed to install')
+    egginfo_to_distinfo(record_file, remove_egginfo=True)
+
+
+def _run_setuptools_install(path):
+    cmd = '%s setup.py install --record=%s --single-version-externally-managed'
+    record_file = os.path.join(path, 'RECORD')
+    os.system(cmd % (sys.executable, record_file))
+    if not os.path.exists(record_file):
+        raise ValueError('failed to install')
+    egginfo_to_distinfo(record_file, remove_egginfo=True)
+
+
+def _run_packaging_install(path):
+    # XXX check for a valid setup.cfg?
+    dist = Distribution()
+    dist.parse_config_files()
+    try:
+        dist.run_command('install_dist')
+    except (IOError, os.error, PackagingError, CCompilerError) as msg:
+        raise SystemExit("error: " + str(msg))
+
+
+def _install_dist(dist, path):
+    """Install a distribution into a path.
+
+    This:
+
+    * unpacks the distribution
+    * copies the files into "path"
+    * determines if the distribution is packaging or distutils1.
+    """
+    where = dist.unpack()
+
+    if where is None:
+        raise ValueError('Cannot locate the unpacked archive')
+
+    return _run_install_from_archive(where)
+
+
+def install_local_project(path):
+    """Install a distribution from a source directory.
+
+    If the source directory contains a setup.py install using distutils1.
+    If a setup.cfg is found, install using the install_dist command.
+
+    """
+    path = os.path.abspath(path)
+    if os.path.isdir(path):
+        logger.info('installing from source directory: %s', path)
+        _run_install_from_dir(path)
+    elif _is_archive_file(path):
+        logger.info('installing from archive: %s', path)
+        _unpacked_dir = tempfile.mkdtemp()
+        shutil.unpack_archive(path, _unpacked_dir)
+        _run_install_from_archive(_unpacked_dir)
+    else:
+        logger.warning('no projects to install')
+
+
+def _run_install_from_archive(source_dir):
+    # XXX need a better way
+    for item in os.listdir(source_dir):
+        fullpath = os.path.join(source_dir, item)
+        if os.path.isdir(fullpath):
+            source_dir = fullpath
+            break
+    return _run_install_from_dir(source_dir)
+
+
+install_methods = {
+    'packaging': _run_packaging_install,
+    'setuptools': _run_setuptools_install,
+    'distutils': _run_distutils_install}
+
+
+def _run_install_from_dir(source_dir):
+    old_dir = os.getcwd()
+    os.chdir(source_dir)
+    install_method = get_install_method(source_dir)
+    func = install_methods[install_method]
+    try:
+        return func(source_dir)
+    finally:
+        os.chdir(old_dir)
+
+
+def install_dists(dists, path, paths=sys.path):
+    """Install all distributions provided in dists, with the given prefix.
+
+    If an error occurs while installing one of the distributions, uninstall
+    all the distributions installed so far in the context of this function.
+
+    Return a list of installed dists.
+
+    :param dists: distributions to install
+    :param path: base path to install distribution in
+    :param paths: list of paths (defaults to sys.path) to look for info
+    """
+    if not path:
+        path = tempfile.mkdtemp()
+
+    installed_dists = []
+    for dist in dists:
+        logger.info('installing %s %s', dist.name, dist.version)
+        try:
+            _install_dist(dist, path)
+            installed_dists.append(dist)
+        except Exception as e:
+            logger.info('failed: %s', e)
+
+            # reverting
+            for installed_dist in installed_dists:
+                logger.info('reverting %s', installed_dist)
+                _remove_dist(installed_dist, paths)
+            raise
+    return installed_dists
+
+
+def install_from_infos(install_path=None, install=[], remove=[], conflicts=[],
+                       paths=sys.path):
+    """Install and remove the given distributions.
+
+    The function signature is made to be compatible with that of get_infos.
+    The aim is to provide a way to install and remove what is asked, and to
+    roll back if needed.
+
+    So it's not possible to end up in an inconsistent state: a distribution
+    is either installed or uninstalled, never half-installed.
+
+    The process follows these steps:
+
+        1. Move all distributions that will be removed to a temporary
+           location.
+        2. Install all the distributions that will be installed in a
+           temporary location.
+        3. If the installation fails, roll back (ie. move back) those
+           distributions, or remove what has been installed.
+        4. Else, move the distributions to the right locations, and remove
+           for real the distributions that need to be removed.
+
+    :param install_path: the installation path where we want to install the
+                         distributions.
+    :param install: list of distributions that will be installed; install_path
+                    must be provided if this list is not empty.
+    :param remove: list of distributions that will be removed.
+    :param conflicts: list of conflicting distributions, ie. distributions
+                      that will be in conflict once the install and remove
+                      operations are processed.
+    :param paths: list of paths (defaults to sys.path) to look for info
+    """
+    # first of all, if we have conflicts, stop here.
+    if conflicts:
+        raise InstallationConflict(conflicts)
+
+    if install and not install_path:
+        raise ValueError("Distributions are to be installed but `install_path`"
+                         " is not provided.")
+
+    # before removing the files, we start by moving them away; then, if any
+    # error occurs, we can put them back in place.
+    temp_files = {}  # maps dist -> list of (old, new) file paths
+    temp_dir = None
+    if remove:
+        temp_dir = tempfile.mkdtemp()
+        for dist in remove:
+            files = dist.list_installed_files()
+            # consume the generator now so the files are moved immediately
+            # and the pairs can be iterated again on rollback
+            temp_files[dist] = list(_move_files(files, temp_dir))
+    try:
+        if install:
+            install_dists(install, install_path, paths)
+    except:
+        # if an error occurs, put back the files in the right place.
+        for files in temp_files.values():
+            for old, new in files:
+                shutil.move(new, old)
+        if temp_dir:
+            shutil.rmtree(temp_dir)
+        # now re-raising
+        raise
+
+    # we can remove them for good
+    for files in temp_files.values():
+        for old, new in files:
+            os.remove(new)
+    if temp_dir:
+        shutil.rmtree(temp_dir)
+
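+# install_from_infos() is normally driven by the dict returned from
+# get_infos(); a hypothetical call might look like:
+#
+#   infos = get_infos('FooBar (<=1.2)')
+#   install_from_infos('/opt/py/site-packages', infos['install'],
+#                      infos['remove'], infos['conflict'])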
+
+def _get_setuptools_deps(release):
+    # NotImplementedError
+    pass
+
+
+def get_infos(requirements, index=None, installed=None, prefer_final=True):
+    """Return the informations on what's going to be installed and upgraded.
+
+    :param requirements: is a *string* containing the requirements for this
+                         project (for instance "FooBar 1.1" or "BarBaz (<1.2)")
+    :param index: If an index is specified, use this one, otherwise, use
+                  :class index.ClientWrapper: to get project metadatas.
+    :param installed: a list of already installed distributions.
+    :param prefer_final: when picking up the releases, prefer a "final" one
+                         over a beta/alpha/etc one.
+
+    The results are returned in a dict, containing all the operations
+    needed to install the given requirements::
+
+        >>> get_install_info("FooBar (<=1.2)")
+        {'install': [<FooBar 1.1>], 'remove': [], 'conflict': []}
+
+    Conflict contains all the conflicting distributions, if there is a
+    conflict.
+    """
+    # this function does several things:
+    # 1. get a release specified by the requirements
+    # 2. gather its metadata, using setuptools compatibility if needed
+    # 3. compare this tree with what is currently installed on the system,
+    #    return the requirements of what is missing
+    # 4. do that recursively and merge back the results
+    # 5. return a dict containing information about what is needed to install
+    #    or remove
+
+    if not installed:
+        logger.info('reading installed distributions')
+        installed = list(get_distributions(use_egg_info=True))
+
+    infos = {'install': [], 'remove': [], 'conflict': []}
+    # Is a compatible version of the project already installed?
+    predicate = get_version_predicate(requirements)
+    found = False
+
+    # check that the project isn't already installed
+    for installed_project in installed:
+        # is it a compatible project?
+        if predicate.name.lower() != installed_project.name.lower():
+            continue
+        found = True
+        logger.info('found %s %s', installed_project.name,
+                    installed_project.metadata['version'])
+
+        # if we already have something installed, check it matches the
+        # requirements
+        if predicate.match(installed_project.metadata['version']):
+            return infos
+        break
+
+    if not found:
+        logger.info('project not installed')
+
+    if not index:
+        index = wrapper.ClientWrapper()
+
+    if not installed:
+        installed = get_distributions(use_egg_info=True)
+
+    # Get all the releases that match the requirements
+    try:
+        release = index.get_release(requirements)
+    except (ReleaseNotFound, ProjectNotFound):
+        raise InstallationException('Release not found: "%s"' % requirements)
+
+    if release is None:
+        logger.info('could not find a matching project')
+        return infos
+
+    metadata = release.fetch_metadata()
+
+    # we need to build setuptools deps if any
+    if 'requires_dist' not in metadata:
+        metadata['requires_dist'] = _get_setuptools_deps(release)
+
+    # build the dependency graph with local and required dependencies
+    dists = list(installed)
+    dists.append(release)
+    depgraph = generate_graph(dists)
+
+    # Get what the missing deps are
+    dists = depgraph.missing[release]
+    if dists:
+        logger.info("missing dependencies found, retrieving metadata")
+        # we have missing deps
+        for dist in dists:
+            _update_infos(infos, get_infos(dist, index, installed))
+
+    # Fill in the infos
+    existing = [d for d in installed if d.name == release.name]
+    if existing:
+        infos['remove'].append(existing[0])
+        infos['conflict'].extend(depgraph.reverse_list[existing[0]])
+    infos['install'].append(release)
+    return infos
+
+
+def _update_infos(infos, new_infos):
+    """extends the lists contained in the `info` dict with those contained
+    in the `new_info` one
+    """
+    for key, value in infos.items():
+        if key in new_infos:
+            infos[key].extend(new_infos[key])
+
+
+def _remove_dist(dist, paths=sys.path):
+    remove(dist.name, paths)
+
+
+def remove(project_name, paths=sys.path, auto_confirm=True):
+    """Removes a single project from the installation"""
+    dist = get_distribution(project_name, use_egg_info=True, paths=paths)
+    if dist is None:
+        raise PackagingError('Distribution "%s" not found' % project_name)
+    files = dist.list_installed_files(local=True)
+    rmdirs = []
+    rmfiles = []
+    tmp = tempfile.mkdtemp(prefix=project_name + '-uninstall')
+    try:
+        for file_, md5, size in files:
+            if os.path.isfile(file_):
+                dirname, filename = os.path.split(file_)
+                tmpfile = os.path.join(tmp, filename)
+                try:
+                    # move the file away and back again, as a check that
+                    # it can actually be removed
+                    os.rename(file_, tmpfile)
+                finally:
+                    if not os.path.isfile(file_):
+                        os.rename(tmpfile, file_)
+                if file_ not in rmfiles:
+                    rmfiles.append(file_)
+                if dirname not in rmdirs:
+                    rmdirs.append(dirname)
+    finally:
+        shutil.rmtree(tmp)
+
+    logger.info('removing %r: ', project_name)
+
+    for file_ in rmfiles:
+        logger.info('  %s', file_)
+
+    # Taken from the pip project
+    if auto_confirm:
+        response = 'y'
+    else:
+        response = ask('Proceed (y/n)? ', ('y', 'n'))
+
+    if response == 'y':
+        file_count = 0
+        for file_ in rmfiles:
+            os.remove(file_)
+            file_count += 1
+
+        dir_count = 0
+        for dirname in rmdirs:
+            if not os.path.exists(dirname):
+                # could have been removed already
+                continue
+
+            files_count = 0
+            for root, dirs, files in os.walk(dirname):
+                files_count += len(files)
+
+            if files_count > 0:
+                # XXX Warning
+                continue
+
+            # the tree contains no files, only (possibly empty) dirs;
+            # remove it if we have write permission
+            if os.stat(dirname).st_mode & stat.S_IWUSR:
+                # XXX Add a callable in shutil.rmtree to count
+                # the number of deleted elements
+                shutil.rmtree(dirname)
+                dir_count += 1
+
+        # removing the top path
+        # XXX count it ?
+        if os.path.exists(dist.path):
+            shutil.rmtree(dist.path)
+
+        logger.info('success: removed %d files and %d dirs',
+                    file_count, dir_count)
+
+
+def install(project):
+    logger.info('getting information about %r', project)
+    try:
+        info = get_infos(project)
+    except InstallationException:
+        logger.info('could not find %r', project)
+        return
+
+    if info['install'] == []:
+        logger.info('nothing to install')
+        return
+
+    install_path = get_config_var('base')
+    try:
+        install_from_infos(install_path,
+                           info['install'], info['remove'], info['conflict'])
+
+    except InstallationConflict as e:
+        if logger.isEnabledFor(logging.INFO):
+            projects = ['%s %s' % (p.name, p.version) for p in e.args[0]]
+            logger.info('%r conflicts with %s', project, ','.join(projects))
+
+
+def _main(**attrs):
+    if 'script_args' not in attrs:
+        attrs['requirements'] = sys.argv[1]
+    get_infos(**attrs)
+
+if __name__ == '__main__':
+    _main()
diff --git a/Lib/packaging/manifest.py b/Lib/packaging/manifest.py
new file mode 100644
index 0000000..a379853
--- /dev/null
+++ b/Lib/packaging/manifest.py
@@ -0,0 +1,372 @@
+"""Class representing the list of files in a distribution.
+
+The Manifest class can be used to:
+
+ - read or write a MANIFEST file
+ - read a template file and find out the file list
+"""
+# XXX todo: document + add tests
+import re
+import os
+import fnmatch
+
+from packaging import logger
+from packaging.util import write_file, convert_path
+from packaging.errors import (PackagingTemplateError,
+                              PackagingInternalError)
+
+__all__ = ['Manifest']
+
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile(r'\\[ \t]*\n', re.M)
+_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
+
+
+class Manifest(object):
+    """A list of files built by on exploring the filesystem and filtered by
+    applying various patterns to what we find there.
+    """
+
+    def __init__(self):
+        self.allfiles = None
+        self.files = []
+
+    #
+    # Public API
+    #
+
+    def findall(self, dir=os.curdir):
+        self.allfiles = _findall(dir)
+
+    def append(self, item):
+        self.files.append(item)
+
+    def extend(self, items):
+        self.files.extend(items)
+
+    def sort(self):
+        # Not a strict lexical sort!
+        self.files = [os.path.join(*path_tuple) for path_tuple in
+                      sorted(os.path.split(path) for path in self.files)]
+
+    def clear(self):
+        """Clear all collected files."""
+        self.files = []
+        if self.allfiles is not None:
+            self.allfiles = []
+
+    def remove_duplicates(self):
+        # Assumes list has been sorted!
+        for i in range(len(self.files) - 1, 0, -1):
+            if self.files[i] == self.files[i - 1]:
+                del self.files[i]
+
+    def read_template(self, path_or_file):
+        """Read and parse a manifest template file.
+        'path_or_file' can be a path or a file-like object.
+
+        Updates the list accordingly.
+        """
+        if isinstance(path_or_file, str):
+            f = open(path_or_file)
+        else:
+            f = path_or_file
+
+        try:
+            content = f.read()
+            # first, let's unwrap collapsed lines
+            content = _COLLAPSE_PATTERN.sub('', content)
+            # next, let's remove commented lines and empty lines
+            content = _COMMENTED_LINE.sub('', content)
+
+            # now we have our cleaned up lines
+            lines = [line.strip() for line in content.split('\n')]
+        finally:
+            f.close()
+
+        for line in lines:
+            if line == '':
+                continue
+            try:
+                self._process_template_line(line)
+            except PackagingTemplateError as msg:
+                logger.warning("%s, %s", path_or_file, msg)
+
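+    # A hypothetical template, to illustrate what read_template() accepts
+    # (the grammar is handled by _parse_template_line() below):
+    #
+    #   include README
+    #   recursive-include docs *.rst *.txt
+    #   prune build
+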
+    def write(self, path):
+        """Write the file list in 'self.filelist' (presumably as filled in
+        by 'add_defaults()' and 'read_template()') to the manifest file
+        named by 'self.manifest'.
+        """
+        if os.path.isfile(path):
+            with open(path) as fp:
+                first_line = fp.readline()
+
+            if first_line != '# file GENERATED by packaging, do NOT edit\n':
+                logger.info("not writing to manually maintained "
+                            "manifest file %r", path)
+                return
+
+        self.sort()
+        self.remove_duplicates()
+        content = self.files[:]
+        content.insert(0, '# file GENERATED by packaging, do NOT edit')
+        logger.info("writing manifest file %r", path)
+        write_file(path, content)
+
+    def read(self, path):
+        """Read the manifest file (named by 'self.manifest') and use it to
+        fill in 'self.filelist', the list of files to include in the source
+        distribution.
+        """
+        logger.info("reading manifest file %r", path)
+        with open(path) as manifest:
+            for line in manifest:
+                line = line.strip()
+                if line and not line.startswith('#'):
+                    self.append(line)
+
+    def exclude_pattern(self, pattern, anchor=True, prefix=None,
+                        is_regex=False):
+        """Remove strings (presumably filenames) from 'files' that match
+        'pattern'.
+
+        Other parameters are the same as for '_include_pattern()', below.
+        The list 'self.files' is modified in place. Return True if files are
+        found.
+        """
+        files_found = False
+        pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)
+        for i in range(len(self.files) - 1, -1, -1):
+            if pattern_re.search(self.files[i]):
+                del self.files[i]
+                files_found = True
+
+        return files_found
+
+    #
+    # Private API
+    #
+
+    def _parse_template_line(self, line):
+        words = line.split()
+        if len(words) == 1:
+            # no action given, let's use the default 'include'
+            words.insert(0, 'include')
+
+        action = words[0]
+        patterns = dir = dir_pattern = None
+
+        if action in ('include', 'exclude',
+                      'global-include', 'global-exclude'):
+            if len(words) < 2:
+                raise PackagingTemplateError(
+                      "%r expects <pattern1> <pattern2> ..." % action)
+
+            patterns = [convert_path(word) for word in words[1:]]
+
+        elif action in ('recursive-include', 'recursive-exclude'):
+            if len(words) < 3:
+                raise PackagingTemplateError(
+                      "%r expects <dir> <pattern1> <pattern2> ..." % action)
+
+            dir = convert_path(words[1])
+            patterns = [convert_path(word) for word in words[2:]]
+
+        elif action in ('graft', 'prune'):
+            if len(words) != 2:
+                raise PackagingTemplateError(
+                     "%r expects a single <dir_pattern>" % action)
+
+            dir_pattern = convert_path(words[1])
+
+        else:
+            raise PackagingTemplateError("unknown action %r" % action)
+
+        return action, patterns, dir, dir_pattern
+
+    def _process_template_line(self, line):
+        # Parse the line: split it up, make sure the right number of words
+        # is there, and return the relevant words.  'action' is always
+        # defined: it's the first word of the line.  Which of the other
+        # three are defined depends on the action; it'll be either
+        # patterns, (dir and patterns), or (dir_pattern).
+        action, patterns, dir, dir_pattern = self._parse_template_line(line)
+
+        # OK, now we know that the action is valid and we have the
+        # right number of words on the line for that action -- so we
+        # can proceed with minimal error-checking.
+        if action == 'include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=True):
+                    logger.warning("no files found matching %r", pattern)
+
+        elif action == 'exclude':
+            for pattern in patterns:
+                if not self.exclude_pattern(pattern, anchor=True):
+                    logger.warning("no previously-included files "
+                                   "found matching %r", pattern)
+
+        elif action == 'global-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=False):
+                    logger.warning("no files found matching %r "
+                                   "anywhere in distribution", pattern)
+
+        elif action == 'global-exclude':
+            for pattern in patterns:
+                if not self.exclude_pattern(pattern, anchor=False):
+                    logger.warning("no previously-included files "
+                                   "matching %r found anywhere in "
+                                   "distribution", pattern)
+
+        elif action == 'recursive-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, prefix=dir):
+                    logger.warning("no files found matching %r "
+                                   "under directory %r", pattern, dir)
+
+        elif action == 'recursive-exclude':
+            for pattern in patterns:
+                if not self.exclude_pattern(pattern, prefix=dir):
+                    logger.warning("no previously-included files "
+                                   "matching %r found under directory %r",
+                                   pattern, dir)
+
+        elif action == 'graft':
+            if not self._include_pattern(None, prefix=dir_pattern):
+                logger.warning("no directories found matching %r",
+                               dir_pattern)
+
+        elif action == 'prune':
+            if not self.exclude_pattern(None, prefix=dir_pattern):
+                logger.warning("no previously-included directories found "
+                               "matching %r", dir_pattern)
+        else:
+            raise PackagingInternalError(
+                "this cannot happen: invalid action %r" % action)
+
+    def _include_pattern(self, pattern, anchor=True, prefix=None,
+                         is_regex=False):
+        """Select strings (presumably filenames) from 'self.files' that
+        match 'pattern', a Unix-style wildcard (glob) pattern.
+
+        Patterns are not quite the same as implemented by the 'fnmatch'
+        module: '*' and '?' match non-special characters, where "special"
+        is platform-dependent: slash on Unix; colon, slash, and backslash on
+        DOS/Windows; and colon on Mac OS.
+
+        If 'anchor' is true (the default), then the pattern match is more
+        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
+        'anchor' is false, both of these will match.
+
+        If 'prefix' is supplied, then only filenames starting with 'prefix'
+        (itself a pattern) and ending with 'pattern', with anything in between
+        them, will match.  'anchor' is ignored in this case.
+
+        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+        'pattern' is assumed to be either a string containing a regex or a
+        regex object -- no translation is done, the regex is just compiled
+        and used as-is.
+
+        Selected strings will be added to self.files.
+
+        Return True if files are found.
+        """
+        files_found = False
+        pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)
+
+        # delayed loading of allfiles list
+        if self.allfiles is None:
+            self.findall()
+
+        for name in self.allfiles:
+            if pattern_re.search(name):
+                self.files.append(name)
+                files_found = True
+
+        return files_found
+
+
+#
+# Utility functions
+#
+def _findall(dir=os.curdir):
+    """Find all files under 'dir' and return the list of full filenames
+    (relative to 'dir').
+    """
+    from stat import S_ISREG, S_ISDIR, S_ISLNK
+
+    files = []
+    stack = [dir]
+    pop = stack.pop
+    push = stack.append
+
+    while stack:
+        dir = pop()
+        names = os.listdir(dir)
+
+        for name in names:
+            if dir != os.curdir:        # avoid the dreaded "./" syndrome
+                fullname = os.path.join(dir, name)
+            else:
+                fullname = name
+
+            # Avoid excess stat calls -- just one will do, thank you!
+            stat = os.stat(fullname)
+            mode = stat.st_mode
+            if S_ISREG(mode):
+                files.append(fullname)
+            elif S_ISDIR(mode) and not S_ISLNK(mode):
+                push(fullname)
+
+    return files
+
+
+def _glob_to_re(pattern):
+    """Translate a shell-like glob pattern to a regular expression.
+
+    Return a string containing the regex.  Differs from
+    'fnmatch.translate()' in that '*' does not match "special characters"
+    (which are platform-specific).
+    """
+    pattern_re = fnmatch.translate(pattern)
+
+    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+    # and by extension they shouldn't match such "special characters" under
+    # any OS.  So change all non-escaped dots in the RE to match any
+    # character except the special characters.
+    # XXX currently the "special characters" are just slash -- i.e. this is
+    # Unix-only.
+    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^/]', pattern_re)
+
+    return pattern_re
+
+
+def _translate_pattern(pattern, anchor=True, prefix=None, is_regex=False):
+    """Translate a shell-like wildcard pattern to a compiled regular
+    expression.
+
+    Return the compiled regex.  If 'is_regex' true,
+    then 'pattern' is directly compiled to a regex (if it's a string)
+    or just returned as-is (assumes it's a regex object).
+    """
+    if is_regex:
+        if isinstance(pattern, str):
+            return re.compile(pattern)
+        else:
+            return pattern
+
+    if pattern:
+        pattern_re = _glob_to_re(pattern)
+    else:
+        pattern_re = ''
+
+    if prefix is not None:
+        # ditch end of pattern character
+        empty_pattern = _glob_to_re('')
+        prefix_re = _glob_to_re(prefix)[:-len(empty_pattern)]
+        pattern_re = "^" + os.path.join(prefix_re, ".*" + pattern_re)
+    else:                               # no prefix -- respect anchor flag
+        if anchor:
+            pattern_re = "^" + pattern_re
+
+    return re.compile(pattern_re)
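+
+
+# Illustrative behaviour of the helpers above, assuming Unix-style paths
+# (a sketch, not part of the module's API):
+#
+#   _translate_pattern('*.py', anchor=True).search('foo.py')       # match
+#   _translate_pattern('*.py', anchor=True).search('pkg/foo.py')   # None
+#   _translate_pattern('*.py', prefix='pkg').search('pkg/foo.py')  # match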
diff --git a/Lib/packaging/markers.py b/Lib/packaging/markers.py
new file mode 100644
index 0000000..4bbac7e
--- /dev/null
+++ b/Lib/packaging/markers.py
@@ -0,0 +1,187 @@
+"""Parser for the environment markers micro-language defined in PEP 345."""
+
+import sys
+import platform
+import os
+
+from tokenize import tokenize, NAME, OP, STRING, ENDMARKER, ENCODING
+from io import BytesIO
+
+__all__ = ['interpret']
+
+
+# allowed operators
+_OPERATORS = {'==': lambda x, y: x == y,
+              '!=': lambda x, y: x != y,
+              '>': lambda x, y: x > y,
+              '>=': lambda x, y: x >= y,
+              '<': lambda x, y: x < y,
+              '<=': lambda x, y: x <= y,
+              'in': lambda x, y: x in y,
+              'not in': lambda x, y: x not in y}
+
+
+def _operate(operation, x, y):
+    return _OPERATORS[operation](x, y)
+
+
+# restricted set of variables
+_VARS = {'sys.platform': sys.platform,
+         'python_version': sys.version[:3],
+         'python_full_version': sys.version.split(' ', 1)[0],
+         'os.name': os.name,
+         'platform.version': platform.version(),
+         'platform.machine': platform.machine(),
+         'platform.python_implementation': platform.python_implementation()}
+
+
+class _Operation:
+
+    def __init__(self, execution_context=None):
+        self.left = None
+        self.op = None
+        self.right = None
+        if execution_context is None:
+            execution_context = {}
+        self.execution_context = execution_context
+
+    def _get_var(self, name):
+        if name in self.execution_context:
+            return self.execution_context[name]
+        return _VARS[name]
+
+    def __repr__(self):
+        return '%s %s %s' % (self.left, self.op, self.right)
+
+    def _is_string(self, value):
+        if value is None or len(value) < 2:
+            return False
+        for delimiter in '"\'':
+            if value[0] == value[-1] == delimiter:
+                return True
+        return False
+
+    def _is_name(self, value):
+        return value in _VARS
+
+    def _convert(self, value):
+        if value in _VARS:
+            return self._get_var(value)
+        return value.strip('"\'')
+
+    def _check_name(self, value):
+        if value not in _VARS:
+            raise NameError(value)
+
+    def _nonsense_op(self):
+        msg = 'This operation is not supported: "%s"' % self
+        raise SyntaxError(msg)
+
+    def __call__(self):
+        # make sure we do something useful
+        if self._is_string(self.left):
+            if self._is_string(self.right):
+                self._nonsense_op()
+            self._check_name(self.right)
+        else:
+            if not self._is_string(self.right):
+                self._nonsense_op()
+            self._check_name(self.left)
+
+        if self.op not in _OPERATORS:
+            raise TypeError('Operator not supported "%s"' % self.op)
+
+        left = self._convert(self.left)
+        right = self._convert(self.right)
+        return _operate(self.op, left, right)
+
+
+class _OR:
+    def __init__(self, left, right=None):
+        self.left = left
+        self.right = right
+
+    def filled(self):
+        return self.right is not None
+
+    def __repr__(self):
+        return 'OR(%r, %r)' % (self.left, self.right)
+
+    def __call__(self):
+        return self.left() or self.right()
+
+
+class _AND:
+    def __init__(self, left, right=None):
+        self.left = left
+        self.right = right
+
+    def filled(self):
+        return self.right is not None
+
+    def __repr__(self):
+        return 'AND(%r, %r)' % (self.left, self.right)
+
+    def __call__(self):
+        return self.left() and self.right()
+
+
+def interpret(marker, execution_context=None):
+    """Interpret a marker and return a result depending on environment."""
+    marker = marker.strip().encode()
+    ops = []
+    op_starting = True
+    for token in tokenize(BytesIO(marker).readline):
+        # Unpack token
+        toktype, tokval, start, end, line = token
+        if toktype not in (NAME, OP, STRING, ENDMARKER, ENCODING):
+            raise SyntaxError('Type not supported "%s"' % tokval)
+
+        if op_starting:
+            op = _Operation(execution_context)
+            if len(ops) > 0:
+                last = ops[-1]
+                if isinstance(last, (_OR, _AND)) and not last.filled():
+                    last.right = op
+                else:
+                    ops.append(op)
+            else:
+                ops.append(op)
+            op_starting = False
+        else:
+            op = ops[-1]
+
+        if (toktype == ENDMARKER or
+            (toktype == NAME and tokval in ('and', 'or'))):
+            if toktype == NAME and tokval == 'and':
+                ops.append(_AND(ops.pop()))
+            elif toktype == NAME and tokval == 'or':
+                ops.append(_OR(ops.pop()))
+            op_starting = True
+            continue
+
+        if isinstance(op, (_OR, _AND)) and op.right is not None:
+            op = op.right
+
+        if ((toktype in (NAME, STRING) and tokval not in ('in', 'not'))
+            or (toktype == OP and tokval == '.')):
+            if op.op is None:
+                if op.left is None:
+                    op.left = tokval
+                else:
+                    op.left += tokval
+            else:
+                if op.right is None:
+                    op.right = tokval
+                else:
+                    op.right += tokval
+        elif toktype == OP or tokval in ('in', 'not'):
+            if tokval == 'in' and op.op == 'not':
+                op.op = 'not in'
+            else:
+                op.op = tokval
+
+    for op in ops:
+        if not op():
+            return False
+    return True
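+
+
+# Illustrative usage (a sketch; results depend on the running interpreter
+# unless execution_context overrides the variables):
+#
+#   interpret("python_version >= '3.2' and os.name == 'posix'")
+#   interpret("sys.platform == 'win32'",
+#             execution_context={'sys.platform': 'win32'})   # -> True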
diff --git a/Lib/packaging/metadata.py b/Lib/packaging/metadata.py
new file mode 100644
index 0000000..596eec7
--- /dev/null
+++ b/Lib/packaging/metadata.py
@@ -0,0 +1,552 @@
+"""Implementation of the Metadata for Python packages PEPs.
+
+Supports all metadata formats (1.0, 1.1, 1.2).
+"""
+
+import re
+import logging
+
+from io import StringIO
+from email import message_from_file
+from packaging import logger
+from packaging.markers import interpret
+from packaging.version import (is_valid_predicate, is_valid_version,
+                               is_valid_versions)
+from packaging.errors import (MetadataMissingError,
+                              MetadataConflictError,
+                              MetadataUnrecognizedVersionError)
+
+try:
+    # docutils is installed
+    from docutils.utils import Reporter
+    from docutils.parsers.rst import Parser
+    from docutils import frontend
+    from docutils import nodes
+
+    class SilentReporter(Reporter):
+
+        def __init__(self, source, report_level, halt_level, stream=None,
+                     debug=0, encoding='ascii', error_handler='replace'):
+            self.messages = []
+            Reporter.__init__(self, source, report_level, halt_level, stream,
+                              debug, encoding, error_handler)
+
+        def system_message(self, level, message, *children, **kwargs):
+            self.messages.append((level, message, children, kwargs))
+
+    _HAS_DOCUTILS = True
+except ImportError:
+    # docutils is not installed
+    _HAS_DOCUTILS = False
+
+# public API of this module
+__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
+
+# Encoding used for the PKG-INFO files
+PKG_INFO_ENCODING = 'utf-8'
+
+# preferred version. Hopefully will be changed
+# to 1.2 once PEP 345 is supported everywhere
+PKG_INFO_PREFERRED_VERSION = '1.0'
+
+_LINE_PREFIX = re.compile('\n       \|')
+_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'License')
+
+_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'License', 'Classifier', 'Download-URL', 'Obsoletes',
+               'Provides', 'Requires')
+
+_314_MARKERS = ('Obsoletes', 'Provides', 'Requires')
+
+_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'Maintainer', 'Maintainer-email', 'License',
+               'Classifier', 'Download-URL', 'Obsoletes-Dist',
+               'Project-URL', 'Provides-Dist', 'Requires-Dist',
+               'Requires-Python', 'Requires-External')
+
+_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
+                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
+                'Maintainer-email', 'Project-URL')
+
+_ALL_FIELDS = set()
+_ALL_FIELDS.update(_241_FIELDS)
+_ALL_FIELDS.update(_314_FIELDS)
+_ALL_FIELDS.update(_345_FIELDS)
+
+
+def _version2fieldlist(version):
+    if version == '1.0':
+        return _241_FIELDS
+    elif version == '1.1':
+        return _314_FIELDS
+    elif version == '1.2':
+        return _345_FIELDS
+    raise MetadataUnrecognizedVersionError(version)
+
+
+def _best_version(fields):
+    """Detect the best version depending on the fields used."""
+    def _has_marker(keys, markers):
+        for marker in markers:
+            if marker in keys:
+                return True
+        return False
+
+    keys = list(fields)
+    possible_versions = ['1.0', '1.1', '1.2']
+
+    # first, let's see if a field is not part of one of the versions
+    for key in keys:
+        if key not in _241_FIELDS and '1.0' in possible_versions:
+            possible_versions.remove('1.0')
+        if key not in _314_FIELDS and '1.1' in possible_versions:
+            possible_versions.remove('1.1')
+        if key not in _345_FIELDS and '1.2' in possible_versions:
+            possible_versions.remove('1.2')
+
+    # possible_versions contains the remaining qualified versions
+    if len(possible_versions) == 1:
+        return possible_versions[0]   # found!
+    elif len(possible_versions) == 0:
+        raise MetadataConflictError('Unknown metadata set')
+
+    # let's see if one unique marker is found
+    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
+    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
+    if is_1_1 and is_1_2:
+        raise MetadataConflictError('You used incompatible 1.1 and 1.2 fields')
+
+    # we have the choice, either 1.0, or 1.2
+    #   - 1.0 has a broken Summary field but works with all tools
+    #   - 1.1 is to be avoided
+    #   - 1.2 fixes Summary but is not widespread yet
+    if not is_1_1 and not is_1_2:
+        # we couldn't find any specific marker
+        if PKG_INFO_PREFERRED_VERSION in possible_versions:
+            return PKG_INFO_PREFERRED_VERSION
+    if is_1_1:
+        return '1.1'
+
+    # default marker when 1.0 is disqualified
+    return '1.2'
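+
+# For example, a field set using 'Requires-Dist' disqualifies 1.0 and 1.1,
+# so _best_version returns '1.2'; a set using only fields common to every
+# version falls back to PKG_INFO_PREFERRED_VERSION.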
+
+
+_ATTR2FIELD = {
+    'metadata_version': 'Metadata-Version',
+    'name': 'Name',
+    'version': 'Version',
+    'platform': 'Platform',
+    'supported_platform': 'Supported-Platform',
+    'summary': 'Summary',
+    'description': 'Description',
+    'keywords': 'Keywords',
+    'home_page': 'Home-page',
+    'author': 'Author',
+    'author_email': 'Author-email',
+    'maintainer': 'Maintainer',
+    'maintainer_email': 'Maintainer-email',
+    'license': 'License',
+    'classifier': 'Classifier',
+    'download_url': 'Download-URL',
+    'obsoletes_dist': 'Obsoletes-Dist',
+    'provides_dist': 'Provides-Dist',
+    'requires_dist': 'Requires-Dist',
+    'requires_python': 'Requires-Python',
+    'requires_external': 'Requires-External',
+    'requires': 'Requires',
+    'provides': 'Provides',
+    'obsoletes': 'Obsoletes',
+    'project_url': 'Project-URL',
+}
+
+_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
+_VERSIONS_FIELDS = ('Requires-Python',)
+_VERSION_FIELDS = ('Version',)
+_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
+               'Requires', 'Provides', 'Obsoletes-Dist',
+               'Provides-Dist', 'Requires-Dist', 'Requires-External',
+               'Project-URL', 'Supported-Platform')
+_LISTTUPLEFIELDS = ('Project-URL',)
+
+_ELEMENTSFIELD = ('Keywords',)
+
+_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
+
+
+class NoDefault:
+    """Marker object used for clean representation"""
+    def __repr__(self):
+        return '<NoDefault>'
+
+_MISSING = NoDefault()
+
+
+class Metadata:
+    """The metadata of a release.
+
+    Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
+    instantiate the class with one of these arguments (or none):
+    - *path*, the path to a METADATA file
+    - *fileobj* gives a file-like object with METADATA as content
+    - *mapping* is a dict-like object
+    """
+    # TODO document that execution_context and platform_dependent are used
+    # to filter on query, not when setting a key
+    # also document the mapping API and UNKNOWN default key
+
+    def __init__(self, path=None, platform_dependent=False,
+                 execution_context=None, fileobj=None, mapping=None):
+        self._fields = {}
+        self.requires_files = []
+        self.docutils_support = _HAS_DOCUTILS
+        self.platform_dependent = platform_dependent
+        self.execution_context = execution_context
+        if [path, fileobj, mapping].count(None) < 2:
+            raise TypeError('path, fileobj and mapping are exclusive')
+        if path is not None:
+            self.read(path)
+        elif fileobj is not None:
+            self.read_file(fileobj)
+        elif mapping is not None:
+            self.update(mapping)
+
+    def _set_best_version(self):
+        self._fields['Metadata-Version'] = _best_version(self._fields)
+
+    def _write_field(self, file, name, value):
+        file.write('%s: %s\n' % (name, value))
+
+    def __getitem__(self, name):
+        return self.get(name)
+
+    def __setitem__(self, name, value):
+        return self.set(name, value)
+
+    def __delitem__(self, name):
+        field_name = self._convert_name(name)
+        try:
+            del self._fields[field_name]
+        except KeyError:
+            raise KeyError(name)
+        self._set_best_version()
+
+    def __contains__(self, name):
+        return (name in self._fields or
+                self._convert_name(name) in self._fields)
+
+    def _convert_name(self, name):
+        if name in _ALL_FIELDS:
+            return name
+        name = name.replace('-', '_').lower()
+        return _ATTR2FIELD.get(name, name)
+
+    def _default_value(self, name):
+        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
+            return []
+        return 'UNKNOWN'
+
+    def _check_rst_data(self, data):
+        """Return warnings when the provided data has syntax errors."""
+        source_path = StringIO()
+        parser = Parser()
+        settings = frontend.OptionParser().get_default_values()
+        settings.tab_width = 4
+        settings.pep_references = None
+        settings.rfc_references = None
+        reporter = SilentReporter(source_path,
+                          settings.report_level,
+                          settings.halt_level,
+                          stream=settings.warning_stream,
+                          debug=settings.debug,
+                          encoding=settings.error_encoding,
+                          error_handler=settings.error_encoding_error_handler)
+
+        document = nodes.document(settings, reporter, source=source_path)
+        document.note_source(source_path, -1)
+        try:
+            parser.parse(data, document)
+        except AttributeError:
+            reporter.messages.append((-1, 'Could not finish the parsing.',
+                                      '', {}))
+
+        return reporter.messages
+
+    def _platform(self, value):
+        if not self.platform_dependent or ';' not in value:
+            return True, value
+        value, marker = value.split(';')
+        return interpret(marker, self.execution_context), value
+
+    def _remove_line_prefix(self, value):
+        return _LINE_PREFIX.sub('\n', value)
+
+    #
+    # Public API
+    #
+    def get_fullname(self):
+        """Return the distribution name with version"""
+        return '%s-%s' % (self['Name'], self['Version'])
+
+    def is_metadata_field(self, name):
+        """return True if name is a valid metadata key"""
+        name = self._convert_name(name)
+        return name in _ALL_FIELDS
+
+    def is_multi_field(self, name):
+        name = self._convert_name(name)
+        return name in _LISTFIELDS
+
+    def read(self, filepath):
+        """Read the metadata values from a file path."""
+        with open(filepath, 'r', encoding='utf-8') as fp:
+            self.read_file(fp)
+
+    def read_file(self, fileob):
+        """Read the metadata values from a file object."""
+        msg = message_from_file(fileob)
+        self._fields['Metadata-Version'] = msg['metadata-version']
+
+        for field in _version2fieldlist(self['Metadata-Version']):
+            if field in _LISTFIELDS:
+                # we can have multiple lines
+                values = msg.get_all(field)
+                if field in _LISTTUPLEFIELDS and values is not None:
+                    values = [tuple(value.split(',')) for value in values]
+                self.set(field, values)
+            else:
+                # single line
+                value = msg[field]
+                if value is not None and value != 'UNKNOWN':
+                    self.set(field, value)
+
+    def write(self, filepath):
+        """Write the metadata fields to filepath."""
+        with open(filepath, 'w', encoding='utf-8') as fp:
+            self.write_file(fp)
+
+    def write_file(self, fileobject):
+        """Write the PKG-INFO format data to a file object."""
+        self._set_best_version()
+        for field in _version2fieldlist(self['Metadata-Version']):
+            values = self.get(field)
+            if field in _ELEMENTSFIELD:
+                self._write_field(fileobject, field, ','.join(values))
+                continue
+            if field not in _LISTFIELDS:
+                if field == 'Description':
+                    values = values.replace('\n', '\n       |')
+                values = [values]
+
+            if field in _LISTTUPLEFIELDS:
+                values = [','.join(value) for value in values]
+
+            for value in values:
+                self._write_field(fileobject, field, value)
+
+    def update(self, other=None, **kwargs):
+        """Set metadata values from the given iterable `other` and kwargs.
+
+        Behavior is like `dict.update`: If `other` has a ``keys`` method,
+        they are looped over and ``self[key]`` is assigned ``other[key]``.
+        Else, ``other`` is an iterable of ``(key, value)`` iterables.
+
+        Keys that don't match a metadata field or that have an empty value are
+        dropped.
+        """
+        def _set(key, value):
+            if key in _ATTR2FIELD and value:
+                self.set(self._convert_name(key), value)
+
+        if other is None:
+            pass
+        elif hasattr(other, 'keys'):
+            for k in other.keys():
+                _set(k, other[k])
+        else:
+            for k, v in other:
+                _set(k, v)
+
+        if kwargs:
+            self.update(kwargs)
+
+    def set(self, name, value):
+        """Control then set a metadata field."""
+        name = self._convert_name(name)
+
+        if ((name in _ELEMENTSFIELD or name == 'Platform') and
+            not isinstance(value, (list, tuple))):
+            if isinstance(value, str):
+                value = [v.strip() for v in value.split(',')]
+            else:
+                value = []
+        elif (name in _LISTFIELDS and
+              not isinstance(value, (list, tuple))):
+            if isinstance(value, str):
+                value = [value]
+            else:
+                value = []
+
+        if logger.isEnabledFor(logging.WARNING):
+            if name in _PREDICATE_FIELDS and value is not None:
+                for v in value:
+                    # check that the values are valid predicates
+                    if not is_valid_predicate(v.split(';')[0]):
+                        logger.warning(
+                            '%r is not a valid predicate (field %r)',
+                            v, name)
+            # FIXME this rejects UNKNOWN, is that right?
+            elif name in _VERSIONS_FIELDS and value is not None:
+                if not is_valid_versions(value):
+                    logger.warning('%r is not a valid version (field %r)',
+                                   value, name)
+            elif name in _VERSION_FIELDS and value is not None:
+                if not is_valid_version(value):
+                    logger.warning('%r is not a valid version (field %r)',
+                                   value, name)
+
+        if name in _UNICODEFIELDS:
+            if name == 'Description':
+                value = self._remove_line_prefix(value)
+
+        self._fields[name] = value
+        self._set_best_version()
+
+    def get(self, name, default=_MISSING):
+        """Get a metadata field."""
+        name = self._convert_name(name)
+        if name not in self._fields:
+            if default is _MISSING:
+                default = self._default_value(name)
+            return default
+        if name in _UNICODEFIELDS:
+            value = self._fields[name]
+            return value
+        elif name in _LISTFIELDS:
+            value = self._fields[name]
+            if value is None:
+                return []
+            res = []
+            for val in value:
+                valid, val = self._platform(val)
+                if not valid:
+                    continue
+                if name not in _LISTTUPLEFIELDS:
+                    res.append(val)
+                else:
+                    # That's for Project-URL
+                    res.append((val[0], val[1]))
+            return res
+
+        elif name in _ELEMENTSFIELD:
+            valid, value = self._platform(self._fields[name])
+            if not valid:
+                return []
+            if isinstance(value, str):
+                return value.split(',')
+        valid, value = self._platform(self._fields[name])
+        if not valid:
+            return None
+        return value
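+
+    # For instance, on a fresh instance get('Requires-Dist') returns []
+    # (the default for list fields) and get('Summary') returns 'UNKNOWN'.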
+
+    def check(self, strict=False, restructuredtext=False):
+        """Check if the metadata is compliant. If strict is False then raise if
+        no Name or Version are provided"""
+        # XXX should check the versions (if the file was loaded)
+        missing, warnings = [], []
+
+        for attr in ('Name', 'Version'):  # required by PEP 345
+            if attr not in self:
+                missing.append(attr)
+
+        if strict and missing != []:
+            msg = 'missing required metadata: %s' % ', '.join(missing)
+            raise MetadataMissingError(msg)
+
+        for attr in ('Home-page', 'Author'):
+            if attr not in self:
+                missing.append(attr)
+
+        if _HAS_DOCUTILS and restructuredtext:
+            warnings.extend(self._check_rst_data(self['Description']))
+
+        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
+        if self['Metadata-Version'] != '1.2':
+            return missing, warnings
+
+        def is_valid_predicates(value):
+            for v in value:
+                if not is_valid_predicate(v.split(';')[0]):
+                    return False
+            return True
+
+        for fields, controller in ((_PREDICATE_FIELDS, is_valid_predicates),
+                                   (_VERSIONS_FIELDS, is_valid_versions),
+                                   (_VERSION_FIELDS, is_valid_version)):
+            for field in fields:
+                value = self.get(field, None)
+                if value is not None and not controller(value):
+                    warnings.append('Wrong value for %r: %s' % (field, value))
+
+        return missing, warnings
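+
+    # For example, check(strict=True) with no 'Version' raises
+    # MetadataMissingError, while the default non-strict mode only records
+    # it in the returned `missing` list.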
+
+    def todict(self):
+        """Return fields as a dict.
+
+        Field names will be converted to use the underscore-lowercase style
+        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
+        """
+        data = {
+            'metadata_version': self['Metadata-Version'],
+            'name': self['Name'],
+            'version': self['Version'],
+            'summary': self['Summary'],
+            'home_page': self['Home-page'],
+            'author': self['Author'],
+            'author_email': self['Author-email'],
+            'license': self['License'],
+            'description': self['Description'],
+            'keywords': self['Keywords'],
+            'platform': self['Platform'],
+            'classifier': self['Classifier'],
+            'download_url': self['Download-URL'],
+        }
+
+        if self['Metadata-Version'] == '1.2':
+            data['requires_dist'] = self['Requires-Dist']
+            data['requires_python'] = self['Requires-Python']
+            data['requires_external'] = self['Requires-External']
+            data['provides_dist'] = self['Provides-Dist']
+            data['obsoletes_dist'] = self['Obsoletes-Dist']
+            data['project_url'] = [','.join(url) for url in
+                                   self['Project-URL']]
+
+        elif self['Metadata-Version'] == '1.1':
+            data['provides'] = self['Provides']
+            data['requires'] = self['Requires']
+            data['obsoletes'] = self['Obsoletes']
+
+        return data
+
+    # Mapping API
+
+    def keys(self):
+        return _version2fieldlist(self['Metadata-Version'])
+
+    def __iter__(self):
+        for key in self.keys():
+            yield key
+
+    def values(self):
+        return [self[key] for key in list(self.keys())]
+
+    def items(self):
+        return [(key, self[key]) for key in list(self.keys())]
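+
+
+# Minimal usage sketch (hypothetical values):
+#
+#   metadata = Metadata(mapping={'name': 'spam', 'version': '1.0'})
+#   metadata['Metadata-Version']          # -> '1.0' (auto-detected)
+#   metadata.write('PKG-INFO')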
diff --git a/Lib/packaging/pypi/__init__.py b/Lib/packaging/pypi/__init__.py
new file mode 100644
index 0000000..5660c50
--- /dev/null
+++ b/Lib/packaging/pypi/__init__.py
@@ -0,0 +1,9 @@
+"""Low-level and high-level APIs to interact with project indexes."""
+
+__all__ = ['simple',
+           'xmlrpc',
+           'dist',
+           'errors',
+           'mirrors']
+
+from packaging.pypi.dist import ReleaseInfo, ReleasesList, DistInfo
diff --git a/Lib/packaging/pypi/base.py b/Lib/packaging/pypi/base.py
new file mode 100644
index 0000000..305fca9
--- /dev/null
+++ b/Lib/packaging/pypi/base.py
@@ -0,0 +1,48 @@
+"""Base class for index crawlers."""
+
+from packaging.pypi.dist import ReleasesList
+
+
+class BaseClient:
+    """Base class containing common methods for the index crawlers/clients"""
+
+    def __init__(self, prefer_final, prefer_source):
+        self._prefer_final = prefer_final
+        self._prefer_source = prefer_source
+        self._index = self
+
+    def _get_prefer_final(self, prefer_final=None):
+        """Return the prefer_final internal parameter or the specified one if
+        provided"""
+        if prefer_final:
+            return prefer_final
+        else:
+            return self._prefer_final
+
+    def _get_prefer_source(self, prefer_source=None):
+        """Return the prefer_source internal parameter or the specified one if
+        provided"""
+        if prefer_source:
+            return prefer_source
+        else:
+            return self._prefer_source
+
+    def _get_project(self, project_name):
+        """Return an project instance, create it if necessary"""
+        return self._projects.setdefault(project_name.lower(),
+                    ReleasesList(project_name, index=self._index))
+
+    def download_distribution(self, requirements, temp_path=None,
+                              prefer_source=None, prefer_final=None):
+        """Download a distribution from the last release according to the
+        requirements.
+
+        If temp_path is provided, download to this path, otherwise, create a
+        temporary location for the download and return it.
+        """
+        prefer_final = self._get_prefer_final(prefer_final)
+        prefer_source = self._get_prefer_source(prefer_source)
+        release = self.get_release(requirements, prefer_final)
+        if release:
+            dist = release.get_distribution(prefer_source=prefer_source)
+            return dist.download(temp_path)
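+
+# Concrete crawlers (the simple-index and xmlrpc clients in this package)
+# are expected to provide get_release(); download_distribution() then
+# resolves a release and delegates to DistInfo.download().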
diff --git a/Lib/packaging/pypi/dist.py b/Lib/packaging/pypi/dist.py
new file mode 100644
index 0000000..16510df
--- /dev/null
+++ b/Lib/packaging/pypi/dist.py
@@ -0,0 +1,547 @@
+"""Classes representing releases and distributions retrieved from indexes.
+
+A project (= unique name) can have several releases (= versions) and
+each release can have several distributions (= sdist and bdists).
+
+Release objects contain metadata-related information (see PEP 376);
+distribution objects contain download-related information.
+"""
+
+import sys
+import mimetypes
+import re
+import tempfile
+import urllib.request
+import urllib.parse
+import urllib.error
+import urllib.parse
+import hashlib
+from shutil import unpack_archive
+
+from packaging.errors import IrrationalVersionError
+from packaging.version import (suggest_normalized_version, NormalizedVersion,
+                               get_version_predicate)
+from packaging.metadata import Metadata
+from packaging.pypi.errors import (HashDoesNotMatch, UnsupportedHashName,
+                                   CantParseArchiveName)
+
+
+__all__ = ['ReleaseInfo', 'DistInfo', 'ReleasesList', 'get_infos_from_url']
+
+EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz .egg".split()
+MD5_HASH = re.compile(r'^.*#md5=([a-f0-9]+)$')
+DIST_TYPES = ['bdist', 'sdist']
+
+
+class IndexReference:
+    """Mixin used to store the index reference"""
+    def set_index(self, index=None):
+        self._index = index
+
+
+class ReleaseInfo(IndexReference):
+    """Represent a release of a project (a project with a specific version).
+    The release contains the metadata related to this specific version,
+    and is also a container for distribution-related information.
+
+    See the DistInfo class for more information about distributions.
+    """
+
+    def __init__(self, name, version, metadata=None, hidden=False,
+                 index=None, **kwargs):
+        """
+        :param name: the name of the distribution
+        :param version: the version of the distribution
+        :param metadata: the metadata fields of the release.
+        :type metadata: dict
+        :param kwargs: optional arguments for a new distribution.
+        """
+        self.set_index(index)
+        self.name = name
+        self._version = None
+        self.version = version
+        if metadata:
+            self.metadata = Metadata(mapping=metadata)
+        else:
+            self.metadata = None
+        self.dists = {}
+        self.hidden = hidden
+
+        if 'dist_type' in kwargs:
+            dist_type = kwargs.pop('dist_type')
+            self.add_distribution(dist_type, **kwargs)
+
+    def set_version(self, version):
+        try:
+            self._version = NormalizedVersion(version)
+        except IrrationalVersionError:
+            suggestion = suggest_normalized_version(version)
+            if suggestion:
+                self.version = suggestion
+            else:
+                raise IrrationalVersionError(version)
+
+    def get_version(self):
+        return self._version
+
+    version = property(get_version, set_version)
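+
+    # Note: set_version() retries with suggest_normalized_version() when
+    # NormalizedVersion rejects the string; only a version that cannot be
+    # normalized either way raises IrrationalVersionError.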
+
+    def fetch_metadata(self):
+        """If the metadata is not set, use the indexes to get it"""
+        if not self.metadata:
+            self._index.get_metadata(self.name, str(self.version))
+        return self.metadata
+
+    @property
+    def is_final(self):
+        """proxy to version.is_final"""
+        return self.version.is_final
+
+    def fetch_distributions(self):
+        if self.dists is None:
+            self._index.get_distributions(self.name, str(self.version))
+            if self.dists is None:
+                self.dists = {}
+        return self.dists
+
+    def add_distribution(self, dist_type='sdist', python_version=None,
+                         **params):
+        """Add distribution informations to this release.
+        If distribution information is already set for this distribution type,
+        add the given url paths to the distribution. This can be useful while
+        some of them fails to download.
+
+        :param dist_type: the distribution type (eg. "sdist", "bdist", etc.)
+        :param params: the fields to be passed to the distribution object
+                       (see the :class:DistInfo constructor).
+        """
+        if dist_type not in DIST_TYPES:
+            raise ValueError(dist_type)
+        if dist_type in self.dists:
+            self.dists[dist_type].add_url(**params)
+        else:
+            self.dists[dist_type] = DistInfo(self, dist_type,
+                                             index=self._index, **params)
+        if python_version:
+            self.dists[dist_type].python_version = python_version
+
+    def get_distribution(self, dist_type=None, prefer_source=True):
+        """Return a distribution.
+
+        If dist_type is set, look up that distribution type, acting as an
+        alias of __getitem__.
+
+        If prefer_source is True, look for a source distribution first, and
+        fall back to any existing distribution.
+        """
+        if len(self.dists) == 0:
+            raise LookupError()
+        if dist_type:
+            return self[dist_type]
+        if prefer_source:
+            if "sdist" in self.dists:
+                dist = self["sdist"]
+            else:
+                dist = next(iter(self.dists.values()))
+            return dist
+
+    def unpack(self, path=None, prefer_source=True):
+        """Unpack the distribution to the given path.
+
+        If no destination is given, create a temporary location.
+
+        Returns the location of the extracted files (root).
+        """
+        return self.get_distribution(prefer_source=prefer_source)\
+                   .unpack(path=path)
+
+    def download(self, temp_path=None, prefer_source=True):
+        """Download the distribution, using the requirements.
+
+        If more than one distribution match the requirements, use the last
+        version.
+        Download the distribution, and put it in the temp_path. If no temp_path
+        is given, creates and return one.
+
+        Returns the complete absolute path to the downloaded archive.
+        """
+        return self.get_distribution(prefer_source=prefer_source)\
+                   .download(path=temp_path)
+
+    def set_metadata(self, metadata):
+        if not self.metadata:
+            self.metadata = Metadata()
+        self.metadata.update(metadata)
+
+    def __getitem__(self, item):
+        """distributions are available using release["sdist"]"""
+        return self.dists[item]
+
+    def _check_is_comparable(self, other):
+        if not isinstance(other, ReleaseInfo):
+            raise TypeError("cannot compare %s and %s"
+                % (type(self).__name__, type(other).__name__))
+        elif self.name != other.name:
+            raise TypeError("cannot compare %s and %s"
+                % (self.name, other.name))
+
+    def __repr__(self):
+        return "<%s %s>" % (self.name, self.version)
+
+    def __eq__(self, other):
+        self._check_is_comparable(other)
+        return self.version == other.version
+
+    def __lt__(self, other):
+        self._check_is_comparable(other)
+        return self.version < other.version
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __gt__(self, other):
+        return not (self.__lt__(other) or self.__eq__(other))
+
+    def __le__(self, other):
+        return self.__eq__(other) or self.__lt__(other)
+
+    def __ge__(self, other):
+        return self.__eq__(other) or self.__gt__(other)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    __hash__ = object.__hash__
+
+
+class DistInfo(IndexReference):
+    """Represents a distribution retrieved from an index (sdist, bdist, ...)
+    """
+
+    def __init__(self, release, dist_type=None, url=None, hashname=None,
+                 hashval=None, is_external=True, python_version=None,
+                 index=None):
+        """Create a new instance of DistInfo.
+
+        :param release: the release this distribution belongs to.
+        :param dist_type: the type of the dist (eg. source, bin-*, etc.)
+        :param url: URL where we found this distribution
+        :param hashname: the name of the hash we want to use. Refer to the
+                         hashlib.new documentation for more information.
+        :param hashval: the hash value.
+        :param is_external: we need to know if the provided url comes from
+                            an index browsing, or from an external resource.
+
+        """
+        self.set_index(index)
+        self.release = release
+        self.dist_type = dist_type
+        self.python_version = python_version
+        self._unpacked_dir = None
+        # set the downloaded path to None by default. The goal here
+        # is to not download distributions multiple times
+        self.downloaded_location = None
+        # We store urls in dicts because we need a bit more information
+        # than the URL itself; it is used later to find the right url.
+        # We have two _url* attributes: urls contains a list of dicts
+        # describing the different urls, and _url caches the chosen url,
+        # so that the selection process is not run multiple times.
+        self.urls = []
+        self._url = None
+        self.add_url(url, hashname, hashval, is_external)
+
+    def add_url(self, url=None, hashname=None, hashval=None, is_external=True):
+        """Add a new url to the list of urls"""
+        if hashname is not None:
+            try:
+                hashlib.new(hashname)
+            except ValueError:
+                raise UnsupportedHashName(hashname)
+        if url not in [u['url'] for u in self.urls]:
+            self.urls.append({
+                'url': url,
+                'hashname': hashname,
+                'hashval': hashval,
+                'is_external': is_external,
+            })
+            # reset the url selection process
+            self._url = None
+
+    @property
+    def url(self):
+        """Pick up the right url for the list of urls in self.urls"""
+        # We return internal urls over externals.
+        # If there is more than one internal or external, return the first
+        # one.
+        if self._url is None:
+            if len(self.urls) > 1:
+                internals_urls = [u for u in self.urls \
+                                  if u['is_external'] == False]
+                if len(internals_urls) >= 1:
+                    self._url = internals_urls[0]
+            if self._url is None:
+                self._url = self.urls[0]
+        return self._url
+
+    @property
+    def is_source(self):
+        """return if the distribution is a source one or not"""
+        return self.dist_type == 'sdist'
+
+    def download(self, path=None):
+        """Download the distribution to a path, and return it.
+
+        If the path is given in path, use this, otherwise, generates a new one
+        Return the download location.
+        """
+        if path is None:
+            path = tempfile.mkdtemp()
+
+        # if we have not downloaded it yet, do so.
+        if self.downloaded_location is None:
+            url = self.url['url']
+            archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]
+            filename, headers = urllib.request.urlretrieve(url,
+                                                   path + "/" + archive_name)
+            self.downloaded_location = filename
+            self._check_md5(filename)
+        return self.downloaded_location
+
+    def unpack(self, path=None):
+        """Unpack the distribution to the given path.
+
+        If no destination is given, create a temporary location.
+
+        Returns the location of the extracted files (root).
+        """
+        if not self._unpacked_dir:
+            if path is None:
+                path = tempfile.mkdtemp()
+
+            filename = self.download(path)
+            unpack_archive(filename, path)
+            self._unpacked_dir = path
+
+        return path
+
+    def _check_md5(self, filename):
+        """Check that the md5 checksum of the given file matches the one in
+        url param"""
+        hashname = self.url['hashname']
+        expected_hashval = self.url['hashval']
+        if None not in (expected_hashval, hashname):
+            with open(filename, 'rb') as f:
+                hashval = hashlib.new(hashname)
+                hashval.update(f.read())
+
+            if hashval.hexdigest() != expected_hashval:
+                raise HashDoesNotMatch("got %s instead of %s"
+                    % (hashval.hexdigest(), expected_hashval))
+
+    def __repr__(self):
+        if self.release is None:
+            return "<? ? %s>" % self.dist_type
+
+        return "<%s %s %s>" % (
+            self.release.name, self.release.version, self.dist_type or "")
+
+
+class ReleasesList(IndexReference):
+    """A container of Release.
+
+    Provides useful methods and facilities to sort and filter releases.
+    """
+    def __init__(self, name, releases=None, contains_hidden=False, index=None):
+        self.set_index(index)
+        self.releases = []
+        self.name = name
+        self.contains_hidden = contains_hidden
+        if releases:
+            self.add_releases(releases)
+
+    def fetch_releases(self):
+        self._index.get_releases(self.name)
+        return self.releases
+
+    def filter(self, predicate):
+        """Filter and return a subset of releases matching the given predicate.
+        """
+        return ReleasesList(self.name, [release for release in self.releases
+                                        if predicate.match(release.version)],
+                                        index=self._index)
+
+    def get_last(self, requirements, prefer_final=None):
+        """Return the "last" release, that satisfy the given predicates.
+
+        "last" is defined by the version number of the releases, you also could
+        set prefer_final parameter to True or False to change the order results
+        """
+        predicate = get_version_predicate(requirements)
+        releases = self.filter(predicate)
+        if len(releases) == 0:
+            return None
+        releases.sort_releases(prefer_final, reverse=True)
+        return releases[0]
+
+    def add_releases(self, releases):
+        """Add releases in the release list.
+
+        :param releases: a list of ReleaseInfo objects.
+        """
+        for r in releases:
+            self.add_release(release=r)
+
+    def add_release(self, version=None, dist_type='sdist', release=None,
+                    **dist_args):
+        """Add a release to the list.
+
+        The release can be passed in the `release` parameter, in which case
+        it will be crawled to extract the useful information if necessary;
+        alternatively, the release information can be passed directly in the
+        `version` and `dist_type` arguments.
+
+        Other keyword arguments are forwarded to the distribution creation
+        (eg. the arguments of the DistInfo constructor).
+        """
+        if release:
+            if release.name.lower() != self.name.lower():
+                raise ValueError("%s is not the same project as %s" %
+                                 (release.name, self.name))
+            version = str(release.version)
+
+            if version not in self.get_versions():
+                # append only if it does not already exist
+                self.releases.append(release)
+            for dist in release.dists.values():
+                for url in dist.urls:
+                    self.add_release(version, dist.dist_type, **url)
+        else:
+            matches = [r for r in self.releases
+                       if str(r.version) == version and r.name == self.name]
+            if not matches:
+                release = ReleaseInfo(self.name, version, index=self._index)
+                self.releases.append(release)
+            else:
+                release = matches[0]
+
+            release.add_distribution(dist_type=dist_type, **dist_args)
+
+    def sort_releases(self, prefer_final=False, reverse=True, *args, **kwargs):
+        """Sort the results with the given properties.
+
+        The `prefer_final` argument can be used to specify if final
+        distributions (e.g. not dev, beta or alpha) should be preferred or not.
+
+        Results can be inverted by using `reverse`.
+
+        Any other parameter provided will be forwarded to the sorted call. You
+        cannot redefine the key argument of "sorted" here, as it is used
+        internally to sort the releases.
+        """
+
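+        # e.g. with prefer_final=True, each release is keyed by the tuple
+        # (release.is_final, release.version)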
+        sort_by = []
+        if prefer_final:
+            sort_by.append("is_final")
+        sort_by.append("version")
+
+        self.releases.sort(
+            key=lambda i: tuple(getattr(i, arg) for arg in sort_by),
+            reverse=reverse, *args, **kwargs)
+
+    def get_release(self, version):
+        """Return a release from its version."""
+        matches = [r for r in self.releases if str(r.version) == version]
+        if len(matches) != 1:
+            raise KeyError(version)
+        return matches[0]
+
+    def get_versions(self):
+        """Return a list of releases versions contained"""
+        return [str(r.version) for r in self.releases]
+
+    def __getitem__(self, key):
+        return self.releases[key]
+
+    def __len__(self):
+        return len(self.releases)
+
+    def __repr__(self):
+        string = 'Project "%s"' % self.name
+        if self.get_versions():
+            string += ' versions: %s' % ', '.join(self.get_versions())
+        return '<%s>' % string
+
+
+def get_infos_from_url(url, probable_dist_name=None, is_external=True):
+    """Get useful informations from an URL.
+
+    Return a dict of (name, version, url, hashtype, hash, is_external)
+
+    :param url: complete url of the distribution
+    :param probable_dist_name: A probable name of the project.
+    :param is_external: Tell if the url comes from an index or from
+                        an external URL.
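+
+    For instance (illustrative; assumes ".tar.gz" is a registered extension)::
+
+        >>> infos = get_infos_from_url(
+        ...     "http://example.org/dists/Foo-1.1.tar.gz")
+        >>> infos['name'], infos['version']
+        ('foo', '1.1')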
+    """
+    # if the url contains a md5 hash, get it.
+    md5_hash = None
+    match = MD5_HASH.match(url)
+    if match is not None:
+        md5_hash = match.group(1)
+        # remove the hash
+        url = url.replace("#md5=%s" % md5_hash, "")
+
+    # parse the archive name to find dist name and version
+    archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]
+    extension_matched = False
+    # remove the extension from the name
+    for ext in EXTENSIONS:
+        if archive_name.endswith(ext):
+            archive_name = archive_name[:-len(ext)]
+            extension_matched = True
+
+    name, version = split_archive_name(archive_name)
+    if extension_matched:
+        return {'name': name,
+                'version': version,
+                'url': url,
+                'hashname': "md5",
+                'hashval': md5_hash,
+                'is_external': is_external,
+                'dist_type': 'sdist'}
+
+
+def split_archive_name(archive_name, probable_name=None):
+    """Split an archive name into two parts: name and version.
+
+    Return the tuple (name, version)
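+
+    For instance (illustrative)::
+
+        >>> split_archive_name("Foo-1.1")
+        ('foo', '1.1')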
+    """
+    # Try to determine which part is the name and which is the version using
+    # the "-" separator. Take the larger part to be the version number, then
+    # reduce it if this does not work.
+    def eager_split(archive, maxsplit=2):
+        # split using the "-" separator
+        splits = archive.rsplit("-", maxsplit)
+        name = splits[0]
+        version = "-".join(splits[1:])
+        if version.startswith("-"):
+            version = version[1:]
+        if suggest_normalized_version(version) is None and maxsplit >= 0:
+            # we didn't get a good version number: recurse!
+            return eager_split(archive, maxsplit - 1)
+        else:
+            return name, version
+    if probable_name is not None:
+        probable_name = probable_name.lower()
+    name = None
+    if probable_name is not None and probable_name in archive_name:
+        # we get the name from probable_name, if given.
+        name = probable_name
+        # cut the name out of the archive name to get the version part
+        idx = archive_name.find(name) + len(name)
+        version = archive_name[idx:].lstrip('-')
+    else:
+        name, version = eager_split(archive_name)
+
+    version = suggest_normalized_version(version)
+    if version is not None and name != "":
+        return name.lower(), version
+    else:
+        raise CantParseArchiveName(archive_name)
diff --git a/Lib/packaging/pypi/errors.py b/Lib/packaging/pypi/errors.py
new file mode 100644
index 0000000..2191ac1
--- /dev/null
+++ b/Lib/packaging/pypi/errors.py
@@ -0,0 +1,39 @@
+"""Exceptions raised by packaging.pypi code."""
+
+from packaging.errors import PackagingPyPIError
+
+
+class ProjectNotFound(PackagingPyPIError):
+    """Project has not been found"""
+
+
+class DistributionNotFound(PackagingPyPIError):
+    """The release has not been found"""
+
+
+class ReleaseNotFound(PackagingPyPIError):
+    """The release has not been found"""
+
+
+class CantParseArchiveName(PackagingPyPIError):
+    """An archive name can't be parsed to find distribution name and version"""
+
+
+class DownloadError(PackagingPyPIError):
+    """An error has occurs while downloading"""
+
+
+class HashDoesNotMatch(DownloadError):
+    """Compared hashes does not match"""
+
+
+class UnsupportedHashName(PackagingPyPIError):
+    """A unsupported hashname has been used"""
+
+
+class UnableToDownload(PackagingPyPIError):
+    """All mirrors have been tried, without success"""
+
+
+class InvalidSearchField(PackagingPyPIError):
+    """An invalid search field has been used"""
diff --git a/Lib/packaging/pypi/mirrors.py b/Lib/packaging/pypi/mirrors.py
new file mode 100644
index 0000000..a646acf
--- /dev/null
+++ b/Lib/packaging/pypi/mirrors.py
@@ -0,0 +1,52 @@
+"""Utilities related to the mirror infrastructure defined in PEP 381."""
+
+from string import ascii_lowercase
+import socket
+
+DEFAULT_MIRROR_URL = "last.pypi.python.org"
+
+
+def get_mirrors(hostname=None):
+    """Return the list of mirrors from the last record found on the DNS
+    entry::
+
+    >>> from packaging.pypi.mirrors import get_mirrors
+    >>> get_mirrors()
+    ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org',
+    'd.pypi.python.org']
+
+    """
+    if hostname is None:
+        hostname = DEFAULT_MIRROR_URL
+
+    # return the last mirror registered on PyPI.
+    try:
+        hostname = socket.gethostbyname_ex(hostname)[0]
+    except socket.gaierror:
+        return []
+    end_letter = hostname.split(".", 1)
+
+    # determine the list from the last one.
+    return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])]
+
+
+def string_range(last):
+    """Compute the range of string between "a" and last.
+
+    This works for simple "a to z" lists, but also for "a to zz" lists.
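+
+    For instance (illustrative)::
+
+        >>> list(string_range("c"))
+        ['a', 'b', 'c']
+        >>> list(string_range("ab"))[24:]
+        ['y', 'z', 'aa', 'ab']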
+    """
+    for k in range(len(last)):
+        for x in product(ascii_lowercase, repeat=(k + 1)):
+            result = ''.join(x)
+            yield result
+            if result == last:
+                return
+
+
+def product(*args, **kwds):
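+    # local stand-in equivalent to itertools.product, e.g.
+    # product('ab', repeat=2) -> ('a','a'), ('a','b'), ('b','a'), ('b','b')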
+    pools = [tuple(arg) for arg in args] * kwds.get('repeat', 1)
+    result = [[]]
+    for pool in pools:
+        result = [x + [y] for x in result for y in pool]
+    for prod in result:
+        yield tuple(prod)
diff --git a/Lib/packaging/pypi/simple.py b/Lib/packaging/pypi/simple.py
new file mode 100644
index 0000000..ee7a113
--- /dev/null
+++ b/Lib/packaging/pypi/simple.py
@@ -0,0 +1,462 @@
+"""Spider using the screen-scraping "simple" PyPI API.
+
+This module contains the class Crawler, a simple spider that
+can be used to find and retrieve distributions from a project index
+(like the Python Package Index), using its so-called simple API (see
+reference implementation available at http://pypi.python.org/simple/).
+"""
+
+import http.client
+import re
+import socket
+import sys
+import urllib.request
+import urllib.parse
+import urllib.error
+import os
+import base64
+
+
+from fnmatch import translate
+from packaging import logger
+from packaging.metadata import Metadata
+from packaging.version import get_version_predicate
+from packaging import __version__ as packaging_version
+from packaging.pypi.base import BaseClient
+from packaging.pypi.dist import (ReleasesList, EXTENSIONS,
+                                  get_infos_from_url, MD5_HASH)
+from packaging.pypi.errors import (PackagingPyPIError, DownloadError,
+                                    UnableToDownload, CantParseArchiveName,
+                                    ReleaseNotFound, ProjectNotFound)
+from packaging.pypi.mirrors import get_mirrors
+
+__all__ = ['Crawler', 'DEFAULT_SIMPLE_INDEX_URL']
+
+# -- Constants -----------------------------------------------
+DEFAULT_SIMPLE_INDEX_URL = "http://a.pypi.python.org/simple/"
+DEFAULT_HOSTS = ("*",)
+SOCKET_TIMEOUT = 15
+USER_AGENT = "Python-urllib/%s packaging/%s" % (
+    sys.version[:3], packaging_version)
+
+# -- Regexps -------------------------------------------------
+EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
+HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
+URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
+
+# This pattern matches a character entity reference (a decimal numeric
+# references, a hexadecimal numeric reference, or a named reference).
+ENTITY_SUB = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
+REL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
+
+
+def socket_timeout(timeout=SOCKET_TIMEOUT):
+    """Decorator to add a socket timeout when requesting pages on PyPI.
+    """
+    def _socket_timeout(func):
+        def wrapper(self, *args, **kwargs):
+            old_timeout = socket.getdefaulttimeout()
+            if hasattr(self, "_timeout"):
+                timeout = self._timeout
+            socket.setdefaulttimeout(timeout)
+            try:
+                return func(self, *args, **kwargs)
+            finally:
+                socket.setdefaulttimeout(old_timeout)
+        return wrapper
+    return _socket_timeout
+
+
+def with_mirror_support():
+    """Decorator that makes the mirroring support easier"""
+    def wrapper(func):
+        def wrapped(self, *args, **kwargs):
+            try:
+                return func(self, *args, **kwargs)
+            except DownloadError:
+                # if an error occurs, try with the next index_url
+                if self._mirrors_tries >= self._mirrors_max_tries:
+                    try:
+                        self._switch_to_next_mirror()
+                    except KeyError:
+                        raise UnableToDownload("Tried all mirrors")
+                else:
+                    self._mirrors_tries += 1
+                self._projects.clear()
+                return wrapped(self, *args, **kwargs)
+        return wrapped
+    return wrapper
+
+
+class Crawler(BaseClient):
+    """Provides useful tools to request the Python Package Index simple API.
+
+    You can specify both mirrors and mirrors_url, but mirrors_url will only be
+    used if mirrors is set to None.
+
+    :param index_url: the url of the simple index to search on.
+    :param prefer_final: if the version is not mentioned, and the last
+                         version is not a "final" one (alpha, beta, etc.),
+                         pick up the last final version.
+    :param prefer_source: if the distribution type is not mentioned, pick up
+                          the source one if available.
+    :param follow_externals: tell if following external links is needed or
+                             not. Default is False.
+    :param hosts: a list of hosts allowed to be processed while using
+                  follow_externals=True. Default behavior is to follow all
+                  hosts.
+    :param mirrors_url: the url to look on for DNS records giving mirror
+                        addresses.
+    :param mirrors: a list of mirrors (see PEP 381).
+    :param timeout: time in seconds before considering that a url has timed
+                    out.
+    :param mirrors_max_tries: number of times to try requesting information
+                              on mirrors before switching.
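+
+    A minimal usage sketch (the project name is hypothetical)::
+
+        >>> crawler = Crawler()
+        >>> releases = crawler.get_releases("FooBar")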
+    """
+
+    def __init__(self, index_url=DEFAULT_SIMPLE_INDEX_URL, prefer_final=False,
+                 prefer_source=True, hosts=DEFAULT_HOSTS,
+                 follow_externals=False, mirrors_url=None, mirrors=None,
+                 timeout=SOCKET_TIMEOUT, mirrors_max_tries=0):
+        super().__init__(prefer_final, prefer_source)
+        self.follow_externals = follow_externals
+
+        # mirroring attributes.
+        parsed = urllib.parse.urlparse(index_url)
+        self.scheme = parsed[0]
+        if self.scheme == 'file':
+            ender = os.path.sep
+        else:
+            ender = '/'
+        if not index_url.endswith(ender):
+            index_url += ender
+        # if no mirrors are defined, use the method described in PEP 381.
+        if mirrors is None:
+            mirrors = get_mirrors(mirrors_url)
+        self._mirrors = set(mirrors)
+        self._mirrors_used = set()
+        self.index_url = index_url
+        self._mirrors_max_tries = mirrors_max_tries
+        self._mirrors_tries = 0
+        self._timeout = timeout
+
+        # create a regexp to match all given hosts
+        self._allowed_hosts = re.compile('|'.join(map(translate, hosts))).match
+
+        # we keep an index of pages we have processed, in order to avoid
+        # scanning them multiple times (e.g. when multiple pages point to
+        # the same one)
+        self._processed_urls = []
+        self._projects = {}
+
+    @with_mirror_support()
+    def search_projects(self, name=None, **kwargs):
+        """Search the index for projects containing the given name.
+
+        Return a list of names.
+        """
+        with self._open_url(self.index_url) as index:
+            if '*' in name:
+                name = name.replace('*', '.*')
+            else:
+                name = "%s%s%s" % ('*.?', name, '*.?')
+            name = name.replace('*', '[^<]*')  # avoid matching end tag
+            projectname = re.compile('<a[^>]*>(%s)</a>' % name, re.I)
+            matching_projects = []
+
+            index_content = index.read()
+
+        # FIXME should use bytes I/O and regexes instead of decoding
+        index_content = index_content.decode()
+
+        for match in projectname.finditer(index_content):
+            project_name = match.group(1)
+            matching_projects.append(self._get_project(project_name))
+        return matching_projects
+
+    def get_releases(self, requirements, prefer_final=None,
+                     force_update=False):
+        """Search for releases and return a ReleaseList object containing
+        the results.
+        """
+        predicate = get_version_predicate(requirements)
+        if predicate.name.lower() in self._projects and not force_update:
+            return self._projects.get(predicate.name.lower())
+        prefer_final = self._get_prefer_final(prefer_final)
+        logger.info('reading info on PyPI about %s', predicate.name)
+        self._process_index_page(predicate.name)
+
+        if predicate.name.lower() not in self._projects:
+            raise ProjectNotFound()
+
+        releases = self._projects.get(predicate.name.lower())
+        releases.sort_releases(prefer_final=prefer_final)
+        return releases
+
+    def get_release(self, requirements, prefer_final=None):
+        """Return only one release that fulfill the given requirements"""
+        predicate = get_version_predicate(requirements)
+        release = self.get_releases(predicate, prefer_final)\
+                      .get_last(predicate)
+        if not release:
+            raise ReleaseNotFound("No release matches the given criterias")
+        return release
+
+    def get_distributions(self, project_name, version):
+        """Return the distributions found on the index for the specific given
+        release"""
+        # as the default behavior of get_release is to return a release
+        # containing the distributions, just alias it.
+        return self.get_release("%s (%s)" % (project_name, version))
+
+    def get_metadata(self, project_name, version):
+        """Return the metadatas from the simple index.
+
+        Currently, download one archive, extract it and use the PKG-INFO file.
+        """
+        release = self.get_distributions(project_name, version)
+        if not release.metadata:
+            location = release.get_distribution().unpack()
+            pkg_info = os.path.join(location, 'PKG-INFO')
+            release.metadata = Metadata(pkg_info)
+        return release
+
+    def _switch_to_next_mirror(self):
+        """Switch to the next mirror (eg. point self.index_url to the next
+        mirror url.
+
+        Raise a KeyError if all mirrors have been tried.
+        """
+        self._mirrors_used.add(self.index_url)
+        index_url = self._mirrors.pop()
+        if not ("http://" or "https://" or "file://") in index_url:
+            index_url = "http://%s" % index_url
+
+        if not index_url.endswith("/simple"):
+            index_url = "%s/simple/" % index_url
+
+        self.index_url = index_url
+
+    def _is_browsable(self, url):
+        """Tell if the given URL can be browsed or not.
+
+        It uses the follow_externals and the hosts list to tell if the given
+        url is browsable or not.
+        """
+        # if _index_url is contained in the given URL, we are browsing the
+        # index, and it's always "browsable".
+        # local files are always considered browsable resources
+        if self.index_url in url or urllib.parse.urlparse(url)[0] == "file":
+            return True
+        elif self.follow_externals:
+            # urlparse()[1] is the netloc
+            return bool(self._allowed_hosts(urllib.parse.urlparse(url)[1]))
+        return False
+
+    def _is_distribution(self, link):
+        """Tell if the given URL matches to a distribution name or not.
+        """
+        #XXX find a better way to check that links are distributions
+        # Using a regexp ?
+        for ext in EXTENSIONS:
+            if ext in link:
+                return True
+        return False
+
+    def _register_release(self, release=None, release_info=None):
+        """Register a new release.
+
+        Either a release object or a dict of release_info can be provided;
+        the preferred way (i.e. the quicker) is the dict one.
+
+        Return the list of existing releases for the given project.
+        """
+        if release_info is None:
+            release_info = {}
+        # Check if the project already has a list of releases (referring to
+        # the project name). If not, create a new release list.
+        # Then, add the release to the list.
+        if release:
+            name = release.name
+        else:
+            name = release_info['name']
+        if not name.lower() in self._projects:
+            self._projects[name.lower()] = ReleasesList(name, index=self._index)
+
+        if release:
+            self._projects[name.lower()].add_release(release=release)
+        else:
+            name = release_info.pop('name')
+            version = release_info.pop('version')
+            dist_type = release_info.pop('dist_type')
+            self._projects[name.lower()].add_release(version, dist_type,
+                                                     **release_info)
+        return self._projects[name.lower()]
+
+    def _process_url(self, url, project_name=None, follow_links=True):
+        """Process an url and search for distributions packages.
+
+        For each URL found, if it's a download link, create a distribution
+        object.  If it's a homepage and we can follow links, process it too.
+
+        :param url: the url to process
+        :param project_name: the project name we are searching for.
+        :param follow_links: whether to follow the links found on the page
+                             (i.e. run this method recursively on them);
+                             links are never followed more than one level
+                             deep.
+        """
+        with self._open_url(url) as f:
+            base_url = f.url
+            if url not in self._processed_urls:
+                self._processed_urls.append(url)
+                link_matcher = self._get_link_matcher(url)
+                for link, is_download in link_matcher(f.read().decode(), base_url):
+                    if link not in self._processed_urls:
+                        if self._is_distribution(link) or is_download:
+                            self._processed_urls.append(link)
+                            # it's a distribution, so create a dist object
+                            try:
+                                infos = get_infos_from_url(link, project_name,
+                                            is_external=self.index_url not in url)
+                            except CantParseArchiveName as e:
+                                logger.warning(
+                                    "version has not been parsed: %s", e)
+                            else:
+                                self._register_release(release_info=infos)
+                        else:
+                            if self._is_browsable(link) and follow_links:
+                                self._process_url(link, project_name,
+                                    follow_links=False)
+
+    def _get_link_matcher(self, url):
+        """Returns the right link matcher function of the given url
+        """
+        if self.index_url in url:
+            return self._simple_link_matcher
+        else:
+            return self._default_link_matcher
+
+    def _get_full_url(self, url, base_url):
+        return urllib.parse.urljoin(base_url, self._htmldecode(url))
+
+    def _simple_link_matcher(self, content, base_url):
+        """Yield all links with a rel="download" or rel="homepage".
+
+        This matches the simple index requirements for matching links.
+        If follow_externals is set to False, don't yield the external
+        URLs.
+
+        :param content: the content of the page we want to parse
+        :param base_url: the url of this page.
+        """
+        for match in HREF.finditer(content):
+            url = self._get_full_url(match.group(1), base_url)
+            if MD5_HASH.match(url):
+                yield (url, True)
+
+        for match in REL.finditer(content):
+            # search for rel links.
+            tag, rel = match.groups()
+            rels = [s.strip() for s in rel.lower().split(',')]
+            if 'homepage' in rels or 'download' in rels:
+                for match in HREF.finditer(tag):
+                    url = self._get_full_url(match.group(1), base_url)
+                    if 'download' in rels or self._is_browsable(url):
+                        # yield a list of (url, is_download)
+                        yield (url, 'download' in rels)
+
+    def _default_link_matcher(self, content, base_url):
+        """Yield all links found on the page.
+        """
+        for match in HREF.finditer(content):
+            url = self._get_full_url(match.group(1), base_url)
+            if self._is_browsable(url):
+                yield (url, False)
+
+    @with_mirror_support()
+    def _process_index_page(self, name):
+        """Find and process a PyPI page for the given project name.
+
+        :param name: the name of the project to find the page
+        """
+        # Browse and index the content of the given PyPI page.
+        if self.scheme == 'file':
+            ender = os.path.sep
+        else:
+            ender = '/'
+        url = self.index_url + name + ender
+        self._process_url(url, name)
+
+    @socket_timeout()
+    def _open_url(self, url):
+        """Open a urllib2 request, handling HTTP authentication, and local
+        files support.
+
+        """
+        scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)
+
+        # authentication stuff
+        if scheme in ('http', 'https'):
+            auth, host = urllib.parse.splituser(netloc)
+        else:
+            auth = None
+
+        # add index.html automatically for filesystem paths
+        if scheme == 'file':
+            if url.endswith(os.path.sep):
+                url += "index.html"
+
+        # add authorization headers if auth is provided
+        if auth:
+            auth = "Basic " + \
+                urllib.parse.unquote(auth).encode('base64').strip()
+            new_url = urllib.parse.urlunparse((
+                scheme, host, path, params, query, frag))
+            request = urllib.request.Request(new_url)
+            request.add_header("Authorization", auth)
+        else:
+            request = urllib.request.Request(url)
+        request.add_header('User-Agent', USER_AGENT)
+        try:
+            fp = urllib.request.urlopen(request)
+        except (ValueError, http.client.InvalidURL) as v:
+            msg = ' '.join([str(arg) for arg in v.args])
+            raise PackagingPyPIError('%s %s' % (url, msg))
+        except urllib.error.HTTPError as v:
+            return v
+        except urllib.error.URLError as v:
+            raise DownloadError("Download error for %s: %s" % (url, v.reason))
+        except http.client.BadStatusLine as v:
+            raise DownloadError('%s returned a bad status line. '
+                'The server might be down, %s' % (url, v.line))
+        except http.client.HTTPException as v:
+            raise DownloadError("Download error for %s: %s" % (url, v))
+        except socket.timeout:
+            raise DownloadError("The server timeouted")
+
+        if auth:
+            # Put authentication info back into request URL if same host,
+            # so that links found on the page will work
+            s2, h2, path2, param2, query2, frag2 = \
+                urllib.parse.urlparse(fp.url)
+            if s2 == scheme and h2 == host:
+                fp.url = urllib.parse.urlunparse(
+                    (s2, netloc, path2, param2, query2, frag2))
+        return fp
+
+    def _decode_entity(self, match):
+        what = match.group(1)
+        if what.startswith('#x'):
+            what = int(what[2:], 16)
+        elif what.startswith('#'):
+            what = int(what[1:])
+        else:
+            from html.entities import name2codepoint
+            what = name2codepoint.get(what, match.group(0))
+        return chr(what)
+
+    def _htmldecode(self, text):
+        """Decode HTML entities in the given text."""
+        return ENTITY_SUB(self._decode_entity, text)
diff --git a/Lib/packaging/pypi/wrapper.py b/Lib/packaging/pypi/wrapper.py
new file mode 100644
index 0000000..945d08a
--- /dev/null
+++ b/Lib/packaging/pypi/wrapper.py
@@ -0,0 +1,99 @@
+"""Convenient client for all PyPI APIs.
+
+This module provides a ClientWrapper class which will use the "simple"
+or XML-RPC API to request information or files from an index.
+"""
+
+from packaging.pypi import simple, xmlrpc
+
+_WRAPPER_MAPPINGS = {'get_release': 'simple',
+                     'get_releases': 'simple',
+                     'search_projects': 'simple',
+                     'get_metadata': 'xmlrpc',
+                     'get_distributions': 'simple'}
+
+_WRAPPER_INDEXES = {'xmlrpc': xmlrpc.Client,
+                    'simple': simple.Crawler}
+
+
+def switch_index_if_fails(func, wrapper):
+    """Decorator that switch of index (for instance from xmlrpc to simple)
+    if the first mirror return an empty list or raises an exception.
+    """
+    def decorator(*args, **kwargs):
+        retry = True
+        exception = None
+        methods = [func]
+        for f in wrapper._indexes.values():
+            if f != func.__self__ and hasattr(f, func.__name__):
+                methods.append(getattr(f, func.__name__))
+        for method in methods:
+            try:
+                response = method(*args, **kwargs)
+                retry = False
+            except Exception as e:
+                exception = e
+            if not retry:
+                break
+        if retry and exception:
+            raise exception
+        else:
+            return response
+    return decorator
+
+
+class ClientWrapper:
+    """Wrapper around simple and xmlrpc clients,
+
+    Choose the best implementation to use depending the needs, using the given
+    mappings.
+    If one of the indexes returns an error, tries to use others indexes.
+
+    :param index: tell which index to rely on by default.
+    :param index_classes: a dict of name:class to use as indexes.
+    :param indexes: a dict of name:index already instantiated
+    :param mappings: the mappings to use for this wrapper
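+
+    A minimal usage sketch (the project name is hypothetical)::
+
+        >>> client = ClientWrapper()
+        >>> release = client.get_release("FooBar")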
+    """
+
+    def __init__(self, default_index='simple', index_classes=_WRAPPER_INDEXES,
+                 indexes=None, mappings=_WRAPPER_MAPPINGS):
+        self._projects = {}
+        self._mappings = mappings
+        self._indexes = indexes if indexes is not None else {}
+        self._default_index = default_index
+
+        # instantiate the classes and set their _project attribute to the one
+        # of the wrapper.
+        for name, cls in index_classes.items():
+            obj = self._indexes.setdefault(name, cls())
+            obj._projects = self._projects
+            obj._index = self
+
+    def __getattr__(self, method_name):
+        """When asking for methods of the wrapper, return the implementation of
+        the wrapped classes, depending the mapping.
+
+        Decorate the methods to switch of implementation if an error occurs
+        """
+        real_method = None
+        if method_name in _WRAPPER_MAPPINGS:
+            obj = self._indexes[_WRAPPER_MAPPINGS[method_name]]
+            real_method = getattr(obj, method_name)
+        else:
+            # the method is not defined in the mappings, so we try first to get
+            # it via the default index, and rely on others if needed.
+            try:
+                real_method = getattr(self._indexes[self._default_index],
+                                      method_name)
+            except AttributeError:
+                other_indexes = [i for i in self._indexes
+                                 if i != self._default_index]
+                for index in other_indexes:
+                    real_method = getattr(self._indexes[index], method_name,
+                                          None)
+                    if real_method:
+                        break
+        if real_method:
+            return switch_index_if_fails(real_method, self)
+        else:
+            raise AttributeError("No index have attribute '%s'" % method_name)
diff --git a/Lib/packaging/pypi/xmlrpc.py b/Lib/packaging/pypi/xmlrpc.py
new file mode 100644
index 0000000..7a9f6cc
--- /dev/null
+++ b/Lib/packaging/pypi/xmlrpc.py
@@ -0,0 +1,200 @@
+"""Spider using the XML-RPC PyPI API.
+
+This module contains the class Client, a spider that can be used to find
+and retrieve distributions from a project index (like the Python Package
+Index), using its XML-RPC API (see documentation of the reference
+implementation at http://wiki.python.org/moin/PyPiXmlRpc).
+"""
+
+import xmlrpc.client
+
+from packaging import logger
+from packaging.errors import IrrationalVersionError
+from packaging.version import get_version_predicate
+from packaging.pypi.base import BaseClient
+from packaging.pypi.errors import (ProjectNotFound, InvalidSearchField,
+                                   ReleaseNotFound)
+from packaging.pypi.dist import ReleaseInfo
+
+__all__ = ['Client', 'DEFAULT_XMLRPC_INDEX_URL']
+
+DEFAULT_XMLRPC_INDEX_URL = 'http://python.org/pypi'
+
+_SEARCH_FIELDS = ['name', 'version', 'author', 'author_email', 'maintainer',
+                  'maintainer_email', 'home_page', 'license', 'summary',
+                  'description', 'keywords', 'platform', 'download_url']
+
+
+class Client(BaseClient):
+    """Client to query indexes using XML-RPC method calls.
+
+    If no server_url is specified, use the default PyPI XML-RPC URL,
+    defined in the DEFAULT_XMLRPC_INDEX_URL constant::
+
+        >>> client = Client()
+        >>> client.server_url == DEFAULT_XMLRPC_INDEX_URL
+        True
+
+        >>> client = XMLRPCClient("http://someurl/")
+        >>> client.server_url
+        'http://someurl/'
+    """
+
+    def __init__(self, server_url=DEFAULT_XMLRPC_INDEX_URL, prefer_final=False,
+                 prefer_source=True):
+        super().__init__(prefer_final, prefer_source)
+        self.server_url = server_url
+        self._projects = {}
+
+    def get_release(self, requirements, prefer_final=False):
+        """Return a release with all complete metadata and distribution
+        related informations.
+        """
+        prefer_final = self._get_prefer_final(prefer_final)
+        predicate = get_version_predicate(requirements)
+        releases = self.get_releases(predicate.name)
+        release = releases.get_last(predicate, prefer_final)
+        self.get_metadata(release.name, str(release.version))
+        self.get_distributions(release.name, str(release.version))
+        return release
+
+    def get_releases(self, requirements, prefer_final=None, show_hidden=True,
+                     force_update=False):
+        """Return the list of existing releases for a specific project.
+
+        Cache the results from one call to another.
+
+        If show_hidden is True, return the hidden releases too.
+        If force_update is True, reprocess the index to update the
+        information (i.e. make a new XML-RPC call).
+        ::
+
+            >>> client = Client()
+            >>> client.get_releases('Foo')
+            ['1.1', '1.2', '1.3']
+
+        If no such project exists, raise a ProjectNotFound exception::
+
+            >>> client.get_releases('UnexistingProject')
+            ProjectNotFound: UnexistingProject
+
+        """
+        def get_versions(project_name, show_hidden):
+            return self.proxy.package_releases(project_name, show_hidden)
+
+        predicate = get_version_predicate(requirements)
+        prefer_final = self._get_prefer_final(prefer_final)
+        project_name = predicate.name
+        if not force_update and (project_name.lower() in self._projects):
+            project = self._projects[project_name.lower()]
+            if not project.contains_hidden and show_hidden:
+                # if hidden releases are requested, and we have an existing
+                # list of releases that does not contain the hidden ones
+                all_versions = get_versions(project_name, show_hidden)
+                existing_versions = project.get_versions()
+                hidden_versions = set(all_versions) - set(existing_versions)
+                for version in hidden_versions:
+                    project.add_release(release=ReleaseInfo(project_name,
+                                            version, index=self._index))
+        else:
+            versions = get_versions(project_name, show_hidden)
+            if not versions:
+                raise ProjectNotFound(project_name)
+            project = self._get_project(project_name)
+            project.add_releases([ReleaseInfo(project_name, version,
+                                              index=self._index)
+                                  for version in versions])
+        project = project.filter(predicate)
+        if len(project) == 0:
+            raise ReleaseNotFound("%s" % predicate)
+        project.sort_releases(prefer_final)
+        return project
+
+    def get_distributions(self, project_name, version):
+        """Grab informations about distributions from XML-RPC.
+
+        Return a ReleaseInfo object, with distribution-related information
+        filled in.
+        """
+        url_infos = self.proxy.release_urls(project_name, version)
+        project = self._get_project(project_name)
+        if version not in project.get_versions():
+            project.add_release(release=ReleaseInfo(project_name, version,
+                                                    index=self._index))
+        release = project.get_release(version)
+        for info in url_infos:
+            packagetype = info['packagetype']
+            dist_infos = {'url': info['url'],
+                          'hashval': info['md5_digest'],
+                          'hashname': 'md5',
+                          'is_external': False,
+                          'python_version': info['python_version']}
+            release.add_distribution(packagetype, **dist_infos)
+        return release
+
+    def get_metadata(self, project_name, version):
+        """Retrieve project metadata.
+
+        Return a ReleaseInfo object, with metadata information filled in.
+        """
+        # to be case-insensitive, get the information from the XMLRPC API
+        projects = [d['name'] for d in
+                    self.proxy.search({'name': project_name})
+                    if d['name'].lower() == project_name.lower()]
+        if len(projects) > 0:
+            project_name = projects[0]
+
+        metadata = self.proxy.release_data(project_name, version)
+        project = self._get_project(project_name)
+        if version not in project.get_versions():
+            project.add_release(release=ReleaseInfo(project_name, version,
+                                                    index=self._index))
+        release = project.get_release(version)
+        release.set_metadata(metadata)
+        return release
+
+    def search_projects(self, name=None, operator="or", **kwargs):
+        """Find using the keys provided in kwargs.
+
+        You can set operator to "and" or "or".
+        """
+        for key in kwargs:
+            if key not in _SEARCH_FIELDS:
+                raise InvalidSearchField(key)
+        if name:
+            kwargs["name"] = name
+        projects = self.proxy.search(kwargs, operator)
+        for p in projects:
+            project = self._get_project(p['name'])
+            try:
+                project.add_release(release=ReleaseInfo(p['name'],
+                    p['version'], metadata={'summary': p['summary']},
+                    index=self._index))
+            except IrrationalVersionError as e:
+                logger.warning("Irrational version error found: %s", e)
+        return [self._projects[p['name'].lower()] for p in projects]
+
+    def get_all_projects(self):
+        """Return the list of all projects registered in the package index"""
+        projects = self.proxy.list_packages()
+        for name in projects:
+            self.get_releases(name, show_hidden=True)
+
+        return [self._projects[name.lower()] for name in set(projects)]
+
+    @property
+    def proxy(self):
+        """Property used to return the XMLRPC server proxy.
+
+        If no server proxy is defined yet, creates a new one::
+
+            >>> client = Client()
+            >>> client.proxy
+            <ServerProxy for python.org/pypi>
+
+        """
+        if not hasattr(self, '_server_proxy'):
+            self._server_proxy = xmlrpc.client.ServerProxy(self.server_url)
+
+        return self._server_proxy
diff --git a/Lib/packaging/resources.py b/Lib/packaging/resources.py
new file mode 100644
index 0000000..e5904f3
--- /dev/null
+++ b/Lib/packaging/resources.py
@@ -0,0 +1,25 @@
+"""Data file path abstraction.
+
+Functions in this module use sysconfig to find the paths to the resource
+files registered in a project's setup.cfg file.  See the documentation for
+more information.
+"""
+# TODO write that documentation
+
+from packaging.database import get_distribution
+
+__all__ = ['get_file_path', 'get_file']
+
+
+def get_file_path(distribution_name, relative_path):
+    """Return the path to a resource file."""
+    dist = get_distribution(distribution_name)
+    if dist is not None:
+        return dist.get_resource_path(relative_path)
+    raise LookupError('no distribution named %r found' % distribution_name)
+
+
+def get_file(distribution_name, relative_path, *args, **kwargs):
+    """Open and return a resource file."""
+    return open(get_file_path(distribution_name, relative_path),
+                *args, **kwargs)
diff --git a/Lib/packaging/run.py b/Lib/packaging/run.py
new file mode 100644
index 0000000..1d4fadb
--- /dev/null
+++ b/Lib/packaging/run.py
@@ -0,0 +1,645 @@
+"""Main command line parser.  Implements the pysetup script."""
+
+import os
+import re
+import sys
+import getopt
+import logging
+
+from packaging import logger
+from packaging.dist import Distribution
+from packaging.util import _is_archive_file
+from packaging.command import get_command_class, STANDARD_COMMANDS
+from packaging.install import install, install_local_project, remove
+from packaging.database import get_distribution, get_distributions
+from packaging.depgraph import generate_graph
+from packaging.fancy_getopt import FancyGetopt
+from packaging.errors import (PackagingArgError, PackagingError,
+                              PackagingModuleError, PackagingClassError,
+                              CCompilerError)
+
+
+command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
+
+common_usage = """\
+Actions:
+%(actions)s
+
+To get more help on an action, use:
+
+    pysetup action --help
+"""
+
+create_usage = """\
+Usage: pysetup create
+   or: pysetup create --help
+
+Create a new Python package.
+"""
+
+graph_usage = """\
+Usage: pysetup graph dist
+   or: pysetup graph --help
+
+Print dependency graph for the distribution.
+
+positional arguments:
+   dist  installed distribution name
+"""
+
+install_usage = """\
+Usage: pysetup install [dist]
+   or: pysetup install [archive]
+   or: pysetup install [src_dir]
+   or: pysetup install --help
+
+Install a Python distribution from the indexes, source directory, or sdist.
+
+positional arguments:
+   archive  path to source distribution (zip, tar.gz)
+   dist     distribution name to install from the indexes
+   src_dir  path to source directory
+
+"""
+
+metadata_usage = """\
+Usage: pysetup metadata [dist] [-f field ...]
+   or: pysetup metadata [dist] [--all]
+   or: pysetup metadata --help
+
+Print metadata for the distribution.
+
+positional arguments:
+   dist  installed distribution name
+
+optional arguments:
+   -f     metadata field to print
+   --all  print all metadata fields
+"""
+
+remove_usage = """\
+Usage: pysetup remove dist [-y]
+   or: pysetup remove --help
+
+Uninstall a Python distribution.
+
+positional arguments:
+   dist  installed distribution name
+
+optional arguments:
+   -y  auto confirm package removal
+"""
+
+run_usage = """\
+Usage: pysetup run [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
+   or: pysetup run --help
+   or: pysetup run --list-commands
+   or: pysetup run cmd --help
+"""
+
+list_usage = """\
+Usage: pysetup list dist [dist ...]
+   or: pysetup list --help
+   or: pysetup list --all
+
+Print name, version and location for the matching installed distributions.
+
+positional arguments:
+   dist  installed distribution name
+
+optional arguments:
+   --all  list all installed distributions
+"""
+
+search_usage = """\
+Usage: pysetup search [project] [--simple [url]] [--xmlrpc [url] [--fieldname value ...] --operator or|and]
+   or: pysetup search --help
+
+Search the indexes for the matching projects.
+
+positional arguments:
+    project     the project pattern to search for
+
+optional arguments:
+    --xmlrpc [url]      whether to use the xmlrpc index or not. If a url is
+                        specified, it will be used rather than the default one.
+
+    --simple [url]      whether to use the simple index or not. If a url is
+                        specified, it will be used rather than the default one.
+
+    --fieldname value   Make a search on this field. Can only be used if
+                        --xmlrpc has been selected or is the default index.
+
+    --operator or|and   Defines the operator to use when doing xmlrpc
+                        searches with multiple fieldnames. Can only be used if
+                        --xmlrpc has been selected or is the default index.
+"""
+
+global_options = [
+    # The fourth entry for verbose means that it can be repeated.
+    ('verbose', 'v', "run verbosely (default)", True),
+    ('quiet', 'q', "run quietly (turns verbosity off)"),
+    ('dry-run', 'n', "don't actually do anything"),
+    ('help', 'h', "show detailed help message"),
+    ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
+    ('version', None, 'Display the version'),
+]
+
+negative_opt = {'quiet': 'verbose'}
+
+display_options = [
+    ('help-commands', None, "list all available commands"),
+]
+
+display_option_names = [x[0].replace('-', '_') for x in display_options]
+
+
+def _parse_args(args, options, long_options):
+    """Transform sys.argv input into a dict.
+
+    :param args: the args to parse (i.e. sys.argv)
+    :param options: the list of options to pass to getopt
+    :param long_options: the list of string with the names of the long options
+                         to be passed to getopt.
+
+    The function returns a dict with options/long_options as keys and matching
+    values as values.
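+
+    For instance (illustrative)::
+
+        >>> opts = _parse_args(['-f', 'name', '--all', 'dist'], 'f:', ['all'])
+        >>> opts['args'], opts['f'], 'all' in opts
+        (['dist'], ['name'], True)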
+    """
+    optlist, args = getopt.gnu_getopt(args, options, long_options)
+    optdict = {}
+    optdict['args'] = args
+    for k, v in optlist:
+        k = k.lstrip('-')
+        if k not in optdict:
+            optdict[k] = []
+            if v:
+                optdict[k].append(v)
+        else:
+            optdict[k].append(v)
+    return optdict
+
+
+class action_help:
+    """Prints a help message when the standard help flags: -h and --help
+    are used on the commandline.
+    """
+
+    def __init__(self, help_msg):
+        self.help_msg = help_msg
+
+    def __call__(self, f):
+        def wrapper(*args, **kwargs):
+            f_args = args[1]
+            if '--help' in f_args or '-h' in f_args:
+                print(self.help_msg)
+                return
+            return f(*args, **kwargs)
+        return wrapper
+
+
+@action_help(create_usage)
+def _create(dispatcher, args, **kw):
+    from packaging.create import main
+    return main()
+
+
+@action_help(graph_usage)
+def _graph(dispatcher, args, **kw):
+    name = args[1]
+    dist = get_distribution(name, use_egg_info=True)
+    if dist is None:
+        print('Distribution not found.')
+    else:
+        dists = get_distributions(use_egg_info=True)
+        graph = generate_graph(dists)
+        print(graph.repr_node(dist))
+
+
+@action_help(install_usage)
+def _install(dispatcher, args, **kw):
+    # first check if we are in a source directory
+    if len(args) < 2:
+        # are we inside a project dir?
+        listing = os.listdir(os.getcwd())
+        if 'setup.py' in listing or 'setup.cfg' in listing:
+            args.insert(1, os.getcwd())
+        else:
+            logger.warning('no project to install')
+            return
+
+    # installing from a source dir or archive file?
+    if os.path.isdir(args[1]) or _is_archive_file(args[1]):
+        install_local_project(args[1])
+    else:
+        # download from PyPI
+        install(args[1])
+
+
+@action_help(metadata_usage)
+def _metadata(dispatcher, args, **kw):
+    opts = _parse_args(args[1:], 'f:', ['all'])
+    if opts['args']:
+        name = opts['args'][0]
+        dist = get_distribution(name, use_egg_info=True)
+        if dist is None:
+            logger.warning('%s not installed', name)
+            return
+    else:
+        logger.info('searching local dir for metadata')
+        dist = Distribution()
+        dist.parse_config_files()
+
+    metadata = dist.metadata
+
+    if 'all' in opts:
+        keys = metadata.keys()
+    else:
+        if 'f' in opts:
+            keys = (k for k in opts['f'] if k in metadata)
+        else:
+            keys = ()
+
+    for key in keys:
+        if key in metadata:
+            print(metadata._convert_name(key) + ':')
+            value = metadata[key]
+            if isinstance(value, list):
+                for v in value:
+                    print('    ' + v)
+            else:
+                print('    ' + value.replace('\n', '\n    '))
+
+
+@action_help(remove_usage)
+def _remove(dispatcher, args, **kw):
+    opts = _parse_args(args[1:], 'y', [])
+    auto_confirm = 'y' in opts
+
+    for dist in set(opts['args']):
+        try:
+            remove(dist, auto_confirm=auto_confirm)
+        except PackagingError:
+            logger.warning('%s not installed', dist)
+
+
+@action_help(run_usage)
+def _run(dispatcher, args, **kw):
+    parser = dispatcher.parser
+    args = args[1:]
+
+    commands = STANDARD_COMMANDS  # + extra commands
+
+    if args == ['--list-commands']:
+        print('List of available commands:')
+        cmds = sorted(commands)
+
+        for cmd in cmds:
+            cls = dispatcher.cmdclass.get(cmd) or get_command_class(cmd)
+            desc = getattr(cls, 'description',
+                            '(no description available)')
+            print('  %s: %s' % (cmd, desc))
+        return
+
+    while args:
+        args = dispatcher._parse_command_opts(parser, args)
+        if args is None:
+            return
+
+    # create the Distribution class
+    # need to feed setup.cfg here !
+    dist = Distribution()
+
+    # Find and parse the config file(s): they will override options from
+    # the setup script, but be overridden by the command line.
+
+    # XXX still need to be extracted from Distribution
+    dist.parse_config_files()
+
+    try:
+        for cmd in dispatcher.commands:
+            dist.run_command(cmd, dispatcher.command_options[cmd])
+
+    except KeyboardInterrupt:
+        raise SystemExit("interrupted")
+    except (IOError, os.error, PackagingError, CCompilerError) as msg:
+        raise SystemExit("error: " + str(msg))
+
+    # XXX this is crappy
+    return dist
+
+
+@action_help(list_usage)
+def _list(dispatcher, args, **kw):
+    opts = _parse_args(args[1:], '', ['all'])
+    dists = get_distributions(use_egg_info=True)
+    if 'all' in opts:
+        results = dists
+    else:
+        results = [d for d in dists if d.name.lower() in opts['args']]
+
+    for dist in results:
+        print('%s %s at %s' % (dist.name, dist.metadata['version'], dist.path))
+
+
+@action_help(search_usage)
+def _search(dispatcher, args, **kw):
+    """The search action.
+
+    It is able to search a specific index, using the simple or xmlrpc
+    index types (selected with --simple [url] / --xmlrpc [url]).
+    """
+    opts = _parse_args(args[1:], '', ['simple', 'xmlrpc'])
+    # 1. what kind of index is requested? (xmlrpc / simple)
+    raise NotImplementedError('the search action is not implemented yet')
+
+
+actions = [
+    ('run', 'Run one or several commands', _run),
+    ('metadata', 'Display the metadata of a project', _metadata),
+    ('install', 'Install a project', _install),
+    ('remove', 'Remove a project', _remove),
+    ('search', 'Search for a project in the indexes', _search),
+    ('list', 'Search for local projects', _list),
+    ('graph', 'Display a graph', _graph),
+    ('create', 'Create a Project', _create),
+]
+
+
+class Dispatcher:
+    """Reads the command-line options
+    """
+    def __init__(self, args=None):
+        self.verbose = 1
+        self.dry_run = False
+        self.help = False
+        self.script_name = 'pysetup'
+        self.cmdclass = {}
+        self.commands = []
+        self.command_options = {}
+
+        for attr in display_option_names:
+            setattr(self, attr, False)
+
+        self.parser = FancyGetopt(global_options + display_options)
+        self.parser.set_negative_aliases(negative_opt)
+        # FIXME this parses everything, including command options (e.g. "run
+        # build -i" errors with "option -i not recognized")
+        args = self.parser.getopt(args=args, object=self)
+
+        # if first arg is "run", we have some commands
+        if len(args) == 0:
+            self.action = None
+        else:
+            self.action = args[0]
+
+        allowed = [action[0] for action in actions] + [None]
+        if self.action not in allowed:
+            msg = 'Unrecognized action "%s"' % self.action
+            raise PackagingArgError(msg)
+
+        # setting up the logging level from the command-line options
+        # -q gets warning, error and critical
+        if self.verbose == 0:
+            level = logging.WARNING
+        # default level or -v gets info too
+        # XXX there's a bug somewhere: the help text says that -v is default
+        # (and verbose is set to 1 above), but when the user explicitly gives
+        # -v on the command line, self.verbose is incremented to 2!  Here we
+        # compensate for that (I tested manually).  On a related note, I think
+        # it's a good thing to use -q/nothing/-v/-vv on the command line
+        # instead of logging constants; it will be easy to add support for
+        # logging configuration in setup.cfg for advanced users. --merwok
+        elif self.verbose in (1, 2):
+            level = logging.INFO
+        else:  # -vv and more for debug
+            level = logging.DEBUG
+        logger.setLevel(level)
+
+        # for display options we return immediately
+        option_order = self.parser.get_option_order()
+
+        self.args = args
+
+        if self.help or self.action is None:
+            self._show_help(self.parser, display_options_=False)
+
+    def _parse_command_opts(self, parser, args):
+        # Pull the current command from the head of the command line
+        command = args[0]
+        if not command_re.match(command):
+            raise SystemExit("invalid command name %r" % (command,))
+        self.commands.append(command)
+
+        # Dig up the command class that implements this command, so we
+        # 1) know that it's a valid command, and 2) know which options
+        # it takes.
+        try:
+            cmd_class = get_command_class(command)
+        except PackagingModuleError as msg:
+            raise PackagingArgError(msg)
+
+        # XXX We want to push this in packaging.command
+        #
+        # Require that the command class be derived from Command -- want
+        # to be sure that the basic "command" interface is implemented.
+        for meth in ('initialize_options', 'finalize_options', 'run'):
+            if hasattr(cmd_class, meth):
+                continue
+            raise PackagingClassError(
+                'command %r must implement %r' % (cmd_class, meth))
+
+        # Also make sure that the command object provides a list of its
+        # known options.
+        if not (hasattr(cmd_class, 'user_options') and
+                isinstance(cmd_class.user_options, list)):
+            raise PackagingClassError(
+                "command class %s must provide "
+                "'user_options' attribute (a list of tuples)" % cmd_class)
+
+        # If the command class has a list of negative alias options,
+        # merge it in with the global negative aliases.
+        _negative_opt = negative_opt.copy()
+
+        if hasattr(cmd_class, 'negative_opt'):
+            _negative_opt.update(cmd_class.negative_opt)
+
+        # Check for help_options in command class.  They have a different
+        # format (tuple of four) so we need to preprocess them here.
+        if (hasattr(cmd_class, 'help_options') and
+            isinstance(cmd_class.help_options, list)):
+            help_options = cmd_class.help_options[:]
+        else:
+            help_options = []
+
+        # All commands support the global options too, just by adding
+        # in 'global_options'.
+        parser.set_option_table(global_options +
+                                cmd_class.user_options +
+                                help_options)
+        parser.set_negative_aliases(_negative_opt)
+        args, opts = parser.getopt(args[1:])
+
+        if hasattr(opts, 'help') and opts.help:
+            self._show_command_help(cmd_class)
+            return
+
+        if (hasattr(cmd_class, 'help_options') and
+            isinstance(cmd_class.help_options, list)):
+            help_option_found = False
+            for help_option, short, desc, func in cmd_class.help_options:
+                if hasattr(opts, help_option.replace('-', '_')):
+                    help_option_found = True
+                    if callable(func):
+                        func()
+                    else:
+                        raise PackagingClassError(
+                            "invalid help function %r for help option %r: "
+                            "must be a callable object (function, etc.)"
+                            % (func, help_option))
+
+            if help_option_found:
+                return
+
+        # Put the options from the command line into their official
+        # holding pen, the 'command_options' dictionary.
+        opt_dict = self.get_option_dict(command)
+        for name, value in vars(opts).items():
+            opt_dict[name] = ("command line", value)
+
+        return args
+
+    def get_option_dict(self, command):
+        """Get the option dictionary for a given command.  If that
+        command's option dictionary hasn't been created yet, then create it
+        and return the new dictionary; otherwise, return the existing
+        option dictionary.
+        """
+        d = self.command_options.get(command)
+        if d is None:
+            d = self.command_options[command] = {}
+        return d
+
+    def show_help(self):
+        self._show_help(self.parser)
+
+    def print_usage(self, parser):
+        parser.set_option_table(global_options)
+
+        actions_ = ['    %s: %s' % (name, desc) for name, desc, __ in actions]
+        usage = common_usage % {'actions': '\n'.join(actions_)}
+
+        parser.print_help(usage + "\nGlobal options:")
+
+    def _show_help(self, parser, global_options_=True, display_options_=True,
+                   commands=[]):
+        # late import because of mutual dependence between these modules
+        from packaging.command.cmd import Command
+
+        print('Usage: pysetup [options] action [action_options]')
+        print('')
+        if global_options_:
+            self.print_usage(self.parser)
+            print('')
+
+        if display_options_:
+            parser.set_option_table(display_options)
+            parser.print_help(
+                "Information display options (just display " +
+                "information, ignore any commands)")
+            print('')
+
+        for command in commands:
+            if isinstance(command, type) and issubclass(command, Command):
+                cls = command
+            else:
+                cls = get_command_class(command)
+            if (hasattr(cls, 'help_options') and
+                isinstance(cls.help_options, list)):
+                parser.set_option_table(cls.user_options + cls.help_options)
+            else:
+                parser.set_option_table(cls.user_options)
+
+            parser.print_help("Options for %r command:" % cls.__name__)
+            print('')
+
+    def _show_command_help(self, command):
+        if isinstance(command, str):
+            command = get_command_class(command)
+
+        name = command.get_command_name()
+
+        desc = getattr(command, 'description', '(no description available)')
+        print('Description: %s' % desc)
+        print('')
+
+        if (hasattr(command, 'help_options') and
+            isinstance(command.help_options, list)):
+            self.parser.set_option_table(command.user_options +
+                                         command.help_options)
+        else:
+            self.parser.set_option_table(command.user_options)
+
+        self.parser.print_help("Options:")
+        print('')
+
+    def _get_command_groups(self):
+        """Helper function to retrieve all the command class names divided
+        into standard commands (listed in
+        packaging.command.STANDARD_COMMANDS) and extra commands (given in
+        self.cmdclass and not standard commands).
+        """
+        extra_commands = [cmd for cmd in self.cmdclass
+                          if cmd not in STANDARD_COMMANDS]
+        return STANDARD_COMMANDS, extra_commands
+
+    def print_commands(self):
+        """Print out a help message listing all available commands with a
+        description of each.  The list is divided into standard commands
+        (listed in packaging.command.STANDARD_COMMANDS) and extra commands
+        (given in self.cmdclass and not standard commands).  The
+        descriptions come from the command class attribute
+        'description'.
+        """
+        std_commands, extra_commands = self._get_command_groups()
+        max_length = max(len(command)
+                         for commands in (std_commands, extra_commands)
+                         for command in commands)
+
+        self.print_command_list(std_commands, "Standard commands", max_length)
+        if extra_commands:
+            print()
+            self.print_command_list(extra_commands, "Extra commands",
+                                    max_length)
+
+    def print_command_list(self, commands, header, max_length):
+        """Print a subset of the list of all commands -- used by
+        'print_commands()'.
+        """
+        print(header + ":")
+
+        for cmd in commands:
+            cls = self.cmdclass.get(cmd) or get_command_class(cmd)
+            description = getattr(cls, 'description',
+                                  '(no description available)')
+
+            print("  %-*s  %s" % (max_length, cmd, description))
+
+    def __call__(self):
+        if self.action is None:
+            return
+        for action, desc, func in actions:
+            if action == self.action:
+                return func(self, self.args)
+        return -1
+
+
+def main(args=None):
+    dispatcher = Dispatcher(args)
+    if dispatcher.action is None:
+        return
+
+    return dispatcher()
+
+if __name__ == '__main__':
+    sys.exit(main())
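
For reference, _parse_command_opts above enforces a simple contract on
command classes: the initialize_options, finalize_options and run methods,
plus a 'user_options' list of (long option, short option, help text) tuples.
A minimal sketch, with a hypothetical 'greet' command::

    from packaging.command.cmd import Command

    class greet(Command):

        description = 'print a greeting (hypothetical demo command)'
        # (long option, short option, help text), as checked above
        user_options = [('name=', 'n', 'who to greet')]

        def initialize_options(self):
            self.name = None

        def finalize_options(self):
            if self.name is None:
                self.name = 'world'

        def run(self):
            print('Hello, %s!' % self.name)
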
diff --git a/Lib/packaging/tests/LONG_DESC.txt b/Lib/packaging/tests/LONG_DESC.txt
new file mode 100644
index 0000000..2b4358a
--- /dev/null
+++ b/Lib/packaging/tests/LONG_DESC.txt
@@ -0,0 +1,44 @@
+CLVault
+=======
+
+CLVault uses Keyring to provide a command-line utility to safely store
+and retrieve passwords.
+
+Install it using pip or the setup.py script::
+
+    $ python setup.py install
+
+    $ pip install clvault
+
+Once it's installed, you will have three scripts installed in your
+Python scripts folder, which you can use to list, store and retrieve passwords::
+
+    $ clvault-set blog
+    Set your password:
+    Set the associated username (can be blank): tarek
+    Set a description (can be blank): My blog password
+    Password set.
+
+    $ clvault-get blog
+    The username is "tarek"
+    The password has been copied in your clipboard
+
+    $ clvault-list
+    Registered services:
+    blog    My blog password
+
+
+*clvault-set* takes a service name, then prompts you for a password, and some
+optional information about your service. The password is safely stored in
+a keyring while the description is saved in a ``.clvault`` file in your
+home directory. This file is created automatically the first time the command
+is used.
+
+*clvault-get* copies the password for a given service to your clipboard, and
+displays the associated user if any.
+
+*clvault-list* lists all registered services, with their description when
+given.
+
+
+Project page: http://bitbucket.org/tarek/clvault
diff --git a/Lib/packaging/tests/PKG-INFO b/Lib/packaging/tests/PKG-INFO
new file mode 100644
index 0000000..f48546e
--- /dev/null
+++ b/Lib/packaging/tests/PKG-INFO
@@ -0,0 +1,57 @@
+Metadata-Version: 1.2
+Name: CLVault
+Version: 0.5
+Summary: Command-Line utility to store and retrieve passwords
+Home-page: http://bitbucket.org/tarek/clvault
+Author: Tarek Ziade
+Author-email: tarek@ziade.org
+License: PSF
+Keywords: keyring,password,crypt
+Requires-Dist: foo; sys.platform == 'okook'
+Requires-Dist: bar; sys.platform == '%s'
+Platform: UNKNOWN
+Description: CLVault
+       |=======
+       |
+       |CLVault uses Keyring to provide a command-line utility to safely store
+       |and retrieve passwords.
+       |
+       |Install it using pip or the setup.py script::
+       |
+       |    $ python setup.py install
+       |
+       |    $ pip install clvault
+       |
+       |Once it's installed, you will have three scripts installed in your
+       |Python scripts folder, which you can use to list, store and retrieve passwords::
+       |
+       |    $ clvault-set blog
+       |    Set your password:
+       |    Set the associated username (can be blank): tarek
+       |    Set a description (can be blank): My blog password
+       |    Password set.
+       |
+       |    $ clvault-get blog
+       |    The username is "tarek"
+       |    The password has been copied in your clipboard
+       |
+       |    $ clvault-list
+       |    Registered services:
+       |    blog    My blog password
+       |
+       |
+       |*clvault-set* takes a service name, then prompts you for a password, and some
+       |optional information about your service. The password is safely stored in
+       |a keyring while the description is saved in a ``.clvault`` file in your
+       |home directory. This file is created automatically the first time the command
+       |is used.
+       |
+       |*clvault-get* copies the password for a given service to your clipboard, and
+       |displays the associated user if any.
+       |
+       |*clvault-list* lists all registered services, with their description when
+       |given.
+       |
+       |
+       |Project page: http://bitbucket.org/tarek/clvault
+       |
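
PKG-INFO files such as the one above use RFC 822-style headers, so they can
be inspected with the standard email parser. A minimal sketch (the path is
illustrative, and the literal '%s' in the second Requires-Dist line is
presumably substituted by the tests at run time)::

    import email

    with open('Lib/packaging/tests/PKG-INFO') as fp:
        msg = email.message_from_file(fp)

    print(msg['Name'])                   # CLVault
    print(msg['Version'])                # 0.5
    print(msg.get_all('Requires-Dist'))  # both conditional dependencies
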
diff --git a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO
new file mode 100644
index 0000000..dff8d00
--- /dev/null
+++ b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO
@@ -0,0 +1,182 @@
+Metadata-Version: 1.0
+Name: setuptools
+Version: 0.6c9
+Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
+Home-page: http://pypi.python.org/pypi/setuptools
+Author: Phillip J. Eby
+Author-email: distutils-sig@python.org
+License: PSF or ZPL
+Description: ===============================
+        Installing and Using Setuptools
+        ===============================
+
+        .. contents:: **Table of Contents**
+
+
+        -------------------------
+        Installation Instructions
+        -------------------------
+
+        Windows
+        =======
+
+        Install setuptools using the provided ``.exe`` installer.  If you've previously
+        installed older versions of setuptools, please delete all ``setuptools*.egg``
+        and ``setuptools.pth`` files from your system's ``site-packages`` directory
+        (and any other ``sys.path`` directories) FIRST.
+
+        If you are upgrading a previous version of setuptools that was installed using
+        an ``.exe`` installer, please be sure to also *uninstall that older version*
+        via your system's "Add/Remove Programs" feature, BEFORE installing the newer
+        version.
+
+        Once installation is complete, you will find an ``easy_install.exe`` program in
+        your Python ``Scripts`` subdirectory.  Be sure to add this directory to your
+        ``PATH`` environment variable, if you haven't already done so.
+
+
+        RPM-Based Systems
+        =================
+
+        Install setuptools using the provided source RPM.  The included ``.spec`` file
+        assumes you are installing using the default ``python`` executable, and is not
+        specific to a particular Python version.  The ``easy_install`` executable will
+        be installed to a system ``bin`` directory such as ``/usr/bin``.
+
+        If you wish to install to a location other than the default Python
+        installation's default ``site-packages`` directory (and ``$prefix/bin`` for
+        scripts), please use the ``.egg``-based installation approach described in the
+        following section.
+
+
+        Cygwin, Mac OS X, Linux, Other
+        ==============================
+
+        1. Download the appropriate egg for your version of Python (e.g.
+        ``setuptools-0.6c9-py2.4.egg``).  Do NOT rename it.
+
+        2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``.
+        Setuptools will install itself using the matching version of Python (e.g.
+        ``python2.4``), and will place the ``easy_install`` executable in the
+        default location for installing Python scripts (as determined by the
+        standard distutils configuration files, or by the Python installation).
+
+        If you want to install setuptools to somewhere other than ``site-packages`` or
+        your default distutils installation locations for libraries and scripts, you
+        may include EasyInstall command-line options such as ``--prefix``,
+        ``--install-dir``, and so on, following the ``.egg`` filename on the same
+        command line.  For example::
+
+        sh setuptools-0.6c9-py2.4.egg --prefix=~
+
+        You can use ``--help`` to get a full options list, but we recommend consulting
+        the `EasyInstall manual`_ for detailed instructions, especially `the section
+        on custom installation locations`_.
+
+        .. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+        .. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
+
+
+        Cygwin Note
+        -----------
+
+        If you are trying to install setuptools for the **Windows** version of Python
+        (as opposed to the Cygwin version that lives in ``/usr/bin``), you must make
+        sure that an appropriate executable (``python2.3``, ``python2.4``, or
+        ``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg.  For
+        example, doing the following at a Cygwin bash prompt will install setuptools
+        for the **Windows** Python found at ``C:\\Python24``::
+
+        ln -s /cygdrive/c/Python24/python.exe python2.4
+        PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg
+        rm python2.4
+
+
+        Downloads
+        =========
+
+        All setuptools downloads can be found at `the project's home page in the Python
+        Package Index`_.  Scroll to the very bottom of the page to find the links.
+
+        .. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools
+
+        In addition to the PyPI downloads, the development version of ``setuptools``
+        is available from the `Python SVN sandbox`_, and in-development versions of the
+        `0.6 branch`_ are available as well.
+
+        .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
+
+        .. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
+
+        --------------------------------
+        Using Setuptools and EasyInstall
+        --------------------------------
+
+        Here are some of the available manuals, tutorials, and other resources for
+        learning about Setuptools, Python Eggs, and EasyInstall:
+
+        * `The EasyInstall user's guide and reference manual`_
+        * `The setuptools Developer's Guide`_
+        * `The pkg_resources API reference`_
+        * `Package Compatibility Notes`_ (user-maintained)
+        * `The Internal Structure of Python Eggs`_
+
+        Questions, comments, and bug reports should be directed to the `distutils-sig
+        mailing list`_.  If you have written (or know of) any tutorials, documentation,
+        plug-ins, or other resources for setuptools users, please let us know about
+        them there, so this reference list can be updated.  If you have working,
+        *tested* patches to correct problems or add features, you may submit them to
+        the `setuptools bug tracker`_.
+
+        .. _setuptools bug tracker: http://bugs.python.org/setuptools/
+        .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes
+        .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats
+        .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools
+        .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources
+        .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+        .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
+
+
+        -------
+        Credits
+        -------
+
+        * The original design for the ``.egg`` format and the ``pkg_resources`` API was
+        co-created by Phillip Eby and Bob Ippolito.  Bob also implemented the first
+        version of ``pkg_resources``, and supplied the OS X operating system version
+        compatibility algorithm.
+
+        * Ian Bicking implemented many early "creature comfort" features of
+        easy_install, including support for downloading via Sourceforge and
+        Subversion repositories.  Ian's comments on the Web-SIG about WSGI
+        application deployment also inspired the concept of "entry points" in eggs,
+        and he has given talks at PyCon and elsewhere to inform and educate the
+        community about eggs and setuptools.
+
+        * Jim Fulton contributed time and effort to build automated tests of various
+        aspects of ``easy_install``, and supplied the doctests for the command-line
+        ``.exe`` wrappers on Windows.
+
+        * Phillip J. Eby is the principal author and maintainer of setuptools, and
+        first proposed the idea of an importable binary distribution format for
+        Python application plug-ins.
+
+        * Significant parts of the implementation of setuptools were funded by the Open
+        Source Applications Foundation, to provide a plug-in infrastructure for the
+        Chandler PIM application.  In addition, many OSAF staffers (such as Mike
+        "Code Bear" Taylor) contributed their time and stress as guinea pigs for the
+        use of eggs and setuptools, even before eggs were "cool".  (Thanks, guys!)
+
+
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
diff --git a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2 b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2
new file mode 100644
index 0000000..4b3906a
--- /dev/null
+++ b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2
@@ -0,0 +1,183 @@
+Metadata-Version: 1.1
+Name: setuptools
+Version: 0.6c9
+Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
+Home-page: http://pypi.python.org/pypi/setuptools
+Author: Phillip J. Eby
+Author-email: distutils-sig@python.org
+License: PSF or ZPL
+Description: ===============================
+        Installing and Using Setuptools
+        ===============================
+
+        .. contents:: **Table of Contents**
+
+
+        -------------------------
+        Installation Instructions
+        -------------------------
+
+        Windows
+        =======
+
+        Install setuptools using the provided ``.exe`` installer.  If you've previously
+        installed older versions of setuptools, please delete all ``setuptools*.egg``
+        and ``setuptools.pth`` files from your system's ``site-packages`` directory
+        (and any other ``sys.path`` directories) FIRST.
+
+        If you are upgrading a previous version of setuptools that was installed using
+        an ``.exe`` installer, please be sure to also *uninstall that older version*
+        via your system's "Add/Remove Programs" feature, BEFORE installing the newer
+        version.
+
+        Once installation is complete, you will find an ``easy_install.exe`` program in
+        your Python ``Scripts`` subdirectory.  Be sure to add this directory to your
+        ``PATH`` environment variable, if you haven't already done so.
+
+
+        RPM-Based Systems
+        =================
+
+        Install setuptools using the provided source RPM.  The included ``.spec`` file
+        assumes you are installing using the default ``python`` executable, and is not
+        specific to a particular Python version.  The ``easy_install`` executable will
+        be installed to a system ``bin`` directory such as ``/usr/bin``.
+
+        If you wish to install to a location other than the default Python
+        installation's default ``site-packages`` directory (and ``$prefix/bin`` for
+        scripts), please use the ``.egg``-based installation approach described in the
+        following section.
+
+
+        Cygwin, Mac OS X, Linux, Other
+        ==============================
+
+        1. Download the appropriate egg for your version of Python (e.g.
+        ``setuptools-0.6c9-py2.4.egg``).  Do NOT rename it.
+
+        2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``.
+        Setuptools will install itself using the matching version of Python (e.g.
+        ``python2.4``), and will place the ``easy_install`` executable in the
+        default location for installing Python scripts (as determined by the
+        standard distutils configuration files, or by the Python installation).
+
+        If you want to install setuptools to somewhere other than ``site-packages`` or
+        your default distutils installation locations for libraries and scripts, you
+        may include EasyInstall command-line options such as ``--prefix``,
+        ``--install-dir``, and so on, following the ``.egg`` filename on the same
+        command line.  For example::
+
+        sh setuptools-0.6c9-py2.4.egg --prefix=~
+
+        You can use ``--help`` to get a full options list, but we recommend consulting
+        the `EasyInstall manual`_ for detailed instructions, especially `the section
+        on custom installation locations`_.
+
+        .. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+        .. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
+
+
+        Cygwin Note
+        -----------
+
+        If you are trying to install setuptools for the **Windows** version of Python
+        (as opposed to the Cygwin version that lives in ``/usr/bin``), you must make
+        sure that an appropriate executable (``python2.3``, ``python2.4``, or
+        ``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg.  For
+        example, doing the following at a Cygwin bash prompt will install setuptools
+        for the **Windows** Python found at ``C:\\Python24``::
+
+        ln -s /cygdrive/c/Python24/python.exe python2.4
+        PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg
+        rm python2.4
+
+
+        Downloads
+        =========
+
+        All setuptools downloads can be found at `the project's home page in the Python
+        Package Index`_.  Scroll to the very bottom of the page to find the links.
+
+        .. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools
+
+        In addition to the PyPI downloads, the development version of ``setuptools``
+        is available from the `Python SVN sandbox`_, and in-development versions of the
+        `0.6 branch`_ are available as well.
+
+        .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
+
+        .. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
+
+        --------------------------------
+        Using Setuptools and EasyInstall
+        --------------------------------
+
+        Here are some of the available manuals, tutorials, and other resources for
+        learning about Setuptools, Python Eggs, and EasyInstall:
+
+        * `The EasyInstall user's guide and reference manual`_
+        * `The setuptools Developer's Guide`_
+        * `The pkg_resources API reference`_
+        * `Package Compatibility Notes`_ (user-maintained)
+        * `The Internal Structure of Python Eggs`_
+
+        Questions, comments, and bug reports should be directed to the `distutils-sig
+        mailing list`_.  If you have written (or know of) any tutorials, documentation,
+        plug-ins, or other resources for setuptools users, please let us know about
+        them there, so this reference list can be updated.  If you have working,
+        *tested* patches to correct problems or add features, you may submit them to
+        the `setuptools bug tracker`_.
+
+        .. _setuptools bug tracker: http://bugs.python.org/setuptools/
+        .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes
+        .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats
+        .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools
+        .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources
+        .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+        .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
+
+
+        -------
+        Credits
+        -------
+
+        * The original design for the ``.egg`` format and the ``pkg_resources`` API was
+        co-created by Phillip Eby and Bob Ippolito.  Bob also implemented the first
+        version of ``pkg_resources``, and supplied the OS X operating system version
+        compatibility algorithm.
+
+        * Ian Bicking implemented many early "creature comfort" features of
+        easy_install, including support for downloading via Sourceforge and
+        Subversion repositories.  Ian's comments on the Web-SIG about WSGI
+        application deployment also inspired the concept of "entry points" in eggs,
+        and he has given talks at PyCon and elsewhere to inform and educate the
+        community about eggs and setuptools.
+
+        * Jim Fulton contributed time and effort to build automated tests of various
+        aspects of ``easy_install``, and supplied the doctests for the command-line
+        ``.exe`` wrappers on Windows.
+
+        * Phillip J. Eby is the principal author and maintainer of setuptools, and
+        first proposed the idea of an importable binary distribution format for
+        Python application plug-ins.
+
+        * Significant parts of the implementation of setuptools were funded by the Open
+        Source Applications Foundation, to provide a plug-in infrastructure for the
+        Chandler PIM application.  In addition, many OSAF staffers (such as Mike
+        "Code Bear" Taylor) contributed their time and stress as guinea pigs for the
+        use of eggs and setuptools, even before eggs were "cool".  (Thanks, guys!)
+
+
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
+Requires: Foo
diff --git a/Lib/packaging/tests/__init__.py b/Lib/packaging/tests/__init__.py
new file mode 100644
index 0000000..0b0e3c5
--- /dev/null
+++ b/Lib/packaging/tests/__init__.py
@@ -0,0 +1,133 @@
+"""Test suite for packaging.
+
+This test suite consists of a collection of test modules in the
+packaging.tests package.  Each test module has a name starting with
+'test' and contains a function test_suite().  The function is expected
+to return an initialized unittest.TestSuite instance.
+
+Utility code is included in packaging.tests.support.
+"""
+
+# Put this text back for the backport
+# Always import unittest from this module; it will be the right version
+# (standard library unittest for 3.2 and higher, third-party unittest2
+# release for older versions).
+
+import os
+import sys
+import unittest
+from test.support import TESTFN
+
+# XXX move helpers to support, add tests for them, remove things that
+# duplicate test.support (or keep them for the backport; needs thinking)
+
+here = os.path.dirname(__file__) or os.curdir
+verbose = 1
+
+def test_suite():
+    suite = unittest.TestSuite()
+    for fn in os.listdir(here):
+        if fn.startswith("test") and fn.endswith(".py"):
+            modname = "packaging.tests." + fn[:-3]
+            __import__(modname)
+            module = sys.modules[modname]
+            suite.addTest(module.test_suite())
+    return suite
+
+
+class Error(Exception):
+    """Base class for regression test exceptions."""
+
+
+class TestFailed(Error):
+    """Test failed."""
+
+
+class BasicTestRunner:
+    def run(self, test):
+        result = unittest.TestResult()
+        test(result)
+        return result
+
+
+def _run_suite(suite, verbose_=1):
+    """Run tests from a unittest.TestSuite-derived class."""
+    global verbose
+    verbose = verbose_
+    if verbose_:
+        runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
+    else:
+        runner = BasicTestRunner()
+
+    result = runner.run(suite)
+    if not result.wasSuccessful():
+        if len(result.errors) == 1 and not result.failures:
+            err = result.errors[0][1]
+        elif len(result.failures) == 1 and not result.errors:
+            err = result.failures[0][1]
+        else:
+            err = "errors occurred; run in verbose mode for details"
+        raise TestFailed(err)
+
+
+def run_unittest(classes, verbose_=1):
+    """Run tests from unittest.TestCase-derived classes.
+
+    Originally extracted from stdlib test.test_support and modified to
+    support unittest2.
+    """
+    valid_types = (unittest.TestSuite, unittest.TestCase)
+    suite = unittest.TestSuite()
+    for cls in classes:
+        if isinstance(cls, str):
+            if cls in sys.modules:
+                suite.addTest(unittest.findTestCases(sys.modules[cls]))
+            else:
+                raise ValueError("str arguments must be keys in sys.modules")
+        elif isinstance(cls, valid_types):
+            suite.addTest(cls)
+        else:
+            suite.addTest(unittest.makeSuite(cls))
+    _run_suite(suite, verbose_)
+
+
+def reap_children():
+    """Use this function at the end of test_main() whenever sub-processes
+    are started.  This will help ensure that no extra children (zombies)
+    stick around to hog resources and create problems when looking
+    for refleaks.
+
+    Extracted from stdlib test.support.
+    """
+
+    # Reap all our dead child processes so we don't leave zombies around.
+    # These hog resources and might be causing some of the buildbots to die.
+    if hasattr(os, 'waitpid'):
+        any_process = -1
+        while True:
+            try:
+                # This will raise an exception on Windows.  That's ok.
+                pid, status = os.waitpid(any_process, os.WNOHANG)
+                if pid == 0:
+                    break
+            except:
+                break
+
+
+def captured_stdout(func, *args, **kw):
+    import io
+    orig_stdout = sys.stdout
+    sys.stdout = io.StringIO()
+    try:
+        res = func(*args, **kw)
+        sys.stdout.seek(0)
+        return res, sys.stdout.read()
+    finally:
+        sys.stdout = orig_stdout
+
+
+def unload(name):
+    try:
+        del sys.modules[name]
+    except KeyError:
+        pass
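
As an illustration, the captured_stdout helper defined above calls a function
and returns its result together with everything it printed. A minimal usage
sketch::

    def noisy(x):
        print('computing')
        return x * 2

    res, output = captured_stdout(noisy, 21)
    assert res == 42
    assert output == 'computing\n'
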
diff --git a/Lib/packaging/tests/__main__.py b/Lib/packaging/tests/__main__.py
new file mode 100644
index 0000000..68ee229
--- /dev/null
+++ b/Lib/packaging/tests/__main__.py
@@ -0,0 +1,20 @@
+"""Packaging test suite runner."""
+
+# Ripped from importlib tests, thanks Brett!
+
+import os
+import sys
+import unittest
+from test.support import run_unittest, reap_children
+
+
+def test_main():
+    start_dir = os.path.dirname(__file__)
+    top_dir = os.path.dirname(os.path.dirname(start_dir))
+    test_loader = unittest.TestLoader()
+    run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
+    reap_children()
+
+
+if __name__ == '__main__':
+    test_main()
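
The runner above makes the suite runnable as ``python -m packaging.tests``.
The equivalent explicit discovery call would be something like this (the
paths are illustrative and depend on the current directory)::

    import unittest

    loader = unittest.TestLoader()
    suite = loader.discover('Lib/packaging/tests', top_level_dir='Lib')
    unittest.TextTestRunner(verbosity=2).run(suite)
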
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA
new file mode 100644
index 0000000..65e839a
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA
@@ -0,0 +1,4 @@
+Metadata-version: 1.2
+Name: babar
+Version: 0.1
+Author: FELD Boris
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES
new file mode 100644
index 0000000..5d0da49
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES
@@ -0,0 +1,2 @@
+babar.png,babar.png
+babar.cfg,babar.cfg
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/babar.cfg b/Lib/packaging/tests/fake_dists/babar.cfg
new file mode 100644
index 0000000..ecd6efe
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar.cfg
@@ -0,0 +1 @@
+Config
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/babar.png b/Lib/packaging/tests/fake_dists/babar.png
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar.png
diff --git a/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO b/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO
new file mode 100644
index 0000000..a176dfd
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO
@@ -0,0 +1,6 @@
+Metadata-Version: 1.2
+Name: bacon
+Version: 0.1
+Provides-Dist: truffles (2.0)
+Provides-Dist: bacon (0.1)
+Obsoletes-Dist: truffles (>=0.9,<=1.5)
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO
new file mode 100644
index 0000000..a7e118a
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO
@@ -0,0 +1,18 @@
+Metadata-Version: 1.0
+Name: banana
+Version: 0.4
+Summary: A yellow fruit
+Home-page: http://en.wikipedia.org/wiki/Banana
+Author: Josip Djolonga
+Author-email: foo@nbar.com
+License: BSD
+Description: A fruit
+Keywords: foo bar
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Scientific/Engineering :: GIS
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt
new file mode 100644
index 0000000..5d3e5f6
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt
@@ -0,0 +1,3 @@
+
+      # -*- Entry points: -*-
+      
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt
new file mode 100644
index 0000000..4354305
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt
@@ -0,0 +1,6 @@
+# this should be ignored
+
+strawberry >=0.5
+
+[section ignored]
+foo ==0.5
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt
diff --git a/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info b/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info
new file mode 100644
index 0000000..27cbe30
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: cheese
+Version: 2.0.2
+Provides-Dist: truffles (1.0.2)
+Obsoletes-Dist: truffles (!=1.2,<=2.0)
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
new file mode 100644
index 0000000..418929e
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
@@ -0,0 +1,9 @@
+Metadata-Version: 1.2
+Name: choxie
+Version: 2.0.0.9
+Summary: Chocolate with a kick!
+Requires-Dist: towel-stuff (0.1)
+Requires-Dist: nut
+Provides-Dist: truffles (1.0)
+Obsoletes-Dist: truffles (<=0.8,>=0.5)
+Obsoletes-Dist: truffles (<=0.9,>=0.6)
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py
new file mode 100644
index 0000000..40a96af
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py
new file mode 100644
index 0000000..c4027f3
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+from towel_stuff import Towel
+
+class Chocolate(object):
+    """A piece of chocolate."""
+
+    def wrap_with_towel(self):
+        towel = Towel()
+        towel.wrap(self)
+        return towel
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py
new file mode 100644
index 0000000..342b8ea
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py
@@ -0,0 +1,5 @@
+# -*- coding: utf-8 -*-
+from choxie.chocolate import Chocolate
+
+class Truffle(Chocolate):
+    """A truffle."""
diff --git a/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO b/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO
new file mode 100644
index 0000000..499a083
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: coconuts-aster
+Version: 10.3
+Provides-Dist: strawberry (0.6)
+Provides-Dist: banana (0.4)
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA
new file mode 100644
index 0000000..0b99f52
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: grammar
+Version: 1.0a4
+Requires-Dist: truffles (>=1.2)
+Author: Sherlock Holmes
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py
new file mode 100644
index 0000000..40a96af
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py
new file mode 100644
index 0000000..66ba796
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+from random import randint
+
+def is_valid_grammar(sentence):
+    if randint(0, 10) < 2:
+        return False
+    else:
+        return True
diff --git a/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info b/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info
new file mode 100644
index 0000000..0c58ec1
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info
@@ -0,0 +1,3 @@
+Metadata-Version: 1.2
+Name: nut
+Version: funkyversion
diff --git a/Lib/packaging/tests/fake_dists/strawberry-0.6.egg b/Lib/packaging/tests/fake_dists/strawberry-0.6.egg
new file mode 100644
index 0000000..6d160e8
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/strawberry-0.6.egg
Binary files differ
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA
new file mode 100644
index 0000000..ca46d0a
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA
@@ -0,0 +1,7 @@
+Metadata-Version: 1.2
+Name: towel-stuff
+Version: 0.1
+Provides-Dist: truffles (1.1.2)
+Provides-Dist: towel-stuff (0.1)
+Obsoletes-Dist: truffles (!=0.8,<1.0)
+Requires-Dist: bacon (<=0.2)
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py b/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py
new file mode 100644
index 0000000..191f895
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+class Towel(object):
+    """A towel, that one should never be without."""
+
+    def __init__(self, color='tie-dye'):
+        self.color = color
+        self.wrapped_obj = None
+
+    def wrap(self, obj):
+        """Wrap an object up in our towel."""
+        self.wrapped_obj = obj
+
+    def unwrap(self):
+        """Unwrap whatever is in our towel and return whatever it is."""
+        obj = self.wrapped_obj
+        self.wrapped_obj = None
+        return obj
diff --git a/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info b/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info
new file mode 100644
index 0000000..45f0cf8
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info
@@ -0,0 +1,3 @@
+Metadata-Version: 1.2
+Name: truffles
+Version: 5.0
diff --git a/Lib/packaging/tests/fixer/__init__.py b/Lib/packaging/tests/fixer/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fixer/__init__.py
diff --git a/Lib/packaging/tests/fixer/fix_idioms.py b/Lib/packaging/tests/fixer/fix_idioms.py
new file mode 100644
index 0000000..64f5ea0
--- /dev/null
+++ b/Lib/packaging/tests/fixer/fix_idioms.py
@@ -0,0 +1,134 @@
+"""Adjust some old Python 2 idioms to their modern counterparts.
+
+* Change some type comparisons to isinstance() calls:
+    type(x) == T -> isinstance(x, T)
+    type(x) is T -> isinstance(x, T)
+    type(x) != T -> not isinstance(x, T)
+    type(x) is not T -> not isinstance(x, T)
+
+* Change "while 1:" into "while True:".
+
+* Change both
+
+    v = list(EXPR)
+    v.sort()
+    foo(v)
+
+and the more general
+
+    v = EXPR
+    v.sort()
+    foo(v)
+
+into
+
+    v = sorted(EXPR)
+    foo(v)
+"""
+# Author: Jacques Frechet, Collin Winter
+
+# Local imports
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Call, Comma, Name, Node, syms
+
+CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
+TYPE = "power< 'type' trailer< '(' x=any ')' > >"
+
+class FixIdioms(fixer_base.BaseFix):
+
+    explicit = False  # run by default; no need to name this fixer explicitly
+
+    PATTERN = r"""
+        isinstance=comparison< %s %s T=any >
+        |
+        isinstance=comparison< T=any %s %s >
+        |
+        while_stmt< 'while' while='1' ':' any+ >
+        |
+        sorted=any<
+            any*
+            simple_stmt<
+              expr_stmt< id1=any '='
+                         power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
+              >
+              '\n'
+            >
+            sort=
+            simple_stmt<
+              power< id2=any
+                     trailer< '.' 'sort' > trailer< '(' ')' >
+              >
+              '\n'
+            >
+            next=any*
+        >
+        |
+        sorted=any<
+            any*
+            simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
+            sort=
+            simple_stmt<
+              power< id2=any
+                     trailer< '.' 'sort' > trailer< '(' ')' >
+              >
+              '\n'
+            >
+            next=any*
+        >
+    """ % (TYPE, CMP, CMP, TYPE)
+
+    def match(self, node):
+        r = super(FixIdioms, self).match(node)
+        # If we've matched one of the sort/sorted subpatterns above, we
+        # want to reject matches where the initial assignment and the
+        # subsequent .sort() call involve different identifiers.
+        if r and "sorted" in r:
+            if r["id1"] == r["id2"]:
+                return r
+            return None
+        return r
+
+    def transform(self, node, results):
+        if "isinstance" in results:
+            return self.transform_isinstance(node, results)
+        elif "while" in results:
+            return self.transform_while(node, results)
+        elif "sorted" in results:
+            return self.transform_sort(node, results)
+        else:
+            raise RuntimeError("Invalid match")
+
+    def transform_isinstance(self, node, results):
+        x = results["x"].clone() # The thing inside of type()
+        T = results["T"].clone() # The type being compared against
+        x.prefix = ""
+        T.prefix = " "
+        test = Call(Name("isinstance"), [x, Comma(), T])
+        if "n" in results:
+            test.prefix = " "
+            test = Node(syms.not_test, [Name("not"), test])
+        test.prefix = node.prefix
+        return test
+
+    def transform_while(self, node, results):
+        one = results["while"]
+        one.replace(Name("True", prefix=one.prefix))
+
+    def transform_sort(self, node, results):
+        sort_stmt = results["sort"]
+        next_stmt = results["next"]
+        list_call = results.get("list")
+        simple_expr = results.get("expr")
+
+        if list_call:
+            list_call.replace(Name("sorted", prefix=list_call.prefix))
+        elif simple_expr:
+            new = simple_expr.clone()
+            new.prefix = ""
+            simple_expr.replace(Call(Name("sorted"), [new],
+                                     prefix=simple_expr.prefix))
+        else:
+            raise RuntimeError("should not have reached here")
+        sort_stmt.remove()
+        if next_stmt:
+            next_stmt[0].prefix = sort_stmt._prefix
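
To see the fixer in action, it can be loaded through lib2to3's refactoring
machinery. A sketch (the sample source is made up)::

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(['packaging.tests.fixer.fix_idioms'])
    source = (
        'if type(x) == int:\n'
        '    while 1:\n'
        '        pass\n'
    )
    print(rt.refactor_string(source, '<demo>'))
    # if isinstance(x, int):
    #     while True:
    #         pass
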
diff --git a/Lib/packaging/tests/pypi_server.py b/Lib/packaging/tests/pypi_server.py
new file mode 100644
index 0000000..cdff07c
--- /dev/null
+++ b/Lib/packaging/tests/pypi_server.py
@@ -0,0 +1,446 @@
+"""Mock PyPI Server implementation, to use in tests.
+
+This module also provides a simple test case to extend if you need to use
+the PyPIServer throughout your test case. Be sure to read the documentation
+before any use.
+
+XXX TODO:
+
+The mock server can handle simple HTTP requests (to simulate a simple index)
+or XMLRPC requests, over HTTP. The two do not have the same interface to deal
+with, and I think it's a pain.
+
+A good idea could be to re-think a bit the way distributions are handled in the
+mock server. As it should return malformed HTML pages, we need to keep the
+static behavior.
+
+I think of something like that:
+
+    >>> server = PyPIMockServer()
+    >>> server.startHTTP()
+    >>> server.startXMLRPC()
+
+Then, the server must have only one port to rely on, e.g.
+
+    >>> server.full_address()
+    "http://ip:port/"
+
+It could be simple to have one HTTP server, relaying the requests to the two
+implementations (static HTTP and XMLRPC over HTTP).
+"""
+
+import os
+import queue
+import select
+import threading
+import socketserver
+from functools import wraps
+from http.server import HTTPServer, SimpleHTTPRequestHandler
+from xmlrpc.server import SimpleXMLRPCServer
+
+from packaging.tests import unittest
+
+PYPI_DEFAULT_STATIC_PATH = os.path.join(
+    os.path.dirname(os.path.abspath(__file__)), 'pypiserver')
+
+
+def use_xmlrpc_server(*server_args, **server_kwargs):
+    server_kwargs['serve_xmlrpc'] = True
+    return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_http_server(*server_args, **server_kwargs):
+    server_kwargs['serve_xmlrpc'] = False
+    return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_pypi_server(*server_args, **server_kwargs):
+    """Decorator to make use of the PyPIServer for test methods,
+    just when needed, and not for the entire duration of the testcase.
+    """
+    def wrapper(func):
+        @wraps(func)
+        def wrapped(*args, **kwargs):
+            server = PyPIServer(*server_args, **server_kwargs)
+            server.start()
+            try:
+                func(server=server, *args, **kwargs)
+            finally:
+                server.stop()
+        return wrapped
+    return wrapper
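+
+# Usage sketch for the decorator above (the test method is hypothetical):
+# the running server is passed to the decorated test as the ``server``
+# keyword argument and stopped again when the test returns.
+#
+#     class IndexTestCase(unittest.TestCase):
+#
+#         @use_http_server()
+#         def test_request_index(self, server):
+#             server.default_response_status = 200
+#             server.default_response_data = 'all good'
+#             # exercise code that talks to server.full_address here;
+#             # every request made is then recorded in server.requests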
+
+
+class PyPIServerTestCase(unittest.TestCase):
+
+    def setUp(self):
+        super(PyPIServerTestCase, self).setUp()
+        self.pypi = PyPIServer()
+        self.pypi.start()
+        self.addCleanup(self.pypi.stop)
+
+
+class PyPIServer(threading.Thread):
+    """PyPI Mocked server.
+    Provides a mocked version of the PyPI API's, to ease tests.
+
+    Support serving static content and serving previously given text.
+    """
+
+    def __init__(self, test_static_path=None,
+                 static_filesystem_paths=None,
+                 static_uri_paths=["simple", "packages"], serve_xmlrpc=False):
+        """Initialize the server.
+
+        Default behavior is to start the HTTP server. You can start the
+        xmlrpc server instead by setting serve_xmlrpc to True. Caution: only
+        one server will be started.
+
+        static_uri_paths and static_filesystem_paths are used to provide,
+        respectively, the HTTP paths to serve statically and where to find
+        the matching files on the filesystem.
+        """
+        # we want to launch the server in a new dedicated thread, so as not
+        # to freeze the tests
+        threading.Thread.__init__(self)
+        if static_filesystem_paths is None:
+            # the default is set here rather than in the signature so that
+            # instances do not share (and mutate) one default list
+            static_filesystem_paths = ["default"]
+        self._run = True
+        self._serve_xmlrpc = serve_xmlrpc
+
+        # TODO: allow serving XMLRPC and HTTP static files at the same time.
+        if not self._serve_xmlrpc:
+            self.server = HTTPServer(('127.0.0.1', 0), PyPIRequestHandler)
+            self.server.RequestHandlerClass.pypi_server = self
+
+            self.request_queue = queue.Queue()
+            self._requests = []
+            self.default_response_status = 404
+            self.default_response_headers = [('Content-type', 'text/plain')]
+            self.default_response_data = "The page does not exist"
+
+            # initialize static paths / filesystems
+            self.static_uri_paths = static_uri_paths
+
+            # append the static paths defined locally
+            if test_static_path is not None:
+                static_filesystem_paths.append(test_static_path)
+            self.static_filesystem_paths = [
+                PYPI_DEFAULT_STATIC_PATH + "/" + path
+                for path in static_filesystem_paths]
+        else:
+            # XMLRPC server
+            self.server = PyPIXMLRPCServer(('127.0.0.1', 0))
+            self.xmlrpc = XMLRPCMockIndex()
+            # register the xmlrpc methods
+            self.server.register_introspection_functions()
+            self.server.register_instance(self.xmlrpc)
+
+        self.address = ('127.0.0.1', self.server.server_port)
+        # to not have unwanted outputs.
+        self.server.RequestHandlerClass.log_request = lambda *_: None
+
+    def run(self):
+        # loop because we can't stop it otherwise, for python < 2.6
+        while self._run:
+            r, w, e = select.select([self.server], [], [], 0.5)
+            if r:
+                self.server.handle_request()
+
+    def stop(self):
+        """self shutdown is not supported for python < 2.6"""
+        self._run = False
+        if self.is_alive():
+            self.join()
+        self.server.server_close()
+
+    def get_next_response(self):
+        return (self.default_response_status,
+                self.default_response_headers,
+                self.default_response_data)
+
+    @property
+    def requests(self):
+        """Use this property to get all requests that have been made
+        to the server
+        """
+        while True:
+            try:
+                self._requests.append(self.request_queue.get_nowait())
+            except queue.Empty:
+                break
+        return self._requests
+
+    @property
+    def full_address(self):
+        return "http://%s:%s" % self.address
+
+
+class PyPIRequestHandler(SimpleHTTPRequestHandler):
+    # we need to access the pypi server while serving the content
+    pypi_server = None
+
+    def serve_request(self):
+        """Serve the content.
+
+        Also record the requests so they can be accessed later. If the
+        requested URL matches a static URI, serve static content; otherwise
+        serve what is provided by the `get_next_response` method.
+
+        If nothing is defined there, return a 404 header.
+        """
+        # record the request. Read the input only on PUT or POST requests
+        if self.command in ("PUT", "POST"):
+            if 'content-length' in self.headers:
+                request_data = self.rfile.read(
+                    int(self.headers['content-length']))
+            else:
+                request_data = self.rfile.read()
+
+        elif self.command in ("GET", "DELETE"):
+            request_data = ''
+
+        self.pypi_server.request_queue.put((self, request_data))
+
+        # serve the content from local disc if we request an URL beginning
+        # by a pattern defined in `static_paths`
+        url_parts = self.path.split("/")
+        if (len(url_parts) > 1 and
+                url_parts[1] in self.pypi_server.static_uri_paths):
+            data = None
+            # paths registered last take precedence: search them first.
+            fs_paths = []
+            fs_paths.extend(self.pypi_server.static_filesystem_paths)
+            fs_paths.reverse()
+            relative_path = self.path
+            if relative_path.endswith("/"):
+                relative_path += "index.html"
+
+            for fs_path in fs_paths:
+                try:
+                    if relative_path.endswith('.tar.gz'):
+                        with open(fs_path + relative_path, 'rb') as file:
+                            data = file.read()
+                        headers = [('Content-type', 'application/x-gtar')]
+                    else:
+                        with open(fs_path + relative_path) as file:
+                            data = file.read().encode()
+                        headers = [('Content-type', 'text/html')]
+
+                    headers.append(('Content-Length', len(data)))
+                    self.make_response(data, headers=headers)
+                    # stop at the first filesystem path that matches
+                    break
+
+                except IOError:
+                    pass
+
+            if data is None:
+                self.make_response("Not found", 404)
+
+        # otherwise serve the content from get_next_response
+        else:
+            # send back a response
+            status, headers, data = self.pypi_server.get_next_response()
+            self.make_response(data, status, headers)
+
+    do_POST = do_GET = do_DELETE = do_PUT = serve_request
+
+    def make_response(self, data, status=200,
+                      headers=[('Content-type', 'text/html')]):
+        """Send the response to the HTTP client"""
+        if not isinstance(status, int):
+            try:
+                status = int(status)
+            except ValueError:
+                # we probably got something like "404 Not Found";
+                # just take the first 3 digits
+                status = int(status[:3])
+
+        self.send_response(status)
+        for header, value in headers:
+            self.send_header(header, value)
+        self.end_headers()
+
+        if type(data) is str:
+            data = data.encode()
+
+        self.wfile.write(data)
+
+
+class PyPIXMLRPCServer(SimpleXMLRPCServer):
+    def server_bind(self):
+        """Override server_bind to store the server name."""
+        socketserver.TCPServer.server_bind(self)
+        host, port = self.socket.getsockname()[:2]
+        self.server_port = port
+
+
+class MockDist:
+    """Fake distribution, used in the Mock PyPI Server"""
+
+    def __init__(self, name, version="1.0", hidden=False, url="http://url/",
+             type="sdist", filename="", size=10000,
+             digest="123456", downloads=7, has_sig=False,
+             python_version="source", comment="comment",
+             author="John Doe", author_email="john@doe.name",
+             maintainer="Main Tayner", maintainer_email="maintainer_mail",
+             project_url="http://project_url/", homepage="http://homepage/",
+             keywords="", platform="UNKNOWN", classifiers=[], licence="",
+             description="Description", summary="Summary", stable_version="",
+             ordering="", documentation_id="", code_kwalitee_id="",
+             installability_id="", obsoletes=[], obsoletes_dist=[],
+             provides=[], provides_dist=[], requires=[], requires_dist=[],
+             requires_external=[], requires_python=""):
+
+        # basic fields
+        self.name = name
+        self.version = version
+        self.hidden = hidden
+
+        # URL infos
+        self.url = url
+        self.digest = digest
+        self.downloads = downloads
+        self.has_sig = has_sig
+        self.python_version = python_version
+        self.comment = comment
+        self.type = type
+
+        # metadata
+        self.author = author
+        self.author_email = author_email
+        self.maintainer = maintainer
+        self.maintainer_email = maintainer_email
+        self.project_url = project_url
+        self.homepage = homepage
+        self.keywords = keywords
+        self.platform = platform
+        self.classifiers = classifiers
+        self.licence = licence
+        self.description = description
+        self.summary = summary
+        self.stable_version = stable_version
+        self.ordering = ordering
+        self.cheesecake_documentation_id = documentation_id
+        self.cheesecake_code_kwalitee_id = code_kwalitee_id
+        self.cheesecake_installability_id = installability_id
+
+        self.obsoletes = obsoletes
+        self.obsoletes_dist = obsoletes_dist
+        self.provides = provides
+        self.provides_dist = provides_dist
+        self.requires = requires
+        self.requires_dist = requires_dist
+        self.requires_external = requires_external
+        self.requires_python = requires_python
+
+    def url_infos(self):
+        return {
+            'url': self.url,
+            'packagetype': self.type,
+            'filename': 'filename.tar.gz',
+            'size': '6000',
+            'md5_digest': self.digest,
+            'downloads': self.downloads,
+            'has_sig': self.has_sig,
+            'python_version': self.python_version,
+            'comment_text': self.comment,
+        }
+
+    def metadata(self):
+        return {
+            'maintainer': self.maintainer,
+            'project_url': [self.project_url],
+            'maintainer_email': self.maintainer_email,
+            'cheesecake_code_kwalitee_id': self.cheesecake_code_kwalitee_id,
+            'keywords': self.keywords,
+            'obsoletes_dist': self.obsoletes_dist,
+            'requires_external': self.requires_external,
+            'author': self.author,
+            'author_email': self.author_email,
+            'download_url': self.url,
+            'platform': self.platform,
+            'version': self.version,
+            'obsoletes': self.obsoletes,
+            'provides': self.provides,
+            'cheesecake_documentation_id': self.cheesecake_documentation_id,
+            '_pypi_hidden': self.hidden,
+            'description': self.description,
+            '_pypi_ordering': 19,
+            'requires_dist': self.requires_dist,
+            'requires_python': self.requires_python,
+            'classifiers': [],
+            'name': self.name,
+            'licence': self.licence,
+            'summary': self.summary,
+            'home_page': self.homepage,
+            'stable_version': self.stable_version,
+            'provides_dist': self.provides_dist or "%s (%s)" % (self.name,
+                                                              self.version),
+            'requires': self.requires,
+            'cheesecake_installability_id': self.cheesecake_installability_id,
+        }
+
+    def search_result(self):
+        return {
+            '_pypi_ordering': 0,
+            'version': self.version,
+            'name': self.name,
+            'summary': self.summary,
+        }
+
+
+class XMLRPCMockIndex:
+    """Mock XMLRPC server"""
+
+    def __init__(self, dists=None):
+        # avoid a shared mutable default: each index gets its own list
+        self._dists = dists if dists is not None else []
+        self._search_result = []
+
+    def add_distributions(self, dists):
+        for dist in dists:
+            self._dists.append(MockDist(**dist))
+
+    def set_distributions(self, dists):
+        self._dists = []
+        self.add_distributions(dists)
+
+    def set_search_result(self, result):
+        """set a predefined search result"""
+        self._search_result = result
+
+    def _get_search_results(self):
+        results = []
+        for name in self._search_result:
+            found_dist = [d for d in self._dists if d.name == name]
+            if found_dist:
+                results.append(found_dist[0])
+            else:
+                dist = MockDist(name)
+                results.append(dist)
+                self._dists.append(dist)
+        return [r.search_result() for r in results]
+
+    def list_packages(self):
+        return [d.name for d in self._dists]
+
+    def package_releases(self, package_name, show_hidden=False):
+        if show_hidden:
+            # return all
+            return [d.version for d in self._dists if d.name == package_name]
+        else:
+            # return only un-hidden
+            return [d.version for d in self._dists if d.name == package_name
+                    and not d.hidden]
+
+    def release_urls(self, package_name, version):
+        return [d.url_infos() for d in self._dists
+                if d.name == package_name and d.version == version]
+
+    def release_data(self, package_name, version):
+        release = [d for d in self._dists
+                   if d.name == package_name and d.version == version]
+        if release:
+            return release[0].metadata()
+        else:
+            return {}
+
+    def search(self, spec, operator="and"):
+        return self._get_search_results()
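A minimal sketch of how this mock is meant to be driven from a test (not part
of the patch; it assumes the module is importable as
packaging.tests.pypi_server, as the file paths in this patch suggest, and that
no static fixture matches the requested path):

    from urllib.error import HTTPError
    from urllib.request import urlopen
    from xmlrpc.client import ServerProxy

    from packaging.tests.pypi_server import PyPIServer

    # HTTP mode: unknown paths get the default 404 response, and every
    # handled request is recorded and exposed by the `requests` property
    server = PyPIServer()
    server.start()
    try:
        try:
            urlopen(server.full_address + "/unknown/")
        except HTTPError:
            pass  # expected: the default response status is 404
        handler, data = server.requests[-1]
        assert handler.path == "/unknown/"
    finally:
        server.stop()

    # XML-RPC mode: the mocked index is populated directly, then queried
    # through a regular XML-RPC proxy
    server = PyPIServer(serve_xmlrpc=True)
    server.start()
    try:
        server.xmlrpc.set_distributions([{'name': 'foobar',
                                          'version': '0.1'}])
        proxy = ServerProxy(server.full_address)
        assert proxy.list_packages() == ['foobar']
    finally:
        server.stop()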
diff --git a/Lib/packaging/tests/pypi_test_server.py b/Lib/packaging/tests/pypi_test_server.py
new file mode 100644
index 0000000..8c8c641
--- /dev/null
+++ b/Lib/packaging/tests/pypi_test_server.py
@@ -0,0 +1,59 @@
+"""Test PyPI Server implementation at testpypi.python.org, to use in tests.
+
+This is a drop-in replacement for the mock pypi server for testing against a
+real pypi server hosted by python.org especially for testing against.
+"""
+
+import unittest
+
+PYPI_DEFAULT_STATIC_PATH = None
+
+
+def use_xmlrpc_server(*server_args, **server_kwargs):
+    server_kwargs['serve_xmlrpc'] = True
+    return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_http_server(*server_args, **server_kwargs):
+    server_kwargs['serve_xmlrpc'] = False
+    return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_pypi_server(*server_args, **server_kwargs):
+    """Decorator to make use of the PyPIServer for test methods,
+    only when needed, and not for the entire duration of the test case.
+    """
+    def wrapper(func):
+        def wrapped(*args, **kwargs):
+            server = PyPIServer(*server_args, **server_kwargs)
+            func(server=server, *args, **kwargs)
+        return wrapped
+    return wrapper
+
+
+class PyPIServerTestCase(unittest.TestCase):
+
+    def setUp(self):
+        super(PyPIServerTestCase, self).setUp()
+        self.pypi = PyPIServer()
+        self.pypi.start()
+        self.addCleanup(self.pypi.stop)
+
+
+class PyPIServer:
+    """Shim to access testpypi.python.org, for testing a real server."""
+
+    def __init__(self, test_static_path=None,
+                 static_filesystem_paths=["default"],
+                 static_uri_paths=["simple"], serve_xmlrpc=False):
+        self.address = ('testpypi.python.org', '80')
+
+    def start(self):
+        pass
+
+    def stop(self):
+        pass
+
+    @property
+    def full_address(self):
+        return "http://%s:%s" % self.address
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz b/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz
new file mode 100644
index 0000000..333961e
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz
Binary files differ
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html
new file mode 100644
index 0000000..b89f1bd
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="badmd5-0.1.tar.gz#md5=3e3d86693d6564c807272b11b3069dfe" rel="download">badmd5-0.1.tar.gz</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html
new file mode 100644
index 0000000..9e42b16
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="foobar-0.1.tar.gz#md5=fe18804c5b722ff024cabdf514924fc4" rel="download">foobar-0.1.tar.gz</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html
new file mode 100644
index 0000000..9baee04
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html
@@ -0,0 +1,2 @@
+<a href="foobar/">foobar/</a> 
+<a href="badmd5/">badmd5/</a> 
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html
new file mode 100644
index 0000000..c3d42c5
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for bar</title></head><body><h1>Links for bar</h1>
+<a rel="download" href="../../packages/source/F/bar/bar-1.0.tar.gz">bar-1.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/bar/bar-1.0.1.tar.gz">bar-1.0.1.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/bar/bar-2.0.tar.gz">bar-2.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/bar/bar-2.0.1.tar.gz">bar-2.0.1.tar.gz</a><br/> 
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html
new file mode 100644
index 0000000..4f34312
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for baz</title></head><body><h1>Links for baz</h1>
+<a rel="download" href="../../packages/source/F/baz/baz-1.0.tar.gz">baz-1.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/baz/baz-1.0.1.tar.gz">baz-1.0.1.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/baz/baz-2.0.tar.gz">baz-2.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/baz/baz-2.0.1.tar.gz">baz-2.0.1.tar.gz</a><br/> 
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html
new file mode 100644
index 0000000..0565e11
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for foo</title></head><body><h1>Links for foo</h1>
+<a rel="download" href="../../packages/source/F/foo/foo-1.0.tar.gz">foo-1.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/foo/foo-1.0.1.tar.gz">foo-1.0.1.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/foo/foo-2.0.tar.gz">foo-2.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/foo/foo-2.0.1.tar.gz">foo-2.0.1.tar.gz</a><br/> 
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html
new file mode 100644
index 0000000..a70cfd3
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html
@@ -0,0 +1,3 @@
+<a href="foo/">foo/</a> 
+<a href="bar/">bar/</a> 
+<a href="baz/">baz/</a> 
diff --git a/Lib/packaging/tests/pypiserver/project_list/simple/index.html b/Lib/packaging/tests/pypiserver/project_list/simple/index.html
new file mode 100644
index 0000000..b36d728
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/project_list/simple/index.html
@@ -0,0 +1,5 @@
+<a class="test" href="yeah">FooBar-bar</a>
+<a class="test" href="yeah">Foobar-baz</a>
+<a class="test" href="yeah">Baz-FooBar</a>
+<a class="test" href="yeah">Baz</a>
+<a class="test" href="yeah">Foo</a>
diff --git a/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html
new file mode 100644
index 0000000..a282a4e
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for Foobar</title></head><body><h1>Links for Foobar</h1>
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-1.0.tar.gz#md5=98fa833fdabcdd78d00245aead66c174">Foobar-1.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-1.0.1.tar.gz#md5=2351efb20f6b7b5d9ce80fa4cb1bd9ca">Foobar-1.0.1.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-2.0.tar.gz#md5=98fa833fdabcdd78d00245aead66c274">Foobar-2.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-2.0.1.tar.gz#md5=2352efb20f6b7b5d9ce80fa4cb2bd9ca">Foobar-2.0.1.tar.gz</a><br/> 
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html b/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a> 
diff --git a/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html b/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html
new file mode 100644
index 0000000..265ee0a
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html
@@ -0,0 +1 @@
+index.html from external server
diff --git a/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html b/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html
new file mode 100644
index 0000000..6f97667
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html
@@ -0,0 +1 @@
+Yeah
diff --git a/Lib/packaging/tests/pypiserver/with_externals/external/external.html b/Lib/packaging/tests/pypiserver/with_externals/external/external.html
new file mode 100644
index 0000000..92e4702
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_externals/external/external.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html
new file mode 100644
index 0000000..b100a26
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html
@@ -0,0 +1,4 @@
+<html><body>
+<a rel ="download" href="/foobar-0.1.tar.gz#md5=12345678901234567">foobar-0.1.tar.gz</a><br/>
+<a href="../../external/external.html" rel="homepage">external homepage</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_externals/simple/index.html b/Lib/packaging/tests/pypiserver/with_externals/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_externals/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a> 
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html b/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html
new file mode 100644
index 0000000..1cc0c32
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html
@@ -0,0 +1,7 @@
+<html>
+<body>
+<p>a rel=homepage HTML page</p>
+<a href="/foobar-2.0.tar.gz">foobar 2.0</a>
+</body>
+</html>
+
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html b/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html
new file mode 100644
index 0000000..f6ace22
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html
@@ -0,0 +1 @@
+A page linked without rel="download" or rel="homepage" link.
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html
new file mode 100644
index 0000000..171df93
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html
@@ -0,0 +1,6 @@
+<html><body>
+<a rel="download" href="/foobar-0.1.tar.gz" rel="download">foobar-0.1.tar.gz</a><br/>
+<a href="../../external/homepage.html" rel="homepage">external homepage</a><br/>
+<a href="../../external/nonrel.html">unrelated link</a><br/>
+<a href="/unrelated-0.2.tar.gz">unrelated download</a></br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html b/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a> 
diff --git a/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html
new file mode 100644
index 0000000..b2885ae
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html
@@ -0,0 +1,4 @@
+<html><body>
+<a rel="download" href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
+<a href="http://a-really-external-website/external/external.html" rel="homepage">external homepage</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html b/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a> 
diff --git a/Lib/packaging/tests/support.py b/Lib/packaging/tests/support.py
new file mode 100644
index 0000000..6d60b9e
--- /dev/null
+++ b/Lib/packaging/tests/support.py
@@ -0,0 +1,260 @@
+"""Support code for packaging test cases.
+
+A few helper classes are provided: LoggingCatcher, TempdirManager and
+EnvironRestorer. They are written to be used as mixins::
+
+    from packaging.tests import unittest
+    from packaging.tests.support import LoggingCatcher
+
+    class SomeTestCase(LoggingCatcher, unittest.TestCase):
+
+If you need to define a setUp method on your test class, you have to
+call the mixin class' setUp method or it won't work (same thing for
+tearDown):
+
+        def setUp(self):
+            super(SomeTestCase, self).setUp()
+            ... # other setup code
+
+Also provided is a DummyCommand class, useful to mock commands in the
+tests of another command that needs them, a create_distribution function
+and a skip_unless_symlink decorator.
+
+Each class or function has a docstring to explain its purpose and usage.
+"""
+
+import os
+import errno
+import shutil
+import logging
+import logging.handlers
+import weakref
+import tempfile
+
+from packaging import logger
+from packaging.dist import Distribution
+from packaging.tests import unittest
+from test.support import requires_zlib
+
+__all__ = ['LoggingCatcher', 'TempdirManager', 'EnvironRestorer',
+           'DummyCommand', 'unittest', 'create_distribution',
+           'skip_unless_symlink', 'requires_zlib']
+
+
+class _TestHandler(logging.handlers.BufferingHandler):
+    # stolen and adapted from test.support
+
+    def __init__(self):
+        logging.handlers.BufferingHandler.__init__(self, 0)
+        self.setLevel(logging.DEBUG)
+
+    def shouldFlush(self):
+        return False
+
+    def emit(self, record):
+        self.buffer.append(record)
+
+
+class LoggingCatcher:
+    """TestCase-compatible mixin to receive logging calls.
+
+    Upon setUp, instances of this class get a BufferingHandler that's
+    configured to record all messages logged to the 'packaging' logger.
+
+    Use get_logs to retrieve messages and self.loghandler.flush to discard
+    them.
+    """
+
+    def setUp(self):
+        super(LoggingCatcher, self).setUp()
+        self.loghandler = handler = _TestHandler()
+        self.old_level = logger.level
+        logger.addHandler(handler)
+        logger.setLevel(logging.DEBUG)  # we want all messages
+
+    def tearDown(self):
+        handler = self.loghandler
+        # All this is necessary to properly shut down the logging system and
+        # avoid a regrtest complaint.  Thanks to Vinay Sajip for the help.
+        handler.close()
+        logger.removeHandler(handler)
+        for ref in weakref.getweakrefs(handler):
+            logging._removeHandlerRef(ref)
+        del self.loghandler
+        logger.setLevel(self.old_level)
+        super(LoggingCatcher, self).tearDown()
+
+    def get_logs(self, *levels):
+        """Return all log messages with level in *levels*.
+
+        Without explicit levels given, returns all messages.
+        *levels* defaults to all levels.  For log calls with arguments (i.e.
+        logger.info('bla bla %s', arg)), the messages
+        Returns a list.
+
+        Example: self.get_logs(logging.WARN, logging.DEBUG).
+        """
+        if not levels:
+            return [log.getMessage() for log in self.loghandler.buffer]
+        return [log.getMessage() for log in self.loghandler.buffer
+                if log.levelno in levels]
+
+
+class TempdirManager:
+    """TestCase-compatible mixin to create temporary directories and files.
+
+    Directories and files created in a test_* method will be removed after it
+    has run.
+    """
+
+    def setUp(self):
+        super(TempdirManager, self).setUp()
+        self._basetempdir = tempfile.mkdtemp()
+        self._files = []
+
+    def tearDown(self):
+        shutil.rmtree(self._basetempdir,
+                      ignore_errors=os.name in ('nt', 'cygwin'))
+
+        for handle, name in self._files:
+            handle.close()
+            if os.path.exists(name):
+                try:
+                    os.remove(name)
+                except OSError as exc:
+                    if exc.errno != errno.ENOENT:
+                        raise
+
+        super(TempdirManager, self).tearDown()
+
+    def mktempfile(self):
+        """Create a read-write temporary file and return it."""
+        fd, fn = tempfile.mkstemp(dir=self._basetempdir)
+        os.close(fd)
+        fp = open(fn, 'w+')
+        self._files.append((fp, fn))
+        return fp
+
+    def mkdtemp(self):
+        """Create a temporary directory and return its path."""
+        d = tempfile.mkdtemp(dir=self._basetempdir)
+        return d
+
+    def write_file(self, path, content='xxx', encoding=None):
+        """Write a file at the given path.
+
+        path can be a string, a tuple or a list; if it's a tuple or list,
+        os.path.join will be used to produce a path.
+        """
+        if isinstance(path, (list, tuple)):
+            path = os.path.join(*path)
+        with open(path, 'w', encoding=encoding) as f:
+            f.write(content)
+
+    def create_dist(self, **kw):
+        """Create a stub distribution object and files.
+
+        This function creates a Distribution instance (use keyword arguments
+        to customize it) and a temporary directory with a project structure
+        (currently an empty directory).
+
+        It returns the path to the directory and the Distribution instance.
+        You can use self.write_file to write any file in that
+        directory, e.g. setup scripts or Python modules.
+        """
+        if 'name' not in kw:
+            kw['name'] = 'foo'
+        tmp_dir = self.mkdtemp()
+        project_dir = os.path.join(tmp_dir, kw['name'])
+        os.mkdir(project_dir)
+        dist = Distribution(attrs=kw)
+        return project_dir, dist
+
+    def assertIsFile(self, *args):
+        path = os.path.join(*args)
+        dirname = os.path.dirname(path)
+        file = os.path.basename(path)
+        if os.path.isdir(dirname):
+            files = os.listdir(dirname)
+            msg = "%s not found in %s: %s" % (file, dirname, files)
+            assert os.path.isfile(path), msg
+        else:
+            raise AssertionError(
+                    '%s not found. %s does not exist' % (file, dirname))
+
+    def assertIsNotFile(self, *args):
+        path = os.path.join(*args)
+        self.assertFalse(os.path.isfile(path), "%r exists" % path)
+
+
+class EnvironRestorer:
+    """TestCase-compatible mixin to restore or delete environment variables.
+
+    The variables to restore (or delete if they were not originally present)
+    must be explicitly listed in self.restore_environ.  It's better to be
+    aware of what we're modifying instead of saving and restoring the whole
+    environment.
+    """
+
+    def setUp(self):
+        super(EnvironRestorer, self).setUp()
+        self._saved = []
+        self._added = []
+        for key in self.restore_environ:
+            if key in os.environ:
+                self._saved.append((key, os.environ[key]))
+            else:
+                self._added.append(key)
+
+    def tearDown(self):
+        for key, value in self._saved:
+            os.environ[key] = value
+        for key in self._added:
+            os.environ.pop(key, None)
+        super(EnvironRestorer, self).tearDown()
+
+
+class DummyCommand:
+    """Class to store options for retrieval via set_undefined_options().
+
+    Useful for mocking one dependency command in the tests for another
+    command, see e.g. the dummy build command in test_build_scripts.
+    """
+
+    def __init__(self, **kwargs):
+        for kw, val in kwargs.items():
+            setattr(self, kw, val)
+
+    def ensure_finalized(self):
+        pass
+
+
+class TestDistribution(Distribution):
+    """Distribution subclasses that avoids the default search for
+    configuration files.
+
+    The ._config_files attribute must be set before
+    .parse_config_files() is called.
+    """
+
+    def find_config_files(self):
+        return self._config_files
+
+
+def create_distribution(configfiles=()):
+    """Prepares a distribution with given config files parsed."""
+    d = TestDistribution()
+    d.config.find_config_files = d.find_config_files
+    d._config_files = configfiles
+    d.parse_config_files()
+    d.parse_command_line()
+    return d
+
+
+try:
+    from test.support import skip_unless_symlink
+except ImportError:
+    skip_unless_symlink = unittest.skip(
+        'requires test.support.skip_unless_symlink')
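To illustrate the mixins documented above, here is a sketch of a test case
combining TempdirManager, LoggingCatcher and EnvironRestorer (not part of the
patch; the test body is invented for the example):

    import os
    import logging

    from packaging import logger
    from packaging.tests import unittest
    from packaging.tests.support import (LoggingCatcher, TempdirManager,
                                         EnvironRestorer)


    class ExampleTestCase(TempdirManager, LoggingCatcher, EnvironRestorer,
                          unittest.TestCase):

        # EnvironRestorer requires this attribute
        restore_environ = ['HOME']

        def test_example(self):
            # TempdirManager: files created here are removed in tearDown
            project_dir, dist = self.create_dist(name='example')
            self.write_file((project_dir, 'README'), 'hello')
            self.assertIsFile(project_dir, 'README')

            # EnvironRestorer: this change is rolled back after the test
            os.environ['HOME'] = self.mkdtemp()

            # LoggingCatcher: messages sent to the packaging logger are
            # buffered and can be inspected with get_logs
            logger.warning('%s is deprecated', 'this option')
            self.assertIn('this option is deprecated',
                          self.get_logs(logging.WARNING))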
diff --git a/Lib/packaging/tests/test_ccompiler.py b/Lib/packaging/tests/test_ccompiler.py
new file mode 100644
index 0000000..dd4bdd9
--- /dev/null
+++ b/Lib/packaging/tests/test_ccompiler.py
@@ -0,0 +1,15 @@
+"""Tests for distutils.compiler.ccompiler."""
+
+from packaging.compiler import ccompiler
+from packaging.tests import unittest, support
+
+
+class CCompilerTestCase(unittest.TestCase):
+    pass  # XXX need some tests on CCompiler
+
+
+def test_suite():
+    return unittest.makeSuite(CCompilerTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_bdist.py b/Lib/packaging/tests/test_command_bdist.py
new file mode 100644
index 0000000..1522b7e
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist.py
@@ -0,0 +1,77 @@
+"""Tests for distutils.command.bdist."""
+
+from packaging import util
+from packaging.command.bdist import bdist, show_formats
+
+from packaging.tests import unittest, support, captured_stdout
+
+
+class BuildTestCase(support.TempdirManager,
+                    support.LoggingCatcher,
+                    unittest.TestCase):
+
+    def _mock_get_platform(self):
+        self._get_platform_called = True
+        return self._get_platform()
+
+    def setUp(self):
+        super(BuildTestCase, self).setUp()
+
+        # mock util.get_platform
+        self._get_platform_called = False
+        self._get_platform = util.get_platform
+        util.get_platform = self._mock_get_platform
+
+    def tearDown(self):
+        super(BuildTestCase, self).tearDown()
+        util.get_platform = self._get_platform
+
+    def test_formats(self):
+
+        # let's create a command and make sure
+        # we can fix the format
+        pkg_pth, dist = self.create_dist()
+        cmd = bdist(dist)
+        cmd.formats = ['msi']
+        cmd.ensure_finalized()
+        self.assertEqual(cmd.formats, ['msi'])
+
+        # what formats does bdist offer?
+        # XXX an explicit list in bdist is not the best way to
+        # discover the bdist_* commands; we should add a registry
+        formats = sorted(('zip', 'gztar', 'bztar', 'ztar',
+                          'tar', 'wininst', 'msi'))
+        found = sorted(cmd.format_command)
+        self.assertEqual(found, formats)
+
+    def test_skip_build(self):
+        pkg_pth, dist = self.create_dist()
+        cmd = bdist(dist)
+        cmd.skip_build = False
+        cmd.formats = ['ztar']
+        cmd.ensure_finalized()
+        self.assertFalse(self._get_platform_called)
+
+        pkg_pth, dist = self.create_dist()
+        cmd = bdist(dist)
+        cmd.skip_build = True
+        cmd.formats = ['ztar']
+        cmd.ensure_finalized()
+        self.assertTrue(self._get_platform_called)
+
+    def test_show_formats(self):
+        __, stdout = captured_stdout(show_formats)
+
+        # the output should be a header line + one line per format
+        num_formats = len(bdist.format_commands)
+        output = [line for line in stdout.split('\n')
+                  if line.strip().startswith('--formats=')]
+        self.assertEqual(len(output), num_formats)
+
+
+def test_suite():
+    return unittest.makeSuite(BuildTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_bdist_dumb.py b/Lib/packaging/tests/test_command_bdist_dumb.py
new file mode 100644
index 0000000..41b0dd0
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist_dumb.py
@@ -0,0 +1,97 @@
+"""Tests for distutils.command.bdist_dumb."""
+
+import sys
+import os
+
+from packaging.dist import Distribution
+from packaging.command.bdist_dumb import bdist_dumb
+from packaging.tests import unittest, support
+from packaging.tests.support import requires_zlib
+
+
+SETUP_PY = """\
+from packaging.run import setup
+import foo
+
+setup(name='foo', version='0.1', py_modules=['foo'],
+      url='xxx', author='xxx', author_email='xxx')
+"""
+
+
+class BuildDumbTestCase(support.TempdirManager,
+                        support.LoggingCatcher,
+                        unittest.TestCase):
+
+    def setUp(self):
+        super(BuildDumbTestCase, self).setUp()
+        self.old_location = os.getcwd()
+        self.old_sys_argv = sys.argv, sys.argv[:]
+
+    def tearDown(self):
+        os.chdir(self.old_location)
+        sys.argv = self.old_sys_argv[0]
+        sys.argv[:] = self.old_sys_argv[1]
+        super(BuildDumbTestCase, self).tearDown()
+
+    @requires_zlib
+    def test_simple_built(self):
+
+        # let's create a simple package
+        tmp_dir = self.mkdtemp()
+        pkg_dir = os.path.join(tmp_dir, 'foo')
+        os.mkdir(pkg_dir)
+        self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
+        self.write_file((pkg_dir, 'foo.py'), '#')
+        self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
+        self.write_file((pkg_dir, 'README'), '')
+
+        dist = Distribution({'name': 'foo', 'version': '0.1',
+                             'py_modules': ['foo'],
+                             'url': 'xxx', 'author': 'xxx',
+                             'author_email': 'xxx'})
+        dist.script_name = 'setup.py'
+        os.chdir(pkg_dir)
+
+        sys.argv[:] = ['setup.py']
+        cmd = bdist_dumb(dist)
+
+        # so the output is the same no matter
+        # what the platform is
+        cmd.format = 'zip'
+
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # see what we have
+        dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
+        base = "%s.%s" % (dist.get_fullname(), cmd.plat_name)
+        if os.name == 'os2':
+            base = base.replace(':', '-')
+
+        wanted = ['%s.zip' % base]
+        self.assertEqual(dist_created, wanted)
+
+        # now let's check what we have in the zip file
+        # XXX to be done
+
+    def test_finalize_options(self):
+        pkg_dir, dist = self.create_dist()
+        os.chdir(pkg_dir)
+        cmd = bdist_dumb(dist)
+        self.assertEqual(cmd.bdist_dir, None)
+        cmd.finalize_options()
+
+        # bdist_dir is initialized to bdist_base/dumb if not set
+        base = cmd.get_finalized_command('bdist').bdist_base
+        self.assertEqual(cmd.bdist_dir, os.path.join(base, 'dumb'))
+
+        # the format is set to a default value depending on os.name
+        default = cmd.default_format[os.name]
+        self.assertEqual(cmd.format, default)
+
+
+def test_suite():
+    return unittest.makeSuite(BuildDumbTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_bdist_msi.py b/Lib/packaging/tests/test_command_bdist_msi.py
new file mode 100644
index 0000000..fded962
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist_msi.py
@@ -0,0 +1,25 @@
+"""Tests for distutils.command.bdist_msi."""
+import sys
+
+from packaging.tests import unittest, support
+
+
+class BDistMSITestCase(support.TempdirManager,
+                       support.LoggingCatcher,
+                       unittest.TestCase):
+
+    @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+    def test_minimal(self):
+        # minimal test XXX need more tests
+        from packaging.command.bdist_msi import bdist_msi
+        pkg_pth, dist = self.create_dist()
+        cmd = bdist_msi(dist)
+        cmd.ensure_finalized()
+
+
+def test_suite():
+    return unittest.makeSuite(BDistMSITestCase)
+
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_bdist_wininst.py b/Lib/packaging/tests/test_command_bdist_wininst.py
new file mode 100644
index 0000000..09bdaad
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist_wininst.py
@@ -0,0 +1,32 @@
+"""Tests for distutils.command.bdist_wininst."""
+
+from packaging.command.bdist_wininst import bdist_wininst
+from packaging.tests import unittest, support
+
+
+class BuildWinInstTestCase(support.TempdirManager,
+                           support.LoggingCatcher,
+                           unittest.TestCase):
+
+    def test_get_exe_bytes(self):
+
+        # issue5731: command was broken on non-windows platforms
+        # this test makes sure it works now for every platform
+        # let's create a command
+        pkg_pth, dist = self.create_dist()
+        cmd = bdist_wininst(dist)
+        cmd.ensure_finalized()
+
+        # let's run the code that finds the right wininst*.exe file
+        # and make sure it finds it and returns its content
+        # no matter what platform we have
+        exe_file = cmd.get_exe_bytes()
+        self.assertGreater(len(exe_file), 10)
+
+
+def test_suite():
+    return unittest.makeSuite(BuildWinInstTestCase)
+
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_build.py b/Lib/packaging/tests/test_command_build.py
new file mode 100644
index 0000000..91fbe42
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build.py
@@ -0,0 +1,55 @@
+"""Tests for distutils.command.build."""
+import os
+import sys
+
+from packaging.command.build import build
+from sysconfig import get_platform
+from packaging.tests import unittest, support
+
+
+class BuildTestCase(support.TempdirManager,
+                    support.LoggingCatcher,
+                    unittest.TestCase):
+
+    def test_finalize_options(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = build(dist)
+        cmd.finalize_options()
+
+        # if not specified, plat_name gets the current platform
+        self.assertEqual(cmd.plat_name, get_platform())
+
+        # build_purelib is build + lib
+        wanted = os.path.join(cmd.build_base, 'lib')
+        self.assertEqual(cmd.build_purelib, wanted)
+
+        # build_platlib is 'build/lib.platform-x.x[-pydebug]'
+        # examples:
+        #   build/lib.macosx-10.3-i386-2.7
+        plat_spec = '.%s-%s' % (cmd.plat_name, sys.version[0:3])
+        if hasattr(sys, 'gettotalrefcount'):
+            self.assertTrue(cmd.build_platlib.endswith('-pydebug'))
+            plat_spec += '-pydebug'
+        wanted = os.path.join(cmd.build_base, 'lib' + plat_spec)
+        self.assertEqual(cmd.build_platlib, wanted)
+
+        # by default, build_lib = build_purelib
+        self.assertEqual(cmd.build_lib, cmd.build_purelib)
+
+        # build_temp is build/temp.<plat>
+        wanted = os.path.join(cmd.build_base, 'temp' + plat_spec)
+        self.assertEqual(cmd.build_temp, wanted)
+
+        # build_scripts is build/scripts-x.x
+        wanted = os.path.join(cmd.build_base, 'scripts-' + sys.version[0:3])
+        self.assertEqual(cmd.build_scripts, wanted)
+
+        # executable is os.path.normpath(sys.executable)
+        self.assertEqual(cmd.executable, os.path.normpath(sys.executable))
+
+
+def test_suite():
+    return unittest.makeSuite(BuildTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_build_clib.py b/Lib/packaging/tests/test_command_build_clib.py
new file mode 100644
index 0000000..a2a8583
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_clib.py
@@ -0,0 +1,141 @@
+"""Tests for distutils.command.build_clib."""
+import os
+import sys
+
+from packaging.util import find_executable
+from packaging.command.build_clib import build_clib
+from packaging.errors import PackagingSetupError
+from packaging.tests import unittest, support
+
+
+class BuildCLibTestCase(support.TempdirManager,
+                        support.LoggingCatcher,
+                        unittest.TestCase):
+
+    def test_check_library_dist(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = build_clib(dist)
+
+        # 'libraries' option must be a list
+        self.assertRaises(PackagingSetupError, cmd.check_library_list, 'foo')
+
+        # each element of 'libraries' must a 2-tuple
+        self.assertRaises(PackagingSetupError, cmd.check_library_list,
+                          ['foo1', 'foo2'])
+
+        # first element of each tuple in 'libraries'
+        # must be a string (the library name)
+        self.assertRaises(PackagingSetupError, cmd.check_library_list,
+                          [(1, 'foo1'), ('name', 'foo2')])
+
+        # library name may not contain directory separators
+        self.assertRaises(PackagingSetupError, cmd.check_library_list,
+                          [('name', 'foo1'),
+                           ('another/name', 'foo2')])
+
+        # second element of each tuple must be a dictionary (build info)
+        self.assertRaises(PackagingSetupError, cmd.check_library_list,
+                          [('name', {}),
+                           ('another', 'foo2')])
+
+        # those work
+        libs = [('name', {}), ('name', {'ok': 'good'})]
+        cmd.check_library_list(libs)
+
+    def test_get_source_files(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = build_clib(dist)
+
+        # "in 'libraries' option 'sources' must be present and must be
+        # a list of source filenames
+        cmd.libraries = [('name', {})]
+        self.assertRaises(PackagingSetupError, cmd.get_source_files)
+
+        cmd.libraries = [('name', {'sources': 1})]
+        self.assertRaises(PackagingSetupError, cmd.get_source_files)
+
+        cmd.libraries = [('name', {'sources': ['a', 'b']})]
+        self.assertEqual(cmd.get_source_files(), ['a', 'b'])
+
+        cmd.libraries = [('name', {'sources': ('a', 'b')})]
+        self.assertEqual(cmd.get_source_files(), ['a', 'b'])
+
+        cmd.libraries = [('name', {'sources': ('a', 'b')}),
+                         ('name2', {'sources': ['c', 'd']})]
+        self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd'])
+
+    def test_build_libraries(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = build_clib(dist)
+
+        class FakeCompiler:
+            def compile(*args, **kw):
+                pass
+            create_static_lib = compile
+
+        cmd.compiler = FakeCompiler()
+
+        # build_libraries is also doing a bit of type checking
+        lib = [('name', {'sources': 'notvalid'})]
+        self.assertRaises(PackagingSetupError, cmd.build_libraries, lib)
+
+        lib = [('name', {'sources': []})]
+        cmd.build_libraries(lib)
+
+        lib = [('name', {'sources': ()})]
+        cmd.build_libraries(lib)
+
+    def test_finalize_options(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = build_clib(dist)
+
+        cmd.include_dirs = 'one-dir'
+        cmd.finalize_options()
+        self.assertEqual(cmd.include_dirs, ['one-dir'])
+
+        cmd.include_dirs = None
+        cmd.finalize_options()
+        self.assertEqual(cmd.include_dirs, [])
+
+        cmd.distribution.libraries = 'WONTWORK'
+        self.assertRaises(PackagingSetupError, cmd.finalize_options)
+
+    @unittest.skipIf(sys.platform == 'win32', 'disabled on win32')
+    def test_run(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = build_clib(dist)
+
+        foo_c = os.path.join(pkg_dir, 'foo.c')
+        self.write_file(foo_c, 'int main(void) { return 1;}\n')
+        cmd.libraries = [('foo', {'sources': [foo_c]})]
+
+        build_temp = os.path.join(pkg_dir, 'build')
+        os.mkdir(build_temp)
+        cmd.build_temp = build_temp
+        cmd.build_clib = build_temp
+
+        # before we run the command, we want to make sure
+        # all commands are present on the system
+        # by creating a compiler and checking its executables
+        from packaging.compiler import new_compiler, customize_compiler
+
+        compiler = new_compiler()
+        customize_compiler(compiler)
+        for ccmd in compiler.executables.values():
+            if ccmd is None:
+                continue
+            if find_executable(ccmd[0]) is None:
+                raise unittest.SkipTest("can't test")
+
+        # this should work
+        cmd.run()
+
+        # let's check the result
+        self.assertIn('libfoo.a', os.listdir(build_temp))
+
+
+def test_suite():
+    return unittest.makeSuite(BuildCLibTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
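The executable-availability check in test_run above is a reusable pattern; a
hedged sketch of it extracted into a standalone helper (the helper name is
invented for the example, the calls are the same ones used in the test):

    from packaging.compiler import new_compiler, customize_compiler
    from packaging.util import find_executable

    def missing_compiler_executable():
        """Return the first compiler executable that cannot be found,
        or None if they are all available."""
        compiler = new_compiler()
        customize_compiler(compiler)
        for ccmd in compiler.executables.values():
            if ccmd is None:
                continue
            if find_executable(ccmd[0]) is None:
                return ccmd[0]
        return None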
diff --git a/Lib/packaging/tests/test_command_build_ext.py b/Lib/packaging/tests/test_command_build_ext.py
new file mode 100644
index 0000000..fba27c7
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_ext.py
@@ -0,0 +1,380 @@
+import os
+import sys
+import site
+import shutil
+import sysconfig
+from io import StringIO
+from packaging.dist import Distribution
+from packaging.errors import UnknownFileError, CompileError
+from packaging.command.build_ext import build_ext
+from packaging.compiler.extension import Extension
+
+from packaging.tests import support, unittest, verbose, unload
+
+# http://bugs.python.org/issue4373
+# Don't load the xx module more than once.
+ALREADY_TESTED = False
+
+
+def _get_source_filename():
+    srcdir = sysconfig.get_config_var('srcdir')
+    return os.path.join(srcdir, 'Modules', 'xxmodule.c')
+
+
+class BuildExtTestCase(support.TempdirManager,
+                       support.LoggingCatcher,
+                       unittest.TestCase):
+    def setUp(self):
+        # Create a simple test environment
+        # Note that we're making changes to sys.path
+        super(BuildExtTestCase, self).setUp()
+        self.tmp_dir = self.mkdtemp()
+        self.sys_path = sys.path, sys.path[:]
+        sys.path.append(self.tmp_dir)
+        filename = _get_source_filename()
+        if os.path.exists(filename):
+            shutil.copy(filename, self.tmp_dir)
+        self.old_user_base = site.USER_BASE
+        site.USER_BASE = self.mkdtemp()
+        build_ext.USER_BASE = site.USER_BASE
+
+    def _fixup_command(self, cmd):
+        # When Python was built with --enable-shared, -L. is not good enough
+        # to find the libpython<blah>.so.  This is because regrtest runs it
+        # under a tempdir, not in the top level where the .so lives.  By the
+        # time we've gotten here, Python's already been chdir'd to the
+        # tempdir.
+        #
+        # To further add to the fun, we can't just add library_dirs to the
+        # Extension() instance because that doesn't get plumbed through to the
+        # final compiler command.
+        if (sysconfig.get_config_var('Py_ENABLE_SHARED') and
+            not sys.platform.startswith('win')):
+            runshared = sysconfig.get_config_var('RUNSHARED')
+            if runshared is None:
+                cmd.library_dirs = ['.']
+            else:
+                name, equals, value = runshared.partition('=')
+                cmd.library_dirs = value.split(os.pathsep)
+
+    def test_build_ext(self):
+        global ALREADY_TESTED
+        xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
+        if not os.path.exists(xx_c):
+            # skip if we cannot find the module source
+            self.skipTest('xxmodule.c not found')
+        xx_ext = Extension('xx', [xx_c])
+        dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
+        dist.package_dir = self.tmp_dir
+        cmd = build_ext(dist)
+        self._fixup_command(cmd)
+
+        if os.name == "nt":
+            # On Windows, we must build a debug version iff running
+            # a debug build of Python
+            cmd.debug = sys.executable.endswith("_d.exe")
+        cmd.build_lib = self.tmp_dir
+        cmd.build_temp = self.tmp_dir
+
+        old_stdout = sys.stdout
+        if not verbose:
+            # silence compiler output
+            sys.stdout = StringIO()
+        try:
+            cmd.ensure_finalized()
+            cmd.run()
+        finally:
+            sys.stdout = old_stdout
+
+        if ALREADY_TESTED:
+            return
+        else:
+            ALREADY_TESTED = True
+
+        import xx
+
+        for attr in ('error', 'foo', 'new', 'roj'):
+            self.assertTrue(hasattr(xx, attr))
+
+        self.assertEqual(xx.foo(2, 5), 7)
+        self.assertEqual(xx.foo(13, 15), 28)
+        self.assertEqual(xx.new().demo(), None)
+        doc = 'This is a template module just for instruction.'
+        self.assertEqual(xx.__doc__, doc)
+        self.assertTrue(isinstance(xx.Null(), xx.Null))
+        self.assertTrue(isinstance(xx.Str(), xx.Str))
+
+    def tearDown(self):
+        # Get everything back to normal
+        unload('xx')
+        sys.path = self.sys_path[0]
+        sys.path[:] = self.sys_path[1]
+        if sys.version > "2.6":
+            site.USER_BASE = self.old_user_base
+            build_ext.USER_BASE = self.old_user_base
+
+        super(BuildExtTestCase, self).tearDown()
+
+    def test_solaris_enable_shared(self):
+        dist = Distribution({'name': 'xx'})
+        cmd = build_ext(dist)
+        old = sys.platform
+
+        sys.platform = 'sunos'  # fooling finalize_options
+        from sysconfig import _CONFIG_VARS
+
+        old_var = _CONFIG_VARS.get('Py_ENABLE_SHARED')
+        _CONFIG_VARS['Py_ENABLE_SHARED'] = 1
+        try:
+            cmd.ensure_finalized()
+        finally:
+            sys.platform = old
+            if old_var is None:
+                del _CONFIG_VARS['Py_ENABLE_SHARED']
+            else:
+                _CONFIG_VARS['Py_ENABLE_SHARED'] = old_var
+
+        # make sure we get some library dirs under solaris
+        self.assertGreater(len(cmd.library_dirs), 0)
+
+    def test_user_site(self):
+        dist = Distribution({'name': 'xx'})
+        cmd = build_ext(dist)
+
+        # making sure the user option is there
+        options = [name for name, short, label in
+                   cmd.user_options]
+        self.assertIn('user', options)
+
+        # setting a value
+        cmd.user = True
+
+        # setting user based lib and include
+        lib = os.path.join(site.USER_BASE, 'lib')
+        incl = os.path.join(site.USER_BASE, 'include')
+        os.mkdir(lib)
+        os.mkdir(incl)
+
+        # let's run finalize
+        cmd.ensure_finalized()
+
+        # see if include_dirs and library_dirs
+        # were set
+        self.assertIn(lib, cmd.library_dirs)
+        self.assertIn(lib, cmd.rpath)
+        self.assertIn(incl, cmd.include_dirs)
+
+    def test_optional_extension(self):
+
+        # building this extension will fail; the failure propagates
+        # when optional is False and is ignored when optional is True.
+        modules = [Extension('foo', ['xxx'], optional=False)]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = build_ext(dist)
+        cmd.ensure_finalized()
+        self.assertRaises((UnknownFileError, CompileError),
+                          cmd.run)  # should raise an error
+
+        modules = [Extension('foo', ['xxx'], optional=True)]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = build_ext(dist)
+        cmd.ensure_finalized()
+        cmd.run()  # should pass
+
+    def test_finalize_options(self):
+        # Make sure Python's include directories (for Python.h, pyconfig.h,
+        # etc.) are in the include search path.
+        modules = [Extension('foo', ['xxx'], optional=False)]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = build_ext(dist)
+        cmd.finalize_options()
+
+        py_include = sysconfig.get_path('include')
+        self.assertIn(py_include, cmd.include_dirs)
+
+        plat_py_include = sysconfig.get_path('platinclude')
+        self.assertIn(plat_py_include, cmd.include_dirs)
+
+        # make sure cmd.libraries is turned into a list
+        # if it's a string
+        cmd = build_ext(dist)
+        cmd.libraries = 'my_lib'
+        cmd.finalize_options()
+        self.assertEqual(cmd.libraries, ['my_lib'])
+
+        # make sure cmd.library_dirs is turned into a list
+        # if it's a string
+        cmd = build_ext(dist)
+        cmd.library_dirs = 'my_lib_dir'
+        cmd.finalize_options()
+        self.assertIn('my_lib_dir', cmd.library_dirs)
+
+        # make sure rpath is turned into a list
+        # if it's a list of os.pathsep's paths
+        cmd = build_ext(dist)
+        cmd.rpath = os.pathsep.join(['one', 'two'])
+        cmd.finalize_options()
+        self.assertEqual(cmd.rpath, ['one', 'two'])
+
+        # XXX more tests to perform for win32
+
+        # make sure define is turned into 2-tuples
+        # strings if they are ','-separated strings
+        cmd = build_ext(dist)
+        cmd.define = 'one,two'
+        cmd.finalize_options()
+        self.assertEqual(cmd.define, [('one', '1'), ('two', '1')])
+
+        # make sure undef is turned into a list of
+        # strings if they are ','-separated strings
+        cmd = build_ext(dist)
+        cmd.undef = 'one,two'
+        cmd.finalize_options()
+        self.assertEqual(cmd.undef, ['one', 'two'])
+
+        # make sure swig_opts is turned into a list
+        cmd = build_ext(dist)
+        cmd.swig_opts = None
+        cmd.finalize_options()
+        self.assertEqual(cmd.swig_opts, [])
+
+        cmd = build_ext(dist)
+        cmd.swig_opts = '1 2'
+        cmd.finalize_options()
+        self.assertEqual(cmd.swig_opts, ['1', '2'])
+
+    def test_get_source_files(self):
+        modules = [Extension('foo', ['xxx'], optional=False)]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = build_ext(dist)
+        cmd.ensure_finalized()
+        self.assertEqual(cmd.get_source_files(), ['xxx'])
+
+    def test_compiler_option(self):
+        # cmd.compiler is an option and
+        # should not be overridden by a compiler instance
+        # when the command is run
+        dist = Distribution()
+        cmd = build_ext(dist)
+        cmd.compiler = 'unix'
+        cmd.ensure_finalized()
+        cmd.run()
+        self.assertEqual(cmd.compiler, 'unix')
+
+    def test_get_outputs(self):
+        tmp_dir = self.mkdtemp()
+        c_file = os.path.join(tmp_dir, 'foo.c')
+        self.write_file(c_file, 'void PyInit_foo(void) {};\n')
+        ext = Extension('foo', [c_file], optional=False)
+        dist = Distribution({'name': 'xx',
+                             'ext_modules': [ext]})
+        cmd = build_ext(dist)
+        self._fixup_command(cmd)
+        cmd.ensure_finalized()
+        self.assertEqual(len(cmd.get_outputs()), 1)
+
+        if os.name == "nt":
+            cmd.debug = sys.executable.endswith("_d.exe")
+
+        cmd.build_lib = os.path.join(self.tmp_dir, 'build')
+        cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')
+
+        # issue #5977: distutils build_ext.get_outputs
+        # returns wrong result with --inplace
+        other_tmp_dir = os.path.realpath(self.mkdtemp())
+        old_wd = os.getcwd()
+        os.chdir(other_tmp_dir)
+        try:
+            cmd.inplace = True
+            cmd.run()
+            so_file = cmd.get_outputs()[0]
+        finally:
+            os.chdir(old_wd)
+        self.assertTrue(os.path.exists(so_file))
+        so_ext = sysconfig.get_config_var('SO')
+        self.assertTrue(so_file.endswith(so_ext))
+        so_dir = os.path.dirname(so_file)
+        self.assertEqual(so_dir, other_tmp_dir)
+
+        cmd.inplace = False
+        cmd.run()
+        so_file = cmd.get_outputs()[0]
+        self.assertTrue(os.path.exists(so_file))
+        self.assertTrue(so_file.endswith(so_ext))
+        so_dir = os.path.dirname(so_file)
+        self.assertEqual(so_dir, cmd.build_lib)
+
+        # inplace = False, cmd.package = 'bar'
+        build_py = cmd.get_finalized_command('build_py')
+        build_py.package_dir = 'bar'
+        path = cmd.get_ext_fullpath('foo')
+        # checking that the last directory is the build_dir
+        path = os.path.split(path)[0]
+        self.assertEqual(path, cmd.build_lib)
+
+        # inplace = True, cmd.package = 'bar'
+        cmd.inplace = True
+        other_tmp_dir = os.path.realpath(self.mkdtemp())
+        old_wd = os.getcwd()
+        os.chdir(other_tmp_dir)
+        try:
+            path = cmd.get_ext_fullpath('foo')
+        finally:
+            os.chdir(old_wd)
+        # checking that the last directory is bar
+        path = os.path.split(path)[0]
+        lastdir = os.path.split(path)[-1]
+        self.assertEqual(lastdir, 'bar')
+
+    def test_ext_fullpath(self):
+        ext = sysconfig.get_config_vars()['SO']
+        # building lxml.etree inplace
+        dist = Distribution()
+        cmd = build_ext(dist)
+        cmd.inplace = True
+        cmd.distribution.package_dir = 'src'
+        cmd.distribution.packages = ['lxml', 'lxml.html']
+        curdir = os.getcwd()
+        wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+        path = cmd.get_ext_fullpath('lxml.etree')
+        self.assertEqual(wanted, path)
+
+        # building lxml.etree not inplace
+        cmd.inplace = False
+        cmd.build_lib = os.path.join(curdir, 'tmpdir')
+        wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
+        path = cmd.get_ext_fullpath('lxml.etree')
+        self.assertEqual(wanted, path)
+
+        # building twisted.runner.portmap not inplace
+        build_py = cmd.get_finalized_command('build_py')
+        build_py.package_dir = None
+        cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
+        path = cmd.get_ext_fullpath('twisted.runner.portmap')
+        wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
+                              'portmap' + ext)
+        self.assertEqual(wanted, path)
+
+        # building twisted.runner.portmap inplace
+        cmd.inplace = True
+        path = cmd.get_ext_fullpath('twisted.runner.portmap')
+        wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
+        self.assertEqual(wanted, path)
+
+
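+# A minimal sketch (not run by the test suite) of the option normalization
+# that test_finalize_options exercises: finalize_options coerces bare
+# strings into the list forms the compiler machinery expects.
+def _demo_option_normalization():
+    dist = Distribution({'name': 'xx'})
+    cmd = build_ext(dist)
+    cmd.libraries = 'my_lib'    # a bare string...
+    cmd.define = 'one,two'      # ...and a ','-separated string
+    cmd.finalize_options()
+    # now cmd.libraries == ['my_lib']
+    # and cmd.define == [('one', '1'), ('two', '1')]
+    return cmd
+
+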
+def test_suite():
+    src = _get_source_filename()
+    if not os.path.exists(src):
+        if verbose:
+            print('test_build_ext: Cannot find source code (test'
+                  ' must run in the Python build dir)')
+        return unittest.TestSuite()
+    else:
+        return unittest.makeSuite(BuildExtTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_build_py.py b/Lib/packaging/tests/test_command_build_py.py
new file mode 100644
index 0000000..b6d60de
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_py.py
@@ -0,0 +1,125 @@
+"""Tests for distutils.command.build_py."""
+
+import os
+import sys
+
+from packaging.command.build_py import build_py
+from packaging.dist import Distribution
+from packaging.errors import PackagingFileError
+
+from packaging.tests import unittest, support
+
+
+class BuildPyTestCase(support.TempdirManager,
+                      support.LoggingCatcher,
+                      unittest.TestCase):
+
+    def test_package_data(self):
+        sources = self.mkdtemp()
+        pkg_dir = os.path.join(sources, 'pkg')
+        os.mkdir(pkg_dir)
+        f = open(os.path.join(pkg_dir, "__init__.py"), "w")
+        try:
+            f.write("# Pretend this is a package.")
+        finally:
+            f.close()
+        f = open(os.path.join(pkg_dir, "README.txt"), "w")
+        try:
+            f.write("Info about this package")
+        finally:
+            f.close()
+
+        destination = self.mkdtemp()
+
+        dist = Distribution({"packages": ["pkg"],
+                             "package_dir": sources})
+        # script_name need not exist, it just needs to be initialized
+        dist.script_name = os.path.join(sources, "setup.py")
+        dist.command_obj["build"] = support.DummyCommand(
+            force=False,
+            build_lib=destination,
+            use_2to3_fixers=None,
+            convert_2to3_doctests=None,
+            use_2to3=False)
+        dist.packages = ["pkg"]
+        dist.package_data = {"pkg": ["README.txt"]}
+        dist.package_dir = sources
+
+        cmd = build_py(dist)
+        cmd.compile = True
+        cmd.ensure_finalized()
+        self.assertEqual(cmd.package_data, dist.package_data)
+
+        cmd.run()
+
+        # This makes sure the list of outputs includes byte-compiled
+        # files for Python modules but not for package data files
+        # (there shouldn't *be* byte-code files for those!).
+        #
+        self.assertEqual(len(cmd.get_outputs()), 3)
+        pkgdest = os.path.join(destination, "pkg")
+        files = os.listdir(pkgdest)
+        self.assertIn("__init__.py", files)
+        if not sys.dont_write_bytecode:
+            self.assertIn("__init__.pyc", files)
+        self.assertIn("README.txt", files)
+
+    def test_empty_package_dir(self):
+        # See SF 1668596/1720897.
+        cwd = os.getcwd()
+
+        # create the distribution files.
+        sources = self.mkdtemp()
+        pkg = os.path.join(sources, 'pkg')
+        os.mkdir(pkg)
+        open(os.path.join(pkg, "__init__.py"), "wb").close()
+        testdir = os.path.join(pkg, "doc")
+        os.mkdir(testdir)
+        open(os.path.join(testdir, "testfile"), "wb").close()
+
+        os.chdir(sources)
+
+        try:
+            dist = Distribution({"packages": ["pkg"],
+                                 "package_dir": sources,
+                                 "package_data": {"pkg": ["doc/*"]}})
+            # script_name need not exist, it just needs to be initialized
+            dist.script_name = os.path.join(sources, "setup.py")
+            dist.script_args = ["build"]
+            dist.parse_command_line()
+
+            try:
+                dist.run_commands()
+            except PackagingFileError:
+                self.fail("failed package_data test when package_dir is ''")
+        finally:
+            # Restore state.
+            os.chdir(cwd)
+
+    @unittest.skipUnless(hasattr(sys, 'dont_write_bytecode'),
+                         'sys.dont_write_bytecode not supported')
+    def test_dont_write_bytecode(self):
+        # makes sure byte_compile is not used
+        pkg_dir, dist = self.create_dist()
+        cmd = build_py(dist)
+        cmd.compile = True
+        cmd.optimize = 1
+
+        old_dont_write_bytecode = sys.dont_write_bytecode
+        sys.dont_write_bytecode = True
+        try:
+            cmd.byte_compile([])
+        finally:
+            sys.dont_write_bytecode = old_dont_write_bytecode
+
+        self.assertIn('byte-compiling is disabled', self.get_logs()[0])
+
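+# package_data recap: the mapping used above has package names as keys and
+# glob-style patterns (relative to the package directory) as values, e.g.
+# {'pkg': ['README.txt', 'doc/*']}; matching files are copied into build_lib
+# next to the package's modules but are never byte-compiled.
+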
+def test_suite():
+    return unittest.makeSuite(BuildPyTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_build_scripts.py b/Lib/packaging/tests/test_command_build_scripts.py
new file mode 100644
index 0000000..fd3ac24
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_scripts.py
@@ -0,0 +1,109 @@
+"""Tests for distutils.command.build_scripts."""
+
+import os
+import sys
+import sysconfig
+from packaging.dist import Distribution
+from packaging.command.build_scripts import build_scripts
+
+from packaging.tests import unittest, support
+
+
+class BuildScriptsTestCase(support.TempdirManager,
+                           support.LoggingCatcher,
+                           unittest.TestCase):
+
+    def test_default_settings(self):
+        cmd = self.get_build_scripts_cmd("/foo/bar", [])
+        self.assertFalse(cmd.force)
+        self.assertIs(cmd.build_dir, None)
+
+        cmd.finalize_options()
+
+        self.assertTrue(cmd.force)
+        self.assertEqual(cmd.build_dir, "/foo/bar")
+
+    def test_build(self):
+        source = self.mkdtemp()
+        target = self.mkdtemp()
+        expected = self.write_sample_scripts(source)
+
+        cmd = self.get_build_scripts_cmd(target,
+                                         [os.path.join(source, fn)
+                                          for fn in expected])
+        cmd.finalize_options()
+        cmd.run()
+
+        built = os.listdir(target)
+        for name in expected:
+            self.assertIn(name, built)
+
+    def get_build_scripts_cmd(self, target, scripts):
+        dist = Distribution()
+        dist.scripts = scripts
+        dist.command_obj["build"] = support.DummyCommand(
+            build_scripts=target,
+            force=True,
+            executable=sys.executable,
+            use_2to3=False,
+            use_2to3_fixers=None,
+            convert_2to3_doctests=None
+            )
+        return build_scripts(dist)
+
+    def write_sample_scripts(self, dir):
+        expected = []
+        expected.append("script1.py")
+        self.write_script(dir, "script1.py",
+                          ("#! /usr/bin/env python2.3\n"
+                           "# bogus script w/ Python sh-bang\n"
+                           "pass\n"))
+        expected.append("script2.py")
+        self.write_script(dir, "script2.py",
+                          ("#!/usr/bin/python\n"
+                           "# bogus script w/ Python sh-bang\n"
+                           "pass\n"))
+        expected.append("shell.sh")
+        self.write_script(dir, "shell.sh",
+                          ("#!/bin/sh\n"
+                           "# bogus shell script w/ sh-bang\n"
+                           "exit 0\n"))
+        return expected
+
+    def write_script(self, dir, name, text):
+        with open(os.path.join(dir, name), "w") as f:
+            f.write(text)
+
+    def test_version_int(self):
+        source = self.mkdtemp()
+        target = self.mkdtemp()
+        expected = self.write_sample_scripts(source)
+
+        cmd = self.get_build_scripts_cmd(target,
+                                         [os.path.join(source, fn)
+                                          for fn in expected])
+        cmd.finalize_options()
+
+        # http://bugs.python.org/issue4524
+        #
+        # On linux-g++-32 with command line `./configure --enable-ipv6
+        # --with-suffix=3`, Python compiled okay, but build_scripts failed
+        # when writing the name of the executable
+        old = sysconfig.get_config_vars().get('VERSION')
+        sysconfig._CONFIG_VARS['VERSION'] = 4
+        try:
+            cmd.run()
+        finally:
+            if old is not None:
+                sysconfig._CONFIG_VARS['VERSION'] = old
+
+        built = os.listdir(target)
+        for name in expected:
+            self.assertIn(name, built)
+
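+# For reference: build_scripts copies each script into build_dir and, when
+# the first line is a Python sh-bang, rewrites it to point at the target
+# interpreter; composing that interpreter name is where the non-string
+# VERSION of issue #4524 used to break.  shell.sh keeps its '#!/bin/sh'
+# line untouched.
+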
+def test_suite():
+    return unittest.makeSuite(BuildScriptsTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_check.py b/Lib/packaging/tests/test_command_check.py
new file mode 100644
index 0000000..8b32673
--- /dev/null
+++ b/Lib/packaging/tests/test_command_check.py
@@ -0,0 +1,131 @@
+"""Tests for distutils.command.check."""
+
+import logging
+from packaging.command.check import check
+from packaging.metadata import _HAS_DOCUTILS
+from packaging.errors import PackagingSetupError, MetadataMissingError
+from packaging.tests import unittest, support
+
+
+class CheckTestCase(support.LoggingCatcher,
+                    support.TempdirManager,
+                    unittest.TestCase):
+
+    def _run(self, metadata=None, **options):
+        if metadata is None:
+            metadata = {'name': 'xxx', 'version': '1.2'}
+        pkg_info, dist = self.create_dist(**metadata)
+        cmd = check(dist)
+        cmd.initialize_options()
+        for name, value in options.items():
+            setattr(cmd, name, value)
+        cmd.ensure_finalized()
+        cmd.run()
+        return cmd
+
+    def test_check_metadata(self):
+        # let's run the command with no metadata at all;
+        # by default, check examines the metadata, so we
+        # should get some warnings
+        cmd = self._run()
+        # trick: using assertNotEqual with an empty list will give us a more
+        # useful error message than assertGreater(.., 0) when the code
+        # changes and the test fails
+        self.assertNotEqual([], self.get_logs(logging.WARNING))
+
+        # now let's add the required fields
+        # and run it again, to make sure we don't get
+        # any warning anymore
+        self.loghandler.flush()
+        metadata = {'home_page': 'xxx', 'author': 'xxx',
+                    'author_email': 'xxx',
+                    'name': 'xxx', 'version': '4.2',
+                    }
+        cmd = self._run(metadata)
+        self.assertEqual([], self.get_logs(logging.WARNING))
+
+        # now with the strict mode, we should
+        # get an error if any metadata is missing
+        self.assertRaises(MetadataMissingError, self._run, {}, strict=1)
+        self.assertRaises(PackagingSetupError, self._run,
+                          {'name': 'xxx', 'version': 'xxx'}, strict=1)
+
+        # and of course, no error when all metadata fields are present
+        self.loghandler.flush()
+        cmd = self._run(metadata, strict=True)
+        self.assertEqual([], self.get_logs(logging.WARNING))
+
+    def test_check_metadata_1_2(self):
+        # let's run the command with no metadata at all;
+        # by default, check examines the metadata, so we
+        # should get some warnings
+        cmd = self._run()
+        self.assertNotEqual([], self.get_logs(logging.WARNING))
+
+        # now let's add the required fields and run it again, to make sure we
+        # don't get any warning anymore; let's use requires_python as a marker
+        # to enforce Metadata-Version 1.2
+        metadata = {'home_page': 'xxx', 'author': 'xxx',
+                    'author_email': 'xxx',
+                    'name': 'xxx', 'version': '4.2',
+                    'requires_python': '2.4',
+                    }
+        self.loghandler.flush()
+        cmd = self._run(metadata)
+        self.assertEqual([], self.get_logs(logging.WARNING))
+
+        # now with the strict mode, we should
+        # get an error if any metadata is missing
+        self.assertRaises(MetadataMissingError, self._run, {}, strict=1)
+        self.assertRaises(PackagingSetupError, self._run,
+                          {'name': 'xxx', 'version': 'xxx'}, strict=1)
+
+        # complain about version format
+        metadata['version'] = 'xxx'
+        self.assertRaises(PackagingSetupError, self._run, metadata,
+                          strict=1)
+
+        # now with correct version format again
+        metadata['version'] = '4.2'
+        self.loghandler.flush()
+        cmd = self._run(metadata, strict=True)
+        self.assertEqual([], self.get_logs(logging.WARNING))
+
+    @unittest.skipUnless(_HAS_DOCUTILS, "requires docutils")
+    def test_check_restructuredtext(self):
+        # let's see if it detects broken reST in long_description
+        broken_rest = 'title\n===\n\ntest'
+        pkg_info, dist = self.create_dist(description=broken_rest)
+        cmd = check(dist)
+        cmd.check_restructuredtext()
+        self.assertEqual(len(self.get_logs(logging.WARNING)), 1)
+
+        self.loghandler.flush()
+        pkg_info, dist = self.create_dist(description='title\n=====\n\ntest')
+        cmd = check(dist)
+        cmd.check_restructuredtext()
+        self.assertEqual([], self.get_logs(logging.WARNING))
+
+    def test_check_all(self):
+        self.assertRaises(PackagingSetupError, self._run,
+                          {'name': 'xxx', 'version': 'xxx'},
+                          strict=1, all=1)
+        self.assertRaises(MetadataMissingError, self._run,
+                          {}, strict=1, all=1)
+
+    def test_check_hooks(self):
+        pkg_info, dist = self.create_dist()
+        dist.command_options['install_dist'] = {
+            'pre_hook': ('file', {"a": 'some.nonextistant.hook.ghrrraarrhll'}),
+        }
+        cmd = check(dist)
+        cmd.check_hooks_resolvable()
+        self.assertEqual(len(self.get_logs(logging.WARNING)), 1)
+
+
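+# A minimal sketch (not run by the test suite) of driving the check command
+# programmatically with the strict/all flags exercised above.
+def _demo_strict_check(dist):
+    cmd = check(dist)
+    cmd.initialize_options()
+    cmd.strict = True   # missing metadata raises instead of warning
+    cmd.all = True      # also run the reST and hook checks
+    cmd.ensure_finalized()
+    cmd.run()
+    return cmd
+
+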
+def test_suite():
+    return unittest.makeSuite(CheckTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_clean.py b/Lib/packaging/tests/test_command_clean.py
new file mode 100644
index 0000000..8d29e4d
--- /dev/null
+++ b/Lib/packaging/tests/test_command_clean.py
@@ -0,0 +1,48 @@
+"""Tests for distutils.command.clean."""
+import os
+
+from packaging.command.clean import clean
+from packaging.tests import unittest, support
+
+
+class CleanTestCase(support.TempdirManager, support.LoggingCatcher,
+                    unittest.TestCase):
+
+    def test_simple_run(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = clean(dist)
+
+        # let's add some elements clean should remove
+        dirs = [(d, os.path.join(pkg_dir, d))
+                for d in ('build_temp', 'build_lib', 'bdist_base',
+                          'build_scripts', 'build_base')]
+
+        for name, path in dirs:
+            os.mkdir(path)
+            setattr(cmd, name, path)
+            if name == 'build_base':
+                continue
+            for f in ('one', 'two', 'three'):
+                self.write_file(os.path.join(path, f))
+
+        # let's run the command
+        cmd.all = True
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # make sure the files were removed
+        for name, path in dirs:
+            self.assertFalse(os.path.exists(path),
+                             '%r was not removed' % path)
+
+        # let's run the command again (should spit warnings but succeed)
+        cmd.all = True
+        cmd.ensure_finalized()
+        cmd.run()
+
+
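+# Note: with --all, clean removes bdist_base, build_lib, build_scripts and
+# build_temp in addition to build_base, and a second run over the already
+# cleaned tree only warns -- which is what the repeated run above asserts.
+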
+def test_suite():
+    return unittest.makeSuite(CleanTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_cmd.py b/Lib/packaging/tests/test_command_cmd.py
new file mode 100644
index 0000000..8ac9dce
--- /dev/null
+++ b/Lib/packaging/tests/test_command_cmd.py
@@ -0,0 +1,101 @@
+"""Tests for distutils.cmd."""
+import os
+
+from packaging.command.cmd import Command
+from packaging.dist import Distribution
+from packaging.errors import PackagingOptionError
+from packaging.tests import support, unittest
+
+
+class MyCmd(Command):
+    def initialize_options(self):
+        pass
+
+
+class CommandTestCase(support.LoggingCatcher,
+                      unittest.TestCase):
+
+    def setUp(self):
+        super(CommandTestCase, self).setUp()
+        dist = Distribution()
+        self.cmd = MyCmd(dist)
+
+    def test_make_file(self):
+        cmd = self.cmd
+
+        # making sure it raises when infiles is not a string or a list/tuple
+        self.assertRaises(TypeError, cmd.make_file,
+                          infiles=1, outfile='', func='func', args=())
+
+        # making sure execute gets called properly
+        def _execute(func, args, exec_msg, level):
+            self.assertEqual(exec_msg, 'generating out from in')
+        cmd.force = True
+        cmd.execute = _execute
+        cmd.make_file(infiles='in', outfile='out', func='func', args=())
+
+    def test_dump_options(self):
+        cmd = self.cmd
+        cmd.option1 = 1
+        cmd.option2 = 1
+        cmd.user_options = [('option1', '', ''), ('option2', '', '')]
+        cmd.dump_options()
+
+        wanted = ["command options for 'MyCmd':", '  option1 = 1',
+                  '  option2 = 1']
+        msgs = self.get_logs()
+        self.assertEqual(msgs, wanted)
+
+    def test_ensure_string(self):
+        cmd = self.cmd
+        cmd.option1 = 'ok'
+        cmd.ensure_string('option1')
+
+        cmd.option2 = None
+        cmd.ensure_string('option2', 'xxx')
+        self.assertTrue(hasattr(cmd, 'option2'))
+
+        cmd.option3 = 1
+        self.assertRaises(PackagingOptionError, cmd.ensure_string, 'option3')
+
+    def test_ensure_string_list(self):
+        cmd = self.cmd
+        cmd.option1 = 'ok,dok'
+        cmd.ensure_string_list('option1')
+        self.assertEqual(cmd.option1, ['ok', 'dok'])
+
+        cmd.yes_string_list = ['one', 'two', 'three']
+        cmd.yes_string_list2 = 'ok'
+        cmd.ensure_string_list('yes_string_list')
+        cmd.ensure_string_list('yes_string_list2')
+        self.assertEqual(cmd.yes_string_list, ['one', 'two', 'three'])
+        self.assertEqual(cmd.yes_string_list2, ['ok'])
+
+        cmd.not_string_list = ['one', 2, 'three']
+        cmd.not_string_list2 = object()
+        self.assertRaises(PackagingOptionError,
+                          cmd.ensure_string_list, 'not_string_list')
+
+        self.assertRaises(PackagingOptionError,
+                          cmd.ensure_string_list, 'not_string_list2')
+
+    def test_ensure_filename(self):
+        cmd = self.cmd
+        cmd.option1 = __file__
+        cmd.ensure_filename('option1')
+        cmd.option2 = 'xxx'
+        self.assertRaises(PackagingOptionError, cmd.ensure_filename, 'option2')
+
+    def test_ensure_dirname(self):
+        cmd = self.cmd
+        cmd.option1 = os.path.dirname(__file__) or os.curdir
+        cmd.ensure_dirname('option1')
+        cmd.option2 = 'xxx'
+        self.assertRaises(PackagingOptionError, cmd.ensure_dirname, 'option2')
+
+
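+# A minimal sketch (not run by the test suite): ensure_string_list mutates
+# the named attribute in place, splitting a ','-separated string into a list.
+def _demo_ensure_string_list():
+    cmd = MyCmd(Distribution())
+    cmd.formats = 'one,two'     # 'formats' is a hypothetical option name
+    cmd.ensure_string_list('formats')
+    return cmd.formats          # ['one', 'two']
+
+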
+def test_suite():
+    return unittest.makeSuite(CommandTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_config.py b/Lib/packaging/tests/test_command_config.py
new file mode 100644
index 0000000..6d780c5
--- /dev/null
+++ b/Lib/packaging/tests/test_command_config.py
@@ -0,0 +1,76 @@
+"""Tests for distutils.command.config."""
+import os
+import sys
+import logging
+
+from packaging.command.config import dump_file, config
+from packaging.tests import unittest, support
+
+
+class ConfigTestCase(support.LoggingCatcher,
+                     support.TempdirManager,
+                     unittest.TestCase):
+
+    def test_dump_file(self):
+        this_file = __file__.rstrip('co')
+        with open(this_file) as f:
+            numlines = len(f.readlines())
+
+        dump_file(this_file, 'I am the header')
+
+        logs = []
+        for log in self.get_logs(logging.INFO):
+            logs.extend(log.split('\n'))
+        self.assertEqual(len(logs), numlines + 2)
+
+    @unittest.skipIf(sys.platform == 'win32', 'disabled on win32')
+    def test_search_cpp(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = config(dist)
+
+        # simple pattern searches
+        match = cmd.search_cpp(pattern='xxx', body='// xxx')
+        self.assertEqual(match, 0)
+
+        match = cmd.search_cpp(pattern='_configtest', body='// xxx')
+        self.assertEqual(match, 1)
+
+    def test_finalize_options(self):
+        # finalize_options does a bit of transformation
+        # on options
+        pkg_dir, dist = self.create_dist()
+        cmd = config(dist)
+        cmd.include_dirs = 'one%stwo' % os.pathsep
+        cmd.libraries = 'one'
+        cmd.library_dirs = 'three%sfour' % os.pathsep
+        cmd.ensure_finalized()
+
+        self.assertEqual(cmd.include_dirs, ['one', 'two'])
+        self.assertEqual(cmd.libraries, ['one'])
+        self.assertEqual(cmd.library_dirs, ['three', 'four'])
+
+    def test_clean(self):
+        # _clean removes files
+        tmp_dir = self.mkdtemp()
+        f1 = os.path.join(tmp_dir, 'one')
+        f2 = os.path.join(tmp_dir, 'two')
+
+        self.write_file(f1, 'xxx')
+        self.write_file(f2, 'xxx')
+
+        for f in (f1, f2):
+            self.assertTrue(os.path.exists(f))
+
+        pkg_dir, dist = self.create_dist()
+        cmd = config(dist)
+        cmd._clean(f1, f2)
+
+        for f in (f1, f2):
+            self.assertFalse(os.path.exists(f))
+
+
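+# About test_search_cpp: search_cpp runs `body` through the C preprocessor
+# and scans the output for `pattern`.  '// xxx' is a comment, so 'xxx' never
+# reaches the output (match == 0); '_configtest' matches because the body is
+# written to a temporary _configtest.c whose name shows up in the
+# preprocessed output (match == 1).
+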
+def test_suite():
+    return unittest.makeSuite(ConfigTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_data.py b/Lib/packaging/tests/test_command_install_data.py
new file mode 100644
index 0000000..0486427
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_data.py
@@ -0,0 +1,87 @@
+"""Tests for packaging.command.install_data."""
+import os
+import sysconfig
+from sysconfig import _get_default_scheme
+from packaging.tests import unittest, support
+from packaging.command.install_data import install_data
+
+
+class InstallDataTestCase(support.TempdirManager,
+                          support.LoggingCatcher,
+                          unittest.TestCase):
+
+    def test_simple_run(self):
+        scheme = _get_default_scheme()
+        old_items = sysconfig._SCHEMES.items(scheme)
+        def restore():
+            sysconfig._SCHEMES.remove_section(scheme)
+            sysconfig._SCHEMES.add_section(scheme)
+            for option, value in old_items:
+                sysconfig._SCHEMES.set(scheme, option, value)
+        self.addCleanup(restore)
+
+        pkg_dir, dist = self.create_dist()
+        cmd = install_data(dist)
+        cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')
+
+        sysconfig._SCHEMES.set(scheme, 'inst',
+                               os.path.join(pkg_dir, 'inst'))
+        sysconfig._SCHEMES.set(scheme, 'inst2',
+                               os.path.join(pkg_dir, 'inst2'))
+
+        one = os.path.join(pkg_dir, 'one')
+        self.write_file(one, 'xxx')
+        inst2 = os.path.join(pkg_dir, 'inst2')
+        two = os.path.join(pkg_dir, 'two')
+        self.write_file(two, 'xxx')
+
+        cmd.data_files = {one: '{inst}/one', two: '{inst2}/two'}
+        self.assertCountEqual(cmd.get_inputs(), [one, two])
+
+        # let's run the command
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the result
+        self.assertEqual(len(cmd.get_outputs()), 2)
+        rtwo = os.path.split(two)[-1]
+        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+        rone = os.path.split(one)[-1]
+        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+        cmd.outfiles = []
+
+        # let's try with warn_dir set
+        cmd.warn_dir = True
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the result
+        self.assertEqual(len(cmd.get_outputs()), 2)
+        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+        cmd.outfiles = []
+
+        # now using root and empty dir
+        cmd.root = os.path.join(pkg_dir, 'root')
+        three = os.path.join(cmd.install_dir, 'three')
+        self.write_file(three, 'xx')
+
+        sysconfig._SCHEMES.set(scheme, 'inst3',
+                               cmd.install_dir)
+
+        cmd.data_files = {one: '{inst}/one', two: '{inst2}/two',
+                          three: '{inst3}/three'}
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the result
+        self.assertEqual(len(cmd.get_outputs()), 3)
+        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+
+
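+# The data_files targets above ('{inst}/one', '{inst2}/two', ...) are
+# sysconfig scheme placeholders: install_data expands each value against the
+# current install scheme, so registering extra keys via
+# sysconfig._SCHEMES.set(), as done at the start of the test, redirects
+# where each file lands.
+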
+def test_suite():
+    return unittest.makeSuite(InstallDataTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_dist.py b/Lib/packaging/tests/test_command_install_dist.py
new file mode 100644
index 0000000..1974a2f
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_dist.py
@@ -0,0 +1,207 @@
+"""Tests for packaging.command.install."""
+
+import os
+import sys
+
+from sysconfig import (get_scheme_names, get_config_vars,
+                       _SCHEMES, get_config_var, get_path)
+
+_CONFIG_VARS = get_config_vars()
+
+from packaging.tests import captured_stdout
+
+from packaging.command.install_dist import install_dist
+from packaging.command import install_dist as install_module
+from packaging.dist import Distribution
+from packaging.errors import PackagingOptionError
+
+from packaging.tests import unittest, support
+
+
+class InstallTestCase(support.TempdirManager,
+                      support.LoggingCatcher,
+                      unittest.TestCase):
+
+    def test_home_installation_scheme(self):
+        # This ensures two things:
+        # - that --home generates the desired set of directory names
+        # - that --home is supported on all platforms
+        builddir = self.mkdtemp()
+        destination = os.path.join(builddir, "installation")
+
+        dist = Distribution({"name": "foopkg"})
+        # script_name need not exist, it just needs to be initialized
+        dist.script_name = os.path.join(builddir, "setup.py")
+        dist.command_obj["build"] = support.DummyCommand(
+            build_base=builddir,
+            build_lib=os.path.join(builddir, "lib"),
+        )
+
+        old_posix_prefix = _SCHEMES.get('posix_prefix', 'platinclude')
+        old_posix_home = _SCHEMES.get('posix_home', 'platinclude')
+
+        new_path = '{platbase}/include/python{py_version_short}'
+        _SCHEMES.set('posix_prefix', 'platinclude', new_path)
+        _SCHEMES.set('posix_home', 'platinclude', '{platbase}/include/python')
+
+        try:
+            cmd = install_dist(dist)
+            cmd.home = destination
+            cmd.ensure_finalized()
+        finally:
+            _SCHEMES.set('posix_prefix', 'platinclude', old_posix_prefix)
+            _SCHEMES.set('posix_home', 'platinclude', old_posix_home)
+
+        self.assertEqual(cmd.install_base, destination)
+        self.assertEqual(cmd.install_platbase, destination)
+
+        def check_path(got, expected):
+            got = os.path.normpath(got)
+            expected = os.path.normpath(expected)
+            self.assertEqual(got, expected)
+
+        libdir = os.path.join(destination, "lib", "python")
+        check_path(cmd.install_lib, libdir)
+        check_path(cmd.install_platlib, libdir)
+        check_path(cmd.install_purelib, libdir)
+        check_path(cmd.install_headers,
+                   os.path.join(destination, "include", "python", "foopkg"))
+        check_path(cmd.install_scripts, os.path.join(destination, "bin"))
+        check_path(cmd.install_data, destination)
+
+    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
+    def test_user_site(self):
+        # test install with --user
+        # preparing the environment for the test
+        self.old_user_base = get_config_var('userbase')
+        self.old_user_site = get_path('purelib', '%s_user' % os.name)
+        self.tmpdir = self.mkdtemp()
+        self.user_base = os.path.join(self.tmpdir, 'B')
+        self.user_site = os.path.join(self.tmpdir, 'S')
+        _CONFIG_VARS['userbase'] = self.user_base
+        scheme = '%s_user' % os.name
+        _SCHEMES.set(scheme, 'purelib', self.user_site)
+
+        def _expanduser(path):
+            if path[0] == '~':
+                path = os.path.normpath(self.tmpdir) + path[1:]
+            return path
+
+        self.old_expand = os.path.expanduser
+        os.path.expanduser = _expanduser
+
+        try:
+            # this is the actual test
+            self._test_user_site()
+        finally:
+            _CONFIG_VARS['userbase'] = self.old_user_base
+            _SCHEMES.set(scheme, 'purelib', self.old_user_site)
+            os.path.expanduser = self.old_expand
+
+    def _test_user_site(self):
+        schemes = get_scheme_names()
+        for key in ('nt_user', 'posix_user', 'os2_home'):
+            self.assertIn(key, schemes)
+
+        dist = Distribution({'name': 'xx'})
+        cmd = install_dist(dist)
+        # making sure the user option is there
+        options = [name for name, short, label in
+                   cmd.user_options]
+        self.assertIn('user', options)
+
+        # setting a value
+        cmd.user = True
+
+        # user base and site shouldn't be created yet
+        self.assertFalse(os.path.exists(self.user_base))
+        self.assertFalse(os.path.exists(self.user_site))
+
+        # let's run finalize
+        cmd.ensure_finalized()
+
+        # now they should
+        self.assertTrue(os.path.exists(self.user_base))
+        self.assertTrue(os.path.exists(self.user_site))
+
+        self.assertIn('userbase', cmd.config_vars)
+        self.assertIn('usersite', cmd.config_vars)
+
+    def test_handle_extra_path(self):
+        dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'})
+        cmd = install_dist(dist)
+
+        # two elements
+        cmd.handle_extra_path()
+        self.assertEqual(cmd.extra_path, ['path', 'dirs'])
+        self.assertEqual(cmd.extra_dirs, 'dirs')
+        self.assertEqual(cmd.path_file, 'path')
+
+        # one element
+        cmd.extra_path = ['path']
+        cmd.handle_extra_path()
+        self.assertEqual(cmd.extra_path, ['path'])
+        self.assertEqual(cmd.extra_dirs, 'path')
+        self.assertEqual(cmd.path_file, 'path')
+
+        # none
+        dist.extra_path = cmd.extra_path = None
+        cmd.handle_extra_path()
+        self.assertEqual(cmd.extra_path, None)
+        self.assertEqual(cmd.extra_dirs, '')
+        self.assertEqual(cmd.path_file, None)
+
+        # three elements (not allowed)
+        cmd.extra_path = 'path,dirs,again'
+        self.assertRaises(PackagingOptionError, cmd.handle_extra_path)
+
+    def test_finalize_options(self):
+        dist = Distribution({'name': 'xx'})
+        cmd = install_dist(dist)
+
+        # must supply either prefix/exec-prefix/home or
+        # install-base/install-platbase -- not both
+        cmd.prefix = 'prefix'
+        cmd.install_base = 'base'
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+        # must supply either home or prefix/exec-prefix -- not both
+        cmd.install_base = None
+        cmd.home = 'home'
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+        if sys.version >= '2.6':
+            # can't combine user with prefix/exec_prefix/home or
+            # install_(plat)base
+            cmd.prefix = None
+            cmd.user = 'user'
+            self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+    def test_old_record(self):
+        # test pre-PEP 376 --record option (outside dist-info dir)
+        install_dir = self.mkdtemp()
+        pkgdir, dist = self.create_dist()
+
+        dist = Distribution()
+        cmd = install_dist(dist)
+        dist.command_obj['install_dist'] = cmd
+        cmd.root = install_dir
+        cmd.record = os.path.join(pkgdir, 'filelist')
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the record file was created with four
+        # lines, one for each .dist-info entry: METADATA,
+        # INSTALLER, REQUESTED, RECORD
+        with open(cmd.record) as f:
+            self.assertEqual(len(f.readlines()), 4)
+
+        # XXX test that fancy_getopt is okay with options named
+        # record and no-record but unrelated
+
+
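+# A minimal sketch (not run by the test suite) of the scheme selection that
+# finalize_options performs: --home alone is valid and picks the *_home
+# scheme, while combining it with prefix or install-base raises
+# PackagingOptionError, as test_finalize_options shows.
+def _demo_home_scheme(destination):
+    # `destination` is a hypothetical directory, for illustration only
+    cmd = install_dist(Distribution({'name': 'xx'}))
+    cmd.home = destination
+    cmd.ensure_finalized()
+    return cmd.install_base     # == destination
+
+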
+def test_suite():
+    return unittest.makeSuite(InstallTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_distinfo.py b/Lib/packaging/tests/test_command_install_distinfo.py
new file mode 100644
index 0000000..6d40f66
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_distinfo.py
@@ -0,0 +1,192 @@
+"""Tests for ``packaging.command.install_distinfo``. """
+
+import os
+import csv
+import hashlib
+import sys
+
+from packaging.command.install_distinfo import install_distinfo
+from packaging.command.cmd import Command
+from packaging.metadata import Metadata
+from packaging.tests import unittest, support
+
+
+class DummyInstallCmd(Command):
+
+    def __init__(self, dist=None):
+        self.outputs = []
+        self.distribution = dist
+
+    def __getattr__(self, name):
+        return None
+
+    def ensure_finalized(self):
+        pass
+
+    def get_outputs(self):
+        return (self.outputs +
+                self.get_finalized_command('install_distinfo').get_outputs())
+
+
+class InstallDistinfoTestCase(support.TempdirManager,
+                              support.LoggingCatcher,
+                              unittest.TestCase):
+
+    def checkLists(self, x, y):
+        self.assertListEqual(sorted(x), sorted(y))
+
+    def test_empty_install(self):
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.ensure_finalized()
+        cmd.run()
+
+        self.checkLists(os.listdir(install_dir), ['foo-1.0.dist-info'])
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+        self.checkLists(os.listdir(dist_info),
+                        ['METADATA', 'RECORD', 'REQUESTED', 'INSTALLER'])
+        with open(os.path.join(dist_info, 'INSTALLER')) as fp:
+            self.assertEqual(fp.read(), 'distutils')
+        with open(os.path.join(dist_info, 'REQUESTED')) as fp:
+            self.assertEqual(fp.read(), '')
+        meta_path = os.path.join(dist_info, 'METADATA')
+        self.assertTrue(Metadata(path=meta_path).check())
+
+    def test_installer(self):
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.installer = 'bacon-python'
+        cmd.ensure_finalized()
+        cmd.run()
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+        with open(os.path.join(dist_info, 'INSTALLER')) as fp:
+            self.assertEqual(fp.read(), 'bacon-python')
+
+    def test_requested(self):
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.requested = False
+        cmd.ensure_finalized()
+        cmd.run()
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+        self.checkLists(os.listdir(dist_info),
+                        ['METADATA', 'RECORD', 'INSTALLER'])
+
+    def test_no_record(self):
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.no_record = True
+        cmd.ensure_finalized()
+        cmd.run()
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+        self.checkLists(os.listdir(dist_info),
+                        ['METADATA', 'REQUESTED', 'INSTALLER'])
+
+    def test_record(self):
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        fake_dists = os.path.join(os.path.dirname(__file__), 'fake_dists')
+        fake_dists = os.path.realpath(fake_dists)
+
+        # for testing, we simply add all the files from the fake_dists
+        # directories, skipping the *.egg dists
+        dirs = []
+        for dir in os.listdir(fake_dists):
+            full_path = os.path.join(fake_dists, dir)
+            if os.path.isdir(full_path) and not dir.endswith('.egg'):
+                dirs.append(full_path)
+
+        for dir in dirs:
+            for path, subdirs, files in os.walk(dir):
+                install.outputs += [os.path.join(path, f) for f in files]
+                install.outputs += [os.path.join(path, f + 'c')
+                                    for f in files if f.endswith('.py')]
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.ensure_finalized()
+        cmd.run()
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+
+        expected = []
+        for f in install.get_outputs():
+            if (f.endswith('.pyc') or f == os.path.join(
+                install_dir, 'foo-1.0.dist-info', 'RECORD')):
+                expected.append([f, '', ''])
+            else:
+                size = os.path.getsize(f)
+                md5 = hashlib.md5()
+                with open(f, 'rb') as fp:
+                    md5.update(fp.read())
+                hash = md5.hexdigest()
+                expected.append([f, hash, str(size)])
+
+        with open(os.path.join(dist_info, 'RECORD'), 'r') as f:
+            reader = csv.reader(f, delimiter=',',
+                                lineterminator=os.linesep,
+                                quotechar='"')
+            parsed = list(reader)
+
+        self.maxDiff = None
+        self.checkLists(parsed, expected)
+
+
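+# RECORD recap: one CSV row per installed file -- absolute path, md5
+# hexdigest, size in bytes -- except that byte-compiled files and the RECORD
+# file itself carry empty hash and size fields, which is exactly what the
+# `expected` list in test_record reproduces.
+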
+def test_suite():
+    return unittest.makeSuite(InstallDistinfoTestCase)
+
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_headers.py b/Lib/packaging/tests/test_command_install_headers.py
new file mode 100644
index 0000000..f2906a7
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_headers.py
@@ -0,0 +1,38 @@
+"""Tests for packaging.command.install_headers."""
+import os
+
+from packaging.command.install_headers import install_headers
+from packaging.tests import unittest, support
+
+
+class InstallHeadersTestCase(support.TempdirManager,
+                             support.LoggingCatcher,
+                             unittest.TestCase):
+
+    def test_simple_run(self):
+        # we have two headers
+        header_list = self.mkdtemp()
+        header1 = os.path.join(header_list, 'header1')
+        header2 = os.path.join(header_list, 'header2')
+        self.write_file(header1)
+        self.write_file(header2)
+        headers = [header1, header2]
+
+        pkg_dir, dist = self.create_dist(headers=headers)
+        cmd = install_headers(dist)
+        self.assertEqual(cmd.get_inputs(), headers)
+
+        # let's run the command
+        cmd.install_dir = os.path.join(pkg_dir, 'inst')
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the results
+        self.assertEqual(len(cmd.get_outputs()), 2)
+
+
+def test_suite():
+    return unittest.makeSuite(InstallHeadersTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_lib.py b/Lib/packaging/tests/test_command_install_lib.py
new file mode 100644
index 0000000..96749e3
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_lib.py
@@ -0,0 +1,115 @@
+"""Tests for packaging.command.install_data."""
+import sys
+import os
+
+from packaging.tests import unittest, support
+from packaging.command.install_lib import install_lib
+from packaging.compiler.extension import Extension
+from packaging.errors import PackagingOptionError
+
+try:
+    no_bytecode = sys.dont_write_bytecode
+    bytecode_support = True
+except AttributeError:
+    no_bytecode = False
+    bytecode_support = False
+
+
+class InstallLibTestCase(support.TempdirManager,
+                         support.LoggingCatcher,
+                         support.EnvironRestorer,
+                         unittest.TestCase):
+
+    restore_environ = ['PYTHONPATH']
+
+    def test_finalize_options(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+
+        cmd.finalize_options()
+        self.assertTrue(cmd.compile)
+        self.assertEqual(cmd.optimize, 0)
+
+        # optimize must be 0, 1, or 2
+        cmd.optimize = 'foo'
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+        cmd.optimize = '4'
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+        cmd.optimize = '2'
+        cmd.finalize_options()
+        self.assertEqual(cmd.optimize, 2)
+
+    @unittest.skipIf(no_bytecode, 'byte-compile not supported')
+    def test_byte_compile(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+        cmd.compile = True
+        cmd.optimize = 1
+
+        f = os.path.join(pkg_dir, 'foo.py')
+        self.write_file(f, '# python file')
+        cmd.byte_compile([f])
+        self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'foo.pyc')))
+        self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'foo.pyo')))
+
+    def test_get_outputs(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+
+        # setting up a dist environment
+        cmd.compile = True
+        cmd.optimize = 1
+        cmd.install_dir = pkg_dir
+        f = os.path.join(pkg_dir, '__init__.py')
+        self.write_file(f, '# python package')
+        cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
+        cmd.distribution.packages = [pkg_dir]
+        cmd.distribution.script_name = 'setup.py'
+
+        # make sure the build_lib is set to the temp dir
+        build_dir = os.path.split(pkg_dir)[0]
+        cmd.get_finalized_command('build_py').build_lib = build_dir
+
+        # get_outputs should return 4 elements
+        self.assertEqual(len(cmd.get_outputs()), 4)
+
+    def test_get_inputs(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+
+        # setting up a dist environment
+        cmd.compile = True
+        cmd.optimize = 1
+        cmd.install_dir = pkg_dir
+        f = os.path.join(pkg_dir, '__init__.py')
+        self.write_file(f, '# python package')
+        cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
+        cmd.distribution.packages = [pkg_dir]
+        cmd.distribution.script_name = 'setup.py'
+
+        # get_inputs should return 2 elements
+        self.assertEqual(len(cmd.get_inputs()), 2)
+
+    @unittest.skipUnless(bytecode_support,
+                         'sys.dont_write_bytecode not supported')
+    def test_dont_write_bytecode(self):
+        # makes sure byte_compile is not used
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+        cmd.compile = True
+        cmd.optimize = 1
+
+        self.addCleanup(setattr, sys, 'dont_write_bytecode',
+                        sys.dont_write_bytecode)
+        sys.dont_write_bytecode = True
+        cmd.byte_compile([])
+
+        self.assertIn('byte-compiling is disabled', self.get_logs()[0])
+
+
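+# Byte-compilation recap: compile=True yields foo.pyc and optimize=1 adds
+# foo.pyo (as test_byte_compile asserts), while sys.dont_write_bytecode
+# makes byte_compile skip the work and log 'byte-compiling is disabled'.
+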
+def test_suite():
+    return unittest.makeSuite(InstallLibTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_scripts.py b/Lib/packaging/tests/test_command_install_scripts.py
new file mode 100644
index 0000000..6452a34
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_scripts.py
@@ -0,0 +1,75 @@
+"""Tests for packaging.command.install_scripts."""
+import os
+
+from packaging.tests import unittest, support
+from packaging.command.install_scripts import install_scripts
+from packaging.dist import Distribution
+
+
+class InstallScriptsTestCase(support.TempdirManager,
+                             support.LoggingCatcher,
+                             unittest.TestCase):
+
+    def test_default_settings(self):
+        dist = Distribution()
+        dist.command_obj["build"] = support.DummyCommand(
+            build_scripts="/foo/bar")
+        dist.command_obj["install_dist"] = support.DummyCommand(
+            install_scripts="/splat/funk",
+            force=True,
+            skip_build=True,
+            )
+        cmd = install_scripts(dist)
+        self.assertFalse(cmd.force)
+        self.assertFalse(cmd.skip_build)
+        self.assertIs(cmd.build_dir, None)
+        self.assertIs(cmd.install_dir, None)
+
+        cmd.finalize_options()
+
+        self.assertTrue(cmd.force)
+        self.assertTrue(cmd.skip_build)
+        self.assertEqual(cmd.build_dir, "/foo/bar")
+        self.assertEqual(cmd.install_dir, "/splat/funk")
+
+    def test_installation(self):
+        source = self.mkdtemp()
+        expected = []
+
+        def write_script(name, text):
+            expected.append(name)
+            with open(os.path.join(source, name), "w") as f:
+                f.write(text)
+
+        write_script("script1.py", ("#! /usr/bin/env python2.3\n"
+                                    "# bogus script w/ Python sh-bang\n"
+                                    "pass\n"))
+        write_script("script2.py", ("#!/usr/bin/python\n"
+                                    "# bogus script w/ Python sh-bang\n"
+                                    "pass\n"))
+        write_script("shell.sh", ("#!/bin/sh\n"
+                                  "# bogus shell script w/ sh-bang\n"
+                                  "exit 0\n"))
+
+        target = self.mkdtemp()
+        dist = Distribution()
+        dist.command_obj["build"] = support.DummyCommand(build_scripts=source)
+        dist.command_obj["install_dist"] = support.DummyCommand(
+            install_scripts=target,
+            force=True,
+            skip_build=True,
+            )
+        cmd = install_scripts(dist)
+        cmd.finalize_options()
+        cmd.run()
+
+        installed = os.listdir(target)
+        for name in expected:
+            self.assertIn(name, installed)
+
+
+def test_suite():
+    return unittest.makeSuite(InstallScriptsTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_register.py b/Lib/packaging/tests/test_command_register.py
new file mode 100644
index 0000000..7aa487a
--- /dev/null
+++ b/Lib/packaging/tests/test_command_register.py
@@ -0,0 +1,259 @@
+"""Tests for packaging.command.register."""
+import os
+import getpass
+import urllib.request
+import urllib.error
+import urllib.parse
+
+try:
+    import docutils
+    DOCUTILS_SUPPORT = True
+except ImportError:
+    DOCUTILS_SUPPORT = False
+
+from packaging.tests import unittest, support
+from packaging.command import register as register_module
+from packaging.command.register import register
+from packaging.errors import PackagingSetupError
+
+
+PYPIRC_NOPASSWORD = """\
+[distutils]
+
+index-servers =
+    server1
+
+[server1]
+username:me
+"""
+
+WANTED_PYPIRC = """\
+[distutils]
+index-servers =
+    pypi
+
+[pypi]
+username:tarek
+password:password
+"""
+
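+# .pypirc recap: the [distutils] section lists server names under
+# index-servers, and each named section then supplies that server's
+# credentials.  The tests below fake stdin (Inputs) and urllib's opener
+# (FakeOpener) so no real PyPI request is ever made.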
+
+class Inputs:
+    """Fakes user inputs."""
+    def __init__(self, *answers):
+        self.answers = answers
+        self.index = 0
+
+    def __call__(self, prompt=''):
+        try:
+            return self.answers[self.index]
+        finally:
+            self.index += 1
+
+
+class FakeOpener:
+    """Fakes a PyPI server"""
+    def __init__(self):
+        self.reqs = []
+
+    def __call__(self, *args):
+        return self
+
+    def open(self, req):
+        self.reqs.append(req)
+        return self
+
+    def read(self):
+        return 'xxx'
+
+
+class RegisterTestCase(support.TempdirManager,
+                       support.EnvironRestorer,
+                       support.LoggingCatcher,
+                       unittest.TestCase):
+
+    restore_environ = ['HOME']
+
+    def setUp(self):
+        super(RegisterTestCase, self).setUp()
+        self.tmp_dir = self.mkdtemp()
+        self.rc = os.path.join(self.tmp_dir, '.pypirc')
+        os.environ['HOME'] = self.tmp_dir
+
+        # patching the password prompt
+        self._old_getpass = getpass.getpass
+
+        def _getpass(prompt):
+            return 'password'
+
+        getpass.getpass = _getpass
+        self.old_opener = urllib.request.build_opener
+        self.conn = urllib.request.build_opener = FakeOpener()
+
+    def tearDown(self):
+        getpass.getpass = self._old_getpass
+        urllib.request.build_opener = self.old_opener
+        if hasattr(register_module, 'input'):
+            del register_module.input
+        super(RegisterTestCase, self).tearDown()
+
+    def _get_cmd(self, metadata=None):
+        if metadata is None:
+            metadata = {'url': 'xxx', 'author': 'xxx',
+                        'author_email': 'xxx',
+                        'name': 'xxx', 'version': 'xxx'}
+        pkg_info, dist = self.create_dist(**metadata)
+        return register(dist)
+
+    def test_create_pypirc(self):
+        # this test makes sure a .pypirc file
+        # is created when requested.
+
+        # let's create a register instance
+        cmd = self._get_cmd()
+
+        # we shouldn't have a .pypirc file yet
+        self.assertFalse(os.path.exists(self.rc))
+
+        # patching input and getpass.getpass
+        # so the register command succeeds.
+        # Here's what we are faking:
+        # use your existing login (choice 1.)
+        # Username : 'tarek'
+        # Password : 'password'
+        # Save your login (y/N)? : 'y'
+        inputs = Inputs('1', 'tarek', 'y')
+        register_module.input = inputs
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # we should have a brand new .pypirc file
+        self.assertTrue(os.path.exists(self.rc))
+
+        # with content matching WANTED_PYPIRC
+        with open(self.rc) as fp:
+            content = fp.read()
+        self.assertEqual(content, WANTED_PYPIRC)
+
+        # now let's make sure the generated .pypirc file
+        # really works: we shouldn't be asked anything
+        # if we run the command again
+        def _no_way(prompt=''):
+            raise AssertionError(prompt)
+
+        register_module.input = _no_way
+        cmd.show_response = True
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's see what the server received: we should
+        # have two similar requests
+        self.assertEqual(len(self.conn.reqs), 2)
+        req1 = dict(self.conn.reqs[0].headers)
+        req2 = dict(self.conn.reqs[1].headers)
+        self.assertEqual(req2['Content-length'], req1['Content-length'])
+        self.assertIn('xxx', self.conn.reqs[1].data)
+
+    def test_password_not_in_file(self):
+
+        self.write_file(self.rc, PYPIRC_NOPASSWORD)
+        cmd = self._get_cmd()
+        cmd.finalize_options()
+        cmd._set_config()
+        cmd.send_metadata()
+
+        # dist.password should be set, and therefore
+        # available to other commands afterwards
+        self.assertEqual(cmd.distribution.password, 'password')
+
+    def test_registration(self):
+        # this test runs choice 2
+        cmd = self._get_cmd()
+        inputs = Inputs('2', 'tarek', 'tarek@ziade.org')
+        register_module.input = inputs
+        # let's run the command
+        # FIXME does this send a real request? use a mock server
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # we should have sent a request
+        self.assertEqual(len(self.conn.reqs), 1)
+        req = self.conn.reqs[0]
+        headers = dict(req.headers)
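+        # 608 is the exact size of the body built from the fake metadata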
+        self.assertEqual(headers['Content-length'], '608')
+        self.assertIn('tarek', req.data)
+
+    def test_password_reset(self):
+        # this test runs choice 3
+        cmd = self._get_cmd()
+        inputs = Inputs('3', 'tarek@ziade.org')
+        register_module.input = inputs
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # we should have sent a request
+        self.assertEqual(len(self.conn.reqs), 1)
+        req = self.conn.reqs[0]
+        headers = dict(req.headers)
+        self.assertEqual(headers['Content-length'], '290')
+        self.assertIn('tarek', req.data)
+
+    @unittest.skipUnless(DOCUTILS_SUPPORT, 'needs docutils')
+    def test_strict(self):
+        # testing the strict option:
+        # when on, the register command stops if
+        # the metadata is incomplete or if
+        # long_description is not reST-compliant
+
+        # empty metadata
+        cmd = self._get_cmd({'name': 'xxx', 'version': 'xxx'})
+        cmd.ensure_finalized()
+        cmd.strict = True
+        inputs = Inputs('1', 'tarek', 'y')
+        register_module.input = inputs
+        self.assertRaises(PackagingSetupError, cmd.run)
+
+        # metadata is OK but long_description is broken
+        metadata = {'home_page': 'xxx', 'author': 'xxx',
+                    'author_email': 'éxéxé',
+                    'name': 'xxx', 'version': 'xxx',
+                    'description': 'title\n==\n\ntext'}
+
+        cmd = self._get_cmd(metadata)
+        cmd.ensure_finalized()
+        cmd.strict = True
+
+        self.assertRaises(PackagingSetupError, cmd.run)
+
+        # now something that works
+        metadata['description'] = 'title\n=====\n\ntext'
+        cmd = self._get_cmd(metadata)
+        cmd.ensure_finalized()
+        cmd.strict = True
+        inputs = Inputs('1', 'tarek', 'y')
+        register_module.input = inputs
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # strict is off by default
+        cmd = self._get_cmd()
+        cmd.ensure_finalized()
+        inputs = Inputs('1', 'tarek', 'y')
+        register_module.input = inputs
+        cmd.ensure_finalized()
+        cmd.run()
+
+    def test_register_pep345(self):
+        cmd = self._get_cmd({})
+        cmd.ensure_finalized()
+        cmd.distribution.metadata['Requires-Dist'] = ['lxml']
+        data = cmd.build_post_data('submit')
+        self.assertEqual(data['metadata_version'], '1.2')
+        self.assertEqual(data['requires_dist'], ['lxml'])
+
+
+def test_suite():
+    return unittest.makeSuite(RegisterTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_sdist.py b/Lib/packaging/tests/test_command_sdist.py
new file mode 100644
index 0000000..a086e62
--- /dev/null
+++ b/Lib/packaging/tests/test_command_sdist.py
@@ -0,0 +1,409 @@
+"""Tests for packaging.command.sdist."""
+import os
+import zipfile
+import tarfile
+import logging
+
+from packaging.tests.support import requires_zlib
+
+try:
+    import grp
+    import pwd
+    UID_GID_SUPPORT = True
+except ImportError:
+    UID_GID_SUPPORT = False
+
+from os.path import join
+from packaging.tests import captured_stdout
+from packaging.command.sdist import sdist
+from packaging.command.sdist import show_formats
+from packaging.dist import Distribution
+from packaging.tests import unittest
+from packaging.errors import PackagingOptionError
+from packaging.util import find_executable
+from packaging.tests import support
+from shutil import get_archive_formats
+
+SETUP_PY = """
+from packaging.core import setup
+import somecode
+
+setup(name='fake')
+"""
+
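+# expected MANIFEST; %(sep)s becomes os.sep so the check is cross-platform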
+MANIFEST = """\
+# file GENERATED by packaging, do NOT edit
+README
+inroot.txt
+data%(sep)sdata.dt
+scripts%(sep)sscript.py
+some%(sep)sfile.txt
+some%(sep)sother_file.txt
+somecode%(sep)s__init__.py
+somecode%(sep)sdoc.dat
+somecode%(sep)sdoc.txt
+"""
+
+
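+# minimal manifest builder hook, exercised by test_manifest_builder below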
+def builder(dist, filelist):
+    filelist.append('bah')
+
+
+class SDistTestCase(support.TempdirManager,
+                    support.LoggingCatcher,
+                    support.EnvironRestorer,
+                    unittest.TestCase):
+
+    restore_environ = ['HOME']
+
+    def setUp(self):
+        # create a fresh temp dir to work in and point
+        # HOME at it (restored by EnvironRestorer)
+        super(SDistTestCase, self).setUp()
+        self.tmp_dir = self.mkdtemp()
+        os.environ['HOME'] = self.tmp_dir
+        # setting up an environment
+        self.old_path = os.getcwd()
+        os.mkdir(join(self.tmp_dir, 'somecode'))
+        os.mkdir(join(self.tmp_dir, 'dist'))
+        # a package, and a README
+        self.write_file((self.tmp_dir, 'README'), 'xxx')
+        self.write_file((self.tmp_dir, 'somecode', '__init__.py'), '#')
+        self.write_file((self.tmp_dir, 'setup.py'), SETUP_PY)
+        os.chdir(self.tmp_dir)
+
+    def tearDown(self):
+        # back to normal
+        os.chdir(self.old_path)
+        super(SDistTestCase, self).tearDown()
+
+    def get_cmd(self, metadata=None):
+        """Returns a cmd"""
+        if metadata is None:
+            metadata = {'name': 'fake', 'version': '1.0',
+                        'url': 'xxx', 'author': 'xxx',
+                        'author_email': 'xxx'}
+        dist = Distribution(metadata)
+        dist.script_name = 'setup.py'
+        dist.packages = ['somecode']
+        dist.include_package_data = True
+        cmd = sdist(dist)
+        cmd.dist_dir = 'dist'
+        return dist, cmd
+
+    @requires_zlib
+    def test_prune_file_list(self):
+        # this test creates a package with some VCS dirs in it
+        # and launches sdist to make sure they get pruned
+        # on all systems
+
+        # creating VCS directories with some files in them
+        os.mkdir(join(self.tmp_dir, 'somecode', '.svn'))
+
+        self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx')
+
+        os.mkdir(join(self.tmp_dir, 'somecode', '.hg'))
+        self.write_file((self.tmp_dir, 'somecode', '.hg',
+                         'ok'), 'xxx')
+
+        os.mkdir(join(self.tmp_dir, 'somecode', '.git'))
+        self.write_file((self.tmp_dir, 'somecode', '.git',
+                         'ok'), 'xxx')
+
+        # now building an sdist
+        dist, cmd = self.get_cmd()
+
+        # zip is available universally
+        # (tar might not be installed under win32)
+        cmd.formats = ['zip']
+
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # now let's check what we have
+        dist_folder = join(self.tmp_dir, 'dist')
+        files = os.listdir(dist_folder)
+        self.assertEqual(files, ['fake-1.0.zip'])
+
+        with zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) as zip_file:
+            content = zip_file.namelist()
+
+        # making sure everything has been pruned correctly
+        self.assertEqual(len(content), 3)
+
+    @requires_zlib
+    @unittest.skipIf(find_executable('tar') is None or
+                     find_executable('gzip') is None,
+                     'requires tar and gzip programs')
+    def test_make_distribution(self):
+        # building an sdist
+        dist, cmd = self.get_cmd()
+
+        # creating a gztar then a tar
+        cmd.formats = ['gztar', 'tar']
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # making sure we have two files
+        dist_folder = join(self.tmp_dir, 'dist')
+        result = sorted(os.listdir(dist_folder))
+        self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz'])
+
+        os.remove(join(dist_folder, 'fake-1.0.tar'))
+        os.remove(join(dist_folder, 'fake-1.0.tar.gz'))
+
+        # now trying a tar then a gztar
+        cmd.formats = ['tar', 'gztar']
+
+        cmd.ensure_finalized()
+        cmd.run()
+
+        result = sorted(os.listdir(dist_folder))
+        self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz'])
+
+    @requires_zlib
+    def test_add_defaults(self):
+
+        # http://bugs.python.org/issue2279
+
+        # add_defaults should also include
+        # data_files and package_data
+        dist, cmd = self.get_cmd()
+
+        # declaring extra files through
+        # the package_data option
+        dist.package_data = {'': ['*.cfg', '*.dat'],
+                             'somecode': ['*.txt']}
+        self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
+        self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#')
+
+        # adding some data in data_files
+        data_dir = join(self.tmp_dir, 'data')
+        os.mkdir(data_dir)
+        self.write_file((data_dir, 'data.dt'), '#')
+        some_dir = join(self.tmp_dir, 'some')
+        os.mkdir(some_dir)
+        self.write_file((self.tmp_dir, 'inroot.txt'), '#')
+        self.write_file((some_dir, 'file.txt'), '#')
+        self.write_file((some_dir, 'other_file.txt'), '#')
+
+        dist.data_files = {'data/data.dt': '{appdata}/data.dt',
+                           'inroot.txt': '{appdata}/inroot.txt',
+                           'some/file.txt': '{appdata}/file.txt',
+                           'some/other_file.txt': '{appdata}/other_file.txt'}
+
+        # adding a script
+        script_dir = join(self.tmp_dir, 'scripts')
+        os.mkdir(script_dir)
+        self.write_file((script_dir, 'script.py'), '#')
+        dist.scripts = [join('scripts', 'script.py')]
+
+        cmd.formats = ['zip']
+        cmd.use_defaults = True
+
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # now let's check what we have
+        dist_folder = join(self.tmp_dir, 'dist')
+        files = os.listdir(dist_folder)
+        self.assertEqual(files, ['fake-1.0.zip'])
+
+        with zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) as zip_file:
+            content = zip_file.namelist()
+
+        # Making sure everything was added. This includes 9 code and data
+        # files in addition to PKG-INFO.
+        self.assertEqual(len(content), 10)
+
+        # Checking the MANIFEST
+        with open(join(self.tmp_dir, 'MANIFEST')) as fp:
+            manifest = fp.read()
+        self.assertEqual(manifest, MANIFEST % {'sep': os.sep})
+
+    @requires_zlib
+    def test_metadata_check_option(self):
+        # testing the `check-metadata` option
+        dist, cmd = self.get_cmd(metadata={'name': 'xxx', 'version': 'xxx'})
+
+        # this should raise some warnings
+        # with the check subcommand
+        cmd.ensure_finalized()
+        cmd.run()
+        warnings = self.get_logs(logging.WARN)
+        self.assertEqual(len(warnings), 3)
+
+        # trying with a complete set of metadata
+        self.loghandler.flush()
+        dist, cmd = self.get_cmd()
+        cmd.ensure_finalized()
+        cmd.metadata_check = False
+        cmd.run()
+        warnings = self.get_logs(logging.WARN)
+        # removing manifest-generated warnings
+        warnings = [warn for warn in warnings if
+                    not warn.endswith('-- skipping')]
+        # the remaining warning is about the use of the default file list
+        self.assertEqual(len(warnings), 1)
+
+    def test_show_formats(self):
+        __, stdout = captured_stdout(show_formats)
+
+        # the output should be a header line + one line per format
+        num_formats = len(get_archive_formats())
+        output = [line for line in stdout.split('\n')
+                  if line.strip().startswith('--formats=')]
+        self.assertEqual(len(output), num_formats)
+
+    def test_finalize_options(self):
+
+        dist, cmd = self.get_cmd()
+        cmd.finalize_options()
+
+        # default options set by finalize
+        self.assertEqual(cmd.manifest, 'MANIFEST')
+        self.assertEqual(cmd.dist_dir, 'dist')
+
+        # formats must be a string splittable on (' ', ',')
+        # or a list of strings
+        cmd.formats = 1
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+        cmd.formats = ['zip']
+        cmd.finalize_options()
+
+        # the format must be known
+        cmd.formats = 'supazipa'
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+    @requires_zlib
+    @unittest.skipUnless(UID_GID_SUPPORT, "requires grp and pwd support")
+    @unittest.skipIf(find_executable('tar') is None or
+                     find_executable('gzip') is None,
+                     'requires tar and gzip programs')
+    def test_make_distribution_owner_group(self):
+        # building an sdist
+        dist, cmd = self.get_cmd()
+
+        # creating a gztar and specifying the owner+group
+        cmd.formats = ['gztar']
+        cmd.owner = pwd.getpwuid(0)[0]
+        cmd.group = grp.getgrgid(0)[0]
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # making sure the archive members have the right ownership
+        archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+        with tarfile.open(archive_name) as archive:
+            for member in archive.getmembers():
+                self.assertEqual(member.uid, 0)
+                self.assertEqual(member.gid, 0)
+
+        # building an sdist again
+        dist, cmd = self.get_cmd()
+
+        # creating a gztar
+        cmd.formats = ['gztar']
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # making sure the archive members have the right ownership
+        archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+        with tarfile.open(archive_name) as archive:
+
+            # note that we are not testing the group ownership here,
+            # because it depends on the platform and on the rights of
+            # the account running the tests (see #7408)
+            for member in archive.getmembers():
+                self.assertEqual(member.uid, os.getuid())
+
+    @requires_zlib
+    def test_get_file_list(self):
+        # make sure MANIFEST is recalculated
+        dist, cmd = self.get_cmd()
+        # declaring extra files through package_data
+        dist.package_data = {'somecode': ['*.txt']}
+        self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # Should produce four lines. Those lines are one comment, one default
+        # (README) and two package files.
+        with open(cmd.manifest) as f:
+            manifest = [line.strip() for line in f.read().split('\n')
+                        if line.strip() != '']
+        self.assertEqual(len(manifest), 4)
+
+        # Adding a file
+        self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')
+
+        # make sure build_py is reinitialized, like a fresh run
+        build_py = dist.get_command_obj('build_py')
+        build_py.finalized = False
+        build_py.ensure_finalized()
+
+        cmd.run()
+
+        with open(cmd.manifest) as f:
+            manifest2 = [line.strip() for line in f.read().split('\n')
+                         if line.strip() != '']
+
+        # Do we have the new file in MANIFEST?
+        self.assertEqual(len(manifest2), 5)
+        self.assertIn('doc2.txt', manifest2[-1])
+
+    @requires_zlib
+    def test_manifest_marker(self):
+        # check that autogenerated MANIFESTs have a marker
+        dist, cmd = self.get_cmd()
+        cmd.ensure_finalized()
+        cmd.run()
+
+        with open(cmd.manifest) as f:
+            manifest = [line.strip() for line in f.read().split('\n')
+                        if line.strip() != '']
+
+        self.assertEqual(manifest[0],
+                         '# file GENERATED by packaging, do NOT edit')
+
+    @requires_zlib
+    def test_manual_manifest(self):
+        # check that a MANIFEST without a marker is left alone
+        dist, cmd = self.get_cmd()
+        cmd.ensure_finalized()
+        self.write_file((self.tmp_dir, cmd.manifest), 'README.manual')
+        cmd.run()
+
+        with open(cmd.manifest) as f:
+            manifest = [line.strip() for line in f.read().split('\n')
+                        if line.strip() != '']
+
+        self.assertEqual(manifest, ['README.manual'])
+
+    @requires_zlib
+    def test_template(self):
+        dist, cmd = self.get_cmd()
+        dist.extra_files = ['include yeah']
+        cmd.ensure_finalized()
+        self.write_file((self.tmp_dir, 'yeah'), 'xxx')
+        cmd.run()
+        with open(cmd.manifest) as f:
+            content = f.read()
+
+        self.assertIn('yeah', content)
+
+    @requires_zlib
+    def test_manifest_builder(self):
+        dist, cmd = self.get_cmd()
+        cmd.manifest_builders = 'packaging.tests.test_command_sdist.builder'
+        cmd.ensure_finalized()
+        cmd.run()
+        self.assertIn('bah', cmd.filelist.files)
+
+
+def test_suite():
+    return unittest.makeSuite(SDistTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_test.py b/Lib/packaging/tests/test_command_test.py
new file mode 100644
index 0000000..4fd8452
--- /dev/null
+++ b/Lib/packaging/tests/test_command_test.py
@@ -0,0 +1,226 @@
+import os
+import re
+import sys
+import shutil
+import logging
+import unittest as ut1
+import packaging.database
+
+from os.path import join
+from operator import getitem, setitem, delitem
+from packaging.command.build import build
+from packaging.tests import unittest
+from packaging.tests.support import (TempdirManager, EnvironRestorer,
+                                     LoggingCatcher)
+from packaging.command.test import test
+from packaging.command import set_command
+from packaging.dist import Distribution
+
+
+EXPECTED_OUTPUT_RE = r'''FAIL: test_blah \(myowntestmodule.SomeTest\)
+----------------------------------------------------------------------
+Traceback \(most recent call last\):
+  File ".+/myowntestmodule.py", line \d+, in test_blah
+    self.fail\("horribly"\)
+AssertionError: horribly
+'''
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+class MockBuildCmd(build):
+    build_lib = "mock build lib"
+    command_name = 'build'
+    plat_name = 'whatever'
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        self._record.append("build has run")
+
+
+class TestTest(TempdirManager,
+               EnvironRestorer,
+               LoggingCatcher,
+               unittest.TestCase):
+
+    restore_environ = ['PYTHONPATH']
+
+    def setUp(self):
+        super(TestTest, self).setUp()
+        self.addCleanup(packaging.database.clear_cache)
+        new_pythonpath = os.path.dirname(os.path.dirname(here))
+        pythonpath = os.environ.get('PYTHONPATH')
+        if pythonpath is not None:
+            new_pythonpath = os.pathsep.join((new_pythonpath, pythonpath))
+        os.environ['PYTHONPATH'] = new_pythonpath
+
+    def assert_re_match(self, pattern, string):
+        def quote(s):
+            lines = ['## ' + line for line in s.split('\n')]
+            sep = ["#" * 60]
+            return [''] + sep + lines + sep
+        msg = quote(pattern) + ["didn't match"] + quote(string)
+        msg = "\n".join(msg)
+        if not re.search(pattern, string):
+            self.fail(msg)
+
+    def prepare_dist(self, dist_name):
+        pkg_dir = join(os.path.dirname(__file__), "dists", dist_name)
+        temp_pkg_dir = join(self.mkdtemp(), dist_name)
+        shutil.copytree(pkg_dir, temp_pkg_dir)
+        return temp_pkg_dir
+
+    def safely_replace(self, obj, attr,
+                       new_val=None, delete=False, dictionary=False):
+        """Replace a object's attribute returning to its original state at the
+        end of the test run. Creates the attribute if not present before
+        (deleting afterwards). When delete=True, makes sure the value is del'd
+        for the test run. If dictionary is set to True, operates of its items
+        rather than attributes."""
+        if dictionary:
+            _setattr, _getattr, _delattr = setitem, getitem, delitem
+
+            def _hasattr(_dict, value):
+                return value in _dict
+        else:
+            _setattr, _getattr, _delattr, _hasattr = (setattr, getattr,
+                                                      delattr, hasattr)
+
+        orig_has_attr = _hasattr(obj, attr)
+        if orig_has_attr:
+            orig_val = _getattr(obj, attr)
+
+        if delete is False:
+            _setattr(obj, attr, new_val)
+        elif orig_has_attr:
+            _delattr(obj, attr)
+
+        def do_cleanup():
+            if orig_has_attr:
+                _setattr(obj, attr, orig_val)
+            elif _hasattr(obj, attr):
+                _delattr(obj, attr)
+
+        self.addCleanup(do_cleanup)
+
+    def test_runs_unittest(self):
+        module_name, a_module = self.prepare_a_module()
+        record = []
+        a_module.recorder = lambda *args: record.append("suite")
+
+        class MockTextTestRunner:
+            def __init__(*_, **__):
+                pass
+
+            def run(_self, suite):
+                record.append("run")
+
+        self.safely_replace(ut1, "TextTestRunner", MockTextTestRunner)
+
+        dist = Distribution()
+        cmd = test(dist)
+        cmd.suite = "%s.recorder" % module_name
+        cmd.run()
+        self.assertEqual(record, ["suite", "run"])
+
+    def test_builds_before_running_tests(self):
+        self.addCleanup(set_command, 'packaging.command.build.build')
+        set_command('packaging.tests.test_command_test.MockBuildCmd')
+
+        dist = Distribution()
+        dist.get_command_obj('build')._record = record = []
+        cmd = test(dist)
+        cmd.runner = self.prepare_named_function(lambda: None)
+        cmd.ensure_finalized()
+        cmd.run()
+        self.assertEqual(['build has run'], record)
+
+    def _test_works_with_2to3(self):
+        pass
+
+    def test_checks_requires(self):
+        dist = Distribution()
+        cmd = test(dist)
+        phony_project = 'ohno_ohno-impossible_1234-name_stop-that!'
+        cmd.tests_require = [phony_project]
+        cmd.ensure_finalized()
+        logs = self.get_logs(logging.WARNING)
+        self.assertEqual(1, len(logs))
+        self.assertIn(phony_project, logs[0])
+
+    def prepare_a_module(self):
+        tmp_dir = self.mkdtemp()
+        sys.path.append(tmp_dir)
+        self.addCleanup(sys.path.remove, tmp_dir)
+
+        self.write_file((tmp_dir, 'packaging_tests_a.py'), '')
+        import packaging_tests_a as a_module
+        return "packaging_tests_a", a_module
+
+    def prepare_named_function(self, func):
+        module_name, a_module = self.prepare_a_module()
+        a_module.recorder = func
+        return "%s.recorder" % module_name
+
+    def test_custom_runner(self):
+        dist = Distribution()
+        cmd = test(dist)
+        record = []
+        cmd.runner = self.prepare_named_function(
+            lambda: record.append("runner called"))
+        cmd.ensure_finalized()
+        cmd.run()
+        self.assertEqual(["runner called"], record)
+
+    def prepare_mock_ut2(self):
+        class MockUTClass:
+            def __init__(*_, **__):
+                pass
+
+            def discover(self):
+                pass
+
+            def run(self, _):
+                pass
+
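+        # a minimal stand-in for the unittest2 module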
+        class MockUTModule:
+            TestLoader = MockUTClass
+            TextTestRunner = MockUTClass
+
+        mock_ut2 = MockUTModule()
+        self.safely_replace(sys.modules, "unittest2",
+                            mock_ut2, dictionary=True)
+        return mock_ut2
+
+    def test_gets_unittest_discovery(self):
+        mock_ut2 = self.prepare_mock_ut2()
+        dist = Distribution()
+        cmd = test(dist)
+        self.safely_replace(ut1.TestLoader, "discover", lambda: None)
+        self.assertEqual(cmd.get_ut_with_discovery(), ut1)
+
+        del ut1.TestLoader.discover
+        self.assertEqual(cmd.get_ut_with_discovery(), mock_ut2)
+
+    def test_calls_discover(self):
+        self.safely_replace(ut1.TestLoader, "discover", delete=True)
+        mock_ut2 = self.prepare_mock_ut2()
+        record = []
+        mock_ut2.TestLoader.discover = lambda self, path: record.append(path)
+        dist = Distribution()
+        cmd = test(dist)
+        cmd.run()
+        self.assertEqual([os.curdir], record)
+
+
+def test_suite():
+    return unittest.makeSuite(TestTest)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_upload.py b/Lib/packaging/tests/test_command_upload.py
new file mode 100644
index 0000000..d7609a2
--- /dev/null
+++ b/Lib/packaging/tests/test_command_upload.py
@@ -0,0 +1,164 @@
+"""Tests for packaging.command.upload."""
+import os
+import sys
+
+from packaging.command.upload import upload
+from packaging.dist import Distribution
+from packaging.errors import PackagingOptionError
+
+from packaging.tests import unittest, support
+try:
+    import threading
+    from packaging.tests.pypi_server import PyPIServerTestCase
+except ImportError:
+    threading = None
+    PyPIServerTestCase = unittest.TestCase
+
+
+PYPIRC_NOPASSWORD = """\
+[distutils]
+
+index-servers =
+    server1
+
+[server1]
+username:me
+"""
+
+PYPIRC = """\
+[distutils]
+
+index-servers =
+    server1
+    server2
+
+[server1]
+username:me
+password:secret
+
+[server2]
+username:meagain
+password: secret
+realm:acme
+repository:http://another.pypi/
+"""
+
+
+@unittest.skipIf(threading is None, 'needs threading')
+class UploadTestCase(support.TempdirManager, support.EnvironRestorer,
+                     support.LoggingCatcher, PyPIServerTestCase):
+
+    restore_environ = ['HOME']
+
+    def setUp(self):
+        super(UploadTestCase, self).setUp()
+        self.tmp_dir = self.mkdtemp()
+        self.rc = os.path.join(self.tmp_dir, '.pypirc')
+        os.environ['HOME'] = self.tmp_dir
+
+    def test_finalize_options(self):
+        # new format
+        self.write_file(self.rc, PYPIRC)
+        dist = Distribution()
+        cmd = upload(dist)
+        cmd.finalize_options()
+        for attr, expected in (('username', 'me'), ('password', 'secret'),
+                               ('realm', 'pypi'),
+                               ('repository', 'http://pypi.python.org/pypi')):
+            self.assertEqual(getattr(cmd, attr), expected)
+
+    def test_finalize_options_unsigned_identity_raises_exception(self):
+        self.write_file(self.rc, PYPIRC)
+        dist = Distribution()
+        cmd = upload(dist)
+        cmd.identity = True
+        cmd.sign = False
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+    def test_saved_password(self):
+        # file with no password
+        self.write_file(self.rc, PYPIRC_NOPASSWORD)
+
+        # make sure it passes
+        dist = Distribution()
+        cmd = upload(dist)
+        cmd.ensure_finalized()
+        self.assertEqual(cmd.password, None)
+
+        # make sure we get it as well, if another command
+        # initialized it at the dist level
+        dist.password = 'xxx'
+        cmd = upload(dist)
+        cmd.finalize_options()
+        self.assertEqual(cmd.password, 'xxx')
+
+    def test_upload_without_files_raises_exception(self):
+        dist = Distribution()
+        cmd = upload(dist)
+        self.assertRaises(PackagingOptionError, cmd.run)
+
+    def test_upload(self):
+        path = os.path.join(self.tmp_dir, 'xxx')
+        self.write_file(path)
+        command, pyversion, filename = 'xxx', '3.3', path
+        dist_files = [(command, pyversion, filename)]
+
+        # let's run it
+        pkg_dir, dist = self.create_dist(dist_files=dist_files, author='dédé')
+        cmd = upload(dist)
+        cmd.ensure_finalized()
+        cmd.repository = self.pypi.full_address
+        cmd.run()
+
+        # what did we send?
+        handler, request_data = self.pypi.requests[-1]
+        headers = handler.headers
+        #self.assertIn('dédé', str(request_data))
+        self.assertIn(b'xxx', request_data)
+
+        self.assertEqual(int(headers['content-length']), len(request_data))
+        self.assertLess(int(headers['content-length']), 2500)
+        self.assertTrue(headers['content-type'].startswith('multipart/form-data'))
+        self.assertEqual(handler.command, 'POST')
+        self.assertNotIn('\n', headers['authorization'])
+
+    def test_upload_docs(self):
+        path = os.path.join(self.tmp_dir, 'xxx')
+        self.write_file(path)
+        command, pyversion, filename = 'xxx', '3.3', path
+        dist_files = [(command, pyversion, filename)]
+        docs_path = os.path.join(self.tmp_dir, "build", "docs")
+        os.makedirs(docs_path)
+        self.write_file(os.path.join(docs_path, "index.html"), "yellow")
+        self.write_file(self.rc, PYPIRC)
+
+        # let's run it
+        pkg_dir, dist = self.create_dist(dist_files=dist_files, author='dédé')
+
+        cmd = upload(dist)
+        cmd.get_finalized_command("build").run()
+        cmd.upload_docs = True
+        cmd.ensure_finalized()
+        cmd.repository = self.pypi.full_address
+        try:
+            prev_dir = os.getcwd()
+            os.chdir(self.tmp_dir)
+            cmd.run()
+        finally:
+            os.chdir(prev_dir)
+
+        handler, request_data = self.pypi.requests[-1]
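+        # split the body on the multipart boundary used in the request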
+        action, name, content = request_data.split(
+            "----------------GHSKFJDLGDS7543FJKLFHRE75642756743254"
+            .encode())[1:4]
+
+        self.assertIn(b'name=":action"', action)
+        self.assertIn(b'doc_upload', action)
+
+
+def test_suite():
+    return unittest.makeSuite(UploadTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_upload_docs.py b/Lib/packaging/tests/test_command_upload_docs.py
new file mode 100644
index 0000000..9876d18
--- /dev/null
+++ b/Lib/packaging/tests/test_command_upload_docs.py
@@ -0,0 +1,217 @@
+"""Tests for packaging.command.upload_docs."""
+import os
+import sys
+import shutil
+import zipfile
+try:
+    import _ssl
+except ImportError:
+    _ssl = None
+
+from packaging.command import upload_docs as upload_docs_mod
+from packaging.command.upload_docs import (upload_docs, zip_dir,
+                                           encode_multipart)
+from packaging.dist import Distribution
+from packaging.errors import PackagingFileError, PackagingOptionError
+
+from packaging.tests import unittest, support
+try:
+    import threading
+    from packaging.tests.pypi_server import PyPIServerTestCase
+except ImportError:
+    threading = None
+    PyPIServerTestCase = object
+
+
+EXPECTED_MULTIPART_OUTPUT = [
+    b'---x',
+    b'Content-Disposition: form-data; name="username"',
+    b'',
+    b'wok',
+    b'---x',
+    b'Content-Disposition: form-data; name="password"',
+    b'',
+    b'secret',
+    b'---x',
+    b'Content-Disposition: form-data; name="picture"; filename="wok.png"',
+    b'',
+    b'PNG89',
+    b'---x--',
+    b'',
+]
+
+PYPIRC = """\
+[distutils]
+index-servers = server1
+
+[server1]
+repository = %s
+username = real_slim_shady
+password = long_island
+"""
+
+
+@unittest.skipIf(threading is None, "Needs threading")
+class UploadDocsTestCase(support.TempdirManager,
+                         support.EnvironRestorer,
+                         support.LoggingCatcher,
+                         PyPIServerTestCase):
+
+    restore_environ = ['HOME']
+
+    def setUp(self):
+        super(UploadDocsTestCase, self).setUp()
+        self.tmp_dir = self.mkdtemp()
+        self.rc = os.path.join(self.tmp_dir, '.pypirc')
+        os.environ['HOME'] = self.tmp_dir
+        self.dist = Distribution()
+        self.dist.metadata['Name'] = "distr-name"
+        self.cmd = upload_docs(self.dist)
+
+    def test_default_uploaddir(self):
+        sandbox = self.mkdtemp()
+        previous = os.getcwd()
+        os.chdir(sandbox)
+        try:
+            os.mkdir("build")
+            self.prepare_sample_dir("build")
+            self.cmd.ensure_finalized()
+            self.assertEqual(self.cmd.upload_dir, os.path.join("build", "docs"))
+        finally:
+            os.chdir(previous)
+
+    def test_default_uploaddir_looks_for_doc_also(self):
+        sandbox = self.mkdtemp()
+        previous = os.getcwd()
+        os.chdir(sandbox)
+        try:
+            os.mkdir("build")
+            self.prepare_sample_dir("build")
+            os.rename(os.path.join("build", "docs"), os.path.join("build", "doc"))
+            self.cmd.ensure_finalized()
+            self.assertEqual(self.cmd.upload_dir, os.path.join("build", "doc"))
+        finally:
+            os.chdir(previous)
+
+    def prepare_sample_dir(self, sample_dir=None):
+        if sample_dir is None:
+            sample_dir = self.mkdtemp()
+        os.mkdir(os.path.join(sample_dir, "docs"))
+        self.write_file(os.path.join(sample_dir, "docs", "index.html"), "Ce mortel ennui")
+        self.write_file(os.path.join(sample_dir, "index.html"), "Oh la la")
+        return sample_dir
+
+    def test_zip_dir(self):
+        source_dir = self.prepare_sample_dir()
+        compressed = zip_dir(source_dir)
+
+        zip_f = zipfile.ZipFile(compressed)
+        self.assertEqual(zip_f.namelist(), ['index.html', 'docs/index.html'])
+
+    def test_encode_multipart(self):
+        fields = [('username', 'wok'), ('password', 'secret')]
+        files = [('picture', 'wok.png', b'PNG89')]
+        content_type, body = encode_multipart(fields, files, b'-x')
+        self.assertEqual(b'multipart/form-data; boundary=-x', content_type)
+        self.assertEqual(EXPECTED_MULTIPART_OUTPUT, body.split(b'\r\n'))
+
+    def prepare_command(self):
+        self.cmd.upload_dir = self.prepare_sample_dir()
+        self.cmd.ensure_finalized()
+        self.cmd.repository = self.pypi.full_address
+        self.cmd.username = "username"
+        self.cmd.password = "password"
+
+    def test_upload(self):
+        self.prepare_command()
+        self.cmd.run()
+
+        self.assertEqual(len(self.pypi.requests), 1)
+        handler, request_data = self.pypi.requests[-1]
+        self.assertIn(b"content", request_data)
+        self.assertIn("Basic", handler.headers['authorization'])
+        self.assertTrue(handler.headers['content-type']
+            .startswith('multipart/form-data;'))
+
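+        # split the body on the multipart boundary used in the request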
+        action, name, version, content =\
+            request_data.split("----------------GHSKFJDLGDS7543FJKLFHRE75642756743254".encode())[1:5]
+
+        # check that we picked the right chunks
+        self.assertIn(b'name=":action"', action)
+        self.assertIn(b'name="name"', name)
+        self.assertIn(b'name="version"', version)
+        self.assertIn(b'name="content"', content)
+
+        # check their contents
+        self.assertIn(b'doc_upload', action)
+        self.assertIn(b'distr-name', name)
+        self.assertIn(b'docs/index.html', content)
+        self.assertIn(b'Ce mortel ennui', content)
+
+    @unittest.skipIf(_ssl is None, 'Needs SSL support')
+    def test_https_connection(self):
+        https_called = False
+
+        orig_https = upload_docs_mod.http.client.HTTPSConnection
+
+        def https_conn_wrapper(*args):
+            nonlocal https_called
+            https_called = True
+            # the testing server is http
+            return upload_docs_mod.http.client.HTTPConnection(*args)
+
+        upload_docs_mod.http.client.HTTPSConnection = https_conn_wrapper
+        try:
+            self.prepare_command()
+            self.cmd.run()
+            self.assertFalse(https_called)
+
+            self.cmd.repository = self.cmd.repository.replace("http", "https")
+            self.cmd.run()
+            self.assertTrue(https_called)
+        finally:
+            upload_docs_mod.http.client.HTTPSConnection = orig_https
+
+    def test_handling_response(self):
+        self.pypi.default_response_status = '403 Forbidden'
+        self.prepare_command()
+        self.cmd.run()
+        self.assertIn('Upload failed (403): Forbidden', self.get_logs()[-1])
+
+        self.pypi.default_response_status = '301 Moved Permanently'
+        self.pypi.default_response_headers.append(("Location", "brand_new_location"))
+        self.cmd.run()
+        self.assertIn('brand_new_location', self.get_logs()[-1])
+
+    def test_reads_pypirc_data(self):
+        self.write_file(self.rc, PYPIRC % self.pypi.full_address)
+        self.cmd.repository = self.pypi.full_address
+        self.cmd.upload_dir = self.prepare_sample_dir()
+        self.cmd.ensure_finalized()
+        self.assertEqual(self.cmd.username, "real_slim_shady")
+        self.assertEqual(self.cmd.password, "long_island")
+
+    def test_checks_index_html_presence(self):
+        self.cmd.upload_dir = self.prepare_sample_dir()
+        os.remove(os.path.join(self.cmd.upload_dir, "index.html"))
+        self.assertRaises(PackagingFileError, self.cmd.ensure_finalized)
+
+    def test_checks_upload_dir(self):
+        self.cmd.upload_dir = self.prepare_sample_dir()
+        shutil.rmtree(os.path.join(self.cmd.upload_dir))
+        self.assertRaises(PackagingOptionError, self.cmd.ensure_finalized)
+
+    def test_show_response(self):
+        self.prepare_command()
+        self.cmd.show_response = True
+        self.cmd.run()
+        record = self.get_logs()[-1]
+        self.assertTrue(record, "should report the response")
+        self.assertIn(self.pypi.default_response_data, record)
+
+def test_suite():
+    return unittest.makeSuite(UploadDocsTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_compiler.py b/Lib/packaging/tests/test_compiler.py
new file mode 100644
index 0000000..2c620cb
--- /dev/null
+++ b/Lib/packaging/tests/test_compiler.py
@@ -0,0 +1,67 @@
+"""Tests for distutils.compiler."""
+import os
+
+from packaging.compiler import (get_default_compiler, customize_compiler,
+                                gen_lib_options)
+from packaging.tests import unittest, support
+
+
+class FakeCompiler:
+
+    name = 'fake'
+    description = 'Fake'
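+    # implements only the four hooks gen_lib_options needs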
+
+    def library_dir_option(self, dir):
+        return "-L" + dir
+
+    def runtime_library_dir_option(self, dir):
+        return ["-cool", "-R" + dir]
+
+    def find_library_file(self, dirs, lib, debug=False):
+        return 'found'
+
+    def library_option(self, lib):
+        return "-l" + lib
+
+
+class CompilerTestCase(support.EnvironRestorer, unittest.TestCase):
+
+    restore_environ = ['AR', 'ARFLAGS']
+
+    @unittest.skipUnless(get_default_compiler() == 'unix',
+                        'irrelevant if default compiler is not unix')
+    def test_customize_compiler(self):
+
+        os.environ['AR'] = 'my_ar'
+        os.environ['ARFLAGS'] = '-arflags'
+
+        # make sure AR gets caught
+        class compiler:
+            name = 'unix'
+
+            def set_executables(self, **kw):
+                self.exes = kw
+
+        comp = compiler()
+        customize_compiler(comp)
+        self.assertEqual(comp.exes['archiver'], 'my_ar -arflags')
+
+    def test_gen_lib_options(self):
+        compiler = FakeCompiler()
+        libdirs = ['lib1', 'lib2']
+        runlibdirs = ['runlib1']
+        libs = [os.path.join('dir', 'name'), 'name2']
+
+        opts = gen_lib_options(compiler, libdirs, runlibdirs, libs)
+        wanted = ['-Llib1', '-Llib2', '-cool', '-Rrunlib1', 'found',
+                  '-lname2']
+        self.assertEqual(opts, wanted)
+
+
+def test_suite():
+    return unittest.makeSuite(CompilerTestCase)
+
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_config.py b/Lib/packaging/tests/test_config.py
new file mode 100644
index 0000000..a276730
--- /dev/null
+++ b/Lib/packaging/tests/test_config.py
@@ -0,0 +1,434 @@
+"""Tests for packaging.config."""
+import os
+import sys
+import logging
+from io import StringIO
+
+from packaging import command
+from packaging.dist import Distribution
+from packaging.errors import PackagingFileError
+from packaging.compiler import new_compiler, _COMPILERS
+from packaging.command.sdist import sdist
+
+from packaging.tests import unittest, support
+from packaging.tests.support import requires_zlib
+
+
+SETUP_CFG = """
+[metadata]
+name = RestingParrot
+version = 0.6.4
+author = Carl Meyer
+author_email = carl@oddbird.net
+maintainer = Éric Araujo
+maintainer_email = merwok@netwok.org
+summary = A sample project demonstrating packaging
+description-file = %(description-file)s
+keywords = packaging, sample project
+
+classifier =
+  Development Status :: 4 - Beta
+  Environment :: Console (Text Based)
+  Environment :: X11 Applications :: GTK; python_version < '3'
+  License :: OSI Approved :: MIT License
+  Programming Language :: Python
+  Programming Language :: Python :: 2
+  Programming Language :: Python :: 3
+
+requires_python = >=2.4, <3.2
+
+requires_dist =
+  PetShoppe
+  MichaelPalin (> 1.1)
+  pywin32; sys.platform == 'win32'
+  pysqlite2; python_version < '2.5'
+  inotify (0.0.1); sys.platform == 'linux2'
+
+requires_external = libxml2
+
+provides_dist = packaging-sample-project (0.2)
+                unittest2-sample-project
+
+project_url =
+  Main repository, http://bitbucket.org/carljm/sample-distutils2-project
+  Fork in progress, http://bitbucket.org/Merwok/sample-distutils2-project
+
+[files]
+packages_root = src
+
+packages = one
+           two
+           three
+
+modules = haven
+
+scripts =
+  script1.py
+  scripts/find-coconuts
+  bin/taunt
+
+package_data =
+  cheese = data/templates/*
+
+extra_files = %(extra-files)s
+
+# Replaces MANIFEST.in
+sdist_extra =
+  include THANKS HACKING
+  recursive-include examples *.txt *.py
+  prune examples/sample?/build
+
+resources=
+  bm/ {b1,b2}.gif = {icon}
+  Cf*/ *.CFG = {config}/baBar/
+  init_script = {script}/JunGle/
+
+[global]
+commands =
+    packaging.tests.test_config.FooBarBazTest
+
+compilers =
+    packaging.tests.test_config.DCompiler
+
+setup_hook = %(setup-hook)s
+
+
+
+[install_dist]
+sub_commands = foo
+"""
+
+# Cannot be merged with SETUP_CFG, else the install_dist
+# command will fail when trying to compile C sources
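+# Values suffixed with "-- <condition>" are kept only when the environment
+# marker holds, as the platform-dependent assertions below expect.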
+EXT_SETUP_CFG = """
+[files]
+packages = one
+           two
+
+[extension=speed_coconuts]
+name = one.speed_coconuts
+sources = c_src/speed_coconuts.c
+extra_link_args = "`gcc -print-file-name=libgcc.a`" -shared
+define_macros = HAVE_CAIRO HAVE_GTK2
+libraries = gecodeint gecodekernel -- sys.platform != 'win32'
+    GecodeInt GecodeKernel -- sys.platform == 'win32'
+
+[extension=fast_taunt]
+name = three.fast_taunt
+sources = cxx_src/utils_taunt.cxx
+          cxx_src/python_module.cxx
+include_dirs = /usr/include/gecode
+    /usr/include/blitz
+extra_compile_args = -fPIC -O2
+    -DGECODE_VERSION=$(./gecode_version) -- sys.platform != 'win32'
+    /DGECODE_VERSION='win32' -- sys.platform == 'win32'
+language = cxx
+
+"""
+
+
+class DCompiler:
+    name = 'd'
+    description = 'D Compiler'
+
+    def __init__(self, *args):
+        pass
+
+
+def hook(content):
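+    # setup hook named in SETUP_CFG; the tests check for the '.dev1' suffix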
+    content['metadata']['version'] += '.dev1'
+
+
+class FooBarBazTest:
+
+    def __init__(self, dist):
+        self.distribution = dist
+
+    @classmethod
+    def get_command_name(cls):
+        return 'foo'
+
+    def run(self):
+        self.distribution.foo_was_here = True
+
+    def nothing(self):
+        pass
+
+    def get_source_files(self):
+        return []
+
+    ensure_finalized = finalize_options = initialize_options = nothing
+
+
+class ConfigTestCase(support.TempdirManager,
+                     support.EnvironRestorer,
+                     support.LoggingCatcher,
+                     unittest.TestCase):
+
+    restore_environ = ['PLAT']
+
+    def setUp(self):
+        super(ConfigTestCase, self).setUp()
+        self.addCleanup(setattr, sys, 'stdout', sys.stdout)
+        self.addCleanup(setattr, sys, 'stderr', sys.stderr)
+        sys.stdout = StringIO()
+        sys.stderr = StringIO()
+
+        self.addCleanup(os.chdir, os.getcwd())
+        tempdir = self.mkdtemp()
+        self.working_dir = os.getcwd()
+        os.chdir(tempdir)
+        self.tempdir = tempdir
+
+    def tearDown(self):
+        os.chdir(self.working_dir)
+        super(ConfigTestCase, self).tearDown()
+
+    def write_setup(self, kwargs=None):
+        opts = {'description-file': 'README', 'extra-files': '',
+                'setup-hook': 'packaging.tests.test_config.hook'}
+        if kwargs:
+            opts.update(kwargs)
+        self.write_file('setup.cfg', SETUP_CFG % opts, encoding='utf-8')
+
+    def get_dist(self):
+        dist = Distribution()
+        dist.parse_config_files()
+        return dist
+
+    def test_config(self):
+        self.write_setup()
+        self.write_file('README', 'yeah')
+        os.mkdir('bm')
+        self.write_file(('bm', 'b1.gif'), '')
+        self.write_file(('bm', 'b2.gif'), '')
+        os.mkdir('Cfg')
+        self.write_file(('Cfg', 'data.CFG'), '')
+        self.write_file('init_script', '')
+
+        # try to load the metadata now
+        dist = self.get_dist()
+
+        # check what was done
+        self.assertEqual(dist.metadata['Author'], 'Carl Meyer')
+        self.assertEqual(dist.metadata['Author-Email'], 'carl@oddbird.net')
+
+        # the hook adds .dev1
+        self.assertEqual(dist.metadata['Version'], '0.6.4.dev1')
+
+        wanted = [
+            'Development Status :: 4 - Beta',
+            'Environment :: Console (Text Based)',
+            "Environment :: X11 Applications :: GTK; python_version < '3'",
+            'License :: OSI Approved :: MIT License',
+            'Programming Language :: Python',
+            'Programming Language :: Python :: 2',
+            'Programming Language :: Python :: 3']
+        self.assertEqual(dist.metadata['Classifier'], wanted)
+
+        wanted = ['packaging', 'sample project']
+        self.assertEqual(dist.metadata['Keywords'], wanted)
+
+        self.assertEqual(dist.metadata['Requires-Python'], '>=2.4, <3.2')
+
+        wanted = ['PetShoppe',
+                  'MichaelPalin (> 1.1)',
+                  "pywin32; sys.platform == 'win32'",
+                  "pysqlite2; python_version < '2.5'",
+                  "inotify (0.0.1); sys.platform == 'linux2'"]
+
+        self.assertEqual(dist.metadata['Requires-Dist'], wanted)
+        urls = [('Main repository',
+                 'http://bitbucket.org/carljm/sample-distutils2-project'),
+                ('Fork in progress',
+                 'http://bitbucket.org/Merwok/sample-distutils2-project')]
+        self.assertEqual(dist.metadata['Project-Url'], urls)
+
+        self.assertEqual(dist.packages, ['one', 'two', 'three'])
+        self.assertEqual(dist.py_modules, ['haven'])
+        self.assertEqual(dist.package_data, {'cheese': 'data/templates/*'})
+        self.assertEqual(
+            {'bm/b1.gif': '{icon}/b1.gif',
+             'bm/b2.gif': '{icon}/b2.gif',
+             'Cfg/data.CFG': '{config}/baBar/data.CFG',
+             'init_script': '{script}/JunGle/init_script'},
+             dist.data_files)
+
+        self.assertEqual(dist.package_dir, 'src')
+
+        # Make sure we get the foo command loaded.  We use a string comparison
+        # instead of assertIsInstance because the class is not the same when
+        # this test is run directly: foo is packaging.tests.test_config.Foo
+        # because get_command_class uses the full name, but a bare "Foo" in
+        # this file would be __main__.Foo when run as "python test_config.py".
+        # The name FooBarBazTest should be unique enough to prevent
+        # collisions.
+        self.assertEqual('FooBarBazTest',
+                         dist.get_command_obj('foo').__class__.__name__)
+
+        # was the README loaded?
+        self.assertEqual(dist.metadata['description'], 'yeah')
+
+        # do we have the D Compiler enabled?
+        self.assertIn('d', _COMPILERS)
+        d = new_compiler(compiler='d')
+        self.assertEqual(d.description, 'D Compiler')
+
+    def test_multiple_description_file(self):
+        self.write_setup({'description-file': 'README  CHANGES'})
+        self.write_file('README', 'yeah')
+        self.write_file('CHANGES', 'changelog2')
+        dist = self.get_dist()
+        self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+    def test_multiline_description_file(self):
+        self.write_setup({'description-file': 'README\n  CHANGES'})
+        self.write_file('README', 'yeah')
+        self.write_file('CHANGES', 'changelog')
+        dist = self.get_dist()
+        self.assertEqual(dist.metadata['description'], 'yeah\nchangelog')
+        self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+    def test_parse_extensions_in_config(self):
+        self.write_file('setup.cfg', EXT_SETUP_CFG)
+        dist = self.get_dist()
+
+        ext_modules = dict((mod.name, mod) for mod in dist.ext_modules)
+        self.assertEqual(len(ext_modules), 2)
+        ext = ext_modules.get('one.speed_coconuts')
+        self.assertEqual(ext.sources, ['c_src/speed_coconuts.c'])
+        self.assertEqual(ext.define_macros, ['HAVE_CAIRO', 'HAVE_GTK2'])
+        libs = ['gecodeint', 'gecodekernel']
+        if sys.platform == 'win32':
+            libs = ['GecodeInt', 'GecodeKernel']
+        self.assertEqual(ext.libraries, libs)
+        self.assertEqual(ext.extra_link_args,
+            ['`gcc -print-file-name=libgcc.a`', '-shared'])
+
+        ext = ext_modules.get('three.fast_taunt')
+        self.assertEqual(ext.sources,
+            ['cxx_src/utils_taunt.cxx', 'cxx_src/python_module.cxx'])
+        self.assertEqual(ext.include_dirs,
+            ['/usr/include/gecode', '/usr/include/blitz'])
+        cargs = ['-fPIC', '-O2']
+        if sys.platform == 'win32':
+            cargs.append("/DGECODE_VERSION=win32")
+        else:
+            cargs.append('-DGECODE_VERSION=$(./gecode_version)')
+        self.assertEqual(ext.extra_compile_args, cargs)
+        self.assertEqual(ext.language, 'cxx')
+
+    def test_missing_setuphook_warns(self):
+        self.write_setup({'setup-hook': 'this.does._not.exist'})
+        self.write_file('README', 'yeah')
+        dist = self.get_dist()
+        logs = self.get_logs(logging.WARNING)
+        self.assertEqual(1, len(logs))
+        self.assertIn('could not import setup_hook', logs[0])
+
+    def test_metadata_requires_description_files_missing(self):
+        self.write_setup({'description-file': 'README\n  README2'})
+        self.write_file('README', 'yeah')
+        self.write_file('README2', 'yeah')
+        os.mkdir('src')
+        self.write_file(('src', 'haven.py'), '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(('bin', 'taunt'), '#')
+
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file((pkg, '__init__.py'), '#')
+
+        dist = self.get_dist()
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        self.assertRaises(PackagingFileError, cmd.make_distribution)
+
+    @requires_zlib
+    def test_metadata_requires_description_files(self):
+        # Create the following file structure:
+        #   README
+        #   README2
+        #   script1.py
+        #   scripts/
+        #       find-coconuts
+        #   bin/
+        #       taunt
+        #   src/
+        #       haven.py
+        #       one/__init__.py
+        #       two/__init__.py
+        #       three/__init__.py
+
+        self.write_setup({'description-file': 'README\n  README2',
+                          'extra-files': '\n  README3'})
+        self.write_file('README', 'yeah 1')
+        self.write_file('README2', 'yeah 2')
+        self.write_file('README3', 'yeah 3')
+        os.mkdir('src')
+        self.write_file(('src', 'haven.py'), '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(('bin', 'taunt'), '#')
+
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file((pkg, '__init__.py'), '#')
+
+        dist = self.get_dist()
+        self.assertIn('yeah 1\nyeah 2', dist.metadata['description'])
+
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        self.assertRaises(PackagingFileError, cmd.make_distribution)
+
+        self.write_setup({'description-file': 'README\n  README2',
+                          'extra-files': '\n  README2\n    README'})
+        dist = self.get_dist()
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        cmd.make_distribution()
+        with open('MANIFEST') as fp:
+            self.assertIn('README\nREADME2\n', fp.read())
+
+    def test_sub_commands(self):
+        self.write_setup()
+        self.write_file('README', 'yeah')
+        os.mkdir('src')
+        self.write_file(('src', 'haven.py'), '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(('bin', 'taunt'), '#')
+
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file((pkg, '__init__.py'), '#')
+
+        # try to run the install command to see if foo is called
+        dist = self.get_dist()
+        self.assertIn('foo', command.get_command_names())
+        self.assertEqual('FooBarBazTest',
+                         dist.get_command_obj('foo').__class__.__name__)
+
+
+def test_suite():
+    return unittest.makeSuite(ConfigTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_create.py b/Lib/packaging/tests/test_create.py
new file mode 100644
index 0000000..906ca8f
--- /dev/null
+++ b/Lib/packaging/tests/test_create.py
@@ -0,0 +1,232 @@
+"""Tests for packaging.create."""
+import io
+import os
+import sys
+import sysconfig
+from textwrap import dedent
+from packaging.create import MainProgram, ask_yn, ask, main
+
+from packaging.tests import support, unittest
+
+
+class CreateTestCase(support.TempdirManager,
+                     support.EnvironRestorer,
+                     unittest.TestCase):
+
+    restore_environ = ['PLAT']
+
+    def setUp(self):
+        super(CreateTestCase, self).setUp()
+        self._stdin = sys.stdin  # TODO use Inputs
+        self._stdout = sys.stdout
+        sys.stdin = io.StringIO()
+        sys.stdout = io.StringIO()
+        self._cwd = os.getcwd()
+        self.wdir = self.mkdtemp()
+        os.chdir(self.wdir)
+        # patch sysconfig so that the data_files destinations used in the
+        # tests can be mapped back to the {man} and {doc} resource categories
+        self._old_get_paths = sysconfig.get_paths
+        sysconfig.get_paths = lambda *args, **kwargs: {
+            'man': sys.prefix + '/share/man',
+            'doc': sys.prefix + '/share/doc/pyxfoil', }
+
+    def tearDown(self):
+        sys.stdin = self._stdin
+        sys.stdout = self._stdout
+        os.chdir(self._cwd)
+        sysconfig.get_paths = self._old_get_paths
+        super(CreateTestCase, self).tearDown()
+
+    def test_ask_yn(self):
+        sys.stdin.write('y\n')
+        sys.stdin.seek(0)
+        self.assertEqual('y', ask_yn('is this a test'))
+
+    def test_ask(self):
+        sys.stdin.write('a\n')
+        sys.stdin.write('b\n')
+        sys.stdin.seek(0)
+        self.assertEqual('a', ask('is this a test'))
+        self.assertEqual('b', ask(str(list(range(0, 70))), default='c',
+                                  lengthy=True))
+
+    def test_set_multi(self):
+        mainprogram = MainProgram()
+        sys.stdin.write('aaaaa\n')
+        sys.stdin.seek(0)
+        mainprogram.data['author'] = []
+        mainprogram._set_multi('_set_multi test', 'author')
+        self.assertEqual(['aaaaa'], mainprogram.data['author'])
+
+    def test_find_files(self):
+        # making sure we scan a project dir correctly
+        mainprogram = MainProgram()
+
+        # building the structure
+        tempdir = self.wdir
+        dirs = ['pkg1', 'data', 'pkg2', 'pkg2/sub']
+        files = ['README', 'setup.cfg', 'foo.py',
+                 'pkg1/__init__.py', 'pkg1/bar.py',
+                 'data/data1', 'pkg2/__init__.py',
+                 'pkg2/sub/__init__.py']
+
+        for dir_ in dirs:
+            os.mkdir(os.path.join(tempdir, dir_))
+
+        for file_ in files:
+            path = os.path.join(tempdir, file_)
+            self.write_file(path, 'xxx')
+
+        mainprogram._find_files()
+        mainprogram.data['packages'].sort()
+
+        # do we have what we want?
+        self.assertEqual(mainprogram.data['packages'],
+                         ['pkg1', 'pkg2', 'pkg2.sub'])
+        self.assertEqual(mainprogram.data['modules'], ['foo'])
+        data_fn = os.path.join('data', 'data1')
+        self.assertEqual(set(mainprogram.data['extra_files']),
+                         set(['setup.cfg', 'README', data_fn]))
+
+    def test_convert_setup_py_to_cfg(self):
+        self.write_file((self.wdir, 'setup.py'),
+                        dedent("""
+        # coding: utf-8
+        from distutils.core import setup
+
+        long_description = '''My super Death-scription
+        barbar is now on the public domain,
+        ho, baby !'''
+
+        setup(name='pyxfoil',
+              version='0.2',
+              description='Python bindings for the Xfoil engine',
+              long_description=long_description,
+              maintainer='André Espaze',
+              maintainer_email='andre.espaze@logilab.fr',
+              url='http://www.python-science.org/project/pyxfoil',
+              license='GPLv2',
+              packages=['pyxfoil', 'babar', 'me'],
+              data_files=[
+                  ('share/doc/pyxfoil', ['README.rst']),
+                  ('share/man', ['pyxfoil.1']),
+                         ],
+              py_modules=['my_lib', 'mymodule'],
+              package_dir={
+                  'babar': '',
+                  'me': 'Martinique/Lamentin',
+                          },
+              package_data={
+                  'babar': ['Pom', 'Flora', 'Alexander'],
+                  'me': ['dady', 'mumy', 'sys', 'bro'],
+                  '':  ['setup.py', 'README'],
+                  'pyxfoil': ['fengine.so'],
+                           },
+              scripts=['my_script', 'bin/run'],
+              )
+        """), encoding='utf-8')
+        sys.stdin.write('y\n')
+        sys.stdin.seek(0)
+        main()
+
+        with open(os.path.join(self.wdir, 'setup.cfg'), encoding='utf-8') as fp:
+            lines = set(line.rstrip() for line in fp)
+
+        # FIXME don't use sets
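+        # note how the multi-line description is serialized below:
+        # continuation lines are indented and prefixed with '|'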
+        self.assertEqual(lines, set(['',
+            '[metadata]',
+            'version = 0.2',
+            'name = pyxfoil',
+            'maintainer = André Espaze',
+            'description = My super Death-scription',
+            '       |barbar is now on the public domain,',
+            '       |ho, baby !',
+            'maintainer_email = andre.espaze@logilab.fr',
+            'home_page = http://www.python-science.org/project/pyxfoil',
+            'download_url = UNKNOWN',
+            'summary = Python bindings for the Xfoil engine',
+            '[files]',
+            'modules = my_lib',
+            '    mymodule',
+            'packages = pyxfoil',
+            '    babar',
+            '    me',
+            'extra_files = Martinique/Lamentin/dady',
+            '    Martinique/Lamentin/mumy',
+            '    Martinique/Lamentin/sys',
+            '    Martinique/Lamentin/bro',
+            '    Pom',
+            '    Flora',
+            '    Alexander',
+            '    setup.py',
+            '    README',
+            '    pyxfoil/fengine.so',
+            'scripts = my_script',
+            '    bin/run',
+            'resources =',
+            '    README.rst = {doc}',
+            '    pyxfoil.1 = {man}',
+        ]))
+
+    def test_convert_setup_py_to_cfg_with_description_in_readme(self):
+        self.write_file((self.wdir, 'setup.py'),
+                        dedent("""
+        # coding: utf-8
+        from distutils.core import setup
+        with open('README.txt') as fp:
+            long_description = fp.read()
+
+        setup(name='pyxfoil',
+              version='0.2',
+              description='Python bindings for the Xfoil engine',
+              long_description=long_description,
+              maintainer='André Espaze',
+              maintainer_email='andre.espaze@logilab.fr',
+              url='http://www.python-science.org/project/pyxfoil',
+              license='GPLv2',
+              packages=['pyxfoil'],
+              package_data={'pyxfoil': ['fengine.so', 'babar.so']},
+              data_files=[
+                ('share/doc/pyxfoil', ['README.rst']),
+                ('share/man', ['pyxfoil.1']),
+              ],
+        )
+        """), encoding='utf-8')
+        self.write_file((self.wdir, 'README.txt'),
+                        dedent('''
+My super Death-scription
+barbar is now in the public domain,
+ho, baby!
+                        '''))
+        sys.stdin.write('y\n')
+        sys.stdin.seek(0)
+        # FIXME Out of memory error.
+        main()
+        with open(os.path.join(self.wdir, 'setup.cfg'), encoding='utf-8') as fp:
+            lines = set(line.rstrip() for line in fp)
+
+        self.assertEqual(lines, set(['',
+            '[metadata]',
+            'version = 0.2',
+            'name = pyxfoil',
+            'maintainer = André Espaze',
+            'maintainer_email = andre.espaze@logilab.fr',
+            'home_page = http://www.python-science.org/project/pyxfoil',
+            'download_url = UNKNOWN',
+            'summary = Python bindings for the Xfoil engine',
+            'description-file = README.txt',
+            '[files]',
+            'packages = pyxfoil',
+            'extra_files = pyxfoil/fengine.so',
+            '    pyxfoil/babar.so',
+            'resources =',
+            '    README.rst = {doc}',
+            '    pyxfoil.1 = {man}',
+        ]))
+
+
+def test_suite():
+    return unittest.makeSuite(CreateTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_cygwinccompiler.py b/Lib/packaging/tests/test_cygwinccompiler.py
new file mode 100644
index 0000000..17c43cd
--- /dev/null
+++ b/Lib/packaging/tests/test_cygwinccompiler.py
@@ -0,0 +1,88 @@
+"""Tests for packaging.cygwinccompiler."""
+import os
+import sys
+import sysconfig
+from packaging.compiler.cygwinccompiler import (
+    check_config_h, get_msvcr,
+    CONFIG_H_OK, CONFIG_H_NOTOK, CONFIG_H_UNCERTAIN)
+
+from packaging.tests import unittest, support
+
+
+class CygwinCCompilerTestCase(support.TempdirManager,
+                              unittest.TestCase):
+
+    def setUp(self):
+        super(CygwinCCompilerTestCase, self).setUp()
+        self.version = sys.version
+        self.python_h = os.path.join(self.mkdtemp(), 'python.h')
+        self.old_get_config_h_filename = sysconfig.get_config_h_filename
+        sysconfig.get_config_h_filename = self._get_config_h_filename
+
+    def tearDown(self):
+        sys.version = self.version
+        sysconfig.get_config_h_filename = self.old_get_config_h_filename
+        super(CygwinCCompilerTestCase, self).tearDown()
+
+    def _get_config_h_filename(self):
+        return self.python_h
+
+    def test_check_config_h(self):
+        # check_config_h looks for "GCC" in sys.version first and returns
+        # CONFIG_H_OK if it is found
+        sys.version = ('2.6.1 (r261:67515, Dec  6 2008, 16:42:21) \n[GCC '
+                       '4.0.1 (Apple Computer, Inc. build 5370)]')
+
+        self.assertEqual(check_config_h()[0], CONFIG_H_OK)
+
+        # then it tries to see if it can find "__GNUC__" in pyconfig.h
+        sys.version = 'something without the *CC word'
+
+        # if the file doesn't exist, it returns CONFIG_H_UNCERTAIN
+        self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN)
+
+        # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
+        self.write_file(self.python_h, 'xxx')
+        self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK)
+
+        # and CONFIG_H_OK if __GNUC__ is found
+        self.write_file(self.python_h, 'xxx __GNUC__ xxx')
+        self.assertEqual(check_config_h()[0], CONFIG_H_OK)
+
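+    # get_msvcr maps the "[MSC v.XXXX ...]" marker in sys.version to the
+    # matching msvcr runtime: 1300 -> msvcr70, 1310 -> msvcr71,
+    # 1400 -> msvcr80, 1500 -> msvcr90; no marker means None, and an
+    # unrecognized version raises ValueError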
+    def test_get_msvcr(self):
+        # none
+        sys.version = ('2.6.1 (r261:67515, Dec  6 2008, 16:42:21) '
+                       '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]')
+        self.assertEqual(get_msvcr(), None)
+
+        # MSVC 7.0
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1300 32 bits (Intel)]')
+        self.assertEqual(get_msvcr(), ['msvcr70'])
+
+        # MSVC 7.1
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1310 32 bits (Intel)]')
+        self.assertEqual(get_msvcr(), ['msvcr71'])
+
+        # VS2005 / MSVC 8.0
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1400 32 bits (Intel)]')
+        self.assertEqual(get_msvcr(), ['msvcr80'])
+
+        # VS2008 / MSVC 9.0
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1500 32 bits (Intel)]')
+        self.assertEqual(get_msvcr(), ['msvcr90'])
+
+        # unknown
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1999 32 bits (Intel)]')
+        self.assertRaises(ValueError, get_msvcr)
+
+
+def test_suite():
+    return unittest.makeSuite(CygwinCCompilerTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_database.py b/Lib/packaging/tests/test_database.py
new file mode 100644
index 0000000..ea63d3e
--- /dev/null
+++ b/Lib/packaging/tests/test_database.py
@@ -0,0 +1,517 @@
+import os
+import io
+import csv
+import imp
+import sys
+import shutil
+import zipfile
+import tempfile
+from os.path import relpath  # separate import for backport concerns
+from hashlib import md5
+
+from packaging.errors import PackagingError
+from packaging.metadata import Metadata
+from packaging.tests import unittest, run_unittest, support, TESTFN
+from packaging.tests.support import requires_zlib
+
+from packaging.database import (
+    Distribution, EggInfoDistribution, get_distribution, get_distributions,
+    provides_distribution, obsoletes_distribution, get_file_users,
+    enable_cache, disable_cache, distinfo_dirname, _yield_distributions)
+
+# TODO Add a test for getting a distribution provided by another distribution
+# TODO Add a test for RECORD items with absolute paths (e.g. /etc/myapp/config.ini)
+# TODO Add tests from the former pep376 project (zipped site-packages, etc.)
+
+
+def get_hexdigest(filename):
+    with open(filename, 'rb') as file:
+        checksum = md5(file.read())
+    return checksum.hexdigest()
+
+
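+# A RECORD row has three fields: path relative to sys.prefix, md5 hex
+# digest, and file size in bytes.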
+def record_pieces(file):
+    path = relpath(file, sys.prefix)
+    digest = get_hexdigest(file)
+    size = os.path.getsize(file)
+    return [path, digest, size]
+
+
+class CommonDistributionTests:
+    """Mixin used to test the interface common to both Distribution classes.
+
+    Derived classes define cls, sample_dist, dirs and records.  These
+    attributes are used in test methods.  See source code for details.
+    """
+
+    def setUp(self):
+        super(CommonDistributionTests, self).setUp()
+        self.addCleanup(enable_cache)
+        disable_cache()
+        self.fake_dists_path = os.path.abspath(
+            os.path.join(os.path.dirname(__file__), 'fake_dists'))
+
+    def test_instantiation(self):
+        # check that useful attributes are here
+        name, version, distdir = self.sample_dist
+        here = os.path.abspath(os.path.dirname(__file__))
+        dist_path = os.path.join(here, 'fake_dists', distdir)
+
+        dist = self.dist = self.cls(dist_path)
+        self.assertEqual(dist.path, dist_path)
+        self.assertEqual(dist.name, name)
+        self.assertEqual(dist.metadata['Name'], name)
+        self.assertIsInstance(dist.metadata, Metadata)
+        self.assertEqual(dist.version, version)
+        self.assertEqual(dist.metadata['Version'], version)
+
+    @requires_zlib
+    def test_repr(self):
+        dist = self.cls(self.dirs[0])
+        # just check that the class name is in the repr
+        self.assertIn(self.cls.__name__, repr(dist))
+
+    @requires_zlib
+    def test_comparison(self):
+        # tests for __eq__ and __hash__
+        dist = self.cls(self.dirs[0])
+        dist2 = self.cls(self.dirs[0])
+        dist3 = self.cls(self.dirs[1])
+        self.assertIn(dist, {dist: True})
+        self.assertEqual(dist, dist)
+
+        self.assertIsNot(dist, dist2)
+        self.assertEqual(dist, dist2)
+        self.assertNotEqual(dist, dist3)
+        self.assertNotEqual(dist, ())
+
+    def test_list_installed_files(self):
+        for dir_ in self.dirs:
+            dist = self.cls(dir_)
+            for path, md5_, size in dist.list_installed_files():
+                record_data = self.records[dist.path]
+                self.assertIn(path, record_data)
+                self.assertEqual(md5_, record_data[path][0])
+                self.assertEqual(size, record_data[path][1])
+
+
+class TestDistribution(CommonDistributionTests, unittest.TestCase):
+
+    cls = Distribution
+    sample_dist = 'choxie', '2.0.0.9', 'choxie-2.0.0.9.dist-info'
+
+    def setUp(self):
+        super(TestDistribution, self).setUp()
+        self.dirs = [os.path.join(self.fake_dists_path, f)
+                     for f in os.listdir(self.fake_dists_path)
+                     if f.endswith('.dist-info')]
+
+        self.records = {}
+        for distinfo_dir in self.dirs:
+            record_file = os.path.join(distinfo_dir, 'RECORD')
+            with open(record_file, 'w') as file:
+                record_writer = csv.writer(
+                    file, delimiter=',', quoting=csv.QUOTE_NONE,
+                    lineterminator='\n')
+
+                dist_location = distinfo_dir.replace('.dist-info', '')
+
+                for path, dirs, files in os.walk(dist_location):
+                    for f in files:
+                        record_writer.writerow(record_pieces(
+                                               os.path.join(path, f)))
+                for file in ('INSTALLER', 'METADATA', 'REQUESTED'):
+                    record_writer.writerow(record_pieces(
+                                           os.path.join(distinfo_dir, file)))
+                record_writer.writerow([relpath(record_file, sys.prefix)])
+
+            with open(record_file) as file:
+                record_reader = csv.reader(file, lineterminator='\n')
+                record_data = {}
+                for row in record_reader:
+                    if row == []:
+                        continue
+                    path, md5_, size = (row[:] +
+                                        [None for i in range(len(row), 3)])
+                    record_data[path] = md5_, size
+            self.records[distinfo_dir] = record_data
+
+    def tearDown(self):
+        for distinfo_dir in self.dirs:
+            record_file = os.path.join(distinfo_dir, 'RECORD')
+            open(record_file, 'wb').close()
+        super(TestDistribution, self).tearDown()
+
+    def test_instantiation(self):
+        super(TestDistribution, self).test_instantiation()
+        self.assertIsInstance(self.dist.requested, bool)
+
+    def test_uses(self):
+        # Test to determine if a distribution uses a specified file.
+        # Criteria to test against
+        distinfo_name = 'grammar-1.0a4'
+        distinfo_dir = os.path.join(self.fake_dists_path,
+                                    distinfo_name + '.dist-info')
+        true_path = [self.fake_dists_path, distinfo_name,
+                     'grammar', 'utils.py']
+        true_path = relpath(os.path.join(*true_path), sys.prefix)
+        false_path = [self.fake_dists_path, 'towel_stuff-0.1', 'towel_stuff',
+                      '__init__.py']
+        false_path = relpath(os.path.join(*false_path), sys.prefix)
+
+        # Test if the distribution uses the file in question
+        dist = Distribution(distinfo_dir)
+        self.assertTrue(dist.uses(true_path))
+        self.assertFalse(dist.uses(false_path))
+
+    def test_get_distinfo_file(self):
+        # Test the retrieval of dist-info file objects.
+        distinfo_name = 'choxie-2.0.0.9'
+        other_distinfo_name = 'grammar-1.0a4'
+        distinfo_dir = os.path.join(self.fake_dists_path,
+                                    distinfo_name + '.dist-info')
+        dist = Distribution(distinfo_dir)
+        # Test for known good file matches
+        distinfo_files = [
+            # Relative paths
+            'INSTALLER', 'METADATA',
+            # Absolute paths
+            os.path.join(distinfo_dir, 'RECORD'),
+            os.path.join(distinfo_dir, 'REQUESTED'),
+        ]
+
+        for distfile in distinfo_files:
+            with dist.get_distinfo_file(distfile) as value:
+                self.assertIsInstance(value, io.TextIOWrapper)
+                # Is it the correct file?
+                self.assertEqual(value.name,
+                                 os.path.join(distinfo_dir, distfile))
+
+        # Test an absolute path that is part of another distribution's dist-info
+        other_distinfo_file = os.path.join(
+            self.fake_dists_path, other_distinfo_name + '.dist-info',
+            'REQUESTED')
+        self.assertRaises(PackagingError, dist.get_distinfo_file,
+                          other_distinfo_file)
+        # Test for a file that should not exist
+        self.assertRaises(PackagingError, dist.get_distinfo_file,
+                          'MAGICFILE')
+
+    def test_list_distinfo_files(self):
+        # Test for the iteration of RECORD path entries.
+        distinfo_name = 'towel_stuff-0.1'
+        distinfo_dir = os.path.join(self.fake_dists_path,
+                                    distinfo_name + '.dist-info')
+        dist = Distribution(distinfo_dir)
+        # Test for the iteration of the raw path
+        distinfo_record_paths = self.records[distinfo_dir].keys()
+        found = dist.list_distinfo_files()
+        self.assertEqual(sorted(found), sorted(distinfo_record_paths))
+        # Test for the iteration of local absolute paths
+        distinfo_record_paths = [os.path.join(sys.prefix, path)
+            for path in self.records[distinfo_dir]]
+        found = dist.list_distinfo_files(local=True)
+        self.assertEqual(sorted(found), sorted(distinfo_record_paths))
+
+    def test_get_resources_path(self):
+        distinfo_name = 'babar-0.1'
+        distinfo_dir = os.path.join(self.fake_dists_path,
+                                    distinfo_name + '.dist-info')
+        dist = Distribution(distinfo_dir)
+        resource_path = dist.get_resource_path('babar.png')
+        self.assertEqual(resource_path, 'babar.png')
+        self.assertRaises(KeyError, dist.get_resource_path, 'notexist')
+
+
+class TestEggInfoDistribution(CommonDistributionTests,
+                              support.LoggingCatcher,
+                              unittest.TestCase):
+
+    cls = EggInfoDistribution
+    sample_dist = 'bacon', '0.1', 'bacon-0.1.egg-info'
+
+    def setUp(self):
+        super(TestEggInfoDistribution, self).setUp()
+
+        self.dirs = [os.path.join(self.fake_dists_path, f)
+                     for f in os.listdir(self.fake_dists_path)
+                     if f.endswith('.egg') or f.endswith('.egg-info')]
+
+        self.records = {}
+
+    @unittest.skip('not implemented yet')
+    def test_list_installed_files(self):
+        # EggInfoDistribution defines list_installed_files but there is no
+        # test for it yet; someone with setuptools expertise needs to add a
+        # file with the list of installed files for one of the egg fake dists
+        # and write the support code to populate self.records (and then delete
+        # this method)
+        pass
+
+
+class TestDatabase(support.LoggingCatcher,
+                   unittest.TestCase):
+
+    def setUp(self):
+        super(TestDatabase, self).setUp()
+        disable_cache()
+        # Setup the path environment with our fake distributions
+        current_path = os.path.abspath(os.path.dirname(__file__))
+        self.sys_path = sys.path[:]
+        self.fake_dists_path = os.path.join(current_path, 'fake_dists')
+        sys.path.insert(0, self.fake_dists_path)
+
+    def tearDown(self):
+        sys.path[:] = self.sys_path
+        enable_cache()
+        super(TestDatabase, self).tearDown()
+
+    def test_distinfo_dirname(self):
+        # Given a name and a version, we expect the distinfo_dirname function
+        # to return a standard distribution information directory name.
+
+        items = [
+            # (name, version, standard_dirname)
+            # Test for a very simple single word name and decimal version
+            # number
+            ('docutils', '0.5', 'docutils-0.5.dist-info'),
+            # Test for another name, except this time with a '-' in it,
+            # which needs to be transformed during the name lookup
+            ('python-ldap', '2.5', 'python_ldap-2.5.dist-info'),
+            # Test for both '-' in the name and a funky version number
+            ('python-ldap', '2.5 a---5', 'python_ldap-2.5 a---5.dist-info'),
+            ]
+
+        # Loop through the items to validate the results
+        for name, version, standard_dirname in items:
+            dirname = distinfo_dirname(name, version)
+            self.assertEqual(dirname, standard_dirname)
+
+    @requires_zlib
+    def test_get_distributions(self):
+        # Look up all distributions found on ``sys.path``.
+        # This test could potentially pick up other installed distributions
+        fake_dists = [('grammar', '1.0a4'), ('choxie', '2.0.0.9'),
+                      ('towel-stuff', '0.1'), ('babar', '0.1')]
+        found_dists = []
+
+        # Verify the fake dists have been found.
+        dists = [dist for dist in get_distributions()]
+        for dist in dists:
+            self.assertIsInstance(dist, Distribution)
+            if (dist.name in dict(fake_dists) and
+                dist.path.startswith(self.fake_dists_path)):
+                found_dists.append((dist.name, dist.metadata['version'], ))
+            else:
+                # check that it doesn't find anything more than this
+                self.assertFalse(dist.path.startswith(self.fake_dists_path))
+            # otherwise we don't care what other distributions are found
+
+        # Finally, test that we found all that we were looking for
+        self.assertEqual(sorted(found_dists), sorted(fake_dists))
+
+        # Now, test if the egg-info distributions are found correctly as well
+        fake_dists += [('bacon', '0.1'), ('cheese', '2.0.2'),
+                       ('coconuts-aster', '10.3'),
+                       ('banana', '0.4'), ('strawberry', '0.6'),
+                       ('truffles', '5.0'), ('nut', 'funkyversion')]
+        found_dists = []
+
+        dists = [dist for dist in get_distributions(use_egg_info=True)]
+        for dist in dists:
+            self.assertIsInstance(dist, (Distribution, EggInfoDistribution))
+            if (dist.name in dict(fake_dists) and
+                dist.path.startswith(self.fake_dists_path)):
+                found_dists.append((dist.name, dist.metadata['version']))
+            else:
+                self.assertFalse(dist.path.startswith(self.fake_dists_path))
+
+        self.assertEqual(sorted(fake_dists), sorted(found_dists))
+
+    @requires_zlib
+    def test_get_distribution(self):
+        # Test for looking up a distribution by name.
+        # Test the lookup of the towel-stuff distribution
+        name = 'towel-stuff'  # Note: This is different from the directory name
+
+        # Look up the distribution
+        dist = get_distribution(name)
+        self.assertIsInstance(dist, Distribution)
+        self.assertEqual(dist.name, name)
+
+        # Verify that an unknown distribution returns None
+        self.assertIsNone(get_distribution('bogus'))
+
+        # Verify partial name matching doesn't work
+        self.assertIsNone(get_distribution('towel'))
+
+        # Verify that it does not find egg-info distributions, when not
+        # instructed to
+        self.assertIsNone(get_distribution('bacon'))
+        self.assertIsNone(get_distribution('cheese'))
+        self.assertIsNone(get_distribution('strawberry'))
+        self.assertIsNone(get_distribution('banana'))
+
+        # Now check that it works in both situations, when egg-info
+        # is a file and when it is a directory
+        dist = get_distribution('cheese', use_egg_info=True)
+        self.assertIsInstance(dist, EggInfoDistribution)
+        self.assertEqual(dist.name, 'cheese')
+
+        dist = get_distribution('bacon', use_egg_info=True)
+        self.assertIsInstance(dist, EggInfoDistribution)
+        self.assertEqual(dist.name, 'bacon')
+
+        dist = get_distribution('banana', use_egg_info=True)
+        self.assertIsInstance(dist, EggInfoDistribution)
+        self.assertEqual(dist.name, 'banana')
+
+        dist = get_distribution('strawberry', use_egg_info=True)
+        self.assertIsInstance(dist, EggInfoDistribution)
+        self.assertEqual(dist.name, 'strawberry')
+
+    def test_get_file_users(self):
+        # Test the iteration of distributions that use a file.
+        name = 'towel_stuff-0.1'
+        path = os.path.join(self.fake_dists_path, name,
+                            'towel_stuff', '__init__.py')
+        for dist in get_file_users(path):
+            self.assertIsInstance(dist, Distribution)
+            self.assertEqual(dist.name, name)
+
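+    # The second argument of provides_distribution is a version predicate:
+    # either an exact version ('1.0') or comparison clauses that can be
+    # combined with commas ('!=1.1,<=2.0'), as used below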
+    @requires_zlib
+    def test_provides(self):
+        # Test for looking up distributions by what they provide
+        checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y))
+
+        l = [dist.name for dist in provides_distribution('truffles')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.0')]
+        checkLists(l, ['choxie'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.0',
+                                                         use_egg_info=True)]
+        checkLists(l, ['choxie', 'cheese'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.1.2')]
+        checkLists(l, ['towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.1')]
+        checkLists(l, ['towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles',
+                                                         '!=1.1,<=2.0')]
+        checkLists(l, ['choxie'])
+
+        l = [dist.name for dist in provides_distribution('truffles',
+                                                         '!=1.1,<=2.0',
+                                                          use_egg_info=True)]
+        checkLists(l, ['choxie', 'bacon', 'cheese'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>1.0')]
+        checkLists(l, ['towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>1.5')]
+        checkLists(l, [])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>1.5',
+                                                         use_egg_info=True)]
+        checkLists(l, ['bacon'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>=1.0')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('strawberry', '0.6',
+                                                         use_egg_info=True)]
+        checkLists(l, ['coconuts-aster'])
+
+        l = [dist.name for dist in provides_distribution('strawberry', '>=0.5',
+                                                         use_egg_info=True)]
+        checkLists(l, ['coconuts-aster'])
+
+        l = [dist.name for dist in provides_distribution('strawberry', '>0.6',
+                                                         use_egg_info=True)]
+        checkLists(l, [])
+
+        l = [dist.name for dist in provides_distribution('banana', '0.4',
+                                                         use_egg_info=True)]
+        checkLists(l, ['coconuts-aster'])
+
+        l = [dist.name for dist in provides_distribution('banana', '>=0.3',
+                                                         use_egg_info=True)]
+        checkLists(l, ['coconuts-aster'])
+
+        l = [dist.name for dist in provides_distribution('banana', '!=0.4',
+                                                         use_egg_info=True)]
+        checkLists(l, [])
+
+    @requires_zlib
+    def test_obsoletes(self):
+        # Test looking for distributions based on what they obsolete
+        checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y))
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '1.0')]
+        checkLists(l, [])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '1.0',
+                                                          use_egg_info=True)]
+        checkLists(l, ['cheese', 'bacon'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.8')]
+        checkLists(l, ['choxie'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.8',
+                                                          use_egg_info=True)]
+        checkLists(l, ['choxie', 'cheese'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.9.6')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles',
+                                                          '0.5.2.3')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.2')]
+        checkLists(l, ['towel-stuff'])
+
+    @requires_zlib
+    def test_yield_distribution(self):
+        # tests the internal function _yield_distributions
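+        # the two boolean arguments select .dist-info and egg-info
+        # distributions respectively, judging from the expected results below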
+        checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y))
+
+        eggs = [('bacon', '0.1'), ('banana', '0.4'), ('strawberry', '0.6'),
+                ('truffles', '5.0'), ('cheese', '2.0.2'),
+                ('coconuts-aster', '10.3'), ('nut', 'funkyversion')]
+        dists = [('choxie', '2.0.0.9'), ('grammar', '1.0a4'),
+                 ('towel-stuff', '0.1'), ('babar', '0.1')]
+
+        checkLists([], _yield_distributions(False, False))
+
+        found = [(dist.name, dist.metadata['Version'])
+                 for dist in _yield_distributions(False, True)
+                 if dist.path.startswith(self.fake_dists_path)]
+        checkLists(eggs, found)
+
+        found = [(dist.name, dist.metadata['Version'])
+                 for dist in _yield_distributions(True, False)
+                 if dist.path.startswith(self.fake_dists_path)]
+        checkLists(dists, found)
+
+        found = [(dist.name, dist.metadata['Version'])
+                 for dist in _yield_distributions(True, True)
+                 if dist.path.startswith(self.fake_dists_path)]
+        checkLists(dists + eggs, found)
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    load = unittest.defaultTestLoader.loadTestsFromTestCase
+    suite.addTest(load(TestDistribution))
+    suite.addTest(load(TestEggInfoDistribution))
+    suite.addTest(load(TestDatabase))
+    return suite
+
+
+if __name__ == "__main__":
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_depgraph.py b/Lib/packaging/tests/test_depgraph.py
new file mode 100644
index 0000000..64c22eb
--- /dev/null
+++ b/Lib/packaging/tests/test_depgraph.py
@@ -0,0 +1,309 @@
+"""Tests for packaging.depgraph """
+import io
+import os
+import re
+import sys
+import packaging.database
+from packaging import depgraph
+
+from packaging.tests import unittest, support
+from packaging.tests.support import requires_zlib
+
+
+class DepGraphTestCase(support.LoggingCatcher,
+                       unittest.TestCase):
+
+    DISTROS_DIST = ('choxie', 'grammar', 'towel-stuff')
+    DISTROS_EGG = ('bacon', 'banana', 'strawberry', 'cheese')
+    BAD_EGGS = ('nut',)
+
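+    # graph_to_dot writes DOT edges of the form
+    # "from" -> "to" [label="requirement"]; EDGE parses them back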
+    EDGE = re.compile(
+           r'"(?P<from>.*)" -> "(?P<to>.*)" \[label="(?P<label>.*)"\]')
+
+    def checkLists(self, l1, l2):
+        """ Compare two lists without taking the order into consideration """
+        self.assertListEqual(sorted(l1), sorted(l2))
+
+    def setUp(self):
+        super(DepGraphTestCase, self).setUp()
+        path = os.path.join(os.path.dirname(__file__), 'fake_dists')
+        path = os.path.abspath(path)
+        sys.path.insert(0, path)
+        self.addCleanup(sys.path.remove, path)
+        self.addCleanup(packaging.database.enable_cache)
+        packaging.database.disable_cache()
+
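+    # generate_graph returns an object exposing adjacency_list (dist ->
+    # [(dependency, requirement string)]), reverse_list (dist -> dependents)
+    # and missing (dist -> unmatched requirement strings), as exercised below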
+    def test_generate_graph(self):
+        dists = []
+        for name in self.DISTROS_DIST:
+            dist = packaging.database.get_distribution(name)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        choxie, grammar, towel = dists
+
+        graph = depgraph.generate_graph(dists)
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[choxie]]
+        self.checkLists([('towel-stuff', 'towel-stuff (0.1)')], deps)
+        self.assertIn(choxie, graph.reverse_list[towel])
+        self.checkLists(graph.missing[choxie], ['nut'])
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[grammar]]
+        self.checkLists([], deps)
+        self.checkLists(graph.missing[grammar], ['truffles (>=1.2)'])
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[towel]]
+        self.checkLists([], deps)
+        self.checkLists(graph.missing[towel], ['bacon (<=0.2)'])
+
+    @requires_zlib
+    def test_generate_graph_egg(self):
+        dists = []
+        for name in self.DISTROS_DIST + self.DISTROS_EGG:
+            dist = packaging.database.get_distribution(name, use_egg_info=True)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        choxie, grammar, towel, bacon, banana, strawberry, cheese = dists
+
+        graph = depgraph.generate_graph(dists)
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[choxie]]
+        self.checkLists([('towel-stuff', 'towel-stuff (0.1)')], deps)
+        self.assertIn(choxie, graph.reverse_list[towel])
+        self.checkLists(graph.missing[choxie], ['nut'])
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[grammar]]
+        self.checkLists([('bacon', 'truffles (>=1.2)')], deps)
+        self.checkLists(graph.missing[grammar], [])
+        self.assertIn(grammar, graph.reverse_list[bacon])
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[towel]]
+        self.checkLists([('bacon', 'bacon (<=0.2)')], deps)
+        self.checkLists(graph.missing[towel], [])
+        self.assertIn(towel, graph.reverse_list[bacon])
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[bacon]]
+        self.checkLists([], deps)
+        self.checkLists(graph.missing[bacon], [])
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[banana]]
+        self.checkLists([('strawberry', 'strawberry (>=0.5)')], deps)
+        self.checkLists(graph.missing[banana], [])
+        self.assertIn(banana, graph.reverse_list[strawberry])
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[strawberry]]
+        self.checkLists([], deps)
+        self.checkLists(graph.missing[strawberry], [])
+
+        deps = [(x.name, y) for x, y in graph.adjacency_list[cheese]]
+        self.checkLists([], deps)
+        self.checkLists(graph.missing[cheese], [])
+
+    def test_dependent_dists(self):
+        dists = []
+        for name in self.DISTROS_DIST:
+            dist = packaging.database.get_distribution(name)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        choxie, grammar, towel = dists
+
+        deps = [d.name for d in depgraph.dependent_dists(dists, choxie)]
+        self.checkLists([], deps)
+
+        deps = [d.name for d in depgraph.dependent_dists(dists, grammar)]
+        self.checkLists([], deps)
+
+        deps = [d.name for d in depgraph.dependent_dists(dists, towel)]
+        self.checkLists(['choxie'], deps)
+
+    @requires_zlib
+    def test_dependent_dists_egg(self):
+        dists = []
+        for name in self.DISTROS_DIST + self.DISTROS_EGG:
+            dist = packaging.database.get_distribution(name, use_egg_info=True)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        choxie, grammar, towel, bacon, banana, strawberry, cheese = dists
+
+        deps = [d.name for d in depgraph.dependent_dists(dists, choxie)]
+        self.checkLists([], deps)
+
+        deps = [d.name for d in depgraph.dependent_dists(dists, grammar)]
+        self.checkLists([], deps)
+
+        deps = [d.name for d in depgraph.dependent_dists(dists, towel)]
+        self.checkLists(['choxie'], deps)
+
+        deps = [d.name for d in depgraph.dependent_dists(dists, bacon)]
+        self.checkLists(['choxie', 'towel-stuff', 'grammar'], deps)
+
+        deps = [d.name for d in depgraph.dependent_dists(dists, strawberry)]
+        self.checkLists(['banana'], deps)
+
+        deps = [d.name for d in depgraph.dependent_dists(dists, cheese)]
+        self.checkLists([], deps)
+
+    @requires_zlib
+    def test_graph_to_dot(self):
+        expected = (
+            ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
+            ('grammar', 'bacon', 'truffles (>=1.2)'),
+            ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
+            ('banana', 'strawberry', 'strawberry (>=0.5)'),
+        )
+
+        dists = []
+        for name in self.DISTROS_DIST + self.DISTROS_EGG:
+            dist = packaging.database.get_distribution(name, use_egg_info=True)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        graph = depgraph.generate_graph(dists)
+        buf = io.StringIO()
+        depgraph.graph_to_dot(graph, buf)
+        buf.seek(0)
+        matches = []
+        lines = buf.readlines()
+        for line in lines[1:-1]:  # skip the first and the last lines
+            if line[-1] == '\n':
+                line = line[:-1]
+            match = self.EDGE.match(line.strip())
+            self.assertIsNot(match, None)
+            matches.append(match.groups())
+
+        self.checkLists(matches, expected)
+
+    @requires_zlib
+    def test_graph_disconnected_to_dot(self):
+        dependencies_expected = (
+            ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
+            ('grammar', 'bacon', 'truffles (>=1.2)'),
+            ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
+            ('banana', 'strawberry', 'strawberry (>=0.5)'),
+        )
+        disconnected_expected = ('cheese', 'bacon', 'strawberry')
+
+        dists = []
+        for name in self.DISTROS_DIST + self.DISTROS_EGG:
+            dist = packaging.database.get_distribution(name, use_egg_info=True)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        graph = depgraph.generate_graph(dists)
+        buf = io.StringIO()
+        depgraph.graph_to_dot(graph, buf, skip_disconnected=False)
+        buf.seek(0)
+        lines = buf.readlines()
+
+        dependencies_lines = []
+        disconnected_lines = []
+
+        # First sort output lines into dependencies and disconnected lines.
+        # We also skip the attribute lines, and don't include the "{" and "}"
+        # lines.
+        disconnected_active = False
+        for line in lines[1:-1]:  # Skip first and last line
+            if line.startswith('subgraph disconnected'):
+                disconnected_active = True
+                continue
+            if line.startswith('}') and disconnected_active:
+                disconnected_active = False
+                continue
+
+            if disconnected_active:
+                # Skip the 'label = "Disconnected"', etc. attribute lines.
+                if ' = ' not in line:
+                    disconnected_lines.append(line)
+            else:
+                dependencies_lines.append(line)
+
+        dependencies_matches = []
+        for line in dependencies_lines:
+            if line[-1] == '\n':
+                line = line[:-1]
+            match = self.EDGE.match(line.strip())
+            self.assertIsNot(match, None)
+            dependencies_matches.append(match.groups())
+
+        disconnected_matches = []
+        for line in disconnected_lines:
+            if line[-1] == '\n':
+                line = line[:-1]
+            line = line.strip('"')
+            disconnected_matches.append(line)
+
+        self.checkLists(dependencies_matches, dependencies_expected)
+        self.checkLists(disconnected_matches, disconnected_expected)
+
+    @requires_zlib
+    def test_graph_bad_version_to_dot(self):
+        expected = (
+            ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
+            ('grammar', 'bacon', 'truffles (>=1.2)'),
+            ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
+            ('banana', 'strawberry', 'strawberry (>=0.5)'),
+        )
+
+        dists = []
+        for name in self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS:
+            dist = packaging.database.get_distribution(name, use_egg_info=True)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        graph = depgraph.generate_graph(dists)
+        buf = io.StringIO()
+        depgraph.graph_to_dot(graph, buf)
+        buf.seek(0)
+        matches = []
+        lines = buf.readlines()
+        for line in lines[1:-1]:  # skip the first and the last lines
+            if line[-1] == '\n':
+                line = line[:-1]
+            match = self.EDGE.match(line.strip())
+            self.assertIsNot(match, None)
+            matches.append(match.groups())
+
+        self.checkLists(matches, expected)
+
+    @requires_zlib
+    def test_repr(self):
+        dists = []
+        for name in self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS:
+            dist = packaging.database.get_distribution(name, use_egg_info=True)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        graph = depgraph.generate_graph(dists)
+        self.assertTrue(repr(graph))
+
+    @requires_zlib
+    def test_main(self):
+        tempout = io.StringIO()
+        old = sys.stdout
+        sys.stdout = tempout
+        oldargv = sys.argv[:]
+        sys.argv[:] = ['script.py']
+        try:
+            try:
+                depgraph.main()
+            except SystemExit:
+                pass
+        finally:
+            sys.stdout = old
+            sys.argv[:] = oldargv
+
+        # check what main wrote to stdout XXX could do more here
+        tempout.seek(0)
+        res = tempout.read()
+        self.assertIn('towel', res)
+
+
+def test_suite():
+    return unittest.makeSuite(DepGraphTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_dist.py b/Lib/packaging/tests/test_dist.py
new file mode 100644
index 0000000..fb6d524
--- /dev/null
+++ b/Lib/packaging/tests/test_dist.py
@@ -0,0 +1,455 @@
+"""Tests for packaging.dist."""
+import os
+import io
+import sys
+import logging
+import textwrap
+import packaging.dist
+
+from packaging.dist import Distribution
+from packaging.command import set_command
+from packaging.command.cmd import Command
+from packaging.errors import PackagingModuleError, PackagingOptionError
+from packaging.tests import TESTFN, captured_stdout
+from packaging.tests import support, unittest
+from packaging.tests.support import create_distribution
+from test.support import unload
+
+
+class test_dist(Command):
+    """Sample packaging extension command."""
+
+    user_options = [
+        ("sample-option=", "S", "help text"),
+        ]
+
+    def initialize_options(self):
+        self.sample_option = None
+
+    def finalize_options(self):
+        pass
+
+
+class DistributionTestCase(support.TempdirManager,
+                           support.LoggingCatcher,
+                           support.EnvironRestorer,
+                           unittest.TestCase):
+
+    restore_environ = ['HOME']
+
+    def setUp(self):
+        super(DistributionTestCase, self).setUp()
+        self.argv = sys.argv, sys.argv[:]
+        del sys.argv[1:]
+
+    def tearDown(self):
+        sys.argv = self.argv[0]
+        sys.argv[:] = self.argv[1]
+        super(DistributionTestCase, self).tearDown()
+
+    def test_debug_mode(self):
+        self.addCleanup(os.unlink, TESTFN)
+        with open(TESTFN, "w") as f:
+            f.write("[global]\n")
+            f.write("command_packages = foo.bar, splat")
+
+        files = [TESTFN]
+        sys.argv.append("build")
+        __, stdout = captured_stdout(create_distribution, files)
+        self.assertEqual(stdout, '')
+        packaging.dist.DEBUG = True
+        try:
+            __, stdout = captured_stdout(create_distribution, files)
+            self.assertEqual(stdout, '')
+        finally:
+            packaging.dist.DEBUG = False
+
+    def test_write_pkg_file(self):
+        # Check Metadata handling of Unicode fields
+        tmp_dir = self.mkdtemp()
+        my_file = os.path.join(tmp_dir, 'f')
+        cls = Distribution
+
+        dist = cls(attrs={'author': 'Mister Café',
+                          'name': 'my.package',
+                          'maintainer': 'Café Junior',
+                          'summary': 'Café torréfié',
+                          'description': 'Héhéhé'})
+
+        # let's make sure the file can be written
+        # with Unicode fields. they are encoded with
+        # PKG_INFO_ENCODING
+        with open(my_file, 'w', encoding='utf-8') as fp:
+            dist.metadata.write_file(fp)
+
+        # regular ascii is of course always usable
+        dist = cls(attrs={'author': 'Mister Cafe',
+                          'name': 'my.package',
+                          'maintainer': 'Cafe Junior',
+                          'summary': 'Cafe torrefie',
+                          'description': 'Hehehe'})
+
+        with open(my_file, 'w') as fp:
+            dist.metadata.write_file(fp)
+
+    def test_bad_attr(self):
+        Distribution(attrs={'author': 'xxx',
+                            'name': 'xxx',
+                            'version': '1.2',
+                            'url': 'xxxx',
+                            'badoptname': 'xxx'})
+        logs = self.get_logs(logging.WARNING)
+        self.assertEqual(1, len(logs))
+        self.assertIn('unknown argument', logs[0])
+
+    def test_bad_version(self):
+        Distribution(attrs={'author': 'xxx',
+                            'name': 'xxx',
+                            'version': 'xxx',
+                            'url': 'xxxx'})
+        logs = self.get_logs(logging.WARNING)
+        self.assertEqual(1, len(logs))
+        self.assertIn('not a valid version', logs[0])
+
+    def test_empty_options(self):
+        # an empty options dictionary should not stay in the
+        # list of attributes
+        Distribution(attrs={'author': 'xxx',
+                            'name': 'xxx',
+                            'version': '1.2',
+                            'url': 'xxxx',
+                            'options': {}})
+
+        self.assertEqual([], self.get_logs(logging.WARNING))
+
+    def test_non_empty_options(self):
+        # TODO: how to actually use options is not documented except
+        # for a few cryptic comments in dist.py.  If this is to stay
+        # in the public API, it deserves some better documentation.
+
+        # Here is an example of how it's used out there:
+        # http://svn.pythonmac.org/py2app/py2app/trunk/doc/
+        # index.html#specifying-customizations
+        dist = Distribution(attrs={'author': 'xxx',
+                                   'name': 'xxx',
+                                   'version': 'xxx',
+                                   'url': 'xxxx',
+                                   'options': {'sdist': {'owner': 'root'}}})
+
+        self.assertIn('owner', dist.get_option_dict('sdist'))
+
+    def test_finalize_options(self):
+
+        attrs = {'keywords': 'one,two',
+                 'platform': 'one,two'}
+
+        dist = Distribution(attrs=attrs)
+        dist.finalize_options()
+
+        # finalize_options splits platforms and keywords
+        self.assertEqual(dist.metadata['platform'], ['one', 'two'])
+        self.assertEqual(dist.metadata['keywords'], ['one', 'two'])
+
+    def test_find_config_files_disable(self):
+        # Bug #1180: Allow users to disable their own config file.
+        temp_home = self.mkdtemp()
+        if os.name == 'posix':
+            user_filename = os.path.join(temp_home, ".pydistutils.cfg")
+        else:
+            user_filename = os.path.join(temp_home, "pydistutils.cfg")
+
+        with open(user_filename, 'w') as f:
+            f.write('[distutils2]\n')
+
+        def _expander(path):
+            return temp_home
+
+        old_expander = os.path.expanduser
+        os.path.expanduser = _expander
+        try:
+            d = packaging.dist.Distribution()
+            all_files = d.find_config_files()
+
+            d = packaging.dist.Distribution(attrs={'script_args':
+                                                   ['--no-user-cfg']})
+            files = d.find_config_files()
+        finally:
+            os.path.expanduser = old_expander
+
+        # make sure --no-user-cfg disables the user cfg file
+        self.assertEqual((len(all_files) - 1), len(files))
+
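+    # pre-hook options use the syntax "pre-hook.<alias> = dotted.name";
+    # aliases coming from different config files are merged into one dict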
+    def test_special_hooks_parsing(self):
+        temp_home = self.mkdtemp()
+        config_files = [os.path.join(temp_home, "config1.cfg"),
+                        os.path.join(temp_home, "config2.cfg")]
+
+        # Store two aliased hooks in config files
+        self.write_file((temp_home, "config1.cfg"),
+                        '[test_dist]\npre-hook.a = type')
+        self.write_file((temp_home, "config2.cfg"),
+                        '[test_dist]\npre-hook.b = type')
+
+        set_command('packaging.tests.test_dist.test_dist')
+        dist = create_distribution(config_files)
+        cmd = dist.get_command_obj("test_dist")
+        self.assertEqual(cmd.pre_hook, {"a": 'type', "b": 'type'})
+
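+    # hooks are resolved from their dotted names at run time; the expected
+    # call order is finalize_options, pre-hooks, run, post-hooks (checked
+    # against the recorded events below)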
+    def test_hooks_get_run(self):
+        temp_home = self.mkdtemp()
+        module_name = os.path.split(temp_home)[-1]
+        pyname = '%s.py' % module_name
+        config_file = os.path.join(temp_home, "config1.cfg")
+        hooks_module = os.path.join(temp_home, pyname)
+
+        self.write_file(config_file, textwrap.dedent('''
+            [test_dist]
+            pre-hook.test = %(modname)s.log_pre_call
+            post-hook.test = %(modname)s.log_post_call'''
+            % {'modname': module_name}))
+
+        self.write_file(hooks_module, textwrap.dedent('''
+        record = []
+
+        def log_pre_call(cmd):
+            record.append('pre-%s' % cmd.get_command_name())
+
+        def log_post_call(cmd):
+            record.append('post-%s' % cmd.get_command_name())
+        '''))
+
+        set_command('packaging.tests.test_dist.test_dist')
+        d = create_distribution([config_file])
+        cmd = d.get_command_obj("test_dist")
+
+        # prepare the call recorders
+        sys.path.append(temp_home)
+        self.addCleanup(sys.path.remove, temp_home)
+        self.addCleanup(unload, module_name)
+        record = __import__(module_name).record
+
+        old_run = cmd.run
+        old_finalize = cmd.finalize_options
+        cmd.run = lambda: record.append('run')
+        cmd.finalize_options = lambda: record.append('finalize')
+        try:
+            d.run_command('test_dist')
+        finally:
+            cmd.run = old_run
+            cmd.finalize_options = old_finalize
+
+        self.assertEqual(record, ['finalize',
+                                  'pre-test_dist',
+                                  'run',
+                                  'post-test_dist'])
+
+    def test_hooks_importable(self):
+        temp_home = self.mkdtemp()
+        config_file = os.path.join(temp_home, "config1.cfg")
+
+        self.write_file(config_file, textwrap.dedent('''
+            [test_dist]
+            pre-hook.test = nonexistent.dotted.name'''))
+
+        set_command('packaging.tests.test_dist.test_dist')
+        d = create_distribution([config_file])
+        cmd = d.get_command_obj("test_dist")
+        cmd.ensure_finalized()
+
+        self.assertRaises(PackagingModuleError, d.run_command, 'test_dist')
+
+    def test_hooks_callable(self):
+        temp_home = self.mkdtemp()
+        config_file = os.path.join(temp_home, "config1.cfg")
+
+        self.write_file(config_file, textwrap.dedent('''
+            [test_dist]
+            pre-hook.test = packaging.tests.test_dist.__doc__'''))
+
+        set_command('packaging.tests.test_dist.test_dist')
+        d = create_distribution([config_file])
+        cmd = d.get_command_obj("test_dist")
+        cmd.ensure_finalized()
+
+        self.assertRaises(PackagingOptionError, d.run_command, 'test_dist')
+
+
+class MetadataTestCase(support.TempdirManager,
+                       support.LoggingCatcher,
+                       unittest.TestCase):
+
+    def setUp(self):
+        super(MetadataTestCase, self).setUp()
+        self.argv = sys.argv, sys.argv[:]
+
+    def tearDown(self):
+        sys.argv = self.argv[0]
+        sys.argv[:] = self.argv[1]
+        super(MetadataTestCase, self).tearDown()
+
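+    # The metadata version is chosen from the fields used: plain name/version
+    # gives Metadata-Version 1.0, while PEP 345 fields such as Requires-Dist
+    # bump it to 1.2 (see the assertions below)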
+    def test_simple_metadata(self):
+        attrs = {"name": "package",
+                 "version": "1.0"}
+        dist = Distribution(attrs)
+        meta = self.format_metadata(dist)
+        self.assertIn("Metadata-Version: 1.0", meta)
+        self.assertNotIn("provides:", meta.lower())
+        self.assertNotIn("requires:", meta.lower())
+        self.assertNotIn("obsoletes:", meta.lower())
+
+    def test_provides_dist(self):
+        attrs = {"name": "package",
+                 "version": "1.0",
+                 "provides_dist": ["package", "package.sub"]}
+        dist = Distribution(attrs)
+        self.assertEqual(dist.metadata['Provides-Dist'],
+                         ["package", "package.sub"])
+        meta = self.format_metadata(dist)
+        self.assertIn("Metadata-Version: 1.2", meta)
+        self.assertNotIn("requires:", meta.lower())
+        self.assertNotIn("obsoletes:", meta.lower())
+
+    def _test_provides_illegal(self):
+        # XXX to do: check the versions
+        self.assertRaises(ValueError, Distribution,
+                          {"name": "package",
+                           "version": "1.0",
+                           "provides_dist": ["my.pkg (splat)"]})
+
+    def test_requires_dist(self):
+        attrs = {"name": "package",
+                 "version": "1.0",
+                 "requires_dist": ["other", "another (==1.0)"]}
+        dist = Distribution(attrs)
+        self.assertEqual(dist.metadata['Requires-Dist'],
+                         ["other", "another (==1.0)"])
+        meta = self.format_metadata(dist)
+        self.assertIn("Metadata-Version: 1.2", meta)
+        self.assertNotIn("provides:", meta.lower())
+        self.assertIn("Requires-Dist: other", meta)
+        self.assertIn("Requires-Dist: another (==1.0)", meta)
+        self.assertNotIn("obsoletes:", meta.lower())
+
+    def _test_requires_illegal(self):
+        # XXX
+        self.assertRaises(ValueError, Distribution,
+                          {"name": "package",
+                           "version": "1.0",
+                           "requires": ["my.pkg (splat)"]})
+
+    def test_obsoletes_dist(self):
+        attrs = {"name": "package",
+                 "version": "1.0",
+                 "obsoletes_dist": ["other", "another (<1.0)"]}
+        dist = Distribution(attrs)
+        self.assertEqual(dist.metadata['Obsoletes-Dist'],
+                         ["other", "another (<1.0)"])
+        meta = self.format_metadata(dist)
+        self.assertIn("Metadata-Version: 1.2", meta)
+        self.assertNotIn("provides:", meta.lower())
+        self.assertNotIn("requires:", meta.lower())
+        self.assertIn("Obsoletes-Dist: other", meta)
+        self.assertIn("Obsoletes-Dist: another (<1.0)", meta)
+
+    def _test_obsoletes_illegal(self):
+        # XXX
+        self.assertRaises(ValueError, Distribution,
+                          {"name": "package",
+                           "version": "1.0",
+                           "obsoletes": ["my.pkg (splat)"]})
+
+    def format_metadata(self, dist):
+        sio = io.StringIO()
+        dist.metadata.write_file(sio)
+        return sio.getvalue()
+
+    def test_custom_pydistutils(self):
+        # fixes #2166
+        # make sure pydistutils.cfg is found
+        if os.name == 'posix':
+            user_filename = ".pydistutils.cfg"
+        else:
+            user_filename = "pydistutils.cfg"
+
+        temp_dir = self.mkdtemp()
+        user_filename = os.path.join(temp_dir, user_filename)
+        with open(user_filename, 'w') as f:
+            f.write('.')
+
+        dist = Distribution()
+
+        # linux-style
+        if sys.platform in ('linux', 'darwin'):
+            os.environ['HOME'] = temp_dir
+            files = dist.find_config_files()
+            self.assertIn(user_filename, files)
+
+        # win32-style
+        if sys.platform == 'win32':
+            # home drive should be found
+            os.environ['HOME'] = temp_dir
+            files = dist.find_config_files()
+            self.assertIn(user_filename, files)
+
+    def test_show_help(self):
+        # smoke test, just makes sure some help is displayed
+        dist = Distribution()
+        sys.argv = []
+        dist.help = True
+        dist.script_name = 'setup.py'
+        __, stdout = captured_stdout(dist.parse_command_line)
+        output = [line for line in stdout.split('\n')
+                  if line.strip() != '']
+        self.assertGreater(len(output), 0)
+
+    def test_description(self):
+        desc = textwrap.dedent("""\
+        example::
+              We start here
+            and continue here
+          and end here.""")
+        attrs = {"name": "package",
+                 "version": "1.0",
+                 "description": desc}
+
+        dist = packaging.dist.Distribution(attrs)
+        meta = self.format_metadata(dist)
+        meta = meta.replace('\n' + 7 * ' ' + '|', '\n')
+        self.assertIn(desc, meta)
+
+    def test_read_metadata(self):
+        attrs = {"name": "package",
+                 "version": "1.0",
+                 "description": "desc",
+                 "summary": "xxx",
+                 "download_url": "http://example.com",
+                 "keywords": ['one', 'two'],
+                 "requires_dist": ['foo']}
+
+        dist = Distribution(attrs)
+        metadata = dist.metadata
+
+        # write it, then read it back
+        PKG_INFO = io.StringIO()
+        metadata.write_file(PKG_INFO)
+        PKG_INFO.seek(0)
+
+        metadata.read_file(PKG_INFO)
+        self.assertEqual(metadata['name'], "package")
+        self.assertEqual(metadata['version'], "1.0")
+        self.assertEqual(metadata['summary'], "xxx")
+        self.assertEqual(metadata['download_url'], 'http://example.com')
+        self.assertEqual(metadata['keywords'], ['one', 'two'])
+        self.assertEqual(metadata['platform'], [])
+        self.assertEqual(metadata['obsoletes'], [])
+        self.assertEqual(metadata['requires-dist'], ['foo'])
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(DistributionTestCase))
+    suite.addTest(unittest.makeSuite(MetadataTestCase))
+    return suite
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_extension.py b/Lib/packaging/tests/test_extension.py
new file mode 100644
index 0000000..41182e5
--- /dev/null
+++ b/Lib/packaging/tests/test_extension.py
@@ -0,0 +1,15 @@
+"""Tests for packaging.extension."""
+import os
+
+from packaging.compiler.extension import Extension
+from packaging.tests import unittest
+
+class ExtensionTestCase(unittest.TestCase):
+
+    pass
+
+def test_suite():
+    return unittest.makeSuite(ExtensionTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_install.py b/Lib/packaging/tests/test_install.py
new file mode 100644
index 0000000..01c3dcf
--- /dev/null
+++ b/Lib/packaging/tests/test_install.py
@@ -0,0 +1,366 @@
+"""Tests for the packaging.install module."""
+
+import os
+from tempfile import mkstemp
+from packaging import install
+from packaging.pypi.xmlrpc import Client
+from packaging.metadata import Metadata
+
+from packaging.tests.support import LoggingCatcher, TempdirManager, unittest
+try:
+    import threading
+    from packaging.tests.pypi_server import use_xmlrpc_server
+except ImportError:
+    threading = None
+    use_xmlrpc_server = None
+
+
+class InstalledDist:
+    """Distribution object, represent distributions currently installed on the
+    system"""
+    def __init__(self, name, version, deps):
+        self.metadata = Metadata()
+        self.name = name
+        self.version = version
+        self.metadata['Name'] = name
+        self.metadata['Version'] = version
+        self.metadata['Requires-Dist'] = deps
+
+    def __repr__(self):
+        return '<InstalledDist %s>' % self.metadata['Name']
+
+
+class ToInstallDist:
+    """Distribution that will be installed"""
+
+    def __init__(self, files=False):
+        self._files = files
+        self.install_called = False
+        self.install_called_with = {}
+        self.uninstall_called = False
+        self._real_files = []
+        self.name = "fake"
+        self.version = "fake"
+        if files:
+            for f in range(0, 3):
+                fp, fn = mkstemp()
+                os.close(fp)
+                self._real_files.append(fn)
+
+    def _unlink_installed_files(self):
+        if self._files:
+            for fn in self._real_files:
+                os.unlink(fn)
+
+    def list_installed_files(self, **args):
+        if self._files:
+            return self._real_files
+
+    def get_install(self, **args):
+        return self.list_installed_files()
+
+
+class MagicMock:
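+    """Minimal stand-in for unittest.mock.MagicMock: records calls and
+    can be told to raise on selected calls."""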
+    def __init__(self, return_value=None, raise_exception=False):
+        self.called = False
+        self._times_called = 0
+        self._called_with = []
+        self._return_value = return_value
+        self._raise = raise_exception
+
+    def __call__(self, *args, **kwargs):
+        self.called = True
+        self._times_called = self._times_called + 1
+        self._called_with.append((args, kwargs))
+        iterable = hasattr(self._raise, '__iter__')
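+        # _raise may be a plain bool (raise on every call) or a sequence
+        # of bools selecting which calls raise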
+        if self._raise:
+            if ((not iterable and self._raise)
+                    or self._raise[self._times_called - 1]):
+                raise Exception
+        return self._return_value
+
+    def called_with(self, *args, **kwargs):
+        return (args, kwargs) in self._called_with
+
+
+def get_installed_dists(dists):
+    """Return a list of fake installed dists.
+    The list is name, version, deps"""
+    objects = []
+    for name, version, deps in dists:
+        objects.append(InstalledDist(name, version, deps))
+    return objects
+
+
+class TestInstall(LoggingCatcher, TempdirManager, unittest.TestCase):
+    def _get_client(self, server, *args, **kwargs):
+        return Client(server.full_address, *args, **kwargs)
+
+    def _get_results(self, output):
+        """return a list of results"""
+        installed = [(o.name, str(o.version)) for o in output['install']]
+        remove = [(o.name, str(o.version)) for o in output['remove']]
+        conflict = [(o.name, str(o.version)) for o in output['conflict']]
+        return installed, remove, conflict
+
+    @unittest.skipIf(threading is None, 'needs threading')
+    @use_xmlrpc_server()
+    def test_existing_deps(self, server):
+        # Test that the installer gets the dependencies from the metadata
+        # and asks the index for these dependencies.
+        # In this test case, choxie depends on towel-stuff 0.1, which in
+        # turn depends on bacon <= 0.2:
+        # choxie -> towel-stuff -> bacon.
+        # The release metadata here is not provided in the metadata 1.2 format.
+        client = self._get_client(server)
+        archive_path = '%s/distribution.tar.gz' % server.full_address
+        server.xmlrpc.set_distributions([
+            {'name': 'choxie',
+             'version': '2.0.0.9',
+             'requires_dist': ['towel-stuff (0.1)'],
+             'url': archive_path},
+            {'name': 'towel-stuff',
+             'version': '0.1',
+             'requires_dist': ['bacon (<= 0.2)'],
+             'url': archive_path},
+            {'name': 'bacon',
+             'version': '0.1',
+             'requires_dist': [],
+             'url': archive_path},
+            ])
+        installed = get_installed_dists([('bacon', '0.1', [])])
+        output = install.get_infos("choxie", index=client,
+                                   installed=installed)
+
+        # bacon is not scheduled for installation; it is already installed system-wide
+        self.assertEqual(0, len(output['remove']))
+        self.assertEqual(2, len(output['install']))
+        readable_output = [(o.name, str(o.version))
+                           for o in output['install']]
+        self.assertIn(('towel-stuff', '0.1'), readable_output)
+        self.assertIn(('choxie', '2.0.0.9'), readable_output)
+
+    @unittest.skipIf(threading is None, 'needs threading')
+    @use_xmlrpc_server()
+    def test_upgrade_existing_deps(self, server):
+        client = self._get_client(server)
+        archive_path = '%s/distribution.tar.gz' % server.full_address
+        server.xmlrpc.set_distributions([
+            {'name': 'choxie',
+             'version': '2.0.0.9',
+             'requires_dist': ['towel-stuff (0.1)'],
+             'url': archive_path},
+            {'name': 'towel-stuff',
+             'version': '0.1',
+             'requires_dist': ['bacon (>= 0.2)'],
+             'url': archive_path},
+            {'name': 'bacon',
+             'version': '0.2',
+             'requires_dist': [],
+             'url': archive_path},
+            ])
+
+        output = install.get_infos("choxie", index=client,
+                     installed=get_installed_dists([('bacon', '0.1', [])]))
+        installed = [(o.name, str(o.version)) for o in output['install']]
+
+        # we need bacon 0.2, but 0.1 is installed.
+        # So we expect to remove 0.1 and to install 0.2 instead.
+        remove = [(o.name, str(o.version)) for o in output['remove']]
+        self.assertIn(('choxie', '2.0.0.9'), installed)
+        self.assertIn(('towel-stuff', '0.1'), installed)
+        self.assertIn(('bacon', '0.2'), installed)
+        self.assertIn(('bacon', '0.1'), remove)
+        self.assertEqual(0, len(output['conflict']))
+
+    @unittest.skipIf(threading is None, 'needs threading')
+    @use_xmlrpc_server()
+    def test_conflicts(self, server):
+        # Tests that conflicts are detected
+        client = self._get_client(server)
+        archive_path = '%s/distribution.tar.gz' % server.full_address
+
+        # choxie depends on towel-stuff, which depends on bacon.
+        server.xmlrpc.set_distributions([
+            {'name': 'choxie',
+             'version': '2.0.0.9',
+             'requires_dist': ['towel-stuff (0.1)'],
+             'url': archive_path},
+            {'name': 'towel-stuff',
+             'version': '0.1',
+             'requires_dist': ['bacon (>= 0.2)'],
+             'url': archive_path},
+            {'name': 'bacon',
+             'version': '0.2',
+             'requires_dist': [],
+             'url': archive_path},
+            ])
+
+        # name, version, deps.
+        already_installed = [('bacon', '0.1', []),
+                             ('chicken', '1.1', ['bacon (0.1)'])]
+        output = install.get_infos(
+            'choxie', index=client,
+            installed=get_installed_dists(already_installed))
+
+        # we need bacon 0.2, but 0.1 is installed.
+        # So we expect to remove 0.1 and to install 0.2 instead.
+        installed, remove, conflict = self._get_results(output)
+        self.assertIn(('choxie', '2.0.0.9'), installed)
+        self.assertIn(('towel-stuff', '0.1'), installed)
+        self.assertIn(('bacon', '0.2'), installed)
+        self.assertIn(('bacon', '0.1'), remove)
+        self.assertIn(('chicken', '1.1'), conflict)
+
+    @unittest.skipIf(threading is None, 'needs threading')
+    @use_xmlrpc_server()
+    def test_installation_unexisting_project(self, server):
+        # Test that the installer raises an exception if the project
+        # does not exist.
+        client = self._get_client(server)
+        self.assertRaises(install.InstallationException,
+                          install.get_infos,
+                          'unexisting project', index=client)
+
+    def test_move_files(self):
+        # test that the files are really moved, and that the new path is
+        # returned.
+        path = self.mkdtemp()
+        newpath = self.mkdtemp()
+        files = [os.path.join(path, str(x)) for x in range(1, 20)]
+        for f in files:
+            open(f, 'ab+').close()
+        output = [o for o in install._move_files(files, newpath)]
+
+        # check that the output lists the old and new locations
+        for file_ in files:
+            name = os.path.split(file_)[-1]
+            newloc = os.path.join(newpath, name)
+            self.assertIn((file_, newloc), output)
+
+        # remove the files
+        for f in [o[1] for o in output]:  # o[1] is the new place
+            os.remove(f)
+
+    def test_update_infos(self):
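+        # each case is [dict1, dict2, expected result of merging dict2
+        # into dict1 in place]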
+        tests = [[
+            {'foo': ['foobar', 'foo', 'baz'], 'baz': ['foo', 'foo']},
+            {'foo': ['additional_content', 'yeah'], 'baz': ['test', 'foo']},
+            {'foo': ['foobar', 'foo', 'baz', 'additional_content', 'yeah'],
+             'baz': ['foo', 'foo', 'test', 'foo']},
+        ]]
+
+        for dict1, dict2, expect in tests:
+            install._update_infos(dict1, dict2)
+            for key in expect:
+                self.assertEqual(expect[key], dict1[key])
+
+    def test_install_dists_rollback(self):
+        # if one of the distribution installations fails, uninstall all
+        # the distributions installed so far.
+
+        old_install_dist = install._install_dist
+        old_remove = getattr(install, 'remove', None)
+
+        install._install_dist = MagicMock(return_value=[],
+                                          raise_exception=(False, True))
+        install.remove = MagicMock()
+        try:
+            d1 = ToInstallDist()
+            d2 = ToInstallDist()
+            path = self.mkdtemp()
+            self.assertRaises(Exception, install.install_dists, [d1, d2], path)
+            self.assertTrue(install._install_dist.called_with(d1, path))
+            self.assertTrue(install.remove.called)
+        finally:
+            install._install_dist = old_install_dist
+            install.remove = old_remove
+
+    def test_install_dists_success(self):
+        old_install_dist = install._install_dist
+        install._install_dist = MagicMock(return_value=[])
+        try:
+            # test that the install method is called on each distribution
+            d1 = ToInstallDist()
+            d2 = ToInstallDist()
+
+            # should call install
+            path = self.mkdtemp()
+            install.install_dists([d1, d2], path)
+            for dist in (d1, d2):
+                self.assertTrue(install._install_dist.called_with(dist, path))
+        finally:
+            install._install_dist = old_install_dist
+
+    def test_install_from_infos_conflict(self):
+        # assert conflicts raise an exception
+        self.assertRaises(install.InstallationConflict,
+            install.install_from_infos,
+            conflicts=[ToInstallDist()])
+
+    def test_install_from_infos_remove_success(self):
+        old_install_dists = install.install_dists
+        install.install_dists = lambda x, y=None: None
+        try:
+            dists = []
+            for i in range(2):
+                dists.append(ToInstallDist(files=True))
+            install.install_from_infos(remove=dists)
+
+            # assert that the files have been removed
+            for dist in dists:
+                for f in dist.list_installed_files():
+                    self.assertFalse(os.path.exists(f))
+        finally:
+            install.install_dists = old_install_dists
+
+    def test_install_from_infos_remove_rollback(self):
+        old_install_dist = install._install_dist
+        old_uninstall = getattr(install, 'uninstall', None)
+
+        install._install_dist = MagicMock(return_value=[],
+                raise_exception=(False, True))
+        install.uninstall = MagicMock()
+        try:
+            # assert that if an error occurs, the removed files are restored.
+            remove = []
+            for i in range(2):
+                remove.append(ToInstallDist(files=True))
+            to_install = [ToInstallDist(), ToInstallDist()]
+            temp_dir = self.mkdtemp()
+
+            self.assertRaises(Exception, install.install_from_infos,
+                              install_path=temp_dir, install=to_install,
+                              remove=remove)
+            # assert that the removed files have been restored and are
+            # still in place
+            for dist in remove:
+                for f in dist.list_installed_files():
+                    self.assertTrue(os.path.exists(f))
+                dist._unlink_installed_files()
+        finally:
+            install._install_dist = old_install_dist
+            install.uninstall = old_uninstall
+
+    def test_install_from_infos_install_success(self):
+        old_install_dist = install._install_dist
+        install._install_dist = MagicMock([])
+        try:
+            # assert that the distribution can be installed
+            install_path = "my_install_path"
+            to_install = [ToInstallDist(), ToInstallDist()]
+
+            install.install_from_infos(install_path, install=to_install)
+            for dist in to_install:
+                self.assertTrue(
+                    install._install_dist.called_with(dist, install_path))
+        finally:
+            install._install_dist = old_install_dist
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestInstall))
+    return suite
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_manifest.py b/Lib/packaging/tests/test_manifest.py
new file mode 100644
index 0000000..9fb8b63
--- /dev/null
+++ b/Lib/packaging/tests/test_manifest.py
@@ -0,0 +1,80 @@
+"""Tests for packaging.manifest."""
+import os
+import logging
+from io import StringIO
+from packaging.manifest import Manifest
+
+from packaging.tests import unittest, support
+
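+# sample template exercising comments, blank lines and backslash
+# line continuations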
+_MANIFEST = """\
+recursive-include foo *.py   # ok
+# nothing here
+
+#
+
+recursive-include bar \\
+  *.dat   *.txt
+"""
+
+_MANIFEST2 = """\
+README
+file1
+"""
+
+
+class ManifestTestCase(support.TempdirManager,
+                       support.LoggingCatcher,
+                       unittest.TestCase):
+
+    def setUp(self):
+        super(ManifestTestCase, self).setUp()
+        self.cwd = os.getcwd()
+
+    def tearDown(self):
+        os.chdir(self.cwd)
+        super(ManifestTestCase, self).tearDown()
+
+    def test_manifest_reader(self):
+        tmpdir = self.mkdtemp()
+        MANIFEST = os.path.join(tmpdir, 'MANIFEST.in')
+        with open(MANIFEST, 'w') as f:
+            f.write(_MANIFEST)
+
+        manifest = Manifest()
+        manifest.read_template(MANIFEST)
+
+        warnings = self.get_logs(logging.WARNING)
+        # the manifest should have been read and 3 warnings issued
+        # (we didn't provide the files)
+        self.assertEqual(3, len(warnings))
+        for warning in warnings:
+            self.assertIn('no files found matching', warning)
+
+        # reset logs for the next assert
+        self.loghandler.flush()
+
+        # manifest also accepts file-like objects
+        with open(MANIFEST) as f:
+            manifest.read_template(f)
+
+        # the manifest should have been read again and 3 more warnings
+        # issued (we still didn't provide the files)
+        warnings = self.get_logs(logging.WARNING)
+        self.assertEqual(3, len(warnings))
+
+    def test_default_actions(self):
+        tmpdir = self.mkdtemp()
+        self.addCleanup(os.chdir, os.getcwd())
+        os.chdir(tmpdir)
+        self.write_file('README', 'xxx')
+        self.write_file('file1', 'xxx')
+        content = StringIO(_MANIFEST2)
+        manifest = Manifest()
+        manifest.read_template(content)
+        self.assertEqual(['README', 'file1'], manifest.files)
+
+
+def test_suite():
+    return unittest.makeSuite(ManifestTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_markers.py b/Lib/packaging/tests/test_markers.py
new file mode 100644
index 0000000..dec0429
--- /dev/null
+++ b/Lib/packaging/tests/test_markers.py
@@ -0,0 +1,71 @@
+"""Tests for packaging.markers."""
+import os
+import sys
+import platform
+from packaging.markers import interpret
+
+from packaging.tests import unittest
+from packaging.tests.support import LoggingCatcher
+
+
+class MarkersTestCase(LoggingCatcher,
+                      unittest.TestCase):
+
+    def test_interpret(self):
+        sys_platform = sys.platform
+        version = sys.version.split()[0]
+        os_name = os.name
+        platform_version = platform.version()
+        platform_machine = platform.machine()
+        platform_python_implementation = platform.python_implementation()
+
+        self.assertTrue(interpret("sys.platform == '%s'" % sys_platform))
+        self.assertTrue(interpret(
+            "sys.platform == '%s' or python_version == '2.4'" % sys_platform))
+        self.assertTrue(interpret(
+            "sys.platform == '%s' and python_full_version == '%s'" %
+            (sys_platform, version)))
+        self.assertTrue(interpret("'%s' == sys.platform" % sys_platform))
+        self.assertTrue(interpret('os.name == "%s"' % os_name))
+        self.assertTrue(interpret(
+            'platform.version == "%s" and platform.machine == "%s"' %
+            (platform_version, platform_machine)))
+        self.assertTrue(interpret('platform.python_implementation == "%s"' %
+            platform_python_implementation))
+
+        # expressions that should raise a syntax error
+        ops = ('os.name == os.name', 'os.name == 2', "'2' == '2'",
+               'okpjonon', '', 'os.name ==', 'python_version == 2.4')
+        for op in ops:
+            self.assertRaises(SyntaxError, interpret, op)
+
+        # combined operations
+        OP = 'os.name == "%s"' % os_name
+        AND = ' and '
+        OR = ' or '
+        self.assertTrue(interpret(OP + AND + OP))
+        self.assertTrue(interpret(OP + AND + OP + AND + OP))
+        self.assertTrue(interpret(OP + OR + OP))
+        self.assertTrue(interpret(OP + OR + OP + OR + OP))
+
+        # other operators
+        self.assertTrue(interpret("os.name != 'buuuu'"))
+        self.assertTrue(interpret("python_version > '1.0'"))
+        self.assertTrue(interpret("python_version < '5.0'"))
+        self.assertTrue(interpret("python_version <= '5.0'"))
+        self.assertTrue(interpret("python_version >= '1.0'"))
+        self.assertTrue(interpret("'%s' in os.name" % os_name))
+        self.assertTrue(interpret("'buuuu' not in os.name"))
+        self.assertTrue(interpret(
+            "'buuuu' not in os.name and '%s' in os.name" % os_name))
+
+        # execution context
+        self.assertTrue(interpret('python_version == "0.1"',
+                                  {'python_version': '0.1'}))
+
+
+def test_suite():
+    return unittest.makeSuite(MarkersTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_metadata.py b/Lib/packaging/tests/test_metadata.py
new file mode 100644
index 0000000..904019c
--- /dev/null
+++ b/Lib/packaging/tests/test_metadata.py
@@ -0,0 +1,279 @@
+"""Tests for packaging.metadata."""
+import os
+import sys
+import logging
+from io import StringIO
+
+from packaging.errors import (MetadataConflictError, MetadataMissingError,
+                              MetadataUnrecognizedVersionError)
+from packaging.metadata import Metadata, PKG_INFO_PREFERRED_VERSION
+
+from packaging.tests import unittest
+from packaging.tests.support import LoggingCatcher
+
+
+class MetadataTestCase(LoggingCatcher,
+                       unittest.TestCase):
+
+    def test_instantiation(self):
+        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+        with open(PKG_INFO, 'r', encoding='utf-8') as f:
+            contents = f.read()
+        fp = StringIO(contents)
+
+        m = Metadata()
+        self.assertRaises(MetadataUnrecognizedVersionError, m.items)
+
+        m = Metadata(PKG_INFO)
+        self.assertEqual(len(m.items()), 22)
+
+        m = Metadata(fileobj=fp)
+        self.assertEqual(len(m.items()), 22)
+
+        m = Metadata(mapping=dict(name='Test', version='1.0'))
+        self.assertEqual(len(m.items()), 11)
+
+        d = dict(m.items())
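+        # the path, fileobj and mapping arguments are mutually exclusive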
+        self.assertRaises(TypeError, Metadata,
+                          PKG_INFO, fileobj=fp)
+        self.assertRaises(TypeError, Metadata,
+                          PKG_INFO, mapping=d)
+        self.assertRaises(TypeError, Metadata,
+                          fileobj=fp, mapping=d)
+        self.assertRaises(TypeError, Metadata,
+                          PKG_INFO, mapping=m, fileobj=fp)
+
+    def test_metadata_read_write(self):
+        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+        metadata = Metadata(PKG_INFO)
+        out = StringIO()
+        metadata.write_file(out)
+        out.seek(0)
+        res = Metadata()
+        res.read_file(out)
+        for k in metadata:
+            self.assertEqual(metadata[k], res[k])
+
+    def test_metadata_markers(self):
+        # see if we can be platform-aware
+        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+        with open(PKG_INFO, 'r', encoding='utf-8') as f:
+            content = f.read() % sys.platform
+        metadata = Metadata(platform_dependent=True)
+
+        metadata.read_file(StringIO(content))
+        self.assertEqual(metadata['Requires-Dist'], ['bar'])
+        metadata['Name'] = "baz; sys.platform == 'blah'"
+        # FIXME is None or 'UNKNOWN' correct here?
+        # where is that documented?
+        self.assertEqual(metadata['Name'], None)
+
+        # test with context
+        context = {'sys.platform': 'okook'}
+        metadata = Metadata(platform_dependent=True,
+                                        execution_context=context)
+        metadata.read_file(StringIO(content))
+        self.assertEqual(metadata['Requires-Dist'], ['foo'])
+
+    def test_description(self):
+        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+        with open(PKG_INFO, 'r', encoding='utf-8') as f:
+            content = f.read() % sys.platform
+        metadata = Metadata()
+        metadata.read_file(StringIO(content))
+
+        # see if we can read the description now
+        DESC = os.path.join(os.path.dirname(__file__), 'LONG_DESC.txt')
+        with open(DESC) as f:
+            wanted = f.read()
+        self.assertEqual(wanted, metadata['Description'])
+
+        # save the file somewhere and make sure we can read it back
+        out = StringIO()
+        metadata.write_file(out)
+        out.seek(0)
+        metadata.read_file(out)
+        self.assertEqual(wanted, metadata['Description'])
+
+    def test_mapping_api(self):
+        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
+        with open(PKG_INFO, 'r', encoding='utf-8') as f:
+            content = f.read() % sys.platform
+        metadata = Metadata(fileobj=StringIO(content))
+        self.assertIn('Version', metadata.keys())
+        self.assertIn('0.5', metadata.values())
+        self.assertIn(('Version', '0.5'), metadata.items())
+
+        metadata.update({'version': '0.6'})
+        self.assertEqual(metadata['Version'], '0.6')
+        metadata.update([('version', '0.7')])
+        self.assertEqual(metadata['Version'], '0.7')
+
+        self.assertEqual(list(metadata), list(metadata.keys()))
+
+    def test_versions(self):
+        metadata = Metadata()
+        metadata['Obsoletes'] = 'ok'
+        self.assertEqual(metadata['Metadata-Version'], '1.1')
+
+        del metadata['Obsoletes']
+        metadata['Obsoletes-Dist'] = 'ok'
+        self.assertEqual(metadata['Metadata-Version'], '1.2')
+
+        self.assertRaises(MetadataConflictError, metadata.set,
+                          'Obsoletes', 'ok')
+
+        del metadata['Obsoletes']
+        del metadata['Obsoletes-Dist']
+        metadata['Version'] = '1'
+        self.assertEqual(metadata['Metadata-Version'], '1.0')
+
+        PKG_INFO = os.path.join(os.path.dirname(__file__),
+                                'SETUPTOOLS-PKG-INFO')
+        with open(PKG_INFO, 'r', encoding='utf-8') as f:
+            content = f.read()
+        metadata.read_file(StringIO(content))
+        self.assertEqual(metadata['Metadata-Version'], '1.0')
+
+        PKG_INFO = os.path.join(os.path.dirname(__file__),
+                                'SETUPTOOLS-PKG-INFO2')
+        with open(PKG_INFO, 'r', encoding='utf-8') as f:
+            content = f.read()
+        metadata.read_file(StringIO(content))
+        self.assertEqual(metadata['Metadata-Version'], '1.1')
+
+        # Update the _fields dict directly to prevent 'Metadata-Version'
+        # from being updated by the _set_best_version() method.
+        metadata._fields['Metadata-Version'] = '1.618'
+        self.assertRaises(MetadataUnrecognizedVersionError, metadata.keys)
+
+    def test_warnings(self):
+        metadata = Metadata()
+
+        # these should raise a warning
+        values = (('Requires-Dist', 'Funky (Groovie)'),
+                  ('Requires-Python', '1-4'))
+
+        for name, value in values:
+            metadata.set(name, value)
+
+        # we should have exactly two warnings
+        self.assertEqual(len(self.get_logs()), 2)
+
+    def test_multiple_predicates(self):
+        metadata = Metadata()
+
+        # see for "3" instead of "3.0"  ???
+        # its seems like the MINOR VERSION can be omitted
+        metadata['Requires-Python'] = '>=2.6, <3.0'
+        metadata['Requires-Dist'] = ['Foo (>=2.6, <3.0)']
+
+        self.assertEqual([], self.get_logs(logging.WARNING))
+
+    def test_project_url(self):
+        metadata = Metadata()
+        metadata['Project-URL'] = [('one', 'http://ok')]
+        self.assertEqual(metadata['Project-URL'],
+                          [('one', 'http://ok')])
+        self.assertEqual(metadata['Metadata-Version'], '1.2')
+
+    def test_check_version(self):
+        metadata = Metadata()
+        metadata['Name'] = 'vimpdb'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        missing, warnings = metadata.check()
+        self.assertEqual(missing, ['Version'])
+
+    def test_check_version_strict(self):
+        metadata = Metadata()
+        metadata['Name'] = 'vimpdb'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        self.assertRaises(MetadataMissingError, metadata.check, strict=True)
+
+    def test_check_name(self):
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        missing, warnings = metadata.check()
+        self.assertEqual(missing, ['Name'])
+
+    def test_check_name_strict(self):
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        self.assertRaises(MetadataMissingError, metadata.check, strict=True)
+
+    def test_check_author(self):
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        metadata['Name'] = 'vimpdb'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata.docutils_support = False
+        missing, warnings = metadata.check()
+        self.assertEqual(missing, ['Author'])
+
+    def test_check_homepage(self):
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        metadata['Name'] = 'vimpdb'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        missing, warnings = metadata.check()
+        self.assertEqual(missing, ['Home-page'])
+
+    def test_check_predicates(self):
+        metadata = Metadata()
+        metadata['Version'] = 'rr'
+        metadata['Name'] = 'vimpdb'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
+        metadata['Requires-dist'] = ['Foo (a)']
+        metadata['Obsoletes-dist'] = ['Foo (a)']
+        metadata['Provides-dist'] = ['Foo (a)']
+        if metadata.docutils_support:
+            missing, warnings = metadata.check()
+            self.assertEqual(len(warnings), 4)
+            metadata.docutils_support = False
+        missing, warnings = metadata.check()
+        self.assertEqual(len(warnings), 4)
+
+    def test_best_choice(self):
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        self.assertEqual(metadata['Metadata-Version'],
+                         PKG_INFO_PREFERRED_VERSION)
+        metadata['Classifier'] = ['ok']
+        self.assertEqual(metadata['Metadata-Version'], '1.2')
+
+    def test_project_urls(self):
+        # project-url is a bit specific, make sure we write it
+        # properly in PKG-INFO
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        metadata['Project-Url'] = [('one', 'http://ok')]
+        self.assertEqual(metadata['Project-Url'], [('one', 'http://ok')])
+        file_ = StringIO()
+        metadata.write_file(file_)
+        file_.seek(0)
+        res = file_.read().split('\n')
+        self.assertIn('Project-URL: one,http://ok', res)
+
+        file_.seek(0)
+        metadata = Metadata()
+        metadata.read_file(file_)
+        self.assertEqual(metadata['Project-Url'], [('one', 'http://ok')])
+
+
+def test_suite():
+    return unittest.makeSuite(MetadataTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_mixin2to3.py b/Lib/packaging/tests/test_mixin2to3.py
new file mode 100644
index 0000000..d7c83c2
--- /dev/null
+++ b/Lib/packaging/tests/test_mixin2to3.py
@@ -0,0 +1,75 @@
+"""Tests for packaging.command.build_py."""
+import sys
+
+from packaging.tests import unittest, support
+from packaging.compat import Mixin2to3
+
+
+class Mixin2to3TestCase(support.TempdirManager,
+                        support.LoggingCatcher,
+                        unittest.TestCase):
+
+    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
+    def test_convert_code_only(self):
+        # check that code gets converted properly.
+        code_content = "print 'test'\n"
+        code_handle = self.mktempfile()
+        code_name = code_handle.name
+
+        code_handle.write(code_content)
+        code_handle.flush()
+
+        mixin2to3 = Mixin2to3()
+        mixin2to3._run_2to3([code_name])
+        converted_code_content = "print('test')\n"
+        with open(code_name) as fp:
+            new_code_content = "".join(fp.readlines())
+
+        self.assertEqual(new_code_content, converted_code_content)
+
+    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
+    def test_doctests_only(self):
+        # check that doctests get converted properly.
+        doctest_content = '"""\n>>> print test\ntest\n"""\nprint test\n\n'
+        doctest_handle = self.mktempfile()
+        doctest_name = doctest_handle.name
+
+        doctest_handle.write(doctest_content)
+        doctest_handle.flush()
+
+        mixin2to3 = Mixin2to3()
+        mixin2to3._run_2to3([doctest_name])
+
+        converted_doctest_content = ['"""', '>>> print(test)', 'test', '"""',
+                                     'print(test)', '', '', '']
+        converted_doctest_content = '\n'.join(converted_doctest_content)
+        with open(doctest_name) as fp:
+            new_doctest_content = "".join(fp.readlines())
+
+        self.assertEqual(new_doctest_content, converted_doctest_content)
+
+    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
+    def test_additional_fixers(self):
+        # check that use_2to3_fixers works
+        code_content = "type(x) is T"
+        code_handle = self.mktempfile()
+        code_name = code_handle.name
+
+        code_handle.write(code_content)
+        code_handle.flush()
+
+        mixin2to3 = Mixin2to3()
+
+        mixin2to3._run_2to3(files=[code_name], doctests=[code_name],
+                            fixers=['packaging.tests.fixer'])
+        converted_code_content = "isinstance(x, T)"
+        with open(code_name) as fp:
+            new_code_content = "".join(fp.readlines())
+        self.assertEqual(new_code_content, converted_code_content)
+
+
+def test_suite():
+    return unittest.makeSuite(Mixin2to3TestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_msvc9compiler.py b/Lib/packaging/tests/test_msvc9compiler.py
new file mode 100644
index 0000000..dc3ae65
--- /dev/null
+++ b/Lib/packaging/tests/test_msvc9compiler.py
@@ -0,0 +1,140 @@
+"""Tests for packaging.compiler.msvc9compiler."""
+import os
+import sys
+
+from packaging.errors import PackagingPlatformError
+
+from packaging.tests import unittest, support
+
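+# sample manifest containing a Microsoft.VC90.CRT reference, which
+# _remove_visual_c_ref is expected to strip (see _CLEANED_MANIFEST)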
+_MANIFEST = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
+          manifestVersion="1.0">
+  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+    <security>
+      <requestedPrivileges>
+        <requestedExecutionLevel level="asInvoker" uiAccess="false">
+        </requestedExecutionLevel>
+      </requestedPrivileges>
+    </security>
+  </trustInfo>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
+         version="9.0.21022.8" processorArchitecture="x86"
+         publicKeyToken="XXXX">
+      </assemblyIdentity>
+    </dependentAssembly>
+  </dependency>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
+        version="9.0.21022.8" processorArchitecture="x86"
+        publicKeyToken="XXXX"></assemblyIdentity>
+    </dependentAssembly>
+  </dependency>
+</assembly>
+"""
+
+_CLEANED_MANIFEST = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
+          manifestVersion="1.0">
+  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+    <security>
+      <requestedPrivileges>
+        <requestedExecutionLevel level="asInvoker" uiAccess="false">
+        </requestedExecutionLevel>
+      </requestedPrivileges>
+    </security>
+  </trustInfo>
+  <dependency>
+
+  </dependency>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
+        version="9.0.21022.8" processorArchitecture="x86"
+        publicKeyToken="XXXX"></assemblyIdentity>
+    </dependentAssembly>
+  </dependency>
+</assembly>"""
+
+
+class msvc9compilerTestCase(support.TempdirManager,
+                            unittest.TestCase):
+
+    @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+    def test_no_compiler(self):
+        # make sure query_vcvarsall raises a PackagingPlatformError if
+        # the compiler is not found
+        from packaging.compiler.msvccompiler import get_build_version
+        if get_build_version() < 8.0:
+            raise unittest.SkipTest('only for MSVC8.0 or above')
+
+        from packaging.compiler import msvc9compiler
+        from packaging.compiler.msvc9compiler import query_vcvarsall
+
+        def _find_vcvarsall(version):
+            return None
+
+        old_find_vcvarsall = msvc9compiler.find_vcvarsall
+        msvc9compiler.find_vcvarsall = _find_vcvarsall
+        try:
+            self.assertRaises(PackagingPlatformError, query_vcvarsall,
+                             'wont find this version')
+        finally:
+            msvc9compiler.find_vcvarsall = old_find_vcvarsall
+
+    @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+    def test_reg_class(self):
+        from packaging.compiler.msvccompiler import get_build_version
+        if get_build_version() < 8.0:
+            raise unittest.SkipTest("requires MSVC 8.0 or later")
+
+        from packaging.compiler.msvc9compiler import Reg
+        self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx')
+
+        # look for values that should exist in all
+        # versions of the Windows registry.
+        path = r'Control Panel\Desktop'
+        v = Reg.get_value(path, 'dragfullwindows')
+        self.assertIn(v, ('0', '1', '2'))
+
+        import winreg
+        HKCU = winreg.HKEY_CURRENT_USER
+        keys = Reg.read_keys(HKCU, 'xxxx')
+        self.assertEqual(keys, None)
+
+        keys = Reg.read_keys(HKCU, r'Control Panel')
+        self.assertIn('Desktop', keys)
+
+    @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+    def test_remove_visual_c_ref(self):
+        from packaging.compiler.msvccompiler import get_build_version
+        if get_build_version() < 8.0:
+            raise unittest.SkipTest("requires MSVC 8.0 or later")
+
+        from packaging.compiler.msvc9compiler import MSVCCompiler
+        tempdir = self.mkdtemp()
+        manifest = os.path.join(tempdir, 'manifest')
+        with open(manifest, 'w') as f:
+            f.write(_MANIFEST)
+
+        compiler = MSVCCompiler()
+        compiler._remove_visual_c_ref(manifest)
+
+        # see what we got
+        with open(manifest) as f:
+            # remove trailing spaces
+            content = '\n'.join(line.rstrip() for line in f.readlines())
+
+        # make sure the manifest was properly cleaned
+        self.assertEqual(content, _CLEANED_MANIFEST)
+
+
+def test_suite():
+    return unittest.makeSuite(msvc9compilerTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_pypi_dist.py b/Lib/packaging/tests/test_pypi_dist.py
new file mode 100644
index 0000000..b7f4e98
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_dist.py
@@ -0,0 +1,284 @@
+"""Tests for the packaging.pypi.dist module."""
+
+import os
+from packaging.version import VersionPredicate
+from packaging.pypi.dist import (ReleaseInfo, ReleasesList, DistInfo,
+                                 split_archive_name, get_infos_from_url)
+from packaging.pypi.errors import HashDoesNotMatch, UnsupportedHashName
+
+from packaging.tests import unittest
+from packaging.tests.support import TempdirManager, requires_zlib
+try:
+    import threading
+    from packaging.tests.pypi_server import use_pypi_server
+except ImportError:
+    threading = use_pypi_server = None
+
+
+def Dist(*args, **kwargs):
+    # DistInfo takes a release as its first parameter; avoid this in tests.
+    return DistInfo(None, *args, **kwargs)
+
+
+class TestReleaseInfo(unittest.TestCase):
+
+    def test_instantiation(self):
+        # Test that the ReleaseInfo class provides the right attributes
+        # when given on construction
+        release = ReleaseInfo("FooBar", "1.1")
+        self.assertEqual("FooBar", release.name)
+        self.assertEqual("1.1", "%s" % release.version)
+
+    def test_add_dist(self):
+        # empty distribution type should assume "sdist"
+        release = ReleaseInfo("FooBar", "1.1")
+        release.add_distribution(url="http://example.org/")
+        # should not fail
+        release['sdist']
+
+    def test_get_unknown_distribution(self):
+        # should raise a KeyError
+        pass
+
+    def test_get_infos_from_url(self):
+        # Test that the URLs are parsed the right way
+        url_list = {
+            'FooBar-1.1.0.tar.gz': {
+                'name': 'foobar',  # lowercase the name
+                'version': '1.1.0',
+            },
+            'Foo-Bar-1.1.0.zip': {
+                'name': 'foo-bar',  # keep the dash
+                'version': '1.1.0',
+            },
+            'foobar-1.1b2.tar.gz#md5=123123123123123': {
+                'name': 'foobar',
+                'version': '1.1b2',
+                'url': 'http://example.org/foobar-1.1b2.tar.gz',  # no hash
+                'hashval': '123123123123123',
+                'hashname': 'md5',
+            },
+            'foobar-1.1-rc2.tar.gz': {  # use suggested name
+                'name': 'foobar',
+                'version': '1.1c2',
+                'url': 'http://example.org/foobar-1.1-rc2.tar.gz',
+            }
+        }
+
+        for url, attributes in url_list.items():
+            # for each url
+            infos = get_infos_from_url("http://example.org/" + url)
+            for attribute, expected in attributes.items():
+                got = infos.get(attribute)
+                if attribute == "version":
+                    self.assertEqual("%s" % got, expected)
+                else:
+                    self.assertEqual(got, expected)
+
+    def test_split_archive_name(self):
+        # Test we can split the archive names
+        names = {
+            'foo-bar-baz-1.0-rc2': ('foo-bar-baz', '1.0c2'),
+            'foo-bar-baz-1.0': ('foo-bar-baz', '1.0'),
+            'foobarbaz-1.0': ('foobarbaz', '1.0'),
+        }
+        for name, results in names.items():
+            self.assertEqual(results, split_archive_name(name))
+
+
+class TestDistInfo(TempdirManager, unittest.TestCase):
+    srcpath = "/packages/source/f/foobar/foobar-0.1.tar.gz"
+
+    def test_get_url(self):
+        # Test that the url property works well
+
+        d = Dist(url="test_url")
+        self.assertDictEqual(d.url, {
+            "url": "test_url",
+            "is_external": True,
+            "hashname": None,
+            "hashval": None,
+        })
+
+        # add a new url
+        d.add_url(url="internal_url", is_external=False)
+        self.assertEqual(d._url, None)
+        self.assertDictEqual(d.url, {
+            "url": "internal_url",
+            "is_external": False,
+            "hashname": None,
+            "hashval": None,
+        })
+        self.assertEqual(2, len(d.urls))
+
+    def test_comparison(self):
+        # Test that we can compare ReleaseInfo objects
+        foo1 = ReleaseInfo("foo", "1.0")
+        foo2 = ReleaseInfo("foo", "2.0")
+        bar = ReleaseInfo("bar", "2.0")
+        # assert we use the version to compare
+        self.assertTrue(foo1 < foo2)
+        self.assertFalse(foo1 > foo2)
+        self.assertFalse(foo1 == foo2)
+
+        # assert we can't compare dists with different names
+        self.assertRaises(TypeError, foo1.__eq__, bar)
+
+    @unittest.skipIf(threading is None, 'needs threading')
+    @use_pypi_server("downloads_with_md5")
+    def test_download(self, server):
+        # Download is possible, and the md5 is checked if given
+
+        url = server.full_address + self.srcpath
+
+        # a correct md5 hash is accepted
+        dist = Dist(url=url, hashname="md5",
+                    hashval="fe18804c5b722ff024cabdf514924fc4")
+        dist.download(self.mkdtemp())
+
+        # a wrong md5 fails
+        dist2 = Dist(url=url, hashname="md5", hashval="wrongmd5")
+
+        self.assertRaises(HashDoesNotMatch, dist2.download, self.mkdtemp())
+
+        # we can omit the md5 hash
+        dist3 = Dist(url=url)
+        dist3.download(self.mkdtemp())
+
+        # and specify a temporary location
+        # for an already downloaded dist
+        path1 = self.mkdtemp()
+        dist3.download(path=path1)
+        # and for a new one
+        path2_base = self.mkdtemp()
+        dist4 = Dist(url=url)
+        path2 = dist4.download(path=path2_base)
+        self.assertIn(path2_base, path2)
+
+    def test_hashname(self):
+        # An invalid hash name raises an exception on assignment
+        Dist(hashname="md5", hashval="value")
+
+        self.assertRaises(UnsupportedHashName, Dist,
+                          hashname="invalid_hashname",
+                          hashval="value")
+
+    @unittest.skipIf(threading is None, 'needs threading')
+    @requires_zlib
+    @use_pypi_server('downloads_with_md5')
+    def test_unpack(self, server):
+        url = server.full_address + self.srcpath
+        dist1 = Dist(url=url)
+
+        # unpack the distribution into a specified folder
+        dist1_here = self.mkdtemp()
+        dist1_there = dist1.unpack(path=dist1_here)
+
+        # assert we unpack to the path provided
+        self.assertEqual(dist1_here, dist1_there)
+        dist1_result = os.listdir(dist1_there)
+        self.assertIn('paf', dist1_result)
+        os.remove(os.path.join(dist1_there, 'paf'))
+
+        # Test unpack works without a path argument
+        dist2 = Dist(url=url)
+        # doing an unpack
+        dist2_there = dist2.unpack()
+        dist2_result = os.listdir(dist2_there)
+        self.assertIn('paf', dist2_result)
+        os.remove(os.path.join(dist2_there, 'paf'))
+
+
+class TestReleasesList(unittest.TestCase):
+
+    def test_filter(self):
+        # Test that we filter the distributions the right way, using the
+        # version predicate's match method
+        releases = ReleasesList('FooBar', (
+            ReleaseInfo("FooBar", "1.1"),
+            ReleaseInfo("FooBar", "1.1.1"),
+            ReleaseInfo("FooBar", "1.2"),
+            ReleaseInfo("FooBar", "1.2.1"),
+        ))
+        filtered = releases.filter(VersionPredicate("FooBar (<1.2)"))
+        self.assertNotIn(releases[2], filtered)
+        self.assertNotIn(releases[3], filtered)
+        self.assertIn(releases[0], filtered)
+        self.assertIn(releases[1], filtered)
+
+    def test_append(self):
+        # When adding a new item to the list, the behavior is to test if
+        # a release with the same name and version number already exists,
+        # and if so, to add a new distribution to it. If the distribution
+        # type is already defined too, add the URL information to the
+        # existing DistInfo object.
+
+        releases = ReleasesList("FooBar", [
+            ReleaseInfo("FooBar", "1.1", url="external_url",
+                        dist_type="sdist"),
+        ])
+        self.assertEqual(1, len(releases))
+        releases.add_release(release=ReleaseInfo("FooBar", "1.1",
+                                                 url="internal_url",
+                                                 is_external=False,
+                                                 dist_type="sdist"))
+        self.assertEqual(1, len(releases))
+        self.assertEqual(2, len(releases[0]['sdist'].urls))
+
+        releases.add_release(release=ReleaseInfo("FooBar", "1.1.1",
+                                                 dist_type="sdist"))
+        self.assertEqual(2, len(releases))
+
+        # when adding a distribution with a different type, a new
+        # distribution has to be added.
+        releases.add_release(release=ReleaseInfo("FooBar", "1.1.1",
+                                                 dist_type="bdist"))
+        self.assertEqual(2, len(releases))
+        self.assertEqual(2, len(releases[1].dists))
+
+    def test_prefer_final(self):
+        # Can order the distributions using prefer_final
+
+        fb10 = ReleaseInfo("FooBar", "1.0")  # final distribution
+        fb11a = ReleaseInfo("FooBar", "1.1a1")  # alpha
+        fb12a = ReleaseInfo("FooBar", "1.2a1")  # alpha
+        fb12b = ReleaseInfo("FooBar", "1.2b1")  # beta
+        dists = ReleasesList("FooBar", [fb10, fb11a, fb12a, fb12b])
+
+        dists.sort_releases(prefer_final=True)
+        self.assertEqual(fb10, dists[0])
+
+        dists.sort_releases(prefer_final=False)
+        self.assertEqual(fb12b, dists[0])
+
+#    def test_prefer_source(self):
+#        # Ordering support prefer_source
+#        fb_source = Dist("FooBar", "1.0", type="source")
+#        fb_binary = Dist("FooBar", "1.0", type="binary")
+#        fb2_binary = Dist("FooBar", "2.0", type="binary")
+#        dists = ReleasesList([fb_binary, fb_source])
+#
+#        dists.sort_distributions(prefer_source=True)
+#        self.assertEqual(fb_source, dists[0])
+#
+#        dists.sort_distributions(prefer_source=False)
+#        self.assertEqual(fb_binary, dists[0])
+#
+#        dists.append(fb2_binary)
+#        dists.sort_distributions(prefer_source=True)
+#        self.assertEqual(fb2_binary, dists[0])
+
+    def test_get_last(self):
+        dists = ReleasesList('Foo')
+        self.assertEqual(dists.get_last('Foo 1.0'), None)
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestDistInfo))
+    suite.addTest(unittest.makeSuite(TestReleaseInfo))
+    suite.addTest(unittest.makeSuite(TestReleasesList))
+    return suite
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_pypi_server.py b/Lib/packaging/tests/test_pypi_server.py
new file mode 100644
index 0000000..2c4ec0d
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_server.py
@@ -0,0 +1,87 @@
+"""Tests for packaging.command.bdist."""
+import sys
+
+import urllib.request
+import urllib.parse
+import urllib.error
+
+try:
+    import threading
+    from packaging.tests.pypi_server import PyPIServer, PYPI_DEFAULT_STATIC_PATH
+except ImportError:
+    threading = None
+    PyPIServer = None
+    PYPI_DEFAULT_STATIC_PATH = None
+
+from packaging.tests import unittest
+
+
+@unittest.skipIf(threading is None, "Needs threading")
+class PyPIServerTest(unittest.TestCase):
+
+    def test_records_requests(self):
+        # We expect that PyPIServer can log our requests
+        server = PyPIServer()
+        server.default_response_status = 200
+
+        try:
+            server.start()
+            self.assertEqual(len(server.requests), 0)
+
+            data = b'Rock Around The Bunker'
+
+            headers = {"X-test-header": "Mister Iceberg"}
+
+            request = urllib.request.Request(server.full_address, data, headers)
+            urllib.request.urlopen(request)
+            self.assertEqual(len(server.requests), 1)
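+            # each recorded request is stored as a (handler, request data) pair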
+            handler, request_data = server.requests[-1]
+            self.assertIn(data, request_data)
+            self.assertIn("x-test-header", handler.headers)
+            self.assertEqual(handler.headers["x-test-header"], "Mister Iceberg")
+
+        finally:
+            server.stop()
+
+    def test_serve_static_content(self):
+        # PYPI Mocked server can serve static content from disk.
+
+        def uses_local_files_for(server, url_path):
+            """Test that files are served statically (e.g. the output from
+            the server is the same as the one from a simple file read).
+            """
+            url = server.full_address + url_path
+            request = urllib.request.Request(url)
+            response = urllib.request.urlopen(request)
+            with open(PYPI_DEFAULT_STATIC_PATH + "/test_pypi_server"
+                      + url_path) as file:
+                return response.read().decode() == file.read()
+
+        server = PyPIServer(static_uri_paths=["simple", "external"],
+            static_filesystem_paths=["test_pypi_server"])
+        server.start()
+        try:
+            # the file does not exist on disk, so it should not be served
+            url = server.full_address + "/simple/unexisting_page"
+            request = urllib.request.Request(url)
+            try:
+                urllib.request.urlopen(request)
+            except urllib.error.HTTPError as e:
+                self.assertEqual(e.code, 404)
+            else:
+                self.fail("expected a 404 HTTPError")
+
+            # now try serving content that does exist
+            self.assertTrue(uses_local_files_for(server, "/simple/index.html"))
+
+            # and another one in another root path
+            self.assertTrue(uses_local_files_for(server, "/external/index.html"))
+
+        finally:
+            server.stop()
+
+
+def test_suite():
+    return unittest.makeSuite(PyPIServerTest)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_pypi_simple.py b/Lib/packaging/tests/test_pypi_simple.py
new file mode 100644
index 0000000..d50e3f4
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_simple.py
@@ -0,0 +1,339 @@
+"""Tests for the packaging.pypi.simple module."""
+import re
+import os
+import sys
+import http.client
+import urllib.error
+import urllib.parse
+import urllib.request
+
+from packaging.pypi.simple import Crawler
+
+from packaging.tests import unittest
+from packaging.tests.support import TempdirManager, LoggingCatcher
+from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
+                                         PYPI_DEFAULT_STATIC_PATH)
+
+
+class SimpleCrawlerTestCase(TempdirManager,
+                            LoggingCatcher,
+                            unittest.TestCase):
+
+    def _get_simple_crawler(self, server, base_url="/simple/", hosts=None,
+                            *args, **kwargs):
+        """Build and return a Crawler using the test server's URLs."""
+        if hosts is None:
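+            # strip the scheme: hosts entries are bare "host:port" names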
+            hosts = (server.full_address.replace("http://", ""),)
+        kwargs['hosts'] = hosts
+        return Crawler(server.full_address + base_url, *args,
+                       **kwargs)
+
+    @use_pypi_server()
+    def test_bad_urls(self, server):
+        crawler = Crawler()
+        url = 'http://127.0.0.1:0/nonesuch/test_simple'
+        try:
+            v = crawler._open_url(url)
+        except Exception as v:
+            self.assertIn(url, str(v))
+        else:
+            v.close()
+            self.assertIsInstance(v, urllib.error.HTTPError)
+
+        # issue 16
+        # easy_install inquant.contentmirror.plone breaks because of a typo
+        # in its home URL
+        crawler = Crawler(hosts=('example.org',))
+        url = ('url:%20https://svn.plone.org/svn/collective/'
+               'inquant.contentmirror.plone/trunk')
+        try:
+            v = crawler._open_url(url)
+        except Exception as v:
+            self.assertIn(url, str(v))
+        else:
+            v.close()
+            self.assertIsInstance(v, urllib.error.HTTPError)
+
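+        # simulate a server answering with a malformed status line by
+        # temporarily monkey-patching urllib.request.urlopen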
+        def _urlopen(*args):
+            raise http.client.BadStatusLine('line')
+
+        old_urlopen = urllib.request.urlopen
+        urllib.request.urlopen = _urlopen
+        url = 'http://example.org'
+        try:
+            v = crawler._open_url(url)
+        except Exception as v:
+            self.assertIn('line', str(v))
+        else:
+            v.close()
+            # TODO use self.assertRaises
+            raise AssertionError('Should have raised here!')
+        finally:
+            urllib.request.urlopen = old_urlopen
+
+        # issue 20
+        url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
+        try:
+            crawler._open_url(url)
+        except Exception as v:
+            self.assertIn('nonnumeric port', str(v))
+
+        # issue #160
+        url = server.full_address
+        page = ('<a href="http://www.famfamfam.com]('
+                'http://www.famfamfam.com/">')
+        crawler._process_url(url, page)
+
+    @use_pypi_server("test_found_links")
+    def test_found_links(self, server):
+        # Browse the index, asking for a specified release version
+        # The PyPI index contains links for version 1.0, 1.1, 2.0 and 2.0.1
+        crawler = self._get_simple_crawler(server)
+        last_release = crawler.get_release("foobar")
+
+        # we have scanned the index page
+        self.assertIn(server.full_address + "/simple/foobar/",
+            crawler._processed_urls)
+
+        # we have found 4 releases in this page
+        self.assertEqual(len(crawler._projects["foobar"]), 4)
+
+        # and returned the most recent one
+        self.assertEqual("%s" % last_release.version, '2.0.1')
+
+    def test_is_browsable(self):
+        crawler = Crawler(follow_externals=False)
+        self.assertTrue(crawler._is_browsable(crawler.index_url + "test"))
+
+        # Now, when following externals, we can have a list of hosts to trust;
+        # external links to other hosts than these are not followed.
+        crawler = Crawler(hosts=["pypi.python.org", "example.org"],
+                          follow_externals=True)
+        good_urls = (
+            "http://pypi.python.org/foo/bar",
+            "http://pypi.python.org/simple/foobar",
+            "http://example.org",
+            "http://example.org/",
+            "http://example.org/simple/",
+        )
+        bad_urls = (
+            "http://python.org",
+            "http://example.tld",
+        )
+
+        for url in good_urls:
+            self.assertTrue(crawler._is_browsable(url))
+
+        for url in bad_urls:
+            self.assertFalse(crawler._is_browsable(url))
+
+        # allow all hosts
+        crawler = Crawler(follow_externals=True, hosts=("*",))
+        self.assertTrue(crawler._is_browsable("http://an-external.link/path"))
+        self.assertTrue(crawler._is_browsable("pypi.example.org/a/path"))
+
+        # specify a list of hosts we want to allow
+        crawler = Crawler(follow_externals=True,
+                          hosts=("*.example.org",))
+        self.assertFalse(crawler._is_browsable("http://an-external.link/path"))
+        self.assertTrue(
+            crawler._is_browsable("http://pypi.example.org/a/path"))
+
+    @use_pypi_server("with_externals")
+    def test_follow_externals(self, server):
+        # Include external pages
+        # Try to request the package index, which contains links to "external"
+        # resources. They have to be scanned too.
+        crawler = self._get_simple_crawler(server, follow_externals=True)
+        crawler.get_release("foobar")
+        self.assertIn(server.full_address + "/external/external.html",
+            crawler._processed_urls)
+
+    @use_pypi_server("with_real_externals")
+    def test_restrict_hosts(self, server):
+        # Restricting the crawl to a list of allowed hosts is possible.
+        # Test that telling the simple PyPI client not to retrieve
+        # external resources works.
+        crawler = self._get_simple_crawler(server, follow_externals=False)
+        crawler.get_release("foobar")
+        self.assertNotIn(server.full_address + "/external/external.html",
+            crawler._processed_urls)
+
+    @use_pypi_server(static_filesystem_paths=["with_externals"],
+        static_uri_paths=["simple", "external"])
+    def test_links_priority(self, server):
+        # Download links from the pypi simple index should be used before
+        # external download links.
+        # http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error
+        #
+        # Use case:
+        # - someone uploads a package on pypi, an md5 is generated
+        # - someone manually copies this link (with the md5 in the url) onto
+        #   an external page accessible from the package page.
+        # - someone reuploads the package (with a different md5)
+        # - while easy_installing, an MD5 error occurs because the external
+        #   link is used
+        # -> The index should use the link from pypi, not the external one.
+
+        # build the index URL from the already-running test server
+        index_url = server.full_address + '/simple/'
+
+        # scan a test index
+        crawler = Crawler(index_url, follow_externals=True)
+        releases = crawler.get_releases("foobar")
+        server.stop()
+
+        # we have only one link, because links are compared without md5
+        self.assertEqual(1, len(releases))
+        self.assertEqual(1, len(releases[0].dists))
+        # the link should be from the index
+        self.assertEqual(2, len(releases[0].dists['sdist'].urls))
+        self.assertEqual('12345678901234567',
+                         releases[0].dists['sdist'].url['hashval'])
+        self.assertEqual('md5', releases[0].dists['sdist'].url['hashname'])
+
+    @use_pypi_server(static_filesystem_paths=["with_norel_links"],
+        static_uri_paths=["simple", "external"])
+    def test_not_scan_all_links(self, server):
+        # Do not follow all index page links.
+        # The links not tagged with rel="download" or rel="homepage" must
+        # not be processed by the package index while processing "pages".
+
+        # process the pages
+        crawler = self._get_simple_crawler(server, follow_externals=True)
+        crawler.get_releases("foobar")
+        # now it should have processed only pages with links rel="download"
+        # and rel="homepage"
+        self.assertIn("%s/simple/foobar/" % server.full_address,
+            crawler._processed_urls)  # it's the simple index page
+        self.assertIn("%s/external/homepage.html" % server.full_address,
+            crawler._processed_urls)  # the external homepage is rel="homepage"
+        self.assertNotIn("%s/external/nonrel.html" % server.full_address,
+            crawler._processed_urls)  # this link contains no rel=*
+        self.assertNotIn("%s/unrelated-0.2.tar.gz" % server.full_address,
+            crawler._processed_urls)  # linked from simple index (no rel)
+        self.assertIn("%s/foobar-0.1.tar.gz" % server.full_address,
+            crawler._processed_urls)  # linked from simple index (rel)
+        self.assertIn("%s/foobar-2.0.tar.gz" % server.full_address,
+            crawler._processed_urls)  # linked from external homepage (rel)
+
+    def test_uses_mirrors(self):
+        # When the main repository seems down, try using the given mirrors
+        server = PyPIServer("foo_bar_baz")
+        mirror = PyPIServer("foo_bar_baz")
+        mirror.start()  # note: the main server is deliberately not started
+
+        try:
+            # create the index using both servers
+            crawler = Crawler(server.full_address + "/simple/", hosts=('*',),
+                              # set the timeout to 1s for the tests
+                              timeout=1, mirrors=[mirror.full_address])
+
+            # this should not raise a timeout
+            self.assertEqual(4, len(crawler.get_releases("foo")))
+        finally:
+            mirror.stop()
+            server.stop()
+
+    def test_simple_link_matcher(self):
+        # Test that the simple link matcher finds the right links
+        crawler = Crawler(follow_externals=False)
+
+        # Here, we define:
+        #   1. one link that must be followed, because it's a download link
+        #   2. one link that must *not* be followed, because is_browsable
+        #      returns False for it
+        #   3. one link that must be followed, because it's a browsable
+        #      homepage
+        #   4. one link that must be followed, because it contains an md5 hash
+        self.assertTrue(crawler._is_browsable("%stest" % crawler.index_url))
+        self.assertFalse(crawler._is_browsable("http://dl-link2"))
+        content = """
+        <a href="http://dl-link1" rel="download">download_link1</a>
+        <a href="http://dl-link2" rel="homepage">homepage_link1</a>
+        <a href="%(index_url)stest" rel="homepage">homepage_link2</a>
+        <a href="%(index_url)stest/foobar-1.tar.gz#md5=abcdef>download_link2</a>
+        """ % {'index_url': crawler.index_url}
+
+        # Test that the simple link matcher yields the right links.
+        generator = crawler._simple_link_matcher(content, crawler.index_url)
+        self.assertEqual(('%stest/foobar-1.tar.gz#md5=abcdef' %
+                          crawler.index_url, True), next(generator))
+        self.assertEqual(('http://dl-link1', True), next(generator))
+        self.assertEqual(('%stest' % crawler.index_url, False),
+                         next(generator))
+        self.assertRaises(StopIteration, generator.__next__)
+
+        # Following external links is possible (e.g. homepages)
+        crawler.follow_externals = True
+        generator = crawler._simple_link_matcher(content, crawler.index_url)
+        self.assertEqual(('%stest/foobar-1.tar.gz#md5=abcdef' %
+                          crawler.index_url, True), next(generator))
+        self.assertEqual(('http://dl-link1', True), next(generator))
+        self.assertEqual(('http://dl-link2', False), next(generator))
+        self.assertEqual(('%stest' % crawler.index_url, False),
+                         next(generator))
+        self.assertRaises(StopIteration, generator.__next__)
+
+    def test_browse_local_files(self):
+        # Test that we can browse local files
+        index_url = "file://" + PYPI_DEFAULT_STATIC_PATH
+        if sys.platform == 'win32':
+            # under windows the correct syntax is:
+            #   file:///C|\the\path\here
+            # instead of
+            #   file://C:\the\path\here
+            fix = re.compile(r'^(file://)([A-Za-z])(:)')
+            index_url = fix.sub('\\1/\\2|', index_url)
+
+        index_path = os.sep.join([index_url, "test_found_links", "simple"])
+        crawler = Crawler(index_path)
+        dists = crawler.get_releases("foobar")
+        self.assertEqual(4, len(dists))
+
+    def test_get_link_matcher(self):
+        crawler = Crawler("http://example.org")
+        self.assertEqual('_simple_link_matcher', crawler._get_link_matcher(
+                         "http://example.org/some/file").__name__)
+        self.assertEqual('_default_link_matcher', crawler._get_link_matcher(
+                         "http://other-url").__name__)
+
+    def test_default_link_matcher(self):
+        crawler = Crawler("http://example.org", mirrors=[])
+        crawler.follow_externals = True
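+        # treat every link as browsable so only the rel= filtering is exercised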
+        crawler._is_browsable = lambda *args: True
+        base_url = "http://example.org/some/file/"
+        content = """
+<a href="../homepage" rel="homepage">link</a>
+<a href="../download" rel="download">link2</a>
+<a href="../simpleurl">link2</a>
+        """
+        found_links = set(uri for uri, _ in
+                          crawler._default_link_matcher(content, base_url))
+        self.assertIn('http://example.org/some/homepage', found_links)
+        self.assertIn('http://example.org/some/simpleurl', found_links)
+        self.assertIn('http://example.org/some/download', found_links)
+
+    @use_pypi_server("project_list")
+    def test_search_projects(self, server):
+        # we can search the index for projects by name;
+        # the case used does not matter here
+        crawler = self._get_simple_crawler(server)
+        tests = (('Foobar', ['FooBar-bar', 'Foobar-baz', 'Baz-FooBar']),
+                 ('foobar*', ['FooBar-bar', 'Foobar-baz']),
+                 ('*foobar', ['Baz-FooBar']))
+
+        for search, expected in tests:
+            projects = [p.name for p in crawler.search_projects(search)]
+            self.assertListEqual(expected, projects)
+
+
+def test_suite():
+    return unittest.makeSuite(SimpleCrawlerTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_pypi_xmlrpc.py b/Lib/packaging/tests/test_pypi_xmlrpc.py
new file mode 100644
index 0000000..682bd7b
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_xmlrpc.py
@@ -0,0 +1,108 @@
+"""Tests for the packaging.pypi.xmlrpc module."""
+
+from packaging.pypi.xmlrpc import Client, InvalidSearchField, ProjectNotFound
+
+from packaging.tests import unittest
+
+try:
+    import threading
+    from packaging.tests.pypi_server import use_xmlrpc_server
+except ImportError:
+    threading = None
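+
+    # no-op stand-in so this module can still be imported when threading
+    # is unavailable; the tests are skipped in that case anyway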
+    def use_xmlrpc_server():
+        def _use(func):
+            def __use(*args, **kw):
+                return func(*args, **kw)
+            return __use
+        return _use
+
+
+@unittest.skipIf(threading is None, "Needs threading")
+class TestXMLRPCClient(unittest.TestCase):
+    def _get_client(self, server, *args, **kwargs):
+        return Client(server.full_address, *args, **kwargs)
+
+    @use_xmlrpc_server()
+    def test_search_projects(self, server):
+        client = self._get_client(server)
+        server.xmlrpc.set_search_result(['FooBar', 'Foo', 'FooFoo'])
+        results = [r.name for r in client.search_projects(name='Foo')]
+        self.assertEqual(3, len(results))
+        self.assertIn('FooBar', results)
+        self.assertIn('Foo', results)
+        self.assertIn('FooFoo', results)
+
+    def test_search_projects_bad_fields(self):
+        client = Client()
+        self.assertRaises(InvalidSearchField, client.search_projects,
+                          invalid="test")
+
+    @use_xmlrpc_server()
+    def test_get_releases(self, server):
+        client = self._get_client(server)
+        server.xmlrpc.set_distributions([
+            {'name': 'FooBar', 'version': '1.1'},
+            {'name': 'FooBar', 'version': '1.2', 'url': 'http://some/url/'},
+            {'name': 'FooBar', 'version': '1.3', 'url': 'http://other/url/'},
+        ])
+
+        # use a lambda here to avoid a useless mock call
+        server.xmlrpc.list_releases = lambda *a, **k: ['1.1', '1.2', '1.3']
+
+        releases = client.get_releases('FooBar (<=1.2)')
+        # don't call release_data and release_url; just return name and version.
+        self.assertEqual(2, len(releases))
+        versions = releases.get_versions()
+        self.assertIn('1.1', versions)
+        self.assertIn('1.2', versions)
+        self.assertNotIn('1.3', versions)
+
+        self.assertRaises(ProjectNotFound, client.get_releases, 'Foo')
+
+    @use_xmlrpc_server()
+    def test_get_distributions(self, server):
+        client = self._get_client(server)
+        server.xmlrpc.set_distributions([
+            {'name': 'FooBar', 'version': '1.1',
+             'url': 'http://example.org/foobar-1.1-sdist.tar.gz',
+             'digest': '1234567',
+             'type': 'sdist', 'python_version': 'source'},
+            {'name': 'FooBar', 'version': '1.1',
+             'url': 'http://example.org/foobar-1.1-bdist.tar.gz',
+             'digest': '8912345', 'type': 'bdist'},
+        ])
+
+        releases = client.get_releases('FooBar', '1.1')
+        client.get_distributions('FooBar', '1.1')
+        release = releases.get_release('1.1')
+        self.assertEqual('http://example.org/foobar-1.1-sdist.tar.gz',
+                         release['sdist'].url['url'])
+        self.assertEqual('http://example.org/foobar-1.1-bdist.tar.gz',
+                         release['bdist'].url['url'])
+        self.assertEqual(release['sdist'].python_version, 'source')
+
+    @use_xmlrpc_server()
+    def test_get_metadata(self, server):
+        client = self._get_client(server)
+        server.xmlrpc.set_distributions([
+            {'name': 'FooBar',
+             'version': '1.1',
+             'keywords': '',
+             'obsoletes_dist': ['FooFoo'],
+             'requires_external': ['Foo'],
+            }])
+        release = client.get_metadata('FooBar', '1.1')
+        self.assertEqual(['Foo'], release.metadata['requires_external'])
+        self.assertEqual(['FooFoo'], release.metadata['obsoletes_dist'])
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestXMLRPCClient))
+    return suite
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_resources.py b/Lib/packaging/tests/test_resources.py
new file mode 100644
index 0000000..1e92f2a
--- /dev/null
+++ b/Lib/packaging/tests/test_resources.py
@@ -0,0 +1,170 @@
+"""Tests for packaging.resources."""
+
+import os
+import sys
+import shutil
+import tempfile
+from textwrap import dedent
+from packaging.config import get_resources_dests
+from packaging.database import disable_cache, enable_cache
+from packaging.resources import get_file, get_file_path
+
+from packaging.tests import unittest
+from packaging.tests.test_util import GlobTestCaseBase
+
+
+class DataFilesTestCase(GlobTestCaseBase):
+
+    def assertRulesMatch(self, rules, spec):
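+        # build a tree from the spec, then check that get_resources_dests
+        # maps each file to the expected destination (None means excluded)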
+        tempdir = self.build_files_tree(spec)
+        expected = self.clean_tree(spec)
+        result = get_resources_dests(tempdir, rules)
+        self.assertEqual(expected, result)
+
+    def clean_tree(self, spec):
+        files = {}
+        for path, value in spec.items():
+            if value is not None:
+                files[path] = value
+        return files
+
+    def test_simple_glob(self):
+        rules = [('', '*.tpl', '{data}')]
+        spec = {'coucou.tpl': '{data}/coucou.tpl',
+                'Donotwant': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_multiple_match(self):
+        rules = [('scripts', '*.bin', '{appdata}'),
+                 ('scripts', '*', '{appscript}')]
+        spec = {'scripts/script.bin': '{appscript}/script.bin',
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_set_match(self):
+        rules = [('scripts', '*.{bin,sh}', '{appscript}')]
+        spec = {'scripts/script.bin': '{appscript}/script.bin',
+                'scripts/babar.sh':  '{appscript}/babar.sh',
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_set_match_multiple(self):
+        rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')]
+        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+                'scripts/script.sh':  '{appscript}/script.sh',
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_set_match_exclude(self):
+        rules = [('scripts', '*', '{appscript}'),
+                 ('', os.path.join('**', '*.sh'), None)]
+        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+                'scripts/script.sh':  None,
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_glob_in_base(self):
+        rules = [('scrip*', '*.bin', '{appscript}')]
+        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+                'scripouille/babar.bin': '{appscript}/babar.bin',
+                'scriptortu/lotus.bin': '{appscript}/lotus.bin',
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_recursive_glob(self):
+        rules = [('', os.path.join('**', '*.bin'), '{binary}')]
+        spec = {'binary0.bin': '{binary}/binary0.bin',
+                'scripts/binary1.bin': '{binary}/scripts/binary1.bin',
+                'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin',
+                'you/kill/pandabear.guy': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_final_exemple_glob(self):
+        rules = [
+            ('mailman/database/schemas/', '*', '{appdata}/schemas'),
+            ('', os.path.join('**', '*.tpl'), '{appdata}/templates'),
+            ('', os.path.join('developer-docs', '**', '*.txt'), '{doc}'),
+            ('', 'README', '{doc}'),
+            ('mailman/etc/', '*', '{config}'),
+            ('mailman/foo/', os.path.join('**', 'bar', '*.cfg'), '{config}/baz'),
+            ('mailman/foo/', os.path.join('**', '*.cfg'), '{config}/hmm'),
+            ('', 'some-new-semantic.sns', '{funky-crazy-category}'),
+        ]
+        spec = {
+            'README': '{doc}/README',
+            'some.tpl': '{appdata}/templates/some.tpl',
+            'some-new-semantic.sns':
+                '{funky-crazy-category}/some-new-semantic.sns',
+            'mailman/database/mailman.db': None,
+            'mailman/database/schemas/blah.schema':
+                '{appdata}/schemas/blah.schema',
+            'mailman/etc/my.cnf': '{config}/my.cnf',
+            'mailman/foo/some/path/bar/my.cfg':
+                '{config}/hmm/some/path/bar/my.cfg',
+            'mailman/foo/some/path/other.cfg':
+                '{config}/hmm/some/path/other.cfg',
+            'developer-docs/index.txt': '{doc}/developer-docs/index.txt',
+            'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt',
+        }
+        self.maxDiff = None
+        self.assertRulesMatch(rules, spec)
+
+    def test_get_file(self):
+        # Create a fake dist
+        temp_site_packages = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, temp_site_packages)
+
+        dist_name = 'test'
+        dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info')
+        os.mkdir(dist_info)
+
+        metadata_path = os.path.join(dist_info, 'METADATA')
+        resources_path = os.path.join(dist_info, 'RESOURCES')
+
+        with open(metadata_path, 'w') as fp:
+            fp.write(dedent("""\
+                Metadata-Version: 1.2
+                Name: test
+                Version: 0.1
+                Summary: test
+                Author: me
+                """))
+
+        test_path = 'test.cfg'
+
+        fd, test_resource_path = tempfile.mkstemp()
+        os.close(fd)
+        self.addCleanup(os.remove, test_resource_path)
+
+        with open(test_resource_path, 'w') as fp:
+            fp.write('Config')
+
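+        # the RESOURCES file maps "<resource name>,<file system path>"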
+        with open(resources_path, 'w') as fp:
+            fp.write('%s,%s' % (test_path, test_resource_path))
+
+        # Add fake site-packages to sys.path to retrieve fake dist
+        self.addCleanup(sys.path.remove, temp_site_packages)
+        sys.path.insert(0, temp_site_packages)
+
+        # Force packaging.database to rescan the sys.path
+        self.addCleanup(enable_cache)
+        disable_cache()
+
+        # Try to retrieve resources paths and files
+        self.assertEqual(get_file_path(dist_name, test_path),
+                         test_resource_path)
+        self.assertRaises(KeyError, get_file_path, dist_name, 'i-dont-exist')
+
+        with get_file(dist_name, test_path) as fp:
+            self.assertEqual(fp.read(), 'Config')
+        self.assertRaises(KeyError, get_file, dist_name, 'i-dont-exist')
+
+
+def test_suite():
+    return unittest.makeSuite(DataFilesTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_run.py b/Lib/packaging/tests/test_run.py
new file mode 100644
index 0000000..01fa5aa
--- /dev/null
+++ b/Lib/packaging/tests/test_run.py
@@ -0,0 +1,62 @@
+"""Tests for packaging.run."""
+
+import os
+import sys
+import shutil
+
+from packaging.tests import unittest, support, TESTFN
+
+# setup script that uses __file__
+setup_using___file__ = """\
+
+__file__
+
+from packaging.run import setup
+setup()
+"""
+
+setup_prints_cwd = """\
+
+import os
+print(os.getcwd())
+
+from packaging.run import setup
+setup()
+"""
+
+
+class CoreTestCase(unittest.TestCase):
+
+    def setUp(self):
+        super(CoreTestCase, self).setUp()
+        self.old_stdout = sys.stdout
+        self.cleanup_testfn()
+        self.old_argv = sys.argv, sys.argv[:]
+
+    def tearDown(self):
+        sys.stdout = self.old_stdout
+        self.cleanup_testfn()
+        sys.argv = self.old_argv[0]
+        sys.argv[:] = self.old_argv[1]
+        super(CoreTestCase, self).tearDown()
+
+    def cleanup_testfn(self):
+        path = TESTFN
+        if os.path.isfile(path):
+            os.remove(path)
+        elif os.path.isdir(path):
+            shutil.rmtree(path)
+
+    def write_setup(self, text, path=TESTFN):
+        with open(path, "w") as fp:
+            fp.write(text)
+        return path
+
+    # TODO restore the tests removed six months ago and port them to pysetup
+
+
+def test_suite():
+    return unittest.makeSuite(CoreTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_uninstall.py b/Lib/packaging/tests/test_uninstall.py
new file mode 100644
index 0000000..578b10d
--- /dev/null
+++ b/Lib/packaging/tests/test_uninstall.py
@@ -0,0 +1,106 @@
+"""Tests for the uninstall command."""
+import os
+import sys
+
+from packaging.database import disable_cache, enable_cache
+from packaging.run import main
+from packaging.errors import PackagingError
+from packaging.install import remove
+from packaging.command.install_dist import install_dist
+
+from packaging.tests import unittest, support
+
+SETUP_CFG = """
+[metadata]
+name = %(name)s
+version = %(version)s
+
+[files]
+packages =
+    %(pkg)s
+    %(pkg)s.sub
+"""
+
+
+class UninstallTestCase(support.TempdirManager,
+                        support.LoggingCatcher,
+                        support.EnvironRestorer,
+                        unittest.TestCase):
+
+    restore_environ = ['PLAT']
+
+    def setUp(self):
+        super(UninstallTestCase, self).setUp()
+        self.addCleanup(setattr, sys, 'stdout', sys.stdout)
+        self.addCleanup(setattr, sys, 'stderr', sys.stderr)
+        self.addCleanup(os.chdir, os.getcwd())
+        self.addCleanup(enable_cache)
+        self.root_dir = self.mkdtemp()
+        self.cwd = os.getcwd()
+        disable_cache()
+
+    def tearDown(self):
+        os.chdir(self.cwd)
+        super(UninstallTestCase, self).tearDown()
+
+    def run_setup(self, *args):
+        # run setup with args
+        args = ['run'] + list(args)
+        dist = main(args)
+        return dist
+
+    def get_path(self, dist, name):
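+        # finalize the install_dist command so its install_* paths are computed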
+        cmd = install_dist(dist)
+        cmd.prefix = self.root_dir
+        cmd.finalize_options()
+        return getattr(cmd, 'install_' + name)
+
+    def make_dist(self, name='Foo', **kw):
+        kw['name'] = name
+        pkg = name.lower()
+        if 'version' not in kw:
+            kw['version'] = '0.1'
+        project_dir, dist = self.create_dist(**kw)
+        kw['pkg'] = pkg
+
+        pkg_dir = os.path.join(project_dir, pkg)
+        os.mkdir(pkg_dir)
+        os.mkdir(os.path.join(pkg_dir, 'sub'))
+
+        self.write_file((project_dir, 'setup.cfg'), SETUP_CFG % kw)
+        self.write_file((pkg_dir, '__init__.py'), '#')
+        self.write_file((pkg_dir, pkg + '_utils.py'), '#')
+        self.write_file((pkg_dir, 'sub', '__init__.py'), '#')
+        self.write_file((pkg_dir, 'sub', pkg + '_utils.py'), '#')
+
+        return project_dir
+
+    def install_dist(self, name='Foo', dirname=None, **kw):
+        if not dirname:
+            dirname = self.make_dist(name, **kw)
+        os.chdir(dirname)
+        dist = self.run_setup('install_dist', '--prefix=' + self.root_dir)
+        install_lib = self.get_path(dist, 'purelib')
+        return dist, install_lib
+
+    def test_uninstall_unknown_distribution(self):
+        self.assertRaises(PackagingError, remove, 'Foo',
+                          paths=[self.root_dir])
+
+    @unittest.skipIf(sys.platform == 'win32', 'deactivated for now')
+    def test_uninstall(self):
+        dist, install_lib = self.install_dist()
+        self.assertIsFile(install_lib, 'foo', '__init__.py')
+        self.assertIsFile(install_lib, 'foo', 'sub', '__init__.py')
+        self.assertIsFile(install_lib, 'Foo-0.1.dist-info', 'RECORD')
+        remove('Foo', paths=[install_lib])
+        self.assertIsNotFile(install_lib, 'foo', 'sub', '__init__.py')
+        self.assertIsNotFile(install_lib, 'Foo-0.1.dist-info', 'RECORD')
+
+
+def test_suite():
+    return unittest.makeSuite(UninstallTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_unixccompiler.py b/Lib/packaging/tests/test_unixccompiler.py
new file mode 100644
index 0000000..16a1af3
--- /dev/null
+++ b/Lib/packaging/tests/test_unixccompiler.py
@@ -0,0 +1,133 @@
+"""Tests for packaging.unixccompiler."""
+import sys
+
+import sysconfig
+from packaging.compiler.unixccompiler import UnixCCompiler
+from packaging.tests import unittest
+
+
+class UnixCCompilerTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self._backup_platform = sys.platform
+        self._backup_get_config_var = sysconfig.get_config_var
+
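+        # small wrapper exposing runtime_library_dir_option() to the tests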
+        class CompilerWrapper(UnixCCompiler):
+            def rpath_foo(self):
+                return self.runtime_library_dir_option('/foo')
+        self.cc = CompilerWrapper()
+
+    def tearDown(self):
+        sys.platform = self._backup_platform
+        sysconfig.get_config_var = self._backup_get_config_var
+
+    @unittest.skipIf(sys.platform == 'win32', 'irrelevant on win32')
+    def test_runtime_libdir_option(self):
+
+        # Issue #5900: Ensure RUNPATH is added to extension
+        # modules with RPATH if GNU ld is used
+
+        # darwin
+        sys.platform = 'darwin'
+        self.assertEqual(self.cc.rpath_foo(), '-L/foo')
+
+        # hp-ux
+        sys.platform = 'hp-ux'
+        old_gcv = sysconfig.get_config_var
+
+        def gcv(v):
+            return 'xxx'
+        sysconfig.get_config_var = gcv
+        self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo'])
+
+        def gcv(v):
+            return 'gcc'
+        sysconfig.get_config_var = gcv
+        self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])
+
+        def gcv(v):
+            return 'g++'
+        sysconfig.get_config_var = gcv
+        self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])
+
+        sysconfig.get_config_var = old_gcv
+
+        # irix646
+        sys.platform = 'irix646'
+        self.assertEqual(self.cc.rpath_foo(), ['-rpath', '/foo'])
+
+        # osf1V5
+        sys.platform = 'osf1V5'
+        self.assertEqual(self.cc.rpath_foo(), ['-rpath', '/foo'])
+
+        # GCC GNULD
+        sys.platform = 'bar'
+
+        def gcv(v):
+            if v == 'CC':
+                return 'gcc'
+            elif v == 'GNULD':
+                return 'yes'
+        sysconfig.get_config_var = gcv
+        self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
+
+        # GCC non-GNULD
+        sys.platform = 'bar'
+
+        def gcv(v):
+            if v == 'CC':
+                return 'gcc'
+            elif v == 'GNULD':
+                return 'no'
+        sysconfig.get_config_var = gcv
+        self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo')
+
+        # GCC GNULD with fully qualified configuration prefix
+        # see #7617
+        sys.platform = 'bar'
+
+        def gcv(v):
+            if v == 'CC':
+                return 'x86_64-pc-linux-gnu-gcc-4.4.2'
+            elif v == 'GNULD':
+                return 'yes'
+        sysconfig.get_config_var = gcv
+        self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
+
+        # non-GCC GNULD
+        sys.platform = 'bar'
+
+        def gcv(v):
+            if v == 'CC':
+                return 'cc'
+            elif v == 'GNULD':
+                return 'yes'
+        sysconfig.get_config_var = gcv
+        self.assertEqual(self.cc.rpath_foo(), '-R/foo')
+
+        # non-GCC non-GNULD
+        sys.platform = 'bar'
+
+        def gcv(v):
+            if v == 'CC':
+                return 'cc'
+            elif v == 'GNULD':
+                return 'no'
+        sysconfig.get_config_var = gcv
+        self.assertEqual(self.cc.rpath_foo(), '-R/foo')
+
+        # AIX C/C++ linker
+        sys.platform = 'aix'
+
+        def gcv(v):
+            return 'xxx'
+        sysconfig.get_config_var = gcv
+        self.assertEqual(self.cc.rpath_foo(), '-blibpath:/foo')
+
+
+def test_suite():
+    return unittest.makeSuite(UnixCCompilerTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_util.py b/Lib/packaging/tests/test_util.py
new file mode 100644
index 0000000..61b4ec7
--- /dev/null
+++ b/Lib/packaging/tests/test_util.py
@@ -0,0 +1,954 @@
+"""Tests for packaging.util."""
+import os
+import sys
+import time
+import logging
+import tempfile
+import subprocess
+from io import StringIO
+
+from packaging.tests import support, unittest
+from packaging.errors import (
+    PackagingPlatformError, PackagingByteCompileError, PackagingFileError,
+    PackagingExecError, InstallationException)
+from packaging import util
+from packaging.util import (
+    convert_path, change_root, split_quoted, strtobool, rfc822_escape,
+    get_compiler_versions, _MAC_OS_X_LD_VERSION, byte_compile, find_packages,
+    spawn, get_pypirc_path, generate_pypirc, read_pypirc, resolve_name, iglob,
+    RICH_GLOB, egginfo_to_distinfo, is_setuptools, is_distutils, is_packaging,
+    get_install_method)
+
+
+PYPIRC = """\
+[distutils]
+index-servers =
+    pypi
+    server1
+
+[pypi]
+username:me
+password:xxxx
+
+[server1]
+repository:http://example.com
+username:tarek
+password:secret
+"""
+
+PYPIRC_OLD = """\
+[server-login]
+username:tarek
+password:secret
+"""
+
+WANTED = """\
+[distutils]
+index-servers =
+    pypi
+
+[pypi]
+username:tarek
+password:xxx
+"""
+
+
+class FakePopen:
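+    """Fake subprocess.Popen returning canned output.
+
+    Output recorded in the test case's _exes mapping is replayed for
+    known commands; unknown commands get empty output so nothing matches.
+    """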
+    test_class = None
+
+    def __init__(self, args, bufsize=0, executable=None,
+                 stdin=None, stdout=None, stderr=None,
+                 preexec_fn=None, close_fds=False,
+                 shell=False, cwd=None, env=None, universal_newlines=False,
+                 startupinfo=None, creationflags=0,
+                 restore_signals=True, start_new_session=False,
+                 pass_fds=()):
+        if isinstance(args, str):
+            args = args.split()
+        self.cmd = args[0]
+        exes = self.test_class._exes
+        if self.cmd not in exes:
+            # we don't want to call the system; return empty
+            # output so it doesn't match
+            self.stdout = StringIO()
+            self.stderr = StringIO()
+        else:
+            self.stdout = StringIO(exes[self.cmd])
+            self.stderr = StringIO()
+
+    def communicate(self, input=None, timeout=None):
+        return self.stdout.read(), self.stderr.read()
+
+    def wait(self, timeout=None):
+        return 0
+
+
+class UtilTestCase(support.EnvironRestorer,
+                   support.TempdirManager,
+                   support.LoggingCatcher,
+                   unittest.TestCase):
+
+    restore_environ = ['HOME']
+
+    def setUp(self):
+        super(UtilTestCase, self).setUp()
+        self.tmp_dir = self.mkdtemp()
+        self.rc = os.path.join(self.tmp_dir, '.pypirc')
+        os.environ['HOME'] = self.tmp_dir
+        # saving the environment
+        self.name = os.name
+        self.platform = sys.platform
+        self.version = sys.version
+        self.sep = os.sep
+        self.join = os.path.join
+        self.isabs = os.path.isabs
+        self.splitdrive = os.path.splitdrive
+        #self._config_vars = copy(sysconfig._config_vars)
+
+        # patching os.uname
+        if hasattr(os, 'uname'):
+            self.uname = os.uname
+            self._uname = os.uname()
+        else:
+            self.uname = None
+            self._uname = None
+        os.uname = self._get_uname
+
+        # patching Popen
+        self.old_find_executable = util.find_executable
+        util.find_executable = self._find_executable
+        self._exes = {}
+        self.old_popen = subprocess.Popen
+        self.old_stdout = sys.stdout
+        self.old_stderr = sys.stderr
+        FakePopen.test_class = self
+        subprocess.Popen = FakePopen
+
+    def tearDown(self):
+        # getting back the environment
+        os.name = self.name
+        sys.platform = self.platform
+        sys.version = self.version
+        os.sep = self.sep
+        os.path.join = self.join
+        os.path.isabs = self.isabs
+        os.path.splitdrive = self.splitdrive
+        if self.uname is not None:
+            os.uname = self.uname
+        else:
+            del os.uname
+        #sysconfig._config_vars = copy(self._config_vars)
+        util.find_executable = self.old_find_executable
+        subprocess.Popen = self.old_popen
+        sys.stdout = self.old_stdout
+        sys.stderr = self.old_stderr
+        super(UtilTestCase, self).tearDown()
+
+    def _set_uname(self, uname):
+        self._uname = uname
+
+    def _get_uname(self):
+        return self._uname
+
+    def test_convert_path(self):
+        # linux/mac
+        os.sep = '/'
+
+        def _join(*path):
+            return '/'.join(path)
+        os.path.join = _join
+
+        self.assertEqual(convert_path('/home/to/my/stuff'),
+                         '/home/to/my/stuff')
+
+        # win
+        os.sep = '\\'
+
+        def _join(*path):
+            return '\\'.join(path)
+        os.path.join = _join
+
+        self.assertRaises(ValueError, convert_path, '/home/to/my/stuff')
+        self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/')
+
+        self.assertEqual(convert_path('home/to/my/stuff'),
+                         'home\\to\\my\\stuff')
+        self.assertEqual(convert_path('.'),
+                         os.curdir)
+
+    def test_change_root(self):
+        # linux/mac
+        os.name = 'posix'
+
+        def _isabs(path):
+            return path[0] == '/'
+        os.path.isabs = _isabs
+
+        def _join(*path):
+            return '/'.join(path)
+        os.path.join = _join
+
+        self.assertEqual(change_root('/root', '/old/its/here'),
+                         '/root/old/its/here')
+        self.assertEqual(change_root('/root', 'its/here'),
+                         '/root/its/here')
+
+        # windows
+        os.name = 'nt'
+
+        def _isabs(path):
+            return path.startswith('c:\\')
+        os.path.isabs = _isabs
+
+        def _splitdrive(path):
+            if path.startswith('c:'):
+                return '', path.replace('c:', '')
+            return '', path
+        os.path.splitdrive = _splitdrive
+
+        def _join(*path):
+            return '\\'.join(path)
+        os.path.join = _join
+
+        self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'),
+                         'c:\\root\\old\\its\\here')
+        self.assertEqual(change_root('c:\\root', 'its\\here'),
+                         'c:\\root\\its\\here')
+
+        # BugsBunny os (it's a great os)
+        os.name = 'BugsBunny'
+        self.assertRaises(PackagingPlatformError,
+                          change_root, 'c:\\root', 'its\\here')
+
+        # XXX platforms to be covered: os2, mac
+
+    def test_split_quoted(self):
+        self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'),
+                         ['one', 'two', 'three', 'four'])
+
+    def test_strtobool(self):
+        yes = ('y', 'Y', 'yes', 't', 'true', 'True', 'On', 'on', '1')
+        no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N')
+
+        for y in yes:
+            self.assertTrue(strtobool(y))
+
+        for n in no:
+            self.assertFalse(strtobool(n))
+
+    def test_rfc822_escape(self):
+        header = 'I am a\npoor\nlonesome\nheader\n'
+        res = rfc822_escape(header)
+        wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s'
+                  'header%(8s)s') % {'8s': '\n' + 8 * ' '}
+        self.assertEqual(res, wanted)
+
+    def test_find_exe_version(self):
+        # the ld version scheme under MAC OS is:
+        #   ^@(#)PROGRAM:ld  PROJECT:ld64-VERSION
+        #
+        # where VERSION is a 2-digit number for major
+        # revisions. For instance under Leopard, it's
+        # currently 77
+        #
+        # Dots are used when branching is done.
+        #
+        # The SnowLeopard ld64 is currently 95.2.12
+
+        for output, version in (('@(#)PROGRAM:ld  PROJECT:ld64-77', '77'),
+                                ('@(#)PROGRAM:ld  PROJECT:ld64-95.2.12',
+                                 '95.2.12')):
+            result = _MAC_OS_X_LD_VERSION.search(output)
+            self.assertEqual(result.group(1), version)
+
+    def _find_executable(self, name):
+        if name in self._exes:
+            return name
+        return None
+
+    def test_get_compiler_versions(self):
+        # get_compiler_versions calls util.find_executable on
+        # 'gcc', 'ld' and 'dllwrap'
+        self.assertEqual(get_compiler_versions(), (None, None, None))
+
+        # Let's fake we have 'gcc' and it returns '3.4.5'
+        self._exes['gcc'] = 'gcc (GCC) 3.4.5 (mingw special)\nFSF'
+        res = get_compiler_versions()
+        self.assertEqual(str(res[0]), '3.4.5')
+
+        # and let's see what happens when the version
+        # doesn't match the regular expression
+        # (\d+\.\d+(\.\d+)*)
+        self._exes['gcc'] = 'very strange output'
+        res = get_compiler_versions()
+        self.assertEqual(res[0], None)
+
+        # same thing for ld
+        if sys.platform != 'darwin':
+            self._exes['ld'] = 'GNU ld version 2.17.50 20060824'
+            res = get_compiler_versions()
+            self.assertEqual(str(res[1]), '2.17.50')
+            self._exes['ld'] = '@(#)PROGRAM:ld  PROJECT:ld64-77'
+            res = get_compiler_versions()
+            self.assertEqual(res[1], None)
+        else:
+            self._exes['ld'] = 'GNU ld version 2.17.50 20060824'
+            res = get_compiler_versions()
+            self.assertEqual(res[1], None)
+            self._exes['ld'] = '@(#)PROGRAM:ld  PROJECT:ld64-77'
+            res = get_compiler_versions()
+            self.assertEqual(str(res[1]), '77')
+
+        # and dllwrap
+        self._exes['dllwrap'] = 'GNU dllwrap 2.17.50 20060824\nFSF'
+        res = get_compiler_versions()
+        self.assertEqual(str(res[2]), '2.17.50')
+        self._exes['dllwrap'] = 'Cheese Wrap'
+        res = get_compiler_versions()
+        self.assertEqual(res[2], None)
+
+    @unittest.skipUnless(hasattr(sys, 'dont_write_bytecode'),
+                         'sys.dont_write_bytecode not supported')
+    def test_dont_write_bytecode(self):
+        # makes sure byte_compile raise a PackagingError
+        # if sys.dont_write_bytecode is True
+        old_dont_write_bytecode = sys.dont_write_bytecode
+        sys.dont_write_bytecode = True
+        try:
+            self.assertRaises(PackagingByteCompileError, byte_compile, [])
+        finally:
+            sys.dont_write_bytecode = old_dont_write_bytecode
+
+    def test_newer(self):
+        self.assertRaises(PackagingFileError, util.newer, 'xxx', 'xxx')
+        self.newer_f1 = self.mktempfile()
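+        # sleep so the second file gets a strictly newer mtime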
+        time.sleep(1)
+        self.newer_f2 = self.mktempfile()
+        self.assertTrue(util.newer(self.newer_f2.name, self.newer_f1.name))
+
+    def test_find_packages(self):
+        # let's create a structure we want to scan:
+        #
+        #   pkg1
+        #     __init__
+        #     pkg2
+        #       __init__
+        #     pkg3
+        #       __init__
+        #       pkg6
+        #           __init__
+        #     pkg4    <--- not a pkg
+        #       pkg8
+        #          __init__
+        #   pkg5
+        #     __init__
+        #
+        root = self.mkdtemp()
+        pkg1 = os.path.join(root, 'pkg1')
+        os.mkdir(pkg1)
+        self.write_file(os.path.join(pkg1, '__init__.py'))
+        os.mkdir(os.path.join(pkg1, 'pkg2'))
+        self.write_file(os.path.join(pkg1, 'pkg2', '__init__.py'))
+        os.mkdir(os.path.join(pkg1, 'pkg3'))
+        self.write_file(os.path.join(pkg1, 'pkg3', '__init__.py'))
+        os.mkdir(os.path.join(pkg1, 'pkg3', 'pkg6'))
+        self.write_file(os.path.join(pkg1, 'pkg3', 'pkg6', '__init__.py'))
+        os.mkdir(os.path.join(pkg1, 'pkg4'))
+        os.mkdir(os.path.join(pkg1, 'pkg4', 'pkg8'))
+        self.write_file(os.path.join(pkg1, 'pkg4', 'pkg8', '__init__.py'))
+        pkg5 = os.path.join(root, 'pkg5')
+        os.mkdir(pkg5)
+        self.write_file(os.path.join(pkg5, '__init__.py'))
+
+        res = find_packages([root], ['pkg1.pkg2'])
+        self.assertEqual(set(res), set(['pkg1', 'pkg5', 'pkg1.pkg3',
+                                        'pkg1.pkg3.pkg6']))
+
+    def test_resolve_name(self):
+        self.assertIs(str, resolve_name('builtins.str'))
+        self.assertEqual(
+            UtilTestCase.__name__,
+            resolve_name("packaging.tests.test_util.UtilTestCase").__name__)
+        self.assertEqual(
+            UtilTestCase.test_resolve_name.__name__,
+            resolve_name("packaging.tests.test_util.UtilTestCase."
+                         "test_resolve_name").__name__)
+
+        self.assertRaises(ImportError, resolve_name,
+                          "packaging.tests.test_util.UtilTestCaseNot")
+        self.assertRaises(ImportError, resolve_name,
+                          "packaging.tests.test_util.UtilTestCase."
+                          "nonexistent_attribute")
+
+    def test_import_nested_first_time(self):
+        tmp_dir = self.mkdtemp()
+        os.makedirs(os.path.join(tmp_dir, 'a', 'b'))
+        self.write_file(os.path.join(tmp_dir, 'a', '__init__.py'), '')
+        self.write_file(os.path.join(tmp_dir, 'a', 'b', '__init__.py'), '')
+        self.write_file(os.path.join(tmp_dir, 'a', 'b', 'c.py'),
+                                    'class Foo: pass')
+
+        try:
+            sys.path.append(tmp_dir)
+            resolve_name("a.b.c.Foo")
+            # assert nothing raised
+        finally:
+            sys.path.remove(tmp_dir)
+
+    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
+    def test_run_2to3_on_code(self):
+        content = "print 'test'"
+        converted_content = "print('test')"
+        file_handle = self.mktempfile()
+        file_name = file_handle.name
+        file_handle.write(content)
+        file_handle.flush()
+        file_handle.seek(0)
+        from packaging.util import run_2to3
+        run_2to3([file_name])
+        new_content = "".join(file_handle.read())
+        file_handle.close()
+        self.assertEqual(new_content, converted_content)
+
+    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
+    def test_run_2to3_on_doctests(self):
+        # to check if text files containing doctests only get converted.
+        content = ">>> print 'test'\ntest\n"
+        converted_content = ">>> print('test')\ntest\n\n"
+        file_handle = self.mktempfile()
+        file_name = file_handle.name
+        file_handle.write(content)
+        file_handle.flush()
+        file_handle.seek(0)
+        from packaging.util import run_2to3
+        run_2to3([file_name], doctests_only=True)
+        new_content = "".join(file_handle.readlines())
+        file_handle.close()
+        self.assertEqual(new_content, converted_content)
+
+    @unittest.skipUnless(os.name in ('nt', 'posix'),
+                         'runs only under posix or nt')
+    def test_spawn(self):
+        # no patching of Popen here
+        subprocess.Popen = self.old_popen
+        tmpdir = self.mkdtemp()
+
+        # creating something executable
+        # through the shell that returns 1
+        if os.name == 'posix':
+            exe = os.path.join(tmpdir, 'foo.sh')
+            self.write_file(exe, '#!/bin/sh\nexit 1')
+            os.chmod(exe, 0o777)
+        else:
+            exe = os.path.join(tmpdir, 'foo.bat')
+            self.write_file(exe, 'exit 1')
+
+        os.chmod(exe, 0o777)
+        self.assertRaises(PackagingExecError, spawn, [exe])
+
+        # now something that works
+        if os.name == 'posix':
+            exe = os.path.join(tmpdir, 'foo.sh')
+            self.write_file(exe, '#!/bin/sh\nexit 0')
+            os.chmod(exe, 0o777)
+        else:
+            exe = os.path.join(tmpdir, 'foo.bat')
+            self.write_file(exe, 'exit 0')
+
+        os.chmod(exe, 0o777)
+        spawn([exe])  # should work without any error
+
+    def test_server_registration(self):
+        # This test makes sure we know how to:
+        # 1. handle several sections in .pypirc
+        # 2. handle the old format
+
+        # new format
+        self.write_file(self.rc, PYPIRC)
+        config = read_pypirc()
+
+        config = sorted(config.items())
+        expected = [('password', 'xxxx'), ('realm', 'pypi'),
+                    ('repository', 'http://pypi.python.org/pypi'),
+                    ('server', 'pypi'), ('username', 'me')]
+        self.assertEqual(config, expected)
+
+        # old format
+        self.write_file(self.rc, PYPIRC_OLD)
+        config = read_pypirc()
+        config = sorted(config.items())
+        expected = [('password', 'secret'), ('realm', 'pypi'),
+                    ('repository', 'http://pypi.python.org/pypi'),
+                    ('server', 'server-login'), ('username', 'tarek')]
+        self.assertEqual(config, expected)
+
+    def test_server_empty_registration(self):
+        rc = get_pypirc_path()
+        self.assertFalse(os.path.exists(rc))
+        generate_pypirc('tarek', 'xxx')
+        self.assertTrue(os.path.exists(rc))
+        with open(rc) as f:
+            content = f.read()
+        self.assertEqual(content, WANTED)
+
+
+class GlobTestCaseBase(support.TempdirManager,
+                       support.LoggingCatcher,
+                       unittest.TestCase):
+
+    def build_files_tree(self, files):
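+        # paths use '/' separators; a trailing '/' creates an empty directory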
+        tempdir = self.mkdtemp()
+        for filepath in files:
+            is_dir = filepath.endswith('/')
+            filepath = os.path.join(tempdir, *filepath.split('/'))
+            if is_dir:
+                dirname = filepath
+            else:
+                dirname = os.path.dirname(filepath)
+            if dirname and not os.path.exists(dirname):
+                os.makedirs(dirname)
+            if not is_dir:
+                self.write_file(filepath, 'babar')
+        return tempdir
+
+    @staticmethod
+    def os_dependent_path(path):
+        path = path.rstrip('/').split('/')
+        return os.path.join(*path)
+
+    def clean_tree(self, spec):
+        files = []
+        for path, includes in spec.items():
+            if includes:
+                files.append(self.os_dependent_path(path))
+        return files
+
+
+class GlobTestCase(GlobTestCaseBase):
+
+    def setUp(self):
+        super(GlobTestCase, self).setUp()
+        self.cwd = os.getcwd()
+
+    def tearDown(self):
+        os.chdir(self.cwd)
+        super(GlobTestCase, self).tearDown()
+
+    def assertGlobMatch(self, glob, spec):
+        """Check that iglob(glob) matches exactly the expected files."""
+        tempdir = self.build_files_tree(spec)
+        expected = self.clean_tree(spec)
+        os.chdir(tempdir)
+        result = list(iglob(glob))
+        self.assertCountEqual(expected, result)
+
+    def test_regex_rich_glob(self):
+        matches = RICH_GLOB.findall(
+                                r"babar aime les {fraises} est les {huitres}")
+        self.assertEqual(["fraises", "huitres"], matches)
+
+    def test_simple_glob(self):
+        glob = '*.tp?'
+        spec = {'coucou.tpl': True,
+                'coucou.tpj': True,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_simple_glob_in_dir(self):
+        glob = os.path.join('babar', '*.tp?')
+        spec = {'babar/coucou.tpl': True,
+                'babar/coucou.tpj': True,
+                'babar/toto.bin': False,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_recursive_glob_head(self):
+        glob = os.path.join('**', 'tip', '*.t?l')
+        spec = {'babar/zaza/zuzu/tip/coucou.tpl': True,
+                'babar/z/tip/coucou.tpl': True,
+                'babar/tip/coucou.tpl': True,
+                'babar/zeop/tip/babar/babar.tpl': False,
+                'babar/z/tip/coucou.bin': False,
+                'babar/toto.bin': False,
+                'zozo/zuzu/tip/babar.tpl': True,
+                'zozo/tip/babar.tpl': True,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_recursive_glob_tail(self):
+        glob = os.path.join('babar', '**')
+        spec = {'babar/zaza/': True,
+                'babar/zaza/zuzu/': True,
+                'babar/zaza/zuzu/babar.xml': True,
+                'babar/zaza/zuzu/toto.xml': True,
+                'babar/zaza/zuzu/toto.csv': True,
+                'babar/zaza/coucou.tpl': True,
+                'babar/bubu.tpl': True,
+                'zozo/zuzu/tip/babar.tpl': False,
+                'zozo/tip/babar.tpl': False,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_recursive_glob_middle(self):
+        glob = os.path.join('babar', '**', 'tip', '*.t?l')
+        spec = {'babar/zaza/zuzu/tip/coucou.tpl': True,
+                'babar/z/tip/coucou.tpl': True,
+                'babar/tip/coucou.tpl': True,
+                'babar/zeop/tip/babar/babar.tpl': False,
+                'babar/z/tip/coucou.bin': False,
+                'babar/toto.bin': False,
+                'zozo/zuzu/tip/babar.tpl': False,
+                'zozo/tip/babar.tpl': False,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_glob_set_tail(self):
+        glob = os.path.join('bin', '*.{bin,sh,exe}')
+        spec = {'bin/babar.bin': True,
+                'bin/zephir.sh': True,
+                'bin/celestine.exe': True,
+                'bin/cornelius.bat': False,
+                'bin/cornelius.xml': False,
+                'toto/yurg': False,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_glob_set_middle(self):
+        glob = os.path.join('xml', '{babar,toto}.xml')
+        spec = {'xml/babar.xml': True,
+                'xml/toto.xml': True,
+                'xml/babar.xslt': False,
+                'xml/cornelius.sgml': False,
+                'xml/zephir.xml': False,
+                'toto/yurg.xml': False,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_glob_set_head(self):
+        glob = os.path.join('{xml,xslt}', 'babar.*')
+        spec = {'xml/babar.xml': True,
+                'xml/toto.xml': False,
+                'xslt/babar.xslt': True,
+                'xslt/toto.xslt': False,
+                'toto/yurg.xml': False,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_glob_all(self):
+        dirs = '{%s,%s}' % (os.path.join('xml', '*'),
+                            os.path.join('xslt', '**'))
+        glob = os.path.join(dirs, 'babar.xml')
+        spec = {'xml/a/babar.xml': True,
+                'xml/b/babar.xml': True,
+                'xml/a/c/babar.xml': False,
+                'xslt/a/babar.xml': True,
+                'xslt/b/babar.xml': True,
+                'xslt/a/c/babar.xml': True,
+                'toto/yurg.xml': False,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_invalid_glob_pattern(self):
+        invalids = [
+            'ppooa**',
+            'azzaeaz4**/',
+            '/**ddsfs',
+            '**##1e"&e',
+            'DSFb**c009',
+            '{',
+            '{aaQSDFa',
+            '}',
+            'aQSDFSaa}',
+            '{**a,',
+            ',**a}',
+            '{a**,',
+            ',b**}',
+            '{a**a,babar}',
+            '{bob,b**z}',
+        ]
+        msg = "%r is not supposed to be a valid pattern"
+        for pattern in invalids:
+            try:
+                iglob(pattern)
+            except ValueError:
+                continue
+            else:
+                self.fail(msg % pattern)
+
+
+class EggInfoToDistInfoTestCase(support.TempdirManager,
+                                support.LoggingCatcher,
+                                unittest.TestCase):
+
+    def get_metadata_file_paths(self, distinfo_path):
+        req_metadata_files = ['METADATA', 'RECORD', 'INSTALLER']
+        metadata_file_paths = []
+        for metadata_file in req_metadata_files:
+            path = os.path.join(distinfo_path, metadata_file)
+            metadata_file_paths.append(path)
+        return metadata_file_paths
+
+    def test_egginfo_to_distinfo_setuptools(self):
+        distinfo = 'hello-0.1.1-py3.3.dist-info'
+        egginfo = 'hello-0.1.1-py3.3.egg-info'
+        dirs = [egginfo]
+        files = ['hello.py', 'hello.pyc']
+        extra_metadata = ['dependency_links.txt', 'entry_points.txt',
+                          'not-zip-safe', 'PKG-INFO', 'top_level.txt',
+                          'SOURCES.txt']
+        for f in extra_metadata:
+            files.append(os.path.join(egginfo, f))
+
+        tempdir, record_file = self.build_dist_tree(files, dirs)
+        distinfo_path = os.path.join(tempdir, distinfo)
+        egginfo_path = os.path.join(tempdir, egginfo)
+        metadata_file_paths = self.get_metadata_file_paths(distinfo_path)
+
+        egginfo_to_distinfo(record_file)
+        # test that directories and files get created
+        self.assertTrue(os.path.isdir(distinfo_path))
+        self.assertTrue(os.path.isdir(egginfo_path))
+
+        for mfile in metadata_file_paths:
+            self.assertTrue(os.path.isfile(mfile))
+
+    def test_egginfo_to_distinfo_distutils(self):
+        distinfo = 'hello-0.1.1-py3.3.dist-info'
+        egginfo = 'hello-0.1.1-py3.3.egg-info'
+        # egginfo is a file in distutils which contains the metadata
+        files = ['hello.py', 'hello.pyc', egginfo]
+
+        tempdir, record_file = self.build_dist_tree(files, dirs=[])
+        distinfo_path = os.path.join(tempdir, distinfo)
+        egginfo_path = os.path.join(tempdir, egginfo)
+        metadata_file_paths = self.get_metadata_file_paths(distinfo_path)
+
+        egginfo_to_distinfo(record_file)
+        # test that directories and files get created
+        self.assertTrue(os.path.isdir(distinfo_path))
+        self.assertTrue(os.path.isfile(egginfo_path))
+
+        for mfile in metadata_file_paths:
+            self.assertTrue(os.path.isfile(mfile))
+
+    def build_dist_tree(self, files, dirs):
+        tempdir = self.mkdtemp()
+        record_file_path = os.path.join(tempdir, 'RECORD')
+        file_paths, dir_paths = ([], [])
+        for d in dirs:
+            path = os.path.join(tempdir, d)
+            os.makedirs(path)
+            dir_paths.append(path)
+        for f in files:
+            path = os.path.join(tempdir, f)
+            with open(path, 'w') as _f:
+                _f.write(f)
+            file_paths.append(path)
+
+        with open(record_file_path, 'w') as record_file:
+            for fpath in file_paths:
+                record_file.write(fpath + '\n')
+            for dpath in dir_paths:
+                record_file.write(dpath + '\n')
+
+        return (tempdir, record_file_path)
+
+
+class PackagingLibChecks(support.TempdirManager,
+                         support.LoggingCatcher,
+                         unittest.TestCase):
+
+    def setUp(self):
+        super(PackagingLibChecks, self).setUp()
+        self._empty_dir = self.mkdtemp()
+
+    def test_empty_package_is_not_based_on_anything(self):
+        self.assertFalse(is_setuptools(self._empty_dir))
+        self.assertFalse(is_distutils(self._empty_dir))
+        self.assertFalse(is_packaging(self._empty_dir))
+
+    def test_setup_py_importing_setuptools_is_setuptools_based(self):
+        self.assertTrue(is_setuptools(self._setuptools_setup_py_pkg()))
+
+    def test_egg_info_dir_and_setup_py_is_setuptools_based(self):
+        self.assertTrue(is_setuptools(self._setuptools_egg_info_pkg()))
+
+    def test_egg_info_and_non_setuptools_setup_py_is_setuptools_based(self):
+        self.assertTrue(is_setuptools(self._egg_info_with_no_setuptools()))
+
+    def test_setup_py_not_importing_setuptools_is_not_setuptools_based(self):
+        self.assertFalse(is_setuptools(self._random_setup_py_pkg()))
+
+    def test_setup_py_importing_distutils_is_distutils_based(self):
+        self.assertTrue(is_distutils(self._distutils_setup_py_pkg()))
+
+    def test_pkg_info_file_and_setup_py_is_distutils_based(self):
+        self.assertTrue(is_distutils(self._distutils_pkg_info()))
+
+    def test_pkg_info_and_non_distutils_setup_py_is_distutils_based(self):
+        self.assertTrue(is_distutils(self._pkg_info_with_no_distutils()))
+
+    def test_setup_py_not_importing_distutils_is_not_distutils_based(self):
+        self.assertFalse(is_distutils(self._random_setup_py_pkg()))
+
+    def test_setup_cfg_with_no_metadata_section_is_not_packaging_based(self):
+        self.assertFalse(is_packaging(self._setup_cfg_with_no_metadata_pkg()))
+
+    def test_setup_cfg_with_valid_metadata_section_is_packaging_based(self):
+        self.assertTrue(is_packaging(self._valid_setup_cfg_pkg()))
+
+    def test_setup_cfg_and_invalid_setup_cfg_is_not_packaging_based(self):
+        self.assertFalse(is_packaging(self._invalid_setup_cfg_pkg()))
+
+    def test_get_install_method_with_setuptools_pkg(self):
+        path = self._setuptools_setup_py_pkg()
+        self.assertEqual("setuptools", get_install_method(path))
+
+    def test_get_install_method_with_distutils_pkg(self):
+        path = self._distutils_pkg_info()
+        self.assertEqual("distutils", get_install_method(path))
+
+    def test_get_install_method_with_packaging_pkg(self):
+        path = self._valid_setup_cfg_pkg()
+        self.assertEqual("packaging", get_install_method(path))
+
+    def test_get_install_method_with_unknown_pkg(self):
+        path = self._invalid_setup_cfg_pkg()
+        self.assertRaises(InstallationException, get_install_method, path)
+
+    def test_is_setuptools_logs_setup_py_text_found(self):
+        is_setuptools(self._setuptools_setup_py_pkg())
+        expected = ['setup.py file found', 'found setuptools text in setup.py']
+        self.assertEqual(expected, self.get_logs(logging.INFO))
+
+    def test_is_setuptools_logs_setup_py_text_not_found(self):
+        directory = self._random_setup_py_pkg()
+        is_setuptools(directory)
+        info_expected = ['setup.py file found']
+        warn_expected = ['no egg-info directory found',
+                         'no setuptools text found in setup.py']
+        self.assertEqual(info_expected, self.get_logs(logging.INFO))
+        self.assertEqual(warn_expected, self.get_logs(logging.WARN))
+
+    def test_is_setuptools_logs_egg_info_dir_found(self):
+        is_setuptools(self._setuptools_egg_info_pkg())
+        expected = ['setup.py file found', 'found egg-info directory']
+        self.assertEqual(expected, self.get_logs(logging.INFO))
+
+    def test_is_distutils_logs_setup_py_text_found(self):
+        is_distutils(self._distutils_setup_py_pkg())
+        expected = ['setup.py file found', 'found distutils text in setup.py']
+        self.assertEqual(expected, self.get_logs(logging.INFO))
+
+    def test_is_distutils_logs_setup_py_text_not_found(self):
+        directory = self._random_setup_py_pkg()
+        is_distutils(directory)
+        info_expected = ['setup.py file found']
+        warn_expected = ['no PKG-INFO file found',
+                         'no distutils text found in setup.py']
+        self.assertEqual(info_expected, self.get_logs(logging.INFO))
+        self.assertEqual(warn_expected, self.get_logs(logging.WARN))
+
+    def test_is_distutils_logs_pkg_info_file_found(self):
+        is_distutils(self._distutils_pkg_info())
+        expected = ['setup.py file found', 'PKG-INFO file found']
+        self.assertEqual(expected, self.get_logs(logging.INFO))
+
+    def test_is_packaging_logs_setup_cfg_found(self):
+        is_packaging(self._valid_setup_cfg_pkg())
+        expected = ['setup.cfg file found']
+        self.assertEqual(expected, self.get_logs(logging.INFO))
+
+    def test_is_packaging_logs_setup_cfg_not_found(self):
+        is_packaging(self._empty_dir)
+        expected = ['no setup.cfg file found']
+        self.assertEqual(expected, self.get_logs(logging.WARN))
+
+    def _write_setuptools_setup_py(self, directory):
+        self.write_file((directory, 'setup.py'),
+                "from setuptools import setup")
+
+    def _write_distutils_setup_py(self, directory):
+        self.write_file([directory, 'setup.py'],
+                "from distutils.core import setup")
+
+    def _write_packaging_setup_cfg(self, directory):
+        self.write_file([directory, 'setup.cfg'],
+                        ("[metadata]\n"
+                         "name = mypackage\n"
+                         "version = 0.1.0\n"))
+
+    def _setuptools_setup_py_pkg(self):
+        tmp = self.mkdtemp()
+        self._write_setuptools_setup_py(tmp)
+        return tmp
+
+    def _distutils_setup_py_pkg(self):
+        tmp = self.mkdtemp()
+        self._write_distutils_setup_py(tmp)
+        return tmp
+
+    def _valid_setup_cfg_pkg(self):
+        tmp = self.mkdtemp()
+        self._write_packaging_setup_cfg(tmp)
+        return tmp
+
+    def _setuptools_egg_info_pkg(self):
+        tmp = self.mkdtemp()
+        self._write_setuptools_setup_py(tmp)
+        tempfile.mkdtemp(suffix='.egg-info', dir=tmp)
+        return tmp
+
+    def _distutils_pkg_info(self):
+        tmp = self._distutils_setup_py_pkg()
+        self.write_file([tmp, 'PKG-INFO'], '')
+        return tmp
+
+    def _setup_cfg_with_no_metadata_pkg(self):
+        tmp = self.mkdtemp()
+        self.write_file([tmp, 'setup.cfg'],
+                        ("[othersection]\n"
+                         "foo = bar\n"))
+        return tmp
+
+    def _invalid_setup_cfg_pkg(self):
+        tmp = self.mkdtemp()
+        self.write_file([tmp, 'setup.cfg'],
+                        ("[metadata]\n"
+                         "name = john\n"
+                         "last_name = doe\n"))
+        return tmp
+
+    def _egg_info_with_no_setuptools(self):
+        tmp = self._random_setup_py_pkg()
+        tempfile.mkdtemp(suffix='.egg-info', dir=tmp)
+        return tmp
+
+    def _pkg_info_with_no_distutils(self):
+        tmp = self._random_setup_py_pkg()
+        self.write_file([tmp, 'PKG-INFO'], '')
+        return tmp
+
+    def _random_setup_py_pkg(self):
+        tmp = self.mkdtemp()
+        self.write_file((tmp, 'setup.py'), "from mypackage import setup")
+        return tmp
+
+
+def test_suite():
+    suite = unittest.makeSuite(UtilTestCase)
+    suite.addTest(unittest.makeSuite(GlobTestCase))
+    suite.addTest(unittest.makeSuite(EggInfoToDistInfoTestCase))
+    suite.addTest(unittest.makeSuite(PackagingLibChecks))
+    return suite
+
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_version.py b/Lib/packaging/tests/test_version.py
new file mode 100644
index 0000000..f94c800
--- /dev/null
+++ b/Lib/packaging/tests/test_version.py
@@ -0,0 +1,252 @@
+"""Tests for packaging.version."""
+import doctest
+
+from packaging.version import NormalizedVersion as V
+from packaging.version import HugeMajorVersionNumError, IrrationalVersionError
+from packaging.version import suggest_normalized_version as suggest
+from packaging.version import VersionPredicate
+from packaging.tests import unittest
+
+
+class VersionTestCase(unittest.TestCase):
+
+    versions = ((V('1.0'), '1.0'),
+                (V('1.1'), '1.1'),
+                (V('1.2.3'), '1.2.3'),
+                (V('1.2'), '1.2'),
+                (V('1.2.3a4'), '1.2.3a4'),
+                (V('1.2c4'), '1.2c4'),
+                (V('1.2.3.4'), '1.2.3.4'),
+                (V('1.2.3.4.0b3'), '1.2.3.4b3'),
+                (V('1.2.0.0.0'), '1.2'),
+                (V('1.0.dev345'), '1.0.dev345'),
+                (V('1.0.post456.dev623'), '1.0.post456.dev623'))
+
+    def test_repr(self):
+        self.assertEqual(repr(V('1.0')), "NormalizedVersion('1.0')")
+
+    def test_basic_versions(self):
+        for v, s in self.versions:
+            self.assertEqual(str(v), s)
+
+    def test_hash(self):
+        for v, s in self.versions:
+            self.assertEqual(hash(v), hash(V(s)))
+
+        versions = set([v for v, s in self.versions])
+        for v, s in self.versions:
+            self.assertIn(v, versions)
+
+        self.assertEqual(set([V('1.0')]), set([V('1.0'), V('1.0')]))
+
+    def test_from_parts(self):
+        for v, s in self.versions:
+            parts = v.parts
+            v2 = V.from_parts(*parts)
+            self.assertEqual(v, v2)
+            self.assertEqual(str(v), str(v2))
+
+    def test_irrational_versions(self):
+        irrational = ('1', '1.2a', '1.2.3b', '1.02', '1.2a03',
+                      '1.2a3.04', '1.2.dev.2', '1.2dev', '1.2.dev',
+                      '1.2.dev2.post2', '1.2.post2.dev3.post4')
+
+        for s in irrational:
+            self.assertRaises(IrrationalVersionError, V, s)
+
+    def test_huge_version(self):
+        self.assertEqual(str(V('1980.0')), '1980.0')
+        self.assertRaises(HugeMajorVersionNumError, V, '1981.0')
+        self.assertEqual(str(V('1981.0', error_on_huge_major_num=False)),
+                         '1981.0')
+
+    def test_comparison(self):
+        comparison_doctest_string = r"""
+        >>> V('1.2.0') == '1.2'
+        Traceback (most recent call last):
+        ...
+        TypeError: cannot compare NormalizedVersion and str
+
+        >>> V('1.2') < '1.3'
+        Traceback (most recent call last):
+        ...
+        TypeError: cannot compare NormalizedVersion and str
+
+        >>> V('1.2.0') == V('1.2')
+        True
+        >>> V('1.2.0') == V('1.2.3')
+        False
+        >>> V('1.2.0') != V('1.2.3')
+        True
+        >>> V('1.2.0') < V('1.2.3')
+        True
+        >>> V('1.2.0') < V('1.2.0')
+        False
+        >>> V('1.2.0') <= V('1.2.0')
+        True
+        >>> V('1.2.0') <= V('1.2.3')
+        True
+        >>> V('1.2.3') <= V('1.2.0')
+        False
+        >>> V('1.2.0') >= V('1.2.0')
+        True
+        >>> V('1.2.3') >= V('1.2.0')
+        True
+        >>> V('1.2.0') >= V('1.2.3')
+        False
+        >>> (V('1.0') > V('1.0b2'))
+        True
+        >>> (V('1.0') > V('1.0c2') > V('1.0c1') > V('1.0b2') > V('1.0b1')
+        ...  > V('1.0a2') > V('1.0a1'))
+        True
+        >>> (V('1.0.0') > V('1.0.0c2') > V('1.0.0c1') > V('1.0.0b2') > V('1.0.0b1')
+        ...  > V('1.0.0a2') > V('1.0.0a1'))
+        True
+
+        >>> V('1.0') < V('1.0.post456.dev623')
+        True
+
+        >>> V('1.0.post456.dev623') < V('1.0.post456')  < V('1.0.post1234')
+        True
+
+        >>> (V('1.0a1')
+        ...  < V('1.0a2.dev456')
+        ...  < V('1.0a2')
+        ...  < V('1.0a2.1.dev456')  # e.g. need to do a quick post release on 1.0a2
+        ...  < V('1.0a2.1')
+        ...  < V('1.0b1.dev456')
+        ...  < V('1.0b2')
+        ...  < V('1.0c1.dev456')
+        ...  < V('1.0c1')
+        ...  < V('1.0.dev7')
+        ...  < V('1.0.dev18')
+        ...  < V('1.0.dev456')
+        ...  < V('1.0.dev1234')
+        ...  < V('1.0')
+        ...  < V('1.0.post456.dev623')  # development version of a post release
+        ...  < V('1.0.post456'))
+        True
+        """
+        doctest.script_from_examples(comparison_doctest_string)
+
+    def test_suggest_normalized_version(self):
+        self.assertEqual(suggest('1.0'), '1.0')
+        self.assertEqual(suggest('1.0-alpha1'), '1.0a1')
+        self.assertEqual(suggest('1.0c2'), '1.0c2')
+        self.assertEqual(suggest('walla walla washington'), None)
+        self.assertEqual(suggest('2.4c1'), '2.4c1')
+        self.assertEqual(suggest('v1.0'), '1.0')
+
+        # from setuptools
+        self.assertEqual(suggest('0.4a1.r10'), '0.4a1.post10')
+        self.assertEqual(suggest('0.7a1dev-r66608'), '0.7a1.dev66608')
+        self.assertEqual(suggest('0.6a9.dev-r41475'), '0.6a9.dev41475')
+        self.assertEqual(suggest('2.4preview1'), '2.4c1')
+        self.assertEqual(suggest('2.4pre1'), '2.4c1')
+        self.assertEqual(suggest('2.1-rc2'), '2.1c2')
+
+        # from pypi
+        self.assertEqual(suggest('0.1dev'), '0.1.dev0')
+        self.assertEqual(suggest('0.1.dev'), '0.1.dev0')
+
+        # we want to be able to parse Twisted
+        # development versions are like post releases in Twisted
+        self.assertEqual(suggest('9.0.0+r2363'), '9.0.0.post2363')
+
+        # pre-releases use markers like "pre1"
+        self.assertEqual(suggest('9.0.0pre1'), '9.0.0c1')
+
+        # we want to be able to parse Tcl/Tk,
+        # which uses "p1", "p2" for post releases
+        self.assertEqual(suggest('1.4p1'), '1.4.post1')
+
+    def test_predicate(self):
+        # VersionPredicate knows how to parse stuff like:
+        #
+        #   Project (>=version, ver2)
+
+        predicates = ('zope.interface (>3.5.0)',
+                      'AnotherProject (3.4)',
+                      'OtherProject (<3.0)',
+                      'NoVersion',
+                      'Hey (>=2.5,<2.7)')
+
+        for predicate in predicates:
+            v = VersionPredicate(predicate)
+
+        self.assertTrue(VersionPredicate('Hey (>=2.5,<2.7)').match('2.6'))
+        self.assertTrue(VersionPredicate('Ho').match('2.6'))
+        self.assertFalse(VersionPredicate('Hey (>=2.5,!=2.6,<2.7)').match('2.6'))
+        self.assertTrue(VersionPredicate('Ho (<3.0)').match('2.6'))
+        self.assertTrue(VersionPredicate('Ho (<3.0,!=2.5)').match('2.6.0'))
+        self.assertFalse(VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.0'))
+        self.assertTrue(VersionPredicate('Ho (2.5)').match('2.5.4'))
+        self.assertFalse(VersionPredicate('Ho (!=2.5)').match('2.5.2'))
+        self.assertTrue(VersionPredicate('Hey (<=2.5)').match('2.5.9'))
+        self.assertFalse(VersionPredicate('Hey (<=2.5)').match('2.6.0'))
+        self.assertTrue(VersionPredicate('Hey (>=2.5)').match('2.5.1'))
+
+        self.assertRaises(ValueError, VersionPredicate, '')
+
+        self.assertTrue(VersionPredicate('Hey 2.5').match('2.5.1'))
+
+        # XXX need to silence the micro version in this case
+        self.assertFalse(VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.3'))
+
+        # Make sure a predicate that ends with a number works
+        self.assertTrue(VersionPredicate('virtualenv5 (1.0)').match('1.0'))
+        self.assertTrue(VersionPredicate('virtualenv5').match('1.0'))
+        self.assertTrue(VersionPredicate('vi5two').match('1.0'))
+        self.assertTrue(VersionPredicate('5two').match('1.0'))
+        self.assertTrue(VersionPredicate('vi5two 1.0').match('1.0'))
+        self.assertTrue(VersionPredicate('5two 1.0').match('1.0'))
+
+        # test repr
+        for predicate in predicates:
+            self.assertEqual(str(VersionPredicate(predicate)), predicate)
+
+    def test_predicate_name(self):
+        # Test that names are parsed the right way
+
+        self.assertEqual('Hey', VersionPredicate('Hey (<1.1)').name)
+        self.assertEqual('Foo-Bar', VersionPredicate('Foo-Bar (1.1)').name)
+        self.assertEqual('Foo Bar', VersionPredicate('Foo Bar (1.1)').name)
+
+    def test_is_final(self):
+        # NormalizedVersion knows whether a version is a final one or not.
+        final_versions = ('1.0', '1.0.post456')
+        other_versions = ('1.0.dev1', '1.0a2', '1.0c3')
+
+        for version in final_versions:
+            self.assertTrue(V(version).is_final)
+        for version in other_versions:
+            self.assertFalse(V(version).is_final)
+
+
+class VersionWhiteBoxTestCase(unittest.TestCase):
+
+    def test_parse_numdots(self):
+        # For code coverage completeness, as pad_zeros_length can't be set or
+        # influenced from the public interface
+        self.assertEqual(
+            V('1.0')._parse_numdots('1.0', '1.0', pad_zeros_length=3),
+            [1, 0, 0])
+
+
+def test_suite():
+    suite = [unittest.makeSuite(VersionTestCase),
+             unittest.makeSuite(VersionWhiteBoxTestCase)]
+    return unittest.TestSuite(suite)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/util.py b/Lib/packaging/util.py
new file mode 100644
index 0000000..15da9e6
--- /dev/null
+++ b/Lib/packaging/util.py
@@ -0,0 +1,1450 @@
+"""packaging.util
+Miscellaneous utility functions.
+"""
+import errno
+import csv
+import hashlib
+import os
+import sys
+import re
+import shutil
+import string
+import tarfile
+import zipfile
+import posixpath
+import sysconfig
+import subprocess
+from copy import copy
+from glob import iglob as std_iglob
+from fnmatch import fnmatchcase
+from inspect import getsource
+from configparser import RawConfigParser
+
+from packaging import logger
+from packaging.errors import (PackagingPlatformError, PackagingFileError,
+                              PackagingByteCompileError, PackagingExecError,
+                              InstallationException, PackagingInternalError)
+
+_PLATFORM = None
+_DEFAULT_INSTALLER = 'packaging'
+
+
+def newer(source, target):
+    """Tell if the target is newer than the source.
+
+    Returns true if 'source' exists and is more recently modified than
+    'target', or if 'source' exists and 'target' doesn't.
+
+    Returns false if both exist and 'target' is the same age or younger
+    than 'source'. Raise PackagingFileError if 'source' does not exist.
+
+    Note that this test is not very accurate: files created in the same second
+    will have the same "age".
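+
+    For example, newer('spam.py', 'spam.pyc') is true when 'spam.pyc' is
+    missing or has an older modification time than 'spam.py' (illustrative
+    filenames).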
+    """
+    if not os.path.exists(source):
+        raise PackagingFileError("file '%s' does not exist" %
+                                 os.path.abspath(source))
+    if not os.path.exists(target):
+        return True
+
+    return os.stat(source).st_mtime > os.stat(target).st_mtime
+
+
+def get_platform():
+    """Return a string that identifies the current platform.
+
+    By default, will return the value returned by sysconfig.get_platform(),
+    but it can be changed by calling set_platform().
+    """
+    global _PLATFORM
+    if _PLATFORM is None:
+        _PLATFORM = sysconfig.get_platform()
+    return _PLATFORM
+
+
+def set_platform(identifier):
+    """Set the platform string identifier returned by get_platform().
+
+    Note that this change doesn't impact the value returned by
+    sysconfig.get_platform(); it is local to packaging.
+    """
+    global _PLATFORM
+    _PLATFORM = identifier
+
+
+def convert_path(pathname):
+    """Return 'pathname' as a name that will work on the native filesystem.
+
+    The path is split on '/' and put back together again using the current
+    directory separator.  Needed because filenames in the setup script are
+    always supplied in Unix style, and have to be converted to the local
+    convention before we can actually use them in the filesystem.  Raises
+    ValueError on non-Unix-ish systems if 'pathname' either starts or
+    ends with a slash.
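+
+    For example, 'pkg/data/info.txt' is returned unchanged on Unix, while on
+    Windows it is rejoined with the local separator, as os.path.join('pkg',
+    'data', 'info.txt') would produce (illustrative path).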
+    """
+    if os.sep == '/':
+        return pathname
+    if not pathname:
+        return pathname
+    if pathname[0] == '/':
+        raise ValueError("path '%s' cannot be absolute" % pathname)
+    if pathname[-1] == '/':
+        raise ValueError("path '%s' cannot end with '/'" % pathname)
+
+    paths = pathname.split('/')
+    while os.curdir in paths:
+        paths.remove(os.curdir)
+    if not paths:
+        return os.curdir
+    return os.path.join(*paths)
+
+
+def change_root(new_root, pathname):
+    """Return 'pathname' with 'new_root' prepended.
+
+    If 'pathname' is relative, this is equivalent to
+    os.path.join(new_root,pathname). Otherwise, it requires making 'pathname'
+    relative and then joining the two, which is tricky on DOS/Windows.
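+
+    For example (illustrative paths), change_root('/alt', 'lib/python')
+    gives '/alt/lib/python', and change_root('/alt', '/usr/lib') gives
+    '/alt/usr/lib' on POSIX systems.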
+    """
+    if os.name == 'posix':
+        if not os.path.isabs(pathname):
+            return os.path.join(new_root, pathname)
+        else:
+            return os.path.join(new_root, pathname[1:])
+
+    elif os.name == 'nt':
+        drive, path = os.path.splitdrive(pathname)
+        if path[0] == '\\':
+            path = path[1:]
+        return os.path.join(new_root, path)
+
+    elif os.name == 'os2':
+        drive, path = os.path.splitdrive(pathname)
+        if path[0] == os.sep:
+            path = path[1:]
+        return os.path.join(new_root, path)
+
+    else:
+        raise PackagingPlatformError("nothing known about "
+                                     "platform '%s'" % os.name)
+
+_environ_checked = False
+
+
+def check_environ():
+    """Ensure that 'os.environ' has all the environment variables needed.
+
+    We guarantee that users can use in config files, command-line options,
+    etc.  Currently this includes:
+      HOME - user's home directory (Unix only)
+      PLAT - description of the current platform, including hardware
+             and OS (see 'get_platform()')
+    """
+    global _environ_checked
+    if _environ_checked:
+        return
+
+    if os.name == 'posix' and 'HOME' not in os.environ:
+        import pwd
+        os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
+
+    if 'PLAT' not in os.environ:
+        os.environ['PLAT'] = sysconfig.get_platform()
+
+    _environ_checked = True
+
+
+def subst_vars(s, local_vars):
+    """Perform shell/Perl-style variable substitution on 'string'.
+
+    Every occurrence of '$' followed by a name is considered a variable, and
+    variable is substituted by the value found in the 'local_vars'
+    dictionary, or in 'os.environ' if it's not in 'local_vars'.
+    'os.environ' is first checked/augmented to guarantee that it contains
+    certain values: see 'check_environ()'.  Raise ValueError for any
+    variables not found in either 'local_vars' or 'os.environ'.
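+
+    Example::
+
+      >>> subst_vars('$name-$version', {'name': 'spam', 'version': '1.0'})
+      'spam-1.0'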
+    """
+    check_environ()
+
+    def _subst(match, local_vars=local_vars):
+        var_name = match.group(1)
+        if var_name in local_vars:
+            return str(local_vars[var_name])
+        else:
+            return os.environ[var_name]
+
+    try:
+        return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
+    except KeyError as var:
+        raise ValueError("invalid variable '$%s'" % var)
+
+
+# Needed by 'split_quoted()'
+_wordchars_re = _squote_re = _dquote_re = None
+
+
+def _init_regex():
+    global _wordchars_re, _squote_re, _dquote_re
+    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
+    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
+    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
+
+
+def split_quoted(s):
+    """Split a string up according to Unix shell-like rules for quotes and
+    backslashes.
+
+    In short: words are delimited by spaces, as long as those
+    spaces are not escaped by a backslash, or inside a quoted string.
+    Single and double quotes are equivalent, and the quote characters can
+    be backslash-escaped.  The backslash is stripped from any two-character
+    escape sequence, leaving only the escaped character.  The quote
+    characters are stripped from any quoted string.  Returns a list of
+    words.
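+
+    Example::
+
+      >>> split_quoted('hello "wor ld" abc')
+      ['hello', 'wor ld', 'abc']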
+    """
+    # This is a nice algorithm for splitting up a single string, since it
+    # doesn't require character-by-character examination.  It was a little
+    # bit of a brain-bender to get it working right, though...
+    if _wordchars_re is None:
+        _init_regex()
+
+    s = s.strip()
+    words = []
+    pos = 0
+
+    while s:
+        m = _wordchars_re.match(s, pos)
+        end = m.end()
+        if end == len(s):
+            words.append(s[:end])
+            break
+
+        if s[end] in string.whitespace:  # unescaped, unquoted whitespace: now
+            words.append(s[:end])        # we definitely have a word delimiter
+            s = s[end:].lstrip()
+            pos = 0
+
+        elif s[end] == '\\':             # preserve whatever is being escaped;
+                                         # will become part of the current word
+            s = s[:end] + s[end + 1:]
+            pos = end + 1
+
+        else:
+            if s[end] == "'":            # slurp singly-quoted string
+                m = _squote_re.match(s, end)
+            elif s[end] == '"':          # slurp doubly-quoted string
+                m = _dquote_re.match(s, end)
+            else:
+                raise RuntimeError("this can't happen "
+                                   "(bad char '%c')" % s[end])
+
+            if m is None:
+                raise ValueError("bad string (mismatched %s quotes?)" % s[end])
+
+            beg, end = m.span()
+            s = s[:beg] + s[beg + 1:end - 1] + s[end:]
+            pos = m.end() - 2
+
+        if pos >= len(s):
+            words.append(s)
+            break
+
+    return words
+
+
+def execute(func, args, msg=None, verbose=0, dry_run=False):
+    """Perform some action that affects the outside world.
+
+    Some actions (e.g. writing to the filesystem) are special because
+    they are disabled by the 'dry_run' flag.  This method takes care of all
+    that bureaucracy for you; all you have to do is supply the
+    function to call and an argument tuple for it (to embody the
+    "external action" being performed), and an optional message to
+    print.
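+
+    For example (illustrative call), execute(os.remove, ('build/temp.txt',),
+    dry_run=True) only logs "remove('build/temp.txt')" without removing
+    anything.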
+    """
+    if msg is None:
+        msg = "%s%r" % (func.__name__, args)
+        if msg[-2:] == ',)':        # correct for singleton tuple
+            msg = msg[0:-2] + ')'
+
+    logger.info(msg)
+    if not dry_run:
+        func(*args)
+
+
+def strtobool(val):
+    """Convert a string representation of truth to true (1) or false (0).
+
+    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
+    'val' is anything else.
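+
+    Example::
+
+      >>> strtobool('Yes'), strtobool('off')
+      (True, False)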
+    """
+    val = val.lower()
+    if val in ('y', 'yes', 't', 'true', 'on', '1'):
+        return True
+    elif val in ('n', 'no', 'f', 'false', 'off', '0'):
+        return False
+    else:
+        raise ValueError("invalid truth value %r" % (val,))
+
+
+def byte_compile(py_files, optimize=0, force=False, prefix=None,
+                 base_dir=None, verbose=0, dry_run=False, direct=None):
+    """Byte-compile a collection of Python source files to either .pyc
+    or .pyo files in the same directory.
+
+    'py_files' is a list of files to compile; any files that don't end in
+    ".py" are silently skipped. 'optimize' must be one of the following:
+      0 - don't optimize (generate .pyc)
+      1 - normal optimization (like "python -O")
+      2 - extra optimization (like "python -OO")
+    If 'force' is true, all files are recompiled regardless of
+    timestamps.
+
+    The source filename encoded in each bytecode file defaults to the
+    filenames listed in 'py_files'; you can modify these with 'prefix' and
+    'basedir'.  'prefix' is a string that will be stripped off of each
+    source filename, and 'base_dir' is a directory name that will be
+    prepended (after 'prefix' is stripped).  You can supply either or both
+    (or neither) of 'prefix' and 'base_dir', as you wish.
+
+    If 'dry_run' is true, doesn't actually do anything that would
+    affect the filesystem.
+
+    Byte-compilation is either done directly in this interpreter process
+    with the standard py_compile module, or indirectly by writing a
+    temporary script and executing it.  Normally, you should let
+    'byte_compile()' figure out whether to use direct compilation or not (see
+    the source for details).  The 'direct' flag is used by the script
+    generated in indirect mode; unless you know what you're doing, leave
+    it set to None.
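+
+    For example (illustrative paths), byte_compile(['build/lib/spam.py'],
+    prefix='build/lib/', base_dir='/usr/lib/python3.2') records
+    '/usr/lib/python3.2/spam.py' as the source filename in the generated
+    bytecode file.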
+    """
+    # byte-compilation is refused when sys.dont_write_bytecode is set
+    # FIXME this should not raise an error but skip silently
+    if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode:
+        raise PackagingByteCompileError('byte-compiling is disabled.')
+
+    # First, if the caller didn't force us into direct or indirect mode,
+    # figure out which mode we should be in.  We take a conservative
+    # approach: choose direct mode *only* if the current interpreter is
+    # in debug mode and optimize is 0.  If we're not in debug mode (-O
+    # or -OO), we don't know which level of optimization this
+    # interpreter is running with, so we can't do direct
+    # byte-compilation and be certain that it's the right thing.  Thus,
+    # always compile indirectly if the current interpreter is in either
+    # optimize mode, or if either optimization level was requested by
+    # the caller.
+    if direct is None:
+        direct = (__debug__ and optimize == 0)
+
+    # "Indirect" byte-compilation: write a temporary script and then
+    # run it with the appropriate flags.
+    if not direct:
+        from tempfile import mkstemp
+        # XXX script_fd may leak, use something better than mkstemp
+        script_fd, script_name = mkstemp(".py")
+        logger.info("writing byte-compilation script '%s'", script_name)
+        if not dry_run:
+            if script_fd is not None:
+                script = os.fdopen(script_fd, "w", encoding='utf-8')
+            else:
+                script = open(script_name, "w", encoding='utf-8')
+
+            with script:
+                script.write("""\
+from packaging.util import byte_compile
+files = [
+""")
+
+                # XXX would be nice to write absolute filenames, just for
+                # safety's sake (script should be more robust in the face of
+                # chdir'ing before running it).  But this requires abspath'ing
+                # 'prefix' as well, and that breaks the hack in build_lib's
+                # 'byte_compile()' method that carefully tacks on a trailing
+                # slash (os.sep really) to make sure the prefix here is "just
+                # right".  This whole prefix business is rather delicate -- the
+                # problem is that it's really a directory, but I'm treating it
+                # as a dumb string, so trailing slashes and so forth matter.
+
+                #py_files = map(os.path.abspath, py_files)
+                #if prefix:
+                #    prefix = os.path.abspath(prefix)
+
+                script.write(",\n".join(map(repr, py_files)) + "]\n")
+                script.write("""
+byte_compile(files, optimize=%r, force=%r,
+             prefix=%r, base_dir=%r,
+             verbose=%r, dry_run=False,
+             direct=True)
+""" % (optimize, force, prefix, base_dir, verbose))
+
+        cmd = [sys.executable, script_name]
+        if optimize == 1:
+            cmd.insert(1, "-O")
+        elif optimize == 2:
+            cmd.insert(1, "-OO")
+
+        env = copy(os.environ)
+        env['PYTHONPATH'] = os.path.pathsep.join(sys.path)
+        try:
+            spawn(cmd, env=env)
+        finally:
+            execute(os.remove, (script_name,), "removing %s" % script_name,
+                    dry_run=dry_run)
+
+    # "Direct" byte-compilation: use the py_compile module to compile
+    # right here, right now.  Note that the script generated in indirect
+    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
+    # cross-process recursion.  Hey, it works!
+    else:
+        from py_compile import compile
+
+        for file in py_files:
+            if file[-3:] != ".py":
+                # This lets us be lazy and not filter filenames in
+                # the "install_lib" command.
+                continue
+
+            # Terminology from the py_compile module:
+            #   cfile - byte-compiled file
+            #   dfile - purported source filename (same as 'file' by default)
+            cfile = file + (__debug__ and "c" or "o")
+            dfile = file
+            if prefix:
+                if file[:len(prefix)] != prefix:
+                    raise ValueError("invalid prefix: filename %r doesn't "
+                                     "start with %r" % (file, prefix))
+                dfile = dfile[len(prefix):]
+            if base_dir:
+                dfile = os.path.join(base_dir, dfile)
+
+            cfile_base = os.path.basename(cfile)
+            if direct:
+                if force or newer(file, cfile):
+                    logger.info("byte-compiling %s to %s", file, cfile_base)
+                    if not dry_run:
+                        compile(file, cfile, dfile)
+                else:
+                    logger.debug("skipping byte-compilation of %s to %s",
+                              file, cfile_base)
+
+
+def rfc822_escape(header):
+    """Return a form of *header* suitable for inclusion in an RFC 822-header.
+
+    This function ensures there are 8 spaces after each newline.
+    """
+    lines = header.split('\n')
+    sep = '\n' + 8 * ' '
+    return sep.join(lines)
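+
+# Example (illustrative): rfc822_escape('Summary\nMore details') returns
+# 'Summary\n        More details', so continuation lines stay indented
+# under the header field name.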
+
+_RE_VERSION = re.compile(r'(\d+\.\d+(\.\d+)*)')
+_MAC_OS_X_LD_VERSION = re.compile(r'^@\(#\)PROGRAM:ld  '
+                                  r'PROJECT:ld64-((\d+)(\.\d+)*)')
+
+
+def _find_ld_version():
+    """Find the ld version.  The version scheme differs under Mac OS X."""
+    if sys.platform == 'darwin':
+        return _find_exe_version('ld -v', _MAC_OS_X_LD_VERSION)
+    else:
+        return _find_exe_version('ld -v')
+
+
+def _find_exe_version(cmd, pattern=_RE_VERSION):
+    """Find the version of an executable by running `cmd` in the shell.
+
+    `pattern` is a compiled regular expression.  If not provided, defaults
+    to _RE_VERSION. If the command is not found, or the output does not
+    match the pattern, returns None.
+    """
+    from subprocess import Popen, PIPE
+    executable = cmd.split()[0]
+    if find_executable(executable) is None:
+        return None
+    # run in text mode so the output can be compared to str and searched
+    pipe = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE,
+                 universal_newlines=True)
+    try:
+        stdout, stderr = pipe.communicate()
+    finally:
+        pipe.stdout.close()
+        pipe.stderr.close()
+    # some commands, like ld under Mac OS X, give their
+    # output on stderr rather than stdout.
+    if stdout != '':
+        out_string = stdout
+    else:
+        out_string = stderr
+
+    result = pattern.search(out_string)
+    if result is None:
+        return None
+    return result.group(1)
+
+
+def get_compiler_versions():
+    """Return a tuple providing the versions of gcc, ld and dllwrap
+
+    For each command, if a command is not found, None is returned.
+    Otherwise a string with the version is returned.
+    """
+    gcc = _find_exe_version('gcc -dumpversion')
+    ld = _find_ld_version()
+    dllwrap = _find_exe_version('dllwrap --version')
+    return gcc, ld, dllwrap
+
+
+def newer_group(sources, target, missing='error'):
+    """Return true if 'target' is out-of-date with respect to any file
+    listed in 'sources'.
+
+    In other words, if 'target' exists and is newer
+    than every file in 'sources', return false; otherwise return true.
+    'missing' controls what we do when a source file is missing; the
+    default ("error") is to blow up with an OSError from inside 'stat()';
+    if it is "ignore", we silently drop any missing source files; if it is
+    "newer", any missing source files make us assume that 'target' is
+    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
+    carry out commands that wouldn't work because inputs are missing, but
+    that doesn't matter because you're not actually going to run the
+    commands).
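+
+    For example, newer_group(['spam.c', 'eggs.c'], 'spam.o') is true if
+    'spam.o' is missing or older than either source file (illustrative
+    filenames).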
+    """
+    # If the target doesn't even exist, then it's definitely out-of-date.
+    if not os.path.exists(target):
+        return True
+
+    # Otherwise we have to find out the hard way: if *any* source file
+    # is more recent than 'target', then 'target' is out-of-date and
+    # we can immediately return true.  If we fall through to the end
+    # of the loop, then 'target' is up-to-date and we return false.
+    target_mtime = os.stat(target).st_mtime
+
+    for source in sources:
+        if not os.path.exists(source):
+            if missing == 'error':      # blow up when we stat() the file
+                pass
+            elif missing == 'ignore':   # missing source dropped from
+                continue                # target's dependency list
+            elif missing == 'newer':    # missing source means target is
+                return True             # out-of-date
+
+        if os.stat(source).st_mtime > target_mtime:
+            return True
+
+    return False
+
+
+def write_file(filename, contents):
+    """Create *filename* and write *contents* to it.
+
+    *contents* is a sequence of strings without line terminators.
+    """
+    with open(filename, "w") as f:
+        for line in contents:
+            f.write(line + "\n")
+
+
+def _is_package(path):
+    if not os.path.isdir(path):
+        return False
+    return os.path.isfile(os.path.join(path, '__init__.py'))
+
+
+# Code taken from the pip project
+def _is_archive_file(name):
+    archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar')
+    ext = splitext(name)[1].lower()
+    return ext in archives
+
+
+def _under(path, root):
+    path = path.split(os.sep)
+    root = root.split(os.sep)
+    if len(root) > len(path):
+        return False
+    for pos, part in enumerate(root):
+        if path[pos] != part:
+            return False
+    return True
+
+
+def _package_name(root_path, path):
+    # Return a dotted package name, given a subpath
+    if not _under(path, root_path):
+        raise ValueError('"%s" is not a subpath of "%s"' % (path, root_path))
+    return path[len(root_path) + 1:].replace(os.sep, '.')
+
+
+def find_packages(paths=(os.curdir,), exclude=()):
+    """Return a list all Python packages found recursively within
+    directories 'paths'
+
+    'paths' should be supplied as a sequence of "cross-platform"
+    (i.e. URL-style) path; it will be converted to the appropriate local
+    path syntax.
+
+    'exclude' is a sequence of package names to exclude; '*' can be used as
+    a wildcard in the names, such that 'foo.*' will exclude all subpackages
+    of 'foo' (but not 'foo' itself).
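+
+    For example (illustrative layout), find_packages(exclude=['tests',
+    'tests.*']) lists every package under the current directory except
+    'tests' and its subpackages.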
+    """
+    packages = []
+    discarded = []
+
+    def _discarded(path):
+        for discard in discarded:
+            if _under(path, discard):
+                return True
+        return False
+
+    for path in paths:
+        path = convert_path(path)
+        for root, dirs, files in os.walk(path):
+            for dir_ in dirs:
+                fullpath = os.path.join(root, dir_)
+                if _discarded(fullpath):
+                    continue
+                # we work only with Python packages
+                if not _is_package(fullpath):
+                    discarded.append(fullpath)
+                    continue
+                # see if it's excluded
+                excluded = False
+                package_name = _package_name(path, fullpath)
+                for pattern in exclude:
+                    if fnmatchcase(package_name, pattern):
+                        excluded = True
+                        break
+                if excluded:
+                    continue
+
+                # adding it to the list
+                packages.append(package_name)
+    return packages
+
+
+def resolve_name(name):
+    """Resolve a name like ``module.object`` to an object and return it.
+
+    Raise ImportError if the module or name is not found.
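+
+    Example::
+
+      >>> import os.path
+      >>> resolve_name('os.path.join') is os.path.join
+      True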
+    """
+    parts = name.split('.')
+    cursor = len(parts)
+    module_name = parts[:cursor]
+
+    while cursor > 0:
+        try:
+            ret = __import__('.'.join(module_name))
+            break
+        except ImportError:
+            if cursor == 0:
+                raise
+            cursor -= 1
+            module_name = parts[:cursor]
+            ret = ''
+
+    for part in parts[1:]:
+        try:
+            ret = getattr(ret, part)
+        except AttributeError as exc:
+            raise ImportError(exc)
+
+    return ret
+
+
+def splitext(path):
+    """Like os.path.splitext, but take off .tar too"""
+    base, ext = posixpath.splitext(path)
+    if base.lower().endswith('.tar'):
+        ext = base[-4:] + ext
+        base = base[:-4]
+    return base, ext
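+
+# Example (illustrative): splitext('dist/spam-1.0.tar.gz') returns
+# ('dist/spam-1.0', '.tar.gz'), whereas os.path.splitext would return
+# ('dist/spam-1.0.tar', '.gz').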
+
+
+def unzip_file(filename, location, flatten=True):
+    """Unzip the file *filename* into the *location* directory."""
+    if not os.path.exists(location):
+        os.makedirs(location)
+    with open(filename, 'rb') as zipfp:
+        zip = zipfile.ZipFile(zipfp)
+        leading = has_leading_dir(zip.namelist()) and flatten
+        for name in zip.namelist():
+            data = zip.read(name)
+            fn = name
+            if leading:
+                fn = split_leading_dir(name)[1]
+            fn = os.path.join(location, fn)
+            dir = os.path.dirname(fn)
+            if not os.path.exists(dir):
+                os.makedirs(dir)
+            if fn.endswith('/') or fn.endswith('\\'):
+                # A directory
+                if not os.path.exists(fn):
+                    os.makedirs(fn)
+            else:
+                with open(fn, 'wb') as fp:
+                    fp.write(data)
+
+
+def untar_file(filename, location):
+    """Untar the file *filename* into the *location* directory."""
+    if not os.path.exists(location):
+        os.makedirs(location)
+    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
+        mode = 'r:gz'
+    elif (filename.lower().endswith('.bz2')
+          or filename.lower().endswith('.tbz')):
+        mode = 'r:bz2'
+    elif filename.lower().endswith('.tar'):
+        mode = 'r'
+    else:
+        mode = 'r:*'
+    with tarfile.open(filename, mode) as tar:
+        leading = has_leading_dir(member.name for member in tar.getmembers())
+        for member in tar.getmembers():
+            fn = member.name
+            if leading:
+                fn = split_leading_dir(fn)[1]
+            path = os.path.join(location, fn)
+            if member.isdir():
+                if not os.path.exists(path):
+                    os.makedirs(path)
+            else:
+                try:
+                    fp = tar.extractfile(member)
+                except (KeyError, AttributeError):
+                    # Some corrupt tar files seem to produce this
+                    # (specifically bad symlinks)
+                    continue
+                try:
+                    if not os.path.exists(os.path.dirname(path)):
+                        os.makedirs(os.path.dirname(path))
+                    with open(path, 'wb') as destfp:
+                        shutil.copyfileobj(fp, destfp)
+                finally:
+                    fp.close()
+
+
+def has_leading_dir(paths):
+    """Return true if all the paths have the same leading path name.
+
+    In other words, check that everything is in one subdirectory in an
+    archive.
+    """
+    common_prefix = None
+    for path in paths:
+        prefix, rest = split_leading_dir(path)
+        if not prefix:
+            return False
+        elif common_prefix is None:
+            common_prefix = prefix
+        elif prefix != common_prefix:
+            return False
+    return True
+
+
+def split_leading_dir(path):
+    path = str(path)
+    path = path.lstrip('/').lstrip('\\')
+    if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
+                        or '\\' not in path):
+        return path.split('/', 1)
+    elif '\\' in path:
+        return path.split('\\', 1)
+    else:
+        return path, ''
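+
+# Examples (illustrative): split_leading_dir('spam/ham/eggs.py') returns
+# ['spam', 'ham/eggs.py']; hence has_leading_dir(['spam/a', 'spam/b']) is
+# true and has_leading_dir(['spam/a', 'ham/b']) is false.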
+
+
+def spawn(cmd, search_path=True, verbose=0, dry_run=False, env=None):
+    """Run another program specified as a command list 'cmd' in a new process.
+
+    'cmd' is just the argument list for the new process, i.e.
+    cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
+    There is no way to run a program with a name different from that of its
+    executable.
+
+    If 'search_path' is true (the default), the system's executable
+    search path will be used to find the program; otherwise, cmd[0]
+    must be the exact path to the executable.  If 'dry_run' is true,
+    the command will not actually be run.
+
+    If 'env' is given, it's an environment dictionary used for the execution
+    environment.
+
+    Raise PackagingExecError if running the program fails in any way; just
+    return on success.
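+
+    For example (illustrative command), spawn(['gcc', '-o', 'hello',
+    'hello.c']) runs gcc and raises PackagingExecError if it exits with a
+    non-zero status.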
+    """
+    logger.info(' '.join(cmd))
+    if dry_run:
+        return
+    exit_status = subprocess.call(cmd, env=env)
+    if exit_status != 0:
+        msg = "command '%s' failed with exit status %d"
+        raise PackagingExecError(msg % (cmd, exit_status))
+
+
+def find_executable(executable, path=None):
+    """Try to find 'executable' in the directories listed in 'path'.
+
+    *path* is a string listing directories separated by 'os.pathsep' and
+    defaults to os.environ['PATH'].  Returns the complete filename or None
+    if not found.
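+
+    For example, find_executable('python') may return '/usr/bin/python' on a
+    Unix system where that file is on the path (illustrative location).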
+    """
+    if path is None:
+        path = os.environ['PATH']
+    paths = path.split(os.pathsep)
+    base, ext = os.path.splitext(executable)
+
+    if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
+        executable = executable + '.exe'
+
+    if not os.path.isfile(executable):
+        for p in paths:
+            f = os.path.join(p, executable)
+            if os.path.isfile(f):
+                # the file exists, we have a shot at spawn working
+                return f
+        return None
+    else:
+        return executable
+
+
+DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
+DEFAULT_REALM = 'pypi'
+DEFAULT_PYPIRC = """\
+[distutils]
+index-servers =
+    pypi
+
+[pypi]
+username:%s
+password:%s
+"""
+
+
+def get_pypirc_path():
+    """Return path to pypirc config file."""
+    return os.path.join(os.path.expanduser('~'), '.pypirc')
+
+
+def generate_pypirc(username, password):
+    """Create a default .pypirc file."""
+    rc = get_pypirc_path()
+    with open(rc, 'w') as f:
+        f.write(DEFAULT_PYPIRC % (username, password))
+    try:
+        os.chmod(rc, 0o600)
+    except OSError:
+        # should do something better here
+        pass
+
+
+def read_pypirc(repository=DEFAULT_REPOSITORY, realm=DEFAULT_REALM):
+    """Read the .pypirc file."""
+    rc = get_pypirc_path()
+    if os.path.exists(rc):
+        config = RawConfigParser()
+        config.read(rc)
+        sections = config.sections()
+        if 'distutils' in sections:
+            # let's get the list of servers
+            index_servers = config.get('distutils', 'index-servers')
+            _servers = [server.strip() for server in
+                        index_servers.split('\n')
+                        if server.strip() != '']
+            if _servers == []:
+                # nothing set, let's try to get the default pypi
+                if 'pypi' in sections:
+                    _servers = ['pypi']
+                else:
+                    # the file is not properly defined, returning
+                    # an empty dict
+                    return {}
+            for server in _servers:
+                current = {'server': server}
+                current['username'] = config.get(server, 'username')
+
+                # optional params
+                for key, default in (('repository', DEFAULT_REPOSITORY),
+                                     ('realm', DEFAULT_REALM),
+                                     ('password', None)):
+                    if config.has_option(server, key):
+                        current[key] = config.get(server, key)
+                    else:
+                        current[key] = default
+                if (current['server'] == repository or
+                    current['repository'] == repository):
+                    return current
+        elif 'server-login' in sections:
+            # old format
+            server = 'server-login'
+            if config.has_option(server, 'repository'):
+                repository = config.get(server, 'repository')
+            else:
+                repository = DEFAULT_REPOSITORY
+
+            return {'username': config.get(server, 'username'),
+                    'password': config.get(server, 'password'),
+                    'repository': repository,
+                    'server': server,
+                    'realm': DEFAULT_REALM}
+
+    return {}
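
To illustrate the round trip, a hedged sketch of the two helpers together;
the credentials are placeholders:

    from packaging.util import generate_pypirc, read_pypirc

    generate_pypirc('alice', 'secret')   # writes the skeleton above to ~/.pypirc
    creds = read_pypirc()
    # with the generated file, creds would be roughly:
    # {'server': 'pypi', 'username': 'alice', 'password': 'secret',
    #  'repository': 'http://pypi.python.org/pypi', 'realm': 'pypi'}
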
+
+
+# utility functions for 2to3 support
+
+def run_2to3(files, doctests_only=False, fixer_names=None,
+             options=None, explicit=None):
+    """ Wrapper function around the refactor() class which
+    performs the conversions on a list of python files.
+    Invoke 2to3 on a list of Python files. The files should all come
+    from the build area, as the modification is done in-place."""
+
+    # Make the import local, to delay loading 2to3 until it is needed
+    from lib2to3.refactor import get_fixers_from_package, RefactoringTool
+    fixers = get_fixers_from_package('lib2to3.fixes')
+
+    if fixer_names:
+        for fixername in fixer_names:
+            fixers.extend(fixer for fixer in
+                          get_fixers_from_package(fixername))
+    r = RefactoringTool(fixers, options=options)
+    r.refactor(files, write=True, doctests_only=doctests_only)
+
+
+class Mixin2to3:
+    """ Wrapper class for commands that run 2to3.
+    To configure 2to3, setup scripts may either change
+    the class variables, or inherit from this class
+    to override how 2to3 is invoked.
+    """
+    # provide list of fixers to run;
+    # defaults to all from lib2to3.fixes
+    fixer_names = None
+
+    # options dictionary
+    options = None
+
+    # list of fixers to invoke even though they are marked as explicit
+    explicit = None
+
+    def run_2to3(self, files, doctests_only=False):
+        """ Issues a call to util.run_2to3. """
+        return run_2to3(files, doctests_only, self.fixer_names,
+                        self.options, self.explicit)
+
+RICH_GLOB = re.compile(r'\{([^}]*)\}')
+_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
+_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
+
+
+def iglob(path_glob):
+    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
+    if _CHECK_RECURSIVE_GLOB.search(path_glob):
+        msg = """invalid glob %r: recursive glob "**" must be used alone"""
+        raise ValueError(msg % path_glob)
+    if _CHECK_MISMATCH_SET.search(path_glob):
+        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
+        raise ValueError(msg % path_glob)
+    return _iglob(path_glob)
+
+
+def _iglob(path_glob):
+    rich_path_glob = RICH_GLOB.split(path_glob, 1)
+    if len(rich_path_glob) > 1:
+        assert len(rich_path_glob) == 3, rich_path_glob
+        prefix, choices, suffix = rich_path_glob
+        for item in choices.split(','):
+            for path in _iglob(''.join((prefix, item, suffix))):
+                yield path
+    else:
+        if '**' not in path_glob:
+            for item in std_iglob(path_glob):
+                yield item
+        else:
+            prefix, radical = path_glob.split('**', 1)
+            if prefix == '':
+                prefix = '.'
+            if radical == '':
+                radical = '*'
+            else:
+                # support both slash and backslash as separators
+                radical = radical.lstrip('/')
+                radical = radical.lstrip('\\')
+            for path, dirs, files in os.walk(prefix):
+                path = os.path.normpath(path)
+                for fname in _iglob(os.path.join(path, radical)):
+                    yield fname
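
A short sketch of the two extensions iglob adds on top of the standard glob
module (the paths are illustrative):

    from packaging.util import iglob

    # brace sets expand like shell alternation:
    for path in iglob('docs/*.{rst,txt}'):   # docs/*.rst and docs/*.txt
        print(path)

    # '**' must stand alone between separators and recurses into
    # subdirectories via os.walk:
    for path in iglob('src/**/*.py'):
        print(path)
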
+
+
+def cfg_to_args(path='setup.cfg'):
+    """Compatibility helper to use setup.cfg in setup.py.
+
+    This function uses an existing setup.cfg to generate a dictionary of
+    keywords that can be used by distutils.core.setup(**kwargs).  It is used
+    by generate_setup_py.
+
+    *path* is the path to the setup.cfg file.  If it doesn't exist,
+    PackagingFileError is raised.
+    """
+    # We need to declare the following constants here so that it's easier to
+    # generate the setup.py afterwards, using inspect.getsource.
+
+    # XXX ** == needs testing
+    D1_D2_SETUP_ARGS = {"name": ("metadata",),
+                        "version": ("metadata",),
+                        "author": ("metadata",),
+                        "author_email": ("metadata",),
+                        "maintainer": ("metadata",),
+                        "maintainer_email": ("metadata",),
+                        "url": ("metadata", "home_page"),
+                        "description": ("metadata", "summary"),
+                        "long_description": ("metadata", "description"),
+                        "download-url": ("metadata",),
+                        "classifiers": ("metadata", "classifier"),
+                        "platforms": ("metadata", "platform"),  # **
+                        "license": ("metadata",),
+                        "requires": ("metadata", "requires_dist"),
+                        "provides": ("metadata", "provides_dist"),  # **
+                        "obsoletes": ("metadata", "obsoletes_dist"),  # **
+                        "packages": ("files",),
+                        "scripts": ("files",),
+                        "py_modules": ("files", "modules"),  # **
+                        }
+
+    MULTI_FIELDS = ("classifiers",
+                    "requires",
+                    "platforms",
+                    "packages",
+                    "scripts")
+
+    def has_get_option(config, section, option):
+        if config.has_option(section, option):
+            return config.get(section, option)
+        elif config.has_option(section, option.replace('_', '-')):
+            return config.get(section, option.replace('_', '-'))
+        else:
+            return False
+
+    # The real code starts here
+    config = RawConfigParser()
+    if not os.path.exists(path):
+        raise PackagingFileError("file '%s' does not exist" %
+                                 os.path.abspath(path))
+    config.read(path)
+
+    kwargs = {}
+    for arg in D1_D2_SETUP_ARGS:
+        if len(D1_D2_SETUP_ARGS[arg]) == 2:
+            # The distutils field name is different from packaging's
+            section, option = D1_D2_SETUP_ARGS[arg]
+
+        else:
+            # The distutils field name is the same as packaging's
+            section = D1_D2_SETUP_ARGS[arg][0]
+            option = arg
+
+        in_cfg_value = has_get_option(config, section, option)
+        if not in_cfg_value:
+            # There is no such option in the setup.cfg
+            if arg == "long_description":
+                filename = has_get_option(config, section, "description_file")
+                if filename:
+                    with open(filename) as fp:
+                        in_cfg_value = fp.read()
+            else:
+                continue
+
+        if arg in MULTI_FIELDS:
+            # support multiline options
+            in_cfg_value = in_cfg_value.strip().split('\n')
+
+        kwargs[arg] = in_cfg_value
+
+    return kwargs
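
As a sketch, given a minimal setup.cfg such as (contents illustrative):

    [metadata]
    name = spam
    version = 0.1
    summary = Spam and eggs

cfg_to_args() would map the packaging field names back to their distutils
equivalents and return roughly:

    {'name': 'spam', 'version': '0.1', 'description': 'Spam and eggs'}
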
+
+
+_SETUP_TMPL = """\
+# This script was automatically generated by packaging
+import os
+from distutils.core import setup
+from ConfigParser import RawConfigParser
+
+%(func)s
+
+setup(**cfg_to_args())
+"""
+
+
+def generate_setup_py():
+    """Generate a distutils compatible setup.py using an existing setup.cfg.
+
+    Raises a PackagingFileError when a setup.py already exists.
+    """
+    if os.path.exists("setup.py"):
+        raise PackagingFileError("a setup.py file alreadyexists")
+
+    with open("setup.py", "w", encoding='utf-8') as fp:
+        fp.write(_SETUP_TMPL % {'func': getsource(cfg_to_args)})
+
+
+# Taken from the pip project
+# https://github.com/pypa/pip/blob/master/pip/util.py
+def ask(message, options):
+    """Prompt the user with *message*; *options* contains allowed responses."""
+    while True:
+        response = input(message)
+        response = response.strip().lower()
+        if response not in options:
+            print('invalid response: %r' % response)
+            print('choose one of', ', '.join(repr(o) for o in options))
+        else:
+            return response
+
+
+def _parse_record_file(record_file):
+    distinfo, extra_metadata, installed = {}, [], []
+    # guard against a RECORD file that contains no egg-info entry
+    egginfo = metadata = distinfo_dir = None
+    with open(record_file, 'r') as rfile:
+        for path in rfile:
+            path = path.strip()
+            if path.endswith('egg-info') and os.path.isfile(path):
+                distinfo_dir = path.replace('egg-info', 'dist-info')
+                metadata = path
+                egginfo = path
+            elif path.endswith('egg-info') and os.path.isdir(path):
+                distinfo_dir = path.replace('egg-info', 'dist-info')
+                egginfo = path
+                for metadata_file in os.listdir(path):
+                    metadata_fpath = os.path.join(path, metadata_file)
+                    if metadata_file == 'PKG-INFO':
+                        metadata = metadata_fpath
+                    else:
+                        extra_metadata.append(metadata_fpath)
+            elif 'egg-info' in path and os.path.isfile(path):
+                # skip extra metadata files
+                continue
+            else:
+                installed.append(path)
+
+    distinfo['egginfo'] = egginfo
+    distinfo['metadata'] = metadata
+    distinfo['distinfo_dir'] = distinfo_dir
+    distinfo['installer_path'] = os.path.join(distinfo_dir, 'INSTALLER')
+    distinfo['metadata_path'] = os.path.join(distinfo_dir, 'METADATA')
+    distinfo['record_path'] = os.path.join(distinfo_dir, 'RECORD')
+    distinfo['requested_path'] = os.path.join(distinfo_dir, 'REQUESTED')
+    installed.extend([distinfo['installer_path'], distinfo['metadata_path']])
+    distinfo['installed'] = installed
+    distinfo['extra_metadata'] = extra_metadata
+    return distinfo
+
+
+def _write_record_file(record_path, installed_files):
+    with open(record_path, 'w', encoding='utf-8') as f:
+        writer = csv.writer(f, delimiter=',', lineterminator=os.linesep,
+                            quotechar='"')
+
+        for fpath in installed_files:
+            if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
+                # do not store size and md5 hash, as specified by PEP 376
+                writer.writerow((fpath, '', ''))
+            else:
+                md5_hash = hashlib.md5()
+                with open(fpath, 'rb') as fp:
+                    md5_hash.update(fp.read())
+                md5sum = md5_hash.hexdigest()
+                size = os.path.getsize(fpath)
+                writer.writerow((fpath, md5sum, size))
+
+        # add the RECORD file itself
+        writer.writerow((record_path, '', ''))
+    return record_path
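
For reference, a RECORD file produced this way contains one CSV row per
installed file, path then md5 then size (paths and values illustrative; the
hash shown is the well-known MD5 of an empty file):

    spam/__init__.py,d41d8cd98f00b204e9800998ecf8427e,0
    spam/__init__.pyc,,
    spam-0.1.dist-info/RECORD,,
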
+
+
+def egginfo_to_distinfo(record_file, installer=_DEFAULT_INSTALLER,
+                        requested=False, remove_egginfo=False):
+    """Create files and directories required for PEP 376
+
+    :param record_file: path to RECORD file as produced by setup.py --record
+    :param installer: installer name
+    :param requested: True if not installed as a dependency
+    :param remove_egginfo: whether to delete the original egg-info file or
+                           directory afterwards
+    """
+    distinfo = _parse_record_file(record_file)
+    distinfo_dir = distinfo['distinfo_dir']
+    if os.path.isdir(distinfo_dir) and not os.path.islink(distinfo_dir):
+        shutil.rmtree(distinfo_dir)
+    elif os.path.exists(distinfo_dir):
+        os.unlink(distinfo_dir)
+
+    os.makedirs(distinfo_dir)
+
+    # copy setuptools extra metadata files
+    if distinfo['extra_metadata']:
+        for path in distinfo['extra_metadata']:
+            shutil.copy2(path, distinfo_dir)
+            new_path = path.replace('egg-info', 'dist-info')
+            distinfo['installed'].append(new_path)
+
+    metadata_path = distinfo['metadata_path']
+    logger.info('creating %s', metadata_path)
+    shutil.copy2(distinfo['metadata'], metadata_path)
+
+    installer_path = distinfo['installer_path']
+    logger.info('creating %s', installer_path)
+    with open(installer_path, 'w') as f:
+        f.write(installer)
+
+    if requested:
+        requested_path = distinfo['requested_path']
+        logger.info('creating %s', requested_path)
+        open(requested_path, 'wb').close()
+        distinfo['installed'].append(requested_path)
+
+    record_path = distinfo['record_path']
+    logger.info('creating %s', record_path)
+    _write_record_file(record_path, distinfo['installed'])
+
+    if remove_egginfo:
+        egginfo = distinfo['egginfo']
+        logger.info('removing %s', egginfo)
+        if os.path.isfile(egginfo):
+            os.remove(egginfo)
+        else:
+            shutil.rmtree(egginfo)
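
A hedged usage sketch; the record file name and installer string are
placeholders, not values the module requires:

    from packaging.util import egginfo_to_distinfo

    # after something like: python setup.py install --record RECORD
    egginfo_to_distinfo('RECORD', installer='pip', requested=True)
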
+
+
+def _has_egg_info(srcdir):
+    if os.path.isdir(srcdir):
+        for item in os.listdir(srcdir):
+            full_path = os.path.join(srcdir, item)
+            if item.endswith('.egg-info') and os.path.isdir(full_path):
+                logger.info("found egg-info directory")
+                return True
+    logger.warning("no egg-info directory found")
+    return False
+
+
+def _has_setuptools_text(setup_py):
+    return _has_text(setup_py, 'setuptools')
+
+
+def _has_distutils_text(setup_py):
+    return _has_text(setup_py, 'distutils')
+
+
+def _has_text(setup_py, installer):
+    installer_pattern = re.compile('import {0}|from {0}'.format(installer))
+    with open(setup_py, 'r', encoding='utf-8') as setup:
+        for line in setup:
+            if installer_pattern.search(line):
+                logger.info("found %s text in setup.py", installer)
+                return True
+    logger.warning("no %s text found in setup.py", installer)
+    return False
+
+
+def _has_required_metadata(setup_cfg):
+    config = RawConfigParser()
+    config.read([setup_cfg], encoding='utf8')
+    return (config.has_section('metadata') and
+            'name' in config.options('metadata') and
+            'version' in config.options('metadata'))
+
+
+def _has_pkg_info(srcdir):
+    pkg_info = os.path.join(srcdir, 'PKG-INFO')
+    has_pkg_info = os.path.isfile(pkg_info)
+    if has_pkg_info:
+        logger.info("PKG-INFO file found")
+    else:
+        logger.warning("no PKG-INFO file found")
+    return has_pkg_info
+
+
+def _has_setup_py(srcdir):
+    setup_py = os.path.join(srcdir, 'setup.py')
+    if os.path.isfile(setup_py):
+        logger.info('setup.py file found')
+        return True
+    logger.warning("no setup.py file found")
+    return False
+
+
+def _has_setup_cfg(srcdir):
+    setup_cfg = os.path.join(srcdir, 'setup.cfg')
+    if os.path.isfile(setup_cfg):
+        logger.info('setup.cfg file found')
+        return True
+    logger.warning("no setup.cfg file found")
+    return False
+
+
+def is_setuptools(path):
+    """Check if the project is based on setuptools.
+
+    :param path: path to source directory containing a setup.py script.
+
+    Return True if the project requires setuptools to install, else False.
+    """
+    srcdir = os.path.abspath(path)
+    setup_py = os.path.join(srcdir, 'setup.py')
+
+    return _has_setup_py(srcdir) and (_has_egg_info(srcdir) or
+                                      _has_setuptools_text(setup_py))
+
+
+def is_distutils(path):
+    """Check if the project is based on distutils.
+
+    :param path: path to source directory containing a setup.py script.
+
+    Return True if the project requires distutils to install, else False.
+    """
+    srcdir = os.path.abspath(path)
+    setup_py = os.path.join(srcdir, 'setup.py')
+
+    return _has_setup_py(srcdir) and (_has_pkg_info(srcdir) or
+                                      _has_distutils_text(setup_py))
+
+
+def is_packaging(path):
+    """Check if the project is based on packaging
+
+    :param path: path to source directory containing a setup.cfg file.
+
+    Return True if the project has a valid setup.cfg, else False.
+    """
+    srcdir = os.path.abspath(path)
+    setup_cfg = os.path.join(srcdir, 'setup.cfg')
+
+    return _has_setup_cfg(srcdir) and _has_required_metadata(setup_cfg)
+
+
+def get_install_method(path):
+    """Check if the project is based on packaging, setuptools, or distutils
+
+    :param path: path to source directory containing a setup.cfg file,
+                 or setup.py.
+
+    Returns a string representing the best install method to use.
+    """
+    if is_packaging(path):
+        return "packaging"
+    elif is_setuptools(path):
+        return "setuptools"
+    elif is_distutils(path):
+        return "distutils"
+    else:
+        raise InstallationException('Cannot detect install method')
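
A minimal sketch of dispatching on the detected method (the path is a
placeholder):

    from packaging.util import get_install_method

    method = get_install_method('/tmp/some-project')
    # -> 'packaging', 'setuptools' or 'distutils'; raises
    #    InstallationException when none can be detected
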
+
+
+# XXX to be replaced by shutil.copytree
+def copy_tree(src, dst, preserve_mode=True, preserve_times=True,
+              preserve_symlinks=False, update=False, verbose=True,
+              dry_run=False):
+    from distutils.file_util import copy_file
+
+    if not dry_run and not os.path.isdir(src):
+        raise PackagingFileError(
+              "cannot copy tree '%s': not a directory" % src)
+    try:
+        names = os.listdir(src)
+    except OSError as e:
+        errstr = e.strerror
+        if dry_run:
+            names = []
+        else:
+            raise PackagingFileError(
+                  "error listing files in '%s': %s" % (src, errstr))
+
+    if not dry_run:
+        _mkpath(dst, verbose=verbose)
+
+    outputs = []
+
+    for n in names:
+        src_name = os.path.join(src, n)
+        dst_name = os.path.join(dst, n)
+
+        if preserve_symlinks and os.path.islink(src_name):
+            link_dest = os.readlink(src_name)
+            if verbose >= 1:
+                logger.info("linking %s -> %s", dst_name, link_dest)
+            if not dry_run:
+                os.symlink(link_dest, dst_name)
+            outputs.append(dst_name)
+
+        elif os.path.isdir(src_name):
+            outputs.extend(
+                copy_tree(src_name, dst_name, preserve_mode,
+                          preserve_times, preserve_symlinks, update,
+                          verbose=verbose, dry_run=dry_run))
+        else:
+            copy_file(src_name, dst_name, preserve_mode,
+                      preserve_times, update, verbose=verbose,
+                      dry_run=dry_run)
+            outputs.append(dst_name)
+
+    return outputs
+
+# cache used by _mkpath() -- in addition to cheapening redundant calls,
+# it eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
+_path_created = set()
+
+
+# We don't use os.makedirs because it blows up if the directory already
+# exists (we want to silently succeed in that case).
+def _mkpath(name, mode=0o777, verbose=True, dry_run=False):
+    # Detect a common bug -- name is None
+    if not isinstance(name, str):
+        raise PackagingInternalError(
+              "mkpath: 'name' must be a string (got %r)" % (name,))
+
+    # XXX what's the better way to handle verbosity? print as we create
+    # each directory in the path (the current behaviour), or only announce
+    # the creation of the whole path? (quite easy to do the latter since
+    # we're not using a recursive algorithm)
+
+    name = os.path.normpath(name)
+    created_dirs = []
+    if os.path.isdir(name) or name == '':
+        return created_dirs
+    if os.path.abspath(name) in _path_created:
+        return created_dirs
+
+    head, tail = os.path.split(name)
+    tails = [tail]                      # stack of lone dirs to create
+
+    while head and tail and not os.path.isdir(head):
+        head, tail = os.path.split(head)
+        tails.insert(0, tail)          # push next higher dir onto stack
+
+    # now 'head' contains the deepest directory that already exists
+    # (that is, the child of 'head' in 'name' is the highest directory
+    # that does *not* exist)
+    for d in tails:
+        head = os.path.join(head, d)
+        abs_head = os.path.abspath(head)
+
+        if abs_head in _path_created:
+            continue
+
+        if verbose >= 1:
+            logger.info("creating %s", head)
+
+        if not dry_run:
+            try:
+                os.mkdir(head, mode)
+            except OSError as exc:
+                if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
+                    raise PackagingFileError(
+                          "could not create '%s': %s" % (head, exc.args[-1]))
+            created_dirs.append(head)
+
+        _path_created.add(abs_head)
+    return created_dirs
diff --git a/Lib/packaging/version.py b/Lib/packaging/version.py
new file mode 100644
index 0000000..0eaf80b
--- /dev/null
+++ b/Lib/packaging/version.py
@@ -0,0 +1,449 @@
+"""Implementation of the versioning scheme defined in PEP 386."""
+
+import re
+
+from packaging.errors import IrrationalVersionError, HugeMajorVersionNumError
+
+__all__ = ['NormalizedVersion', 'suggest_normalized_version',
+           'VersionPredicate', 'is_valid_version', 'is_valid_versions',
+           'is_valid_predicate']
+
+# A marker used in the second and third parts of the `parts` tuple, for
+# versions that don't have those segments, to sort properly. An example
+# of versions in sort order ('highest' last):
+#   1.0b1                 ((1,0), ('b',1), ('f',))
+#   1.0.dev345            ((1,0), ('f',),  ('dev', 345))
+#   1.0                   ((1,0), ('f',),  ('f',))
+#   1.0.post256.dev345    ((1,0), ('f',),  ('f', 'post', 256, 'dev', 345))
+#   1.0.post345           ((1,0), ('f',),  ('f', 'post', 345, 'f'))
+#                                   ^        ^                 ^
+#   'b' < 'f' ---------------------/         |                 |
+#                                            |                 |
+#   'dev' < 'f' < 'post' -------------------/                  |
+#                                                              |
+#   'dev' < 'f' ----------------------------------------------/
+# Other letters would do, but 'f' for 'final' is kind of nice.
+_FINAL_MARKER = ('f',)
+
+_VERSION_RE = re.compile(r'''
+    ^
+    (?P<version>\d+\.\d+)          # minimum 'N.N'
+    (?P<extraversion>(?:\.\d+)*)   # any number of extra '.N' segments
+    (?:
+        (?P<prerel>[abc]|rc)       # 'a'=alpha, 'b'=beta, 'c'=release candidate
+                                   # 'rc'= alias for release candidate
+        (?P<prerelversion>\d+(?:\.\d+)*)
+    )?
+    (?P<postdev>(\.post(?P<post>\d+))?(\.dev(?P<dev>\d+))?)?
+    $''', re.VERBOSE)
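
To make the groups concrete, here is a doctest-style sketch of how the
module-internal regex decomposes a fully dressed version string:

    >>> m = _VERSION_RE.search('1.2.3c1.post2.dev3')
    >>> m.group('version'), m.group('extraversion')
    ('1.2', '.3')
    >>> m.group('prerel'), m.group('prerelversion')
    ('c', '1')
    >>> m.group('post'), m.group('dev')
    ('2', '3')
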
+
+
+class NormalizedVersion:
+    """A rational version.
+
+    Good:
+        1.2         # equivalent to "1.2.0"
+        1.2.0
+        1.2a1
+        1.2.3a2
+        1.2.3b1
+        1.2.3c1
+        1.2.3.4
+        TODO: fill this out
+
+    Bad:
+        1           # minimum two numbers
+        1.2a        # release level must have a release serial
+        1.2.3b
+    """
+    def __init__(self, s, error_on_huge_major_num=True):
+        """Create a NormalizedVersion instance from a version string.
+
+        @param s {str} The version string.
+        @param error_on_huge_major_num {bool} Whether to consider an
+            apparent use of a year or full date as the major version number
+            an error. Default True. One of the observed patterns on PyPI before
+            the introduction of `NormalizedVersion` was version numbers like
+            this:
+                2009.01.03
+                20040603
+                2005.01
+            This guard is here to strongly encourage the package author to
+            use an alternate version, because a release deployed to PyPI
+            and, e.g., to downstream Linux package managers will forever
+            remove the possibility of using a version number like "1.0" (i.e.
+            where the major number is less than that huge major number).
+        """
+        self.is_final = True  # by default, consider a version as final.
+        self._parse(s, error_on_huge_major_num)
+
+    @classmethod
+    def from_parts(cls, version, prerelease=_FINAL_MARKER,
+                   devpost=_FINAL_MARKER):
+        return cls(cls.parts_to_str((version, prerelease, devpost)))
+
+    def _parse(self, s, error_on_huge_major_num=True):
+        """Parses a string version into parts."""
+        match = _VERSION_RE.search(s)
+        if not match:
+            raise IrrationalVersionError(s)
+
+        groups = match.groupdict()
+        parts = []
+
+        # main version
+        block = self._parse_numdots(groups['version'], s, False, 2)
+        extraversion = groups.get('extraversion')
+        if extraversion not in ('', None):
+            block += self._parse_numdots(extraversion[1:], s)
+        parts.append(tuple(block))
+
+        # prerelease
+        prerel = groups.get('prerel')
+        if prerel is not None:
+            block = [prerel]
+            block += self._parse_numdots(groups.get('prerelversion'), s,
+                                         pad_zeros_length=1)
+            parts.append(tuple(block))
+            self.is_final = False
+        else:
+            parts.append(_FINAL_MARKER)
+
+        # postdev
+        if groups.get('postdev'):
+            post = groups.get('post')
+            dev = groups.get('dev')
+            postdev = []
+            if post is not None:
+                postdev.extend((_FINAL_MARKER[0], 'post', int(post)))
+                if dev is None:
+                    postdev.append(_FINAL_MARKER[0])
+            if dev is not None:
+                postdev.extend(('dev', int(dev)))
+                self.is_final = False
+            parts.append(tuple(postdev))
+        else:
+            parts.append(_FINAL_MARKER)
+        self.parts = tuple(parts)
+        if error_on_huge_major_num and self.parts[0][0] > 1980:
+            raise HugeMajorVersionNumError("huge major version number, %r, "
+               "which might cause future problems: %r" % (self.parts[0][0], s))
+
+    def _parse_numdots(self, s, full_ver_str, drop_trailing_zeros=True,
+                       pad_zeros_length=0):
+        """Parse 'N.N.N' sequences, return a list of ints.
+
+        @param s {str} 'N.N.N...' sequence to be parsed
+        @param full_ver_str {str} The full version string from which this
+            comes. Used for error strings.
+        @param drop_trailing_zeros {bool} Whether to drop trailing zeros
+            from the returned list. Default True.
+        @param pad_zeros_length {int} The length to which to pad the
+            returned list with zeros, if necessary. Default 0.
+        """
+        nums = []
+        for n in s.split("."):
+            if len(n) > 1 and n[0] == '0':
+                raise IrrationalVersionError("cannot have leading zero in "
+                    "version number segment: '%s' in %r" % (n, full_ver_str))
+            nums.append(int(n))
+        if drop_trailing_zeros:
+            while nums and nums[-1] == 0:
+                nums.pop()
+        while len(nums) < pad_zeros_length:
+            nums.append(0)
+        return nums
+
+    def __str__(self):
+        return self.parts_to_str(self.parts)
+
+    @classmethod
+    def parts_to_str(cls, parts):
+        """Transforms a version expressed in tuple into its string
+        representation."""
+        # XXX This doesn't check for invalid tuples
+        main, prerel, postdev = parts
+        s = '.'.join(str(v) for v in main)
+        if prerel is not _FINAL_MARKER:
+            s += prerel[0]
+            s += '.'.join(str(v) for v in prerel[1:])
+        if postdev and postdev is not _FINAL_MARKER:
+            if postdev[0] == 'f':
+                postdev = postdev[1:]
+            i = 0
+            while i < len(postdev):
+                if i % 2 == 0:
+                    s += '.'
+                s += str(postdev[i])
+                i += 1
+        return s
+
+    def __repr__(self):
+        return "%s('%s')" % (self.__class__.__name__, self)
+
+    def _cannot_compare(self, other):
+        raise TypeError("cannot compare %s and %s"
+                % (type(self).__name__, type(other).__name__))
+
+    def __eq__(self, other):
+        if not isinstance(other, NormalizedVersion):
+            self._cannot_compare(other)
+        return self.parts == other.parts
+
+    def __lt__(self, other):
+        if not isinstance(other, NormalizedVersion):
+            self._cannot_compare(other)
+        return self.parts < other.parts
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __gt__(self, other):
+        return not (self.__lt__(other) or self.__eq__(other))
+
+    def __le__(self, other):
+        return self.__eq__(other) or self.__lt__(other)
+
+    def __ge__(self, other):
+        return self.__eq__(other) or self.__gt__(other)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    def __hash__(self):
+        return hash(self.parts)
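
A short interactive sketch of the resulting ordering and normalization:

    >>> from packaging.version import NormalizedVersion
    >>> NormalizedVersion('1.0b1') < NormalizedVersion('1.0')
    True
    >>> NormalizedVersion('1.0').parts
    ((1, 0), ('f',), ('f',))
    >>> str(NormalizedVersion('1.2.0'))   # trailing zeros are dropped
    '1.2'
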
+
+
+def suggest_normalized_version(s):
+    """Suggest a normalized version close to the given version string.
+
+    If you have a version string that isn't rational (i.e. NormalizedVersion
+    doesn't like it) then you might be able to get an equivalent (or close)
+    rational version from this function.
+
+    This does a number of simple normalizations to the given string, based
+    on observation of versions currently in use on PyPI. Given a dump of
+    those versions during PyCon 2009, 4287 of them:
+    - 2312 (53.93%) match NormalizedVersion without change
+    - 3474 (81.04%) match when using this suggestion method
+
+    @param s {str} An irrational version string.
+    @returns A rational version string, or None if one could not be
+        determined.
+    """
+    try:
+        NormalizedVersion(s)
+        return s   # already rational
+    except IrrationalVersionError:
+        pass
+
+    rs = s.lower()
+
+    # part of this could use maketrans
+    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
+                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
+                       ('-pre', 'c'),
+                       ('-release', ''), ('.release', ''), ('-stable', ''),
+                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
+                       ('final', '')):
+        rs = rs.replace(orig, repl)
+
+    # if something ends with dev or pre, we add a 0
+    rs = re.sub(r"pre$", r"pre0", rs)
+    rs = re.sub(r"dev$", r"dev0", rs)
+
+    # if we have something like "b-2" or "a.2" at the end of the
+    # version, that is probably beta, alpha, etc.
+    # let's remove the dash or dot
+    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
+
+    # 1.0-dev-r371 -> 1.0.dev371
+    # 0.1-dev-r79 -> 0.1.dev79
+    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
+
+    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
+    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
+
+    # Clean: v0.3, v1.0
+    if rs.startswith('v'):
+        rs = rs[1:]
+
+    # Clean leading '0's on numbers.
+    # TODO: unintended side-effect on, e.g., "2003.05.09"
+    # PyPI stats: 77 (~2%) better
+    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
+
+    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
+    # zero.
+    # PyPI stats: 245 (7.56%) better
+    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
+
+    # the 'dev-rNNN' tag is a dev tag
+    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
+
+    # clean the - when used as a pre delimiter
+    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
+
+    # a terminal "dev" or "devel" can be changed into ".dev0"
+    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
+
+    # a terminal "dev" can be changed into ".dev0"
+    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
+
+    # a terminal "final" or "stable" can be removed
+    rs = re.sub(r"(final|stable)$", "", rs)
+
+    # The 'r' and the '-' tags are post release tags
+    #   0.4a1.r10       ->  0.4a1.post10
+    #   0.9.33-17222    ->  0.9.33.post17222
+    #   0.9.33-r17222   ->  0.9.33.post17222
+    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
+
+    # Clean 'r' instead of 'dev' usage:
+    #   0.9.33+r17222   ->  0.9.33.dev17222
+    #   1.0dev123       ->  1.0.dev123
+    #   1.0.git123      ->  1.0.dev123
+    #   1.0.bzr123      ->  1.0.dev123
+    #   0.1a0dev.123    ->  0.1a0.dev123
+    # PyPI stats:  ~150 (~4%) better
+    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
+
+    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
+    #   0.2.pre1        ->  0.2c1
+    #   0.2-c1         ->  0.2c1
+    #   1.0preview123   ->  1.0c123
+    # PyPI stats: ~21 (0.62%) better
+    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
+
+    # Tcl/Tk uses "px" for their post release markers
+    rs = re.sub(r"p(\d+)$", r".post\1", rs)
+
+    try:
+        NormalizedVersion(rs)
+        return rs   # already rational
+    except IrrationalVersionError:
+        pass
+    return None
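
A few illustrative inputs and what the rules above turn them into:

    >>> from packaging.version import suggest_normalized_version
    >>> suggest_normalized_version('1.0-alpha2')
    '1.0a2'
    >>> suggest_normalized_version('v1.0')
    '1.0'
    >>> suggest_normalized_version('1.0-r371')
    '1.0.post371'
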
+
+
+# A predicate is: "ProjectName (VERSION1, VERSION2, ...)"
+_PREDICATE = re.compile(r"(?i)^\s*(\w[\s\w-]*(?:\.\w*)*)(.*)")
+_VERSIONS = re.compile(r"^\s*\((?P<versions>.*)\)\s*$|^\s*"
+                       r"(?P<versions2>.*)\s*$")
+_PLAIN_VERSIONS = re.compile(r"^\s*(.*)\s*$")
+_SPLIT_CMP = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
+
+
+def _split_predicate(predicate):
+    match = _SPLIT_CMP.match(predicate)
+    if match is None:
+        # no comparison operator given, assume "=="
+        comp, version = '==', predicate
+    else:
+        comp, version = match.groups()
+    return comp, NormalizedVersion(version)
+
+
+class VersionPredicate:
+    """Defines a predicate: ProjectName (>ver1,ver2, ..)"""
+
+    _operators = {"<": lambda x, y: x < y,
+                  ">": lambda x, y: x > y,
+                  "<=": lambda x, y: str(x).startswith(str(y)) or x < y,
+                  ">=": lambda x, y: str(x).startswith(str(y)) or x > y,
+                  "==": lambda x, y: str(x).startswith(str(y)),
+                  "!=": lambda x, y: not str(x).startswith(str(y)),
+                  }
+
+    def __init__(self, predicate):
+        self._string = predicate
+        predicate = predicate.strip()
+        match = _PREDICATE.match(predicate)
+        if match is None:
+            raise ValueError('Bad predicate "%s"' % predicate)
+
+        name, predicates = match.groups()
+        self.name = name.strip()
+        self.predicates = []
+        if predicates is None:
+            return
+
+        predicates = _VERSIONS.match(predicates.strip())
+        if predicates is None:
+            return
+
+        predicates = predicates.groupdict()
+        if predicates['versions'] is not None:
+            versions = predicates['versions']
+        else:
+            versions = predicates.get('versions2')
+
+        if versions is not None:
+            for version in versions.split(','):
+                if version.strip() == '':
+                    continue
+                self.predicates.append(_split_predicate(version))
+
+    def match(self, version):
+        """Check if the provided version matches the predicates."""
+        if isinstance(version, str):
+            version = NormalizedVersion(version)
+        for operator, predicate in self.predicates:
+            if not self._operators[operator](version, predicate):
+                return False
+        return True
+
+    def __repr__(self):
+        return self._string
+
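A quick sketch of predicate parsing and matching:

    >>> from packaging.version import VersionPredicate
    >>> p = VersionPredicate('ProjectName (>=1.0, <2.0)')
    >>> p.name
    'ProjectName'
    >>> p.match('1.4')
    True
    >>> p.match('2.0')
    False
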
+
+class _Versions(VersionPredicate):
+    def __init__(self, predicate):
+        predicate = predicate.strip()
+        match = _PLAIN_VERSIONS.match(predicate)
+        self.name = None
+        predicates = match.groups()[0]
+        self.predicates = [_split_predicate(pred.strip())
+                           for pred in predicates.split(',')]
+
+
+class _Version(VersionPredicate):
+    def __init__(self, predicate):
+        predicate = predicate.strip()
+        match = _PLAIN_VERSIONS.match(predicate)
+        self.name = None
+        self.predicates = _split_predicate(match.groups()[0])
+
+
+def is_valid_predicate(predicate):
+    try:
+        VersionPredicate(predicate)
+    except (ValueError, IrrationalVersionError):
+        return False
+    else:
+        return True
+
+
+def is_valid_versions(predicate):
+    try:
+        _Versions(predicate)
+    except (ValueError, IrrationalVersionError):
+        return False
+    else:
+        return True
+
+
+def is_valid_version(predicate):
+    try:
+        _Version(predicate)
+    except (ValueError, IrrationalVersionError):
+        return False
+    else:
+        return True
+
+
+def get_version_predicate(requirements):
+    """Return a VersionPredicate object, from a string or an already
+    existing object.
+    """
+    if isinstance(requirements, str):
+        requirements = VersionPredicate(requirements)
+    return requirements
diff --git a/Lib/platform.py b/Lib/platform.py
index 1e4abe6..e2a74fe 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -361,6 +361,8 @@
 
     """ Portable popen() interface.
     """
+    import warnings
+    warnings.warn('use os.popen instead', DeprecationWarning, stacklevel=2)
     return os.popen(cmd, mode, bufsize)
 
 def _norm_version(version, build=''):
diff --git a/Lib/random.py b/Lib/random.py
index 0794f94..49b7c93 100644
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -114,7 +114,7 @@
         if version == 2:
             if isinstance(a, (str, bytes, bytearray)):
                 if isinstance(a, str):
-                    a = a.encode("utf-8")
+                    a = a.encode()
                 a += _sha512(a).digest()
                 a = int.from_bytes(a, 'big')
 
diff --git a/Lib/shutil.py b/Lib/shutil.py
index aafe04e..d2e2dc5 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -311,12 +311,18 @@
     """
     real_dst = dst
     if os.path.isdir(dst):
+        if _samefile(src, dst):
+            # We might be on a case-insensitive filesystem,
+            # perform the rename anyway.
+            os.rename(src, dst)
+            return
+
         real_dst = os.path.join(dst, _basename(src))
         if os.path.exists(real_dst):
             raise Error("Destination path '%s' already exists" % real_dst)
     try:
         os.rename(src, real_dst)
-    except OSError:
+    except OSError as exc:
         if os.path.isdir(src):
             if _destinsrc(src, dst):
                 raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
diff --git a/Lib/smtplib.py b/Lib/smtplib.py
index 213138c..f724b9f 100755
--- a/Lib/smtplib.py
+++ b/Lib/smtplib.py
@@ -231,6 +231,7 @@
     ehlo_msg = "ehlo"
     ehlo_resp = None
     does_esmtp = 0
+    default_port = SMTP_PORT
 
     def __init__(self, host='', port=0, local_hostname=None,
                  timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
@@ -246,7 +247,6 @@
         """
         self.timeout = timeout
         self.esmtp_features = {}
-        self.default_port = SMTP_PORT
         if host:
             (code, msg) = self.connect(host, port)
             if code != 220:
@@ -635,7 +635,7 @@
         # We could not login sucessfully. Return result of last attempt.
         raise SMTPAuthenticationError(code, resp)
 
-    def starttls(self, keyfile=None, certfile=None):
+    def starttls(self, keyfile=None, certfile=None, context=None):
         """Puts the connection to the SMTP server into TLS mode.
 
         If there has been no previous EHLO or HELO command this session, this
@@ -659,7 +659,16 @@
         if resp == 220:
             if not _have_ssl:
                 raise RuntimeError("No SSL support included in this Python")
-            self.sock = ssl.wrap_socket(self.sock, keyfile, certfile)
+            if context is not None and keyfile is not None:
+                raise ValueError("context and keyfile arguments are mutually "
+                                 "exclusive")
+            if context is not None and certfile is not None:
+                raise ValueError("context and certfile arguments are mutually "
+                                 "exclusive")
+            if context is not None:
+                self.sock = context.wrap_socket(self.sock)
+            else:
+                self.sock = ssl.wrap_socket(self.sock, keyfile, certfile)
             self.file = SSLFakeFile(self.sock)
             # RFC 3207:
             # The client MUST discard any knowledge obtained from
@@ -815,21 +824,35 @@
         support). If host is not specified, '' (the local host) is used. If port is
         omitted, the standard SMTP-over-SSL port (465) is used. keyfile and certfile
         are also optional - they can contain a PEM formatted private key and
-        certificate chain file for the SSL connection.
+        certificate chain file for the SSL connection. context is also
+        optional and can contain an SSLContext; it is an alternative to
+        keyfile and certfile, both of which must be None if it is given.
         """
+
+        default_port = SMTP_SSL_PORT
+
         def __init__(self, host='', port=0, local_hostname=None,
                      keyfile=None, certfile=None,
-                     timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
+                     timeout=socket._GLOBAL_DEFAULT_TIMEOUT, context=None):
+            if context is not None and keyfile is not None:
+                raise ValueError("context and keyfile arguments are mutually "
+                                 "exclusive")
+            if context is not None and certfile is not None:
+                raise ValueError("context and certfile arguments are mutually "
+                                 "exclusive")
             self.keyfile = keyfile
             self.certfile = certfile
+            self.context = context
             SMTP.__init__(self, host, port, local_hostname, timeout)
-            self.default_port = SMTP_SSL_PORT
 
         def _get_socket(self, host, port, timeout):
             if self.debuglevel > 0:
                 print('connect:', (host, port), file=stderr)
             new_socket = socket.create_connection((host, port), timeout)
-            new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
+            if self.context is not None:
+                new_socket = self.context.wrap_socket(new_socket)
+            else:
+                new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
             self.file = SSLFakeFile(new_socket)
             return new_socket
 
diff --git a/Lib/sqlite3/test/regression.py b/Lib/sqlite3/test/regression.py
index 7d0553d..c7551e3 100644
--- a/Lib/sqlite3/test/regression.py
+++ b/Lib/sqlite3/test/regression.py
@@ -281,6 +281,28 @@
             # Lone surrogate cannot be encoded to the default encoding (utf8)
             "\uDC80", collation_cb)
 
+    def CheckRecursiveCursorUse(self):
+        """
+        http://bugs.python.org/issue10811
+
+        Recursively using a cursor, such as when reusing it from a generator,
+        led to segfaults.  Now we catch recursive cursor usage and raise a
+        ProgrammingError.
+        """
+        con = sqlite.connect(":memory:")
+
+        cur = con.cursor()
+        cur.execute("create table a (bar)")
+        cur.execute("create table b (baz)")
+
+        def foo():
+            cur.execute("insert into a (bar) values (?)", (1,))
+            yield 1
+
+        with self.assertRaises(sqlite.ProgrammingError):
+            cur.executemany("insert into b (baz) values (?)",
+                            ((i,) for i in foo()))
+
+
 def suite():
     regression_suite = unittest.makeSuite(RegressionTests, "Check")
     return unittest.TestSuite((regression_suite,))
diff --git a/Lib/ssl.py b/Lib/ssl.py
index 6d3828d..1a7f599 100644
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -62,10 +62,8 @@
 from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
 from _ssl import _SSLContext, SSLError
 from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
-from _ssl import (PROTOCOL_SSLv2, PROTOCOL_SSLv3, PROTOCOL_SSLv23,
-                  PROTOCOL_TLSv1)
 from _ssl import OP_ALL, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1
-from _ssl import RAND_status, RAND_egd, RAND_add
+from _ssl import RAND_status, RAND_egd, RAND_add, RAND_bytes, RAND_pseudo_bytes
 from _ssl import (
     SSL_ERROR_ZERO_RETURN,
     SSL_ERROR_WANT_READ,
@@ -78,6 +76,19 @@
     SSL_ERROR_INVALID_ERROR_CODE,
     )
 from _ssl import HAS_SNI
+from _ssl import (PROTOCOL_SSLv3, PROTOCOL_SSLv23,
+                  PROTOCOL_TLSv1)
+_PROTOCOL_NAMES = {
+    PROTOCOL_TLSv1: "TLSv1",
+    PROTOCOL_SSLv23: "SSLv23",
+    PROTOCOL_SSLv3: "SSLv3",
+}
+try:
+    from _ssl import PROTOCOL_SSLv2
+except ImportError:
+    pass
+else:
+    _PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2"
 
 from socket import getnameinfo as _getnameinfo
 from socket import error as socket_error
@@ -122,8 +133,9 @@
             if _dnsname_to_pat(value).match(hostname):
                 return
             dnsnames.append(value)
-    if not san:
-        # The subject is only checked when subjectAltName is empty
+    if not dnsnames:
+        # The subject is only checked when there is no dNSName entry
+        # in subjectAltName
         for sub in cert.get('subject', ()):
             for key, value in sub:
                 # XXX according to RFC 2818, the most specific Common Name
@@ -431,7 +443,7 @@
         finally:
             self.settimeout(timeout)
 
-    def _real_connect(self, addr, return_errno):
+    def _real_connect(self, addr, connect_ex):
         if self.server_side:
             raise ValueError("can't connect in server-side mode")
         # Here we assume that the socket is client-side, and not
@@ -440,17 +452,19 @@
             raise ValueError("attempt to connect already-connected SSLSocket!")
         self._sslobj = self.context._wrap_socket(self, False, self.server_hostname)
         try:
-            socket.connect(self, addr)
-            if self.do_handshake_on_connect:
-                self.do_handshake()
-        except socket_error as e:
-            if return_errno:
-                return e.errno
+            if connect_ex:
+                rc = socket.connect_ex(self, addr)
             else:
-                self._sslobj = None
-                raise e
-        self._connected = True
-        return 0
+                rc = None
+                socket.connect(self, addr)
+            if not rc:
+                if self.do_handshake_on_connect:
+                    self.do_handshake()
+                self._connected = True
+            return rc
+        except socket_error:
+            self._sslobj = None
+            raise
 
     def connect(self, addr):
         """Connects to remote ADDR, and then wraps the connection in
@@ -551,13 +565,4 @@
     return DER_cert_to_PEM_cert(dercert)
 
 def get_protocol_name(protocol_code):
-    if protocol_code == PROTOCOL_TLSv1:
-        return "TLSv1"
-    elif protocol_code == PROTOCOL_SSLv23:
-        return "SSLv23"
-    elif protocol_code == PROTOCOL_SSLv2:
-        return "SSLv2"
-    elif protocol_code == PROTOCOL_SSLv3:
-        return "SSLv3"
-    else:
-        return "<unknown>"
+    return _PROTOCOL_NAMES.get(protocol_code, '<unknown>')
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index a7c68a5..c5128d8 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -796,6 +796,8 @@
             self.stderr.close()
         if self.stdin:
             self.stdin.close()
+        # Wait for the process to terminate, to avoid zombies.
+        self.wait()
 
     def __del__(self, _maxsize=sys.maxsize, _active=_active):
         if not self._child_created:
@@ -815,8 +817,8 @@
     def communicate(self, input=None, timeout=None):
         """Interact with process: Send data to stdin.  Read data from
         stdout and stderr, until end-of-file is reached.  Wait for
-        process to terminate.  The optional input argument should be a
-        string to be sent to the child process, or None, if no data
+        process to terminate.  The optional input argument should be
+        bytes to be sent to the child process, or None, if no data
         should be sent to the child.
 
         communicate() returns a tuple (stdout, stderr)."""
diff --git a/Lib/sysconfig.cfg b/Lib/sysconfig.cfg
new file mode 100644
index 0000000..1f6b8bc
--- /dev/null
+++ b/Lib/sysconfig.cfg
@@ -0,0 +1,111 @@
+[globals]
+# These are the useful categories that are sometimes referenced at runtime,
+# using packaging.resources.get_file:
+# Configuration files
+config = {confdir}/{distribution.name}
+# Non-writable data that is independent of architecture (images, many xml/text files)
+appdata = {datadir}/{distribution.name}
+# Non-writable data that is architecture-dependent (some binary data formats)
+appdata.arch = {libdir}/{distribution.name}
+# Data, written by the package, that must be preserved (databases)
+appdata.persistent = {statedir}/lib/{distribution.name}
+# Data, written by the package, that can be safely discarded (cache)
+appdata.disposable = {statedir}/cache/{distribution.name}
+# Help or documentation files referenced at runtime
+help = {datadir}/{distribution.name}
+icon = {datadir}/pixmaps
+scripts = {base}/bin
+
+# Non-runtime files.  These are valid categories for marking files for
+# install, but they should not be referenced by the app at runtime:
+# Help or documentation files not referenced by the package at runtime
+doc = {datadir}/doc/{distribution.name}
+# GNU info documentation files
+info = {datadir}/info
+# man pages
+man = {datadir}/man
+
+[posix_prefix]
+# Configuration directories.  Some of these come straight out of the
+# configure script.  They are for implementing the other variables, not to
+# be used directly in [resource_locations].
+confdir = /etc
+datadir = /usr/share
+libdir = /usr/lib  ; or /usr/lib64 on a multilib system
+statedir = /var
+# User resource directory
+local = ~/.local/{distribution.name}
+
+stdlib = {base}/lib/python{py_version_short}
+platstdlib = {platbase}/lib/python{py_version_short}
+purelib = {base}/lib/python{py_version_short}/site-packages
+platlib = {platbase}/lib/python{py_version_short}/site-packages
+include = {base}/include/python{py_version_short}{abiflags}
+platinclude = {platbase}/include/python{py_version_short}{abiflags}
+data = {base}
+
+[posix_home]
+stdlib = {base}/lib/python
+platstdlib = {base}/lib/python
+purelib = {base}/lib/python
+platlib = {base}/lib/python
+include = {base}/include/python
+platinclude = {base}/include/python
+scripts = {base}/bin
+data = {base}
+
+[nt]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2_home]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[nt_user]
+stdlib = {userbase}/Python{py_version_nodot}
+platstdlib = {userbase}/Python{py_version_nodot}
+purelib = {userbase}/Python{py_version_nodot}/site-packages
+platlib = {userbase}/Python{py_version_nodot}/site-packages
+include = {userbase}/Python{py_version_nodot}/Include
+scripts = {userbase}/Scripts
+data = {userbase}
+
+[posix_user]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[osx_framework_user]
+stdlib = {userbase}/lib/python
+platstdlib = {userbase}/lib/python
+purelib = {userbase}/lib/python/site-packages
+platlib = {userbase}/lib/python/site-packages
+include = {userbase}/include
+scripts = {userbase}/bin
+data = {userbase}
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index 3b0ca85..59073f4 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -1,9 +1,10 @@
-"""Provide access to Python's configuration information.
+"""Access to Python's configuration information."""
 
-"""
-import sys
 import os
+import re
+import sys
 from os.path import pardir, realpath
+from configparser import RawConfigParser
 
 __all__ = [
     'get_config_h_filename',
@@ -17,91 +18,51 @@
     'get_python_version',
     'get_scheme_names',
     'parse_config_h',
-    ]
+]
 
-_INSTALL_SCHEMES = {
-    'posix_prefix': {
-        'stdlib': '{base}/lib/python{py_version_short}',
-        'platstdlib': '{platbase}/lib/python{py_version_short}',
-        'purelib': '{base}/lib/python{py_version_short}/site-packages',
-        'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
-        'include':
-            '{base}/include/python{py_version_short}{abiflags}',
-        'platinclude':
-            '{platbase}/include/python{py_version_short}{abiflags}',
-        'scripts': '{base}/bin',
-        'data': '{base}',
-        },
-    'posix_home': {
-        'stdlib': '{base}/lib/python',
-        'platstdlib': '{base}/lib/python',
-        'purelib': '{base}/lib/python',
-        'platlib': '{base}/lib/python',
-        'include': '{base}/include/python',
-        'platinclude': '{base}/include/python',
-        'scripts': '{base}/bin',
-        'data'   : '{base}',
-        },
-    'nt': {
-        'stdlib': '{base}/Lib',
-        'platstdlib': '{base}/Lib',
-        'purelib': '{base}/Lib/site-packages',
-        'platlib': '{base}/Lib/site-packages',
-        'include': '{base}/Include',
-        'platinclude': '{base}/Include',
-        'scripts': '{base}/Scripts',
-        'data'   : '{base}',
-        },
-    'os2': {
-        'stdlib': '{base}/Lib',
-        'platstdlib': '{base}/Lib',
-        'purelib': '{base}/Lib/site-packages',
-        'platlib': '{base}/Lib/site-packages',
-        'include': '{base}/Include',
-        'platinclude': '{base}/Include',
-        'scripts': '{base}/Scripts',
-        'data'   : '{base}',
-        },
-    'os2_home': {
-        'stdlib': '{userbase}/lib/python{py_version_short}',
-        'platstdlib': '{userbase}/lib/python{py_version_short}',
-        'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
-        'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
-        'include': '{userbase}/include/python{py_version_short}',
-        'scripts': '{userbase}/bin',
-        'data'   : '{userbase}',
-        },
-    'nt_user': {
-        'stdlib': '{userbase}/Python{py_version_nodot}',
-        'platstdlib': '{userbase}/Python{py_version_nodot}',
-        'purelib': '{userbase}/Python{py_version_nodot}/site-packages',
-        'platlib': '{userbase}/Python{py_version_nodot}/site-packages',
-        'include': '{userbase}/Python{py_version_nodot}/Include',
-        'scripts': '{userbase}/Scripts',
-        'data'   : '{userbase}',
-        },
-    'posix_user': {
-        'stdlib': '{userbase}/lib/python{py_version_short}',
-        'platstdlib': '{userbase}/lib/python{py_version_short}',
-        'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
-        'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
-        'include': '{userbase}/include/python{py_version_short}',
-        'scripts': '{userbase}/bin',
-        'data'   : '{userbase}',
-        },
-    'osx_framework_user': {
-        'stdlib': '{userbase}/lib/python',
-        'platstdlib': '{userbase}/lib/python',
-        'purelib': '{userbase}/lib/python/site-packages',
-        'platlib': '{userbase}/lib/python/site-packages',
-        'include': '{userbase}/include',
-        'scripts': '{userbase}/bin',
-        'data'   : '{userbase}',
-        },
-    }
+# let's read the configuration file
+# XXX _CONFIG_DIR will be set by the Makefile later
+_CONFIG_DIR = os.path.normpath(os.path.dirname(__file__))
+_CONFIG_FILE = os.path.join(_CONFIG_DIR, 'sysconfig.cfg')
+_SCHEMES = RawConfigParser()
+_SCHEMES.read(_CONFIG_FILE)
+_VAR_REPL = re.compile(r'\{([^{]*?)\}')
 
-_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
-                'scripts', 'data')
+
+def _expand_globals(config):
+    if config.has_section('globals'):
+        globs = config.items('globals')
+    else:
+        globs = ()
+
+    sections = config.sections()
+    for section in sections:
+        if section == 'globals':
+            continue
+        for option, value in globs:
+            if config.has_option(section, option):
+                continue
+            config.set(section, option, value)
+    config.remove_section('globals')
+
+    # now expand the local variables defined in the cfg file
+    for section in config.sections():
+        variables = dict(config.items(section))
+
+        def _replacer(matchobj):
+            name = matchobj.group(1)
+            if name in variables:
+                return variables[name]
+            return matchobj.group(0)
+
+        for option, value in config.items(section):
+            config.set(section, option, _VAR_REPL.sub(_replacer, value))
+
+_expand_globals(_SCHEMES)
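+# _SCHEMES now holds one section per install scheme, with the [globals]
+# defaults merged in and {var} references to same-section options expanded.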
+
+# FIXME don't rely on sys.version here, its format is an implementation detail
+# of CPython, use sys.version_info or sys.hexversion
 _PY_VERSION = sys.version.split()[0]
 _PY_VERSION_SHORT = sys.version[:3]
 _PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
@@ -110,6 +71,7 @@
 _CONFIG_VARS = None
 _USER_BASE = None
 
+
 def _safe_realpath(path):
     try:
         return realpath(path)
@@ -132,6 +94,7 @@
 if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
     _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
 
+
 def is_python_build():
     for fn in ("Setup.dist", "Setup.local"):
         if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
@@ -142,17 +105,25 @@
 
 if _PYTHON_BUILD:
     for scheme in ('posix_prefix', 'posix_home'):
-        _INSTALL_SCHEMES[scheme]['include'] = '{srcdir}/Include'
-        _INSTALL_SCHEMES[scheme]['platinclude'] = '{projectbase}/.'
+        _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
+        _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')
 
-def _subst_vars(s, local_vars):
-    try:
-        return s.format(**local_vars)
-    except KeyError:
-        try:
-            return s.format(**os.environ)
-        except KeyError as var:
-            raise AttributeError('{%s}' % var)
+
+def _subst_vars(path, local_vars):
+    """In the string `path`, replace tokens like {some.thing} with the
+    corresponding value from the map `local_vars`.
+
+    If there is no corresponding value, leave the token unchanged.
+    """
+    def _replacer(matchobj):
+        name = matchobj.group(1)
+        if name in local_vars:
+            return local_vars[name]
+        elif name in os.environ:
+            return os.environ[name]
+        return matchobj.group(0)
+    return _VAR_REPL.sub(_replacer, path)
+
 
 def _extend_dict(target_dict, other_dict):
     target_keys = target_dict.keys()
@@ -161,41 +132,63 @@
             continue
         target_dict[key] = value
 
+
 def _expand_vars(scheme, vars):
     res = {}
     if vars is None:
         vars = {}
     _extend_dict(vars, get_config_vars())
 
-    for key, value in _INSTALL_SCHEMES[scheme].items():
+    for key, value in _SCHEMES.items(scheme):
         if os.name in ('posix', 'nt'):
             value = os.path.expanduser(value)
         res[key] = os.path.normpath(_subst_vars(value, vars))
     return res
 
+
+def format_value(value, vars):
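+    # Like _subst_vars() above, but without the os.environ fallback:
+    # expand {name} tokens from `vars` and leave unknown tokens in place.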
+    def _replacer(matchobj):
+        name = matchobj.group(1)
+        if name in vars:
+            return vars[name]
+        return matchobj.group(0)
+    return _VAR_REPL.sub(_replacer, value)
+
+
 def _get_default_scheme():
     if os.name == 'posix':
         # the default scheme for posix is posix_prefix
         return 'posix_prefix'
     return os.name
 
+
 def _getuserbase():
     env_base = os.environ.get("PYTHONUSERBASE", None)
+
     def joinuser(*args):
         return os.path.expanduser(os.path.join(*args))
 
     # what about 'os2emx', 'riscos' ?
     if os.name == "nt":
         base = os.environ.get("APPDATA") or "~"
-        return env_base if env_base else joinuser(base, "Python")
+        if env_base:
+            return env_base
+        else:
+            return joinuser(base, "Python")
 
     if sys.platform == "darwin":
         framework = get_config_var("PYTHONFRAMEWORK")
         if framework:
-            return env_base if env_base else joinuser("~", "Library", framework, "%d.%d"%(
-                sys.version_info[:2]))
+            if env_base:
+                return env_base
+            else:
+                return joinuser("~", "Library", framework, "%d.%d" %
+                                sys.version_info[:2])
 
-    return env_base if env_base else joinuser("~", ".local")
+    if env_base:
+        return env_base
+    else:
+        return joinuser("~", ".local")
 
 
 def _parse_makefile(filename, vars=None):
@@ -205,7 +198,6 @@
     optional dictionary is passed in as the second argument, it is
     used instead of a new dictionary.
     """
-    import re
     # Regexes needed for parsing Makefile (and similar syntaxes,
     # like old-style Setup files).
     _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
@@ -267,7 +259,8 @@
                     item = os.environ[n]
 
                 elif n in renamed_variables:
-                    if name.startswith('PY_') and name[3:] in renamed_variables:
+                    if (name.startswith('PY_') and
+                        name[3:] in renamed_variables):
                         item = ""
 
                     elif 'PY_' + n in notdone:
@@ -300,7 +293,6 @@
                             if name not in done:
                                 done[name] = value
 
-
             else:
                 # bogus variable reference; just drop it since we can't deal
                 variables.remove(name)
@@ -319,9 +311,11 @@
     """Return the path of the Makefile."""
     if _PYTHON_BUILD:
         return os.path.join(_PROJECT_BASE, "Makefile")
-    return os.path.join(get_path('stdlib'),
-                        'config-{}{}'.format(_PY_VERSION_SHORT, sys.abiflags),
-                        'Makefile')
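+    # sys.abiflags only exists on POSIX builds of Python 3.2+, so fall
+    # back to the unadorned 'config' directory when it is absent.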
+    if hasattr(sys, 'abiflags'):
+        config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
+    else:
+        config_dir_name = 'config'
+    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
 
 
 def _init_posix(vars):
@@ -345,27 +339,13 @@
         if hasattr(e, "strerror"):
             msg = msg + " (%s)" % e.strerror
         raise IOError(msg)
-    # On MacOSX we need to check the setting of the environment variable
-    # MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
-    # it needs to be compatible.
-    # If it isn't set we set it to the configure-time value
-    if sys.platform == 'darwin' and 'MACOSX_DEPLOYMENT_TARGET' in vars:
-        cfg_target = vars['MACOSX_DEPLOYMENT_TARGET']
-        cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
-        if cur_target == '':
-            cur_target = cfg_target
-            os.putenv('MACOSX_DEPLOYMENT_TARGET', cfg_target)
-        elif (list(map(int, cfg_target.split('.'))) >
-              list(map(int, cur_target.split('.')))):
-            msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: now "%s" but "%s" '
-                   'during configure' % (cur_target, cfg_target))
-            raise IOError(msg)
     # On AIX, there are wrong paths to the linker scripts in the Makefile
     # -- these paths are relative to the Python source, but when installed
     # the scripts are in another directory.
     if _PYTHON_BUILD:
         vars['LDSHARED'] = vars['BLDSHARED']
 
+
 def _init_non_posix(vars):
     """Initialize the module as appropriate for NT"""
     # set basic install directories
@@ -389,7 +369,6 @@
     optional dictionary is passed in as the second argument, it is
     used instead of a new dictionary.
     """
-    import re
     if vars is None:
         vars = {}
     define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
@@ -402,8 +381,10 @@
         m = define_rx.match(line)
         if m:
             n, v = m.group(1, 2)
-            try: v = int(v)
-            except ValueError: pass
+            try:
+                v = int(v)
+            except ValueError:
+                pass
             vars[n] = v
         else:
             m = undef_rx.match(line)
@@ -411,6 +392,7 @@
                 vars[m.group(1)] = 0
     return vars
 
+
 def get_config_h_filename():
     """Return the path of pyconfig.h."""
     if _PYTHON_BUILD:
@@ -422,15 +404,17 @@
         inc_dir = get_path('platinclude')
     return os.path.join(inc_dir, 'pyconfig.h')
 
+
 def get_scheme_names():
     """Return a tuple containing the schemes names."""
-    schemes = list(_INSTALL_SCHEMES.keys())
-    schemes.sort()
-    return tuple(schemes)
+    return tuple(sorted(_SCHEMES.sections()))
+
 
 def get_path_names():
     """Return a tuple containing the paths names."""
-    return _SCHEME_KEYS
+    # XXX see if we want a static list
+    return _SCHEMES.options('posix_prefix')
+
 
 def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
     """Return a mapping containing an install scheme.
@@ -441,7 +425,8 @@
     if expand:
         return _expand_vars(scheme, vars)
     else:
-        return _INSTALL_SCHEMES[scheme]
+        return dict(_SCHEMES.items(scheme))
+
 
 def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
     """Return a path corresponding to the scheme.
@@ -450,6 +435,7 @@
     """
     return get_paths(scheme, vars, expand)[name]
 
+
 def get_config_vars(*args):
     """With no arguments, return a dictionary of all configuration
     variables relevant for the current platform.
@@ -460,13 +446,12 @@
     With arguments, return a list of values that result from looking up
     each argument in the configuration variable dictionary.
     """
-    import re
     global _CONFIG_VARS
     if _CONFIG_VARS is None:
         _CONFIG_VARS = {}
         # Normalized versions of prefix and exec_prefix are handy to have;
         # in fact, these are the standard versions used most places in the
-        # Distutils.
+        # packaging module.
         _CONFIG_VARS['prefix'] = _PREFIX
         _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
         _CONFIG_VARS['py_version'] = _PY_VERSION
@@ -488,14 +473,14 @@
         # Setting 'userbase' is done below the call to the
         # init function to enable using 'get_config_var' in
         # the init-function.
-        _CONFIG_VARS['userbase'] = _getuserbase()
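+        # version guard kept from the shared distutils2/packaging sources,
+        # which also run on Python 2.x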
+        if sys.version >= '2.6':
+            _CONFIG_VARS['userbase'] = _getuserbase()
 
         if 'srcdir' not in _CONFIG_VARS:
             _CONFIG_VARS['srcdir'] = _PROJECT_BASE
         else:
             _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])
 
-
         # Convert srcdir into an absolute path if it appears necessary.
         # Normally it is relative to the build directory.  However, during
         # testing, for example, we might be running a non-installed python
@@ -515,7 +500,7 @@
                 _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
 
         if sys.platform == 'darwin':
-            kernel_version = os.uname()[2] # Kernel version (8.4.3)
+            kernel_version = os.uname()[2]  # Kernel version (8.4.3)
             major_version = int(kernel_version.split('.')[0])
 
             if major_version < 8:
@@ -581,6 +566,7 @@
     else:
         return _CONFIG_VARS
 
+
 def get_config_var(name):
     """Return the value of a single variable using the dictionary returned by
     'get_config_vars()'.
@@ -589,6 +575,7 @@
     """
     return get_config_vars().get(name)
 
+
 def get_platform():
     """Return a string that identifies the current platform.
 
@@ -614,7 +601,6 @@
 
     For other non-POSIX platforms, currently just returns 'sys.platform'.
     """
-    import re
     if os.name == 'nt':
         # sniff sys.version for architecture.
         prefix = " bit ("
@@ -659,7 +645,7 @@
         return "%s-%s.%s" % (osname, version, release)
     elif osname[:6] == "cygwin":
         osname = "cygwin"
-        rel_re = re.compile (r'[\d.]+')
+        rel_re = re.compile(r'[\d.]+')
         m = rel_re.match(release)
         if m:
             release = m.group()
@@ -671,11 +657,9 @@
         # machine is going to compile and link as if it were
         # MACOSX_DEPLOYMENT_TARGET.
         cfgvars = get_config_vars()
-        macver = os.environ.get('MACOSX_DEPLOYMENT_TARGET')
-        if not macver:
-            macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
+        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
 
-        if 1:
+        if True:
             # Always calculate the release of the running machine,
             # needed to determine if we can build fat binaries or not.
 
@@ -691,14 +675,13 @@
                 pass
             else:
                 try:
-                    m = re.search(
-                            r'<key>ProductUserVisibleVersion</key>\s*' +
-                            r'<string>(.*?)</string>', f.read())
-                    if m is not None:
-                        macrelease = '.'.join(m.group(1).split('.')[:2])
-                    # else: fall back to the default behaviour
+                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
+                                  r'<string>(.*?)</string>', f.read())
                 finally:
                     f.close()
+                if m is not None:
+                    macrelease = '.'.join(m.group(1).split('.')[:2])
+                # else: fall back to the default behaviour
 
         if not macver:
             macver = macrelease
@@ -707,8 +690,8 @@
             release = macver
             osname = "macosx"
 
-            if (macrelease + '.') >= '10.4.' and \
-                    '-arch' in get_config_vars().get('CFLAGS', '').strip():
+            if ((macrelease + '.') >= '10.4.' and
+                '-arch' in get_config_vars().get('CFLAGS', '').strip()):
                 # The universal build will build fat binaries, but not on
                 # systems before 10.4
                 #
@@ -735,7 +718,7 @@
                     machine = 'universal'
                 else:
                     raise ValueError(
-                       "Don't know machine value for archs=%r"%(archs,))
+                       "Don't know machine value for archs=%r" % (archs,))
 
             elif machine == 'i386':
                 # On OSX the machine type returned by uname is always the
@@ -758,21 +741,24 @@
 def get_python_version():
     return _PY_VERSION_SHORT
 
+
 def _print_dict(title, data):
     for index, (key, value) in enumerate(sorted(data.items())):
         if index == 0:
-            print('{0}: '.format(title))
-        print('\t{0} = "{1}"'.format(key, value))
+            print('%s: ' % (title))
+        print('\t%s = "%s"' % (key, value))
+
 
 def _main():
     """Display all information sysconfig detains."""
-    print('Platform: "{0}"'.format(get_platform()))
-    print('Python version: "{0}"'.format(get_python_version()))
-    print('Current installation scheme: "{0}"'.format(_get_default_scheme()))
-    print('')
+    print('Platform: "%s"' % get_platform())
+    print('Python version: "%s"' % get_python_version())
+    print('Current installation scheme: "%s"' % _get_default_scheme())
+    print()
     _print_dict('Paths', get_paths())
-    print('')
+    print()
     _print_dict('Variables', get_config_vars())
 
+
 if __name__ == '__main__':
     _main()
diff --git a/Lib/test/cjkencodings/big5-utf8.txt b/Lib/test/cjkencodings/big5-utf8.txt
new file mode 100644
index 0000000..a0a534a
--- /dev/null
+++ b/Lib/test/cjkencodings/big5-utf8.txt
@@ -0,0 +1,9 @@
+如何在 Python 中使用既有的 C library?
+ 在資訊科技快速發展的今天, 開發及測試軟體的速度是不容忽視的
+課題. 為加快開發及測試的速度, 我們便常希望能利用一些已開發好的
+library, 並有一個 fast prototyping 的 programming language 可
+供使用. 目前有許許多多的 library 是以 C 寫成, 而 Python 是一個
+fast prototyping 的 programming language. 故我們希望能將既有的
+C library 拿到 Python 的環境中測試及整合. 其中最主要也是我們所
+要討論的問題就是:
+
diff --git a/Lib/test/cjkencodings/big5.txt b/Lib/test/cjkencodings/big5.txt
new file mode 100644
index 0000000..f442495
--- /dev/null
+++ b/Lib/test/cjkencodings/big5.txt
@@ -0,0 +1,9 @@
+¦p¦ó¦b Python ¤¤¨Ï¥Î¬J¦³ªº C library?
+¡@¦b¸ê°T¬ì§Þ§Ö³tµo®iªº¤µ¤Ñ, ¶}µo¤Î´ú¸Õ³nÅ骺³t«×¬O¤£®e©¿µøªº
+½ÒÃD. ¬°¥[§Ö¶}µo¤Î´ú¸Õªº³t«×, §Ú­Ì«K±`§Æ±æ¯à§Q¥Î¤@¨Ç¤w¶}µo¦nªº
+library, ¨Ã¦³¤@­Ó fast prototyping ªº programming language ¥i
+¨Ñ¨Ï¥Î. ¥Ø«e¦³³\³\¦h¦hªº library ¬O¥H C ¼g¦¨, ¦Ó Python ¬O¤@­Ó
+fast prototyping ªº programming language. ¬G§Ú­Ì§Æ±æ¯à±N¬J¦³ªº
+C library ®³¨ì Python ªºÀô¹Ò¤¤´ú¸Õ¤Î¾ã¦X. ¨ä¤¤³Ì¥D­n¤]¬O§Ú­Ì©Ò
+­n°Q½×ªº°ÝÃD´N¬O:
+
diff --git a/Lib/test/cjkencodings/big5hkscs-utf8.txt b/Lib/test/cjkencodings/big5hkscs-utf8.txt
new file mode 100644
index 0000000..f744ce9
--- /dev/null
+++ b/Lib/test/cjkencodings/big5hkscs-utf8.txt
@@ -0,0 +1,2 @@
+𠄌Ě鵮罓洆
+ÊÊ̄ê êê̄
diff --git a/Lib/test/cjkencodings/big5hkscs.txt b/Lib/test/cjkencodings/big5hkscs.txt
new file mode 100644
index 0000000..81c42b3
--- /dev/null
+++ b/Lib/test/cjkencodings/big5hkscs.txt
@@ -0,0 +1,2 @@
+ˆEˆ\Šs‹ÚØ
+ˆfˆbˆ§ ˆ§ˆ£
diff --git a/Lib/test/cjkencodings/cp949-utf8.txt b/Lib/test/cjkencodings/cp949-utf8.txt
new file mode 100644
index 0000000..5655e38
--- /dev/null
+++ b/Lib/test/cjkencodings/cp949-utf8.txt
@@ -0,0 +1,9 @@
+똠방각하 펲시콜라
+
+㉯㉯납!! 因九月패믤릔궈 ⓡⓖ훀¿¿¿ 긍뒙 ⓔ뎨 ㉯. .
+亞영ⓔ능횹 . . . . 서울뤄 뎐학乙 家훀 ! ! !ㅠ.ㅠ
+흐흐흐 ㄱㄱㄱ☆ㅠ_ㅠ 어릨 탸콰긐 뎌응 칑九들乙 ㉯드긐
+설릌 家훀 . . . . 굴애쉌 ⓔ궈 ⓡ릘㉱긐 因仁川女中까즼
+와쒀훀 ! ! 亞영ⓔ 家능궈 ☆上관 없능궈능 亞능뒈훀 글애듴
+ⓡ려듀九 싀풔숴훀 어릨 因仁川女中싁⑨들앜!! ㉯㉯납♡ ⌒⌒*
+
diff --git a/Lib/test/cjkencodings/cp949.txt b/Lib/test/cjkencodings/cp949.txt
new file mode 100644
index 0000000..16549aa
--- /dev/null
+++ b/Lib/test/cjkencodings/cp949.txt
@@ -0,0 +1,9 @@
+Œc¹æ°¢ÇÏ ¼„½ÃÄݶó
+
+¨À¨À³³!! ì×ÎúêÅÆÐ’æp±Å ¨Þ¨ÓÄR¢¯¢¯¢¯ ±àŠ– ¨Ñµ³ ¨À. .
+䬿µ¨Ñ´ÉÈ . . . . ¼­¿ï·ï µ¯ÇÐëà Ê«ÄR ! ! !¤Ð.¤Ð
+ÈåÈåÈå ¤¡¤¡¤¡¡Ù¤Ð_¤Ð ¾îŠ ÅËÄâƒO µ®ÀÀ ¯hÎúµéëà ¨ÀµåƒO
+¼³j Ê«ÄR . . . . ±¼¾Öšf ¨Ñ±Å ¨Þt¨ÂƒO ì×ìÒô¹åüñé±î£Ž
+¿Í¾¬ÄR ! ! 䬿µ¨Ñ Ê«´É±Å ¡Ùß¾°ü ¾ø´É±Å´É 䬴ɵØÄR ±Û¾ÖŠÛ
+¨Þ·ÁµàÎú šÃÇ´½¤ÄR ¾îŠ ì×ìÒô¹åüñéšÄ¨ïµéÚ!! ¨À¨À³³¢½ ¡Ò¡Ò*
+
diff --git a/Lib/test/cjkencodings/euc_jisx0213-utf8.txt b/Lib/test/cjkencodings/euc_jisx0213-utf8.txt
new file mode 100644
index 0000000..9a56a2e
--- /dev/null
+++ b/Lib/test/cjkencodings/euc_jisx0213-utf8.txt
@@ -0,0 +1,8 @@
+Python の開発は、1990 年ごろから開始されています。
+開発者の Guido van Rossum は教育用のプログラミング言語「ABC」の開発に参加していましたが、ABC は実用上の目的にはあまり適していませんでした。
+このため、Guido はより実用的なプログラミング言語の開発を開始し、英国 BBS 放送のコメディ番組「モンティ パイソン」のファンである Guido はこの言語を「Python」と名づけました。
+このような背景から生まれた Python の言語設計は、「シンプル」で「習得が容易」という目標に重点が置かれています。
+多くのスクリプト系言語ではユーザの目先の利便性を優先して色々な機能を言語要素として取り入れる場合が多いのですが、Python ではそういった小細工が追加されることはあまりありません。
+言語自体の機能は最小限に押さえ、必要な機能は拡張モジュールとして追加する、というのが Python のポリシーです。
+
+ノか゚ ト゚ トキ喝塀 𡚴𪎌 麀齁𩛰
diff --git a/Lib/test/cjkencodings/euc_jisx0213.txt b/Lib/test/cjkencodings/euc_jisx0213.txt
new file mode 100644
index 0000000..51e9268
--- /dev/null
+++ b/Lib/test/cjkencodings/euc_jisx0213.txt
@@ -0,0 +1,8 @@
+Python ¤Î³«È¯¤Ï¡¢1990 ǯ¤´¤í¤«¤é³«»Ï¤µ¤ì¤Æ¤¤¤Þ¤¹¡£
+³«È¯¼Ô¤Î Guido van Rossum ¤Ï¶µ°éÍÑ¤Î¥×¥í¥°¥é¥ß¥ó¥°¸À¸ì¡ÖABC¡×¤Î³«È¯¤Ë»²²Ã¤·¤Æ¤¤¤Þ¤·¤¿¤¬¡¢ABC ¤Ï¼ÂÍѾå¤ÎÌÜŪ¤Ë¤Ï¤¢¤Þ¤êŬ¤·¤Æ¤¤¤Þ¤»¤ó¤Ç¤·¤¿¡£
+¤³¤Î¤¿¤á¡¢Guido ¤Ï¤è¤ê¼ÂÍÑŪ¤Ê¥×¥í¥°¥é¥ß¥ó¥°¸À¸ì¤Î³«È¯¤ò³«»Ï¤·¡¢±Ñ¹ñ BBS ÊüÁ÷¤Î¥³¥á¥Ç¥£ÈÖÁȡ֥â¥ó¥Æ¥£ ¥Ñ¥¤¥½¥ó¡×¤Î¥Õ¥¡¥ó¤Ç¤¢¤ë Guido ¤Ï¤³¤Î¸À¸ì¤ò¡ÖPython¡×¤È̾¤Å¤±¤Þ¤·¤¿¡£
+¤³¤Î¤è¤¦¤ÊÇØ·Ê¤«¤éÀ¸¤Þ¤ì¤¿ Python ¤Î¸À¸ìÀ߷פϡ¢¡Ö¥·¥ó¥×¥ë¡×¤Ç¡Ö½¬ÆÀ¤¬ÍưספȤ¤¤¦ÌÜɸ¤Ë½ÅÅÀ¤¬ÃÖ¤«¤ì¤Æ¤¤¤Þ¤¹¡£
+¿¤¯¤Î¥¹¥¯¥ê¥×¥È·Ï¸À¸ì¤Ç¤Ï¥æ¡¼¥¶¤ÎÌÜÀè¤ÎÍøÊØÀ­¤òÍ¥À褷¤Æ¿§¡¹¤Êµ¡Ç½¤ò¸À¸ìÍ×ÁǤȤ·¤Æ¼è¤êÆþ¤ì¤ë¾ì¹ç¤¬Â¿¤¤¤Î¤Ç¤¹¤¬¡¢Python ¤Ç¤Ï¤½¤¦¤¤¤Ã¤¿¾®ºÙ¹©¤¬Äɲ䵤ì¤ë¤³¤È¤Ï¤¢¤Þ¤ê¤¢¤ê¤Þ¤»¤ó¡£
+¸À¸ì¼«ÂΤε¡Ç½¤ÏºÇ¾®¸Â¤Ë²¡¤µ¤¨¡¢É¬Íפʵ¡Ç½¤Ï³ÈÄ¥¥â¥¸¥å¡¼¥ë¤È¤·¤ÆÄɲ乤롢¤È¤¤¤¦¤Î¤¬ Python ¤Î¥Ý¥ê¥·¡¼¤Ç¤¹¡£
+
+¥Î¤÷ ¥þ ¥È¥­¯¬¯Ú ÏãþØ þԏþèüÖ
diff --git a/Lib/test/cjkencodings/euc_jp-utf8.txt b/Lib/test/cjkencodings/euc_jp-utf8.txt
new file mode 100644
index 0000000..7763250
--- /dev/null
+++ b/Lib/test/cjkencodings/euc_jp-utf8.txt
@@ -0,0 +1,7 @@
+Python の開発は、1990 年ごろから開始されています。
+開発者の Guido van Rossum は教育用のプログラミング言語「ABC」の開発に参加していましたが、ABC は実用上の目的にはあまり適していませんでした。
+このため、Guido はより実用的なプログラミング言語の開発を開始し、英国 BBS 放送のコメディ番組「モンティ パイソン」のファンである Guido はこの言語を「Python」と名づけました。
+このような背景から生まれた Python の言語設計は、「シンプル」で「習得が容易」という目標に重点が置かれています。
+多くのスクリプト系言語ではユーザの目先の利便性を優先して色々な機能を言語要素として取り入れる場合が多いのですが、Python ではそういった小細工が追加されることはあまりありません。
+言語自体の機能は最小限に押さえ、必要な機能は拡張モジュールとして追加する、というのが Python のポリシーです。
+
diff --git a/Lib/test/cjkencodings/euc_jp.txt b/Lib/test/cjkencodings/euc_jp.txt
new file mode 100644
index 0000000..9da6b5d
--- /dev/null
+++ b/Lib/test/cjkencodings/euc_jp.txt
@@ -0,0 +1,7 @@
+Python ¤Î³«È¯¤Ï¡¢1990 ǯ¤´¤í¤«¤é³«»Ï¤µ¤ì¤Æ¤¤¤Þ¤¹¡£
+³«È¯¼Ô¤Î Guido van Rossum ¤Ï¶µ°éÍÑ¤Î¥×¥í¥°¥é¥ß¥ó¥°¸À¸ì¡ÖABC¡×¤Î³«È¯¤Ë»²²Ã¤·¤Æ¤¤¤Þ¤·¤¿¤¬¡¢ABC ¤Ï¼ÂÍѾå¤ÎÌÜŪ¤Ë¤Ï¤¢¤Þ¤êŬ¤·¤Æ¤¤¤Þ¤»¤ó¤Ç¤·¤¿¡£
+¤³¤Î¤¿¤á¡¢Guido ¤Ï¤è¤ê¼ÂÍÑŪ¤Ê¥×¥í¥°¥é¥ß¥ó¥°¸À¸ì¤Î³«È¯¤ò³«»Ï¤·¡¢±Ñ¹ñ BBS ÊüÁ÷¤Î¥³¥á¥Ç¥£ÈÖÁȡ֥â¥ó¥Æ¥£ ¥Ñ¥¤¥½¥ó¡×¤Î¥Õ¥¡¥ó¤Ç¤¢¤ë Guido ¤Ï¤³¤Î¸À¸ì¤ò¡ÖPython¡×¤È̾¤Å¤±¤Þ¤·¤¿¡£
+¤³¤Î¤è¤¦¤ÊÇØ·Ê¤«¤éÀ¸¤Þ¤ì¤¿ Python ¤Î¸À¸ìÀ߷פϡ¢¡Ö¥·¥ó¥×¥ë¡×¤Ç¡Ö½¬ÆÀ¤¬ÍưספȤ¤¤¦ÌÜɸ¤Ë½ÅÅÀ¤¬ÃÖ¤«¤ì¤Æ¤¤¤Þ¤¹¡£
+¿¤¯¤Î¥¹¥¯¥ê¥×¥È·Ï¸À¸ì¤Ç¤Ï¥æ¡¼¥¶¤ÎÌÜÀè¤ÎÍøÊØÀ­¤òÍ¥À褷¤Æ¿§¡¹¤Êµ¡Ç½¤ò¸À¸ìÍ×ÁǤȤ·¤Æ¼è¤êÆþ¤ì¤ë¾ì¹ç¤¬Â¿¤¤¤Î¤Ç¤¹¤¬¡¢Python ¤Ç¤Ï¤½¤¦¤¤¤Ã¤¿¾®ºÙ¹©¤¬Äɲ䵤ì¤ë¤³¤È¤Ï¤¢¤Þ¤ê¤¢¤ê¤Þ¤»¤ó¡£
+¸À¸ì¼«ÂΤε¡Ç½¤ÏºÇ¾®¸Â¤Ë²¡¤µ¤¨¡¢É¬Íפʵ¡Ç½¤Ï³ÈÄ¥¥â¥¸¥å¡¼¥ë¤È¤·¤ÆÄɲ乤롢¤È¤¤¤¦¤Î¤¬ Python ¤Î¥Ý¥ê¥·¡¼¤Ç¤¹¡£
+
diff --git a/Lib/test/cjkencodings/euc_kr-utf8.txt b/Lib/test/cjkencodings/euc_kr-utf8.txt
new file mode 100644
index 0000000..16c3741
--- /dev/null
+++ b/Lib/test/cjkencodings/euc_kr-utf8.txt
@@ -0,0 +1,7 @@
+◎ 파이썬(Python)은 배우기 쉽고, 강력한 프로그래밍 언어입니다. 파이썬은
+효율적인 고수준 데이터 구조와 간단하지만 효율적인 객체지향프로그래밍을
+지원합니다. 파이썬의 우아(優雅)한 문법과 동적 타이핑, 그리고 인터프리팅
+환경은 파이썬을 스크립팅과 여러 분야에서와 대부분의 플랫폼에서의 빠른
+애플리케이션 개발을 할 수 있는 이상적인 언어로 만들어줍니다.
+
+☆첫가끝: 날아라 쓔쓔쓩~ 닁큼! 뜽금없이 전홥니다. 뷁. 그런거 읎다.
diff --git a/Lib/test/cjkencodings/euc_kr.txt b/Lib/test/cjkencodings/euc_kr.txt
new file mode 100644
index 0000000..f68dd35
--- /dev/null
+++ b/Lib/test/cjkencodings/euc_kr.txt
@@ -0,0 +1,7 @@
+¡Ý ÆÄÀ̽ã(Python)Àº ¹è¿ì±â ½±°í, °­·ÂÇÑ ÇÁ·Î±×·¡¹Ö ¾ð¾îÀÔ´Ï´Ù. ÆÄÀ̽ãÀº
+È¿À²ÀûÀÎ °í¼öÁØ µ¥ÀÌÅÍ ±¸Á¶¿Í °£´ÜÇÏÁö¸¸ È¿À²ÀûÀÎ °´Ã¼ÁöÇâÇÁ·Î±×·¡¹ÖÀ»
+Áö¿øÇÕ´Ï´Ù. ÆÄÀ̽ãÀÇ ¿ì¾Æ(éÐäº)ÇÑ ¹®¹ý°ú µ¿Àû ŸÀÌÇÎ, ±×¸®°í ÀÎÅÍÇÁ¸®ÆÃ
+ȯ°æÀº ÆÄÀ̽ãÀ» ½ºÅ©¸³ÆÃ°ú ¿©·¯ ºÐ¾ß¿¡¼­¿Í ´ëºÎºÐÀÇ Ç÷§Æû¿¡¼­ÀÇ ºü¸¥
+¾ÖÇø®ÄÉÀÌ¼Ç °³¹ßÀ» ÇÒ ¼ö ÀÖ´Â ÀÌ»óÀûÀÎ ¾ð¾î·Î ¸¸µé¾îÁÝ´Ï´Ù.
+
+¡Ùù°¡³¡: ³¯¾Æ¶ó ¤Ô¤¶¤Ð¤Ô¤Ô¤¶¤Ð¤Ô¾±~ ¤Ô¤¤¤Ò¤·Å­! ¤Ô¤¨¤Ñ¤·±Ý¾øÀÌ Àü¤Ô¤¾¤È¤²´Ï´Ù. ¤Ô¤²¤Î¤ª. ±×·±°Å ¤Ô¤·¤Ñ¤´´Ù.
diff --git a/Lib/test/cjkencodings/gb18030-utf8.txt b/Lib/test/cjkencodings/gb18030-utf8.txt
new file mode 100644
index 0000000..2060d25
--- /dev/null
+++ b/Lib/test/cjkencodings/gb18030-utf8.txt
@@ -0,0 +1,15 @@
+Python(派森)语言是一种功能强大而完善的通用型计算机程序设计语言,
+已经具有十多年的发展历史,成熟且稳定。这种语言具有非常简捷而清晰
+的语法特点,适合完成各种高层任务,几乎可以在所有的操作系统中
+运行。这种语言简单而强大,适合各种人士学习使用。目前,基于这
+种语言的相关技术正在飞速的发展,用户数量急剧扩大,相关的资源非常多。
+如何在 Python 中使用既有的 C library?
+ 在資訊科技快速發展的今天, 開發及測試軟體的速度是不容忽視的
+課題. 為加快開發及測試的速度, 我們便常希望能利用一些已開發好的
+library, 並有一個 fast prototyping 的 programming language 可
+供使用. 目前有許許多多的 library 是以 C 寫成, 而 Python 是一個
+fast prototyping 的 programming language. 故我們希望能將既有的
+C library 拿到 Python 的環境中測試及整合. 其中最主要也是我們所
+要討論的問題就是:
+파이썬은 강력한 기능을 지닌 범용 컴퓨터 프로그래밍 언어다.
+
diff --git a/Lib/test/cjkencodings/gb18030.txt b/Lib/test/cjkencodings/gb18030.txt
new file mode 100644
index 0000000..5d1f6dc
--- /dev/null
+++ b/Lib/test/cjkencodings/gb18030.txt
@@ -0,0 +1,15 @@
+Python£¨ÅÉÉ­£©ÓïÑÔÊÇÒ»ÖÖ¹¦ÄÜÇ¿´ó¶øÍêÉÆµÄͨÓÃÐͼÆËã»ú³ÌÐòÉè¼ÆÓïÑÔ£¬
+ÒѾ­¾ßÓÐÊ®¶àÄêµÄ·¢Õ¹ÀúÊ·£¬³ÉÊìÇÒÎȶ¨¡£ÕâÖÖÓïÑÔ¾ßÓзdz£¼ò½Ý¶øÇåÎú
+µÄÓï·¨ÌØµã£¬ÊʺÏÍê³É¸÷Öָ߲ãÈÎÎñ£¬¼¸ºõ¿ÉÒÔÔÚËùÓеIJÙ×÷ϵͳÖÐ
+ÔËÐС£ÕâÖÖÓïÑÔ¼òµ¥¶øÇ¿´ó£¬Êʺϸ÷ÖÖÈËʿѧϰʹÓá£Ä¿Ç°£¬»ùÓÚÕâ
+ÖÖÓïÑÔµÄÏà¹Ø¼¼ÊõÕýÔÚ·ÉËٵķ¢Õ¹£¬Óû§ÊýÁ¿¼±¾çÀ©´ó£¬Ïà¹ØµÄ×ÊÔ´·Ç³£¶à¡£
+ÈçºÎÔÚ Python ÖÐʹÓüÈÓÐµÄ C library?
+¡¡ÔÚÙYӍ¿Æ¼¼¿ìËÙ°lÕ¹µÄ½ñÌì, é_°l¼°œyԇܛówµÄËÙ¶ÈÊDz»ÈݺöҕµÄ
+Õnî}. žé¼Ó¿ìé_°l¼°œyԇµÄËÙ¶È, Î҂ƒ±ã³£Ï£ÍûÄÜÀûÓÃһЩÒÑé_°lºÃµÄ
+library, KÓÐÒ»‚€ fast prototyping µÄ programming language ¿É
+¹©Ê¹ÓÃ. ĿǰÓÐÔSÔS¶à¶àµÄ library ÊÇÒÔ C Œ‘³É, ¶ø Python ÊÇÒ»‚€
+fast prototyping µÄ programming language. ¹ÊÎ҂ƒÏ£ÍûÄ܌¢¼ÈÓеÄ
+C library Äõ½ Python µÄ­h¾³ÖМyԇ¼°ÕûºÏ. ÆäÖÐ×îÖ÷ÒªÒ²ÊÇÎ҂ƒËù
+ҪӑՓµÄ†–î}¾ÍÊÇ:
+ƒ5Ç1ƒ3š3ƒ2±1ƒ3•1 ‚7Ñ6ƒ0Œ4ƒ6„3 ‚8‰5‚8û6ƒ3•5 ƒ3Õ1‚95 ƒ0ý9ƒ3†0 ƒ4Ü3ƒ5ö7ƒ5—5 ƒ5ù5ƒ0‘9‚8ƒ9‚9ü3ƒ0ð4 ƒ2ë9ƒ2ë5‚9ƒ9.
+
diff --git a/Lib/test/cjkencodings/gb2312-utf8.txt b/Lib/test/cjkencodings/gb2312-utf8.txt
new file mode 100644
index 0000000..efb7d8f
--- /dev/null
+++ b/Lib/test/cjkencodings/gb2312-utf8.txt
@@ -0,0 +1,6 @@
+Python(派森)语言是一种功能强大而完善的通用型计算机程序设计语言,
+已经具有十多年的发展历史,成熟且稳定。这种语言具有非常简捷而清晰
+的语法特点,适合完成各种高层任务,几乎可以在所有的操作系统中
+运行。这种语言简单而强大,适合各种人士学习使用。目前,基于这
+种语言的相关技术正在飞速的发展,用户数量急剧扩大,相关的资源非常多。
+
diff --git a/Lib/test/cjkencodings/gb2312.txt b/Lib/test/cjkencodings/gb2312.txt
new file mode 100644
index 0000000..1536ac1
--- /dev/null
+++ b/Lib/test/cjkencodings/gb2312.txt
@@ -0,0 +1,6 @@
+Python£¨ÅÉÉ­£©ÓïÑÔÊÇÒ»ÖÖ¹¦ÄÜÇ¿´ó¶øÍêÉÆµÄͨÓÃÐͼÆËã»ú³ÌÐòÉè¼ÆÓïÑÔ£¬
+ÒѾ­¾ßÓÐÊ®¶àÄêµÄ·¢Õ¹ÀúÊ·£¬³ÉÊìÇÒÎȶ¨¡£ÕâÖÖÓïÑÔ¾ßÓзdz£¼ò½Ý¶øÇåÎú
+µÄÓï·¨ÌØµã£¬ÊʺÏÍê³É¸÷Öָ߲ãÈÎÎñ£¬¼¸ºõ¿ÉÒÔÔÚËùÓеIJÙ×÷ϵͳÖÐ
+ÔËÐС£ÕâÖÖÓïÑÔ¼òµ¥¶øÇ¿´ó£¬Êʺϸ÷ÖÖÈËʿѧϰʹÓá£Ä¿Ç°£¬»ùÓÚÕâ
+ÖÖÓïÑÔµÄÏà¹Ø¼¼ÊõÕýÔÚ·ÉËٵķ¢Õ¹£¬Óû§ÊýÁ¿¼±¾çÀ©´ó£¬Ïà¹ØµÄ×ÊÔ´·Ç³£¶à¡£
+
diff --git a/Lib/test/cjkencodings/gbk-utf8.txt b/Lib/test/cjkencodings/gbk-utf8.txt
new file mode 100644
index 0000000..75bbd31
--- /dev/null
+++ b/Lib/test/cjkencodings/gbk-utf8.txt
@@ -0,0 +1,14 @@
+Python(派森)语言是一种功能强大而完善的通用型计算机程序设计语言,
+已经具有十多年的发展历史,成熟且稳定。这种语言具有非常简捷而清晰
+的语法特点,适合完成各种高层任务,几乎可以在所有的操作系统中
+运行。这种语言简单而强大,适合各种人士学习使用。目前,基于这
+种语言的相关技术正在飞速的发展,用户数量急剧扩大,相关的资源非常多。
+如何在 Python 中使用既有的 C library?
+ 在資訊科技快速發展的今天, 開發及測試軟體的速度是不容忽視的
+課題. 為加快開發及測試的速度, 我們便常希望能利用一些已開發好的
+library, 並有一個 fast prototyping 的 programming language 可
+供使用. 目前有許許多多的 library 是以 C 寫成, 而 Python 是一個
+fast prototyping 的 programming language. 故我們希望能將既有的
+C library 拿到 Python 的環境中測試及整合. 其中最主要也是我們所
+要討論的問題就是:
+
diff --git a/Lib/test/cjkencodings/gbk.txt b/Lib/test/cjkencodings/gbk.txt
new file mode 100644
index 0000000..8788f8a
--- /dev/null
+++ b/Lib/test/cjkencodings/gbk.txt
@@ -0,0 +1,14 @@
+Python£¨ÅÉÉ­£©ÓïÑÔÊÇÒ»ÖÖ¹¦ÄÜÇ¿´ó¶øÍêÉÆµÄͨÓÃÐͼÆËã»ú³ÌÐòÉè¼ÆÓïÑÔ£¬
+ÒѾ­¾ßÓÐÊ®¶àÄêµÄ·¢Õ¹ÀúÊ·£¬³ÉÊìÇÒÎȶ¨¡£ÕâÖÖÓïÑÔ¾ßÓзdz£¼ò½Ý¶øÇåÎú
+µÄÓï·¨ÌØµã£¬ÊʺÏÍê³É¸÷Öָ߲ãÈÎÎñ£¬¼¸ºõ¿ÉÒÔÔÚËùÓеIJÙ×÷ϵͳÖÐ
+ÔËÐС£ÕâÖÖÓïÑÔ¼òµ¥¶øÇ¿´ó£¬Êʺϸ÷ÖÖÈËʿѧϰʹÓá£Ä¿Ç°£¬»ùÓÚÕâ
+ÖÖÓïÑÔµÄÏà¹Ø¼¼ÊõÕýÔÚ·ÉËٵķ¢Õ¹£¬Óû§ÊýÁ¿¼±¾çÀ©´ó£¬Ïà¹ØµÄ×ÊÔ´·Ç³£¶à¡£
+ÈçºÎÔÚ Python ÖÐʹÓüÈÓÐµÄ C library?
+¡¡ÔÚÙYӍ¿Æ¼¼¿ìËÙ°lÕ¹µÄ½ñÌì, é_°l¼°œyԇܛówµÄËÙ¶ÈÊDz»ÈݺöҕµÄ
+Õnî}. žé¼Ó¿ìé_°l¼°œyԇµÄËÙ¶È, Î҂ƒ±ã³£Ï£ÍûÄÜÀûÓÃһЩÒÑé_°lºÃµÄ
+library, KÓÐÒ»‚€ fast prototyping µÄ programming language ¿É
+¹©Ê¹ÓÃ. ĿǰÓÐÔSÔS¶à¶àµÄ library ÊÇÒÔ C Œ‘³É, ¶ø Python ÊÇÒ»‚€
+fast prototyping µÄ programming language. ¹ÊÎ҂ƒÏ£ÍûÄ܌¢¼ÈÓеÄ
+C library Äõ½ Python µÄ­h¾³ÖМyԇ¼°ÕûºÏ. ÆäÖÐ×îÖ÷ÒªÒ²ÊÇÎ҂ƒËù
+ҪӑՓµÄ†–î}¾ÍÊÇ:
+
diff --git a/Lib/test/cjkencodings/johab-utf8.txt b/Lib/test/cjkencodings/johab-utf8.txt
new file mode 100644
index 0000000..5655e38
--- /dev/null
+++ b/Lib/test/cjkencodings/johab-utf8.txt
@@ -0,0 +1,9 @@
+똠방각하 펲시콜라
+
+㉯㉯납!! 因九月패믤릔궈 ⓡⓖ훀¿¿¿ 긍뒙 ⓔ뎨 ㉯. .
+亞영ⓔ능횹 . . . . 서울뤄 뎐학乙 家훀 ! ! !ㅠ.ㅠ
+흐흐흐 ㄱㄱㄱ☆ㅠ_ㅠ 어릨 탸콰긐 뎌응 칑九들乙 ㉯드긐
+설릌 家훀 . . . . 굴애쉌 ⓔ궈 ⓡ릘㉱긐 因仁川女中까즼
+와쒀훀 ! ! 亞영ⓔ 家능궈 ☆上관 없능궈능 亞능뒈훀 글애듴
+ⓡ려듀九 싀풔숴훀 어릨 因仁川女中싁⑨들앜!! ㉯㉯납♡ ⌒⌒*
+
diff --git a/Lib/test/cjkencodings/johab.txt b/Lib/test/cjkencodings/johab.txt
new file mode 100644
index 0000000..067781b
--- /dev/null
+++ b/Lib/test/cjkencodings/johab.txt
@@ -0,0 +1,9 @@
+™±¤wˆbÐa Í\¯¡Å©œa
+
+ÜÀÜÀs!! ñgâœðÚ£‰Ÿ…Š¡ ÜÞÜÓÒzٯٯٯ ‹w–Ó Üѕ ÜÀ. .
+í<µwÜѓwÒs . . . . ¬á¶‰ž¡ •eÐbðà à;Òz ! ! !‡A.‡A
+ÓaÓaÓa ˆAˆAˆAÙi‡A_‡A ´áŸš È¡ÅÁ‹z •a·w ×✗iðà ÜÀ—a‹z
+¬éŸz à;Òz . . . . Љ´®º Üъ¡ Üޟ‰Ü‹z ñgñbõIíüóéŒa»š
+µÁ²¡Òz ! ! í<µwÜÑ à;“wŠ¡ Ùi꾉Š´ô“wŠ¡“w í<“w–ÁÒz ‹i´—z
+Üޝa—A✠¯Î¡®¡Òz ´áŸš ñgñbõIíüó鯂Üï—i´z!! ÜÀÜÀsÙ½ ÙbÙb*
+
diff --git a/Lib/test/cjkencodings/shift_jis-utf8.txt b/Lib/test/cjkencodings/shift_jis-utf8.txt
new file mode 100644
index 0000000..7763250
--- /dev/null
+++ b/Lib/test/cjkencodings/shift_jis-utf8.txt
@@ -0,0 +1,7 @@
+Python の開発は、1990 年ごろから開始されています。
+開発者の Guido van Rossum は教育用のプログラミング言語「ABC」の開発に参加していましたが、ABC は実用上の目的にはあまり適していませんでした。
+このため、Guido はより実用的なプログラミング言語の開発を開始し、英国 BBS 放送のコメディ番組「モンティ パイソン」のファンである Guido はこの言語を「Python」と名づけました。
+このような背景から生まれた Python の言語設計は、「シンプル」で「習得が容易」という目標に重点が置かれています。
+多くのスクリプト系言語ではユーザの目先の利便性を優先して色々な機能を言語要素として取り入れる場合が多いのですが、Python ではそういった小細工が追加されることはあまりありません。
+言語自体の機能は最小限に押さえ、必要な機能は拡張モジュールとして追加する、というのが Python のポリシーです。
+
diff --git a/Lib/test/cjkencodings/shift_jis.txt b/Lib/test/cjkencodings/shift_jis.txt
new file mode 100644
index 0000000..10b760d
--- /dev/null
+++ b/Lib/test/cjkencodings/shift_jis.txt
@@ -0,0 +1,7 @@
+Python ‚ÌŠJ”­‚́A1990 ”N‚²‚ë‚©‚çŠJŽn‚³‚ê‚Ä‚¢‚Ü‚·B
+ŠJ”­ŽÒ‚Ì Guido van Rossum ‚Í‹³ˆç—p‚̃vƒƒOƒ‰ƒ~ƒ“ƒOŒ¾ŒêuABCv‚ÌŠJ”­‚ÉŽQ‰Á‚µ‚Ä‚¢‚Ü‚µ‚½‚ªAABC ‚ÍŽÀ—pã‚Ì–Ú“I‚ɂ͂ ‚Ü‚è“K‚µ‚Ä‚¢‚Ü‚¹‚ñ‚Å‚µ‚½B
+‚±‚Ì‚½‚߁AGuido ‚Í‚æ‚èŽÀ—p“I‚ȃvƒƒOƒ‰ƒ~ƒ“ƒOŒ¾Œê‚ÌŠJ”­‚ðŠJŽn‚µA‰p‘ BBS •ú‘—‚̃RƒƒfƒB”Ô‘guƒ‚ƒ“ƒeƒB ƒpƒCƒ\ƒ“v‚̃tƒ@ƒ“‚Å‚ ‚é Guido ‚Í‚±‚ÌŒ¾Œê‚ðuPythonv‚Æ–¼‚¯‚Ü‚µ‚½B
+‚±‚̂悤‚È”wŒi‚©‚琶‚܂ꂽ Python ‚ÌŒ¾ŒêÝŒv‚́AuƒVƒ“ƒvƒ‹v‚ŁuK“¾‚ª—eˆÕv‚Æ‚¢‚¤–Ú•W‚ɏd“_‚ª’u‚©‚ê‚Ä‚¢‚Ü‚·B
+‘½‚­‚̃XƒNƒŠƒvƒgŒnŒ¾Œê‚ł̓†[ƒU‚̖ڐæ‚Ì—˜•֐«‚ð—Dæ‚µ‚ĐFX‚È‹@”\‚ðŒ¾Œê—v‘f‚Æ‚µ‚ÄŽæ‚è“ü‚ê‚éê‡‚ª‘½‚¢‚̂ł·‚ªAPython ‚ł͂»‚¤‚¢‚Á‚½¬×H‚ª’ljÁ‚³‚ê‚邱‚Ƃ͂ ‚܂肠‚è‚Ü‚¹‚ñB
+Œ¾ŒêŽ©‘̂̋@”\‚͍ŏ¬ŒÀ‚ɉŸ‚³‚¦A•K—v‚È‹@”\‚ÍŠg’£ƒ‚ƒWƒ…[ƒ‹‚Æ‚µ‚ĒljÁ‚·‚éA‚Æ‚¢‚¤‚Ì‚ª Python ‚̃|ƒŠƒV[‚Å‚·B
+
diff --git a/Lib/test/cjkencodings/shift_jisx0213-utf8.txt b/Lib/test/cjkencodings/shift_jisx0213-utf8.txt
new file mode 100644
index 0000000..9a56a2e
--- /dev/null
+++ b/Lib/test/cjkencodings/shift_jisx0213-utf8.txt
@@ -0,0 +1,8 @@
+Python の開発は、1990 年ごろから開始されています。
+開発者の Guido van Rossum は教育用のプログラミング言語「ABC」の開発に参加していましたが、ABC は実用上の目的にはあまり適していませんでした。
+このため、Guido はより実用的なプログラミング言語の開発を開始し、英国 BBS 放送のコメディ番組「モンティ パイソン」のファンである Guido はこの言語を「Python」と名づけました。
+このような背景から生まれた Python の言語設計は、「シンプル」で「習得が容易」という目標に重点が置かれています。
+多くのスクリプト系言語ではユーザの目先の利便性を優先して色々な機能を言語要素として取り入れる場合が多いのですが、Python ではそういった小細工が追加されることはあまりありません。
+言語自体の機能は最小限に押さえ、必要な機能は拡張モジュールとして追加する、というのが Python のポリシーです。
+
+ノか゚ ト゚ トキ喝塀 𡚴𪎌 麀齁𩛰
diff --git a/Lib/test/cjkencodings/shift_jisx0213.txt b/Lib/test/cjkencodings/shift_jisx0213.txt
new file mode 100644
index 0000000..08c9ef4
--- /dev/null
+++ b/Lib/test/cjkencodings/shift_jisx0213.txt
@@ -0,0 +1,8 @@
+Python ‚ÌŠJ”­‚́A1990 ”N‚²‚ë‚©‚çŠJŽn‚³‚ê‚Ä‚¢‚Ü‚·B
+ŠJ”­ŽÒ‚Ì Guido van Rossum ‚Í‹³ˆç—p‚̃vƒƒOƒ‰ƒ~ƒ“ƒOŒ¾ŒêuABCv‚ÌŠJ”­‚ÉŽQ‰Á‚µ‚Ä‚¢‚Ü‚µ‚½‚ªAABC ‚ÍŽÀ—pã‚Ì–Ú“I‚ɂ͂ ‚Ü‚è“K‚µ‚Ä‚¢‚Ü‚¹‚ñ‚Å‚µ‚½B
+‚±‚Ì‚½‚߁AGuido ‚Í‚æ‚èŽÀ—p“I‚ȃvƒƒOƒ‰ƒ~ƒ“ƒOŒ¾Œê‚ÌŠJ”­‚ðŠJŽn‚µA‰p‘ BBS •ú‘—‚̃RƒƒfƒB”Ô‘guƒ‚ƒ“ƒeƒB ƒpƒCƒ\ƒ“v‚̃tƒ@ƒ“‚Å‚ ‚é Guido ‚Í‚±‚ÌŒ¾Œê‚ðuPythonv‚Æ–¼‚¯‚Ü‚µ‚½B
+‚±‚̂悤‚È”wŒi‚©‚琶‚܂ꂽ Python ‚ÌŒ¾ŒêÝŒv‚́AuƒVƒ“ƒvƒ‹v‚ŁuK“¾‚ª—eˆÕv‚Æ‚¢‚¤–Ú•W‚ɏd“_‚ª’u‚©‚ê‚Ä‚¢‚Ü‚·B
+‘½‚­‚̃XƒNƒŠƒvƒgŒnŒ¾Œê‚ł̓†[ƒU‚̖ڐæ‚Ì—˜•֐«‚ð—Dæ‚µ‚ĐFX‚È‹@”\‚ðŒ¾Œê—v‘f‚Æ‚µ‚ÄŽæ‚è“ü‚ê‚éê‡‚ª‘½‚¢‚̂ł·‚ªAPython ‚ł͂»‚¤‚¢‚Á‚½¬×H‚ª’ljÁ‚³‚ê‚邱‚Ƃ͂ ‚܂肠‚è‚Ü‚¹‚ñB
+Œ¾ŒêŽ©‘̂̋@”\‚͍ŏ¬ŒÀ‚ɉŸ‚³‚¦A•K—v‚È‹@”\‚ÍŠg’£ƒ‚ƒWƒ…[ƒ‹‚Æ‚µ‚ĒljÁ‚·‚éA‚Æ‚¢‚¤‚Ì‚ª Python ‚̃|ƒŠƒV[‚Å‚·B
+
+ƒm‚õ ƒž ƒgƒLˆKˆy ˜ƒüÖ üÒüæûÔ
diff --git a/Lib/test/cjkencodings_test.py b/Lib/test/cjkencodings_test.py
deleted file mode 100644
index 7e55f37..0000000
--- a/Lib/test/cjkencodings_test.py
+++ /dev/null
@@ -1,1019 +0,0 @@
-teststring = {
-'big5': (
-b"\xa6\x70\xa6\xf3\xa6\x62\x20\x50\x79\x74\x68\x6f\x6e\x20\xa4\xa4"
-b"\xa8\xcf\xa5\xce\xac\x4a\xa6\xb3\xaa\xba\x20\x43\x20\x6c\x69\x62"
-b"\x72\x61\x72\x79\x3f\x0a\xa1\x40\xa6\x62\xb8\xea\xb0\x54\xac\xec"
-b"\xa7\xde\xa7\xd6\xb3\x74\xb5\x6f\xae\x69\xaa\xba\xa4\xb5\xa4\xd1"
-b"\x2c\x20\xb6\x7d\xb5\x6f\xa4\xce\xb4\xfa\xb8\xd5\xb3\x6e\xc5\xe9"
-b"\xaa\xba\xb3\x74\xab\xd7\xac\x4f\xa4\xa3\xae\x65\xa9\xbf\xb5\xf8"
-b"\xaa\xba\x0a\xbd\xd2\xc3\x44\x2e\x20\xac\xb0\xa5\x5b\xa7\xd6\xb6"
-b"\x7d\xb5\x6f\xa4\xce\xb4\xfa\xb8\xd5\xaa\xba\xb3\x74\xab\xd7\x2c"
-b"\x20\xa7\xda\xad\xcc\xab\x4b\xb1\x60\xa7\xc6\xb1\xe6\xaf\xe0\xa7"
-b"\x51\xa5\xce\xa4\x40\xa8\xc7\xa4\x77\xb6\x7d\xb5\x6f\xa6\x6e\xaa"
-b"\xba\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\xa8\xc3\xa6\xb3\xa4"
-b"\x40\xad\xd3\x20\x66\x61\x73\x74\x20\x70\x72\x6f\x74\x6f\x74\x79"
-b"\x70\x69\x6e\x67\x20\xaa\xba\x20\x70\x72\x6f\x67\x72\x61\x6d\x6d"
-b"\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x20\xa5\x69\x0a"
-b"\xa8\xd1\xa8\xcf\xa5\xce\x2e\x20\xa5\xd8\xab\x65\xa6\xb3\xb3\x5c"
-b"\xb3\x5c\xa6\x68\xa6\x68\xaa\xba\x20\x6c\x69\x62\x72\x61\x72\x79"
-b"\x20\xac\x4f\xa5\x48\x20\x43\x20\xbc\x67\xa6\xa8\x2c\x20\xa6\xd3"
-b"\x20\x50\x79\x74\x68\x6f\x6e\x20\xac\x4f\xa4\x40\xad\xd3\x0a\x66"
-b"\x61\x73\x74\x20\x70\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20"
-b"\xaa\xba\x20\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c"
-b"\x61\x6e\x67\x75\x61\x67\x65\x2e\x20\xac\x47\xa7\xda\xad\xcc\xa7"
-b"\xc6\xb1\xe6\xaf\xe0\xb1\x4e\xac\x4a\xa6\xb3\xaa\xba\x0a\x43\x20"
-b"\x6c\x69\x62\x72\x61\x72\x79\x20\xae\xb3\xa8\xec\x20\x50\x79\x74"
-b"\x68\x6f\x6e\x20\xaa\xba\xc0\xf4\xb9\xd2\xa4\xa4\xb4\xfa\xb8\xd5"
-b"\xa4\xce\xbe\xe3\xa6\x58\x2e\x20\xa8\xe4\xa4\xa4\xb3\xcc\xa5\x44"
-b"\xad\x6e\xa4\x5d\xac\x4f\xa7\xda\xad\xcc\xa9\xd2\x0a\xad\x6e\xb0"
-b"\x51\xbd\xd7\xaa\xba\xb0\xdd\xc3\x44\xb4\x4e\xac\x4f\x3a\x0a\x0a",
-b"\xe5\xa6\x82\xe4\xbd\x95\xe5\x9c\xa8\x20\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xe4\xb8\xad\xe4\xbd\xbf\xe7\x94\xa8\xe6\x97\xa2\xe6\x9c\x89"
-b"\xe7\x9a\x84\x20\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xe3"
-b"\x80\x80\xe5\x9c\xa8\xe8\xb3\x87\xe8\xa8\x8a\xe7\xa7\x91\xe6\x8a"
-b"\x80\xe5\xbf\xab\xe9\x80\x9f\xe7\x99\xbc\xe5\xb1\x95\xe7\x9a\x84"
-b"\xe4\xbb\x8a\xe5\xa4\xa9\x2c\x20\xe9\x96\x8b\xe7\x99\xbc\xe5\x8f"
-b"\x8a\xe6\xb8\xac\xe8\xa9\xa6\xe8\xbb\x9f\xe9\xab\x94\xe7\x9a\x84"
-b"\xe9\x80\x9f\xe5\xba\xa6\xe6\x98\xaf\xe4\xb8\x8d\xe5\xae\xb9\xe5"
-b"\xbf\xbd\xe8\xa6\x96\xe7\x9a\x84\x0a\xe8\xaa\xb2\xe9\xa1\x8c\x2e"
-b"\x20\xe7\x82\xba\xe5\x8a\xa0\xe5\xbf\xab\xe9\x96\x8b\xe7\x99\xbc"
-b"\xe5\x8f\x8a\xe6\xb8\xac\xe8\xa9\xa6\xe7\x9a\x84\xe9\x80\x9f\xe5"
-b"\xba\xa6\x2c\x20\xe6\x88\x91\xe5\x80\x91\xe4\xbe\xbf\xe5\xb8\xb8"
-b"\xe5\xb8\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\x88\xa9\xe7\x94\xa8\xe4"
-b"\xb8\x80\xe4\xba\x9b\xe5\xb7\xb2\xe9\x96\x8b\xe7\x99\xbc\xe5\xa5"
-b"\xbd\xe7\x9a\x84\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\xe4\xb8"
-b"\xa6\xe6\x9c\x89\xe4\xb8\x80\xe5\x80\x8b\x20\x66\x61\x73\x74\x20"
-b"\x70\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20"
-b"\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67"
-b"\x75\x61\x67\x65\x20\xe5\x8f\xaf\x0a\xe4\xbe\x9b\xe4\xbd\xbf\xe7"
-b"\x94\xa8\x2e\x20\xe7\x9b\xae\xe5\x89\x8d\xe6\x9c\x89\xe8\xa8\xb1"
-b"\xe8\xa8\xb1\xe5\xa4\x9a\xe5\xa4\x9a\xe7\x9a\x84\x20\x6c\x69\x62"
-b"\x72\x61\x72\x79\x20\xe6\x98\xaf\xe4\xbb\xa5\x20\x43\x20\xe5\xaf"
-b"\xab\xe6\x88\x90\x2c\x20\xe8\x80\x8c\x20\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xe6\x98\xaf\xe4\xb8\x80\xe5\x80\x8b\x0a\x66\x61\x73\x74\x20"
-b"\x70\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20"
-b"\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67"
-b"\x75\x61\x67\x65\x2e\x20\xe6\x95\x85\xe6\x88\x91\xe5\x80\x91\xe5"
-b"\xb8\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\xb0\x87\xe6\x97\xa2\xe6\x9c"
-b"\x89\xe7\x9a\x84\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xe6"
-b"\x8b\xbf\xe5\x88\xb0\x20\x50\x79\x74\x68\x6f\x6e\x20\xe7\x9a\x84"
-b"\xe7\x92\xb0\xe5\xa2\x83\xe4\xb8\xad\xe6\xb8\xac\xe8\xa9\xa6\xe5"
-b"\x8f\x8a\xe6\x95\xb4\xe5\x90\x88\x2e\x20\xe5\x85\xb6\xe4\xb8\xad"
-b"\xe6\x9c\x80\xe4\xb8\xbb\xe8\xa6\x81\xe4\xb9\x9f\xe6\x98\xaf\xe6"
-b"\x88\x91\xe5\x80\x91\xe6\x89\x80\x0a\xe8\xa6\x81\xe8\xa8\x8e\xe8"
-b"\xab\x96\xe7\x9a\x84\xe5\x95\x8f\xe9\xa1\x8c\xe5\xb0\xb1\xe6\x98"
-b"\xaf\x3a\x0a\x0a"),
-'big5hkscs': (
-b"\x88\x45\x88\x5c\x8a\x73\x8b\xda\x8d\xd8\x0a\x88\x66\x88\x62\x88"
-b"\xa7\x20\x88\xa7\x88\xa3\x0a",
-b"\xf0\xa0\x84\x8c\xc4\x9a\xe9\xb5\xae\xe7\xbd\x93\xe6\xb4\x86\x0a"
-b"\xc3\x8a\xc3\x8a\xcc\x84\xc3\xaa\x20\xc3\xaa\xc3\xaa\xcc\x84\x0a"),
-'cp949': (
-b"\x8c\x63\xb9\xe6\xb0\xa2\xc7\xcf\x20\xbc\x84\xbd\xc3\xc4\xdd\xb6"
-b"\xf3\x0a\x0a\xa8\xc0\xa8\xc0\xb3\xb3\x21\x21\x20\xec\xd7\xce\xfa"
-b"\xea\xc5\xc6\xd0\x92\xe6\x90\x70\xb1\xc5\x20\xa8\xde\xa8\xd3\xc4"
-b"\x52\xa2\xaf\xa2\xaf\xa2\xaf\x20\xb1\xe0\x8a\x96\x20\xa8\xd1\xb5"
-b"\xb3\x20\xa8\xc0\x2e\x20\x2e\x0a\xe4\xac\xbf\xb5\xa8\xd1\xb4\xc9"
-b"\xc8\xc2\x20\x2e\x20\x2e\x20\x2e\x20\x2e\x20\xbc\xad\xbf\xef\xb7"
-b"\xef\x20\xb5\xaf\xc7\xd0\xeb\xe0\x20\xca\xab\xc4\x52\x20\x21\x20"
-b"\x21\x20\x21\xa4\xd0\x2e\xa4\xd0\x0a\xc8\xe5\xc8\xe5\xc8\xe5\x20"
-b"\xa4\xa1\xa4\xa1\xa4\xa1\xa1\xd9\xa4\xd0\x5f\xa4\xd0\x20\xbe\xee"
-b"\x90\x8a\x20\xc5\xcb\xc4\xe2\x83\x4f\x20\xb5\xae\xc0\xc0\x20\xaf"
-b"\x68\xce\xfa\xb5\xe9\xeb\xe0\x20\xa8\xc0\xb5\xe5\x83\x4f\x0a\xbc"
-b"\xb3\x90\x6a\x20\xca\xab\xc4\x52\x20\x2e\x20\x2e\x20\x2e\x20\x2e"
-b"\x20\xb1\xbc\xbe\xd6\x9a\x66\x20\xa8\xd1\xb1\xc5\x20\xa8\xde\x90"
-b"\x74\xa8\xc2\x83\x4f\x20\xec\xd7\xec\xd2\xf4\xb9\xe5\xfc\xf1\xe9"
-b"\xb1\xee\xa3\x8e\x0a\xbf\xcd\xbe\xac\xc4\x52\x20\x21\x20\x21\x20"
-b"\xe4\xac\xbf\xb5\xa8\xd1\x20\xca\xab\xb4\xc9\xb1\xc5\x20\xa1\xd9"
-b"\xdf\xbe\xb0\xfc\x20\xbe\xf8\xb4\xc9\xb1\xc5\xb4\xc9\x20\xe4\xac"
-b"\xb4\xc9\xb5\xd8\xc4\x52\x20\xb1\xdb\xbe\xd6\x8a\xdb\x0a\xa8\xde"
-b"\xb7\xc1\xb5\xe0\xce\xfa\x20\x9a\xc3\xc7\xb4\xbd\xa4\xc4\x52\x20"
-b"\xbe\xee\x90\x8a\x20\xec\xd7\xec\xd2\xf4\xb9\xe5\xfc\xf1\xe9\x9a"
-b"\xc4\xa8\xef\xb5\xe9\x9d\xda\x21\x21\x20\xa8\xc0\xa8\xc0\xb3\xb3"
-b"\xa2\xbd\x20\xa1\xd2\xa1\xd2\x2a\x0a\x0a",
-b"\xeb\x98\xa0\xeb\xb0\xa9\xea\xb0\x81\xed\x95\x98\x20\xed\x8e\xb2"
-b"\xec\x8b\x9c\xec\xbd\x9c\xeb\x9d\xbc\x0a\x0a\xe3\x89\xaf\xe3\x89"
-b"\xaf\xeb\x82\xa9\x21\x21\x20\xe5\x9b\xa0\xe4\xb9\x9d\xe6\x9c\x88"
-b"\xed\x8c\xa8\xeb\xaf\xa4\xeb\xa6\x94\xea\xb6\x88\x20\xe2\x93\xa1"
-b"\xe2\x93\x96\xed\x9b\x80\xc2\xbf\xc2\xbf\xc2\xbf\x20\xea\xb8\x8d"
-b"\xeb\x92\x99\x20\xe2\x93\x94\xeb\x8e\xa8\x20\xe3\x89\xaf\x2e\x20"
-b"\x2e\x0a\xe4\xba\x9e\xec\x98\x81\xe2\x93\x94\xeb\x8a\xa5\xed\x9a"
-b"\xb9\x20\x2e\x20\x2e\x20\x2e\x20\x2e\x20\xec\x84\x9c\xec\x9a\xb8"
-b"\xeb\xa4\x84\x20\xeb\x8e\x90\xed\x95\x99\xe4\xb9\x99\x20\xe5\xae"
-b"\xb6\xed\x9b\x80\x20\x21\x20\x21\x20\x21\xe3\x85\xa0\x2e\xe3\x85"
-b"\xa0\x0a\xed\x9d\x90\xed\x9d\x90\xed\x9d\x90\x20\xe3\x84\xb1\xe3"
-b"\x84\xb1\xe3\x84\xb1\xe2\x98\x86\xe3\x85\xa0\x5f\xe3\x85\xa0\x20"
-b"\xec\x96\xb4\xeb\xa6\xa8\x20\xed\x83\xb8\xec\xbd\xb0\xea\xb8\x90"
-b"\x20\xeb\x8e\x8c\xec\x9d\x91\x20\xec\xb9\x91\xe4\xb9\x9d\xeb\x93"
-b"\xa4\xe4\xb9\x99\x20\xe3\x89\xaf\xeb\x93\x9c\xea\xb8\x90\x0a\xec"
-b"\x84\xa4\xeb\xa6\x8c\x20\xe5\xae\xb6\xed\x9b\x80\x20\x2e\x20\x2e"
-b"\x20\x2e\x20\x2e\x20\xea\xb5\xb4\xec\x95\xa0\xec\x89\x8c\x20\xe2"
-b"\x93\x94\xea\xb6\x88\x20\xe2\x93\xa1\xeb\xa6\x98\xe3\x89\xb1\xea"
-b"\xb8\x90\x20\xe5\x9b\xa0\xe4\xbb\x81\xe5\xb7\x9d\xef\xa6\x81\xe4"
-b"\xb8\xad\xea\xb9\x8c\xec\xa6\xbc\x0a\xec\x99\x80\xec\x92\x80\xed"
-b"\x9b\x80\x20\x21\x20\x21\x20\xe4\xba\x9e\xec\x98\x81\xe2\x93\x94"
-b"\x20\xe5\xae\xb6\xeb\x8a\xa5\xea\xb6\x88\x20\xe2\x98\x86\xe4\xb8"
-b"\x8a\xea\xb4\x80\x20\xec\x97\x86\xeb\x8a\xa5\xea\xb6\x88\xeb\x8a"
-b"\xa5\x20\xe4\xba\x9e\xeb\x8a\xa5\xeb\x92\x88\xed\x9b\x80\x20\xea"
-b"\xb8\x80\xec\x95\xa0\xeb\x93\xb4\x0a\xe2\x93\xa1\xeb\xa0\xa4\xeb"
-b"\x93\x80\xe4\xb9\x9d\x20\xec\x8b\x80\xed\x92\x94\xec\x88\xb4\xed"
-b"\x9b\x80\x20\xec\x96\xb4\xeb\xa6\xa8\x20\xe5\x9b\xa0\xe4\xbb\x81"
-b"\xe5\xb7\x9d\xef\xa6\x81\xe4\xb8\xad\xec\x8b\x81\xe2\x91\xa8\xeb"
-b"\x93\xa4\xec\x95\x9c\x21\x21\x20\xe3\x89\xaf\xe3\x89\xaf\xeb\x82"
-b"\xa9\xe2\x99\xa1\x20\xe2\x8c\x92\xe2\x8c\x92\x2a\x0a\x0a"),
-'euc_jisx0213': (
-b"\x50\x79\x74\x68\x6f\x6e\x20\xa4\xce\xb3\xab\xc8\xaf\xa4\xcf\xa1"
-b"\xa2\x31\x39\x39\x30\x20\xc7\xaf\xa4\xb4\xa4\xed\xa4\xab\xa4\xe9"
-b"\xb3\xab\xbb\xcf\xa4\xb5\xa4\xec\xa4\xc6\xa4\xa4\xa4\xde\xa4\xb9"
-b"\xa1\xa3\x0a\xb3\xab\xc8\xaf\xbc\xd4\xa4\xce\x20\x47\x75\x69\x64"
-b"\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73\x73\x75\x6d\x20\xa4\xcf\xb6"
-b"\xb5\xb0\xe9\xcd\xd1\xa4\xce\xa5\xd7\xa5\xed\xa5\xb0\xa5\xe9\xa5"
-b"\xdf\xa5\xf3\xa5\xb0\xb8\xc0\xb8\xec\xa1\xd6\x41\x42\x43\xa1\xd7"
-b"\xa4\xce\xb3\xab\xc8\xaf\xa4\xcb\xbb\xb2\xb2\xc3\xa4\xb7\xa4\xc6"
-b"\xa4\xa4\xa4\xde\xa4\xb7\xa4\xbf\xa4\xac\xa1\xa2\x41\x42\x43\x20"
-b"\xa4\xcf\xbc\xc2\xcd\xd1\xbe\xe5\xa4\xce\xcc\xdc\xc5\xaa\xa4\xcb"
-b"\xa4\xcf\xa4\xa2\xa4\xde\xa4\xea\xc5\xac\xa4\xb7\xa4\xc6\xa4\xa4"
-b"\xa4\xde\xa4\xbb\xa4\xf3\xa4\xc7\xa4\xb7\xa4\xbf\xa1\xa3\x0a\xa4"
-b"\xb3\xa4\xce\xa4\xbf\xa4\xe1\xa1\xa2\x47\x75\x69\x64\x6f\x20\xa4"
-b"\xcf\xa4\xe8\xa4\xea\xbc\xc2\xcd\xd1\xc5\xaa\xa4\xca\xa5\xd7\xa5"
-b"\xed\xa5\xb0\xa5\xe9\xa5\xdf\xa5\xf3\xa5\xb0\xb8\xc0\xb8\xec\xa4"
-b"\xce\xb3\xab\xc8\xaf\xa4\xf2\xb3\xab\xbb\xcf\xa4\xb7\xa1\xa2\xb1"
-b"\xd1\xb9\xf1\x20\x42\x42\x53\x20\xca\xfc\xc1\xf7\xa4\xce\xa5\xb3"
-b"\xa5\xe1\xa5\xc7\xa5\xa3\xc8\xd6\xc1\xc8\xa1\xd6\xa5\xe2\xa5\xf3"
-b"\xa5\xc6\xa5\xa3\x20\xa5\xd1\xa5\xa4\xa5\xbd\xa5\xf3\xa1\xd7\xa4"
-b"\xce\xa5\xd5\xa5\xa1\xa5\xf3\xa4\xc7\xa4\xa2\xa4\xeb\x20\x47\x75"
-b"\x69\x64\x6f\x20\xa4\xcf\xa4\xb3\xa4\xce\xb8\xc0\xb8\xec\xa4\xf2"
-b"\xa1\xd6\x50\x79\x74\x68\x6f\x6e\xa1\xd7\xa4\xc8\xcc\xbe\xa4\xc5"
-b"\xa4\xb1\xa4\xde\xa4\xb7\xa4\xbf\xa1\xa3\x0a\xa4\xb3\xa4\xce\xa4"
-b"\xe8\xa4\xa6\xa4\xca\xc7\xd8\xb7\xca\xa4\xab\xa4\xe9\xc0\xb8\xa4"
-b"\xde\xa4\xec\xa4\xbf\x20\x50\x79\x74\x68\x6f\x6e\x20\xa4\xce\xb8"
-b"\xc0\xb8\xec\xc0\xdf\xb7\xd7\xa4\xcf\xa1\xa2\xa1\xd6\xa5\xb7\xa5"
-b"\xf3\xa5\xd7\xa5\xeb\xa1\xd7\xa4\xc7\xa1\xd6\xbd\xac\xc6\xc0\xa4"
-b"\xac\xcd\xc6\xb0\xd7\xa1\xd7\xa4\xc8\xa4\xa4\xa4\xa6\xcc\xdc\xc9"
-b"\xb8\xa4\xcb\xbd\xc5\xc5\xc0\xa4\xac\xc3\xd6\xa4\xab\xa4\xec\xa4"
-b"\xc6\xa4\xa4\xa4\xde\xa4\xb9\xa1\xa3\x0a\xc2\xbf\xa4\xaf\xa4\xce"
-b"\xa5\xb9\xa5\xaf\xa5\xea\xa5\xd7\xa5\xc8\xb7\xcf\xb8\xc0\xb8\xec"
-b"\xa4\xc7\xa4\xcf\xa5\xe6\xa1\xbc\xa5\xb6\xa4\xce\xcc\xdc\xc0\xe8"
-b"\xa4\xce\xcd\xf8\xca\xd8\xc0\xad\xa4\xf2\xcd\xa5\xc0\xe8\xa4\xb7"
-b"\xa4\xc6\xbf\xa7\xa1\xb9\xa4\xca\xb5\xa1\xc7\xbd\xa4\xf2\xb8\xc0"
-b"\xb8\xec\xcd\xd7\xc1\xc7\xa4\xc8\xa4\xb7\xa4\xc6\xbc\xe8\xa4\xea"
-b"\xc6\xfe\xa4\xec\xa4\xeb\xbe\xec\xb9\xe7\xa4\xac\xc2\xbf\xa4\xa4"
-b"\xa4\xce\xa4\xc7\xa4\xb9\xa4\xac\xa1\xa2\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xa4\xc7\xa4\xcf\xa4\xbd\xa4\xa6\xa4\xa4\xa4\xc3\xa4\xbf\xbe"
-b"\xae\xba\xd9\xb9\xa9\xa4\xac\xc4\xc9\xb2\xc3\xa4\xb5\xa4\xec\xa4"
-b"\xeb\xa4\xb3\xa4\xc8\xa4\xcf\xa4\xa2\xa4\xde\xa4\xea\xa4\xa2\xa4"
-b"\xea\xa4\xde\xa4\xbb\xa4\xf3\xa1\xa3\x0a\xb8\xc0\xb8\xec\xbc\xab"
-b"\xc2\xce\xa4\xce\xb5\xa1\xc7\xbd\xa4\xcf\xba\xc7\xbe\xae\xb8\xc2"
-b"\xa4\xcb\xb2\xa1\xa4\xb5\xa4\xa8\xa1\xa2\xc9\xac\xcd\xd7\xa4\xca"
-b"\xb5\xa1\xc7\xbd\xa4\xcf\xb3\xc8\xc4\xa5\xa5\xe2\xa5\xb8\xa5\xe5"
-b"\xa1\xbc\xa5\xeb\xa4\xc8\xa4\xb7\xa4\xc6\xc4\xc9\xb2\xc3\xa4\xb9"
-b"\xa4\xeb\xa1\xa2\xa4\xc8\xa4\xa4\xa4\xa6\xa4\xce\xa4\xac\x20\x50"
-b"\x79\x74\x68\x6f\x6e\x20\xa4\xce\xa5\xdd\xa5\xea\xa5\xb7\xa1\xbc"
-b"\xa4\xc7\xa4\xb9\xa1\xa3\x0a\x0a\xa5\xce\xa4\xf7\x20\xa5\xfe\x20"
-b"\xa5\xc8\xa5\xad\xaf\xac\xaf\xda\x20\xcf\xe3\x8f\xfe\xd8\x20\x8f"
-b"\xfe\xd4\x8f\xfe\xe8\x8f\xfc\xd6\x0a",
-b"\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
-b"\xe3\x81\xaf\xe3\x80\x81\x31\x39\x39\x30\x20\xe5\xb9\xb4\xe3\x81"
-b"\x94\xe3\x82\x8d\xe3\x81\x8b\xe3\x82\x89\xe9\x96\x8b\xe5\xa7\x8b"
-b"\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3"
-b"\x81\x99\xe3\x80\x82\x0a\xe9\x96\x8b\xe7\x99\xba\xe8\x80\x85\xe3"
-b"\x81\xae\x20\x47\x75\x69\x64\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73"
-b"\x73\x75\x6d\x20\xe3\x81\xaf\xe6\x95\x99\xe8\x82\xb2\xe7\x94\xa8"
-b"\xe3\x81\xae\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83\xa9\xe3"
-b"\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e\xe3\x80"
-b"\x8c\x41\x42\x43\xe3\x80\x8d\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
-b"\xe3\x81\xab\xe5\x8f\x82\xe5\x8a\xa0\xe3\x81\x97\xe3\x81\xa6\xe3"
-b"\x81\x84\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x81\x8c\xe3\x80"
-b"\x81\x41\x42\x43\x20\xe3\x81\xaf\xe5\xae\x9f\xe7\x94\xa8\xe4\xb8"
-b"\x8a\xe3\x81\xae\xe7\x9b\xae\xe7\x9a\x84\xe3\x81\xab\xe3\x81\xaf"
-b"\xe3\x81\x82\xe3\x81\xbe\xe3\x82\x8a\xe9\x81\xa9\xe3\x81\x97\xe3"
-b"\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x81"
-b"\xa7\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a\xe3\x81\x93\xe3\x81"
-b"\xae\xe3\x81\x9f\xe3\x82\x81\xe3\x80\x81\x47\x75\x69\x64\x6f\x20"
-b"\xe3\x81\xaf\xe3\x82\x88\xe3\x82\x8a\xe5\xae\x9f\xe7\x94\xa8\xe7"
-b"\x9a\x84\xe3\x81\xaa\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83"
-b"\xa9\xe3\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e"
-b"\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba\xe3\x82\x92\xe9\x96\x8b\xe5"
-b"\xa7\x8b\xe3\x81\x97\xe3\x80\x81\xe8\x8b\xb1\xe5\x9b\xbd\x20\x42"
-b"\x42\x53\x20\xe6\x94\xbe\xe9\x80\x81\xe3\x81\xae\xe3\x82\xb3\xe3"
-b"\x83\xa1\xe3\x83\x87\xe3\x82\xa3\xe7\x95\xaa\xe7\xb5\x84\xe3\x80"
-b"\x8c\xe3\x83\xa2\xe3\x83\xb3\xe3\x83\x86\xe3\x82\xa3\x20\xe3\x83"
-b"\x91\xe3\x82\xa4\xe3\x82\xbd\xe3\x83\xb3\xe3\x80\x8d\xe3\x81\xae"
-b"\xe3\x83\x95\xe3\x82\xa1\xe3\x83\xb3\xe3\x81\xa7\xe3\x81\x82\xe3"
-b"\x82\x8b\x20\x47\x75\x69\x64\x6f\x20\xe3\x81\xaf\xe3\x81\x93\xe3"
-b"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe3\x82\x92\xe3\x80\x8c\x50\x79"
-b"\x74\x68\x6f\x6e\xe3\x80\x8d\xe3\x81\xa8\xe5\x90\x8d\xe3\x81\xa5"
-b"\xe3\x81\x91\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a"
-b"\xe3\x81\x93\xe3\x81\xae\xe3\x82\x88\xe3\x81\x86\xe3\x81\xaa\xe8"
-b"\x83\x8c\xe6\x99\xaf\xe3\x81\x8b\xe3\x82\x89\xe7\x94\x9f\xe3\x81"
-b"\xbe\xe3\x82\x8c\xe3\x81\x9f\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3"
-b"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa8\xad\xe8\xa8\x88\xe3\x81"
-b"\xaf\xe3\x80\x81\xe3\x80\x8c\xe3\x82\xb7\xe3\x83\xb3\xe3\x83\x97"
-b"\xe3\x83\xab\xe3\x80\x8d\xe3\x81\xa7\xe3\x80\x8c\xe7\xbf\x92\xe5"
-b"\xbe\x97\xe3\x81\x8c\xe5\xae\xb9\xe6\x98\x93\xe3\x80\x8d\xe3\x81"
-b"\xa8\xe3\x81\x84\xe3\x81\x86\xe7\x9b\xae\xe6\xa8\x99\xe3\x81\xab"
-b"\xe9\x87\x8d\xe7\x82\xb9\xe3\x81\x8c\xe7\xbd\xae\xe3\x81\x8b\xe3"
-b"\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80"
-b"\x82\x0a\xe5\xa4\x9a\xe3\x81\x8f\xe3\x81\xae\xe3\x82\xb9\xe3\x82"
-b"\xaf\xe3\x83\xaa\xe3\x83\x97\xe3\x83\x88\xe7\xb3\xbb\xe8\xa8\x80"
-b"\xe8\xaa\x9e\xe3\x81\xa7\xe3\x81\xaf\xe3\x83\xa6\xe3\x83\xbc\xe3"
-b"\x82\xb6\xe3\x81\xae\xe7\x9b\xae\xe5\x85\x88\xe3\x81\xae\xe5\x88"
-b"\xa9\xe4\xbe\xbf\xe6\x80\xa7\xe3\x82\x92\xe5\x84\xaa\xe5\x85\x88"
-b"\xe3\x81\x97\xe3\x81\xa6\xe8\x89\xb2\xe3\x80\x85\xe3\x81\xaa\xe6"
-b"\xa9\x9f\xe8\x83\xbd\xe3\x82\x92\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa6"
-b"\x81\xe7\xb4\xa0\xe3\x81\xa8\xe3\x81\x97\xe3\x81\xa6\xe5\x8f\x96"
-b"\xe3\x82\x8a\xe5\x85\xa5\xe3\x82\x8c\xe3\x82\x8b\xe5\xa0\xb4\xe5"
-b"\x90\x88\xe3\x81\x8c\xe5\xa4\x9a\xe3\x81\x84\xe3\x81\xae\xe3\x81"
-b"\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xe3\x81\xa7\xe3\x81\xaf\xe3\x81\x9d\xe3\x81\x86\xe3\x81\x84"
-b"\xe3\x81\xa3\xe3\x81\x9f\xe5\xb0\x8f\xe7\xb4\xb0\xe5\xb7\xa5\xe3"
-b"\x81\x8c\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x95\xe3\x82\x8c\xe3\x82"
-b"\x8b\xe3\x81\x93\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\x82\xe3\x81\xbe"
-b"\xe3\x82\x8a\xe3\x81\x82\xe3\x82\x8a\xe3\x81\xbe\xe3\x81\x9b\xe3"
-b"\x82\x93\xe3\x80\x82\x0a\xe8\xa8\x80\xe8\xaa\x9e\xe8\x87\xaa\xe4"
-b"\xbd\x93\xe3\x81\xae\xe6\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x9c"
-b"\x80\xe5\xb0\x8f\xe9\x99\x90\xe3\x81\xab\xe6\x8a\xbc\xe3\x81\x95"
-b"\xe3\x81\x88\xe3\x80\x81\xe5\xbf\x85\xe8\xa6\x81\xe3\x81\xaa\xe6"
-b"\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x8b\xa1\xe5\xbc\xb5\xe3\x83"
-b"\xa2\xe3\x82\xb8\xe3\x83\xa5\xe3\x83\xbc\xe3\x83\xab\xe3\x81\xa8"
-b"\xe3\x81\x97\xe3\x81\xa6\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x99\xe3"
-b"\x82\x8b\xe3\x80\x81\xe3\x81\xa8\xe3\x81\x84\xe3\x81\x86\xe3\x81"
-b"\xae\xe3\x81\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe3"
-b"\x83\x9d\xe3\x83\xaa\xe3\x82\xb7\xe3\x83\xbc\xe3\x81\xa7\xe3\x81"
-b"\x99\xe3\x80\x82\x0a\x0a\xe3\x83\x8e\xe3\x81\x8b\xe3\x82\x9a\x20"
-b"\xe3\x83\x88\xe3\x82\x9a\x20\xe3\x83\x88\xe3\x82\xad\xef\xa8\xb6"
-b"\xef\xa8\xb9\x20\xf0\xa1\x9a\xb4\xf0\xaa\x8e\x8c\x20\xe9\xba\x80"
-b"\xe9\xbd\x81\xf0\xa9\x9b\xb0\x0a"),
-'euc_jp': (
-b"\x50\x79\x74\x68\x6f\x6e\x20\xa4\xce\xb3\xab\xc8\xaf\xa4\xcf\xa1"
-b"\xa2\x31\x39\x39\x30\x20\xc7\xaf\xa4\xb4\xa4\xed\xa4\xab\xa4\xe9"
-b"\xb3\xab\xbb\xcf\xa4\xb5\xa4\xec\xa4\xc6\xa4\xa4\xa4\xde\xa4\xb9"
-b"\xa1\xa3\x0a\xb3\xab\xc8\xaf\xbc\xd4\xa4\xce\x20\x47\x75\x69\x64"
-b"\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73\x73\x75\x6d\x20\xa4\xcf\xb6"
-b"\xb5\xb0\xe9\xcd\xd1\xa4\xce\xa5\xd7\xa5\xed\xa5\xb0\xa5\xe9\xa5"
-b"\xdf\xa5\xf3\xa5\xb0\xb8\xc0\xb8\xec\xa1\xd6\x41\x42\x43\xa1\xd7"
-b"\xa4\xce\xb3\xab\xc8\xaf\xa4\xcb\xbb\xb2\xb2\xc3\xa4\xb7\xa4\xc6"
-b"\xa4\xa4\xa4\xde\xa4\xb7\xa4\xbf\xa4\xac\xa1\xa2\x41\x42\x43\x20"
-b"\xa4\xcf\xbc\xc2\xcd\xd1\xbe\xe5\xa4\xce\xcc\xdc\xc5\xaa\xa4\xcb"
-b"\xa4\xcf\xa4\xa2\xa4\xde\xa4\xea\xc5\xac\xa4\xb7\xa4\xc6\xa4\xa4"
-b"\xa4\xde\xa4\xbb\xa4\xf3\xa4\xc7\xa4\xb7\xa4\xbf\xa1\xa3\x0a\xa4"
-b"\xb3\xa4\xce\xa4\xbf\xa4\xe1\xa1\xa2\x47\x75\x69\x64\x6f\x20\xa4"
-b"\xcf\xa4\xe8\xa4\xea\xbc\xc2\xcd\xd1\xc5\xaa\xa4\xca\xa5\xd7\xa5"
-b"\xed\xa5\xb0\xa5\xe9\xa5\xdf\xa5\xf3\xa5\xb0\xb8\xc0\xb8\xec\xa4"
-b"\xce\xb3\xab\xc8\xaf\xa4\xf2\xb3\xab\xbb\xcf\xa4\xb7\xa1\xa2\xb1"
-b"\xd1\xb9\xf1\x20\x42\x42\x53\x20\xca\xfc\xc1\xf7\xa4\xce\xa5\xb3"
-b"\xa5\xe1\xa5\xc7\xa5\xa3\xc8\xd6\xc1\xc8\xa1\xd6\xa5\xe2\xa5\xf3"
-b"\xa5\xc6\xa5\xa3\x20\xa5\xd1\xa5\xa4\xa5\xbd\xa5\xf3\xa1\xd7\xa4"
-b"\xce\xa5\xd5\xa5\xa1\xa5\xf3\xa4\xc7\xa4\xa2\xa4\xeb\x20\x47\x75"
-b"\x69\x64\x6f\x20\xa4\xcf\xa4\xb3\xa4\xce\xb8\xc0\xb8\xec\xa4\xf2"
-b"\xa1\xd6\x50\x79\x74\x68\x6f\x6e\xa1\xd7\xa4\xc8\xcc\xbe\xa4\xc5"
-b"\xa4\xb1\xa4\xde\xa4\xb7\xa4\xbf\xa1\xa3\x0a\xa4\xb3\xa4\xce\xa4"
-b"\xe8\xa4\xa6\xa4\xca\xc7\xd8\xb7\xca\xa4\xab\xa4\xe9\xc0\xb8\xa4"
-b"\xde\xa4\xec\xa4\xbf\x20\x50\x79\x74\x68\x6f\x6e\x20\xa4\xce\xb8"
-b"\xc0\xb8\xec\xc0\xdf\xb7\xd7\xa4\xcf\xa1\xa2\xa1\xd6\xa5\xb7\xa5"
-b"\xf3\xa5\xd7\xa5\xeb\xa1\xd7\xa4\xc7\xa1\xd6\xbd\xac\xc6\xc0\xa4"
-b"\xac\xcd\xc6\xb0\xd7\xa1\xd7\xa4\xc8\xa4\xa4\xa4\xa6\xcc\xdc\xc9"
-b"\xb8\xa4\xcb\xbd\xc5\xc5\xc0\xa4\xac\xc3\xd6\xa4\xab\xa4\xec\xa4"
-b"\xc6\xa4\xa4\xa4\xde\xa4\xb9\xa1\xa3\x0a\xc2\xbf\xa4\xaf\xa4\xce"
-b"\xa5\xb9\xa5\xaf\xa5\xea\xa5\xd7\xa5\xc8\xb7\xcf\xb8\xc0\xb8\xec"
-b"\xa4\xc7\xa4\xcf\xa5\xe6\xa1\xbc\xa5\xb6\xa4\xce\xcc\xdc\xc0\xe8"
-b"\xa4\xce\xcd\xf8\xca\xd8\xc0\xad\xa4\xf2\xcd\xa5\xc0\xe8\xa4\xb7"
-b"\xa4\xc6\xbf\xa7\xa1\xb9\xa4\xca\xb5\xa1\xc7\xbd\xa4\xf2\xb8\xc0"
-b"\xb8\xec\xcd\xd7\xc1\xc7\xa4\xc8\xa4\xb7\xa4\xc6\xbc\xe8\xa4\xea"
-b"\xc6\xfe\xa4\xec\xa4\xeb\xbe\xec\xb9\xe7\xa4\xac\xc2\xbf\xa4\xa4"
-b"\xa4\xce\xa4\xc7\xa4\xb9\xa4\xac\xa1\xa2\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xa4\xc7\xa4\xcf\xa4\xbd\xa4\xa6\xa4\xa4\xa4\xc3\xa4\xbf\xbe"
-b"\xae\xba\xd9\xb9\xa9\xa4\xac\xc4\xc9\xb2\xc3\xa4\xb5\xa4\xec\xa4"
-b"\xeb\xa4\xb3\xa4\xc8\xa4\xcf\xa4\xa2\xa4\xde\xa4\xea\xa4\xa2\xa4"
-b"\xea\xa4\xde\xa4\xbb\xa4\xf3\xa1\xa3\x0a\xb8\xc0\xb8\xec\xbc\xab"
-b"\xc2\xce\xa4\xce\xb5\xa1\xc7\xbd\xa4\xcf\xba\xc7\xbe\xae\xb8\xc2"
-b"\xa4\xcb\xb2\xa1\xa4\xb5\xa4\xa8\xa1\xa2\xc9\xac\xcd\xd7\xa4\xca"
-b"\xb5\xa1\xc7\xbd\xa4\xcf\xb3\xc8\xc4\xa5\xa5\xe2\xa5\xb8\xa5\xe5"
-b"\xa1\xbc\xa5\xeb\xa4\xc8\xa4\xb7\xa4\xc6\xc4\xc9\xb2\xc3\xa4\xb9"
-b"\xa4\xeb\xa1\xa2\xa4\xc8\xa4\xa4\xa4\xa6\xa4\xce\xa4\xac\x20\x50"
-b"\x79\x74\x68\x6f\x6e\x20\xa4\xce\xa5\xdd\xa5\xea\xa5\xb7\xa1\xbc"
-b"\xa4\xc7\xa4\xb9\xa1\xa3\x0a\x0a",
-b"\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
-b"\xe3\x81\xaf\xe3\x80\x81\x31\x39\x39\x30\x20\xe5\xb9\xb4\xe3\x81"
-b"\x94\xe3\x82\x8d\xe3\x81\x8b\xe3\x82\x89\xe9\x96\x8b\xe5\xa7\x8b"
-b"\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3"
-b"\x81\x99\xe3\x80\x82\x0a\xe9\x96\x8b\xe7\x99\xba\xe8\x80\x85\xe3"
-b"\x81\xae\x20\x47\x75\x69\x64\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73"
-b"\x73\x75\x6d\x20\xe3\x81\xaf\xe6\x95\x99\xe8\x82\xb2\xe7\x94\xa8"
-b"\xe3\x81\xae\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83\xa9\xe3"
-b"\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e\xe3\x80"
-b"\x8c\x41\x42\x43\xe3\x80\x8d\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
-b"\xe3\x81\xab\xe5\x8f\x82\xe5\x8a\xa0\xe3\x81\x97\xe3\x81\xa6\xe3"
-b"\x81\x84\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x81\x8c\xe3\x80"
-b"\x81\x41\x42\x43\x20\xe3\x81\xaf\xe5\xae\x9f\xe7\x94\xa8\xe4\xb8"
-b"\x8a\xe3\x81\xae\xe7\x9b\xae\xe7\x9a\x84\xe3\x81\xab\xe3\x81\xaf"
-b"\xe3\x81\x82\xe3\x81\xbe\xe3\x82\x8a\xe9\x81\xa9\xe3\x81\x97\xe3"
-b"\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x81"
-b"\xa7\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a\xe3\x81\x93\xe3\x81"
-b"\xae\xe3\x81\x9f\xe3\x82\x81\xe3\x80\x81\x47\x75\x69\x64\x6f\x20"
-b"\xe3\x81\xaf\xe3\x82\x88\xe3\x82\x8a\xe5\xae\x9f\xe7\x94\xa8\xe7"
-b"\x9a\x84\xe3\x81\xaa\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83"
-b"\xa9\xe3\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e"
-b"\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba\xe3\x82\x92\xe9\x96\x8b\xe5"
-b"\xa7\x8b\xe3\x81\x97\xe3\x80\x81\xe8\x8b\xb1\xe5\x9b\xbd\x20\x42"
-b"\x42\x53\x20\xe6\x94\xbe\xe9\x80\x81\xe3\x81\xae\xe3\x82\xb3\xe3"
-b"\x83\xa1\xe3\x83\x87\xe3\x82\xa3\xe7\x95\xaa\xe7\xb5\x84\xe3\x80"
-b"\x8c\xe3\x83\xa2\xe3\x83\xb3\xe3\x83\x86\xe3\x82\xa3\x20\xe3\x83"
-b"\x91\xe3\x82\xa4\xe3\x82\xbd\xe3\x83\xb3\xe3\x80\x8d\xe3\x81\xae"
-b"\xe3\x83\x95\xe3\x82\xa1\xe3\x83\xb3\xe3\x81\xa7\xe3\x81\x82\xe3"
-b"\x82\x8b\x20\x47\x75\x69\x64\x6f\x20\xe3\x81\xaf\xe3\x81\x93\xe3"
-b"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe3\x82\x92\xe3\x80\x8c\x50\x79"
-b"\x74\x68\x6f\x6e\xe3\x80\x8d\xe3\x81\xa8\xe5\x90\x8d\xe3\x81\xa5"
-b"\xe3\x81\x91\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a"
-b"\xe3\x81\x93\xe3\x81\xae\xe3\x82\x88\xe3\x81\x86\xe3\x81\xaa\xe8"
-b"\x83\x8c\xe6\x99\xaf\xe3\x81\x8b\xe3\x82\x89\xe7\x94\x9f\xe3\x81"
-b"\xbe\xe3\x82\x8c\xe3\x81\x9f\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3"
-b"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa8\xad\xe8\xa8\x88\xe3\x81"
-b"\xaf\xe3\x80\x81\xe3\x80\x8c\xe3\x82\xb7\xe3\x83\xb3\xe3\x83\x97"
-b"\xe3\x83\xab\xe3\x80\x8d\xe3\x81\xa7\xe3\x80\x8c\xe7\xbf\x92\xe5"
-b"\xbe\x97\xe3\x81\x8c\xe5\xae\xb9\xe6\x98\x93\xe3\x80\x8d\xe3\x81"
-b"\xa8\xe3\x81\x84\xe3\x81\x86\xe7\x9b\xae\xe6\xa8\x99\xe3\x81\xab"
-b"\xe9\x87\x8d\xe7\x82\xb9\xe3\x81\x8c\xe7\xbd\xae\xe3\x81\x8b\xe3"
-b"\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80"
-b"\x82\x0a\xe5\xa4\x9a\xe3\x81\x8f\xe3\x81\xae\xe3\x82\xb9\xe3\x82"
-b"\xaf\xe3\x83\xaa\xe3\x83\x97\xe3\x83\x88\xe7\xb3\xbb\xe8\xa8\x80"
-b"\xe8\xaa\x9e\xe3\x81\xa7\xe3\x81\xaf\xe3\x83\xa6\xe3\x83\xbc\xe3"
-b"\x82\xb6\xe3\x81\xae\xe7\x9b\xae\xe5\x85\x88\xe3\x81\xae\xe5\x88"
-b"\xa9\xe4\xbe\xbf\xe6\x80\xa7\xe3\x82\x92\xe5\x84\xaa\xe5\x85\x88"
-b"\xe3\x81\x97\xe3\x81\xa6\xe8\x89\xb2\xe3\x80\x85\xe3\x81\xaa\xe6"
-b"\xa9\x9f\xe8\x83\xbd\xe3\x82\x92\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa6"
-b"\x81\xe7\xb4\xa0\xe3\x81\xa8\xe3\x81\x97\xe3\x81\xa6\xe5\x8f\x96"
-b"\xe3\x82\x8a\xe5\x85\xa5\xe3\x82\x8c\xe3\x82\x8b\xe5\xa0\xb4\xe5"
-b"\x90\x88\xe3\x81\x8c\xe5\xa4\x9a\xe3\x81\x84\xe3\x81\xae\xe3\x81"
-b"\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xe3\x81\xa7\xe3\x81\xaf\xe3\x81\x9d\xe3\x81\x86\xe3\x81\x84"
-b"\xe3\x81\xa3\xe3\x81\x9f\xe5\xb0\x8f\xe7\xb4\xb0\xe5\xb7\xa5\xe3"
-b"\x81\x8c\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x95\xe3\x82\x8c\xe3\x82"
-b"\x8b\xe3\x81\x93\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\x82\xe3\x81\xbe"
-b"\xe3\x82\x8a\xe3\x81\x82\xe3\x82\x8a\xe3\x81\xbe\xe3\x81\x9b\xe3"
-b"\x82\x93\xe3\x80\x82\x0a\xe8\xa8\x80\xe8\xaa\x9e\xe8\x87\xaa\xe4"
-b"\xbd\x93\xe3\x81\xae\xe6\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x9c"
-b"\x80\xe5\xb0\x8f\xe9\x99\x90\xe3\x81\xab\xe6\x8a\xbc\xe3\x81\x95"
-b"\xe3\x81\x88\xe3\x80\x81\xe5\xbf\x85\xe8\xa6\x81\xe3\x81\xaa\xe6"
-b"\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x8b\xa1\xe5\xbc\xb5\xe3\x83"
-b"\xa2\xe3\x82\xb8\xe3\x83\xa5\xe3\x83\xbc\xe3\x83\xab\xe3\x81\xa8"
-b"\xe3\x81\x97\xe3\x81\xa6\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x99\xe3"
-b"\x82\x8b\xe3\x80\x81\xe3\x81\xa8\xe3\x81\x84\xe3\x81\x86\xe3\x81"
-b"\xae\xe3\x81\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe3"
-b"\x83\x9d\xe3\x83\xaa\xe3\x82\xb7\xe3\x83\xbc\xe3\x81\xa7\xe3\x81"
-b"\x99\xe3\x80\x82\x0a\x0a"),
-'euc_kr': (
-b"\xa1\xdd\x20\xc6\xc4\xc0\xcc\xbd\xe3\x28\x50\x79\x74\x68\x6f\x6e"
-b"\x29\xc0\xba\x20\xb9\xe8\xbf\xec\xb1\xe2\x20\xbd\xb1\xb0\xed\x2c"
-b"\x20\xb0\xad\xb7\xc2\xc7\xd1\x20\xc7\xc1\xb7\xce\xb1\xd7\xb7\xa1"
-b"\xb9\xd6\x20\xbe\xf0\xbe\xee\xc0\xd4\xb4\xcf\xb4\xd9\x2e\x20\xc6"
-b"\xc4\xc0\xcc\xbd\xe3\xc0\xba\x0a\xc8\xbf\xc0\xb2\xc0\xfb\xc0\xce"
-b"\x20\xb0\xed\xbc\xf6\xc1\xd8\x20\xb5\xa5\xc0\xcc\xc5\xcd\x20\xb1"
-b"\xb8\xc1\xb6\xbf\xcd\x20\xb0\xa3\xb4\xdc\xc7\xcf\xc1\xf6\xb8\xb8"
-b"\x20\xc8\xbf\xc0\xb2\xc0\xfb\xc0\xce\x20\xb0\xb4\xc3\xbc\xc1\xf6"
-b"\xc7\xe2\xc7\xc1\xb7\xce\xb1\xd7\xb7\xa1\xb9\xd6\xc0\xbb\x0a\xc1"
-b"\xf6\xbf\xf8\xc7\xd5\xb4\xcf\xb4\xd9\x2e\x20\xc6\xc4\xc0\xcc\xbd"
-b"\xe3\xc0\xc7\x20\xbf\xec\xbe\xc6\x28\xe9\xd0\xe4\xba\x29\xc7\xd1"
-b"\x20\xb9\xae\xb9\xfd\xb0\xfa\x20\xb5\xbf\xc0\xfb\x20\xc5\xb8\xc0"
-b"\xcc\xc7\xce\x2c\x20\xb1\xd7\xb8\xae\xb0\xed\x20\xc0\xce\xc5\xcd"
-b"\xc7\xc1\xb8\xae\xc6\xc3\x0a\xc8\xaf\xb0\xe6\xc0\xba\x20\xc6\xc4"
-b"\xc0\xcc\xbd\xe3\xc0\xbb\x20\xbd\xba\xc5\xa9\xb8\xb3\xc6\xc3\xb0"
-b"\xfa\x20\xbf\xa9\xb7\xaf\x20\xba\xd0\xbe\xdf\xbf\xa1\xbc\xad\xbf"
-b"\xcd\x20\xb4\xeb\xba\xce\xba\xd0\xc0\xc7\x20\xc7\xc3\xb7\xa7\xc6"
-b"\xfb\xbf\xa1\xbc\xad\xc0\xc7\x20\xba\xfc\xb8\xa5\x0a\xbe\xd6\xc7"
-b"\xc3\xb8\xae\xc4\xc9\xc0\xcc\xbc\xc7\x20\xb0\xb3\xb9\xdf\xc0\xbb"
-b"\x20\xc7\xd2\x20\xbc\xf6\x20\xc0\xd6\xb4\xc2\x20\xc0\xcc\xbb\xf3"
-b"\xc0\xfb\xc0\xce\x20\xbe\xf0\xbe\xee\xb7\xce\x20\xb8\xb8\xb5\xe9"
-b"\xbe\xee\xc1\xdd\xb4\xcf\xb4\xd9\x2e\x0a\x0a\xa1\xd9\xc3\xb9\xb0"
-b"\xa1\xb3\xa1\x3a\x20\xb3\xaf\xbe\xc6\xb6\xf3\x20\xa4\xd4\xa4\xb6"
-b"\xa4\xd0\xa4\xd4\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4\xbe\xb1\x7e\x20"
-b"\xa4\xd4\xa4\xa4\xa4\xd2\xa4\xb7\xc5\xad\x21\x20\xa4\xd4\xa4\xa8"
-b"\xa4\xd1\xa4\xb7\xb1\xdd\xbe\xf8\xc0\xcc\x20\xc0\xfc\xa4\xd4\xa4"
-b"\xbe\xa4\xc8\xa4\xb2\xb4\xcf\xb4\xd9\x2e\x20\xa4\xd4\xa4\xb2\xa4"
-b"\xce\xa4\xaa\x2e\x20\xb1\xd7\xb7\xb1\xb0\xc5\x20\xa4\xd4\xa4\xb7"
-b"\xa4\xd1\xa4\xb4\xb4\xd9\x2e\x0a",
-b"\xe2\x97\x8e\x20\xed\x8c\x8c\xec\x9d\xb4\xec\x8d\xac\x28\x50\x79"
-b"\x74\x68\x6f\x6e\x29\xec\x9d\x80\x20\xeb\xb0\xb0\xec\x9a\xb0\xea"
-b"\xb8\xb0\x20\xec\x89\xbd\xea\xb3\xa0\x2c\x20\xea\xb0\x95\xeb\xa0"
-b"\xa5\xed\x95\x9c\x20\xed\x94\x84\xeb\xa1\x9c\xea\xb7\xb8\xeb\x9e"
-b"\x98\xeb\xb0\x8d\x20\xec\x96\xb8\xec\x96\xb4\xec\x9e\x85\xeb\x8b"
-b"\x88\xeb\x8b\xa4\x2e\x20\xed\x8c\x8c\xec\x9d\xb4\xec\x8d\xac\xec"
-b"\x9d\x80\x0a\xed\x9a\xa8\xec\x9c\xa8\xec\xa0\x81\xec\x9d\xb8\x20"
-b"\xea\xb3\xa0\xec\x88\x98\xec\xa4\x80\x20\xeb\x8d\xb0\xec\x9d\xb4"
-b"\xed\x84\xb0\x20\xea\xb5\xac\xec\xa1\xb0\xec\x99\x80\x20\xea\xb0"
-b"\x84\xeb\x8b\xa8\xed\x95\x98\xec\xa7\x80\xeb\xa7\x8c\x20\xed\x9a"
-b"\xa8\xec\x9c\xa8\xec\xa0\x81\xec\x9d\xb8\x20\xea\xb0\x9d\xec\xb2"
-b"\xb4\xec\xa7\x80\xed\x96\xa5\xed\x94\x84\xeb\xa1\x9c\xea\xb7\xb8"
-b"\xeb\x9e\x98\xeb\xb0\x8d\xec\x9d\x84\x0a\xec\xa7\x80\xec\x9b\x90"
-b"\xed\x95\xa9\xeb\x8b\x88\xeb\x8b\xa4\x2e\x20\xed\x8c\x8c\xec\x9d"
-b"\xb4\xec\x8d\xac\xec\x9d\x98\x20\xec\x9a\xb0\xec\x95\x84\x28\xe5"
-b"\x84\xaa\xe9\x9b\x85\x29\xed\x95\x9c\x20\xeb\xac\xb8\xeb\xb2\x95"
-b"\xea\xb3\xbc\x20\xeb\x8f\x99\xec\xa0\x81\x20\xed\x83\x80\xec\x9d"
-b"\xb4\xed\x95\x91\x2c\x20\xea\xb7\xb8\xeb\xa6\xac\xea\xb3\xa0\x20"
-b"\xec\x9d\xb8\xed\x84\xb0\xed\x94\x84\xeb\xa6\xac\xed\x8c\x85\x0a"
-b"\xed\x99\x98\xea\xb2\xbd\xec\x9d\x80\x20\xed\x8c\x8c\xec\x9d\xb4"
-b"\xec\x8d\xac\xec\x9d\x84\x20\xec\x8a\xa4\xed\x81\xac\xeb\xa6\xbd"
-b"\xed\x8c\x85\xea\xb3\xbc\x20\xec\x97\xac\xeb\x9f\xac\x20\xeb\xb6"
-b"\x84\xec\x95\xbc\xec\x97\x90\xec\x84\x9c\xec\x99\x80\x20\xeb\x8c"
-b"\x80\xeb\xb6\x80\xeb\xb6\x84\xec\x9d\x98\x20\xed\x94\x8c\xeb\x9e"
-b"\xab\xed\x8f\xbc\xec\x97\x90\xec\x84\x9c\xec\x9d\x98\x20\xeb\xb9"
-b"\xa0\xeb\xa5\xb8\x0a\xec\x95\xa0\xed\x94\x8c\xeb\xa6\xac\xec\xbc"
-b"\x80\xec\x9d\xb4\xec\x85\x98\x20\xea\xb0\x9c\xeb\xb0\x9c\xec\x9d"
-b"\x84\x20\xed\x95\xa0\x20\xec\x88\x98\x20\xec\x9e\x88\xeb\x8a\x94"
-b"\x20\xec\x9d\xb4\xec\x83\x81\xec\xa0\x81\xec\x9d\xb8\x20\xec\x96"
-b"\xb8\xec\x96\xb4\xeb\xa1\x9c\x20\xeb\xa7\x8c\xeb\x93\xa4\xec\x96"
-b"\xb4\xec\xa4\x8d\xeb\x8b\x88\xeb\x8b\xa4\x2e\x0a\x0a\xe2\x98\x86"
-b"\xec\xb2\xab\xea\xb0\x80\xeb\x81\x9d\x3a\x20\xeb\x82\xa0\xec\x95"
-b"\x84\xeb\x9d\xbc\x20\xec\x93\x94\xec\x93\x94\xec\x93\xa9\x7e\x20"
-b"\xeb\x8b\x81\xed\x81\xbc\x21\x20\xeb\x9c\xbd\xea\xb8\x88\xec\x97"
-b"\x86\xec\x9d\xb4\x20\xec\xa0\x84\xed\x99\xa5\xeb\x8b\x88\xeb\x8b"
-b"\xa4\x2e\x20\xeb\xb7\x81\x2e\x20\xea\xb7\xb8\xeb\x9f\xb0\xea\xb1"
-b"\xb0\x20\xec\x9d\x8e\xeb\x8b\xa4\x2e\x0a"),
-'gb18030': (
-b"\x50\x79\x74\x68\x6f\x6e\xa3\xa8\xc5\xc9\xc9\xad\xa3\xa9\xd3\xef"
-b"\xd1\xd4\xca\xc7\xd2\xbb\xd6\xd6\xb9\xa6\xc4\xdc\xc7\xbf\xb4\xf3"
-b"\xb6\xf8\xcd\xea\xc9\xc6\xb5\xc4\xcd\xa8\xd3\xc3\xd0\xcd\xbc\xc6"
-b"\xcb\xe3\xbb\xfa\xb3\xcc\xd0\xf2\xc9\xe8\xbc\xc6\xd3\xef\xd1\xd4"
-b"\xa3\xac\x0a\xd2\xd1\xbe\xad\xbe\xdf\xd3\xd0\xca\xae\xb6\xe0\xc4"
-b"\xea\xb5\xc4\xb7\xa2\xd5\xb9\xc0\xfa\xca\xb7\xa3\xac\xb3\xc9\xca"
-b"\xec\xc7\xd2\xce\xc8\xb6\xa8\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1"
-b"\xd4\xbe\xdf\xd3\xd0\xb7\xc7\xb3\xa3\xbc\xf2\xbd\xdd\xb6\xf8\xc7"
-b"\xe5\xce\xfa\x0a\xb5\xc4\xd3\xef\xb7\xa8\xcc\xd8\xb5\xe3\xa3\xac"
-b"\xca\xca\xba\xcf\xcd\xea\xb3\xc9\xb8\xf7\xd6\xd6\xb8\xdf\xb2\xe3"
-b"\xc8\xce\xce\xf1\xa3\xac\xbc\xb8\xba\xf5\xbf\xc9\xd2\xd4\xd4\xda"
-b"\xcb\xf9\xd3\xd0\xb5\xc4\xb2\xd9\xd7\xf7\xcf\xb5\xcd\xb3\xd6\xd0"
-b"\x0a\xd4\xcb\xd0\xd0\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1\xd4\xbc"
-b"\xf2\xb5\xa5\xb6\xf8\xc7\xbf\xb4\xf3\xa3\xac\xca\xca\xba\xcf\xb8"
-b"\xf7\xd6\xd6\xc8\xcb\xca\xbf\xd1\xa7\xcf\xb0\xca\xb9\xd3\xc3\xa1"
-b"\xa3\xc4\xbf\xc7\xb0\xa3\xac\xbb\xf9\xd3\xda\xd5\xe2\x0a\xd6\xd6"
-b"\xd3\xef\xd1\xd4\xb5\xc4\xcf\xe0\xb9\xd8\xbc\xbc\xca\xf5\xd5\xfd"
-b"\xd4\xda\xb7\xc9\xcb\xd9\xb5\xc4\xb7\xa2\xd5\xb9\xa3\xac\xd3\xc3"
-b"\xbb\xa7\xca\xfd\xc1\xbf\xbc\xb1\xbe\xe7\xc0\xa9\xb4\xf3\xa3\xac"
-b"\xcf\xe0\xb9\xd8\xb5\xc4\xd7\xca\xd4\xb4\xb7\xc7\xb3\xa3\xb6\xe0"
-b"\xa1\xa3\x0a\xc8\xe7\xba\xce\xd4\xda\x20\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xd6\xd0\xca\xb9\xd3\xc3\xbc\xc8\xd3\xd0\xb5\xc4\x20\x43\x20"
-b"\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xa1\xa1\xd4\xda\xd9\x59\xd3"
-b"\x8d\xbf\xc6\xbc\xbc\xbf\xec\xcb\xd9\xb0\x6c\xd5\xb9\xb5\xc4\xbd"
-b"\xf1\xcc\xec\x2c\x20\xe9\x5f\xb0\x6c\xbc\xb0\x9c\x79\xd4\x87\xdc"
-b"\x9b\xf3\x77\xb5\xc4\xcb\xd9\xb6\xc8\xca\xc7\xb2\xbb\xc8\xdd\xba"
-b"\xf6\xd2\x95\xb5\xc4\x0a\xd5\x6e\xee\x7d\x2e\x20\x9e\xe9\xbc\xd3"
-b"\xbf\xec\xe9\x5f\xb0\x6c\xbc\xb0\x9c\x79\xd4\x87\xb5\xc4\xcb\xd9"
-b"\xb6\xc8\x2c\x20\xce\xd2\x82\x83\xb1\xe3\xb3\xa3\xcf\xa3\xcd\xfb"
-b"\xc4\xdc\xc0\xfb\xd3\xc3\xd2\xbb\xd0\xa9\xd2\xd1\xe9\x5f\xb0\x6c"
-b"\xba\xc3\xb5\xc4\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\x81\x4b"
-b"\xd3\xd0\xd2\xbb\x82\x80\x20\x66\x61\x73\x74\x20\x70\x72\x6f\x74"
-b"\x6f\x74\x79\x70\x69\x6e\x67\x20\xb5\xc4\x20\x70\x72\x6f\x67\x72"
-b"\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x20"
-b"\xbf\xc9\x0a\xb9\xa9\xca\xb9\xd3\xc3\x2e\x20\xc4\xbf\xc7\xb0\xd3"
-b"\xd0\xd4\x53\xd4\x53\xb6\xe0\xb6\xe0\xb5\xc4\x20\x6c\x69\x62\x72"
-b"\x61\x72\x79\x20\xca\xc7\xd2\xd4\x20\x43\x20\x8c\x91\xb3\xc9\x2c"
-b"\x20\xb6\xf8\x20\x50\x79\x74\x68\x6f\x6e\x20\xca\xc7\xd2\xbb\x82"
-b"\x80\x0a\x66\x61\x73\x74\x20\x70\x72\x6f\x74\x6f\x74\x79\x70\x69"
-b"\x6e\x67\x20\xb5\xc4\x20\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e"
-b"\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x2e\x20\xb9\xca\xce\xd2"
-b"\x82\x83\xcf\xa3\xcd\xfb\xc4\xdc\x8c\xa2\xbc\xc8\xd3\xd0\xb5\xc4"
-b"\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xc4\xc3\xb5\xbd\x20"
-b"\x50\x79\x74\x68\x6f\x6e\x20\xb5\xc4\xad\x68\xbe\xb3\xd6\xd0\x9c"
-b"\x79\xd4\x87\xbc\xb0\xd5\xfb\xba\xcf\x2e\x20\xc6\xe4\xd6\xd0\xd7"
-b"\xee\xd6\xf7\xd2\xaa\xd2\xb2\xca\xc7\xce\xd2\x82\x83\xcb\xf9\x0a"
-b"\xd2\xaa\xd3\x91\xd5\x93\xb5\xc4\x86\x96\xee\x7d\xbe\xcd\xca\xc7"
-b"\x3a\x0a\x83\x35\xc7\x31\x83\x33\x9a\x33\x83\x32\xb1\x31\x83\x33"
-b"\x95\x31\x20\x82\x37\xd1\x36\x83\x30\x8c\x34\x83\x36\x84\x33\x20"
-b"\x82\x38\x89\x35\x82\x38\xfb\x36\x83\x33\x95\x35\x20\x83\x33\xd5"
-b"\x31\x82\x39\x81\x35\x20\x83\x30\xfd\x39\x83\x33\x86\x30\x20\x83"
-b"\x34\xdc\x33\x83\x35\xf6\x37\x83\x35\x97\x35\x20\x83\x35\xf9\x35"
-b"\x83\x30\x91\x39\x82\x38\x83\x39\x82\x39\xfc\x33\x83\x30\xf0\x34"
-b"\x20\x83\x32\xeb\x39\x83\x32\xeb\x35\x82\x39\x83\x39\x2e\x0a\x0a",
-b"\x50\x79\x74\x68\x6f\x6e\xef\xbc\x88\xe6\xb4\xbe\xe6\xa3\xae\xef"
-b"\xbc\x89\xe8\xaf\xad\xe8\xa8\x80\xe6\x98\xaf\xe4\xb8\x80\xe7\xa7"
-b"\x8d\xe5\x8a\x9f\xe8\x83\xbd\xe5\xbc\xba\xe5\xa4\xa7\xe8\x80\x8c"
-b"\xe5\xae\x8c\xe5\x96\x84\xe7\x9a\x84\xe9\x80\x9a\xe7\x94\xa8\xe5"
-b"\x9e\x8b\xe8\xae\xa1\xe7\xae\x97\xe6\x9c\xba\xe7\xa8\x8b\xe5\xba"
-b"\x8f\xe8\xae\xbe\xe8\xae\xa1\xe8\xaf\xad\xe8\xa8\x80\xef\xbc\x8c"
-b"\x0a\xe5\xb7\xb2\xe7\xbb\x8f\xe5\x85\xb7\xe6\x9c\x89\xe5\x8d\x81"
-b"\xe5\xa4\x9a\xe5\xb9\xb4\xe7\x9a\x84\xe5\x8f\x91\xe5\xb1\x95\xe5"
-b"\x8e\x86\xe5\x8f\xb2\xef\xbc\x8c\xe6\x88\x90\xe7\x86\x9f\xe4\xb8"
-b"\x94\xe7\xa8\xb3\xe5\xae\x9a\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d"
-b"\xe8\xaf\xad\xe8\xa8\x80\xe5\x85\xb7\xe6\x9c\x89\xe9\x9d\x9e\xe5"
-b"\xb8\xb8\xe7\xae\x80\xe6\x8d\xb7\xe8\x80\x8c\xe6\xb8\x85\xe6\x99"
-b"\xb0\x0a\xe7\x9a\x84\xe8\xaf\xad\xe6\xb3\x95\xe7\x89\xb9\xe7\x82"
-b"\xb9\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\xae\x8c\xe6\x88\x90"
-b"\xe5\x90\x84\xe7\xa7\x8d\xe9\xab\x98\xe5\xb1\x82\xe4\xbb\xbb\xe5"
-b"\x8a\xa1\xef\xbc\x8c\xe5\x87\xa0\xe4\xb9\x8e\xe5\x8f\xaf\xe4\xbb"
-b"\xa5\xe5\x9c\xa8\xe6\x89\x80\xe6\x9c\x89\xe7\x9a\x84\xe6\x93\x8d"
-b"\xe4\xbd\x9c\xe7\xb3\xbb\xe7\xbb\x9f\xe4\xb8\xad\x0a\xe8\xbf\x90"
-b"\xe8\xa1\x8c\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d\xe8\xaf\xad\xe8"
-b"\xa8\x80\xe7\xae\x80\xe5\x8d\x95\xe8\x80\x8c\xe5\xbc\xba\xe5\xa4"
-b"\xa7\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\x90\x84\xe7\xa7\x8d"
-b"\xe4\xba\xba\xe5\xa3\xab\xe5\xad\xa6\xe4\xb9\xa0\xe4\xbd\xbf\xe7"
-b"\x94\xa8\xe3\x80\x82\xe7\x9b\xae\xe5\x89\x8d\xef\xbc\x8c\xe5\x9f"
-b"\xba\xe4\xba\x8e\xe8\xbf\x99\x0a\xe7\xa7\x8d\xe8\xaf\xad\xe8\xa8"
-b"\x80\xe7\x9a\x84\xe7\x9b\xb8\xe5\x85\xb3\xe6\x8a\x80\xe6\x9c\xaf"
-b"\xe6\xad\xa3\xe5\x9c\xa8\xe9\xa3\x9e\xe9\x80\x9f\xe7\x9a\x84\xe5"
-b"\x8f\x91\xe5\xb1\x95\xef\xbc\x8c\xe7\x94\xa8\xe6\x88\xb7\xe6\x95"
-b"\xb0\xe9\x87\x8f\xe6\x80\xa5\xe5\x89\xa7\xe6\x89\xa9\xe5\xa4\xa7"
-b"\xef\xbc\x8c\xe7\x9b\xb8\xe5\x85\xb3\xe7\x9a\x84\xe8\xb5\x84\xe6"
-b"\xba\x90\xe9\x9d\x9e\xe5\xb8\xb8\xe5\xa4\x9a\xe3\x80\x82\x0a\xe5"
-b"\xa6\x82\xe4\xbd\x95\xe5\x9c\xa8\x20\x50\x79\x74\x68\x6f\x6e\x20"
-b"\xe4\xb8\xad\xe4\xbd\xbf\xe7\x94\xa8\xe6\x97\xa2\xe6\x9c\x89\xe7"
-b"\x9a\x84\x20\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xe3\x80"
-b"\x80\xe5\x9c\xa8\xe8\xb3\x87\xe8\xa8\x8a\xe7\xa7\x91\xe6\x8a\x80"
-b"\xe5\xbf\xab\xe9\x80\x9f\xe7\x99\xbc\xe5\xb1\x95\xe7\x9a\x84\xe4"
-b"\xbb\x8a\xe5\xa4\xa9\x2c\x20\xe9\x96\x8b\xe7\x99\xbc\xe5\x8f\x8a"
-b"\xe6\xb8\xac\xe8\xa9\xa6\xe8\xbb\x9f\xe9\xab\x94\xe7\x9a\x84\xe9"
-b"\x80\x9f\xe5\xba\xa6\xe6\x98\xaf\xe4\xb8\x8d\xe5\xae\xb9\xe5\xbf"
-b"\xbd\xe8\xa6\x96\xe7\x9a\x84\x0a\xe8\xaa\xb2\xe9\xa1\x8c\x2e\x20"
-b"\xe7\x82\xba\xe5\x8a\xa0\xe5\xbf\xab\xe9\x96\x8b\xe7\x99\xbc\xe5"
-b"\x8f\x8a\xe6\xb8\xac\xe8\xa9\xa6\xe7\x9a\x84\xe9\x80\x9f\xe5\xba"
-b"\xa6\x2c\x20\xe6\x88\x91\xe5\x80\x91\xe4\xbe\xbf\xe5\xb8\xb8\xe5"
-b"\xb8\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\x88\xa9\xe7\x94\xa8\xe4\xb8"
-b"\x80\xe4\xba\x9b\xe5\xb7\xb2\xe9\x96\x8b\xe7\x99\xbc\xe5\xa5\xbd"
-b"\xe7\x9a\x84\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\xe4\xb8\xa6"
-b"\xe6\x9c\x89\xe4\xb8\x80\xe5\x80\x8b\x20\x66\x61\x73\x74\x20\x70"
-b"\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20\x70"
-b"\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75"
-b"\x61\x67\x65\x20\xe5\x8f\xaf\x0a\xe4\xbe\x9b\xe4\xbd\xbf\xe7\x94"
-b"\xa8\x2e\x20\xe7\x9b\xae\xe5\x89\x8d\xe6\x9c\x89\xe8\xa8\xb1\xe8"
-b"\xa8\xb1\xe5\xa4\x9a\xe5\xa4\x9a\xe7\x9a\x84\x20\x6c\x69\x62\x72"
-b"\x61\x72\x79\x20\xe6\x98\xaf\xe4\xbb\xa5\x20\x43\x20\xe5\xaf\xab"
-b"\xe6\x88\x90\x2c\x20\xe8\x80\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20"
-b"\xe6\x98\xaf\xe4\xb8\x80\xe5\x80\x8b\x0a\x66\x61\x73\x74\x20\x70"
-b"\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20\x70"
-b"\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75"
-b"\x61\x67\x65\x2e\x20\xe6\x95\x85\xe6\x88\x91\xe5\x80\x91\xe5\xb8"
-b"\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\xb0\x87\xe6\x97\xa2\xe6\x9c\x89"
-b"\xe7\x9a\x84\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xe6\x8b"
-b"\xbf\xe5\x88\xb0\x20\x50\x79\x74\x68\x6f\x6e\x20\xe7\x9a\x84\xe7"
-b"\x92\xb0\xe5\xa2\x83\xe4\xb8\xad\xe6\xb8\xac\xe8\xa9\xa6\xe5\x8f"
-b"\x8a\xe6\x95\xb4\xe5\x90\x88\x2e\x20\xe5\x85\xb6\xe4\xb8\xad\xe6"
-b"\x9c\x80\xe4\xb8\xbb\xe8\xa6\x81\xe4\xb9\x9f\xe6\x98\xaf\xe6\x88"
-b"\x91\xe5\x80\x91\xe6\x89\x80\x0a\xe8\xa6\x81\xe8\xa8\x8e\xe8\xab"
-b"\x96\xe7\x9a\x84\xe5\x95\x8f\xe9\xa1\x8c\xe5\xb0\xb1\xe6\x98\xaf"
-b"\x3a\x0a\xed\x8c\x8c\xec\x9d\xb4\xec\x8d\xac\xec\x9d\x80\x20\xea"
-b"\xb0\x95\xeb\xa0\xa5\xed\x95\x9c\x20\xea\xb8\xb0\xeb\x8a\xa5\xec"
-b"\x9d\x84\x20\xec\xa7\x80\xeb\x8b\x8c\x20\xeb\xb2\x94\xec\x9a\xa9"
-b"\x20\xec\xbb\xb4\xed\x93\xa8\xed\x84\xb0\x20\xed\x94\x84\xeb\xa1"
-b"\x9c\xea\xb7\xb8\xeb\x9e\x98\xeb\xb0\x8d\x20\xec\x96\xb8\xec\x96"
-b"\xb4\xeb\x8b\xa4\x2e\x0a\x0a"),
-'gb2312': (
-b"\x50\x79\x74\x68\x6f\x6e\xa3\xa8\xc5\xc9\xc9\xad\xa3\xa9\xd3\xef"
-b"\xd1\xd4\xca\xc7\xd2\xbb\xd6\xd6\xb9\xa6\xc4\xdc\xc7\xbf\xb4\xf3"
-b"\xb6\xf8\xcd\xea\xc9\xc6\xb5\xc4\xcd\xa8\xd3\xc3\xd0\xcd\xbc\xc6"
-b"\xcb\xe3\xbb\xfa\xb3\xcc\xd0\xf2\xc9\xe8\xbc\xc6\xd3\xef\xd1\xd4"
-b"\xa3\xac\x0a\xd2\xd1\xbe\xad\xbe\xdf\xd3\xd0\xca\xae\xb6\xe0\xc4"
-b"\xea\xb5\xc4\xb7\xa2\xd5\xb9\xc0\xfa\xca\xb7\xa3\xac\xb3\xc9\xca"
-b"\xec\xc7\xd2\xce\xc8\xb6\xa8\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1"
-b"\xd4\xbe\xdf\xd3\xd0\xb7\xc7\xb3\xa3\xbc\xf2\xbd\xdd\xb6\xf8\xc7"
-b"\xe5\xce\xfa\x0a\xb5\xc4\xd3\xef\xb7\xa8\xcc\xd8\xb5\xe3\xa3\xac"
-b"\xca\xca\xba\xcf\xcd\xea\xb3\xc9\xb8\xf7\xd6\xd6\xb8\xdf\xb2\xe3"
-b"\xc8\xce\xce\xf1\xa3\xac\xbc\xb8\xba\xf5\xbf\xc9\xd2\xd4\xd4\xda"
-b"\xcb\xf9\xd3\xd0\xb5\xc4\xb2\xd9\xd7\xf7\xcf\xb5\xcd\xb3\xd6\xd0"
-b"\x0a\xd4\xcb\xd0\xd0\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1\xd4\xbc"
-b"\xf2\xb5\xa5\xb6\xf8\xc7\xbf\xb4\xf3\xa3\xac\xca\xca\xba\xcf\xb8"
-b"\xf7\xd6\xd6\xc8\xcb\xca\xbf\xd1\xa7\xcf\xb0\xca\xb9\xd3\xc3\xa1"
-b"\xa3\xc4\xbf\xc7\xb0\xa3\xac\xbb\xf9\xd3\xda\xd5\xe2\x0a\xd6\xd6"
-b"\xd3\xef\xd1\xd4\xb5\xc4\xcf\xe0\xb9\xd8\xbc\xbc\xca\xf5\xd5\xfd"
-b"\xd4\xda\xb7\xc9\xcb\xd9\xb5\xc4\xb7\xa2\xd5\xb9\xa3\xac\xd3\xc3"
-b"\xbb\xa7\xca\xfd\xc1\xbf\xbc\xb1\xbe\xe7\xc0\xa9\xb4\xf3\xa3\xac"
-b"\xcf\xe0\xb9\xd8\xb5\xc4\xd7\xca\xd4\xb4\xb7\xc7\xb3\xa3\xb6\xe0"
-b"\xa1\xa3\x0a\x0a",
-b"\x50\x79\x74\x68\x6f\x6e\xef\xbc\x88\xe6\xb4\xbe\xe6\xa3\xae\xef"
-b"\xbc\x89\xe8\xaf\xad\xe8\xa8\x80\xe6\x98\xaf\xe4\xb8\x80\xe7\xa7"
-b"\x8d\xe5\x8a\x9f\xe8\x83\xbd\xe5\xbc\xba\xe5\xa4\xa7\xe8\x80\x8c"
-b"\xe5\xae\x8c\xe5\x96\x84\xe7\x9a\x84\xe9\x80\x9a\xe7\x94\xa8\xe5"
-b"\x9e\x8b\xe8\xae\xa1\xe7\xae\x97\xe6\x9c\xba\xe7\xa8\x8b\xe5\xba"
-b"\x8f\xe8\xae\xbe\xe8\xae\xa1\xe8\xaf\xad\xe8\xa8\x80\xef\xbc\x8c"
-b"\x0a\xe5\xb7\xb2\xe7\xbb\x8f\xe5\x85\xb7\xe6\x9c\x89\xe5\x8d\x81"
-b"\xe5\xa4\x9a\xe5\xb9\xb4\xe7\x9a\x84\xe5\x8f\x91\xe5\xb1\x95\xe5"
-b"\x8e\x86\xe5\x8f\xb2\xef\xbc\x8c\xe6\x88\x90\xe7\x86\x9f\xe4\xb8"
-b"\x94\xe7\xa8\xb3\xe5\xae\x9a\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d"
-b"\xe8\xaf\xad\xe8\xa8\x80\xe5\x85\xb7\xe6\x9c\x89\xe9\x9d\x9e\xe5"
-b"\xb8\xb8\xe7\xae\x80\xe6\x8d\xb7\xe8\x80\x8c\xe6\xb8\x85\xe6\x99"
-b"\xb0\x0a\xe7\x9a\x84\xe8\xaf\xad\xe6\xb3\x95\xe7\x89\xb9\xe7\x82"
-b"\xb9\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\xae\x8c\xe6\x88\x90"
-b"\xe5\x90\x84\xe7\xa7\x8d\xe9\xab\x98\xe5\xb1\x82\xe4\xbb\xbb\xe5"
-b"\x8a\xa1\xef\xbc\x8c\xe5\x87\xa0\xe4\xb9\x8e\xe5\x8f\xaf\xe4\xbb"
-b"\xa5\xe5\x9c\xa8\xe6\x89\x80\xe6\x9c\x89\xe7\x9a\x84\xe6\x93\x8d"
-b"\xe4\xbd\x9c\xe7\xb3\xbb\xe7\xbb\x9f\xe4\xb8\xad\x0a\xe8\xbf\x90"
-b"\xe8\xa1\x8c\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d\xe8\xaf\xad\xe8"
-b"\xa8\x80\xe7\xae\x80\xe5\x8d\x95\xe8\x80\x8c\xe5\xbc\xba\xe5\xa4"
-b"\xa7\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\x90\x84\xe7\xa7\x8d"
-b"\xe4\xba\xba\xe5\xa3\xab\xe5\xad\xa6\xe4\xb9\xa0\xe4\xbd\xbf\xe7"
-b"\x94\xa8\xe3\x80\x82\xe7\x9b\xae\xe5\x89\x8d\xef\xbc\x8c\xe5\x9f"
-b"\xba\xe4\xba\x8e\xe8\xbf\x99\x0a\xe7\xa7\x8d\xe8\xaf\xad\xe8\xa8"
-b"\x80\xe7\x9a\x84\xe7\x9b\xb8\xe5\x85\xb3\xe6\x8a\x80\xe6\x9c\xaf"
-b"\xe6\xad\xa3\xe5\x9c\xa8\xe9\xa3\x9e\xe9\x80\x9f\xe7\x9a\x84\xe5"
-b"\x8f\x91\xe5\xb1\x95\xef\xbc\x8c\xe7\x94\xa8\xe6\x88\xb7\xe6\x95"
-b"\xb0\xe9\x87\x8f\xe6\x80\xa5\xe5\x89\xa7\xe6\x89\xa9\xe5\xa4\xa7"
-b"\xef\xbc\x8c\xe7\x9b\xb8\xe5\x85\xb3\xe7\x9a\x84\xe8\xb5\x84\xe6"
-b"\xba\x90\xe9\x9d\x9e\xe5\xb8\xb8\xe5\xa4\x9a\xe3\x80\x82\x0a\x0a"),
-'gbk': (
-b"\x50\x79\x74\x68\x6f\x6e\xa3\xa8\xc5\xc9\xc9\xad\xa3\xa9\xd3\xef"
-b"\xd1\xd4\xca\xc7\xd2\xbb\xd6\xd6\xb9\xa6\xc4\xdc\xc7\xbf\xb4\xf3"
-b"\xb6\xf8\xcd\xea\xc9\xc6\xb5\xc4\xcd\xa8\xd3\xc3\xd0\xcd\xbc\xc6"
-b"\xcb\xe3\xbb\xfa\xb3\xcc\xd0\xf2\xc9\xe8\xbc\xc6\xd3\xef\xd1\xd4"
-b"\xa3\xac\x0a\xd2\xd1\xbe\xad\xbe\xdf\xd3\xd0\xca\xae\xb6\xe0\xc4"
-b"\xea\xb5\xc4\xb7\xa2\xd5\xb9\xc0\xfa\xca\xb7\xa3\xac\xb3\xc9\xca"
-b"\xec\xc7\xd2\xce\xc8\xb6\xa8\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1"
-b"\xd4\xbe\xdf\xd3\xd0\xb7\xc7\xb3\xa3\xbc\xf2\xbd\xdd\xb6\xf8\xc7"
-b"\xe5\xce\xfa\x0a\xb5\xc4\xd3\xef\xb7\xa8\xcc\xd8\xb5\xe3\xa3\xac"
-b"\xca\xca\xba\xcf\xcd\xea\xb3\xc9\xb8\xf7\xd6\xd6\xb8\xdf\xb2\xe3"
-b"\xc8\xce\xce\xf1\xa3\xac\xbc\xb8\xba\xf5\xbf\xc9\xd2\xd4\xd4\xda"
-b"\xcb\xf9\xd3\xd0\xb5\xc4\xb2\xd9\xd7\xf7\xcf\xb5\xcd\xb3\xd6\xd0"
-b"\x0a\xd4\xcb\xd0\xd0\xa1\xa3\xd5\xe2\xd6\xd6\xd3\xef\xd1\xd4\xbc"
-b"\xf2\xb5\xa5\xb6\xf8\xc7\xbf\xb4\xf3\xa3\xac\xca\xca\xba\xcf\xb8"
-b"\xf7\xd6\xd6\xc8\xcb\xca\xbf\xd1\xa7\xcf\xb0\xca\xb9\xd3\xc3\xa1"
-b"\xa3\xc4\xbf\xc7\xb0\xa3\xac\xbb\xf9\xd3\xda\xd5\xe2\x0a\xd6\xd6"
-b"\xd3\xef\xd1\xd4\xb5\xc4\xcf\xe0\xb9\xd8\xbc\xbc\xca\xf5\xd5\xfd"
-b"\xd4\xda\xb7\xc9\xcb\xd9\xb5\xc4\xb7\xa2\xd5\xb9\xa3\xac\xd3\xc3"
-b"\xbb\xa7\xca\xfd\xc1\xbf\xbc\xb1\xbe\xe7\xc0\xa9\xb4\xf3\xa3\xac"
-b"\xcf\xe0\xb9\xd8\xb5\xc4\xd7\xca\xd4\xb4\xb7\xc7\xb3\xa3\xb6\xe0"
-b"\xa1\xa3\x0a\xc8\xe7\xba\xce\xd4\xda\x20\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xd6\xd0\xca\xb9\xd3\xc3\xbc\xc8\xd3\xd0\xb5\xc4\x20\x43\x20"
-b"\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xa1\xa1\xd4\xda\xd9\x59\xd3"
-b"\x8d\xbf\xc6\xbc\xbc\xbf\xec\xcb\xd9\xb0\x6c\xd5\xb9\xb5\xc4\xbd"
-b"\xf1\xcc\xec\x2c\x20\xe9\x5f\xb0\x6c\xbc\xb0\x9c\x79\xd4\x87\xdc"
-b"\x9b\xf3\x77\xb5\xc4\xcb\xd9\xb6\xc8\xca\xc7\xb2\xbb\xc8\xdd\xba"
-b"\xf6\xd2\x95\xb5\xc4\x0a\xd5\x6e\xee\x7d\x2e\x20\x9e\xe9\xbc\xd3"
-b"\xbf\xec\xe9\x5f\xb0\x6c\xbc\xb0\x9c\x79\xd4\x87\xb5\xc4\xcb\xd9"
-b"\xb6\xc8\x2c\x20\xce\xd2\x82\x83\xb1\xe3\xb3\xa3\xcf\xa3\xcd\xfb"
-b"\xc4\xdc\xc0\xfb\xd3\xc3\xd2\xbb\xd0\xa9\xd2\xd1\xe9\x5f\xb0\x6c"
-b"\xba\xc3\xb5\xc4\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\x81\x4b"
-b"\xd3\xd0\xd2\xbb\x82\x80\x20\x66\x61\x73\x74\x20\x70\x72\x6f\x74"
-b"\x6f\x74\x79\x70\x69\x6e\x67\x20\xb5\xc4\x20\x70\x72\x6f\x67\x72"
-b"\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x20"
-b"\xbf\xc9\x0a\xb9\xa9\xca\xb9\xd3\xc3\x2e\x20\xc4\xbf\xc7\xb0\xd3"
-b"\xd0\xd4\x53\xd4\x53\xb6\xe0\xb6\xe0\xb5\xc4\x20\x6c\x69\x62\x72"
-b"\x61\x72\x79\x20\xca\xc7\xd2\xd4\x20\x43\x20\x8c\x91\xb3\xc9\x2c"
-b"\x20\xb6\xf8\x20\x50\x79\x74\x68\x6f\x6e\x20\xca\xc7\xd2\xbb\x82"
-b"\x80\x0a\x66\x61\x73\x74\x20\x70\x72\x6f\x74\x6f\x74\x79\x70\x69"
-b"\x6e\x67\x20\xb5\xc4\x20\x70\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e"
-b"\x67\x20\x6c\x61\x6e\x67\x75\x61\x67\x65\x2e\x20\xb9\xca\xce\xd2"
-b"\x82\x83\xcf\xa3\xcd\xfb\xc4\xdc\x8c\xa2\xbc\xc8\xd3\xd0\xb5\xc4"
-b"\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xc4\xc3\xb5\xbd\x20"
-b"\x50\x79\x74\x68\x6f\x6e\x20\xb5\xc4\xad\x68\xbe\xb3\xd6\xd0\x9c"
-b"\x79\xd4\x87\xbc\xb0\xd5\xfb\xba\xcf\x2e\x20\xc6\xe4\xd6\xd0\xd7"
-b"\xee\xd6\xf7\xd2\xaa\xd2\xb2\xca\xc7\xce\xd2\x82\x83\xcb\xf9\x0a"
-b"\xd2\xaa\xd3\x91\xd5\x93\xb5\xc4\x86\x96\xee\x7d\xbe\xcd\xca\xc7"
-b"\x3a\x0a\x0a",
-b"\x50\x79\x74\x68\x6f\x6e\xef\xbc\x88\xe6\xb4\xbe\xe6\xa3\xae\xef"
-b"\xbc\x89\xe8\xaf\xad\xe8\xa8\x80\xe6\x98\xaf\xe4\xb8\x80\xe7\xa7"
-b"\x8d\xe5\x8a\x9f\xe8\x83\xbd\xe5\xbc\xba\xe5\xa4\xa7\xe8\x80\x8c"
-b"\xe5\xae\x8c\xe5\x96\x84\xe7\x9a\x84\xe9\x80\x9a\xe7\x94\xa8\xe5"
-b"\x9e\x8b\xe8\xae\xa1\xe7\xae\x97\xe6\x9c\xba\xe7\xa8\x8b\xe5\xba"
-b"\x8f\xe8\xae\xbe\xe8\xae\xa1\xe8\xaf\xad\xe8\xa8\x80\xef\xbc\x8c"
-b"\x0a\xe5\xb7\xb2\xe7\xbb\x8f\xe5\x85\xb7\xe6\x9c\x89\xe5\x8d\x81"
-b"\xe5\xa4\x9a\xe5\xb9\xb4\xe7\x9a\x84\xe5\x8f\x91\xe5\xb1\x95\xe5"
-b"\x8e\x86\xe5\x8f\xb2\xef\xbc\x8c\xe6\x88\x90\xe7\x86\x9f\xe4\xb8"
-b"\x94\xe7\xa8\xb3\xe5\xae\x9a\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d"
-b"\xe8\xaf\xad\xe8\xa8\x80\xe5\x85\xb7\xe6\x9c\x89\xe9\x9d\x9e\xe5"
-b"\xb8\xb8\xe7\xae\x80\xe6\x8d\xb7\xe8\x80\x8c\xe6\xb8\x85\xe6\x99"
-b"\xb0\x0a\xe7\x9a\x84\xe8\xaf\xad\xe6\xb3\x95\xe7\x89\xb9\xe7\x82"
-b"\xb9\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\xae\x8c\xe6\x88\x90"
-b"\xe5\x90\x84\xe7\xa7\x8d\xe9\xab\x98\xe5\xb1\x82\xe4\xbb\xbb\xe5"
-b"\x8a\xa1\xef\xbc\x8c\xe5\x87\xa0\xe4\xb9\x8e\xe5\x8f\xaf\xe4\xbb"
-b"\xa5\xe5\x9c\xa8\xe6\x89\x80\xe6\x9c\x89\xe7\x9a\x84\xe6\x93\x8d"
-b"\xe4\xbd\x9c\xe7\xb3\xbb\xe7\xbb\x9f\xe4\xb8\xad\x0a\xe8\xbf\x90"
-b"\xe8\xa1\x8c\xe3\x80\x82\xe8\xbf\x99\xe7\xa7\x8d\xe8\xaf\xad\xe8"
-b"\xa8\x80\xe7\xae\x80\xe5\x8d\x95\xe8\x80\x8c\xe5\xbc\xba\xe5\xa4"
-b"\xa7\xef\xbc\x8c\xe9\x80\x82\xe5\x90\x88\xe5\x90\x84\xe7\xa7\x8d"
-b"\xe4\xba\xba\xe5\xa3\xab\xe5\xad\xa6\xe4\xb9\xa0\xe4\xbd\xbf\xe7"
-b"\x94\xa8\xe3\x80\x82\xe7\x9b\xae\xe5\x89\x8d\xef\xbc\x8c\xe5\x9f"
-b"\xba\xe4\xba\x8e\xe8\xbf\x99\x0a\xe7\xa7\x8d\xe8\xaf\xad\xe8\xa8"
-b"\x80\xe7\x9a\x84\xe7\x9b\xb8\xe5\x85\xb3\xe6\x8a\x80\xe6\x9c\xaf"
-b"\xe6\xad\xa3\xe5\x9c\xa8\xe9\xa3\x9e\xe9\x80\x9f\xe7\x9a\x84\xe5"
-b"\x8f\x91\xe5\xb1\x95\xef\xbc\x8c\xe7\x94\xa8\xe6\x88\xb7\xe6\x95"
-b"\xb0\xe9\x87\x8f\xe6\x80\xa5\xe5\x89\xa7\xe6\x89\xa9\xe5\xa4\xa7"
-b"\xef\xbc\x8c\xe7\x9b\xb8\xe5\x85\xb3\xe7\x9a\x84\xe8\xb5\x84\xe6"
-b"\xba\x90\xe9\x9d\x9e\xe5\xb8\xb8\xe5\xa4\x9a\xe3\x80\x82\x0a\xe5"
-b"\xa6\x82\xe4\xbd\x95\xe5\x9c\xa8\x20\x50\x79\x74\x68\x6f\x6e\x20"
-b"\xe4\xb8\xad\xe4\xbd\xbf\xe7\x94\xa8\xe6\x97\xa2\xe6\x9c\x89\xe7"
-b"\x9a\x84\x20\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x3f\x0a\xe3\x80"
-b"\x80\xe5\x9c\xa8\xe8\xb3\x87\xe8\xa8\x8a\xe7\xa7\x91\xe6\x8a\x80"
-b"\xe5\xbf\xab\xe9\x80\x9f\xe7\x99\xbc\xe5\xb1\x95\xe7\x9a\x84\xe4"
-b"\xbb\x8a\xe5\xa4\xa9\x2c\x20\xe9\x96\x8b\xe7\x99\xbc\xe5\x8f\x8a"
-b"\xe6\xb8\xac\xe8\xa9\xa6\xe8\xbb\x9f\xe9\xab\x94\xe7\x9a\x84\xe9"
-b"\x80\x9f\xe5\xba\xa6\xe6\x98\xaf\xe4\xb8\x8d\xe5\xae\xb9\xe5\xbf"
-b"\xbd\xe8\xa6\x96\xe7\x9a\x84\x0a\xe8\xaa\xb2\xe9\xa1\x8c\x2e\x20"
-b"\xe7\x82\xba\xe5\x8a\xa0\xe5\xbf\xab\xe9\x96\x8b\xe7\x99\xbc\xe5"
-b"\x8f\x8a\xe6\xb8\xac\xe8\xa9\xa6\xe7\x9a\x84\xe9\x80\x9f\xe5\xba"
-b"\xa6\x2c\x20\xe6\x88\x91\xe5\x80\x91\xe4\xbe\xbf\xe5\xb8\xb8\xe5"
-b"\xb8\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\x88\xa9\xe7\x94\xa8\xe4\xb8"
-b"\x80\xe4\xba\x9b\xe5\xb7\xb2\xe9\x96\x8b\xe7\x99\xbc\xe5\xa5\xbd"
-b"\xe7\x9a\x84\x0a\x6c\x69\x62\x72\x61\x72\x79\x2c\x20\xe4\xb8\xa6"
-b"\xe6\x9c\x89\xe4\xb8\x80\xe5\x80\x8b\x20\x66\x61\x73\x74\x20\x70"
-b"\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20\x70"
-b"\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75"
-b"\x61\x67\x65\x20\xe5\x8f\xaf\x0a\xe4\xbe\x9b\xe4\xbd\xbf\xe7\x94"
-b"\xa8\x2e\x20\xe7\x9b\xae\xe5\x89\x8d\xe6\x9c\x89\xe8\xa8\xb1\xe8"
-b"\xa8\xb1\xe5\xa4\x9a\xe5\xa4\x9a\xe7\x9a\x84\x20\x6c\x69\x62\x72"
-b"\x61\x72\x79\x20\xe6\x98\xaf\xe4\xbb\xa5\x20\x43\x20\xe5\xaf\xab"
-b"\xe6\x88\x90\x2c\x20\xe8\x80\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20"
-b"\xe6\x98\xaf\xe4\xb8\x80\xe5\x80\x8b\x0a\x66\x61\x73\x74\x20\x70"
-b"\x72\x6f\x74\x6f\x74\x79\x70\x69\x6e\x67\x20\xe7\x9a\x84\x20\x70"
-b"\x72\x6f\x67\x72\x61\x6d\x6d\x69\x6e\x67\x20\x6c\x61\x6e\x67\x75"
-b"\x61\x67\x65\x2e\x20\xe6\x95\x85\xe6\x88\x91\xe5\x80\x91\xe5\xb8"
-b"\x8c\xe6\x9c\x9b\xe8\x83\xbd\xe5\xb0\x87\xe6\x97\xa2\xe6\x9c\x89"
-b"\xe7\x9a\x84\x0a\x43\x20\x6c\x69\x62\x72\x61\x72\x79\x20\xe6\x8b"
-b"\xbf\xe5\x88\xb0\x20\x50\x79\x74\x68\x6f\x6e\x20\xe7\x9a\x84\xe7"
-b"\x92\xb0\xe5\xa2\x83\xe4\xb8\xad\xe6\xb8\xac\xe8\xa9\xa6\xe5\x8f"
-b"\x8a\xe6\x95\xb4\xe5\x90\x88\x2e\x20\xe5\x85\xb6\xe4\xb8\xad\xe6"
-b"\x9c\x80\xe4\xb8\xbb\xe8\xa6\x81\xe4\xb9\x9f\xe6\x98\xaf\xe6\x88"
-b"\x91\xe5\x80\x91\xe6\x89\x80\x0a\xe8\xa6\x81\xe8\xa8\x8e\xe8\xab"
-b"\x96\xe7\x9a\x84\xe5\x95\x8f\xe9\xa1\x8c\xe5\xb0\xb1\xe6\x98\xaf"
-b"\x3a\x0a\x0a"),
-'johab': (
-b"\x99\xb1\xa4\x77\x88\x62\xd0\x61\x20\xcd\x5c\xaf\xa1\xc5\xa9\x9c"
-b"\x61\x0a\x0a\xdc\xc0\xdc\xc0\x90\x73\x21\x21\x20\xf1\x67\xe2\x9c"
-b"\xf0\x55\xcc\x81\xa3\x89\x9f\x85\x8a\xa1\x20\xdc\xde\xdc\xd3\xd2"
-b"\x7a\xd9\xaf\xd9\xaf\xd9\xaf\x20\x8b\x77\x96\xd3\x20\xdc\xd1\x95"
-b"\x81\x20\xdc\xc0\x2e\x20\x2e\x0a\xed\x3c\xb5\x77\xdc\xd1\x93\x77"
-b"\xd2\x73\x20\x2e\x20\x2e\x20\x2e\x20\x2e\x20\xac\xe1\xb6\x89\x9e"
-b"\xa1\x20\x95\x65\xd0\x62\xf0\xe0\x20\xe0\x3b\xd2\x7a\x20\x21\x20"
-b"\x21\x20\x21\x87\x41\x2e\x87\x41\x0a\xd3\x61\xd3\x61\xd3\x61\x20"
-b"\x88\x41\x88\x41\x88\x41\xd9\x69\x87\x41\x5f\x87\x41\x20\xb4\xe1"
-b"\x9f\x9a\x20\xc8\xa1\xc5\xc1\x8b\x7a\x20\x95\x61\xb7\x77\x20\xc3"
-b"\x97\xe2\x9c\x97\x69\xf0\xe0\x20\xdc\xc0\x97\x61\x8b\x7a\x0a\xac"
-b"\xe9\x9f\x7a\x20\xe0\x3b\xd2\x7a\x20\x2e\x20\x2e\x20\x2e\x20\x2e"
-b"\x20\x8a\x89\xb4\x81\xae\xba\x20\xdc\xd1\x8a\xa1\x20\xdc\xde\x9f"
-b"\x89\xdc\xc2\x8b\x7a\x20\xf1\x67\xf1\x62\xf5\x49\xed\xfc\xf3\xe9"
-b"\x8c\x61\xbb\x9a\x0a\xb5\xc1\xb2\xa1\xd2\x7a\x20\x21\x20\x21\x20"
-b"\xed\x3c\xb5\x77\xdc\xd1\x20\xe0\x3b\x93\x77\x8a\xa1\x20\xd9\x69"
-b"\xea\xbe\x89\xc5\x20\xb4\xf4\x93\x77\x8a\xa1\x93\x77\x20\xed\x3c"
-b"\x93\x77\x96\xc1\xd2\x7a\x20\x8b\x69\xb4\x81\x97\x7a\x0a\xdc\xde"
-b"\x9d\x61\x97\x41\xe2\x9c\x20\xaf\x81\xce\xa1\xae\xa1\xd2\x7a\x20"
-b"\xb4\xe1\x9f\x9a\x20\xf1\x67\xf1\x62\xf5\x49\xed\xfc\xf3\xe9\xaf"
-b"\x82\xdc\xef\x97\x69\xb4\x7a\x21\x21\x20\xdc\xc0\xdc\xc0\x90\x73"
-b"\xd9\xbd\x20\xd9\x62\xd9\x62\x2a\x0a\x0a",
-b"\xeb\x98\xa0\xeb\xb0\xa9\xea\xb0\x81\xed\x95\x98\x20\xed\x8e\xb2"
-b"\xec\x8b\x9c\xec\xbd\x9c\xeb\x9d\xbc\x0a\x0a\xe3\x89\xaf\xe3\x89"
-b"\xaf\xeb\x82\xa9\x21\x21\x20\xe5\x9b\xa0\xe4\xb9\x9d\xe6\x9c\x88"
-b"\xed\x8c\xa8\xeb\xaf\xa4\xeb\xa6\x94\xea\xb6\x88\x20\xe2\x93\xa1"
-b"\xe2\x93\x96\xed\x9b\x80\xc2\xbf\xc2\xbf\xc2\xbf\x20\xea\xb8\x8d"
-b"\xeb\x92\x99\x20\xe2\x93\x94\xeb\x8e\xa8\x20\xe3\x89\xaf\x2e\x20"
-b"\x2e\x0a\xe4\xba\x9e\xec\x98\x81\xe2\x93\x94\xeb\x8a\xa5\xed\x9a"
-b"\xb9\x20\x2e\x20\x2e\x20\x2e\x20\x2e\x20\xec\x84\x9c\xec\x9a\xb8"
-b"\xeb\xa4\x84\x20\xeb\x8e\x90\xed\x95\x99\xe4\xb9\x99\x20\xe5\xae"
-b"\xb6\xed\x9b\x80\x20\x21\x20\x21\x20\x21\xe3\x85\xa0\x2e\xe3\x85"
-b"\xa0\x0a\xed\x9d\x90\xed\x9d\x90\xed\x9d\x90\x20\xe3\x84\xb1\xe3"
-b"\x84\xb1\xe3\x84\xb1\xe2\x98\x86\xe3\x85\xa0\x5f\xe3\x85\xa0\x20"
-b"\xec\x96\xb4\xeb\xa6\xa8\x20\xed\x83\xb8\xec\xbd\xb0\xea\xb8\x90"
-b"\x20\xeb\x8e\x8c\xec\x9d\x91\x20\xec\xb9\x91\xe4\xb9\x9d\xeb\x93"
-b"\xa4\xe4\xb9\x99\x20\xe3\x89\xaf\xeb\x93\x9c\xea\xb8\x90\x0a\xec"
-b"\x84\xa4\xeb\xa6\x8c\x20\xe5\xae\xb6\xed\x9b\x80\x20\x2e\x20\x2e"
-b"\x20\x2e\x20\x2e\x20\xea\xb5\xb4\xec\x95\xa0\xec\x89\x8c\x20\xe2"
-b"\x93\x94\xea\xb6\x88\x20\xe2\x93\xa1\xeb\xa6\x98\xe3\x89\xb1\xea"
-b"\xb8\x90\x20\xe5\x9b\xa0\xe4\xbb\x81\xe5\xb7\x9d\xef\xa6\x81\xe4"
-b"\xb8\xad\xea\xb9\x8c\xec\xa6\xbc\x0a\xec\x99\x80\xec\x92\x80\xed"
-b"\x9b\x80\x20\x21\x20\x21\x20\xe4\xba\x9e\xec\x98\x81\xe2\x93\x94"
-b"\x20\xe5\xae\xb6\xeb\x8a\xa5\xea\xb6\x88\x20\xe2\x98\x86\xe4\xb8"
-b"\x8a\xea\xb4\x80\x20\xec\x97\x86\xeb\x8a\xa5\xea\xb6\x88\xeb\x8a"
-b"\xa5\x20\xe4\xba\x9e\xeb\x8a\xa5\xeb\x92\x88\xed\x9b\x80\x20\xea"
-b"\xb8\x80\xec\x95\xa0\xeb\x93\xb4\x0a\xe2\x93\xa1\xeb\xa0\xa4\xeb"
-b"\x93\x80\xe4\xb9\x9d\x20\xec\x8b\x80\xed\x92\x94\xec\x88\xb4\xed"
-b"\x9b\x80\x20\xec\x96\xb4\xeb\xa6\xa8\x20\xe5\x9b\xa0\xe4\xbb\x81"
-b"\xe5\xb7\x9d\xef\xa6\x81\xe4\xb8\xad\xec\x8b\x81\xe2\x91\xa8\xeb"
-b"\x93\xa4\xec\x95\x9c\x21\x21\x20\xe3\x89\xaf\xe3\x89\xaf\xeb\x82"
-b"\xa9\xe2\x99\xa1\x20\xe2\x8c\x92\xe2\x8c\x92\x2a\x0a\x0a"),
-'shift_jis': (
-b"\x50\x79\x74\x68\x6f\x6e\x20\x82\xcc\x8a\x4a\x94\xad\x82\xcd\x81"
-b"\x41\x31\x39\x39\x30\x20\x94\x4e\x82\xb2\x82\xeb\x82\xa9\x82\xe7"
-b"\x8a\x4a\x8e\x6e\x82\xb3\x82\xea\x82\xc4\x82\xa2\x82\xdc\x82\xb7"
-b"\x81\x42\x0a\x8a\x4a\x94\xad\x8e\xd2\x82\xcc\x20\x47\x75\x69\x64"
-b"\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73\x73\x75\x6d\x20\x82\xcd\x8b"
-b"\xb3\x88\xe7\x97\x70\x82\xcc\x83\x76\x83\x8d\x83\x4f\x83\x89\x83"
-b"\x7e\x83\x93\x83\x4f\x8c\xbe\x8c\xea\x81\x75\x41\x42\x43\x81\x76"
-b"\x82\xcc\x8a\x4a\x94\xad\x82\xc9\x8e\x51\x89\xc1\x82\xb5\x82\xc4"
-b"\x82\xa2\x82\xdc\x82\xb5\x82\xbd\x82\xaa\x81\x41\x41\x42\x43\x20"
-b"\x82\xcd\x8e\xc0\x97\x70\x8f\xe3\x82\xcc\x96\xda\x93\x49\x82\xc9"
-b"\x82\xcd\x82\xa0\x82\xdc\x82\xe8\x93\x4b\x82\xb5\x82\xc4\x82\xa2"
-b"\x82\xdc\x82\xb9\x82\xf1\x82\xc5\x82\xb5\x82\xbd\x81\x42\x0a\x82"
-b"\xb1\x82\xcc\x82\xbd\x82\xdf\x81\x41\x47\x75\x69\x64\x6f\x20\x82"
-b"\xcd\x82\xe6\x82\xe8\x8e\xc0\x97\x70\x93\x49\x82\xc8\x83\x76\x83"
-b"\x8d\x83\x4f\x83\x89\x83\x7e\x83\x93\x83\x4f\x8c\xbe\x8c\xea\x82"
-b"\xcc\x8a\x4a\x94\xad\x82\xf0\x8a\x4a\x8e\x6e\x82\xb5\x81\x41\x89"
-b"\x70\x8d\x91\x20\x42\x42\x53\x20\x95\xfa\x91\x97\x82\xcc\x83\x52"
-b"\x83\x81\x83\x66\x83\x42\x94\xd4\x91\x67\x81\x75\x83\x82\x83\x93"
-b"\x83\x65\x83\x42\x20\x83\x70\x83\x43\x83\x5c\x83\x93\x81\x76\x82"
-b"\xcc\x83\x74\x83\x40\x83\x93\x82\xc5\x82\xa0\x82\xe9\x20\x47\x75"
-b"\x69\x64\x6f\x20\x82\xcd\x82\xb1\x82\xcc\x8c\xbe\x8c\xea\x82\xf0"
-b"\x81\x75\x50\x79\x74\x68\x6f\x6e\x81\x76\x82\xc6\x96\xbc\x82\xc3"
-b"\x82\xaf\x82\xdc\x82\xb5\x82\xbd\x81\x42\x0a\x82\xb1\x82\xcc\x82"
-b"\xe6\x82\xa4\x82\xc8\x94\x77\x8c\x69\x82\xa9\x82\xe7\x90\xb6\x82"
-b"\xdc\x82\xea\x82\xbd\x20\x50\x79\x74\x68\x6f\x6e\x20\x82\xcc\x8c"
-b"\xbe\x8c\xea\x90\xdd\x8c\x76\x82\xcd\x81\x41\x81\x75\x83\x56\x83"
-b"\x93\x83\x76\x83\x8b\x81\x76\x82\xc5\x81\x75\x8f\x4b\x93\xbe\x82"
-b"\xaa\x97\x65\x88\xd5\x81\x76\x82\xc6\x82\xa2\x82\xa4\x96\xda\x95"
-b"\x57\x82\xc9\x8f\x64\x93\x5f\x82\xaa\x92\x75\x82\xa9\x82\xea\x82"
-b"\xc4\x82\xa2\x82\xdc\x82\xb7\x81\x42\x0a\x91\xbd\x82\xad\x82\xcc"
-b"\x83\x58\x83\x4e\x83\x8a\x83\x76\x83\x67\x8c\x6e\x8c\xbe\x8c\xea"
-b"\x82\xc5\x82\xcd\x83\x86\x81\x5b\x83\x55\x82\xcc\x96\xda\x90\xe6"
-b"\x82\xcc\x97\x98\x95\xd6\x90\xab\x82\xf0\x97\x44\x90\xe6\x82\xb5"
-b"\x82\xc4\x90\x46\x81\x58\x82\xc8\x8b\x40\x94\x5c\x82\xf0\x8c\xbe"
-b"\x8c\xea\x97\x76\x91\x66\x82\xc6\x82\xb5\x82\xc4\x8e\xe6\x82\xe8"
-b"\x93\xfc\x82\xea\x82\xe9\x8f\xea\x8d\x87\x82\xaa\x91\xbd\x82\xa2"
-b"\x82\xcc\x82\xc5\x82\xb7\x82\xaa\x81\x41\x50\x79\x74\x68\x6f\x6e"
-b"\x20\x82\xc5\x82\xcd\x82\xbb\x82\xa4\x82\xa2\x82\xc1\x82\xbd\x8f"
-b"\xac\x8d\xd7\x8d\x48\x82\xaa\x92\xc7\x89\xc1\x82\xb3\x82\xea\x82"
-b"\xe9\x82\xb1\x82\xc6\x82\xcd\x82\xa0\x82\xdc\x82\xe8\x82\xa0\x82"
-b"\xe8\x82\xdc\x82\xb9\x82\xf1\x81\x42\x0a\x8c\xbe\x8c\xea\x8e\xa9"
-b"\x91\xcc\x82\xcc\x8b\x40\x94\x5c\x82\xcd\x8d\xc5\x8f\xac\x8c\xc0"
-b"\x82\xc9\x89\x9f\x82\xb3\x82\xa6\x81\x41\x95\x4b\x97\x76\x82\xc8"
-b"\x8b\x40\x94\x5c\x82\xcd\x8a\x67\x92\xa3\x83\x82\x83\x57\x83\x85"
-b"\x81\x5b\x83\x8b\x82\xc6\x82\xb5\x82\xc4\x92\xc7\x89\xc1\x82\xb7"
-b"\x82\xe9\x81\x41\x82\xc6\x82\xa2\x82\xa4\x82\xcc\x82\xaa\x20\x50"
-b"\x79\x74\x68\x6f\x6e\x20\x82\xcc\x83\x7c\x83\x8a\x83\x56\x81\x5b"
-b"\x82\xc5\x82\xb7\x81\x42\x0a\x0a",
-b"\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
-b"\xe3\x81\xaf\xe3\x80\x81\x31\x39\x39\x30\x20\xe5\xb9\xb4\xe3\x81"
-b"\x94\xe3\x82\x8d\xe3\x81\x8b\xe3\x82\x89\xe9\x96\x8b\xe5\xa7\x8b"
-b"\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3"
-b"\x81\x99\xe3\x80\x82\x0a\xe9\x96\x8b\xe7\x99\xba\xe8\x80\x85\xe3"
-b"\x81\xae\x20\x47\x75\x69\x64\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73"
-b"\x73\x75\x6d\x20\xe3\x81\xaf\xe6\x95\x99\xe8\x82\xb2\xe7\x94\xa8"
-b"\xe3\x81\xae\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83\xa9\xe3"
-b"\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e\xe3\x80"
-b"\x8c\x41\x42\x43\xe3\x80\x8d\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
-b"\xe3\x81\xab\xe5\x8f\x82\xe5\x8a\xa0\xe3\x81\x97\xe3\x81\xa6\xe3"
-b"\x81\x84\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x81\x8c\xe3\x80"
-b"\x81\x41\x42\x43\x20\xe3\x81\xaf\xe5\xae\x9f\xe7\x94\xa8\xe4\xb8"
-b"\x8a\xe3\x81\xae\xe7\x9b\xae\xe7\x9a\x84\xe3\x81\xab\xe3\x81\xaf"
-b"\xe3\x81\x82\xe3\x81\xbe\xe3\x82\x8a\xe9\x81\xa9\xe3\x81\x97\xe3"
-b"\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x81"
-b"\xa7\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a\xe3\x81\x93\xe3\x81"
-b"\xae\xe3\x81\x9f\xe3\x82\x81\xe3\x80\x81\x47\x75\x69\x64\x6f\x20"
-b"\xe3\x81\xaf\xe3\x82\x88\xe3\x82\x8a\xe5\xae\x9f\xe7\x94\xa8\xe7"
-b"\x9a\x84\xe3\x81\xaa\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83"
-b"\xa9\xe3\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e"
-b"\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba\xe3\x82\x92\xe9\x96\x8b\xe5"
-b"\xa7\x8b\xe3\x81\x97\xe3\x80\x81\xe8\x8b\xb1\xe5\x9b\xbd\x20\x42"
-b"\x42\x53\x20\xe6\x94\xbe\xe9\x80\x81\xe3\x81\xae\xe3\x82\xb3\xe3"
-b"\x83\xa1\xe3\x83\x87\xe3\x82\xa3\xe7\x95\xaa\xe7\xb5\x84\xe3\x80"
-b"\x8c\xe3\x83\xa2\xe3\x83\xb3\xe3\x83\x86\xe3\x82\xa3\x20\xe3\x83"
-b"\x91\xe3\x82\xa4\xe3\x82\xbd\xe3\x83\xb3\xe3\x80\x8d\xe3\x81\xae"
-b"\xe3\x83\x95\xe3\x82\xa1\xe3\x83\xb3\xe3\x81\xa7\xe3\x81\x82\xe3"
-b"\x82\x8b\x20\x47\x75\x69\x64\x6f\x20\xe3\x81\xaf\xe3\x81\x93\xe3"
-b"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe3\x82\x92\xe3\x80\x8c\x50\x79"
-b"\x74\x68\x6f\x6e\xe3\x80\x8d\xe3\x81\xa8\xe5\x90\x8d\xe3\x81\xa5"
-b"\xe3\x81\x91\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a"
-b"\xe3\x81\x93\xe3\x81\xae\xe3\x82\x88\xe3\x81\x86\xe3\x81\xaa\xe8"
-b"\x83\x8c\xe6\x99\xaf\xe3\x81\x8b\xe3\x82\x89\xe7\x94\x9f\xe3\x81"
-b"\xbe\xe3\x82\x8c\xe3\x81\x9f\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3"
-b"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa8\xad\xe8\xa8\x88\xe3\x81"
-b"\xaf\xe3\x80\x81\xe3\x80\x8c\xe3\x82\xb7\xe3\x83\xb3\xe3\x83\x97"
-b"\xe3\x83\xab\xe3\x80\x8d\xe3\x81\xa7\xe3\x80\x8c\xe7\xbf\x92\xe5"
-b"\xbe\x97\xe3\x81\x8c\xe5\xae\xb9\xe6\x98\x93\xe3\x80\x8d\xe3\x81"
-b"\xa8\xe3\x81\x84\xe3\x81\x86\xe7\x9b\xae\xe6\xa8\x99\xe3\x81\xab"
-b"\xe9\x87\x8d\xe7\x82\xb9\xe3\x81\x8c\xe7\xbd\xae\xe3\x81\x8b\xe3"
-b"\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80"
-b"\x82\x0a\xe5\xa4\x9a\xe3\x81\x8f\xe3\x81\xae\xe3\x82\xb9\xe3\x82"
-b"\xaf\xe3\x83\xaa\xe3\x83\x97\xe3\x83\x88\xe7\xb3\xbb\xe8\xa8\x80"
-b"\xe8\xaa\x9e\xe3\x81\xa7\xe3\x81\xaf\xe3\x83\xa6\xe3\x83\xbc\xe3"
-b"\x82\xb6\xe3\x81\xae\xe7\x9b\xae\xe5\x85\x88\xe3\x81\xae\xe5\x88"
-b"\xa9\xe4\xbe\xbf\xe6\x80\xa7\xe3\x82\x92\xe5\x84\xaa\xe5\x85\x88"
-b"\xe3\x81\x97\xe3\x81\xa6\xe8\x89\xb2\xe3\x80\x85\xe3\x81\xaa\xe6"
-b"\xa9\x9f\xe8\x83\xbd\xe3\x82\x92\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa6"
-b"\x81\xe7\xb4\xa0\xe3\x81\xa8\xe3\x81\x97\xe3\x81\xa6\xe5\x8f\x96"
-b"\xe3\x82\x8a\xe5\x85\xa5\xe3\x82\x8c\xe3\x82\x8b\xe5\xa0\xb4\xe5"
-b"\x90\x88\xe3\x81\x8c\xe5\xa4\x9a\xe3\x81\x84\xe3\x81\xae\xe3\x81"
-b"\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xe3\x81\xa7\xe3\x81\xaf\xe3\x81\x9d\xe3\x81\x86\xe3\x81\x84"
-b"\xe3\x81\xa3\xe3\x81\x9f\xe5\xb0\x8f\xe7\xb4\xb0\xe5\xb7\xa5\xe3"
-b"\x81\x8c\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x95\xe3\x82\x8c\xe3\x82"
-b"\x8b\xe3\x81\x93\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\x82\xe3\x81\xbe"
-b"\xe3\x82\x8a\xe3\x81\x82\xe3\x82\x8a\xe3\x81\xbe\xe3\x81\x9b\xe3"
-b"\x82\x93\xe3\x80\x82\x0a\xe8\xa8\x80\xe8\xaa\x9e\xe8\x87\xaa\xe4"
-b"\xbd\x93\xe3\x81\xae\xe6\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x9c"
-b"\x80\xe5\xb0\x8f\xe9\x99\x90\xe3\x81\xab\xe6\x8a\xbc\xe3\x81\x95"
-b"\xe3\x81\x88\xe3\x80\x81\xe5\xbf\x85\xe8\xa6\x81\xe3\x81\xaa\xe6"
-b"\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x8b\xa1\xe5\xbc\xb5\xe3\x83"
-b"\xa2\xe3\x82\xb8\xe3\x83\xa5\xe3\x83\xbc\xe3\x83\xab\xe3\x81\xa8"
-b"\xe3\x81\x97\xe3\x81\xa6\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x99\xe3"
-b"\x82\x8b\xe3\x80\x81\xe3\x81\xa8\xe3\x81\x84\xe3\x81\x86\xe3\x81"
-b"\xae\xe3\x81\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe3"
-b"\x83\x9d\xe3\x83\xaa\xe3\x82\xb7\xe3\x83\xbc\xe3\x81\xa7\xe3\x81"
-b"\x99\xe3\x80\x82\x0a\x0a"),
-'shift_jisx0213': (
-b"\x50\x79\x74\x68\x6f\x6e\x20\x82\xcc\x8a\x4a\x94\xad\x82\xcd\x81"
-b"\x41\x31\x39\x39\x30\x20\x94\x4e\x82\xb2\x82\xeb\x82\xa9\x82\xe7"
-b"\x8a\x4a\x8e\x6e\x82\xb3\x82\xea\x82\xc4\x82\xa2\x82\xdc\x82\xb7"
-b"\x81\x42\x0a\x8a\x4a\x94\xad\x8e\xd2\x82\xcc\x20\x47\x75\x69\x64"
-b"\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73\x73\x75\x6d\x20\x82\xcd\x8b"
-b"\xb3\x88\xe7\x97\x70\x82\xcc\x83\x76\x83\x8d\x83\x4f\x83\x89\x83"
-b"\x7e\x83\x93\x83\x4f\x8c\xbe\x8c\xea\x81\x75\x41\x42\x43\x81\x76"
-b"\x82\xcc\x8a\x4a\x94\xad\x82\xc9\x8e\x51\x89\xc1\x82\xb5\x82\xc4"
-b"\x82\xa2\x82\xdc\x82\xb5\x82\xbd\x82\xaa\x81\x41\x41\x42\x43\x20"
-b"\x82\xcd\x8e\xc0\x97\x70\x8f\xe3\x82\xcc\x96\xda\x93\x49\x82\xc9"
-b"\x82\xcd\x82\xa0\x82\xdc\x82\xe8\x93\x4b\x82\xb5\x82\xc4\x82\xa2"
-b"\x82\xdc\x82\xb9\x82\xf1\x82\xc5\x82\xb5\x82\xbd\x81\x42\x0a\x82"
-b"\xb1\x82\xcc\x82\xbd\x82\xdf\x81\x41\x47\x75\x69\x64\x6f\x20\x82"
-b"\xcd\x82\xe6\x82\xe8\x8e\xc0\x97\x70\x93\x49\x82\xc8\x83\x76\x83"
-b"\x8d\x83\x4f\x83\x89\x83\x7e\x83\x93\x83\x4f\x8c\xbe\x8c\xea\x82"
-b"\xcc\x8a\x4a\x94\xad\x82\xf0\x8a\x4a\x8e\x6e\x82\xb5\x81\x41\x89"
-b"\x70\x8d\x91\x20\x42\x42\x53\x20\x95\xfa\x91\x97\x82\xcc\x83\x52"
-b"\x83\x81\x83\x66\x83\x42\x94\xd4\x91\x67\x81\x75\x83\x82\x83\x93"
-b"\x83\x65\x83\x42\x20\x83\x70\x83\x43\x83\x5c\x83\x93\x81\x76\x82"
-b"\xcc\x83\x74\x83\x40\x83\x93\x82\xc5\x82\xa0\x82\xe9\x20\x47\x75"
-b"\x69\x64\x6f\x20\x82\xcd\x82\xb1\x82\xcc\x8c\xbe\x8c\xea\x82\xf0"
-b"\x81\x75\x50\x79\x74\x68\x6f\x6e\x81\x76\x82\xc6\x96\xbc\x82\xc3"
-b"\x82\xaf\x82\xdc\x82\xb5\x82\xbd\x81\x42\x0a\x82\xb1\x82\xcc\x82"
-b"\xe6\x82\xa4\x82\xc8\x94\x77\x8c\x69\x82\xa9\x82\xe7\x90\xb6\x82"
-b"\xdc\x82\xea\x82\xbd\x20\x50\x79\x74\x68\x6f\x6e\x20\x82\xcc\x8c"
-b"\xbe\x8c\xea\x90\xdd\x8c\x76\x82\xcd\x81\x41\x81\x75\x83\x56\x83"
-b"\x93\x83\x76\x83\x8b\x81\x76\x82\xc5\x81\x75\x8f\x4b\x93\xbe\x82"
-b"\xaa\x97\x65\x88\xd5\x81\x76\x82\xc6\x82\xa2\x82\xa4\x96\xda\x95"
-b"\x57\x82\xc9\x8f\x64\x93\x5f\x82\xaa\x92\x75\x82\xa9\x82\xea\x82"
-b"\xc4\x82\xa2\x82\xdc\x82\xb7\x81\x42\x0a\x91\xbd\x82\xad\x82\xcc"
-b"\x83\x58\x83\x4e\x83\x8a\x83\x76\x83\x67\x8c\x6e\x8c\xbe\x8c\xea"
-b"\x82\xc5\x82\xcd\x83\x86\x81\x5b\x83\x55\x82\xcc\x96\xda\x90\xe6"
-b"\x82\xcc\x97\x98\x95\xd6\x90\xab\x82\xf0\x97\x44\x90\xe6\x82\xb5"
-b"\x82\xc4\x90\x46\x81\x58\x82\xc8\x8b\x40\x94\x5c\x82\xf0\x8c\xbe"
-b"\x8c\xea\x97\x76\x91\x66\x82\xc6\x82\xb5\x82\xc4\x8e\xe6\x82\xe8"
-b"\x93\xfc\x82\xea\x82\xe9\x8f\xea\x8d\x87\x82\xaa\x91\xbd\x82\xa2"
-b"\x82\xcc\x82\xc5\x82\xb7\x82\xaa\x81\x41\x50\x79\x74\x68\x6f\x6e"
-b"\x20\x82\xc5\x82\xcd\x82\xbb\x82\xa4\x82\xa2\x82\xc1\x82\xbd\x8f"
-b"\xac\x8d\xd7\x8d\x48\x82\xaa\x92\xc7\x89\xc1\x82\xb3\x82\xea\x82"
-b"\xe9\x82\xb1\x82\xc6\x82\xcd\x82\xa0\x82\xdc\x82\xe8\x82\xa0\x82"
-b"\xe8\x82\xdc\x82\xb9\x82\xf1\x81\x42\x0a\x8c\xbe\x8c\xea\x8e\xa9"
-b"\x91\xcc\x82\xcc\x8b\x40\x94\x5c\x82\xcd\x8d\xc5\x8f\xac\x8c\xc0"
-b"\x82\xc9\x89\x9f\x82\xb3\x82\xa6\x81\x41\x95\x4b\x97\x76\x82\xc8"
-b"\x8b\x40\x94\x5c\x82\xcd\x8a\x67\x92\xa3\x83\x82\x83\x57\x83\x85"
-b"\x81\x5b\x83\x8b\x82\xc6\x82\xb5\x82\xc4\x92\xc7\x89\xc1\x82\xb7"
-b"\x82\xe9\x81\x41\x82\xc6\x82\xa2\x82\xa4\x82\xcc\x82\xaa\x20\x50"
-b"\x79\x74\x68\x6f\x6e\x20\x82\xcc\x83\x7c\x83\x8a\x83\x56\x81\x5b"
-b"\x82\xc5\x82\xb7\x81\x42\x0a\x0a\x83\x6d\x82\xf5\x20\x83\x9e\x20"
-b"\x83\x67\x83\x4c\x88\x4b\x88\x79\x20\x98\x83\xfc\xd6\x20\xfc\xd2"
-b"\xfc\xe6\xfb\xd4\x0a",
-b"\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
-b"\xe3\x81\xaf\xe3\x80\x81\x31\x39\x39\x30\x20\xe5\xb9\xb4\xe3\x81"
-b"\x94\xe3\x82\x8d\xe3\x81\x8b\xe3\x82\x89\xe9\x96\x8b\xe5\xa7\x8b"
-b"\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3"
-b"\x81\x99\xe3\x80\x82\x0a\xe9\x96\x8b\xe7\x99\xba\xe8\x80\x85\xe3"
-b"\x81\xae\x20\x47\x75\x69\x64\x6f\x20\x76\x61\x6e\x20\x52\x6f\x73"
-b"\x73\x75\x6d\x20\xe3\x81\xaf\xe6\x95\x99\xe8\x82\xb2\xe7\x94\xa8"
-b"\xe3\x81\xae\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83\xa9\xe3"
-b"\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e\xe3\x80"
-b"\x8c\x41\x42\x43\xe3\x80\x8d\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba"
-b"\xe3\x81\xab\xe5\x8f\x82\xe5\x8a\xa0\xe3\x81\x97\xe3\x81\xa6\xe3"
-b"\x81\x84\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x81\x8c\xe3\x80"
-b"\x81\x41\x42\x43\x20\xe3\x81\xaf\xe5\xae\x9f\xe7\x94\xa8\xe4\xb8"
-b"\x8a\xe3\x81\xae\xe7\x9b\xae\xe7\x9a\x84\xe3\x81\xab\xe3\x81\xaf"
-b"\xe3\x81\x82\xe3\x81\xbe\xe3\x82\x8a\xe9\x81\xa9\xe3\x81\x97\xe3"
-b"\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x81"
-b"\xa7\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a\xe3\x81\x93\xe3\x81"
-b"\xae\xe3\x81\x9f\xe3\x82\x81\xe3\x80\x81\x47\x75\x69\x64\x6f\x20"
-b"\xe3\x81\xaf\xe3\x82\x88\xe3\x82\x8a\xe5\xae\x9f\xe7\x94\xa8\xe7"
-b"\x9a\x84\xe3\x81\xaa\xe3\x83\x97\xe3\x83\xad\xe3\x82\xb0\xe3\x83"
-b"\xa9\xe3\x83\x9f\xe3\x83\xb3\xe3\x82\xb0\xe8\xa8\x80\xe8\xaa\x9e"
-b"\xe3\x81\xae\xe9\x96\x8b\xe7\x99\xba\xe3\x82\x92\xe9\x96\x8b\xe5"
-b"\xa7\x8b\xe3\x81\x97\xe3\x80\x81\xe8\x8b\xb1\xe5\x9b\xbd\x20\x42"
-b"\x42\x53\x20\xe6\x94\xbe\xe9\x80\x81\xe3\x81\xae\xe3\x82\xb3\xe3"
-b"\x83\xa1\xe3\x83\x87\xe3\x82\xa3\xe7\x95\xaa\xe7\xb5\x84\xe3\x80"
-b"\x8c\xe3\x83\xa2\xe3\x83\xb3\xe3\x83\x86\xe3\x82\xa3\x20\xe3\x83"
-b"\x91\xe3\x82\xa4\xe3\x82\xbd\xe3\x83\xb3\xe3\x80\x8d\xe3\x81\xae"
-b"\xe3\x83\x95\xe3\x82\xa1\xe3\x83\xb3\xe3\x81\xa7\xe3\x81\x82\xe3"
-b"\x82\x8b\x20\x47\x75\x69\x64\x6f\x20\xe3\x81\xaf\xe3\x81\x93\xe3"
-b"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe3\x82\x92\xe3\x80\x8c\x50\x79"
-b"\x74\x68\x6f\x6e\xe3\x80\x8d\xe3\x81\xa8\xe5\x90\x8d\xe3\x81\xa5"
-b"\xe3\x81\x91\xe3\x81\xbe\xe3\x81\x97\xe3\x81\x9f\xe3\x80\x82\x0a"
-b"\xe3\x81\x93\xe3\x81\xae\xe3\x82\x88\xe3\x81\x86\xe3\x81\xaa\xe8"
-b"\x83\x8c\xe6\x99\xaf\xe3\x81\x8b\xe3\x82\x89\xe7\x94\x9f\xe3\x81"
-b"\xbe\xe3\x82\x8c\xe3\x81\x9f\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3"
-b"\x81\xae\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa8\xad\xe8\xa8\x88\xe3\x81"
-b"\xaf\xe3\x80\x81\xe3\x80\x8c\xe3\x82\xb7\xe3\x83\xb3\xe3\x83\x97"
-b"\xe3\x83\xab\xe3\x80\x8d\xe3\x81\xa7\xe3\x80\x8c\xe7\xbf\x92\xe5"
-b"\xbe\x97\xe3\x81\x8c\xe5\xae\xb9\xe6\x98\x93\xe3\x80\x8d\xe3\x81"
-b"\xa8\xe3\x81\x84\xe3\x81\x86\xe7\x9b\xae\xe6\xa8\x99\xe3\x81\xab"
-b"\xe9\x87\x8d\xe7\x82\xb9\xe3\x81\x8c\xe7\xbd\xae\xe3\x81\x8b\xe3"
-b"\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80"
-b"\x82\x0a\xe5\xa4\x9a\xe3\x81\x8f\xe3\x81\xae\xe3\x82\xb9\xe3\x82"
-b"\xaf\xe3\x83\xaa\xe3\x83\x97\xe3\x83\x88\xe7\xb3\xbb\xe8\xa8\x80"
-b"\xe8\xaa\x9e\xe3\x81\xa7\xe3\x81\xaf\xe3\x83\xa6\xe3\x83\xbc\xe3"
-b"\x82\xb6\xe3\x81\xae\xe7\x9b\xae\xe5\x85\x88\xe3\x81\xae\xe5\x88"
-b"\xa9\xe4\xbe\xbf\xe6\x80\xa7\xe3\x82\x92\xe5\x84\xaa\xe5\x85\x88"
-b"\xe3\x81\x97\xe3\x81\xa6\xe8\x89\xb2\xe3\x80\x85\xe3\x81\xaa\xe6"
-b"\xa9\x9f\xe8\x83\xbd\xe3\x82\x92\xe8\xa8\x80\xe8\xaa\x9e\xe8\xa6"
-b"\x81\xe7\xb4\xa0\xe3\x81\xa8\xe3\x81\x97\xe3\x81\xa6\xe5\x8f\x96"
-b"\xe3\x82\x8a\xe5\x85\xa5\xe3\x82\x8c\xe3\x82\x8b\xe5\xa0\xb4\xe5"
-b"\x90\x88\xe3\x81\x8c\xe5\xa4\x9a\xe3\x81\x84\xe3\x81\xae\xe3\x81"
-b"\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\x50\x79\x74\x68\x6f\x6e"
-b"\x20\xe3\x81\xa7\xe3\x81\xaf\xe3\x81\x9d\xe3\x81\x86\xe3\x81\x84"
-b"\xe3\x81\xa3\xe3\x81\x9f\xe5\xb0\x8f\xe7\xb4\xb0\xe5\xb7\xa5\xe3"
-b"\x81\x8c\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x95\xe3\x82\x8c\xe3\x82"
-b"\x8b\xe3\x81\x93\xe3\x81\xa8\xe3\x81\xaf\xe3\x81\x82\xe3\x81\xbe"
-b"\xe3\x82\x8a\xe3\x81\x82\xe3\x82\x8a\xe3\x81\xbe\xe3\x81\x9b\xe3"
-b"\x82\x93\xe3\x80\x82\x0a\xe8\xa8\x80\xe8\xaa\x9e\xe8\x87\xaa\xe4"
-b"\xbd\x93\xe3\x81\xae\xe6\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x9c"
-b"\x80\xe5\xb0\x8f\xe9\x99\x90\xe3\x81\xab\xe6\x8a\xbc\xe3\x81\x95"
-b"\xe3\x81\x88\xe3\x80\x81\xe5\xbf\x85\xe8\xa6\x81\xe3\x81\xaa\xe6"
-b"\xa9\x9f\xe8\x83\xbd\xe3\x81\xaf\xe6\x8b\xa1\xe5\xbc\xb5\xe3\x83"
-b"\xa2\xe3\x82\xb8\xe3\x83\xa5\xe3\x83\xbc\xe3\x83\xab\xe3\x81\xa8"
-b"\xe3\x81\x97\xe3\x81\xa6\xe8\xbf\xbd\xe5\x8a\xa0\xe3\x81\x99\xe3"
-b"\x82\x8b\xe3\x80\x81\xe3\x81\xa8\xe3\x81\x84\xe3\x81\x86\xe3\x81"
-b"\xae\xe3\x81\x8c\x20\x50\x79\x74\x68\x6f\x6e\x20\xe3\x81\xae\xe3"
-b"\x83\x9d\xe3\x83\xaa\xe3\x82\xb7\xe3\x83\xbc\xe3\x81\xa7\xe3\x81"
-b"\x99\xe3\x80\x82\x0a\x0a\xe3\x83\x8e\xe3\x81\x8b\xe3\x82\x9a\x20"
-b"\xe3\x83\x88\xe3\x82\x9a\x20\xe3\x83\x88\xe3\x82\xad\xef\xa8\xb6"
-b"\xef\xa8\xb9\x20\xf0\xa1\x9a\xb4\xf0\xaa\x8e\x8c\x20\xe9\xba\x80"
-b"\xe9\xbd\x81\xf0\xa9\x9b\xb0\x0a"),
-}
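
The deletion above removes the inline test-string table from the CJK codec tests; each entry pairs a codec name (euc_kr, gb18030, gb2312, gbk, johab, shift_jis, shift_jisx0213, ...) with a natively encoded sample and its UTF-8 rendering, consistent with the data moving into standalone data files. A minimal sketch of the round-trip check such a pair supports (the helper name and usage are illustrative, not part of this patch):

    def check_codec_pair(encoding, native_bytes, utf8_bytes):
        # Both byte strings should decode to the same text, and the
        # codec should round-trip the native sample losslessly.
        text = native_bytes.decode(encoding)
        assert text == utf8_bytes.decode('utf-8')
        assert text.encode(encoding) == native_bytes

    # Hypothetical usage with the 'gb2312' pair from the deleted table:
    # check_codec_pair('gb2312', gb2312_sample, gb2312_utf8)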
diff --git a/Lib/test/fork_wait.py b/Lib/test/fork_wait.py
index 1caab1c..88527df 100644
--- a/Lib/test/fork_wait.py
+++ b/Lib/test/fork_wait.py
@@ -43,6 +43,7 @@
         self.assertEqual(spid, cpid)
         self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
 
+    @support.reap_threads
     def test_wait(self):
         for i in range(NUM_THREADS):
             _thread.start_new(self.f, (i,))
@@ -69,7 +70,8 @@
             os._exit(n)
         else:
             # Parent
-            self.wait_impl(cpid)
-            # Tell threads to die
-            self.stop = 1
-            time.sleep(2*SHORTSLEEP) # Wait for threads to die
+            try:
+                self.wait_impl(cpid)
+            finally:
+                # Tell threads to die
+                self.stop = 1
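
The fork_wait.py hunks make the thread shutdown unconditional: `self.stop = 1` now runs in a `finally:` block so the worker threads are signaled even when `wait_impl` fails, and the fixed `time.sleep(2*SHORTSLEEP)` gives way to the `@support.reap_threads` decorator, which joins leftover threads after the test. A self-contained sketch of the same signal-then-reap pattern (names besides the flag are illustrative):

    import threading
    import time

    stop = False

    def worker():
        # Poll the shared flag, as the threads started by test_wait do.
        while not stop:
            time.sleep(0.01)

    t = threading.Thread(target=worker)
    t.start()
    try:
        pass  # the real test calls self.wait_impl(cpid) here and may raise
    finally:
        stop = True  # always signal the worker, even on failure
    t.join()  # support.reap_threads performs this cleanup for the test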
diff --git a/Lib/test/json_tests/__init__.py b/Lib/test/json_tests/__init__.py
index 4977468..779c7a4 100644
--- a/Lib/test/json_tests/__init__.py
+++ b/Lib/test/json_tests/__init__.py
@@ -1,7 +1,46 @@
 import os
 import sys
-import unittest
+import json
 import doctest
+import unittest
+
+from test import support
+
+# import json with and without accelerations
+cjson = support.import_fresh_module('json', fresh=['_json'])
+pyjson = support.import_fresh_module('json', blocked=['_json'])
+
+# create two base classes that will be used by the other tests
+class PyTest(unittest.TestCase):
+    json = pyjson
+    loads = staticmethod(pyjson.loads)
+    dumps = staticmethod(pyjson.dumps)
+
+@unittest.skipUnless(cjson, 'requires _json')
+class CTest(unittest.TestCase):
+    if cjson is not None:
+        json = cjson
+        loads = staticmethod(cjson.loads)
+        dumps = staticmethod(cjson.dumps)
+
+# test PyTest and CTest checking if the functions come from the right module
+class TestPyTest(PyTest):
+    def test_pyjson(self):
+        self.assertEqual(self.json.scanner.make_scanner.__module__,
+                         'json.scanner')
+        self.assertEqual(self.json.decoder.scanstring.__module__,
+                         'json.decoder')
+        self.assertEqual(self.json.encoder.encode_basestring_ascii.__module__,
+                         'json.encoder')
+
+class TestCTest(CTest):
+    def test_cjson(self):
+        self.assertEqual(self.json.scanner.make_scanner.__module__, '_json')
+        self.assertEqual(self.json.decoder.scanstring.__module__, '_json')
+        self.assertEqual(self.json.encoder.c_make_encoder.__module__, '_json')
+        self.assertEqual(self.json.encoder.encode_basestring_ascii.__module__,
+                         '_json')
+
 
 here = os.path.dirname(__file__)
 
@@ -17,12 +56,11 @@
     return suite
 
 def additional_tests():
-    import json
-    import json.encoder
-    import json.decoder
     suite = unittest.TestSuite()
     for mod in (json, json.encoder, json.decoder):
         suite.addTest(doctest.DocTestSuite(mod))
+    suite.addTest(TestPyTest('test_pyjson'))
+    suite.addTest(TestCTest('test_cjson'))
     return suite
 
 def main():
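
The rewritten `json_tests/__init__.py` is the hub of this patch: `support.import_fresh_module` produces two copies of the `json` package, one with the `_json` accelerator freshly imported and one with it blocked, and the new `TestPyTest`/`TestCTest` cases assert that each copy really resolves to the intended implementation. A short sketch of that verification outside the suite (expected values are taken from the assertions above):

    from test import support

    cjson = support.import_fresh_module('json', fresh=['_json'])
    pyjson = support.import_fresh_module('json', blocked=['_json'])

    # The blocked copy falls back to the pure-Python code paths...
    print(pyjson.decoder.scanstring.__module__)       # json.decoder
    # ...while the fresh copy resolves to the C extension, when built.
    if cjson is not None:  # None on interpreters built without _json
        print(cjson.decoder.scanstring.__module__)    # _json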
diff --git a/Lib/test/json_tests/test_decode.py b/Lib/test/json_tests/test_decode.py
index 07dadfe..9fbaa3b 100644
--- a/Lib/test/json_tests/test_decode.py
+++ b/Lib/test/json_tests/test_decode.py
@@ -1,55 +1,38 @@
 import decimal
-from unittest import TestCase
 from io import StringIO
-from contextlib import contextmanager
-
-import json
-import json.decoder
-import json.scanner
 from collections import OrderedDict
+from test.json_tests import PyTest, CTest
 
 
-@contextmanager
-def use_python_scanner():
-    py_scanner = json.scanner.py_make_scanner
-    old_scanner = json.decoder.make_scanner
-    json.decoder.make_scanner = py_scanner
-    try:
-        yield
-    finally:
-        json.decoder.make_scanner = old_scanner
-
-
-class TestDecode(TestCase):
+class TestDecode:
     def test_decimal(self):
-        rval = json.loads('1.1', parse_float=decimal.Decimal)
+        rval = self.loads('1.1', parse_float=decimal.Decimal)
         self.assertTrue(isinstance(rval, decimal.Decimal))
         self.assertEqual(rval, decimal.Decimal('1.1'))
 
     def test_float(self):
-        rval = json.loads('1', parse_int=float)
+        rval = self.loads('1', parse_int=float)
         self.assertTrue(isinstance(rval, float))
         self.assertEqual(rval, 1.0)
 
     def test_empty_objects(self):
-        self.assertEqual(json.loads('{}'), {})
-        self.assertEqual(json.loads('[]'), [])
-        self.assertEqual(json.loads('""'), "")
+        self.assertEqual(self.loads('{}'), {})
+        self.assertEqual(self.loads('[]'), [])
+        self.assertEqual(self.loads('""'), "")
 
     def test_object_pairs_hook(self):
         s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
         p = [("xkd", 1), ("kcw", 2), ("art", 3), ("hxm", 4),
              ("qrt", 5), ("pad", 6), ("hoy", 7)]
-        self.assertEqual(json.loads(s), eval(s))
-        self.assertEqual(json.loads(s, object_pairs_hook = lambda x: x), p)
-        self.assertEqual(json.load(StringIO(s),
-                                   object_pairs_hook=lambda x: x), p)
-        od = json.loads(s, object_pairs_hook = OrderedDict)
+        self.assertEqual(self.loads(s), eval(s))
+        self.assertEqual(self.loads(s, object_pairs_hook = lambda x: x), p)
+        self.assertEqual(self.json.load(StringIO(s),
+                                        object_pairs_hook=lambda x: x), p)
+        od = self.loads(s, object_pairs_hook = OrderedDict)
         self.assertEqual(od, OrderedDict(p))
         self.assertEqual(type(od), OrderedDict)
         # the object_pairs_hook takes priority over the object_hook
-        self.assertEqual(json.loads(s,
-                                    object_pairs_hook = OrderedDict,
+        self.assertEqual(self.loads(s, object_pairs_hook = OrderedDict,
                                     object_hook = lambda x: None),
                          OrderedDict(p))
 
@@ -57,7 +40,7 @@
         # Several optimizations were made that skip over calls to
         # the whitespace regex, so this test is designed to try and
         # exercise the uncommon cases. The array cases are already covered.
-        rval = json.loads('{   "key"    :    "value"    ,  "k":"v"    }')
+        rval = self.loads('{   "key"    :    "value"    ,  "k":"v"    }')
         self.assertEqual(rval, {"key":"value", "k":"v"})
 
     def check_keys_reuse(self, source, loads):
@@ -68,7 +51,9 @@
 
     def test_keys_reuse(self):
         s = '[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'
-        self.check_keys_reuse(s, json.loads)
-        # Disabled: the pure Python version of json simply doesn't work
-        with use_python_scanner():
-            self.check_keys_reuse(s, json.decoder.JSONDecoder().decode)
+        self.check_keys_reuse(s, self.loads)
+        self.check_keys_reuse(s, self.json.decoder.JSONDecoder().decode)
+
+
+class TestPyDecode(TestDecode, PyTest): pass
+class TestCDecode(TestDecode, CTest): pass
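
This is the conversion pattern every json test file in the rest of the patch follows: the test logic moves into a plain class with no `TestCase` base, and two concrete cases are minted by mixing in `PyTest` or `CTest`, whose `json`/`loads`/`dumps` attributes select the implementation. A minimal analogue of the mechanism (the classes here are illustrative stand-ins for PyTest and CTest, not part of this patch):

    import unittest

    class UpperImpl(unittest.TestCase):
        transform = staticmethod(str.upper)

    class LowerImpl(unittest.TestCase):
        transform = staticmethod(str.lower)

    class TestTransform:  # shared logic, no TestCase base
        def test_idempotent(self):
            # self.transform resolves through whichever mixin is used,
            # exactly as self.loads/self.dumps do in the json tests.
            once = self.transform('MiXeD')
            self.assertEqual(self.transform(once), once)

    class TestUpper(TestTransform, UpperImpl): pass
    class TestLower(TestTransform, LowerImpl): pass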
diff --git a/Lib/test/json_tests/test_default.py b/Lib/test/json_tests/test_default.py
index 6a03eeb..672c753 100644
--- a/Lib/test/json_tests/test_default.py
+++ b/Lib/test/json_tests/test_default.py
@@ -1,9 +1,12 @@
-from unittest import TestCase
+from test.json_tests import PyTest, CTest
 
-import json
 
-class TestDefault(TestCase):
+class TestDefault:
     def test_default(self):
         self.assertEqual(
-            json.dumps(type, default=repr),
-            json.dumps(repr(type)))
+            self.dumps(type, default=repr),
+            self.dumps(repr(type)))
+
+
+class TestPyDefault(TestDefault, PyTest): pass
+class TestCDefault(TestDefault, CTest): pass
diff --git a/Lib/test/json_tests/test_dump.py b/Lib/test/json_tests/test_dump.py
index 8df234b..083c11f 100644
--- a/Lib/test/json_tests/test_dump.py
+++ b/Lib/test/json_tests/test_dump.py
@@ -1,21 +1,24 @@
-from unittest import TestCase
 from io import StringIO
+from test.json_tests import PyTest, CTest
 
-import json
 
-class TestDump(TestCase):
+class TestDump:
     def test_dump(self):
         sio = StringIO()
-        json.dump({}, sio)
+        self.json.dump({}, sio)
         self.assertEqual(sio.getvalue(), '{}')
 
     def test_dumps(self):
-        self.assertEqual(json.dumps({}), '{}')
+        self.assertEqual(self.dumps({}), '{}')
 
     def test_encode_truefalse(self):
-        self.assertEqual(json.dumps(
+        self.assertEqual(self.dumps(
                  {True: False, False: True}, sort_keys=True),
                  '{"false": true, "true": false}')
-        self.assertEqual(json.dumps(
+        self.assertEqual(self.dumps(
                 {2: 3.0, 4.0: 5, False: 1, 6: True}, sort_keys=True),
                 '{"false": 1, "2": 3.0, "4.0": 5, "6": true}')
+
+
+class TestPyDump(TestDump, PyTest): pass
+class TestCDump(TestDump, CTest): pass
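
The `test_encode_truefalse` expectations above pin down a subtle detail: non-string keys are coerced to their JSON spellings, but `sort_keys` orders the original Python values, which is why `False` (equal to 0) sorts ahead of `2`. Reproduced directly:

    import json

    # Keys are coerced (False -> "false", 2 -> "2", ...) while sorting
    # follows the original values, so the smallest key comes first:
    print(json.dumps({2: 3.0, 4.0: 5, False: 1, 6: True}, sort_keys=True))
    # {"false": 1, "2": 3.0, "4.0": 5, "6": true}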
diff --git a/Lib/test/json_tests/test_encode_basestring_ascii.py b/Lib/test/json_tests/test_encode_basestring_ascii.py
index 4fddd12..bfca69d 100644
--- a/Lib/test/json_tests/test_encode_basestring_ascii.py
+++ b/Lib/test/json_tests/test_encode_basestring_ascii.py
@@ -1,8 +1,6 @@
-from unittest import TestCase
-
-import json.encoder
-from json import dumps
 from collections import OrderedDict
+from test.json_tests import PyTest, CTest
+
 
 CASES = [
     ('/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
@@ -21,19 +19,11 @@
     ('\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
 ]
 
-class TestEncodeBaseStringAscii(TestCase):
-    def test_py_encode_basestring_ascii(self):
-        self._test_encode_basestring_ascii(json.encoder.py_encode_basestring_ascii)
-
-    def test_c_encode_basestring_ascii(self):
-        if not json.encoder.c_encode_basestring_ascii:
-            return
-        self._test_encode_basestring_ascii(json.encoder.c_encode_basestring_ascii)
-
-    def _test_encode_basestring_ascii(self, encode_basestring_ascii):
-        fname = encode_basestring_ascii.__name__
+class TestEncodeBasestringAscii:
+    def test_encode_basestring_ascii(self):
+        fname = self.json.encoder.encode_basestring_ascii.__name__
         for input_string, expect in CASES:
-            result = encode_basestring_ascii(input_string)
+            result = self.json.encoder.encode_basestring_ascii(input_string)
             self.assertEqual(result, expect,
                 '{0!r} != {1!r} for {2}({3!r})'.format(
                     result, expect, fname, input_string))
@@ -41,10 +31,14 @@
     def test_ordered_dict(self):
         # See issue 6105
         items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
-        s = json.dumps(OrderedDict(items))
+        s = self.dumps(OrderedDict(items))
         self.assertEqual(s, '{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}')
 
     def test_sorted_dict(self):
         items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
-        s = json.dumps(dict(items), sort_keys=True)
+        s = self.dumps(dict(items), sort_keys=True)
         self.assertEqual(s, '{"five": 5, "four": 4, "one": 1, "three": 3, "two": 2}')
+
+
+class TestPyEncodeBasestringAscii(TestEncodeBasestringAscii, PyTest): pass
+class TestCEncodeBasestringAscii(TestEncodeBasestringAscii, CTest): pass
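
Both implementations of `encode_basestring_ascii` exercised here escape everything outside ASCII to `\uXXXX` form and return a quoted JSON string literal, which is what the CASES table checks. A quick illustration:

    import json.encoder

    # Non-ASCII is escaped; the surrounding quotes are part of the result:
    print(json.encoder.encode_basestring_ascii('π/2'))   # "\u03c0/2"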
diff --git a/Lib/test/json_tests/test_fail.py b/Lib/test/json_tests/test_fail.py
index 9768edf..fcff1d4 100644
--- a/Lib/test/json_tests/test_fail.py
+++ b/Lib/test/json_tests/test_fail.py
@@ -1,6 +1,4 @@
-from unittest import TestCase
-
-import json
+from test.json_tests import PyTest, CTest
 
 # Fri Dec 30 18:57:26 2005
 JSONDOCS = [
@@ -61,15 +59,15 @@
     18: "spec doesn't specify any nesting limitations",
 }
 
-class TestFail(TestCase):
+class TestFail:
     def test_failures(self):
         for idx, doc in enumerate(JSONDOCS):
             idx = idx + 1
             if idx in SKIPS:
-                json.loads(doc)
+                self.loads(doc)
                 continue
             try:
-                json.loads(doc)
+                self.loads(doc)
             except ValueError:
                 pass
             else:
@@ -79,7 +77,11 @@
         data = {'a' : 1, (1, 2) : 2}
 
         #This is for c encoder
-        self.assertRaises(TypeError, json.dumps, data)
+        self.assertRaises(TypeError, self.dumps, data)
 
         #This is for python encoder
-        self.assertRaises(TypeError, json.dumps, data, indent=True)
+        self.assertRaises(TypeError, self.dumps, data, indent=True)
+
+
+class TestPyFail(TestFail, PyTest): pass
+class TestCFail(TestFail, CTest): pass
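
The `test_non_string_keys_dict` change keeps both encoder paths covered, as its comments note: plain `dumps(data)` can take the C fast path, while `indent=True` forces the pure-Python encoder, since the C encoder fast path applies only to un-indented output. Either way a tuple key raises `TypeError`:

    import json

    data = {'a': 1, (1, 2): 2}
    for kwargs in ({}, {'indent': True}):   # C path, then Python path
        try:
            json.dumps(data, **kwargs)
        except TypeError:
            pass  # tuple keys are rejected by both encoders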
diff --git a/Lib/test/json_tests/test_float.py b/Lib/test/json_tests/test_float.py
index ca4a506..a3441cf 100644
--- a/Lib/test/json_tests/test_float.py
+++ b/Lib/test/json_tests/test_float.py
@@ -1,15 +1,18 @@
 import math
-from unittest import TestCase
+from test.json_tests import PyTest, CTest
 
-import json
 
-class TestFloat(TestCase):
+class TestFloat:
     def test_floats(self):
         for num in [1617161771.7650001, math.pi, math.pi**100, math.pi**-100, 3.1]:
-            self.assertEqual(float(json.dumps(num)), num)
-            self.assertEqual(json.loads(json.dumps(num)), num)
+            self.assertEqual(float(self.dumps(num)), num)
+            self.assertEqual(self.loads(self.dumps(num)), num)
 
     def test_ints(self):
         for num in [1, 1<<32, 1<<64]:
-            self.assertEqual(json.dumps(num), str(num))
-            self.assertEqual(int(json.dumps(num)), num)
+            self.assertEqual(self.dumps(num), str(num))
+            self.assertEqual(int(self.dumps(num)), num)
+
+
+class TestPyFloat(TestFloat, PyTest): pass
+class TestCFloat(TestFloat, CTest): pass
diff --git a/Lib/test/json_tests/test_indent.py b/Lib/test/json_tests/test_indent.py
index 692a494..4c70646 100644
--- a/Lib/test/json_tests/test_indent.py
+++ b/Lib/test/json_tests/test_indent.py
@@ -1,10 +1,9 @@
-from unittest import TestCase
-
-import json
 import textwrap
 from io import StringIO
+from test.json_tests import PyTest, CTest
 
-class TestIndent(TestCase):
+
+class TestIndent:
     def test_indent(self):
         h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth',
              {'nifty': 87}, {'field': 'yes', 'morefield': False} ]
@@ -30,14 +29,13 @@
         \t}
         ]""")
 
+        d1 = self.dumps(h)
+        d2 = self.dumps(h, indent=2, sort_keys=True, separators=(',', ': '))
+        d3 = self.dumps(h, indent='\t', sort_keys=True, separators=(',', ': '))
 
-        d1 = json.dumps(h)
-        d2 = json.dumps(h, indent=2, sort_keys=True, separators=(',', ': '))
-        d3 = json.dumps(h, indent='\t', sort_keys=True, separators=(',', ': '))
-
-        h1 = json.loads(d1)
-        h2 = json.loads(d2)
-        h3 = json.loads(d3)
+        h1 = self.loads(d1)
+        h2 = self.loads(d2)
+        h3 = self.loads(d3)
 
         self.assertEqual(h1, h)
         self.assertEqual(h2, h)
@@ -48,14 +46,18 @@
     def test_indent0(self):
         h = {3: 1}
         def check(indent, expected):
-            d1 = json.dumps(h, indent=indent)
+            d1 = self.dumps(h, indent=indent)
             self.assertEqual(d1, expected)
 
             sio = StringIO()
-            json.dump(h, sio, indent=indent)
+            self.json.dump(h, sio, indent=indent)
             self.assertEqual(sio.getvalue(), expected)
 
         # indent=0 should emit newlines
         check(0, '{\n"3": 1\n}')
         # indent=None is more compact
         check(None, '{"3": 1}')
+
+
+class TestPyIndent(TestIndent, PyTest): pass
+class TestCIndent(TestIndent, CTest): pass
diff --git a/Lib/test/json_tests/test_pass1.py b/Lib/test/json_tests/test_pass1.py
index 719c113..036a2e4 100644
--- a/Lib/test/json_tests/test_pass1.py
+++ b/Lib/test/json_tests/test_pass1.py
@@ -1,6 +1,5 @@
-from unittest import TestCase
+from test.json_tests import PyTest, CTest
 
-import json
 
 # from http://json.org/JSON_checker/test/pass1.json
 JSON = r'''
@@ -62,15 +61,19 @@
 ,"rosebud"]
 '''
 
-class TestPass1(TestCase):
+class TestPass1:
     def test_parse(self):
         # test in/out equivalence and parsing
-        res = json.loads(JSON)
-        out = json.dumps(res)
-        self.assertEqual(res, json.loads(out))
+        res = self.loads(JSON)
+        out = self.dumps(res)
+        self.assertEqual(res, self.loads(out))
         try:
-            json.dumps(res, allow_nan=False)
+            self.dumps(res, allow_nan=False)
         except ValueError:
             pass
         else:
             self.fail("23456789012E666 should be out of range")
+
+
+class TestPyPass1(TestPass1, PyTest): pass
+class TestCPass1(TestPass1, CTest): pass
diff --git a/Lib/test/json_tests/test_pass2.py b/Lib/test/json_tests/test_pass2.py
index 379117e..eee6383 100644
--- a/Lib/test/json_tests/test_pass2.py
+++ b/Lib/test/json_tests/test_pass2.py
@@ -1,14 +1,18 @@
-from unittest import TestCase
-import json
+from test.json_tests import PyTest, CTest
+
 
 # from http://json.org/JSON_checker/test/pass2.json
 JSON = r'''
 [[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]]
 '''
 
-class TestPass2(TestCase):
+class TestPass2:
     def test_parse(self):
         # test in/out equivalence and parsing
-        res = json.loads(JSON)
-        out = json.dumps(res)
-        self.assertEqual(res, json.loads(out))
+        res = self.loads(JSON)
+        out = self.dumps(res)
+        self.assertEqual(res, self.loads(out))
+
+
+class TestPyPass2(TestPass2, PyTest): pass
+class TestCPass2(TestPass2, CTest): pass
diff --git a/Lib/test/json_tests/test_pass3.py b/Lib/test/json_tests/test_pass3.py
index 9151c43..228eee8 100644
--- a/Lib/test/json_tests/test_pass3.py
+++ b/Lib/test/json_tests/test_pass3.py
@@ -1,6 +1,5 @@
-from unittest import TestCase
+from test.json_tests import PyTest, CTest
 
-import json
 
 # from http://json.org/JSON_checker/test/pass3.json
 JSON = r'''
@@ -12,9 +11,14 @@
 }
 '''
 
-class TestPass3(TestCase):
+
+class TestPass3:
     def test_parse(self):
         # test in/out equivalence and parsing
-        res = json.loads(JSON)
-        out = json.dumps(res)
-        self.assertEqual(res, json.loads(out))
+        res = self.loads(JSON)
+        out = self.dumps(res)
+        self.assertEqual(res, self.loads(out))
+
+
+class TestPyPass3(TestPass3, PyTest): pass
+class TestCPass3(TestPass3, CTest): pass
diff --git a/Lib/test/json_tests/test_recursion.py b/Lib/test/json_tests/test_recursion.py
index 1e9b8ab..192ed9c 100644
--- a/Lib/test/json_tests/test_recursion.py
+++ b/Lib/test/json_tests/test_recursion.py
@@ -1,28 +1,16 @@
-from unittest import TestCase
+from test.json_tests import PyTest, CTest
 
-import json
 
 class JSONTestObject:
     pass
 
 
-class RecursiveJSONEncoder(json.JSONEncoder):
-    recurse = False
-    def default(self, o):
-        if o is JSONTestObject:
-            if self.recurse:
-                return [JSONTestObject]
-            else:
-                return 'JSONTestObject'
-        return json.JSONEncoder.default(o)
-
-
-class TestRecursion(TestCase):
+class TestRecursion:
     def test_listrecursion(self):
         x = []
         x.append(x)
         try:
-            json.dumps(x)
+            self.dumps(x)
         except ValueError:
             pass
         else:
@@ -31,7 +19,7 @@
         y = [x]
         x.append(y)
         try:
-            json.dumps(x)
+            self.dumps(x)
         except ValueError:
             pass
         else:
@@ -39,13 +27,13 @@
         y = []
         x = [y, y]
         # ensure that the marker is cleared
-        json.dumps(x)
+        self.dumps(x)
 
     def test_dictrecursion(self):
         x = {}
         x["test"] = x
         try:
-            json.dumps(x)
+            self.dumps(x)
         except ValueError:
             pass
         else:
@@ -53,9 +41,19 @@
         x = {}
         y = {"a": x, "b": x}
         # ensure that the marker is cleared
-        json.dumps(x)
+        self.dumps(x)
 
     def test_defaultrecursion(self):
+        class RecursiveJSONEncoder(self.json.JSONEncoder):
+            recurse = False
+            def default(self, o):
+                if o is JSONTestObject:
+                    if self.recurse:
+                        return [JSONTestObject]
+                    else:
+                        return 'JSONTestObject'
+                return super().default(o)
+
         enc = RecursiveJSONEncoder()
         self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"')
         enc.recurse = True
@@ -65,3 +63,38 @@
             pass
         else:
             self.fail("didn't raise ValueError on default recursion")
+
+
+    def test_highly_nested_objects_decoding(self):
+        # test that loading highly-nested objects doesn't segfault when C
+        # accelerations are used. See #12017
+        with self.assertRaises(RuntimeError):
+            self.loads('{"a":' * 100000 + '1' + '}' * 100000)
+        with self.assertRaises(RuntimeError):
+            self.loads('{"a":' * 100000 + '[1]' + '}' * 100000)
+        with self.assertRaises(RuntimeError):
+            self.loads('[' * 100000 + '1' + ']' * 100000)
+
+    def test_highly_nested_objects_encoding(self):
+        # See #12051
+        l, d = [], {}
+        for x in range(100000):
+            l, d = [l], {'k':d}
+        with self.assertRaises(RuntimeError):
+            self.dumps(l)
+        with self.assertRaises(RuntimeError):
+            self.dumps(d)
+
+    def test_endless_recursion(self):
+        # See #12051
+        class EndlessJSONEncoder(self.json.JSONEncoder):
+            def default(self, o):
+                """If check_circular is False, this will keep adding another list."""
+                return [o]
+
+        with self.assertRaises(RuntimeError):
+            EndlessJSONEncoder(check_circular=False).encode(5j)
+
+
+class TestPyRecursion(TestRecursion, PyTest): pass
+class TestCRecursion(TestRecursion, CTest): pass
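
The new test_highly_nested_objects_* cases assert that both decoding and encoding of pathologically nested documents fail with RuntimeError instead of overflowing the C stack (the hunk cites issues #12017 and #12051). An illustrative standalone check of the decoder half, matching the behavior these tests pin down on this era of CPython:

import json

# {"a": {"a": ... 1 ...}} nested 100000 levels deep.
deep = '{"a":' * 100000 + '1' + '}' * 100000
try:
    json.loads(deep)
except RuntimeError:  # raised instead of a segfault; RecursionError today
    print("decoder refused the nesting")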
diff --git a/Lib/test/json_tests/test_scanstring.py b/Lib/test/json_tests/test_scanstring.py
index abd3253..f82cdee 100644
--- a/Lib/test/json_tests/test_scanstring.py
+++ b/Lib/test/json_tests/test_scanstring.py
@@ -1,24 +1,10 @@
 import sys
-from unittest import TestCase, skipUnless
+from test.json_tests import PyTest, CTest
 
-import json
-import json.decoder
 
-try:
-    import _json
-except ImportError:
-    _json = None
-
-class TestScanString(TestCase):
-    def test_py_scanstring(self):
-        self._test_scanstring(json.decoder.py_scanstring)
-
-    @skipUnless(_json, 'test requires the _json module')
-    def test_c_scanstring(self):
-        if json.decoder.c_scanstring is not None:
-            self._test_scanstring(json.decoder.c_scanstring)
-
-    def _test_scanstring(self, scanstring):
+class TestScanstring:
+    def test_scanstring(self):
+        scanstring = self.json.decoder.scanstring
         self.assertEqual(
             scanstring('"z\\ud834\\udd20x"', 1, True),
             ('z\U0001d120x', 16))
@@ -109,4 +95,9 @@
             ('Bad value', 12))
 
     def test_overflow(self):
-        self.assertRaises(OverflowError, json.decoder.scanstring, b"xxx", sys.maxsize+1)
+        with self.assertRaises(OverflowError):
+            self.json.decoder.scanstring(b"xxx", sys.maxsize+1)
+
+
+class TestPyScanstring(TestScanstring, PyTest): pass
+class TestCScanstring(TestScanstring, CTest): pass
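
scanstring is the low-level string-literal parser the decoder is built on: called as scanstring(s, end, strict), it starts just after an opening quote and returns a (decoded, end) pair, where end is the index one past the closing quote. The first assertion above also checks that a \ud834\udd20 surrogate pair is recombined into U+1D120. A small usage sketch of that same call:

import json.decoder

# Index 1 skips the opening quote; strict=True rejects raw control chars.
s, end = json.decoder.scanstring('"z\\ud834\\udd20x"', 1, True)
assert s == 'z\U0001d120x'   # surrogate pair recombined
assert end == 16             # one past the closing quote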
diff --git a/Lib/test/json_tests/test_separators.py b/Lib/test/json_tests/test_separators.py
index d5b92bd..a01b38c 100644
--- a/Lib/test/json_tests/test_separators.py
+++ b/Lib/test/json_tests/test_separators.py
@@ -1,10 +1,8 @@
 import textwrap
-from unittest import TestCase
-
-import json
+from test.json_tests import PyTest, CTest
 
 
-class TestSeparators(TestCase):
+class TestSeparators:
     def test_separators(self):
         h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth',
              {'nifty': 87}, {'field': 'yes', 'morefield': False} ]
@@ -31,12 +29,16 @@
         ]""")
 
 
-        d1 = json.dumps(h)
-        d2 = json.dumps(h, indent=2, sort_keys=True, separators=(' ,', ' : '))
+        d1 = self.dumps(h)
+        d2 = self.dumps(h, indent=2, sort_keys=True, separators=(' ,', ' : '))
 
-        h1 = json.loads(d1)
-        h2 = json.loads(d2)
+        h1 = self.loads(d1)
+        h2 = self.loads(d2)
 
         self.assertEqual(h1, h)
         self.assertEqual(h2, h)
         self.assertEqual(d2, expect)
+
+
+class TestPySeparators(TestSeparators, PyTest): pass
+class TestCSeparators(TestSeparators, CTest): pass
diff --git a/Lib/test/json_tests/test_speedups.py b/Lib/test/json_tests/test_speedups.py
index b7c141f..5c24c05 100644
--- a/Lib/test/json_tests/test_speedups.py
+++ b/Lib/test/json_tests/test_speedups.py
@@ -1,29 +1,24 @@
-from unittest import TestCase, skipUnless
+from test.json_tests import CTest
 
-from json import decoder, encoder, scanner
 
-try:
-    import _json
-except ImportError:
-    _json = None
-
-@skipUnless(_json, 'test requires the _json module')
-class TestSpeedups(TestCase):
+class TestSpeedups(CTest):
     def test_scanstring(self):
-        self.assertEqual(decoder.scanstring.__module__, "_json")
-        self.assertIs(decoder.scanstring, decoder.c_scanstring)
+        self.assertEqual(self.json.decoder.scanstring.__module__, "_json")
+        self.assertIs(self.json.decoder.scanstring, self.json.decoder.c_scanstring)
 
     def test_encode_basestring_ascii(self):
-        self.assertEqual(encoder.encode_basestring_ascii.__module__, "_json")
-        self.assertIs(encoder.encode_basestring_ascii,
-                      encoder.c_encode_basestring_ascii)
+        self.assertEqual(self.json.encoder.encode_basestring_ascii.__module__,
+                         "_json")
+        self.assertIs(self.json.encoder.encode_basestring_ascii,
+                      self.json.encoder.c_encode_basestring_ascii)
 
-class TestDecode(TestCase):
+
+class TestDecode(CTest):
     def test_make_scanner(self):
-        self.assertRaises(AttributeError, scanner.c_make_scanner, 1)
+        self.assertRaises(AttributeError, self.json.scanner.c_make_scanner, 1)
 
     def test_make_encoder(self):
-        self.assertRaises(TypeError, encoder.c_make_encoder,
+        self.assertRaises(TypeError, self.json.encoder.c_make_encoder,
             (True, False),
             b"\xCD\x7D\x3D\x4E\x12\x4C\xF9\x79\xD7\x52\xBA\x82\xF2\x27\x4A\x7D\xA0\xCA\x75",
             None)
diff --git a/Lib/test/json_tests/test_unicode.py b/Lib/test/json_tests/test_unicode.py
index e336c91..f226aa6 100644
--- a/Lib/test/json_tests/test_unicode.py
+++ b/Lib/test/json_tests/test_unicode.py
@@ -1,73 +1,75 @@
-from unittest import TestCase
-
-import json
 from collections import OrderedDict
+from test.json_tests import PyTest, CTest
 
-class TestUnicode(TestCase):
+
+class TestUnicode:
     # test_encoding1 and test_encoding2 from 2.x are irrelevant (only str
     # is supported as input, not bytes).
 
     def test_encoding3(self):
         u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        j = json.dumps(u)
+        j = self.dumps(u)
         self.assertEqual(j, '"\\u03b1\\u03a9"')
 
     def test_encoding4(self):
         u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        j = json.dumps([u])
+        j = self.dumps([u])
         self.assertEqual(j, '["\\u03b1\\u03a9"]')
 
     def test_encoding5(self):
         u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        j = json.dumps(u, ensure_ascii=False)
+        j = self.dumps(u, ensure_ascii=False)
         self.assertEqual(j, '"{0}"'.format(u))
 
     def test_encoding6(self):
         u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
-        j = json.dumps([u], ensure_ascii=False)
+        j = self.dumps([u], ensure_ascii=False)
         self.assertEqual(j, '["{0}"]'.format(u))
 
     def test_big_unicode_encode(self):
         u = '\U0001d120'
-        self.assertEqual(json.dumps(u), '"\\ud834\\udd20"')
-        self.assertEqual(json.dumps(u, ensure_ascii=False), '"\U0001d120"')
+        self.assertEqual(self.dumps(u), '"\\ud834\\udd20"')
+        self.assertEqual(self.dumps(u, ensure_ascii=False), '"\U0001d120"')
 
     def test_big_unicode_decode(self):
         u = 'z\U0001d120x'
-        self.assertEqual(json.loads('"' + u + '"'), u)
-        self.assertEqual(json.loads('"z\\ud834\\udd20x"'), u)
+        self.assertEqual(self.loads('"' + u + '"'), u)
+        self.assertEqual(self.loads('"z\\ud834\\udd20x"'), u)
 
     def test_unicode_decode(self):
         for i in range(0, 0xd7ff):
             u = chr(i)
             s = '"\\u{0:04x}"'.format(i)
-            self.assertEqual(json.loads(s), u)
+            self.assertEqual(self.loads(s), u)
 
     def test_unicode_preservation(self):
-        self.assertEqual(type(json.loads('""')), str)
-        self.assertEqual(type(json.loads('"a"')), str)
-        self.assertEqual(type(json.loads('["a"]')[0]), str)
+        self.assertEqual(type(self.loads('""')), str)
+        self.assertEqual(type(self.loads('"a"')), str)
+        self.assertEqual(type(self.loads('["a"]')[0]), str)
 
     def test_bytes_encode(self):
-        self.assertRaises(TypeError, json.dumps, b"hi")
-        self.assertRaises(TypeError, json.dumps, [b"hi"])
+        self.assertRaises(TypeError, self.dumps, b"hi")
+        self.assertRaises(TypeError, self.dumps, [b"hi"])
 
     def test_bytes_decode(self):
-        self.assertRaises(TypeError, json.loads, b'"hi"')
-        self.assertRaises(TypeError, json.loads, b'["hi"]')
+        self.assertRaises(TypeError, self.loads, b'"hi"')
+        self.assertRaises(TypeError, self.loads, b'["hi"]')
 
 
     def test_object_pairs_hook_with_unicode(self):
         s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
         p = [("xkd", 1), ("kcw", 2), ("art", 3), ("hxm", 4),
              ("qrt", 5), ("pad", 6), ("hoy", 7)]
-        self.assertEqual(json.loads(s), eval(s))
-        self.assertEqual(json.loads(s, object_pairs_hook = lambda x: x), p)
-        od = json.loads(s, object_pairs_hook = OrderedDict)
+        self.assertEqual(self.loads(s), eval(s))
+        self.assertEqual(self.loads(s, object_pairs_hook = lambda x: x), p)
+        od = self.loads(s, object_pairs_hook = OrderedDict)
         self.assertEqual(od, OrderedDict(p))
         self.assertEqual(type(od), OrderedDict)
         # the object_pairs_hook takes priority over the object_hook
-        self.assertEqual(json.loads(s,
-                                    object_pairs_hook = OrderedDict,
+        self.assertEqual(self.loads(s, object_pairs_hook = OrderedDict,
                                     object_hook = lambda x: None),
                          OrderedDict(p))
+
+
+class TestPyUnicode(TestUnicode, PyTest): pass
+class TestCUnicode(TestUnicode, CTest): pass
diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py
index 3ed61f3..126f97c 100644
--- a/Lib/test/lock_tests.py
+++ b/Lib/test/lock_tests.py
@@ -474,7 +474,7 @@
                 self.assertEqual(state, 4)
         b = Bunch(f, 1)
         b.wait_for_started()
-        for i in range(5):
+        for i in range(4):
             time.sleep(0.01)
             with cond:
                 state += 1
diff --git a/Lib/test/math_testcases.txt b/Lib/test/math_testcases.txt
index 5e24335..9585188 100644
--- a/Lib/test/math_testcases.txt
+++ b/Lib/test/math_testcases.txt
@@ -517,3 +517,117 @@
 
 -- weaker version of expm10302
 expm10307 expm1 709.5 -> 1.3549863193146328e+308
+
+-------------------------
+-- log2: log to base 2 --
+-------------------------
+
+-- special values
+log20000 log2 0.0 -> -inf               divide-by-zero
+log20001 log2 -0.0 -> -inf              divide-by-zero
+log20002 log2 inf -> inf
+log20003 log2 -inf -> nan               invalid
+log20004 log2 nan -> nan
+
+-- exact value at 1.0
+log20010 log2 1.0 -> 0.0
+
+-- negatives
+log20020 log2 -5e-324 -> nan            invalid
+log20021 log2 -1.0 -> nan               invalid
+log20022 log2 -1.7e-308 -> nan          invalid
+
+-- exact values at powers of 2
+log20100 log2 2.0 -> 1.0
+log20101 log2 4.0 -> 2.0
+log20102 log2 8.0 -> 3.0
+log20103 log2 16.0 -> 4.0
+log20104 log2 32.0 -> 5.0
+log20105 log2 64.0 -> 6.0
+log20106 log2 128.0 -> 7.0
+log20107 log2 256.0 -> 8.0
+log20108 log2 512.0 -> 9.0
+log20109 log2 1024.0 -> 10.0
+log20110 log2 2048.0 -> 11.0
+
+log20200 log2 0.5 -> -1.0
+log20201 log2 0.25 -> -2.0
+log20202 log2 0.125 -> -3.0
+log20203 log2 0.0625 -> -4.0
+
+-- values close to 1.0
+log20300 log2 1.0000000000000002 -> 3.2034265038149171e-16
+log20301 log2 1.0000000001 -> 1.4426951601859516e-10
+log20302 log2 1.00001 -> 1.4426878274712997e-5
+
+log20310 log2 0.9999999999999999 -> -1.6017132519074588e-16
+log20311 log2 0.9999999999 -> -1.4426951603302210e-10
+log20312 log2 0.99999 -> -1.4427022544056922e-5
+
+-- tiny values
+log20400 log2 5e-324 -> -1074.0
+log20401 log2 1e-323 -> -1073.0
+log20402 log2 1.5e-323 -> -1072.4150374992789
+log20403 log2 2e-323 -> -1072.0
+
+log20410 log2 1e-308 -> -1023.1538532253076
+log20411 log2 2.2250738585072014e-308 -> -1022.0
+log20412 log2 4.4501477170144028e-308 -> -1021.0
+log20413 log2 1e-307 -> -1019.8319251304202
+
+-- huge values
+log20500 log2 1.7976931348623157e+308 -> 1024.0
+log20501 log2 1.7e+308 -> 1023.9193879716706
+log20502 log2 8.9884656743115795e+307 -> 1023.0
+
+-- selection of random values
+log20600 log2 -7.2174324841039838e+289 -> nan   invalid
+log20601 log2 -2.861319734089617e+265 -> nan    invalid
+log20602 log2 -4.3507646894008962e+257 -> nan   invalid
+log20603 log2 -6.6717265307520224e+234 -> nan   invalid
+log20604 log2 -3.9118023786619294e+229 -> nan   invalid
+log20605 log2 -1.5478221302505161e+206 -> nan   invalid
+log20606 log2 -1.4380485131364602e+200 -> nan   invalid
+log20607 log2 -3.7235198730382645e+185 -> nan   invalid
+log20608 log2 -1.0472242235095724e+184 -> nan   invalid
+log20609 log2 -5.0141781956163884e+160 -> nan   invalid
+log20610 log2 -2.1157958031160324e+124 -> nan   invalid
+log20611 log2 -7.9677558612567718e+90 -> nan    invalid
+log20612 log2 -5.5553906194063732e+45 -> nan    invalid
+log20613 log2 -16573900952607.953 -> nan        invalid
+log20614 log2 -37198371019.888618 -> nan        invalid
+log20615 log2 -6.0727115121422674e-32 -> nan    invalid
+log20616 log2 -2.5406841656526057e-38 -> nan    invalid
+log20617 log2 -4.9056766703267657e-43 -> nan    invalid
+log20618 log2 -2.1646786075228305e-71 -> nan    invalid
+log20619 log2 -2.470826790488573e-78 -> nan     invalid
+log20620 log2 -3.8661709303489064e-165 -> nan   invalid
+log20621 log2 -1.0516496976649986e-182 -> nan   invalid
+log20622 log2 -1.5935458614317996e-255 -> nan   invalid
+log20623 log2 -2.8750977267336654e-293 -> nan   invalid
+log20624 log2 -7.6079466794732585e-296 -> nan   invalid
+log20625 log2 3.2073253539988545e-307 -> -1018.1505544209213
+log20626 log2 1.674937885472249e-244 -> -809.80634755783126
+log20627 log2 1.0911259044931283e-214 -> -710.76679472274213
+log20628 log2 2.0275372624809709e-154 -> -510.55719818383272
+log20629 log2 7.3926087369631841e-115 -> -379.13564735312292
+log20630 log2 1.3480198206342423e-86 -> -285.25497445094436
+log20631 log2 8.9927384655719947e-83 -> -272.55127136401637
+log20632 log2 3.1452398713597487e-60 -> -197.66251564496875
+log20633 log2 7.0706573215457351e-55 -> -179.88420087782217
+log20634 log2 3.1258285390731669e-49 -> -161.13023800505653
+log20635 log2 8.2253046627829942e-41 -> -133.15898277355879
+log20636 log2 7.8691367397519897e+49 -> 165.75068202732419
+log20637 log2 2.9920561983925013e+64 -> 214.18453534573757
+log20638 log2 4.7827254553946841e+77 -> 258.04629628445673
+log20639 log2 3.1903566496481868e+105 -> 350.47616767491166
+log20640 log2 5.6195082449502419e+113 -> 377.86831861008250
+log20641 log2 9.9625658250651047e+125 -> 418.55752921228753
+log20642 log2 2.7358945220961532e+145 -> 483.13158636923413
+log20643 log2 2.785842387926931e+174 -> 579.49360214860280
+log20644 log2 2.4169172507252751e+193 -> 642.40529039289652
+log20645 log2 3.1689091206395632e+205 -> 682.65924573798395
+log20646 log2 2.535995592365391e+208 -> 692.30359597460460
+log20647 log2 6.2011236566089916e+233 -> 776.64177576730913
+log20648 log2 2.1843274820677632e+253 -> 841.57499717289647
+log20649 log2 8.7493931063474791e+297 -> 989.74182713073981
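
The added log2 block follows the grammar the rest of math_testcases.txt already uses: one case per line, made of a test id, the function name, the argument, '->', the expected result, and optional IEEE 754 flag annotations such as divide-by-zero or invalid; lines starting with '--' are comments. A hedged sketch of a parser for this format (the real consumer lives in test_math; names here are illustrative):

def parse_testcase(line):
    """Parse e.g. 'log20000 log2 0.0 -> -inf  divide-by-zero'."""
    left, right = line.split('->')
    test_id, func, arg = left.split()
    expected, *flags = right.split()
    return test_id, func, float(arg), float(expected), flags

tid, fn, arg, exp, flags = parse_testcase(
    'log20000 log2 0.0 -> -inf               divide-by-zero')
assert fn == 'log2' and exp == float('-inf') and flags == ['divide-by-zero']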
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
index 120bd60..04e87e3 100755
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -265,6 +265,10 @@
     directly to set the values that would normally be set by flags
     on the command line.
     """
+
+    # Display the Python traceback on fatal errors (e.g. segfault)
+    faulthandler.enable(all_threads=True)
+
     if hasattr(faulthandler, 'dump_tracebacks_later'):
         timeout = 60*60
     else:
@@ -626,9 +630,12 @@
                 if test is None:
                     finished += 1
                     continue
+                accumulate_result(test, result)
                 if not quiet:
-                    print("[{1:{0}}{2}] {3}".format(
-                        test_count_width, test_index, test_count, test))
+                    fmt = "[{1:{0}}{2}/{3}] {4}" if bad else "[{1:{0}}{2}] {4}"
+                    print(fmt.format(
+                        test_count_width, test_index, test_count,
+                        len(bad), test))
                 if stdout:
                     print(stdout)
                 if stderr:
@@ -638,7 +645,6 @@
                     raise KeyboardInterrupt   # What else?
                 if result[0] == CHILD_ERROR:
                     raise Exception("Child error on {}: {}".format(test, result[1]))
-                accumulate_result(test, result)
                 test_index += 1
         except KeyboardInterrupt:
             interrupted = True
@@ -648,8 +654,9 @@
     else:
         for test_index, test in enumerate(tests, 1):
             if not quiet:
-                print("[{1:{0}}{2}] {3}".format(
-                    test_count_width, test_index, test_count, test))
+                fmt = "[{1:{0}}{2}/{3}] {4}" if bad else "[{1:{0}}{2}] {4}"
+                print(fmt.format(
+                    test_count_width, test_index, test_count, len(bad), test))
                 sys.stdout.flush()
             if trace:
                 # If we're tracing code coverage, then we don't exit with status
@@ -1588,9 +1595,6 @@
     return TEMPDIR, TESTCWD
 
 if __name__ == '__main__':
-    # Display the Python traceback on segfault and division by zero
-    faulthandler.enable(all_threads=True)
-
     # Remove regrtest.py's own directory from the module search path. Despite
     # the elimination of implicit relative imports, this is still needed to
     # ensure that submodules of the test package do not inappropriately appear
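
Moving faulthandler.enable() out of the __main__ block and into the main test-run setup matters for the child processes used by regrtest's multiprocess mode: they import regrtest and call into it rather than executing it as a script, so they previously ran with no fault handler installed. The call itself, in isolation:

import faulthandler

# Install handlers for SIGSEGV, SIGFPE, SIGABRT, SIGBUS and SIGILL that
# dump the Python traceback of every thread to stderr before dying.
faulthandler.enable(all_threads=True)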
diff --git a/Lib/test/support.py b/Lib/test/support.py
index e8a5911..b03069c 100644
--- a/Lib/test/support.py
+++ b/Lib/test/support.py
@@ -28,6 +28,11 @@
 except ImportError:
     _thread = None
 
+try:
+    import zlib
+except ImportError:
+    zlib = None
+
 __all__ = [
     "Error", "TestFailed", "ResourceDenied", "import_module",
     "verbose", "use_resources", "max_memuse", "record_original_stdout",
@@ -35,15 +40,16 @@
     "is_resource_enabled", "requires", "find_unused_port", "bind_port",
     "IPV6_ENABLED", "is_jython", "TESTFN", "HOST", "SAVEDCWD", "temp_cwd",
     "findfile", "sortdict", "check_syntax_error", "open_urlresource",
-    "check_warnings", "CleanImport", "EnvironmentVarGuard",
-    "TransientResource", "captured_output", "captured_stdout", "time_out",
+    "check_warnings", "CleanImport", "EnvironmentVarGuard", "TransientResource",
+    "captured_stdout", "captured_stdin", "captured_stderr", "time_out",
     "socket_peer_reset", "ioerror_peer_reset", "run_with_locale", 'temp_umask',
     "transient_internet", "set_memlimit", "bigmemtest", "bigaddrspacetest",
     "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup",
     "threading_cleanup", "reap_children", "cpython_only", "check_impl_detail",
     "get_attribute", "swap_item", "swap_attr", "requires_IEEE_754",
-    "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink"]
-
+    "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink",
+    "import_fresh_module", "requires_zlib"
+    ]
 
 class Error(Exception):
     """Base class for regression test exceptions."""
@@ -90,23 +96,20 @@
 def _save_and_remove_module(name, orig_modules):
     """Helper function to save and remove a module from sys.modules
 
-       Return value is True if the module was in sys.modules and
-       False otherwise."""
-    saved = True
-    try:
-        orig_modules[name] = sys.modules[name]
-    except KeyError:
-        saved = False
-    else:
+       Raise ImportError if the module can't be imported."""
+    # try to import the module and raise an error if it can't be imported
+    if name not in sys.modules:
+        __import__(name)
         del sys.modules[name]
-    return saved
-
+    for modname in list(sys.modules):
+        if modname == name or modname.startswith(name + '.'):
+            orig_modules[modname] = sys.modules[modname]
+            del sys.modules[modname]
 
 def _save_and_block_module(name, orig_modules):
     """Helper function to save and block a module in sys.modules
 
-       Return value is True if the module was in sys.modules and
-       False otherwise."""
+       Return True if the module was in sys.modules, False otherwise."""
     saved = True
     try:
         orig_modules[name] = sys.modules[name]
@@ -122,14 +125,15 @@
     the sys.modules cache is restored to its original state.
 
     Modules named in fresh are also imported anew if needed by the import.
+    If one of these modules can't be imported, None is returned.
 
     Importing of modules named in blocked is prevented while the fresh import
     takes place.
 
     If deprecated is True, any module or package deprecation messages
     will be suppressed."""
-    # NOTE: test_heapq and test_warnings include extra sanity checks to make
-    # sure that this utility function is working as expected
+    # NOTE: test_heapq, test_json and test_warnings include extra sanity checks
+    # to make sure that this utility function is working as expected
     with _ignore_deprecated_imports(deprecated):
         # Keep track of modules saved for later restoration as well
         # as those which just need a blocking entry removed
@@ -143,6 +147,8 @@
                 if not _save_and_block_module(blocked_name, orig_modules):
                     names_to_remove.append(blocked_name)
             fresh_module = importlib.import_module(name)
+        except ImportError:
+            fresh_module = None
         finally:
             for orig_name, module in orig_modules.items():
                 sys.modules[orig_name] = module
@@ -285,6 +291,14 @@
             msg = "Use of the `%s' resource not enabled" % resource
         raise ResourceDenied(msg)
 
+def linux_version():
+    try:
+        # platform.release() is something like '2.6.33.7-desktop-2mnb'
+        version_string = platform.release().split('-')[0]
+        return tuple(map(int, version_string.split('.')))
+    except ValueError:
+        return 0, 0, 0
+
 HOST = 'localhost'
 
 def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
@@ -400,6 +414,8 @@
     float.__getformat__("double").startswith("IEEE"),
     "test requires IEEE 754 doubles")
 
+requires_zlib = unittest.skipUnless(zlib, 'requires zlib')
+
 is_jython = sys.platform.startswith('java')
 
 # Filename used for testing
@@ -886,14 +902,8 @@
 
 @contextlib.contextmanager
 def captured_output(stream_name):
-    """Run the 'with' statement body using a StringIO object in place of a
-    specific attribute on the sys module.
-    Example use (with 'stream_name=stdout')::
-
-       with captured_stdout() as s:
-           print("hello")
-       assert s.getvalue() == "hello"
-    """
+    """Return a context manager used by captured_stdout/stdin/stderr
+    that temporarily replaces the sys stream *stream_name* with a StringIO."""
     import io
     orig_stdout = getattr(sys, stream_name)
     setattr(sys, stream_name, io.StringIO())
@@ -903,6 +913,12 @@
         setattr(sys, stream_name, orig_stdout)
 
 def captured_stdout():
+    """Capture the output of sys.stdout:
+
+       with captured_stdout() as s:
+           print("hello")
+       self.assertEqual(s.getvalue(), "hello")
+    """
     return captured_output("stdout")
 
 def captured_stderr():
@@ -911,6 +927,7 @@
 def captured_stdin():
     return captured_output("stdin")
 
+
 def gc_collect():
     """Force as many objects as possible to be collected.
 
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index 67a5aed..c0450e7 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -514,13 +514,11 @@
         s1 = 'Hello\r\nworld\r\n'
 
         s = s1.encode(self.encoding)
-        try:
-            with open(support.TESTFN, 'wb') as fp:
-                fp.write(s)
-            with codecs.open(support.TESTFN, 'U', encoding=self.encoding) as reader:
-                self.assertEqual(reader.read(), s1)
-        finally:
-            support.unlink(support.TESTFN)
+        self.addCleanup(support.unlink, support.TESTFN)
+        with open(support.TESTFN, 'wb') as fp:
+            fp.write(s)
+        with codecs.open(support.TESTFN, 'U', encoding=self.encoding) as reader:
+            self.assertEqual(reader.read(), s1)
 
 class UTF16LETest(ReadTest):
     encoding = "utf-16-le"
@@ -1624,6 +1622,7 @@
                  "utf-32",
                  "utf-32-le",
                  "utf-32-be")
+        self.addCleanup(support.unlink, support.TESTFN)
         for encoding in tests:
             # Check if the BOM is written only once
             with codecs.open(support.TESTFN, 'w+', encoding=encoding) as f:
diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py
index a63af4c..d1c9ee5 100644
--- a/Lib/test/test_compileall.py
+++ b/Lib/test/test_compileall.py
@@ -248,7 +248,7 @@
         self.assertEqual(b'', quiet)
 
     def test_regexp(self):
-        self.assertRunOK('-q', '-x', 'ba.*', self.pkgdir)
+        self.assertRunOK('-q', '-x', 'ba[^\/]*$', self.pkgdir)
         self.assertNotCompiled(self.barfn)
         self.assertCompiled(self.initfn)
 
diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py
index e46cd91..96bbafe 100644
--- a/Lib/test/test_decimal.py
+++ b/Lib/test/test_decimal.py
@@ -1970,6 +1970,17 @@
         self.assertRaises(TypeError, c.fma, 2, '3', 4)
         self.assertRaises(TypeError, c.fma, 2, 3, '4')
 
+        # Issue 12079 for Context.fma ...
+        self.assertRaises(TypeError, c.fma,
+                          Decimal('Infinity'), Decimal(0), "not a decimal")
+        self.assertRaises(TypeError, c.fma,
+                          Decimal(1), Decimal('snan'), 1.222)
+        # ... and for Decimal.fma.
+        self.assertRaises(TypeError, Decimal('Infinity').fma,
+                          Decimal(0), "not a decimal")
+        self.assertRaises(TypeError, Decimal(1).fma,
+                          Decimal('snan'), 1.222)
+
     def test_is_finite(self):
         c = Context()
         d = c.is_finite(Decimal(10))
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index 9273c46..31731d2 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -1587,6 +1587,7 @@
             ("__floor__", math.floor, zero, set(), {}),
             ("__trunc__", math.trunc, zero, set(), {}),
             ("__ceil__", math.ceil, zero, set(), {}),
+            ("__dir__", dir, empty_seq, set(), {}),
             ]
 
         class Checker(object):
diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py
index d08347d..dbc1917 100644
--- a/Lib/test/test_faulthandler.py
+++ b/Lib/test/test_faulthandler.py
@@ -75,7 +75,7 @@
         return output.splitlines(), exitcode
 
     def check_fatal_error(self, code, line_number, name_regex,
-                          filename=None, all_threads=False, other_regex=None):
+                          filename=None, all_threads=True, other_regex=None):
         """
         Check that the fault handler for fatal errors is enabled and check the
         traceback from the child process output.
@@ -204,15 +204,15 @@
                 '(?:Segmentation fault|Bus error)',
                 filename=filename)
 
-    def test_enable_threads(self):
+    def test_enable_single_thread(self):
         self.check_fatal_error("""
 import faulthandler
-faulthandler.enable(all_threads=True)
+faulthandler.enable(all_threads=False)
 faulthandler._read_null()
 """.strip(),
             3,
             '(?:Segmentation fault|Bus error)',
-            all_threads=True)
+            all_threads=False)
 
     def test_disable(self):
         code = """
@@ -252,9 +252,9 @@
 def funcB():
     if {has_filename}:
         with open({filename}, "wb") as fp:
-            faulthandler.dump_traceback(fp)
+            faulthandler.dump_traceback(fp, all_threads=False)
     else:
-        faulthandler.dump_traceback()
+        faulthandler.dump_traceback(all_threads=False)
 
 def funcA():
     funcB()
diff --git a/Lib/test/test_fileinput.py b/Lib/test/test_fileinput.py
index 76e4d16..a96d48a 100644
--- a/Lib/test/test_fileinput.py
+++ b/Lib/test/test_fileinput.py
@@ -7,7 +7,6 @@
 import re
 import fileinput
 import collections
-import gzip
 import types
 import codecs
 import unittest
@@ -16,6 +15,10 @@
     import bz2
 except ImportError:
     bz2 = None
+try:
+    import gzip
+except ImportError:
+    gzip = None
 
 from io import StringIO
 from fileinput import FileInput, hook_encoded
@@ -758,6 +761,7 @@
     def test_no_ext(self):
         self.do_test_use_builtin_open("abcd", 2)
 
+    @unittest.skipUnless(gzip, "Requires gzip and zlib")
     def test_gz_ext_fake(self):
         original_open = gzip.open
         gzip.open = self.fake_open
diff --git a/Lib/test/test_frozen.py b/Lib/test/test_frozen.py
index 07131af..5243ebb 100644
--- a/Lib/test/test_frozen.py
+++ b/Lib/test/test_frozen.py
@@ -6,31 +6,37 @@
 
 class FrozenTests(unittest.TestCase):
     def test_frozen(self):
-        try:
-            import __hello__
-        except ImportError as x:
-            self.fail("import __hello__ failed:" + str(x))
-        self.assertEqual(__hello__.initialized, True)
-        self.assertEqual(len(dir(__hello__)), 7, dir(__hello__))
+        with captured_stdout() as stdout:
+            try:
+                import __hello__
+            except ImportError as x:
+                self.fail("import __hello__ failed:" + str(x))
+            self.assertEqual(__hello__.initialized, True)
+            self.assertEqual(len(dir(__hello__)), 7, dir(__hello__))
+            self.assertEqual(stdout.getvalue(), 'Hello world!\n')
 
-        try:
-            import __phello__
-        except ImportError as x:
-            self.fail("import __phello__ failed:" + str(x))
-        self.assertEqual(__phello__.initialized, True)
-        if not "__phello__.spam" in sys.modules:
-            self.assertEqual(len(dir(__phello__)), 8, dir(__phello__))
-        else:
-            self.assertEqual(len(dir(__phello__)), 9, dir(__phello__))
-        self.assertEqual(__phello__.__path__, [__phello__.__name__])
+        with captured_stdout() as stdout:
+            try:
+                import __phello__
+            except ImportError as x:
+                self.fail("import __phello__ failed:" + str(x))
+            self.assertEqual(__phello__.initialized, True)
+            if not "__phello__.spam" in sys.modules:
+                self.assertEqual(len(dir(__phello__)), 8, dir(__phello__))
+            else:
+                self.assertEqual(len(dir(__phello__)), 9, dir(__phello__))
+            self.assertEqual(__phello__.__path__, [__phello__.__name__])
+            self.assertEqual(stdout.getvalue(), 'Hello world!\n')
 
-        try:
-            import __phello__.spam
-        except ImportError as x:
-            self.fail("import __phello__.spam failed:" + str(x))
-        self.assertEqual(__phello__.spam.initialized, True)
-        self.assertEqual(len(dir(__phello__.spam)), 7)
-        self.assertEqual(len(dir(__phello__)), 9)
+        with captured_stdout() as stdout:
+            try:
+                import __phello__.spam
+            except ImportError as x:
+                self.fail("import __phello__.spam failed:" + str(x))
+            self.assertEqual(__phello__.spam.initialized, True)
+            self.assertEqual(len(dir(__phello__.spam)), 7)
+            self.assertEqual(len(dir(__phello__)), 9)
+            self.assertEqual(stdout.getvalue(), 'Hello world!\n')
 
         try:
             import __phello__.foo
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
index 6dbe2b6..78e0fb4 100644
--- a/Lib/test/test_ftplib.py
+++ b/Lib/test/test_ftplib.py
@@ -22,10 +22,25 @@
 threading = support.import_module('threading')
 
 # the dummy data returned by server over the data channel when
-# RETR, LIST and NLST commands are issued
+# RETR, LIST, NLST, MLSD commands are issued
 RETR_DATA = 'abcde12345\r\n' * 1000
 LIST_DATA = 'foo\r\nbar\r\n'
 NLST_DATA = 'foo\r\nbar\r\n'
+MLSD_DATA = ("type=cdir;perm=el;unique==keVO1+ZF4; test\r\n"
+             "type=pdir;perm=e;unique==keVO1+d?3; ..\r\n"
+             "type=OS.unix=slink:/foobar;perm=;unique==keVO1+4G4; foobar\r\n"
+             "type=OS.unix=chr-13/29;perm=;unique==keVO1+5G4; device\r\n"
+             "type=OS.unix=blk-11/108;perm=;unique==keVO1+6G4; block\r\n"
+             "type=file;perm=awr;unique==keVO1+8G4; writable\r\n"
+             "type=dir;perm=cpmel;unique==keVO1+7G4; promiscuous\r\n"
+             "type=dir;perm=;unique==keVO1+1t2; no-exec\r\n"
+             "type=file;perm=r;unique==keVO1+EG4; two words\r\n"
+             "type=file;perm=r;unique==keVO1+IH4;  leading space\r\n"
+             "type=file;perm=r;unique==keVO1+1G4; file1\r\n"
+             "type=dir;perm=cpmel;unique==keVO1+7G4; incoming\r\n"
+             "type=file;perm=r;unique==keVO1+1G4; file2\r\n"
+             "type=file;perm=r;unique==keVO1+1G4; file3\r\n"
+             "type=file;perm=r;unique==keVO1+1G4; file4\r\n")
 
 
 class DummyDTPHandler(asynchat.async_chat):
@@ -49,6 +64,11 @@
             self.dtp_conn_closed = True
 
     def push(self, what):
+        if self.baseclass.next_data is not None:
+            what = self.baseclass.next_data
+            self.baseclass.next_data = None
+        if not what:
+            return self.close_when_done()
         super(DummyDTPHandler, self).push(what.encode('ascii'))
 
     def handle_error(self):
@@ -61,12 +81,15 @@
 
     def __init__(self, conn):
         asynchat.async_chat.__init__(self, conn)
+        # tells the socket to handle urgent data inline (ABOR command)
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_OOBINLINE, 1)
         self.set_terminator(b"\r\n")
         self.in_buffer = []
         self.dtp = None
         self.last_received_cmd = None
         self.last_received_data = ''
         self.next_response = ''
+        self.next_data = None
         self.rest = None
         self.push('220 welcome')
 
@@ -102,7 +125,7 @@
         addr = list(map(int, arg.split(',')))
         ip = '%d.%d.%d.%d' %tuple(addr[:4])
         port = (addr[4] * 256) + addr[5]
-        s = socket.create_connection((ip, port), timeout=10)
+        s = socket.create_connection((ip, port), timeout=2)
         self.dtp = self.dtp_handler(s, baseclass=self)
         self.push('200 active data connection established')
 
@@ -120,7 +143,7 @@
     def cmd_eprt(self, arg):
         af, ip, port = arg.split(arg[0])[1:-1]
         port = int(port)
-        s = socket.create_connection((ip, port), timeout=10)
+        s = socket.create_connection((ip, port), timeout=2)
         self.dtp = self.dtp_handler(s, baseclass=self)
         self.push('200 active data connection established')
 
@@ -181,6 +204,9 @@
         self.push('221 quit ok')
         self.close()
 
+    def cmd_abor(self, arg):
+        self.push('226 abor ok')
+
     def cmd_stor(self, arg):
         self.push('125 stor ok')
 
@@ -208,6 +234,14 @@
         self.dtp.push(NLST_DATA)
         self.dtp.close_when_done()
 
+    def cmd_opts(self, arg):
+        self.push('200 opts ok')
+
+    def cmd_mlsd(self, arg):
+        self.push('125 mlsd ok')
+        self.dtp.push(MLSD_DATA)
+        self.dtp.close_when_done()
+
 
 class DummyFTPServer(asyncore.dispatcher, threading.Thread):
 
@@ -411,7 +445,7 @@
     def setUp(self):
         self.server = DummyFTPServer((HOST, 0))
         self.server.start()
-        self.client = ftplib.FTP(timeout=10)
+        self.client = ftplib.FTP(timeout=2)
         self.client.connect(self.server.host, self.server.port)
 
     def tearDown(self):
@@ -491,6 +525,9 @@
         # Ensure the connection gets closed; sock attribute should be None
         self.assertEqual(self.client.sock, None)
 
+    def test_abort(self):
+        self.client.abort()
+
     def test_retrbinary(self):
         def callback(data):
             received.append(data.decode('ascii'))
@@ -550,6 +587,64 @@
         self.client.dir(lambda x: l.append(x))
         self.assertEqual(''.join(l), LIST_DATA.replace('\r\n', ''))
 
+    def test_mlsd(self):
+        list(self.client.mlsd())
+        list(self.client.mlsd(path='/'))
+        list(self.client.mlsd(path='/', facts=['size', 'type']))
+
+        ls = list(self.client.mlsd())
+        for name, facts in ls:
+            self.assertIsInstance(name, str)
+            self.assertIsInstance(facts, dict)
+            self.assertTrue(name)
+            self.assertIn('type', facts)
+            self.assertIn('perm', facts)
+            self.assertIn('unique', facts)
+
+        def set_data(data):
+            self.server.handler_instance.next_data = data
+
+        def test_entry(line, type=None, perm=None, unique=None, name=None):
+            type = 'type' if type is None else type
+            perm = 'perm' if perm is None else perm
+            unique = 'unique' if unique is None else unique
+            name = 'name' if name is None else name
+            set_data(line)
+            _name, facts = next(self.client.mlsd())
+            self.assertEqual(_name, name)
+            self.assertEqual(facts['type'], type)
+            self.assertEqual(facts['perm'], perm)
+            self.assertEqual(facts['unique'], unique)
+
+        # plain
+        test_entry('type=type;perm=perm;unique=unique; name\r\n')
+        # "=" in fact value
+        test_entry('type=ty=pe;perm=perm;unique=unique; name\r\n', type="ty=pe")
+        test_entry('type==type;perm=perm;unique=unique; name\r\n', type="=type")
+        test_entry('type=t=y=pe;perm=perm;unique=unique; name\r\n', type="t=y=pe")
+        test_entry('type=====;perm=perm;unique=unique; name\r\n', type="====")
+        # spaces in name
+        test_entry('type=type;perm=perm;unique=unique; na me\r\n', name="na me")
+        test_entry('type=type;perm=perm;unique=unique; name \r\n', name="name ")
+        test_entry('type=type;perm=perm;unique=unique;  name\r\n', name=" name")
+        test_entry('type=type;perm=perm;unique=unique; n am  e\r\n', name="n am  e")
+        # ";" in name
+        test_entry('type=type;perm=perm;unique=unique; na;me\r\n', name="na;me")
+        test_entry('type=type;perm=perm;unique=unique; ;name\r\n', name=";name")
+        test_entry('type=type;perm=perm;unique=unique; ;name;\r\n', name=";name;")
+        test_entry('type=type;perm=perm;unique=unique; ;;;;\r\n', name=";;;;")
+        # case sensitivity
+        set_data('Type=type;TyPe=perm;UNIQUE=unique; name\r\n')
+        _name, facts = next(self.client.mlsd())
+        for x in facts:
+            self.assertTrue(x.islower())
+        # no data (directory empty)
+        set_data('')
+        self.assertRaises(StopIteration, next, self.client.mlsd())
+        set_data('')
+        for x in self.client.mlsd():
+            self.fail("unexpected data %s" % data)
+
     def test_makeport(self):
         with self.client.makeport():
             # IPv4 is in use, just make sure send_eprt has not been used
@@ -576,7 +671,7 @@
             return True
 
         # base test
-        with ftplib.FTP(timeout=10) as self.client:
+        with ftplib.FTP(timeout=2) as self.client:
             self.client.connect(self.server.host, self.server.port)
             self.client.sendcmd('noop')
             self.assertTrue(is_client_connected())
@@ -584,7 +679,7 @@
         self.assertFalse(is_client_connected())
 
         # QUIT sent inside the with block
-        with ftplib.FTP(timeout=10) as self.client:
+        with ftplib.FTP(timeout=2) as self.client:
             self.client.connect(self.server.host, self.server.port)
             self.client.sendcmd('noop')
             self.client.quit()
@@ -594,7 +689,7 @@
         # force a wrong response code to be sent on QUIT: error_perm
         # is expected and the connection is supposed to be closed
         try:
-            with ftplib.FTP(timeout=10) as self.client:
+            with ftplib.FTP(timeout=2) as self.client:
                 self.client.connect(self.server.host, self.server.port)
                 self.client.sendcmd('noop')
                 self.server.handler_instance.next_response = '550 error on quit'
@@ -692,7 +787,7 @@
     def setUp(self):
         self.server = DummyTLS_FTPServer((HOST, 0))
         self.server.start()
-        self.client = ftplib.FTP_TLS(timeout=10)
+        self.client = ftplib.FTP_TLS(timeout=2)
         self.client.connect(self.server.host, self.server.port)
         # enable TLS
         self.client.auth()
@@ -705,7 +800,7 @@
     def setUp(self):
         self.server = DummyTLS_FTPServer((HOST, 0))
         self.server.start()
-        self.client = ftplib.FTP_TLS(timeout=10)
+        self.client = ftplib.FTP_TLS(timeout=2)
         self.client.connect(self.server.host, self.server.port)
 
     def tearDown(self):
@@ -765,7 +860,7 @@
         self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE,
                           keyfile=CERTFILE, context=ctx)
 
-        self.client = ftplib.FTP_TLS(context=ctx, timeout=10)
+        self.client = ftplib.FTP_TLS(context=ctx, timeout=2)
         self.client.connect(self.server.host, self.server.port)
         self.assertNotIsInstance(self.client.sock, ssl.SSLSocket)
         self.client.auth()
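
The test_mlsd additions pin down how RFC 3659 MLSD lines are parsed: the facts run up to the first space, and everything after that space, including further spaces and semicolons, is the file name; '=' may occur again inside a fact value; and fact names are case-insensitive (lowercased). A sketch of a parser consistent with those assertions (the real implementation is ftplib.FTP.mlsd; this is only illustrative):

def parse_mlsd_line(line):
    """Split 'type=file;perm=r;unique==keVO1+1G4; file1' into (name, facts)."""
    facts_part, _, name = line.rstrip('\r\n').partition(' ')
    facts = {}
    for fact in facts_part.split(';'):
        if fact:
            key, _, value = fact.partition('=')  # '=' may recur in the value
            facts[key.lower()] = value
    return name, facts

name, facts = parse_mlsd_line('type==type;perm=perm;unique=unique; na;me\r\n')
assert name == 'na;me' and facts['type'] == '=type'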
diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py
index 17b44ea..d288d79 100644
--- a/Lib/test/test_gdb.py
+++ b/Lib/test/test_gdb.py
@@ -611,12 +611,29 @@
 $''')
 
 class PyBtTests(DebuggerTests):
-    def test_basic_command(self):
+    def test_bt(self):
         'Verify that the "py-bt" command works'
         bt = self.get_stack_trace(script=self.get_sample_script(),
                                   cmds_after_breakpoint=['py-bt'])
         self.assertMultilineMatches(bt,
                                     r'''^.*
+Traceback \(most recent call first\):
+  File ".*gdb_sample.py", line 10, in baz
+    id\(42\)
+  File ".*gdb_sample.py", line 7, in bar
+    baz\(a, b, c\)
+  File ".*gdb_sample.py", line 4, in foo
+    bar\(a, b, c\)
+  File ".*gdb_sample.py", line 12, in <module>
+    foo\(1, 2, 3\)
+''')
+
+    def test_bt_full(self):
+        'Verify that the "py-bt-full" command works'
+        bt = self.get_stack_trace(script=self.get_sample_script(),
+                                  cmds_after_breakpoint=['py-bt-full'])
+        self.assertMultilineMatches(bt,
+                                    r'''^.*
 #[0-9]+ Frame 0x[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
     baz\(a, b, c\)
 #[0-9]+ Frame 0x[0-9a-f]+, for file .*gdb_sample.py, line 4, in foo \(a=1, b=2, c=3\)
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index fe316fd..17d752b 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -118,6 +118,24 @@
         else:
             self.assertTrue(0 == "hashlib didn't reject bogus hash name")
 
+    def test_get_builtin_constructor(self):
+        get_builtin_constructor = hashlib.__dict__[
+                '__get_builtin_constructor']
+        self.assertRaises(ValueError, get_builtin_constructor, 'test')
+        try:
+            import _md5
+        except ImportError:
+            pass
+        # This forces an ImportError for "import _md5" statements
+        sys.modules['_md5'] = None
+        try:
+            self.assertRaises(ValueError, get_builtin_constructor, 'md5')
+        finally:
+            if '_md5' in locals():
+                sys.modules['_md5'] = _md5
+            else:
+                del sys.modules['_md5']
+
     def test_hexdigest(self):
         for name in self.supported_hash_names:
             h = hashlib.new(name)
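
test_get_builtin_constructor above relies on a standard import-blocking trick: storing None under a name in sys.modules makes any subsequent import of that name raise ImportError, so the fallback branch in hashlib.__get_builtin_constructor can be exercised even on builds where _md5 exists. In isolation (the module name here is made up):

import sys

sys.modules['_demo_missing'] = None   # poison the import cache
try:
    import _demo_missing
except ImportError:
    print("None in sys.modules forces ImportError")
finally:
    del sys.modules['_demo_missing']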
diff --git a/Lib/test/test_heapq.py b/Lib/test/test_heapq.py
index b41458b..e0c49c1 100644
--- a/Lib/test/test_heapq.py
+++ b/Lib/test/test_heapq.py
@@ -1,16 +1,31 @@
 """Unittests for heapq."""
 
-import random
-import unittest
-from test import support
 import sys
+import random
 
-# We do a bit of trickery here to be able to test both the C implementation
-# and the Python implementation of the module.
-import heapq as c_heapq
+from test import support
+from unittest import TestCase, skipUnless
+
 py_heapq = support.import_fresh_module('heapq', blocked=['_heapq'])
+c_heapq = support.import_fresh_module('heapq', fresh=['_heapq'])
 
-class TestHeap(unittest.TestCase):
+# _heapq.nlargest/nsmallest are saved in heapq._nlargest/_nsmallest when
+# _heapq is imported, so check them there
+func_names = ['heapify', 'heappop', 'heappush', 'heappushpop',
+              'heapreplace', '_nlargest', '_nsmallest']
+
+class TestModules(TestCase):
+    def test_py_functions(self):
+        for fname in func_names:
+            self.assertEqual(getattr(py_heapq, fname).__module__, 'heapq')
+
+    @skipUnless(c_heapq, 'requires _heapq')
+    def test_c_functions(self):
+        for fname in func_names:
+            self.assertEqual(getattr(c_heapq, fname).__module__, '_heapq')
+
+
+class TestHeap(TestCase):
     module = None
 
     def test_push_pop(self):
@@ -176,21 +191,8 @@
                 self.assertEqual(list(self.module.nlargest(n, data, key=f)),
                                  sorted(data, key=f, reverse=True)[:n])
 
-class TestHeapPython(TestHeap):
-    module = py_heapq
-
-    # As an early adopter, we sanity check the
-    # test.support.import_fresh_module utility function
-    def test_pure_python(self):
-        self.assertFalse(sys.modules['heapq'] is self.module)
-        self.assertTrue(hasattr(self.module.heapify, '__code__'))
-
-
-class TestHeapC(TestHeap):
-    module = c_heapq
-
     def test_comparison_operator(self):
-        # Issue 3501: Make sure heapq works with both __lt__
+        # Issue 3051: Make sure heapq works with both __lt__
         # For python 3.0, __le__ alone is not enough
         def hsort(data, comp):
             data = [comp(x) for x in data]
@@ -212,6 +214,15 @@
         self.assertRaises(TypeError, data, LE)
 
 
+class TestHeapPython(TestHeap):
+    module = py_heapq
+
+
+@skipUnless(c_heapq, 'requires _heapq')
+class TestHeapC(TestHeap):
+    module = c_heapq
+
+
 #==============================================================================
 
 class LenOnly:
@@ -307,9 +318,9 @@
     'Test multiple tiers of iterators'
     return chain(map(lambda x:x, R(Ig(G(seqn)))))
 
-class TestErrorHandling(unittest.TestCase):
-    # only for C implementation
-    module = c_heapq
+
+class TestErrorHandling(TestCase):
+    module = None
 
     def test_non_sequence(self):
         for f in (self.module.heapify, self.module.heappop):
@@ -360,11 +371,20 @@
                 self.assertRaises(ZeroDivisionError, f, 2, E(s))
 
 
+class TestErrorHandlingPython(TestErrorHandling):
+    module = py_heapq
+
+@skipUnless(c_heapq, 'requires _heapq')
+class TestErrorHandlingC(TestErrorHandling):
+    module = c_heapq
+
+
 #==============================================================================
 
 
 def test_main(verbose=None):
-    test_classes = [TestHeapPython, TestHeapC, TestErrorHandling]
+    test_classes = [TestModules, TestHeapPython, TestHeapC,
+                    TestErrorHandlingPython, TestErrorHandlingC]
     support.run_unittest(*test_classes)
 
     # verify reference counting
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index 39ebc26..1bbaf0e 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -461,6 +461,23 @@
         self.send_error(417)
         return False
 
+
+class AuditableBytesIO:
+
+    def __init__(self):
+        self.datas = []
+
+    def write(self, data):
+        self.datas.append(data)
+
+    def getData(self):
+        return b''.join(self.datas)
+
+    @property
+    def numWrites(self):
+        return len(self.datas)
+
+
 class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
     """Test the functionality of the BaseHTTPServer.
 
@@ -527,27 +544,49 @@
         self.verify_get_called()
         self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
 
-    def test_header_buffering(self):
-
-        def _readAndReseek(f):
-            pos = f.tell()
-            f.seek(0)
-            data = f.read()
-            f.seek(pos)
-            return data
+    def test_header_buffering_of_send_error(self):
 
         input = BytesIO(b'GET / HTTP/1.1\r\n\r\n')
-        output = BytesIO()
-        self.handler.rfile = input
-        self.handler.wfile = output
-        self.handler.request_version = 'HTTP/1.1'
+        output = AuditableBytesIO()
+        handler = SocketlessRequestHandler()
+        handler.rfile = input
+        handler.wfile = output
+        handler.request_version = 'HTTP/1.1'
+        handler.requestline = ''
+        handler.command = None
 
-        self.handler.send_header('Foo', 'foo')
-        self.handler.send_header('bar', 'bar')
-        self.assertEqual(_readAndReseek(output), b'')
-        self.handler.end_headers()
-        self.assertEqual(_readAndReseek(output),
-                         b'Foo: foo\r\nbar: bar\r\n\r\n')
+        handler.send_error(418)
+        self.assertEqual(output.numWrites, 2)
+
+    def test_header_buffering_of_send_response_only(self):
+
+        input = BytesIO(b'GET / HTTP/1.1\r\n\r\n')
+        output = AuditableBytesIO()
+        handler = SocketlessRequestHandler()
+        handler.rfile = input
+        handler.wfile = output
+        handler.request_version = 'HTTP/1.1'
+
+        handler.send_response_only(418)
+        self.assertEqual(output.numWrites, 0)
+        handler.end_headers()
+        self.assertEqual(output.numWrites, 1)
+
+    def test_header_buffering_of_send_header(self):
+
+        input = BytesIO(b'GET / HTTP/1.1\r\n\r\n')
+        output = AuditableBytesIO()
+        handler = SocketlessRequestHandler()
+        handler.rfile = input
+        handler.wfile = output
+        handler.request_version = 'HTTP/1.1'
+
+        handler.send_header('Foo', 'foo')
+        handler.send_header('bar', 'bar')
+        self.assertEqual(output.numWrites, 0)
+        handler.end_headers()
+        self.assertEqual(output.getData(), b'Foo: foo\r\nbar: bar\r\n\r\n')
+        self.assertEqual(output.numWrites, 1)
 
     def test_header_unbuffered_when_continue(self):
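
The rewritten buffering tests assert on the number of write() calls rather than on intermediate buffer contents: headers queued by send_header() must hit the output in a single write once end_headers() runs, and send_error() must take exactly two writes. A sketch of the write-counting helper idea (CountingWriter is an assumed name, a stand-in for the AuditableBytesIO above):

    class CountingWriter:
        """Record every chunk passed to write() for later assertions."""

        def __init__(self):
            self.chunks = []

        def write(self, data):
            self.chunks.append(bytes(data))
            return len(data)

        def getvalue(self):
            return b''.join(self.chunks)

        @property
        def num_writes(self):
            return len(self.chunks)

Counting writes distinguishes "everything was flushed eventually" from "the headers were coalesced into one send", and the latter is the property under test.
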
 
diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py
index 8034000..c4c7ecc 100644
--- a/Lib/test/test_imaplib.py
+++ b/Lib/test/test_imaplib.py
@@ -258,11 +258,58 @@
     port = 993
     imap_class = IMAP4_SSL
 
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def create_ssl_context(self):
+        ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+        ssl_context.load_cert_chain(CERTFILE)
+        return ssl_context
+
+    def check_logincapa(self, server):
+        try:
+            for cap in server.capabilities:
+                self.assertIsInstance(cap, str)
+            self.assertFalse('LOGINDISABLED' in server.capabilities)
+            self.assertTrue('AUTH=PLAIN' in server.capabilities)
+            rs = server.login(self.username, self.password)
+            self.assertEqual(rs[0], 'OK')
+        finally:
+            server.logout()
+
     def test_logincapa(self):
-        for cap in self.server.capabilities:
-            self.assertIsInstance(cap, str)
-        self.assertFalse('LOGINDISABLED' in self.server.capabilities)
-        self.assertTrue('AUTH=PLAIN' in self.server.capabilities)
+        with transient_internet(self.host):
+            _server = self.imap_class(self.host, self.port)
+            self.check_logincapa(_server)
+
+    def test_logincapa_with_client_certfile(self):
+        with transient_internet(self.host):
+            _server = self.imap_class(self.host, self.port, certfile=CERTFILE)
+            self.check_logincapa(_server)
+
+    def test_logincapa_with_client_ssl_context(self):
+        with transient_internet(self.host):
+            _server = self.imap_class(self.host, self.port, ssl_context=self.create_ssl_context())
+            self.check_logincapa(_server)
+
+    def test_logout(self):
+        with transient_internet(self.host):
+            _server = self.imap_class(self.host, self.port)
+            rs = _server.logout()
+            self.assertEqual(rs[0], 'BYE')
+
+    def test_ssl_context_certfile_exclusive(self):
+        with transient_internet(self.host):
+            self.assertRaises(ValueError, self.imap_class, self.host, self.port,
+                              certfile=CERTFILE, ssl_context=self.create_ssl_context())
+
+    def test_ssl_context_keyfile_exclusive(self):
+        with transient_internet(self.host):
+            self.assertRaises(ValueError, self.imap_class, self.host, self.port,
+                              keyfile=CERTFILE, ssl_context=self.create_ssl_context())
 
 
 def test_main():
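
The two *_exclusive tests pin down an argument contract: callers may pass either an ssl_context or the older certfile/keyfile pair, never both. A hedged sketch of the kind of guard they imply (this helper is an assumption about the constructor's behaviour, not code copied from imaplib):

    def _check_ssl_args(keyfile=None, certfile=None, ssl_context=None):
        if ssl_context is not None and (keyfile is not None or
                                        certfile is not None):
            raise ValueError("ssl_context and certfile/keyfile arguments "
                             "are mutually exclusive")
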
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index 8d293d0..3724b5c 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -794,6 +794,12 @@
         self.assertEqual(b, b"gf")
         self.assertEqual(bufio.readinto(b), 0)
         self.assertEqual(b, b"gf")
+        rawio = self.MockRawIO((b"abc", None))
+        bufio = self.tp(rawio)
+        self.assertEqual(bufio.readinto(b), 2)
+        self.assertEqual(b, b"ab")
+        self.assertEqual(bufio.readinto(b), 1)
+        self.assertEqual(b, b"cb")
 
     def test_readlines(self):
         def bufio():
@@ -1490,6 +1496,32 @@
             self.assertEqual(s,
                 b"A" + b"B" * overwrite_size + b"A" * (9 - overwrite_size))
 
+    def test_write_rewind_write(self):
+        # Various combinations of reading / writing / seeking backwards / writing again
+        def mutate(bufio, pos1, pos2):
+            assert pos2 >= pos1
+            # Fill the buffer
+            bufio.seek(pos1)
+            bufio.read(pos2 - pos1)
+            bufio.write(b'\x02')
+            # This writes earlier than the previous write, but still inside
+            # the buffer.
+            bufio.seek(pos1)
+            bufio.write(b'\x01')
+
+        b = b"\x80\x81\x82\x83\x84"
+        for i in range(0, len(b)):
+            for j in range(i, len(b)):
+                raw = self.BytesIO(b)
+                bufio = self.tp(raw, 100)
+                mutate(bufio, i, j)
+                bufio.flush()
+                expected = bytearray(b)
+                expected[j] = 2
+                expected[i] = 1
+                self.assertEqual(raw.getvalue(), expected,
+                                 "failed result for i=%d, j=%d" % (i, j))
+
     def test_truncate_after_read_or_write(self):
         raw = self.BytesIO(b"A" * 10)
         bufio = self.tp(raw, 100)
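
test_write_rewind_write sweeps every (pos1, pos2) pair, but the invariant it checks is easier to see in a single runnable scenario: after seeking backwards and writing inside the still-buffered region, flush() must land both writes at their own offsets.

    import io

    raw = io.BytesIO(b"\x80\x81\x82\x83\x84")
    bufio = io.BufferedRandom(raw, buffer_size=100)
    bufio.seek(1)
    bufio.read(2)            # fill the buffer; file position is now 3
    bufio.write(b"\x02")     # write at offset 3
    bufio.seek(1)
    bufio.write(b"\x01")     # earlier write, still inside the buffer
    bufio.flush()
    assert raw.getvalue() == b"\x80\x01\x82\x02\x84"
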
diff --git a/Lib/test/test_linecache.py b/Lib/test/test_linecache.py
index fcc390a..7d14534 100644
--- a/Lib/test/test_linecache.py
+++ b/Lib/test/test_linecache.py
@@ -9,7 +9,7 @@
 FILENAME = linecache.__file__
 INVALID_NAME = '!@$)(!@#_1'
 EMPTY = ''
-TESTS = 'cjkencodings_test inspect_fodder inspect_fodder2 mapping_tests'
+TESTS = 'inspect_fodder inspect_fodder2 mapping_tests'
 TESTS = TESTS.split()
 TEST_PATH = os.path.dirname(support.__file__)
 MODULES = "linecache abc".split()
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index fe42594..b27ff6a 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -25,24 +25,17 @@
 import logging.handlers
 import logging.config
 
-import asynchat
-import asyncore
 import codecs
 import datetime
-import errno
 import pickle
 import io
 import gc
-from http.server import HTTPServer, BaseHTTPRequestHandler
 import json
 import os
 import queue
 import re
 import select
-import smtpd
 import socket
-from socketserver import (ThreadingUDPServer, DatagramRequestHandler,
-                          ThreadingTCPServer, StreamRequestHandler)
 import struct
 import sys
 import tempfile
@@ -51,13 +44,30 @@
 import textwrap
 import time
 import unittest
-from urllib.parse import urlparse, parse_qs
 import warnings
 import weakref
 try:
     import threading
+    # The following imports are needed only for tests which
+    # depend on the threading module
+    import asynchat
+    import asyncore
+    import errno
+    from http.server import HTTPServer, BaseHTTPRequestHandler
+    import smtpd
+    from urllib.parse import urlparse, parse_qs
+    from socketserver import (ThreadingUDPServer, DatagramRequestHandler,
+                              ThreadingTCPServer, StreamRequestHandler)
 except ImportError:
     threading = None
+try:
+    import win32evtlog
+except ImportError:
+    win32evtlog = None
+try:
+    import win32evtlogutil
+except ImportError:
+    win32evtlogutil = None
+    win32evtlog = None
 
 
 class BaseTest(unittest.TestCase):
@@ -602,277 +612,315 @@
 # -- The following section could be moved into a server_helper.py module
 # -- if it proves to be of wider utility than just test_logging
 
-class TestSMTPChannel(smtpd.SMTPChannel):
-    """
-    This derived class has had to be created because smtpd does not
-    support use of custom channel maps, although they are allowed by
-    asyncore's design. Issue #11959 has been raised to address this,
-    and if resolved satisfactorily, some of this code can be removed.
-    """
-    def __init__(self, server, conn, addr, sockmap):
-        asynchat.async_chat.__init__(self, conn, sockmap)
-        self.smtp_server = server
-        self.conn = conn
-        self.addr = addr
-        self.received_lines = []
-        self.smtp_state = self.COMMAND
-        self.seen_greeting = ''
-        self.mailfrom = None
-        self.rcpttos = []
-        self.received_data = ''
-        self.fqdn = socket.getfqdn()
-        self.num_bytes = 0
-        try:
-            self.peer = conn.getpeername()
-        except socket.error as err:
-            # a race condition  may occur if the other end is closing
-            # before we can get the peername
-            self.close()
-            if err.args[0] != errno.ENOTCONN:
-                raise
-            return
-        self.push('220 %s %s' % (self.fqdn, smtpd.__version__))
-        self.set_terminator(b'\r\n')
-
-
-class TestSMTPServer(smtpd.SMTPServer):
-    """
-    This class implements a test SMTP server.
-
-    :param addr: A (host, port) tuple which the server listens on.
-                 You can specify a port value of zero: the server's
-                 *port* attribute will hold the actual port number
-                 used, which can be used in client connections.
-    :param handler: A callable which will be called to process
-                    incoming messages. The handler will be passed
-                    the client address tuple, who the message is from,
-                    a list of recipients and the message data.
-    :param poll_interval: The interval, in seconds, used in the underlying
-                          :func:`select` or :func:`poll` call by
-                          :func:`asyncore.loop`.
-    :param sockmap: A dictionary which will be used to hold
-                    :class:`asyncore.dispatcher` instances used by
-                    :func:`asyncore.loop`. This avoids changing the
-                    :mod:`asyncore` module's global state.
-    """
-    channel_class = TestSMTPChannel
-
-    def __init__(self, addr, handler, poll_interval, sockmap):
-        self._localaddr = addr
-        self._remoteaddr = None
-        self.sockmap = sockmap
-        asyncore.dispatcher.__init__(self, map=sockmap)
-        try:
-            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-            sock.setblocking(0)
-            self.set_socket(sock, map=sockmap)
-            # try to re-use a server port if possible
-            self.set_reuse_addr()
-            self.bind(addr)
-            self.port = sock.getsockname()[1]
-            self.listen(5)
-        except:
-            self.close()
-            raise
-        self._handler = handler
-        self._thread = None
-        self.poll_interval = poll_interval
-
-    def handle_accepted(self, conn, addr):
+if threading:
+    class TestSMTPChannel(smtpd.SMTPChannel):
         """
-        Redefined only because the base class does not pass in a
-        map, forcing use of a global in :mod:`asyncore`.
+        This derived class has had to be created because smtpd does not
+        support use of custom channel maps, although they are allowed by
+        asyncore's design. Issue #11959 has been raised to address this,
+        and if resolved satisfactorily, some of this code can be removed.
         """
-        channel = self.channel_class(self, conn, addr, self.sockmap)
+        def __init__(self, server, conn, addr, sockmap):
+            asynchat.async_chat.__init__(self, conn, sockmap)
+            self.smtp_server = server
+            self.conn = conn
+            self.addr = addr
+            self.received_lines = []
+            self.smtp_state = self.COMMAND
+            self.seen_greeting = ''
+            self.mailfrom = None
+            self.rcpttos = []
+            self.received_data = ''
+            self.fqdn = socket.getfqdn()
+            self.num_bytes = 0
+            try:
+                self.peer = conn.getpeername()
+            except socket.error as err:
+                # a race condition may occur if the other end is closing
+                # before we can get the peername
+                self.close()
+                if err.args[0] != errno.ENOTCONN:
+                    raise
+                return
+            self.push('220 %s %s' % (self.fqdn, smtpd.__version__))
+            self.set_terminator(b'\r\n')
 
-    def process_message(self, peer, mailfrom, rcpttos, data):
-        """
-        Delegates to the handler passed in to the server's constructor.
 
-        Typically, this will be a test case method.
-        :param peer: The client (host, port) tuple.
-        :param mailfrom: The address of the sender.
-        :param rcpttos: The addresses of the recipients.
-        :param data: The message.
+    class TestSMTPServer(smtpd.SMTPServer):
         """
-        self._handler(peer, mailfrom, rcpttos, data)
+        This class implements a test SMTP server.
 
-    def start(self):
-        """
-        Start the server running on a separate daemon thread.
-        """
-        self._thread = t = threading.Thread(target=self.serve_forever,
-                                            args=(self.poll_interval,))
-        t.setDaemon(True)
-        t.start()
-
-    def serve_forever(self, poll_interval):
-        """
-        Run the :mod:`asyncore` loop until normal termination
-        conditions arise.
+        :param addr: A (host, port) tuple which the server listens on.
+                     You can specify a port value of zero: the server's
+                     *port* attribute will hold the actual port number
+                     used, which can be used in client connections.
+        :param handler: A callable which will be called to process
+                        incoming messages. The handler will be passed
+                        the client address tuple, who the message is from,
+                        a list of recipients and the message data.
         :param poll_interval: The interval, in seconds, used in the underlying
                               :func:`select` or :func:`poll` call by
                               :func:`asyncore.loop`.
+        :param sockmap: A dictionary which will be used to hold
+                        :class:`asyncore.dispatcher` instances used by
+                        :func:`asyncore.loop`. This avoids changing the
+                        :mod:`asyncore` module's global state.
         """
-        asyncore.loop(poll_interval, map=self.sockmap)
+        channel_class = TestSMTPChannel
 
-    def stop(self, timeout=None):
-        """
-        Stop the thread by closing the server instance.
-        Wait for the server thread to terminate.
+        def __init__(self, addr, handler, poll_interval, sockmap):
+            self._localaddr = addr
+            self._remoteaddr = None
+            self.sockmap = sockmap
+            asyncore.dispatcher.__init__(self, map=sockmap)
+            try:
+                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+                sock.setblocking(0)
+                self.set_socket(sock, map=sockmap)
+                # try to re-use a server port if possible
+                self.set_reuse_addr()
+                self.bind(addr)
+                self.port = sock.getsockname()[1]
+                self.listen(5)
+            except:
+                self.close()
+                raise
+            self._handler = handler
+            self._thread = None
+            self.poll_interval = poll_interval
 
-        :param timeout: How long to wait for the server thread
-                        to terminate.
-        """
-        self.close()
-        self._thread.join(timeout)
-        self._thread = None
+        def handle_accepted(self, conn, addr):
+            """
+            Redefined only because the base class does not pass in a
+            map, forcing use of a global in :mod:`asyncore`.
+            """
+            channel = self.channel_class(self, conn, addr, self.sockmap)
 
-class ControlMixin(object):
-    """
-    This mixin is used to start a server on a separate thread, and
-    shut it down programmatically. Request handling is simplified - instead
-    of needing to derive a suitable RequestHandler subclass, you just
-    provide a callable which will be passed each received request to be
-    processed.
+        def process_message(self, peer, mailfrom, rcpttos, data):
+            """
+            Delegates to the handler passed in to the server's constructor.
 
-    :param handler: A handler callable which will be called with a
-                    single parameter - the request - in order to
-                    process the request. This handler is called on the
-                    server thread, effectively meaning that requests are
-                    processed serially. While not quite Web scale ;-),
-                    this should be fine for testing applications.
-    :param poll_interval: The polling interval in seconds.
-    """
-    def __init__(self, handler, poll_interval):
-        self._thread = None
-        self.poll_interval = poll_interval
-        self._handler = handler
-        self.ready = threading.Event()
+            Typically, this will be a test case method.
+            :param peer: The client (host, port) tuple.
+            :param mailfrom: The address of the sender.
+            :param rcpttos: The addresses of the recipients.
+            :param data: The message.
+            """
+            self._handler(peer, mailfrom, rcpttos, data)
 
-    def start(self):
-        """
-        Create a daemon thread to run the server, and start it.
-        """
-        self._thread = t = threading.Thread(target=self.serve_forever,
-                                            args=(self.poll_interval,))
-        t.setDaemon(True)
-        t.start()
+        def start(self):
+            """
+            Start the server running on a separate daemon thread.
+            """
+            self._thread = t = threading.Thread(target=self.serve_forever,
+                                                args=(self.poll_interval,))
+            t.setDaemon(True)
+            t.start()
 
-    def serve_forever(self, poll_interval):
-        """
-        Run the server. Set the ready flag before entering the
-        service loop.
-        """
-        self.ready.set()
-        super(ControlMixin, self).serve_forever(poll_interval)
+        def serve_forever(self, poll_interval):
+            """
+            Run the :mod:`asyncore` loop until normal termination
+            conditions arise.
+            :param poll_interval: The interval, in seconds, used in the underlying
+                                  :func:`select` or :func:`poll` call by
+                                  :func:`asyncore.loop`.
+            """
+            try:
+                asyncore.loop(poll_interval, map=self.sockmap)
+            except select.error:
+                # On FreeBSD 8, closing the server reproducibly
+                # raises this error. We swallow it if the
+                # server has been closed.
+                if self.connected or self.accepting:
+                    raise
 
-    def stop(self, timeout=None):
-        """
-        Tell the server thread to stop, and wait for it to do so.
+        def stop(self, timeout=None):
+            """
+            Stop the thread by closing the server instance.
+            Wait for the server thread to terminate.
 
-        :param timeout: How long to wait for the server thread
-                        to terminate.
-        """
-        self.shutdown()
-        if self._thread is not None:
+            :param timeout: How long to wait for the server thread
+                            to terminate.
+            """
+            self.close()
             self._thread.join(timeout)
             self._thread = None
-        self.server_close()
-        self.ready.clear()
 
-class TestHTTPServer(ControlMixin, HTTPServer):
-    """
-    An HTTP server which is controllable using :class:`ControlMixin`.
+    class ControlMixin(object):
+        """
+        This mixin is used to start a server on a separate thread, and
+        shut it down programmatically. Request handling is simplified - instead
+        of needing to derive a suitable RequestHandler subclass, you just
+        provide a callable which will be passed each received request to be
+        processed.
 
-    :param addr: A tuple with the IP address and port to listen on.
-    :param handler: A handler callable which will be called with a
-                    single parameter - the request - in order to
-                    process the request.
-    :param poll_interval: The polling interval in seconds.
-    :param log: Pass ``True`` to enable log messages.
-    """
-    def __init__(self, addr, handler, poll_interval=0.5, log=False):
-        class DelegatingHTTPRequestHandler(BaseHTTPRequestHandler):
-            def __getattr__(self, name, default=None):
-                if name.startswith('do_'):
-                    return self.process_request
-                raise AttributeError(name)
+        :param handler: A handler callable which will be called with a
+                        single parameter - the request - in order to
+                        process the request. This handler is called on the
+                        server thread, effectively meaning that requests are
+                        processed serially. While not quite Web scale ;-),
+                        this should be fine for testing applications.
+        :param poll_interval: The polling interval in seconds.
+        """
+        def __init__(self, handler, poll_interval):
+            self._thread = None
+            self.poll_interval = poll_interval
+            self._handler = handler
+            self.ready = threading.Event()
 
-            def process_request(self):
-                self.server._handler(self)
+        def start(self):
+            """
+            Create a daemon thread to run the server, and start it.
+            """
+            self._thread = t = threading.Thread(target=self.serve_forever,
+                                                args=(self.poll_interval,))
+            t.setDaemon(True)
+            t.start()
 
-            def log_message(self, format, *args):
-                if log:
-                    super(DelegatingHTTPRequestHandler,
-                          self).log_message(format, *args)
-        HTTPServer.__init__(self, addr, DelegatingHTTPRequestHandler)
-        ControlMixin.__init__(self, handler, poll_interval)
+        def serve_forever(self, poll_interval):
+            """
+            Run the server. Set the ready flag before entering the
+            service loop.
+            """
+            self.ready.set()
+            super(ControlMixin, self).serve_forever(poll_interval)
 
-class TestTCPServer(ControlMixin, ThreadingTCPServer):
-    """
-    A TCP server which is controllable using :class:`ControlMixin`.
+        def stop(self, timeout=None):
+            """
+            Tell the server thread to stop, and wait for it to do so.
 
-    :param addr: A tuple with the IP address and port to listen on.
-    :param handler: A handler callable which will be called with a single
-                    parameter - the request - in order to process the request.
-    :param poll_interval: The polling interval in seconds.
-    :bind_and_activate: If True (the default), binds the server and starts it
-                        listening. If False, you need to call
-                        :meth:`server_bind` and :meth:`server_activate` at
-                        some later time before calling :meth:`start`, so that
-                        the server will set up the socket and listen on it.
-    """
+            :param timeout: How long to wait for the server thread
+                            to terminate.
+            """
+            self.shutdown()
+            if self._thread is not None:
+                self._thread.join(timeout)
+                self._thread = None
+            self.server_close()
+            self.ready.clear()
 
-    allow_reuse_address = True
+    class TestHTTPServer(ControlMixin, HTTPServer):
+        """
+        An HTTP server which is controllable using :class:`ControlMixin`.
 
-    def __init__(self, addr, handler, poll_interval=0.5,
-                 bind_and_activate=True):
-        class DelegatingTCPRequestHandler(StreamRequestHandler):
+        :param addr: A tuple with the IP address and port to listen on.
+        :param handler: A handler callable which will be called with a
+                        single parameter - the request - in order to
+                        process the request.
+        :param poll_interval: The polling interval in seconds.
+        :param log: Pass ``True`` to enable log messages.
+        """
+        def __init__(self, addr, handler, poll_interval=0.5,
+                     log=False, sslctx=None):
+            class DelegatingHTTPRequestHandler(BaseHTTPRequestHandler):
+                def __getattr__(self, name, default=None):
+                    if name.startswith('do_'):
+                        return self.process_request
+                    raise AttributeError(name)
 
-            def handle(self):
-                self.server._handler(self)
-        ThreadingTCPServer.__init__(self, addr, DelegatingTCPRequestHandler,
-                                    bind_and_activate)
-        ControlMixin.__init__(self, handler, poll_interval)
+                def process_request(self):
+                    self.server._handler(self)
 
-    def server_bind(self):
-        super(TestTCPServer, self).server_bind()
-        self.port = self.socket.getsockname()[1]
+                def log_message(self, format, *args):
+                    if log:
+                        super(DelegatingHTTPRequestHandler,
+                              self).log_message(format, *args)
+            HTTPServer.__init__(self, addr, DelegatingHTTPRequestHandler)
+            ControlMixin.__init__(self, handler, poll_interval)
+            self.sslctx = sslctx
 
-class TestUDPServer(ControlMixin, ThreadingUDPServer):
-    """
-    A UDP server which is controllable using :class:`ControlMixin`.
+        def get_request(self):
+            try:
+                sock, addr = self.socket.accept()
+                if self.sslctx:
+                    sock = self.sslctx.wrap_socket(sock, server_side=True)
+            except socket.error as e:
+                # socket errors are silenced by the caller, print them here
+                sys.stderr.write("Got an error:\n%s\n" % e)
+                raise
+            return sock, addr
 
-    :param addr: A tuple with the IP address and port to listen on.
-    :param handler: A handler callable which will be called with a
-                    single parameter - the request - in order to
-                    process the request.
-    :param poll_interval: The polling interval for shutdown requests,
-                          in seconds.
-    :bind_and_activate: If True (the default), binds the server and
-                        starts it listening. If False, you need to
-                        call :meth:`server_bind` and
-                        :meth:`server_activate` at some later time
-                        before calling :meth:`start`, so that the server will
-                        set up the socket and listen on it.
-    """
-    def __init__(self, addr, handler, poll_interval=0.5, bind_and_activate=True):
-        class DelegatingUDPRequestHandler(DatagramRequestHandler):
+    class TestTCPServer(ControlMixin, ThreadingTCPServer):
+        """
+        A TCP server which is controllable using :class:`ControlMixin`.
 
-            def handle(self):
-                self.server._handler(self)
-        ThreadingUDPServer.__init__(self, addr, DelegatingUDPRequestHandler,
-                                    bind_and_activate)
-        ControlMixin.__init__(self, handler, poll_interval)
+        :param addr: A tuple with the IP address and port to listen on.
+        :param handler: A handler callable which will be called with a single
+                        parameter - the request - in order to process the request.
+        :param poll_interval: The polling interval in seconds.
+        :bind_and_activate: If True (the default), binds the server and starts it
+                            listening. If False, you need to call
+                            :meth:`server_bind` and :meth:`server_activate` at
+                            some later time before calling :meth:`start`, so that
+                            the server will set up the socket and listen on it.
+        """
 
-    def server_bind(self):
-        super(TestUDPServer, self).server_bind()
-        self.port = self.socket.getsockname()[1]
+        allow_reuse_address = True
 
+        def __init__(self, addr, handler, poll_interval=0.5,
+                     bind_and_activate=True):
+            class DelegatingTCPRequestHandler(StreamRequestHandler):
+
+                def handle(self):
+                    self.server._handler(self)
+            ThreadingTCPServer.__init__(self, addr, DelegatingTCPRequestHandler,
+                                        bind_and_activate)
+            ControlMixin.__init__(self, handler, poll_interval)
+
+        def server_bind(self):
+            super(TestTCPServer, self).server_bind()
+            self.port = self.socket.getsockname()[1]
+
+    class TestUDPServer(ControlMixin, ThreadingUDPServer):
+        """
+        A UDP server which is controllable using :class:`ControlMixin`.
+
+        :param addr: A tuple with the IP address and port to listen on.
+        :param handler: A handler callable which will be called with a
+                        single parameter - the request - in order to
+                        process the request.
+        :param poll_interval: The polling interval for shutdown requests,
+                              in seconds.
+        :bind_and_activate: If True (the default), binds the server and
+                            starts it listening. If False, you need to
+                            call :meth:`server_bind` and
+                            :meth:`server_activate` at some later time
+                            before calling :meth:`start`, so that the server will
+                            set up the socket and listen on it.
+        """
+        def __init__(self, addr, handler, poll_interval=0.5,
+                     bind_and_activate=True):
+            class DelegatingUDPRequestHandler(DatagramRequestHandler):
+
+                def handle(self):
+                    self.server._handler(self)
+
+                def finish(self):
+                    data = self.wfile.getvalue()
+                    if data:
+                        try:
+                            super(DelegatingUDPRequestHandler, self).finish()
+                        except socket.error:
+                            if not self.server._closed:
+                                raise
+
+            ThreadingUDPServer.__init__(self, addr,
+                                        DelegatingUDPRequestHandler,
+                                        bind_and_activate)
+            ControlMixin.__init__(self, handler, poll_interval)
+            self._closed = False
+
+        def server_bind(self):
+            super(TestUDPServer, self).server_bind()
+            self.port = self.socket.getsockname()[1]
+
+        def server_close(self):
+            super(TestUDPServer, self).server_close()
+            self._closed = True
 
 # - end of server_helper section
 
+@unittest.skipUnless(threading, 'Threading required for this test.')
 class SMTPHandlerTest(BaseTest):
     def test_basic(self):
         sockmap = {}
@@ -1435,34 +1483,44 @@
 
 @unittest.skipUnless(threading, 'Threading required for this test.')
 class HTTPHandlerTest(BaseTest):
-
     """Test for HTTPHandler."""
 
+    PEMFILE = """-----BEGIN RSA PRIVATE KEY-----
+MIICXQIBAAKBgQDGT4xS5r91rbLJQK2nUDenBhBG6qFk+bVOjuAGC/LSHlAoBnvG
+zQG3agOG+e7c5z2XT8m2ktORLqG3E4mYmbxgyhDrzP6ei2Anc+pszmnxPoK3Puh5
+aXV+XKt0bU0C1m2+ACmGGJ0t3P408art82nOxBw8ZHgIg9Dtp6xIUCyOqwIDAQAB
+AoGBAJFTnFboaKh5eUrIzjmNrKsG44jEyy+vWvHN/FgSC4l103HxhmWiuL5Lv3f7
+0tMp1tX7D6xvHwIG9VWvyKb/Cq9rJsDibmDVIOslnOWeQhG+XwJyitR0pq/KlJIB
+5LjORcBw795oKWOAi6RcOb1ON59tysEFYhAGQO9k6VL621gRAkEA/Gb+YXULLpbs
+piXN3q4zcHzeaVANo69tUZ6TjaQqMeTxE4tOYM0G0ZoSeHEdaP59AOZGKXXNGSQy
+2z/MddcYGQJBAMkjLSYIpOLJY11ja8OwwswFG2hEzHe0cS9bzo++R/jc1bHA5R0Y
+i6vA5iPi+wopPFvpytdBol7UuEBe5xZrxWMCQQCWxELRHiP2yWpEeLJ3gGDzoXMN
+PydWjhRju7Bx3AzkTtf+D6lawz1+eGTuEss5i0JKBkMEwvwnN2s1ce+EuF4JAkBb
+E96h1lAzkVW5OAfYOPY8RCPA90ZO/hoyg7PpSxR0ECuDrgERR8gXIeYUYfejBkEa
+rab4CfRoVJKKM28Yq/xZAkBvuq670JRCwOgfUTdww7WpdOQBYPkzQccsKNCslQW8
+/DyW6y06oQusSENUvynT6dr3LJxt/NgZPhZX2+k1eYDV
+-----END RSA PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIICGzCCAYSgAwIBAgIJAIq84a2Q/OvlMA0GCSqGSIb3DQEBBQUAMBQxEjAQBgNV
+BAMTCWxvY2FsaG9zdDAeFw0xMTA1MjExMDIzMzNaFw03NTAzMjEwMzU1MTdaMBQx
+EjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA
+xk+MUua/da2yyUCtp1A3pwYQRuqhZPm1To7gBgvy0h5QKAZ7xs0Bt2oDhvnu3Oc9
+l0/JtpLTkS6htxOJmJm8YMoQ68z+notgJ3PqbM5p8T6Ctz7oeWl1flyrdG1NAtZt
+vgAphhidLdz+NPGq7fNpzsQcPGR4CIPQ7aesSFAsjqsCAwEAAaN1MHMwHQYDVR0O
+BBYEFLWaUPO6N7efGiuoS9i3DVYcUwn0MEQGA1UdIwQ9MDuAFLWaUPO6N7efGiuo
+S9i3DVYcUwn0oRikFjAUMRIwEAYDVQQDEwlsb2NhbGhvc3SCCQCKvOGtkPzr5TAM
+BgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GBAMK5whPjLNQK1Ivvk88oqJqq
+4f889OwikGP0eUhOBhbFlsZs+jq5YZC2UzHz+evzKBlgAP1u4lP/cB85CnjvWqM+
+1c/lywFHQ6HOdDeQ1L72tSYMrNOG4XNmLn0h7rx6GoTU7dcFRfseahBCq8mv0IDt
+IRbTpvlHWPjsSvHz0ZOH
+-----END CERTIFICATE-----"""
+
     def setUp(self):
         """Set up an HTTP server to receive log messages, and a HTTPHandler
         pointing to that server's address and port."""
         BaseTest.setUp(self)
-        addr = ('localhost', 0)
-        self.server = server = TestHTTPServer(addr, self.handle_request,
-                                                0.01)
-        server.start()
-        server.ready.wait()
-        host = 'localhost:%d' % server.server_port
-        self.h_hdlr = logging.handlers.HTTPHandler(host, '/frob')
-        self.log_data = None
-        self.root_logger.removeHandler(self.root_logger.handlers[0])
-        self.root_logger.addHandler(self.h_hdlr)
         self.handled = threading.Event()
 
-    def tearDown(self):
-        """Shutdown the UDP server."""
-        try:
-            self.server.stop(2.0)
-            self.root_logger.removeHandler(self.h_hdlr)
-            self.h_hdlr.close()
-        finally:
-            BaseTest.tearDown(self)
-
     def handle_request(self, request):
         self.command = request.command
         self.log_data = urlparse(request.path)
@@ -1473,25 +1531,60 @@
             except:
                 self.post_data = None
         request.send_response(200)
+        request.end_headers()
         self.handled.set()
 
     def test_output(self):
-        # The log message sent to the SysLogHandler is properly received.
+        # The log message sent to the HTTPHandler is properly received.
         logger = logging.getLogger("http")
-        for method in ('GET', 'POST'):
-            self.h_hdlr.method = method
-            msg = "sp\xe4m"
-            logger.error(msg)
-            self.handled.wait()
-            self.assertEqual(self.log_data.path, '/frob')
-            self.assertEqual(self.command, method)
-            if method == 'GET':
-                d = parse_qs(self.log_data.query)
+        root_logger = self.root_logger
+        root_logger.removeHandler(self.root_logger.handlers[0])
+        for secure in (False, True):
+            addr = ('localhost', 0)
+            if secure:
+                try:
+                    import ssl
+                    fd, fn = tempfile.mkstemp()
+                    os.close(fd)
+                    with open(fn, 'w') as f:
+                        f.write(self.PEMFILE)
+                    sslctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+                    sslctx.load_cert_chain(fn)
+                    os.unlink(fn)
+                except ImportError:
+                    sslctx = None
             else:
-                d = parse_qs(self.post_data.decode('utf-8'))
-            self.assertEqual(d['name'], ['http'])
-            self.assertEqual(d['funcName'], ['test_output'])
-            self.assertEqual(d['msg'], [msg])
+                sslctx = None
+            self.server = server = TestHTTPServer(addr, self.handle_request,
+                                                    0.01, sslctx=sslctx)
+            server.start()
+            server.ready.wait()
+            host = 'localhost:%d' % server.server_port
+            secure_client = secure and sslctx
+            self.h_hdlr = logging.handlers.HTTPHandler(host, '/frob',
+                                                       secure=secure_client)
+            self.log_data = None
+            root_logger.addHandler(self.h_hdlr)
+
+            for method in ('GET', 'POST'):
+                self.h_hdlr.method = method
+                self.handled.clear()
+                msg = "sp\xe4m"
+                logger.error(msg)
+                self.handled.wait()
+                self.assertEqual(self.log_data.path, '/frob')
+                self.assertEqual(self.command, method)
+                if method == 'GET':
+                    d = parse_qs(self.log_data.query)
+                else:
+                    d = parse_qs(self.post_data.decode('utf-8'))
+                self.assertEqual(d['name'], ['http'])
+                self.assertEqual(d['funcName'], ['test_output'])
+                self.assertEqual(d['msg'], [msg])
+
+            self.server.stop(2.0)
+            self.root_logger.removeHandler(self.h_hdlr)
+            self.h_hdlr.close()
 
 class MemoryTest(BaseTest):
 
@@ -2484,6 +2577,7 @@
             logging.config.stopListening()
             t.join(2.0)
 
+    @unittest.skipUnless(threading, 'Threading required for this test.')
     def test_listen_config_10_ok(self):
         with captured_stdout() as output:
             self.setup_via_listener(json.dumps(self.config10))
@@ -2503,6 +2597,7 @@
                 ('ERROR', '4'),
             ], stream=output)
 
+    @unittest.skipUnless(threading, 'Threading required for this test.')
     def test_listen_config_1_ok(self):
         with captured_stdout() as output:
             self.setup_via_listener(textwrap.dedent(ConfigFileTest.config1))
@@ -3042,15 +3137,19 @@
     def test_multiprocessing(self):
         r = logging.makeLogRecord({})
         self.assertEqual(r.processName, 'MainProcess')
-        import multiprocessing as mp
-        r = logging.makeLogRecord({})
-        self.assertEqual(r.processName, mp.current_process().name)
+        try:
+            import multiprocessing as mp
+            r = logging.makeLogRecord({})
+            self.assertEqual(r.processName, mp.current_process().name)
+        except ImportError:
+            pass
 
     def test_optional(self):
         r = logging.makeLogRecord({})
         NOT_NONE = self.assertIsNotNone
-        NOT_NONE(r.thread)
-        NOT_NONE(r.threadName)
+        if threading:
+            NOT_NONE(r.thread)
+            NOT_NONE(r.threadName)
         NOT_NONE(r.process)
         NOT_NONE(r.processName)
         log_threads = logging.logThreads
@@ -3487,25 +3586,42 @@
         r = logging.makeLogRecord({'msg': 'testing'})
         fh.emit(r)
         self.assertLogFile(self.fn)
-        time.sleep(1.0)
+        time.sleep(1.01)    # just a little over a second ...
         fh.emit(r)
+        fh.close()
+        # At this point, we should have a recent rotated file which we
+        # can test for the existence of. However, in practice, on some
+        # machines which run really slowly, we don't know how far back
+        # in time to go to look for the log file. So, we go back a fair
+        # bit, and stop as soon as we see a rotated file. In theory this
+        # could of course still fail, but the chances are lower.
+        found = False
         now = datetime.datetime.now()
-        prevsec = now - datetime.timedelta(seconds=1)
-        earlier = now - datetime.timedelta(seconds=2)
-        fn1 = self.fn + prevsec.strftime(".%Y-%m-%d_%H-%M-%S")
-        fn2 = self.fn + earlier.strftime(".%Y-%m-%d_%H-%M-%S")
-        self.assertTrue(os.path.exists(fn1) or
-                        os.path.exists(fn2),
-                        msg="Neither exists: %s nor %s" % (fn1, fn2))
+        GO_BACK = 5 * 60 # seconds
+        for secs in range(GO_BACK):
+            prev = now - datetime.timedelta(seconds=secs)
+            fn = self.fn + prev.strftime(".%Y-%m-%d_%H-%M-%S")
+            found = os.path.exists(fn)
+            if found:
+                self.rmfiles.append(fn)
+                break
+        msg = 'No rotated files found, went back %d seconds' % GO_BACK
+        if not found:
+            # print additional diagnostics
+            dn, fn = os.path.split(self.fn)
+            files = [f for f in os.listdir(dn) if f.startswith(fn)]
+            print('Test time: %s' % now.strftime("%Y-%m-%d %H-%M-%S"), file=sys.stderr)
+            print('The only matching files are: %s' % files, file=sys.stderr)
+        self.assertTrue(found, msg=msg)
 
     def test_invalid(self):
         assertRaises = self.assertRaises
         assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler,
-                     self.fn, 'X')
+                     self.fn, 'X', delay=True)
         assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler,
-                     self.fn, 'W')
+                     self.fn, 'W', delay=True)
         assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler,
-                     self.fn, 'W7')
+                     self.fn, 'W7', delay=True)
 
 def secs(**kw):
     return datetime.timedelta(**kw) // datetime.timedelta(seconds=1)
@@ -3554,6 +3670,35 @@
         rh.close()
     setattr(TimedRotatingFileHandlerTest, "test_compute_rollover_%s" % when, test_compute_rollover)
 
+
+@unittest.skipUnless(win32evtlog, 'win32evtlog/win32evtlogutil required for this test.')
+class NTEventLogHandlerTest(BaseTest):
+    def test_basic(self):
+        logtype = 'Application'
+        elh = win32evtlog.OpenEventLog(None, logtype)
+        num_recs = win32evtlog.GetNumberOfEventLogRecords(elh)
+        h = logging.handlers.NTEventLogHandler('test_logging')
+        r = logging.makeLogRecord({'msg': 'Test Log Message'})
+        h.handle(r)
+        h.close()
+        # Now see if the event is recorded
+        self.assertTrue(num_recs < win32evtlog.GetNumberOfEventLogRecords(elh))
+        flags = win32evtlog.EVENTLOG_BACKWARDS_READ | \
+                win32evtlog.EVENTLOG_SEQUENTIAL_READ
+        found = False
+        GO_BACK = 100
+        events = win32evtlog.ReadEventLog(elh, flags, GO_BACK)
+        for e in events:
+            if e.SourceName != 'test_logging':
+                continue
+            msg = win32evtlogutil.SafeFormatMessage(e, logtype)
+            if msg != 'Test Log Message\r\n':
+                continue
+            found = True
+            break
+        msg = 'Record not found in event log, went back %d records' % GO_BACK
+        self.assertTrue(found, msg=msg)
+
 # Set the locale to the platform-dependent default.  I have no idea
 # why the test does this, but in any case we save the current locale
 # first and restore it at the end.
@@ -3569,7 +3714,7 @@
                  BasicConfigTest, LoggerAdapterTest, LoggerTest,
                  SMTPHandlerTest, FileHandlerTest, RotatingFileHandlerTest,
                  LastResortTest, LogRecordTest, ExceptionTest,
-                 SysLogHandlerTest, HTTPHandlerTest,
+                 SysLogHandlerTest, HTTPHandlerTest, NTEventLogHandlerTest,
                  TimedRotatingFileHandlerTest
                 )
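
The rotated-file search above replaces a brittle two-candidate check with a scan backwards in time, stopping at the first hit. It relies on the filename scheme used by TimedRotatingFileHandler for the per-second ('S') interval: the base name plus a ".%Y-%m-%d_%H-%M-%S" suffix. A small generator expressing the same probe:

    import datetime

    def rotated_candidates(base, go_back=5 * 60, now=None):
        # Yield rotated-file names for each of the last `go_back` seconds,
        # newest first, matching the handler's per-second suffix format.
        now = now or datetime.datetime.now()
        for secs in range(go_back):
            prev = now - datetime.timedelta(seconds=secs)
            yield base + prev.strftime(".%Y-%m-%d_%H-%M-%S")
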
 
diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py
index 9b4613e..e462fe3 100644
--- a/Lib/test/test_mailbox.py
+++ b/Lib/test/test_mailbox.py
@@ -870,21 +870,18 @@
 
     def test_reread(self):
 
-        # Initially, the mailbox has not been read and the time is null.
-        assert getattr(self._box, '_last_read', None) is None
-
-        # Refresh mailbox; the times should now be set to something.
-        self._box._refresh()
-        assert getattr(self._box, '_last_read', None) is not None
-
-        # Put the last modified times more than one second into the past
-        # (because mtime has a one second granularity, a refresh is done
-        # unconditionally if called for within the same second, just in case
-        # the mbox has changed).
+        # Put the last modified times more than two seconds into the past
+        # (because mtime may have a two-second granularity)
         for subdir in ('cur', 'new'):
             os.utime(os.path.join(self._box._path, subdir),
                      (time.time()-5,)*2)
 
+        # Because mtime has a two-second granularity in the worst case (FAT),
+        # a refresh is done unconditionally whenever one is requested within
+        # two seconds (plus a bit) of the last one, just in case the mailbox
+        # has changed; so now we have to wait for that interval to expire.
+        time.sleep(2.01 + self._box._skewfactor)
+
         # Re-reading causes the ._toc attribute to be assigned a new dictionary
         # object, so we'll check that the ._toc attribute isn't a different
         # object.
@@ -893,7 +890,7 @@
             return self._box._toc is not orig_toc
 
         self._box._refresh()
-        assert not refreshed()
+        self.assertFalse(refreshed())
 
         # Now, write something into cur and remove it.  This changes
         # the mtime and should cause a re-read.
@@ -902,7 +899,7 @@
         f.close()
         os.unlink(filename)
         self._box._refresh()
-        assert refreshed()
+        self.assertTrue(refreshed())
 
 class _TestMboxMMDF(TestMailbox):
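
The reworked test_reread encodes a timing rule rather than poking at private state: with a worst-case mtime resolution of two seconds (FAT), a refresh requested inside that window cannot trust mtime comparisons and so happens unconditionally. Roughly, as a sketch (the skew argument mirrors the mailbox's _skewfactor allowance, and the helper name is illustrative):

    GRANULARITY = 2.01      # worst-case mtime resolution, plus a bit

    def must_refresh(now, last_read, dir_mtime, skew=0.0):
        # Inside the granularity window mtimes are untrustworthy, so refresh
        # unconditionally; outside it, compare the timestamps.
        return (now - last_read) <= GRANULARITY + skew or dir_mtime > last_read

The sleep of 2.01 + _skewfactor in the test exists precisely to get past that unconditional-refresh window before asserting that no refresh occurs.
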
 
diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py
index 1bbc22d..5b914d5 100644
--- a/Lib/test/test_math.py
+++ b/Lib/test/test_math.py
@@ -5,6 +5,7 @@
 import unittest
 import math
 import os
+import platform
 import sys
 import struct
 import sysconfig
@@ -649,6 +650,34 @@
         n= 2**90
         self.assertAlmostEqual(math.log1p(n), math.log1p(float(n)))
 
+    @requires_IEEE_754
+    def testLog2(self):
+        self.assertRaises(TypeError, math.log2)
+
+        # Check some integer values
+        self.assertEqual(math.log2(1), 0.0)
+        self.assertEqual(math.log2(2), 1.0)
+        self.assertEqual(math.log2(4), 2.0)
+
+        # Large integer values
+        self.assertEqual(math.log2(2**1023), 1023.0)
+        self.assertEqual(math.log2(2**1024), 1024.0)
+        self.assertEqual(math.log2(2**2000), 2000.0)
+
+        self.assertRaises(ValueError, math.log2, -1.5)
+        self.assertRaises(ValueError, math.log2, NINF)
+        self.assertTrue(math.isnan(math.log2(NAN)))
+
+    @requires_IEEE_754
+    @unittest.skipIf(sys.platform == 'darwin'
+                     and platform.mac_ver()[0].startswith('10.4.'),
+                     'Mac OS X Tiger log2() is not accurate enough')
+    def testLog2Exact(self):
+        # Check that we get exact equality for log2 of powers of 2.
+        actual = [math.log2(math.ldexp(1.0, n)) for n in range(-1074, 1024)]
+        expected = [float(n) for n in range(-1074, 1024)]
+        self.assertEqual(actual, expected)
+
     def testLog10(self):
         self.assertRaises(TypeError, math.log10)
         self.ftest('log10(0.1)', math.log10(0.1), -1)
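
testLog2Exact demands bit-exact results for every representable power of two, which is why platforms with a sloppy libm log2 (OS X 10.4) must be skipped. A scaled-down, runnable version of the same exactness check (it assumes a Python new enough to provide math.log2):

    import math

    # The real test sweeps the full binary64 range(-1074, 1024).
    for n in range(-20, 21):
        assert math.log2(math.ldexp(1.0, n)) == float(n)
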
diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py
index acc4819..a3a2e35 100644
--- a/Lib/test/test_minidom.py
+++ b/Lib/test/test_minidom.py
@@ -42,26 +42,6 @@
     return doc
 
 class MinidomTest(unittest.TestCase):
-    def tearDown(self):
-        try:
-            Node.allnodes
-        except AttributeError:
-            # We don't actually have the minidom from the standard library,
-            # but are picking up the PyXML version from site-packages.
-            pass
-        else:
-            self.confirm(len(Node.allnodes) == 0,
-                    "assertion: len(Node.allnodes) == 0")
-            if len(Node.allnodes):
-                print("Garbage left over:")
-                if verbose:
-                    print(list(Node.allnodes.items())[0:10])
-                else:
-                    # Don't print specific nodes if repeatable results
-                    # are needed
-                    print(len(Node.allnodes))
-            Node.allnodes = {}
-
     def confirm(self, test, testname = "Test"):
         self.assertTrue(test, testname)
 
diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py
index e62a046..712378b 100644
--- a/Lib/test/test_mmap.py
+++ b/Lib/test/test_mmap.py
@@ -1,4 +1,5 @@
-from test.support import TESTFN, run_unittest, import_module, unlink, requires
+from test.support import (TESTFN, run_unittest, import_module, unlink,
+                          requires, _2G, _4G)
 import unittest
 import os
 import re
@@ -662,45 +663,48 @@
     def tearDown(self):
         unlink(TESTFN)
 
-    def _working_largefile(self):
-        # Only run if the current filesystem supports large files.
-        f = open(TESTFN, 'wb', buffering=0)
-        try:
-            f.seek(0x80000001)
-            f.write(b'x')
-            f.flush()
-        except (IOError, OverflowError):
-            raise unittest.SkipTest("filesystem does not have largefile support")
-        finally:
-            f.close()
-            unlink(TESTFN)
-
-    def test_large_offset(self):
+    def _make_test_file(self, num_zeroes, tail):
         if sys.platform[:3] == 'win' or sys.platform == 'darwin':
             requires('largefile',
                 'test requires %s bytes and a long time to run' % str(0x180000000))
-        self._working_largefile()
-        with open(TESTFN, 'wb') as f:
-            f.seek(0x14FFFFFFF)
-            f.write(b" ")
+        f = open(TESTFN, 'w+b')
+        try:
+            f.seek(num_zeroes)
+            f.write(tail)
+            f.flush()
+        except (IOError, OverflowError):
+            f.close()
+            raise unittest.SkipTest("filesystem does not have largefile support")
+        return f
 
-        with open(TESTFN, 'rb') as f:
+    def test_large_offset(self):
+        with self._make_test_file(0x14FFFFFFF, b" ") as f:
             with mmap.mmap(f.fileno(), 0, offset=0x140000000, access=mmap.ACCESS_READ) as m:
                 self.assertEqual(m[0xFFFFFFF], 32)
 
     def test_large_filesize(self):
-        if sys.platform[:3] == 'win' or sys.platform == 'darwin':
-            requires('largefile',
-                'test requires %s bytes and a long time to run' % str(0x180000000))
-        self._working_largefile()
-        with open(TESTFN, 'wb') as f:
-            f.seek(0x17FFFFFFF)
-            f.write(b" ")
-
-        with open(TESTFN, 'rb') as f:
+        with self._make_test_file(0x17FFFFFFF, b" ") as f:
             with mmap.mmap(f.fileno(), 0x10000, access=mmap.ACCESS_READ) as m:
                 self.assertEqual(m.size(), 0x180000000)
 
+    # Issue 11277: mmap() with large (~4GB) sparse files crashes on OS X.
+
+    def _test_around_boundary(self, boundary):
+        tail = b'  DEARdear  '
+        start = boundary - len(tail) // 2
+        end = start + len(tail)
+        with self._make_test_file(start, tail) as f:
+            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as m:
+                self.assertEqual(m[start:end], tail)
+
+    @unittest.skipUnless(sys.maxsize > _4G, "test cannot run on 32-bit systems")
+    def test_around_2GB(self):
+        self._test_around_boundary(_2G)
+
+    @unittest.skipUnless(sys.maxsize > _4G, "test cannot run on 32-bit systems")
+    def test_around_4GB(self):
+        self._test_around_boundary(_4G)
+
 
 def test_main():
     run_unittest(MmapTests, LargeMmapTests)
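
The boundary tests for issue 11277 plant a short tag across the 2 GiB and 4 GiB offsets of a sparse file, then read it back through the mapping. A scaled-down sketch of the same round trip, using a deliberately small boundary so it is cheap to run:

    import mmap
    import os
    import tempfile

    def check_boundary(boundary, tag=b'  DEARdear  '):
        start = boundary - len(tag) // 2
        fd, fn = tempfile.mkstemp()
        try:
            with os.fdopen(fd, 'w+b') as f:
                f.seek(start)       # leaves a hole: the file stays sparse
                f.write(tag)
                f.flush()
                with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as m:
                    assert m[start:start + len(tag)] == tag
        finally:
            os.unlink(fn)

    check_boundary(1024 * 1024)     # the tests themselves use _2G and _4G
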
diff --git a/Lib/test/test_multibytecodec_support.py b/Lib/test/test_multibytecodec_support.py
index ed15ce1..a48fbe4 100644
--- a/Lib/test/test_multibytecodec_support.py
+++ b/Lib/test/test_multibytecodec_support.py
@@ -4,8 +4,11 @@
 #   Common Unittest Routines for CJK codecs
 #
 
-import sys, codecs
-import unittest, re
+import codecs
+import os
+import re
+import sys
+import unittest
 from http.client import HTTPException
 from test import support
 from io import BytesIO
@@ -343,6 +346,10 @@
         if (csetch, unich) not in self.pass_dectest:
             self.assertEqual(str(csetch, self.encoding), unich)
 
-def load_teststring(encoding):
-    from test import cjkencodings_test
-    return cjkencodings_test.teststring[encoding]
+def load_teststring(name):
+    dir = os.path.join(os.path.dirname(__file__), 'cjkencodings')
+    with open(os.path.join(dir, name + '.txt'), 'rb') as f:
+        encoded = f.read()
+    with open(os.path.join(dir, name + '-utf8.txt'), 'rb') as f:
+        utf8 = f.read()
+    return encoded, utf8
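
load_teststring now reads its fixtures from disk instead of importing a generated module: each codec name maps to a pair of files under Lib/test/cjkencodings/, <name>.txt holding the encoded bytes and <name>-utf8.txt the same text in UTF-8. Assuming those fixtures are present (the codec name below is just an example), a round trip reduces to:

    encoded, utf8 = load_teststring('gb2312')
    assert encoded.decode('gb2312') == utf8.decode('utf-8')
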
diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py
index a7f0391..0c05ff6 100644
--- a/Lib/test/test_multiprocessing.py
+++ b/Lib/test/test_multiprocessing.py
@@ -1915,9 +1915,15 @@
 
     @unittest.skipIf(WIN32, "skipped on Windows")
     def test_invalid_handles(self):
-        conn = _multiprocessing.Connection(44977608)
-        self.assertRaises(IOError, conn.poll)
-        self.assertRaises(IOError, _multiprocessing.Connection, -1)
+        conn = multiprocessing.connection.Connection(44977608)
+        try:
+            self.assertRaises((ValueError, IOError), conn.poll)
+        finally:
+            # Hack private attribute _handle to avoid printing an error
+            # in conn.__del__
+            conn._handle = None
+        self.assertRaises((ValueError, IOError),
+                          multiprocessing.connection.Connection, -1)
 
 #
 # Functions used to create test cases from the base ones in this module
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index aa9ff5d..82a29fe 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -1309,89 +1309,90 @@
                     raise
 
 
-class SendfileTestServer(asyncore.dispatcher, threading.Thread):
+if threading is not None:
+    class SendfileTestServer(asyncore.dispatcher, threading.Thread):
 
-    class Handler(asynchat.async_chat):
+        class Handler(asynchat.async_chat):
 
-        def __init__(self, conn):
-            asynchat.async_chat.__init__(self, conn)
-            self.in_buffer = []
-            self.closed = False
-            self.push(b"220 ready\r\n")
+            def __init__(self, conn):
+                asynchat.async_chat.__init__(self, conn)
+                self.in_buffer = []
+                self.closed = False
+                self.push(b"220 ready\r\n")
 
-        def handle_read(self):
-            data = self.recv(4096)
-            self.in_buffer.append(data)
+            def handle_read(self):
+                data = self.recv(4096)
+                self.in_buffer.append(data)
 
-        def get_data(self):
-            return b''.join(self.in_buffer)
+            def get_data(self):
+                return b''.join(self.in_buffer)
 
-        def handle_close(self):
+            def handle_close(self):
+                self.close()
+                self.closed = True
+
+            def handle_error(self):
+                raise
+
+        def __init__(self, address):
+            threading.Thread.__init__(self)
+            asyncore.dispatcher.__init__(self)
+            self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+            self.bind(address)
+            self.listen(5)
+            self.host, self.port = self.socket.getsockname()[:2]
+            self.handler_instance = None
+            self._active = False
+            self._active_lock = threading.Lock()
+
+        # --- public API
+
+        @property
+        def running(self):
+            return self._active
+
+        def start(self):
+            assert not self.running
+            self.__flag = threading.Event()
+            threading.Thread.start(self)
+            self.__flag.wait()
+
+        def stop(self):
+            assert self.running
+            self._active = False
+            self.join()
+
+        def wait(self):
+            # wait for handler connection to be closed, then stop the server
+            while not getattr(self.handler_instance, "closed", False):
+                time.sleep(0.001)
+            self.stop()
+
+        # --- internals
+
+        def run(self):
+            self._active = True
+            self.__flag.set()
+            while self._active and asyncore.socket_map:
+                self._active_lock.acquire()
+                asyncore.loop(timeout=0.001, count=1)
+                self._active_lock.release()
+            asyncore.close_all()
+
+        def handle_accept(self):
+            conn, addr = self.accept()
+            self.handler_instance = self.Handler(conn)
+
+        def handle_connect(self):
             self.close()
-            self.closed = True
+        handle_read = handle_connect
+
+        def writable(self):
+            return 0
 
         def handle_error(self):
             raise
 
-    def __init__(self, address):
-        threading.Thread.__init__(self)
-        asyncore.dispatcher.__init__(self)
-        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
-        self.bind(address)
-        self.listen(5)
-        self.host, self.port = self.socket.getsockname()[:2]
-        self.handler_instance = None
-        self._active = False
-        self._active_lock = threading.Lock()
-
-    # --- public API
-
-    @property
-    def running(self):
-        return self._active
-
-    def start(self):
-        assert not self.running
-        self.__flag = threading.Event()
-        threading.Thread.start(self)
-        self.__flag.wait()
-
-    def stop(self):
-        assert self.running
-        self._active = False
-        self.join()
-
-    def wait(self):
-        # wait for handler connection to be closed, then stop the server
-        while not getattr(self.handler_instance, "closed", False):
-            time.sleep(0.001)
-        self.stop()
-
-    # --- internals
-
-    def run(self):
-        self._active = True
-        self.__flag.set()
-        while self._active and asyncore.socket_map:
-            self._active_lock.acquire()
-            asyncore.loop(timeout=0.001, count=1)
-            self._active_lock.release()
-        asyncore.close_all()
-
-    def handle_accept(self):
-        conn, addr = self.accept()
-        self.handler_instance = self.Handler(conn)
-
-    def handle_connect(self):
-        self.close()
-    handle_read = handle_connect
-
-    def writable(self):
-        return 0
-
-    def handle_error(self):
-        raise
-
 
 @unittest.skipUnless(threading is not None, "test needs threading module")
 @unittest.skipUnless(hasattr(os, 'sendfile'), "test needs os.sendfile()")
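
The test_os.py change above defers the definition of the sendfile test server until threading is known to be importable. A minimal standalone sketch of that guard pattern (not part of the patch; names are illustrative): define thread-dependent helpers only under ``if threading is not None:`` and let skipUnless() disable the dependent tests.

    import unittest

    try:
        import threading
    except ImportError:
        threading = None

    if threading is not None:
        class EchoThread(threading.Thread):
            # helper that only exists when the threading module imports
            def run(self):
                self.result = 42

    @unittest.skipUnless(threading is not None, 'test needs threading module')
    class ThreadedTests(unittest.TestCase):
        def test_helper(self):
            t = EchoThread()          # safe: skipped when the class is absent
            t.start()
            t.join()
            self.assertEqual(t.result, 42)
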
diff --git a/Lib/test/test_packaging.py b/Lib/test/test_packaging.py
new file mode 100644
index 0000000..250d661
--- /dev/null
+++ b/Lib/test/test_packaging.py
@@ -0,0 +1,5 @@
+import sys
+from packaging.tests.__main__ import test_main
+
+if __name__ == '__main__':
+    test_main()
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py
index 0e9ac75..9d9802b 100644
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -9,6 +9,7 @@
 import sys
 import time
 import os
+import fcntl
 import pwd
 import shutil
 import stat
@@ -307,6 +308,16 @@
                 fp1.close()
                 fp2.close()
 
+    @unittest.skipUnless(hasattr(os, 'O_CLOEXEC'), "needs os.O_CLOEXEC")
+    def test_oscloexec(self):
+        version = support.linux_version()
+        if sys.platform == 'linux2' and version < (2, 6, 23):
+            self.skipTest("Linux kernel 2.6.23 or higher required, "
+                          "not %s.%s.%s" % version)
+        fd = os.open(support.TESTFN, os.O_RDONLY|os.O_CLOEXEC)
+        self.addCleanup(os.close, fd)
+        self.assertTrue(fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC)
+
     def test_osexlock(self):
         if hasattr(posix, "O_EXLOCK"):
             fd = os.open(support.TESTFN,
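
A standalone POSIX sketch of the new O_CLOEXEC check (not part of the patch): open a file with os.O_CLOEXEC where the platform provides it, then confirm via fcntl that the close-on-exec flag ended up set on the descriptor.

    import fcntl
    import os
    import tempfile

    with tempfile.NamedTemporaryFile() as f:
        if hasattr(os, 'O_CLOEXEC'):              # needs Linux >= 2.6.23
            fd = os.open(f.name, os.O_RDONLY | os.O_CLOEXEC)
            try:
                assert fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC
            finally:
                os.close(fd)
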
diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py
index 6dca0c9..7d17a98 100644
--- a/Lib/test/test_pydoc.py
+++ b/Lib/test/test_pydoc.py
@@ -20,6 +20,11 @@
 
 from test import pydoc_mod
 
+try:
+    import threading
+except ImportError:
+    threading = None
+
 # Just in case sys.modules["test"] has the optional attribute __loader__.
 if hasattr(pydoc_mod, "__loader__"):
     del pydoc_mod.__loader__
@@ -409,6 +414,7 @@
         self.assertIn(expected, pydoc.render_doc(c))
 
 
+@unittest.skipUnless(threading, 'Threading required for this test.')
 class PydocServerTest(unittest.TestCase):
     """Tests for pydoc._start_server"""
 
diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py
index bddb375..8e00889 100644
--- a/Lib/test/test_sax.py
+++ b/Lib/test/test_sax.py
@@ -794,51 +794,6 @@
         self.assertEqual(attrs.getQNameByName((ns_uri, "attr")), "ns:attr")
 
 
-    # During the development of Python 2.5, an attempt to move the "xml"
-    # package implementation to a new package ("xmlcore") proved painful.
-    # The goal of this change was to allow applications to be able to
-    # obtain and rely on behavior in the standard library implementation
-    # of the XML support without needing to be concerned about the
-    # availability of the PyXML implementation.
-    #
-    # While the existing import hackery in Lib/xml/__init__.py can cause
-    # PyXML's _xmlpus package to supplant the "xml" package, that only
-    # works because either implementation uses the "xml" package name for
-    # imports.
-    #
-    # The move resulted in a number of problems related to the fact that
-    # the import machinery's "package context" is based on the name that's
-    # being imported rather than the __name__ of the actual package
-    # containment; it wasn't possible for the "xml" package to be replaced
-    # by a simple module that indirected imports to the "xmlcore" package.
-    #
-    # The following two tests exercised bugs that were introduced in that
-    # attempt.  Keeping these tests around will help detect problems with
-    # other attempts to provide reliable access to the standard library's
-    # implementation of the XML support.
-
-    def test_sf_1511497(self):
-        # Bug report: http://www.python.org/sf/1511497
-        import sys
-        old_modules = sys.modules.copy()
-        for modname in list(sys.modules.keys()):
-            if modname.startswith("xml."):
-                del sys.modules[modname]
-        try:
-            import xml.sax.expatreader
-            module = xml.sax.expatreader
-            self.assertEqual(module.__name__, "xml.sax.expatreader")
-        finally:
-            sys.modules.update(old_modules)
-
-    def test_sf_1513611(self):
-        # Bug report: http://www.python.org/sf/1513611
-        sio = StringIO("invalid")
-        parser = make_parser()
-        from xml.sax import SAXParseException
-        self.assertRaises(SAXParseException, parser.parse, sio)
-
-
 def test_main():
     run_unittest(MakeParserTest,
                  SaxutilsTest,
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
index 5252d4d..839f742 100644
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -21,7 +21,7 @@
 import warnings
 
 from test import support
-from test.support import TESTFN, check_warnings, captured_stdout
+from test.support import TESTFN, check_warnings, captured_stdout, requires_zlib
 
 try:
     import bz2
@@ -39,11 +39,6 @@
     UID_GID_SUPPORT = False
 
 try:
-    import zlib
-except ImportError:
-    zlib = None
-
-try:
     import zipfile
     ZIP_SUPPORT = True
 except ImportError:
@@ -444,7 +439,7 @@
             self.assertEqual(getattr(file1_stat, 'st_flags'),
                              getattr(file2_stat, 'st_flags'))
 
-    @unittest.skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_make_tarball(self):
         # creating something to tar
         tmpdir = self.mkdtemp()
@@ -507,7 +502,7 @@
         base_name = os.path.join(tmpdir2, 'archive')
         return tmpdir, tmpdir2, base_name
 
-    @unittest.skipUnless(zlib, "Requires zlib")
+    @requires_zlib
     @unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
                          'Need the tar command to run')
     def test_tarfile_vs_tar(self):
@@ -562,7 +557,7 @@
         tarball = base_name + '.tar'
         self.assertTrue(os.path.exists(tarball))
 
-    @unittest.skipUnless(zlib, "Requires zlib")
+    @requires_zlib
     @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
     def test_make_zipfile(self):
         # creating something to tar
@@ -586,7 +581,7 @@
         base_name = os.path.join(tmpdir, 'archive')
         self.assertRaises(ValueError, make_archive, base_name, 'xxx')
 
-    @unittest.skipUnless(zlib, "Requires zlib")
+    @requires_zlib
     def test_make_archive_owner_group(self):
         # testing make_archive with owner and group, with various combinations
         # this works even if there's not gid/uid support
@@ -614,7 +609,7 @@
         self.assertTrue(os.path.exists(res))
 
 
-    @unittest.skipUnless(zlib, "Requires zlib")
+    @requires_zlib
     @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
     def test_tarfile_root_owner(self):
         tmpdir, tmpdir2, base_name =  self._create_files()
@@ -683,7 +678,7 @@
                     diff.append(file_)
         return diff
 
-    @unittest.skipUnless(zlib, "Requires zlib")
+    @requires_zlib
     def test_unpack_archive(self):
         formats = ['tar', 'gztar', 'zip']
         if BZ2_SUPPORTED:
@@ -951,6 +946,24 @@
         self.assertTrue(srcfile._exited_with[0] is None)
         self.assertTrue(srcfile._raised)
 
+    def test_move_dir_caseinsensitive(self):
+        # Renames a folder to the same name but a different case.
+
+        self.src_dir = tempfile.mkdtemp()
+        dst_dir = os.path.join(
+                os.path.dirname(self.src_dir),
+                os.path.basename(self.src_dir).upper())
+        self.assertNotEqual(self.src_dir, dst_dir)
+
+        try:
+            shutil.move(self.src_dir, dst_dir)
+            self.assertTrue(os.path.isdir(dst_dir))
+        finally:
+            if os.path.exists(dst_dir):
+                os.rmdir(dst_dir)
+
 
 def test_main():
     support.run_unittest(TestShutil, TestMove, TestCopyFile)
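
requires_zlib replaces the per-test @unittest.skipUnless(zlib, ...) lines throughout this file; a sketch of how such a decorator can be defined (test.support's real definition may differ):

    import unittest

    try:
        import zlib
    except ImportError:
        zlib = None

    # skipUnless() returns a decorator, so the check is written once
    requires_zlib = unittest.skipUnless(zlib, 'requires zlib')

    class CompressionTests(unittest.TestCase):
        @requires_zlib
        def test_roundtrip(self):
            data = b'spam' * 100
            self.assertEqual(zlib.decompress(zlib.compress(data)), data)
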
diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py
index c74f001..3134031 100644
--- a/Lib/test/test_signal.py
+++ b/Lib/test/test_signal.py
@@ -5,9 +5,14 @@
 import pickle
 import select
 import signal
+import struct
 import subprocess
 import traceback
 import sys, os, time, errno
+try:
+    import threading
+except ImportError:
+    threading = None
 
 if sys.platform in ('os2', 'riscos'):
     raise unittest.SkipTest("Can't test signal on %s" % sys.platform)
@@ -53,15 +58,9 @@
 
     def handlerA(self, signum, frame):
         self.a_called = True
-        if support.verbose:
-            print("handlerA invoked from signal %s at:\n%s" % (
-                signum, self.format_frame(frame, limit=1)))
 
     def handlerB(self, signum, frame):
         self.b_called = True
-        if support.verbose:
-            print ("handlerB invoked from signal %s at:\n%s" % (
-                signum, self.format_frame(frame, limit=1)))
         raise HandlerBCalled(signum, self.format_frame(frame))
 
     def wait(self, child):
@@ -88,8 +87,6 @@
 
         # Let the sub-processes know who to send signals to.
         pid = os.getpid()
-        if support.verbose:
-            print("test runner's pid is", pid)
 
         child = ignoring_eintr(subprocess.Popen, ['kill', '-HUP', str(pid)])
         if child:
@@ -113,8 +110,6 @@
         except HandlerBCalled:
             self.assertTrue(self.b_called)
             self.assertFalse(self.a_called)
-            if support.verbose:
-                print("HandlerBCalled exception caught")
 
         child = ignoring_eintr(subprocess.Popen, ['kill', '-USR2', str(pid)])
         if child:
@@ -130,8 +125,7 @@
             # may return early.
             time.sleep(1)
         except KeyboardInterrupt:
-            if support.verbose:
-                print("KeyboardInterrupt (the alarm() went off)")
+            pass
         except:
             self.fail("Some other exception woke us from pause: %s" %
                       traceback.format_exc())
@@ -187,7 +181,7 @@
 
 
 @unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
-class BasicSignalTests(unittest.TestCase):
+class PosixTests(unittest.TestCase):
     def trivial_signal_handler(self, *args):
         pass
 
@@ -232,6 +226,11 @@
     TIMEOUT_FULL = 10
     TIMEOUT_HALF = 5
 
+    def check_signum(self, *signals):
+        data = os.read(self.read, len(signals)+1)
+        raised = struct.unpack('%uB' % len(data), data)
+        self.assertSequenceEqual(raised, signals)
+
     def test_wakeup_fd_early(self):
         import select
 
@@ -245,6 +244,7 @@
         select.select([self.read], [], [], self.TIMEOUT_FULL)
         after_time = time.time()
         self.assertTrue(after_time - mid_time < self.TIMEOUT_HALF)
+        self.check_signum(signal.SIGALRM)
 
     def test_wakeup_fd_during(self):
         import select
@@ -256,6 +256,14 @@
             [self.read], [], [], self.TIMEOUT_FULL)
         after_time = time.time()
         self.assertTrue(after_time - before_time < self.TIMEOUT_HALF)
+        self.check_signum(signal.SIGALRM)
+
+    def test_signum(self):
+        old_handler = signal.signal(signal.SIGUSR1, lambda x, y: None)
+        self.addCleanup(signal.signal, signal.SIGUSR1, old_handler)
+        os.kill(os.getpid(), signal.SIGUSR1)
+        os.kill(os.getpid(), signal.SIGALRM)
+        self.check_signum(signal.SIGUSR1, signal.SIGALRM)
 
     def setUp(self):
         import fcntl
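
check_signum() decodes what the signal wakeup fd receives: in this branch, each signal delivered while a wakeup fd is set writes its signal number as a single byte to the fd installed with signal.set_wakeup_fd(). A POSIX-only standalone sketch of the mechanism (not part of the patch):

    import fcntl
    import os
    import signal
    import struct

    read_fd, write_fd = os.pipe()
    for fd in (read_fd, write_fd):                    # wakeup fd must not block
        flags = fcntl.fcntl(fd, fcntl.F_GETFL)
        fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)

    signal.set_wakeup_fd(write_fd)
    old = signal.signal(signal.SIGUSR1, lambda signum, frame: None)
    os.kill(os.getpid(), signal.SIGUSR1)

    data = os.read(read_fd, 10)
    raised = struct.unpack('%uB' % len(data), data)   # one byte per signal
    assert raised == (signal.SIGUSR1,)

    signal.set_wakeup_fd(-1)
    signal.signal(signal.SIGUSR1, old)
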
@@ -340,10 +348,9 @@
                 return True
 
     def test_without_siginterrupt(self):
-        """If a signal handler is installed and siginterrupt is not called
-        at all, when that signal arrives, it interrupts a syscall that's in
-        progress.
-        """
+        # If a signal handler is installed and siginterrupt is not called
+        # at all, when that signal arrives, it interrupts a syscall that's in
+        # progress.
         i = self.readpipe_interrupted()
         self.assertTrue(i)
         # Arrival of the signal shouldn't have changed anything.
@@ -351,10 +358,9 @@
         self.assertTrue(i)
 
     def test_siginterrupt_on(self):
-        """If a signal handler is installed and siginterrupt is called with
-        a true value for the second argument, when that signal arrives, it
-        interrupts a syscall that's in progress.
-        """
+        # If a signal handler is installed and siginterrupt is called with
+        # a true value for the second argument, when that signal arrives, it
+        # interrupts a syscall that's in progress.
         signal.siginterrupt(self.signum, 1)
         i = self.readpipe_interrupted()
         self.assertTrue(i)
@@ -363,10 +369,9 @@
         self.assertTrue(i)
 
     def test_siginterrupt_off(self):
-        """If a signal handler is installed and siginterrupt is called with
-        a false value for the second argument, when that signal arrives, it
-        does not interrupt a syscall that's in progress.
-        """
+        # If a signal handler is installed and siginterrupt is called with
+        # a false value for the second argument, when that signal arrives, it
+        # does not interrupt a syscall that's in progress.
         signal.siginterrupt(self.signum, 0)
         i = self.readpipe_interrupted()
         self.assertFalse(i)
@@ -391,8 +396,6 @@
 
     def sig_alrm(self, *args):
         self.hndl_called = True
-        if support.verbose:
-            print("SIGALRM handler invoked", args)
 
     def sig_vtalrm(self, *args):
         self.hndl_called = True
@@ -404,21 +407,13 @@
         elif self.hndl_count == 3:
             # disable ITIMER_VIRTUAL, this function shouldn't be called anymore
             signal.setitimer(signal.ITIMER_VIRTUAL, 0)
-            if support.verbose:
-                print("last SIGVTALRM handler call")
 
         self.hndl_count += 1
 
-        if support.verbose:
-            print("SIGVTALRM handler invoked", args)
-
     def sig_prof(self, *args):
         self.hndl_called = True
         signal.setitimer(signal.ITIMER_PROF, 0)
 
-        if support.verbose:
-            print("SIGPROF handler invoked", args)
-
     def test_itimer_exc(self):
         # XXX I'm assuming -1 is an invalid itimer, but maybe some platform
         # defines it ?
@@ -431,10 +426,7 @@
     def test_itimer_real(self):
         self.itimer = signal.ITIMER_REAL
         signal.setitimer(self.itimer, 1.0)
-        if support.verbose:
-            print("\ncall pause()...")
         signal.pause()
-
         self.assertEqual(self.hndl_called, True)
 
     # Issue 3864, unknown if this affects earlier versions of freebsd also
@@ -484,50 +476,121 @@
         self.assertEqual(self.hndl_called, True)
 
 
-@unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
-                     'need signal.pthread_sigmask()')
 class PendingSignalsTests(unittest.TestCase):
     """
-    Tests for the pthread_sigmask() function.
+    Test pthread_sigmask(), pthread_kill(), sigpending() and sigwait()
+    functions.
     """
+    def setUp(self):
+        self.has_pthread_kill = hasattr(signal, 'pthread_kill')
+
     def handler(self, signum, frame):
         1/0
 
     def read_sigmask(self):
         return signal.pthread_sigmask(signal.SIG_BLOCK, [])
 
-    def test_pthread_sigmask_arguments(self):
-        self.assertRaises(TypeError, signal.pthread_sigmask)
-        self.assertRaises(TypeError, signal.pthread_sigmask, 1)
-        self.assertRaises(TypeError, signal.pthread_sigmask, 1, 2, 3)
-        self.assertRaises(RuntimeError, signal.pthread_sigmask, 1700, [])
+    def can_test_blocked_signals(self, skip):
+        """
+        Check if a blocked signal can be raised to the main thread without
+        calling its signal handler. We need pthread_kill() or exactly one
+        thread (the main thread).
 
-    def test_pthread_sigmask(self):
-        import faulthandler
-        pid = os.getpid()
-        signum = signal.SIGUSR1
+        Return True if it's possible. Otherwise, return False and print a
+        warning if skip is False, or raise a SkipTest exception if skip is
+        True.
+        """
+        if self.has_pthread_kill:
+            return True
 
         # The fault handler timeout thread masks all signals. If the main
         # thread masks also SIGUSR1, all threads mask this signal. In this
         # case, if we send SIGUSR1 to the process, the signal is pending in the
         # main or the faulthandler timeout thread.  Unblock SIGUSR1 in the main
         # thread calls the signal handler only if the signal is pending for the
-        # main thread.
-        #
-        # Stop the faulthandler timeout thread to workaround this problem.
-        # Another solution would be to send the signal directly to the main
-        # thread using pthread_kill(), but Python doesn't expose this
-        # function.
+        # main thread. Stop the faulthandler timeout thread to work around
+        # this problem.
+        import faulthandler
         faulthandler.cancel_dump_tracebacks_later()
 
-        # Issue #11998: The _tkinter module loads the Tcl library which creates
-        # a thread waiting events in select(). This thread receives signals
-        # blocked by all other threads. We cannot test blocked signals if the
-        # _tkinter module is loaded.
-        can_test_blocked_signals = ('_tkinter' not in sys.modules)
-        if not can_test_blocked_signals:
-            print("WARNING: _tkinter is loaded, cannot test signals "
-                  "blocked by pthread_sigmask() (issue #11998)")
+        # Issue #11998: The _tkinter module loads the Tcl library which
+        # creates a thread waiting for events in select(). This thread
+        # receives signals blocked by all other threads. We cannot test
+        # blocked signals.
+        if '_tkinter' in sys.modules:
+            message = ("_tkinter is loaded and pthread_kill() is missing, "
+                       "cannot test blocked signals (issue #11998)")
+            if skip:
+                self.skipTest(message)
+            else:
+                print("WARNING: %s" % message)
+            return False
+        return True
+
+    def kill(self, signum):
+        if self.has_pthread_kill:
+            tid = threading.current_thread().ident
+            signal.pthread_kill(tid, signum)
+        else:
+            pid = os.getpid()
+            os.kill(pid, signum)
+
+    @unittest.skipUnless(hasattr(signal, 'sigpending'),
+                         'need signal.sigpending()')
+    def test_sigpending_empty(self):
+        self.assertEqual(signal.sigpending(), set())
+
+    @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+                         'need signal.pthread_sigmask()')
+    @unittest.skipUnless(hasattr(signal, 'sigpending'),
+                         'need signal.sigpending()')
+    def test_sigpending(self):
+        self.can_test_blocked_signals(True)
+
+        signum = signal.SIGUSR1
+        old_handler = signal.signal(signum, self.handler)
+        self.addCleanup(signal.signal, signum, old_handler)
+
+        signal.pthread_sigmask(signal.SIG_BLOCK, [signum])
+        self.kill(signum)
+        self.assertEqual(signal.sigpending(), {signum})
+        with self.assertRaises(ZeroDivisionError):
+            signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
+
+    @unittest.skipUnless(hasattr(signal, 'pthread_kill'),
+                         'need signal.pthread_kill()')
+    def test_pthread_kill(self):
+        signum = signal.SIGUSR1
+        current = threading.current_thread().ident
+
+        old_handler = signal.signal(signum, self.handler)
+        self.addCleanup(signal.signal, signum, old_handler)
+
+        with self.assertRaises(ZeroDivisionError):
+            signal.pthread_kill(current, signum)
+
+    @unittest.skipUnless(hasattr(signal, 'sigwait'),
+                         'need signal.sigwait()')
+    def test_sigwait(self):
+        old_handler = signal.signal(signal.SIGALRM, self.handler)
+        self.addCleanup(signal.signal, signal.SIGALRM, old_handler)
+
+        signal.alarm(1)
+        self.assertEqual(signal.sigwait([signal.SIGALRM]), signal.SIGALRM)
+
+    @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+                         'need signal.pthread_sigmask()')
+    def test_pthread_sigmask_arguments(self):
+        self.assertRaises(TypeError, signal.pthread_sigmask)
+        self.assertRaises(TypeError, signal.pthread_sigmask, 1)
+        self.assertRaises(TypeError, signal.pthread_sigmask, 1, 2, 3)
+        self.assertRaises(OSError, signal.pthread_sigmask, 1700, [])
+
+    @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+                         'need signal.pthread_sigmask()')
+    def test_pthread_sigmask(self):
+        test_blocked_signals = self.can_test_blocked_signals(False)
+        signum = signal.SIGUSR1
 
         # Install our signal handler
         old_handler = signal.signal(signum, self.handler)
@@ -537,13 +600,13 @@
         old_mask = signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
         self.addCleanup(signal.pthread_sigmask, signal.SIG_SETMASK, old_mask)
         with self.assertRaises(ZeroDivisionError):
-            os.kill(pid, signum)
+            self.kill(signum)
 
         # Block and then raise SIGUSR1. The signal is blocked: the signal
         # handler is not called, and the signal is now pending
         signal.pthread_sigmask(signal.SIG_BLOCK, [signum])
-        if can_test_blocked_signals:
-            os.kill(pid, signum)
+        if test_blocked_signals:
+            self.kill(signum)
 
         # Check the new mask
         blocked = self.read_sigmask()
@@ -551,14 +614,14 @@
         self.assertEqual(old_mask ^ blocked, {signum})
 
         # Unblock SIGUSR1
-        if can_test_blocked_signals:
+        if test_blocked_signals:
             with self.assertRaises(ZeroDivisionError):
                 # unblocking the pending signal immediately calls the signal handler
                 signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
         else:
             signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
         with self.assertRaises(ZeroDivisionError):
-            os.kill(pid, signum)
+            self.kill(signum)
 
         # Check the new mask
         unblocked = self.read_sigmask()
@@ -570,7 +633,7 @@
 
 def test_main():
     try:
-        support.run_unittest(BasicSignalTests, InterProcessSignalTests,
+        support.run_unittest(PosixTests, InterProcessSignalTests,
                              WakeupSignalTests, SiginterruptTest,
                              ItimerTest, WindowsSignalTests,
                              PendingSignalsTests)
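
A single-threaded sketch of the block/pending/unblock cycle PendingSignalsTests exercises (the tests' extra machinery exists precisely because other threads, such as faulthandler's timeout thread, may receive the signal instead):

    import os
    import signal

    received = []
    signal.signal(signal.SIGUSR1,
                  lambda signum, frame: received.append(signum))

    signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGUSR1])
    os.kill(os.getpid(), signal.SIGUSR1)      # blocked: handler not called yet
    assert signal.sigpending() == {signal.SIGUSR1}

    # unblocking delivers the pending signal, running the handler
    signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGUSR1])
    assert received == [signal.SIGUSR1]
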
diff --git a/Lib/test/test_smtpnet.py b/Lib/test/test_smtpnet.py
index f0f1edd..7d0fa98 100644
--- a/Lib/test/test_smtpnet.py
+++ b/Lib/test/test_smtpnet.py
@@ -4,21 +4,54 @@
 from test import support
 import smtplib
 
+ssl = support.import_module("ssl")
+
 support.requires("network")
 
+
+class SmtpTest(unittest.TestCase):
+    testServer = 'smtp.gmail.com'
+    remotePort = 25
+    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+
+    def test_connect_starttls(self):
+        support.get_attribute(smtplib, 'SMTP_SSL')
+        with support.transient_internet(self.testServer):
+            server = smtplib.SMTP(self.testServer, self.remotePort)
+            server.starttls(context=self.context)
+            server.ehlo()
+            server.quit()
+
+
 class SmtpSSLTest(unittest.TestCase):
     testServer = 'smtp.gmail.com'
     remotePort = 465
+    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
 
     def test_connect(self):
         support.get_attribute(smtplib, 'SMTP_SSL')
         with support.transient_internet(self.testServer):
             server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
-        server.ehlo()
-        server.quit()
+            server.ehlo()
+            server.quit()
+
+    def test_connect_default_port(self):
+        support.get_attribute(smtplib, 'SMTP_SSL')
+        with support.transient_internet(self.testServer):
+            server = smtplib.SMTP_SSL(self.testServer)
+            server.ehlo()
+            server.quit()
+
+    def test_connect_using_sslcontext(self):
+        support.get_attribute(smtplib, 'SMTP_SSL')
+        with support.transient_internet(self.testServer):
+            server = smtplib.SMTP_SSL(self.testServer, self.remotePort,
+                                      context=self.context)
+            server.ehlo()
+            server.quit()
+
 
 def test_main():
-    support.run_unittest(SmtpSSLTest)
+    support.run_unittest(SmtpTest, SmtpSSLTest)
 
 if __name__ == "__main__":
     test_main()
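
The new SmtpTest drives the plaintext-then-upgrade path; outside the test harness the same STARTTLS handshake looks like this (network access and the test's server name assumed):

    import smtplib
    import ssl

    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    server = smtplib.SMTP('smtp.gmail.com', 25)   # plain connection first
    server.starttls(context=context)              # upgrade it to TLS
    server.ehlo()                                 # re-identify over TLS
    server.quit()
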
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index 99d658b..59e3019 100644
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -24,14 +24,6 @@
 except ImportError:
     fcntl = False
 
-def linux_version():
-    try:
-        # platform.release() is something like '2.6.33.7-desktop-2mnb'
-        version_string = platform.release().split('-')[0]
-        return tuple(map(int, version_string.split('.')))
-    except ValueError:
-        return 0, 0, 0
-
 HOST = support.HOST
 MSG = 'Michael Gilfix was here\u1234\r\n'.encode('utf-8') ## test unicode string and carriage return
 
@@ -270,18 +262,57 @@
 
     def testSocketError(self):
         # Testing socket module exceptions
-        def raise_error(*args, **kwargs):
+        msg = "Error raising socket exception (%s)."
+        with self.assertRaises(socket.error, msg=msg % 'socket.error'):
             raise socket.error
-        def raise_herror(*args, **kwargs):
+        with self.assertRaises(socket.error, msg=msg % 'socket.herror'):
             raise socket.herror
-        def raise_gaierror(*args, **kwargs):
+        with self.assertRaises(socket.error, msg=msg % 'socket.gaierror'):
             raise socket.gaierror
-        self.assertRaises(socket.error, raise_error,
-                              "Error raising socket exception.")
-        self.assertRaises(socket.error, raise_herror,
-                              "Error raising socket exception.")
-        self.assertRaises(socket.error, raise_gaierror,
-                              "Error raising socket exception.")
+
+    def testSendtoErrors(self):
+        # Testing that sendto doesn't mask failures. See #10169.
+        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        self.addCleanup(s.close)
+        s.bind(('', 0))
+        sockname = s.getsockname()
+        # 2 args
+        with self.assertRaises(TypeError) as cm:
+            s.sendto('\u2620', sockname)
+        self.assertEqual(str(cm.exception),
+                         "'str' does not support the buffer interface")
+        with self.assertRaises(TypeError) as cm:
+            s.sendto(5j, sockname)
+        self.assertEqual(str(cm.exception),
+                         "'complex' does not support the buffer interface")
+        with self.assertRaises(TypeError) as cm:
+            s.sendto(b'foo', None)
+        self.assertIn('not NoneType', str(cm.exception))
+        # 3 args
+        with self.assertRaises(TypeError) as cm:
+            s.sendto('\u2620', 0, sockname)
+        self.assertEqual(str(cm.exception),
+                         "'str' does not support the buffer interface")
+        with self.assertRaises(TypeError) as cm:
+            s.sendto(5j, 0, sockname)
+        self.assertEqual(str(cm.exception),
+                         "'complex' does not support the buffer interface")
+        with self.assertRaises(TypeError) as cm:
+            s.sendto(b'foo', 0, None)
+        self.assertIn('not NoneType', str(cm.exception))
+        with self.assertRaises(TypeError) as cm:
+            s.sendto(b'foo', 'bar', sockname)
+        self.assertIn('an integer is required', str(cm.exception))
+        with self.assertRaises(TypeError) as cm:
+            s.sendto(b'foo', None, None)
+        self.assertIn('an integer is required', str(cm.exception))
+        # wrong number of args
+        with self.assertRaises(TypeError) as cm:
+            s.sendto(b'foo')
+        self.assertIn('(1 given)', str(cm.exception))
+        with self.assertRaises(TypeError) as cm:
+            s.sendto(b'foo', 0, sockname, 4)
+        self.assertIn('(4 given)', str(cm.exception))
 
     def testCrucialConstants(self):
         # Testing for mission critical constants
@@ -333,6 +364,32 @@
         finally:
             socket.sethostname(oldhn)
 
+    @unittest.skipUnless(hasattr(socket, 'if_nameindex'),
+                         'socket.if_nameindex() not available.')
+    def testInterfaceNameIndex(self):
+        interfaces = socket.if_nameindex()
+        for index, name in interfaces:
+            self.assertIsInstance(index, int)
+            self.assertIsInstance(name, str)
+            # interface indices are non-zero integers
+            self.assertGreater(index, 0)
+            _index = socket.if_nametoindex(name)
+            self.assertIsInstance(_index, int)
+            self.assertEqual(index, _index)
+            _name = socket.if_indextoname(index)
+            self.assertIsInstance(_name, str)
+            self.assertEqual(name, _name)
+
+    @unittest.skipUnless(hasattr(socket, 'if_nameindex'),
+                         'socket.if_nameindex() not available.')
+    def testInvalidInterfaceNameIndex(self):
+        # test nonexistent interface index/name
+        self.assertRaises(socket.error, socket.if_indextoname, 0)
+        self.assertRaises(socket.error, socket.if_nametoindex, '_DEADBEEF')
+        # test with invalid values
+        self.assertRaises(TypeError, socket.if_nametoindex, 0)
+        self.assertRaises(TypeError, socket.if_indextoname, '_DEADBEEF')
+
     def testRefCountGetNameInfo(self):
         # Testing reference count for getnameinfo
         if hasattr(sys, "getrefcount"):
@@ -528,23 +585,9 @@
 
     # XXX The following don't test module-level functionality...
 
-    def _get_unused_port(self, bind_address='0.0.0.0'):
-        """Use a temporary socket to elicit an unused ephemeral port.
-
-        Args:
-            bind_address: Hostname or IP address to search for a port on.
-
-        Returns: A most likely to be unused port.
-        """
-        tempsock = socket.socket()
-        tempsock.bind((bind_address, 0))
-        host, port = tempsock.getsockname()
-        tempsock.close()
-        return port
-
     def testSockName(self):
         # Testing getsockname()
-        port = self._get_unused_port()
+        port = support.find_unused_port()
         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         self.addCleanup(sock.close)
         sock.bind(("0.0.0.0", port))
@@ -593,7 +636,7 @@
 
     def test_getsockaddrarg(self):
         host = '0.0.0.0'
-        port = self._get_unused_port(bind_address=host)
+        port = support.find_unused_port()
         big_port = port + 65536
         neg_port = port - 65536
         sock = socket.socket()
@@ -758,6 +801,13 @@
             for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
                 self.assertRaises(TypeError, pickle.dumps, sock, protocol)
 
+    def test_listen_backlog0(self):
+        srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        srv.bind((HOST, 0))
+        # backlog = 0
+        srv.listen(0)
+        srv.close()
+
 
 @unittest.skipUnless(thread, 'Threading required for this test.')
 class BasicTCPTest(SocketConnectedTest):
@@ -974,7 +1024,7 @@
 
     if hasattr(socket, "SOCK_NONBLOCK"):
         def testInitNonBlocking(self):
-            v = linux_version()
+            v = support.linux_version()
             if v < (2, 6, 28):
                 self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
                               % ".".join(map(str, v)))
@@ -1952,7 +2002,7 @@
 @unittest.skipUnless(fcntl, "module fcntl not available")
 class CloexecConstantTest(unittest.TestCase):
     def test_SOCK_CLOEXEC(self):
-        v = linux_version()
+        v = support.linux_version()
         if v < (2, 6, 28):
             self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
                           % ".".join(map(str, v)))
@@ -1974,7 +2024,7 @@
             self.assertEqual(s.gettimeout(), None)
 
     def test_SOCK_NONBLOCK(self):
-        v = linux_version()
+        v = support.linux_version()
         if v < (2, 6, 28):
             self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
                           % ".".join(map(str, v)))
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 138367b..8c21975 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -21,9 +21,11 @@
 ssl = support.import_module("ssl")
 
 PROTOCOLS = [
-    ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv3,
+    ssl.PROTOCOL_SSLv3,
     ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1
 ]
+if hasattr(ssl, 'PROTOCOL_SSLv2'):
+    PROTOCOLS.append(ssl.PROTOCOL_SSLv2)
 
 HOST = support.HOST
 
@@ -67,22 +69,25 @@
 
 # Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2
 def skip_if_broken_ubuntu_ssl(func):
-    @functools.wraps(func)
-    def f(*args, **kwargs):
-        try:
-            ssl.SSLContext(ssl.PROTOCOL_SSLv2)
-        except ssl.SSLError:
-            if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
-                platform.linux_distribution() == ('debian', 'squeeze/sid', '')):
-                raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
-        return func(*args, **kwargs)
-    return f
+    if hasattr(ssl, 'PROTOCOL_SSLv2'):
+        @functools.wraps(func)
+        def f(*args, **kwargs):
+            try:
+                ssl.SSLContext(ssl.PROTOCOL_SSLv2)
+            except ssl.SSLError:
+                if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
+                    platform.linux_distribution() == ('debian', 'squeeze/sid', '')):
+                    raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
+            return func(*args, **kwargs)
+        return f
+    else:
+        return func
 
 
 class BasicSocketTests(unittest.TestCase):
 
     def test_constants(self):
-        ssl.PROTOCOL_SSLv2
+        # ssl.PROTOCOL_SSLv2 is optional, see PROTOCOLS above
         ssl.PROTOCOL_SSLv23
         ssl.PROTOCOL_SSLv3
         ssl.PROTOCOL_TLSv1
@@ -97,6 +102,14 @@
             sys.stdout.write("\n RAND_status is %d (%s)\n"
                              % (v, (v and "sufficient randomness") or
                                 "insufficient randomness"))
+
+        data, is_cryptographic = ssl.RAND_pseudo_bytes(16)
+        self.assertEqual(len(data), 16)
+        self.assertEqual(is_cryptographic, v == 1)
+        if v:
+            data = ssl.RAND_bytes(16)
+            self.assertEqual(len(data), 16)
+
         try:
             ssl.RAND_egd(1)
         except TypeError:
@@ -277,6 +290,24 @@
                             (('organizationName', 'Google Inc'),))}
         fail(cert, 'mail.google.com')
 
+        # No DNS entry in subjectAltName but a commonName
+        cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT',
+                'subject': ((('countryName', 'US'),),
+                            (('stateOrProvinceName', 'California'),),
+                            (('localityName', 'Mountain View'),),
+                            (('commonName', 'mail.google.com'),)),
+                'subjectAltName': (('othername', 'blabla'), )}
+        ok(cert, 'mail.google.com')
+
+        # No DNS entry in subjectAltName and no commonName
+        cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT',
+                'subject': ((('countryName', 'US'),),
+                            (('stateOrProvinceName', 'California'),),
+                            (('localityName', 'Mountain View'),),
+                            (('organizationName', 'Google Inc'),)),
+                'subjectAltName': (('othername', 'blabla'),)}
+        fail(cert, 'google.com')
+
         # Empty cert / no cert
         self.assertRaises(ValueError, ssl.match_hostname, None, 'example.com')
         self.assertRaises(ValueError, ssl.match_hostname, {}, 'example.com')
@@ -292,7 +323,8 @@
 
     @skip_if_broken_ubuntu_ssl
     def test_constructor(self):
-        ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv2)
+        if hasattr(ssl, 'PROTOCOL_SSLv2'):
+            ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv2)
         ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
         ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv3)
         ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
@@ -495,6 +527,23 @@
             finally:
                 s.close()
 
+    def test_timeout_connect_ex(self):
+        # Issue #12065: on a timeout, connect_ex() should return the original
+        # errno (mimicking the behaviour of non-SSL sockets).
+        with support.transient_internet("svn.python.org"):
+            s = ssl.wrap_socket(socket.socket(socket.AF_INET),
+                                cert_reqs=ssl.CERT_REQUIRED,
+                                ca_certs=SVN_PYTHON_ORG_ROOT_CERT,
+                                do_handshake_on_connect=False)
+            try:
+                s.settimeout(0.0000001)
+                rc = s.connect_ex(('svn.python.org', 443))
+                if rc == 0:
+                    self.skipTest("svn.python.org responded too quickly")
+                self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK))
+            finally:
+                s.close()
+
     def test_connect_with_context(self):
         with support.transient_internet("svn.python.org"):
             # Same as test_connect, but with a separately created context
@@ -1186,6 +1235,8 @@
                 t.join()
 
         @skip_if_broken_ubuntu_ssl
+        @unittest.skipUnless(hasattr(ssl, 'PROTOCOL_SSLv2'),
+                             "OpenSSL is compiled without SSLv2 support")
         def test_protocol_sslv2(self):
             """Connecting to an SSLv2 server with various client options"""
             if support.verbose:
@@ -1211,14 +1262,15 @@
             """Connecting to an SSLv23 server with various client options"""
             if support.verbose:
                 sys.stdout.write("\n")
-            try:
-                try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv2, True)
-            except (ssl.SSLError, socket.error) as x:
-                # this fails on some older versions of OpenSSL (0.9.7l, for instance)
-                if support.verbose:
-                    sys.stdout.write(
-                        " SSL2 client to SSL23 server test unexpectedly failed:\n %s\n"
-                        % str(x))
+            if hasattr(ssl, 'PROTOCOL_SSLv2'):
+                try:
+                    try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv2, True)
+                except (ssl.SSLError, socket.error) as x:
+                    # this fails on some older versions of OpenSSL (0.9.7l, for instance)
+                    if support.verbose:
+                        sys.stdout.write(
+                            " SSL2 client to SSL23 server test unexpectedly failed:\n %s\n"
+                            % str(x))
             try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True)
             try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True)
             try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True)
@@ -1249,7 +1301,8 @@
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED)
-            try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
+            if hasattr(ssl, 'PROTOCOL_SSLv2'):
+                try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, False)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False)
             if no_sslv2_implies_sslv3_hello():
@@ -1265,7 +1318,8 @@
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED)
-            try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
+            if hasattr(ssl, 'PROTOCOL_SSLv2'):
+                try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv23, False)
 
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index e8abfef..776e143 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -1590,7 +1590,8 @@
     def test_returncode(self):
         with subprocess.Popen([sys.executable, "-c",
                                "import sys; sys.exit(100)"]) as proc:
-            proc.wait()
+            pass
+        # __exit__ calls wait(), so the returncode should be set
         self.assertEqual(proc.returncode, 100)
 
     def test_communicate_stdin(self):
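
The amended test_returncode relies on Popen's context-manager protocol: __exit__ closes the standard streams and calls wait(), so returncode is already set when the with-block is left. In isolation:

    import subprocess
    import sys

    with subprocess.Popen([sys.executable, '-c',
                           'import sys; sys.exit(100)']) as proc:
        pass                                  # __exit__ waits for the child

    assert proc.returncode == 100
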
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
index 193b5f0..77c2364 100644
--- a/Lib/test/test_sysconfig.py
+++ b/Lib/test/test_sysconfig.py
@@ -1,9 +1,3 @@
-"""Tests for 'site'.
-
-Tests assume the initial paths in sys.path once the interpreter has begun
-executing have not been removed.
-
-"""
 import unittest
 import sys
 import os
@@ -16,14 +10,13 @@
 
 import sysconfig
 from sysconfig import (get_paths, get_platform, get_config_vars,
-                       get_path, get_path_names, _INSTALL_SCHEMES,
+                       get_path, get_path_names, _SCHEMES,
                        _get_default_scheme, _expand_vars,
                        get_scheme_names, get_config_var, _main)
 
 class TestSysConfig(unittest.TestCase):
 
     def setUp(self):
-        """Make a copy of sys.path"""
         super(TestSysConfig, self).setUp()
         self.sys_path = sys.path[:]
         self.makefile = None
@@ -44,10 +37,15 @@
         self.isabs = os.path.isabs
         self.splitdrive = os.path.splitdrive
         self._config_vars = copy(sysconfig._CONFIG_VARS)
-        self.old_environ = deepcopy(os.environ)
+        self._added_envvars = []
+        self._changed_envvars = []
+        for var in ('MACOSX_DEPLOYMENT_TARGET', 'Path'):
+            if var in os.environ:
+                self._changed_envvars.append((var, os.environ[var]))
+            else:
+                self._added_envvars.append(var)
 
     def tearDown(self):
-        """Restore sys.path"""
         sys.path[:] = self.sys_path
         if self.makefile is not None:
             os.unlink(self.makefile)
@@ -64,13 +62,10 @@
         os.path.isabs = self.isabs
         os.path.splitdrive = self.splitdrive
         sysconfig._CONFIG_VARS = copy(self._config_vars)
-        for key, value in self.old_environ.items():
-            if os.environ.get(key) != value:
-                os.environ[key] = value
-
-        for key in list(os.environ.keys()):
-            if key not in self.old_environ:
-                del os.environ[key]
+        for var, value in self._changed_envvars:
+            os.environ[var] = value
+        for var in self._added_envvars:
+            os.environ.pop(var, None)
 
         super(TestSysConfig, self).tearDown()
 
@@ -88,7 +83,7 @@
             shutil.rmtree(path)
 
     def test_get_path_names(self):
-        self.assertEqual(get_path_names(), sysconfig._SCHEME_KEYS)
+        self.assertEqual(get_path_names(), _SCHEMES.options('posix_prefix'))
 
     def test_get_paths(self):
         scheme = get_paths()
@@ -102,8 +97,8 @@
 
     def test_get_path(self):
         # xxx make real tests here
-        for scheme in _INSTALL_SCHEMES:
-            for name in _INSTALL_SCHEMES[scheme]:
+        for scheme in _SCHEMES:
+            for name in _SCHEMES[scheme]:
                 res = get_path(name, scheme)
 
     def test_get_config_vars(self):
@@ -142,7 +137,7 @@
                    ('Darwin Kernel Version 8.11.1: '
                     'Wed Oct 10 18:23:28 PDT 2007; '
                     'root:xnu-792.25.20~1/RELEASE_I386'), 'PowerPC'))
-        os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
+        get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
 
         get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
                                        '-fwrapv -O3 -Wall -Wstrict-prototypes')
@@ -156,13 +151,12 @@
         finally:
             sys.maxsize = maxint
 
-
         self._set_uname(('Darwin', 'macziade', '8.11.1',
                    ('Darwin Kernel Version 8.11.1: '
                     'Wed Oct 10 18:23:28 PDT 2007; '
                     'root:xnu-792.25.20~1/RELEASE_I386'), 'i386'))
         get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
-        os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
+        get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
 
         get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
                                        '-fwrapv -O3 -Wall -Wstrict-prototypes')
@@ -176,7 +170,7 @@
             sys.maxsize = maxint
 
         # macbook with fat binaries (fat, universal or fat64)
-        os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.4'
+        get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4'
         get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot '
                                        '/Developer/SDKs/MacOSX10.4u.sdk  '
                                        '-fno-strict-aliasing -fno-common '
@@ -290,6 +284,50 @@
 
         self.assertIn(ldflags, ldshared)
 
+    @unittest.skipUnless(sys.platform == "darwin", "test only relevant on MacOSX")
+    def test_platform_in_subprocess(self):
+        my_platform = sysconfig.get_platform()
+
+        # Test without MACOSX_DEPLOYMENT_TARGET in the environment
+
+        env = os.environ.copy()
+        if 'MACOSX_DEPLOYMENT_TARGET' in env:
+            del env['MACOSX_DEPLOYMENT_TARGET']
+
+        with open('/dev/null', 'w') as devnull_fp:
+            p = subprocess.Popen([
+                    sys.executable, '-c',
+                    'import sysconfig; print(sysconfig.get_platform())',
+                ],
+                stdout=subprocess.PIPE,
+                stderr=devnull_fp,
+                env=env)
+        test_platform = p.communicate()[0].strip()
+        test_platform = test_platform.decode('utf-8')
+        status = p.wait()
+
+        self.assertEqual(status, 0)
+        self.assertEqual(my_platform, test_platform)
+
+        # Test with MACOSX_DEPLOYMENT_TARGET in the environment, and
+        # using a value that is unlikely to be the default one.
+        env = os.environ.copy()
+        env['MACOSX_DEPLOYMENT_TARGET'] = '10.1'
+
+        with open('/dev/null', 'w') as devnull_fp:
+            p = subprocess.Popen([
+                    sys.executable, '-c',
+                    'import sysconfig; print(sysconfig.get_platform())',
+                ],
+                stdout=subprocess.PIPE,
+                stderr=devnull_fp,
+                env=env)
+        test_platform = p.communicate()[0].strip()
+        test_platform = test_platform.decode('utf-8')
+        status = p.wait()
+
+        self.assertEqual(status, 0)
+        self.assertEqual(my_platform, test_platform)
+
 
 def test_main():
     run_unittest(TestSysConfig)
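
The setUp()/tearDown() rewrite above saves and restores only the environment variables the tests may touch, instead of deep-copying all of os.environ. The same bookkeeping as a standalone sketch (variable names taken from the patch):

    import os

    WATCHED = ('MACOSX_DEPLOYMENT_TARGET', 'Path')

    def save_env():
        changed = [(v, os.environ[v]) for v in WATCHED if v in os.environ]
        added = [v for v in WATCHED if v not in os.environ]
        return changed, added

    def restore_env(changed, added):
        for var, value in changed:
            os.environ[var] = value           # put back the original value
        for var in added:
            os.environ.pop(var, None)         # drop anything a test created
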
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index c6682d6..2dc7773 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -1,18 +1,17 @@
 # Very rudimentary test of threading module
 
 import test.support
-from test.support import verbose, strip_python_stderr
+from test.support import verbose, strip_python_stderr, import_module
 import random
 import re
 import sys
-_thread = test.support.import_module('_thread')
-threading = test.support.import_module('threading')
+_thread = import_module('_thread')
+threading = import_module('threading')
 import time
 import unittest
 import weakref
 import os
-import subprocess
-from test.script_helper import assert_python_ok
+from test.script_helper import assert_python_ok, assert_python_failure
 
 from test import lock_tests
 
@@ -163,10 +162,7 @@
     # PyThreadState_SetAsyncExc() is a CPython-only gimmick, not (currently)
     # exposed at the Python level.  This test relies on ctypes to get at it.
     def test_PyThreadState_SetAsyncExc(self):
-        try:
-            import ctypes
-        except ImportError:
-            raise unittest.SkipTest("cannot import ctypes")
+        ctypes = import_module("ctypes")
 
         set_async_exc = ctypes.pythonapi.PyThreadState_SetAsyncExc
 
@@ -269,12 +265,9 @@
         # Issue 1402: the PyGILState_Ensure / _Release functions may be called
         # very late on python exit: on deallocation of a running thread for
         # example.
-        try:
-            import ctypes
-        except ImportError:
-            raise unittest.SkipTest("cannot import ctypes")
+        import_module("ctypes")
 
-        rc = subprocess.call([sys.executable, "-c", """if 1:
+        rc, out, err = assert_python_failure("-c", """if 1:
             import ctypes, sys, time, _thread
 
             # This lock is used as a simple event variable.
@@ -298,13 +291,13 @@
             _thread.start_new_thread(waitingThread, ())
             ready.acquire()  # Be sure the other thread is waiting.
             sys.exit(42)
-            """])
+            """)
         self.assertEqual(rc, 42)
 
     def test_finalize_with_trace(self):
         # Issue1733757
         # Avoid a deadlock when sys.settrace steps into threading._shutdown
-        p = subprocess.Popen([sys.executable, "-c", """if 1:
+        assert_python_ok("-c", """if 1:
             import sys, threading
 
             # A deadlock-killer, to prevent the
@@ -324,21 +317,12 @@
                 return func
 
             sys.settrace(func)
-            """],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE)
-        self.addCleanup(p.stdout.close)
-        self.addCleanup(p.stderr.close)
-        stdout, stderr = p.communicate()
-        rc = p.returncode
-        self.assertFalse(rc == 2, "interpreted was blocked")
-        self.assertTrue(rc == 0,
-                        "Unexpected error: " + ascii(stderr))
+            """)
 
     def test_join_nondaemon_on_shutdown(self):
         # Issue 1722344
         # Raising SystemExit skipped threading._shutdown
-        p = subprocess.Popen([sys.executable, "-c", """if 1:
+        rc, out, err = assert_python_ok("-c", """if 1:
                 import threading
                 from time import sleep
 
@@ -350,16 +334,10 @@
 
                 threading.Thread(target=child).start()
                 raise SystemExit
-            """],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE)
-        self.addCleanup(p.stdout.close)
-        self.addCleanup(p.stderr.close)
-        stdout, stderr = p.communicate()
-        self.assertEqual(stdout.strip(),
+            """)
+        self.assertEqual(out.strip(),
             b"Woke up, sleep function is: <built-in function sleep>")
-        stderr = strip_python_stderr(stderr)
-        self.assertEqual(stderr, b"")
+        self.assertEqual(err, b"")
 
     def test_enumerate_after_join(self):
         # Try hard to trigger #1703448: a thread is still returned in
@@ -452,13 +430,9 @@
                 sys.stdout.flush()
         \n""" + script
 
-        p = subprocess.Popen([sys.executable, "-c", script], stdout=subprocess.PIPE)
-        rc = p.wait()
-        data = p.stdout.read().decode().replace('\r', '')
-        p.stdout.close()
+        rc, out, err = assert_python_ok("-c", script)
+        data = out.decode().replace('\r', '')
         self.assertEqual(data, "end of main\nend of thread\n")
-        self.assertFalse(rc == 2, "interpreter was blocked")
-        self.assertTrue(rc == 0, "Unexpected error")
 
     def test_1_join_on_shutdown(self):
         # The usual case: on exit, wait for a non-daemon thread
@@ -518,11 +492,8 @@
         self._run_and_join(script)
 
     def assertScriptHasOutput(self, script, expected_output):
-        p = subprocess.Popen([sys.executable, "-c", script],
-                             stdout=subprocess.PIPE)
-        stdout, stderr = p.communicate()
-        data = stdout.decode().replace('\r', '')
-        self.assertEqual(p.returncode, 0, "Unexpected error")
+        rc, out, err = assert_python_ok("-c", script)
+        data = out.decode().replace('\r', '')
         self.assertEqual(data, expected_output)
 
     @unittest.skipUnless(hasattr(os, 'fork'), "needs os.fork()")
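
The subprocess.Popen boilerplate removed throughout this file is replaced by test.script_helper: assert_python_ok() runs a fresh interpreter, asserts a zero exit status and returns (returncode, stdout, stderr) as bytes, while assert_python_failure() asserts a non-zero status. Usage in brief:

    from test.script_helper import assert_python_ok, assert_python_failure

    rc, out, err = assert_python_ok('-c', 'print("hello")')
    assert out.strip() == b'hello' and err == b''

    rc, out, err = assert_python_failure('-c', 'import sys; sys.exit(42)')
    assert rc == 42
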
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index b6e4e91..8cd5ca0 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -5,6 +5,7 @@
 import io
 import socket
 import array
+import sys
 
 import urllib.request
 # The proxy bypass method imported below has logic specific to the OSX
@@ -1162,6 +1163,8 @@
         self.assertEqual(req.get_host(), "proxy.example.com:3128")
         self.assertEqual(req.get_header("Proxy-authorization"),"FooBar")
 
+    @unittest.skipUnless(sys.platform == 'darwin', "only relevant for OSX")
     def test_osx_proxy_bypass(self):
         bypass = {
             'exclude_simple': False,
@@ -1265,6 +1268,26 @@
         # _test_basic_auth called .open() twice)
         self.assertEqual(opener.recorded, ["digest", "basic"]*2)
 
+    def test_unsupported_auth_digest_handler(self):
+        opener = OpenerDirector()
+        # While using DigestAuthHandler
+        digest_auth_handler = urllib.request.HTTPDigestAuthHandler(None)
+        http_handler = MockHTTPHandler(
+            401, 'WWW-Authenticate: Kerberos\r\n\r\n')
+        opener.add_handler(digest_auth_handler)
+        opener.add_handler(http_handler)
+        self.assertRaises(ValueError, opener.open, "http://www.example.com")
+
+    def test_unsupported_auth_basic_handler(self):
+        # While using BasicAuthHandler
+        opener = OpenerDirector()
+        basic_auth_handler = urllib.request.HTTPBasicAuthHandler(None)
+        http_handler = MockHTTPHandler(
+            401, 'WWW-Authenticate: NTLM\r\n\r\n')
+        opener.add_handler(basic_auth_handler)
+        opener.add_handler(http_handler)
+        self.assertRaises(ValueError, opener.open, "http://www.example.com")
+
     def _test_basic_auth(self, opener, auth_handler, auth_header,
                          realm, http_handler, password_manager,
                          request_url, protected_url):
@@ -1302,6 +1325,7 @@
         self.assertEqual(len(http_handler.requests), 1)
         self.assertFalse(http_handler.requests[0].has_header(auth_header))
 
+
 class MiscTests(unittest.TestCase):
 
     def test_build_opener(self):
diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py
index 32efb2b..03d1708 100644
--- a/Lib/test/test_urllibnet.py
+++ b/Lib/test/test_urllibnet.py
@@ -3,6 +3,7 @@
 import unittest
 from test import support
 
+import contextlib
 import socket
 import urllib.request
 import sys
@@ -27,6 +28,7 @@
             f = urllib.request.urlopen("http://www.python.org/")
             x = f.read()
 
+
 class urlopenNetworkTests(unittest.TestCase):
     """Tests urllib.reqest.urlopen using the network.
 
@@ -42,43 +44,37 @@
 
     """
 
+    @contextlib.contextmanager
     def urlopen(self, *args, **kwargs):
         resource = args[0]
-        cm = support.transient_internet(resource)
-        cm.__enter__()
-        self.addCleanup(cm.__exit__, None, None, None)
-        return urllib.request.urlopen(*args, **kwargs)
+        with support.transient_internet(resource):
+            r = urllib.request.urlopen(*args, **kwargs)
+            try:
+                yield r
+            finally:
+                r.close()
 
     def test_basic(self):
         # Simple test expected to pass.
-        open_url = self.urlopen("http://www.python.org/")
-        for attr in ("read", "readline", "readlines", "fileno", "close",
-                     "info", "geturl"):
-            self.assertTrue(hasattr(open_url, attr), "object returned from "
-                            "urlopen lacks the %s attribute" % attr)
-        try:
+        with self.urlopen("http://www.python.org/") as open_url:
+            for attr in ("read", "readline", "readlines", "fileno", "close",
+                         "info", "geturl"):
+                self.assertTrue(hasattr(open_url, attr), "object returned from "
+                                "urlopen lacks the %s attribute" % attr)
             self.assertTrue(open_url.read(), "calling 'read' failed")
-        finally:
-            open_url.close()
 
     def test_readlines(self):
         # Test both readline and readlines.
-        open_url = self.urlopen("http://www.python.org/")
-        try:
+        with self.urlopen("http://www.python.org/") as open_url:
             self.assertIsInstance(open_url.readline(), bytes,
                                   "readline did not return a string")
             self.assertIsInstance(open_url.readlines(), list,
                                   "readlines did not return a list")
-        finally:
-            open_url.close()
 
     def test_info(self):
         # Test 'info'.
-        open_url = self.urlopen("http://www.python.org/")
-        try:
+        with self.urlopen("http://www.python.org/") as open_url:
             info_obj = open_url.info()
-        finally:
-            open_url.close()
             self.assertIsInstance(info_obj, email.message.Message,
                                   "object returned by 'info' is not an "
                                   "instance of email.message.Message")
@@ -87,22 +83,20 @@
     def test_geturl(self):
         # Make sure same URL as opened is returned by geturl.
         URL = "http://www.python.org/"
-        open_url = self.urlopen(URL)
-        try:
+        with self.urlopen(URL) as open_url:
             gotten_url = open_url.geturl()
-        finally:
-            open_url.close()
-        self.assertEqual(gotten_url, URL)
+            self.assertEqual(gotten_url, URL)
 
     def test_getcode(self):
         # test getcode() with the fancy opener to get 404 error codes
         URL = "http://www.python.org/XXXinvalidXXX"
-        open_url = urllib.request.FancyURLopener().open(URL)
-        try:
-            code = open_url.getcode()
-        finally:
-            open_url.close()
-        self.assertEqual(code, 404)
+        with support.transient_internet(URL):
+            open_url = urllib.request.FancyURLopener().open(URL)
+            try:
+                code = open_url.getcode()
+            finally:
+                open_url.close()
+            self.assertEqual(code, 404)
 
     def test_fileno(self):
         if sys.platform in ('win32',):
@@ -110,14 +104,11 @@
             # test can't pass on Windows.
             return
         # Make sure fd returned by fileno is valid.
-        open_url = self.urlopen("http://www.python.org/", timeout=None)
-        fd = open_url.fileno()
-        FILE = os.fdopen(fd, encoding='utf-8')
-        try:
-            self.assertTrue(FILE.read(), "reading from file created using fd "
-                                      "returned by fileno failed")
-        finally:
-            FILE.close()
+        with self.urlopen("http://www.python.org/", timeout=None) as open_url:
+            fd = open_url.fileno()
+            with os.fdopen(fd, encoding='utf-8') as f:
+                self.assertTrue(f.read(), "reading from file created using fd "
+                                          "returned by fileno failed")
 
     def test_bad_address(self):
         # Make sure proper exception is raised when connecting to a bogus
@@ -133,66 +124,60 @@
                           urllib.request.urlopen,
                           "http://sadflkjsasf.i.nvali.d/")
 
+
 class urlretrieveNetworkTests(unittest.TestCase):
     """Tests urllib.request.urlretrieve using the network."""
 
+    @contextlib.contextmanager
     def urlretrieve(self, *args):
         resource = args[0]
-        cm = support.transient_internet(resource)
-        cm.__enter__()
-        self.addCleanup(cm.__exit__, None, None, None)
-        return urllib.request.urlretrieve(*args)
+        with support.transient_internet(resource):
+            file_location, info = urllib.request.urlretrieve(*args)
+            try:
+                yield file_location, info
+            finally:
+                support.unlink(file_location)
 
     def test_basic(self):
         # Test basic functionality.
-        file_location,info = self.urlretrieve("http://www.python.org/")
-        self.assertTrue(os.path.exists(file_location), "file location returned by"
-                        " urlretrieve is not a valid path")
-        FILE = open(file_location, encoding='utf-8')
-        try:
-            self.assertTrue(FILE.read(), "reading from the file location returned"
-                         " by urlretrieve failed")
-        finally:
-            FILE.close()
-            os.unlink(file_location)
+        with self.urlretrieve("http://www.python.org/") as (file_location, info):
+            self.assertTrue(os.path.exists(file_location), "file location returned by"
+                            " urlretrieve is not a valid path")
+            with open(file_location, encoding='utf-8') as f:
+                self.assertTrue(f.read(), "reading from the file location returned"
+                                " by urlretrieve failed")
 
     def test_specified_path(self):
         # Make sure that specifying the location of the file to write to works.
-        file_location,info = self.urlretrieve("http://www.python.org/",
-                                              support.TESTFN)
-        self.assertEqual(file_location, support.TESTFN)
-        self.assertTrue(os.path.exists(file_location))
-        FILE = open(file_location, encoding='utf-8')
-        try:
-            self.assertTrue(FILE.read(), "reading from temporary file failed")
-        finally:
-            FILE.close()
-            os.unlink(file_location)
+        with self.urlretrieve("http://www.python.org/",
+                              support.TESTFN) as (file_location, info):
+            self.assertEqual(file_location, support.TESTFN)
+            self.assertTrue(os.path.exists(file_location))
+            with open(file_location, encoding='utf-8') as f:
+                self.assertTrue(f.read(), "reading from temporary file failed")
 
     def test_header(self):
         # Make sure header returned as 2nd value from urlretrieve is good.
-        file_location, header = self.urlretrieve("http://www.python.org/")
-        os.unlink(file_location)
-        self.assertIsInstance(header, email.message.Message,
-                              "header is not an instance of email.message.Message")
+        with self.urlretrieve("http://www.python.org/") as (file_location, info):
+            self.assertIsInstance(info, email.message.Message,
+                                  "info is not an instance of email.message.Message")
 
     def test_data_header(self):
         logo = "http://www.python.org/community/logos/python-logo-master-v3-TM.png"
-        file_location, fileheaders = self.urlretrieve(logo)
-        os.unlink(file_location)
-        datevalue = fileheaders.get('Date')
-        dateformat = '%a, %d %b %Y %H:%M:%S GMT'
-        try:
-            time.strptime(datevalue, dateformat)
-        except ValueError:
-            self.fail('Date value not in %r format', dateformat)
+        with self.urlretrieve(logo) as (file_location, fileheaders):
+            datevalue = fileheaders.get('Date')
+            dateformat = '%a, %d %b %Y %H:%M:%S GMT'
+            try:
+                time.strptime(datevalue, dateformat)
+            except ValueError:
+                self.fail('Date value not in %r format', dateformat)
 
 
 def test_main():
     support.requires('network')
     support.run_unittest(URLTimeoutTest,
-                              urlopenNetworkTests,
-                              urlretrieveNetworkTests)
+                         urlopenNetworkTests,
+                         urlretrieveNetworkTests)
 
 if __name__ == "__main__":
     test_main()
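The refactoring above swaps manual __enter__/__exit__ bookkeeping for generator-based context managers. Reduced to a stand-alone sketch, the pattern is:

    import contextlib
    import urllib.request

    @contextlib.contextmanager
    def urlopen(url):
        r = urllib.request.urlopen(url)   # acquire the resource
        try:
            yield r                       # the with-block body runs here
        finally:
            r.close()                     # released even if the body raises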
diff --git a/Lib/test/test_userlist.py b/Lib/test/test_userlist.py
index 868ed24..6381070 100644
--- a/Lib/test/test_userlist.py
+++ b/Lib/test/test_userlist.py
@@ -52,6 +52,12 @@
                 return str(key) + '!!!'
         self.assertEqual(next(iter(T((1,2)))), "0!!!")
 
+    def test_userlist_copy(self):
+        u = self.type2test([6, 8, 1, 9, 1])
+        v = u.copy()
+        self.assertEqual(u, v)
+        self.assertEqual(type(u), type(v))
+
 def test_main():
     support.run_unittest(UserListTest)
 
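A stand-alone illustration of what the new test asserts (self.type2test is UserList in this suite):

    from collections import UserList

    u = UserList([6, 8, 1, 9, 1])
    v = u.copy()
    assert u == v              # same contents
    assert type(u) is type(v)  # copy() preserves the subclass, unlike list(u)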
diff --git a/Lib/test/test_wait3.py b/Lib/test/test_wait3.py
index 786e60b..bd06c8d 100644
--- a/Lib/test/test_wait3.py
+++ b/Lib/test/test_wait3.py
@@ -19,13 +19,16 @@
 
 class Wait3Test(ForkWait):
     def wait_impl(self, cpid):
-        for i in range(10):
+        # This many iterations can be required, since some previously run
+        # tests (e.g. test_ctypes) could have spawned a lot of children
+        # very quickly.
+        for i in range(30):
             # wait3() shouldn't hang, but some of the buildbots seem to hang
             # in the forking tests.  This is an attempt to fix the problem.
             spid, status, rusage = os.wait3(os.WNOHANG)
             if spid == cpid:
                 break
-            time.sleep(1.0)
+            time.sleep(0.1)
 
         self.assertEqual(spid, cpid)
         self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
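For reference, a minimal POSIX-only sketch of the non-blocking reaping loop being retuned here (30 polls at 0.1s instead of 10 at 1s):

    import os, time

    pid = os.fork()
    if pid == 0:
        os._exit(0)                 # child exits immediately
    else:
        for _ in range(30):
            # WNOHANG: returns (0, 0, <rusage>) while no child has exited
            spid, status, rusage = os.wait3(os.WNOHANG)
            if spid == pid:
                break
            time.sleep(0.1)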
diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py
index 90aab86..9663715 100644
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -1,9 +1,3 @@
-# We can test part of the module without zlib.
-try:
-    import zlib
-except ImportError:
-    zlib = None
-
 import io
 import os
 import sys
@@ -19,7 +13,7 @@
 from random import randint, random
 from unittest import skipUnless
 
-from test.support import TESTFN, run_unittest, findfile, unlink
+from test.support import TESTFN, run_unittest, findfile, unlink, requires_zlib
 
 TESTFN2 = TESTFN + "2"
 TESTFNDIR = TESTFN + "d"
@@ -269,44 +263,44 @@
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_iterlines_test(f, zipfile.ZIP_STORED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_test(f, zipfile.ZIP_DEFLATED)
 
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_open_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_open_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_random_open_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_random_open_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_readline_read_deflated(self):
         # Issue #7610: calls to readline() interleaved with calls to read().
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_readline_read_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_readline_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_readline_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_readlines_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_readlines_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_iterlines_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_iterlines_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_low_compression(self):
         """Check for cases where compressed data is larger than original."""
         # Create the ZIP archive
@@ -359,7 +353,7 @@
             with open(TESTFN, "rb") as f:
                 self.assertEqual(zipfp.read(TESTFN), f.read())
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_per_file_compression(self):
         """Check that files within a Zip archive can have different
         compression options."""
@@ -428,19 +422,18 @@
         # remove the test file subdirectories
         shutil.rmtree(os.path.join(os.getcwd(), 'ziptest2dir'))
 
-    def test_writestr_compression(self):
+    def test_writestr_compression_stored(self):
         zipfp = zipfile.ZipFile(TESTFN2, "w")
         zipfp.writestr("a.txt", "hello world", compress_type=zipfile.ZIP_STORED)
-        if zlib:
-            zipfp.writestr("b.txt", "hello world", compress_type=zipfile.ZIP_DEFLATED)
-
         info = zipfp.getinfo('a.txt')
         self.assertEqual(info.compress_type, zipfile.ZIP_STORED)
 
-        if zlib:
-            info = zipfp.getinfo('b.txt')
-            self.assertEqual(info.compress_type, zipfile.ZIP_DEFLATED)
-
+    @requires_zlib
+    def test_writestr_compression_deflated(self):
+        zipfp = zipfile.ZipFile(TESTFN2, "w")
+        zipfp.writestr("b.txt", "hello world", compress_type=zipfile.ZIP_DEFLATED)
+        info = zipfp.getinfo('b.txt')
+        self.assertEqual(info.compress_type, zipfile.ZIP_DEFLATED)
 
     def zip_test_writestr_permissions(self, f, compression):
         # Make sure that writestr creates files with mode 0600,
@@ -489,7 +482,7 @@
         except zipfile.BadZipFile:
             self.assertTrue(zipfp2.fp is None, 'zipfp is not closed')
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_unicode_filenames(self):
         # bug #10801
         fname = findfile('zip_cp437_header.zip')
@@ -598,7 +591,7 @@
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_test(f, zipfile.ZIP_STORED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_test(f, zipfile.ZIP_DEFLATED)
@@ -958,7 +951,7 @@
     def test_testzip_with_bad_crc_stored(self):
         self.check_testzip_with_bad_crc(zipfile.ZIP_STORED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_testzip_with_bad_crc_deflated(self):
         self.check_testzip_with_bad_crc(zipfile.ZIP_DEFLATED)
 
@@ -986,7 +979,7 @@
     def test_read_with_bad_crc_stored(self):
         self.check_read_with_bad_crc(zipfile.ZIP_STORED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_read_with_bad_crc_deflated(self):
         self.check_read_with_bad_crc(zipfile.ZIP_DEFLATED)
 
@@ -1006,7 +999,7 @@
     def test_read_return_size_stored(self):
         self.check_read_return_size(zipfile.ZIP_STORED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_read_return_size_deflated(self):
         self.check_read_return_size(zipfile.ZIP_DEFLATED)
 
@@ -1092,7 +1085,7 @@
         self.zip2.setpassword(b"perl")
         self.assertRaises(RuntimeError, self.zip2.read, "zero")
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_good_password(self):
         self.zip.setpassword(b"python")
         self.assertEqual(self.zip.read("test.txt"), self.plain)
@@ -1142,7 +1135,7 @@
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_test(f, zipfile.ZIP_STORED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_test(f, zipfile.ZIP_DEFLATED)
@@ -1182,7 +1175,7 @@
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_open_test(f, zipfile.ZIP_STORED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_open_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_open_test(f, zipfile.ZIP_DEFLATED)
@@ -1210,13 +1203,13 @@
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_random_open_test(f, zipfile.ZIP_STORED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_random_open_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.zip_random_open_test(f, zipfile.ZIP_DEFLATED)
 
 
-@skipUnless(zlib, "requires zlib")
+@requires_zlib
 class TestsWithMultipleOpens(unittest.TestCase):
     def setUp(self):
         # Create the ZIP archive
@@ -1408,28 +1401,28 @@
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.iterlines_test(f, zipfile.ZIP_STORED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_read_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.read_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_readline_read_deflated(self):
         # Issue #7610: calls to readline() interleaved with calls to read().
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.readline_read_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_readline_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.readline_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_readlines_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.readlines_test(f, zipfile.ZIP_DEFLATED)
 
-    @skipUnless(zlib, "requires zlib")
+    @requires_zlib
     def test_iterlines_deflated(self):
         for f in (TESTFN2, TemporaryFile(), io.BytesIO()):
             self.iterlines_test(f, zipfile.ZIP_DEFLATED)
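The @requires_zlib decorator used throughout presumably hoists the old inline try/except into test.support along these lines (an assumption; its definition is not part of this patch):

    import unittest
    try:
        import zlib
    except ImportError:
        zlib = None

    requires_zlib = unittest.skipUnless(zlib, 'requires zlib')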
diff --git a/Lib/test/test_zipfile64.py b/Lib/test/test_zipfile64.py
index 0e7d73f..a8fb7ab 100644
--- a/Lib/test/test_zipfile64.py
+++ b/Lib/test/test_zipfile64.py
@@ -11,12 +11,6 @@
         'test requires loads of disk-space bytes and a long time to run'
     )
 
-# We can test part of the module without zlib.
-try:
-    import zlib
-except ImportError:
-    zlib = None
-
 import zipfile, os, unittest
 import time
 import sys
@@ -24,7 +18,7 @@
 from io import StringIO
 from tempfile import TemporaryFile
 
-from test.support import TESTFN, run_unittest
+from test.support import TESTFN, run_unittest, requires_zlib
 
 TESTFN2 = TESTFN + "2"
 
@@ -81,12 +75,12 @@
         for f in TemporaryFile(), TESTFN2:
             self.zipTest(f, zipfile.ZIP_STORED)
 
-    if zlib:
-        def testDeflated(self):
-            # Try the temp file first.  If we do TESTFN2 first, then it hogs
-            # gigabytes of disk space for the duration of the test.
-            for f in TemporaryFile(), TESTFN2:
-                self.zipTest(f, zipfile.ZIP_DEFLATED)
+    @requires_zlib
+    def testDeflated(self):
+        # Try the temp file first.  If we do TESTFN2 first, then it hogs
+        # gigabytes of disk space for the duration of the test.
+        for f in TemporaryFile(), TESTFN2:
+            self.zipTest(f, zipfile.ZIP_DEFLATED)
 
     def tearDown(self):
         for fname in TESTFN, TESTFN2:
diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py
index 090f0f3..ab669cf 100644
--- a/Lib/test/test_zipimport.py
+++ b/Lib/test/test_zipimport.py
@@ -9,12 +9,6 @@
 from test import support
 from test.test_importhooks import ImportHooksBaseTestCase, test_src, test_co
 
-# some tests can be ran even without zlib
-try:
-    import zlib
-except ImportError:
-    zlib = None
-
 from zipfile import ZipFile, ZipInfo, ZIP_STORED, ZIP_DEFLATED
 
 import zipimport
@@ -25,11 +19,6 @@
 from traceback import extract_tb, extract_stack, print_tb
 raise_src = 'def do_raise(): raise TypeError\n'
 
-# so we only run testAFakeZlib once if this test is run repeatedly
-# which happens when we look for ref leaks
-test_imported = False
-
-
 def make_pyc(co, mtime):
     data = marshal.dumps(co)
     if type(mtime) is type(0.0):
@@ -397,7 +386,7 @@
             os.remove(filename)
 
 
-@unittest.skipUnless(zlib, "requires zlib")
+@support.requires_zlib
 class CompressedZipImportTestCase(UncompressedZipImportTestCase):
     compression = ZIP_DEFLATED
 
@@ -473,19 +462,7 @@
             zipimport._zip_directory_cache.clear()
 
 
-def cleanup():
-    # this is necessary if test is run repeated (like when finding leaks)
-    global test_imported
-    if test_imported:
-        zipimport._zip_directory_cache.clear()
-        if hasattr(UncompressedZipImportTestCase, 'testAFakeZlib'):
-            delattr(UncompressedZipImportTestCase, 'testAFakeZlib')
-        if hasattr(CompressedZipImportTestCase, 'testAFakeZlib'):
-            delattr(CompressedZipImportTestCase, 'testAFakeZlib')
-    test_imported = True
-
 def test_main():
-    cleanup()
     try:
         support.run_unittest(
               UncompressedZipImportTestCase,
diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py
index 9aafffa..68dd3ea 100644
--- a/Lib/test/test_zlib.py
+++ b/Lib/test/test_zlib.py
@@ -193,6 +193,7 @@
         data = b'x' * size
         try:
             self.assertRaises(OverflowError, zlib.compress, data, 1)
+            self.assertRaises(OverflowError, zlib.decompress, data)
         finally:
             data = None
 
@@ -360,6 +361,15 @@
         self.assertRaises(ValueError, dco.decompress, b"", -1)
         self.assertEqual(b'', dco.unconsumed_tail)
 
+    def test_clear_unconsumed_tail(self):
+        # Issue #12050: calling decompress() without providing max_length
+        # should clear the unconsumed_tail attribute.
+        cdata = b"x\x9cKLJ\x06\x00\x02M\x01"    # "abc"
+        dco = zlib.decompressobj()
+        ddata = dco.decompress(cdata, 1)
+        ddata += dco.decompress(dco.unconsumed_tail)
+        self.assertEqual(dco.unconsumed_tail, b"")
+
     def test_flushes(self):
         # Test flush() with the various options, using all the
         # different levels in order to provide more variations.
@@ -513,6 +523,19 @@
         decompress = lambda s: d.decompress(s) + d.flush()
         self.check_big_decompress_buffer(size, decompress)
 
+    @precisionbigmemtest(size=_4G + 100, memuse=1)
+    def test_length_overflow(self, size):
+        if size < _4G + 100:
+            self.skipTest("not enough free memory, need at least 4 GB")
+        data = b'x' * size
+        c = zlib.compressobj(1)
+        d = zlib.decompressobj()
+        try:
+            self.assertRaises(OverflowError, c.compress, data)
+            self.assertRaises(OverflowError, d.decompress, data)
+        finally:
+            data = None
+
 
 def genblock(seed, length, step=1024, generator=random):
     """length-byte stream of random data from a seed (in step-byte blocks)."""
diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py
index 881de6f..5b7c29d 100644
--- a/Lib/unittest/case.py
+++ b/Lib/unittest/case.py
@@ -104,9 +104,9 @@
 class _AssertRaisesBaseContext(object):
 
     def __init__(self, expected, test_case, callable_obj=None,
-                  expected_regex=None):
+                 expected_regex=None):
         self.expected = expected
-        self.failureException = test_case.failureException
+        self.test_case = test_case
         if callable_obj is not None:
             try:
                 self.obj_name = callable_obj.__name__
@@ -117,6 +117,24 @@
         if isinstance(expected_regex, (bytes, str)):
             expected_regex = re.compile(expected_regex)
         self.expected_regex = expected_regex
+        self.msg = None
+
+    def _raiseFailure(self, standardMsg):
+        msg = self.test_case._formatMessage(self.msg, standardMsg)
+        raise self.test_case.failureException(msg)
+
+    def handle(self, name, callable_obj, args, kwargs):
+        """
+        If callable_obj is None, assertRaises/Warns is being used as a
+        context manager, so check for a 'msg' kwarg and return self.
+        If callable_obj is not None, call it passing args and kwargs.
+        """
+        if callable_obj is None:
+            self.msg = kwargs.pop('msg', None)
+            return self
+        with self:
+            callable_obj(*args, **kwargs)
+
 
 
 class _AssertRaisesContext(_AssertRaisesBaseContext):
@@ -132,11 +150,10 @@
             except AttributeError:
                 exc_name = str(self.expected)
             if self.obj_name:
-                raise self.failureException("{0} not raised by {1}"
-                    .format(exc_name, self.obj_name))
+                self._raiseFailure("{} not raised by {}".format(exc_name,
+                                                                self.obj_name))
             else:
-                raise self.failureException("{0} not raised"
-                    .format(exc_name))
+                self._raiseFailure("{} not raised".format(exc_name))
         if not issubclass(exc_type, self.expected):
             # let unexpected exceptions pass through
             return False
@@ -147,8 +164,8 @@
 
         expected_regex = self.expected_regex
         if not expected_regex.search(str(exc_value)):
-            raise self.failureException('"%s" does not match "%s"' %
-                     (expected_regex.pattern, str(exc_value)))
+            self._raiseFailure('"{}" does not match "{}"'.format(
+                     expected_regex.pattern, str(exc_value)))
         return True
 
 
@@ -192,14 +209,13 @@
             return
         # Now we simply try to choose a helpful failure message
         if first_matching is not None:
-            raise self.failureException('"%s" does not match "%s"' %
-                     (self.expected_regex.pattern, str(first_matching)))
+            self._raiseFailure('"{}" does not match "{}"'.format(
+                     self.expected_regex.pattern, str(first_matching)))
         if self.obj_name:
-            raise self.failureException("{0} not triggered by {1}"
-                .format(exc_name, self.obj_name))
+            self._raiseFailure("{} not triggered by {}".format(exc_name,
+                                                               self.obj_name))
         else:
-            raise self.failureException("{0} not triggered"
-                .format(exc_name))
+            self._raiseFailure("{} not triggered".format(exc_name))
 
 
 class _TypeEqualityDict(object):
@@ -547,7 +563,6 @@
         except UnicodeDecodeError:
             return  '%s : %s' % (safe_repr(standardMsg), safe_repr(msg))
 
-
     def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
         """Fail unless an exception of class excClass is thrown
            by callableObj when invoked with arguments args and keyword
@@ -562,6 +577,9 @@
                 with self.assertRaises(SomeException):
                     do_something()
 
+           An optional keyword argument 'msg' can be provided when assertRaises
+           is used as a context object.
+
            The context manager keeps a reference to the exception as
            the 'exception' attribute. This allows you to inspect the
            exception after the assertion::
@@ -572,25 +590,25 @@
                self.assertEqual(the_exception.error_code, 3)
         """
         context = _AssertRaisesContext(excClass, self, callableObj)
-        if callableObj is None:
-            return context
-        with context:
-            callableObj(*args, **kwargs)
+        return context.handle('assertRaises', callableObj, args, kwargs)
 
     def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs):
         """Fail unless a warning of class warnClass is triggered
-           by callableObj when invoked with arguments args and keyword
+           by callable_obj when invoked with arguments args and keyword
            arguments kwargs.  If a different type of warning is
            triggered, it will not be handled: depending on the other
            warning filtering rules in effect, it might be silenced, printed
            out, or raised as an exception.
 
-           If called with callableObj omitted or None, will return a
+           If called with callable_obj omitted or None, will return a
            context object used like this::
 
                 with self.assertWarns(SomeWarning):
                     do_something()
 
+           An optional keyword argument 'msg' can be provided when assertWarns
+           is used as a context object.
+
            The context manager keeps a reference to the first matching
            warning as the 'warning' attribute; similarly, the 'filename'
            and 'lineno' attributes give you information about the line
@@ -603,10 +621,7 @@
                self.assertEqual(the_warning.some_attribute, 147)
         """
         context = _AssertWarnsContext(expected_warning, self, callable_obj)
-        if callable_obj is None:
-            return context
-        with context:
-            callable_obj(*args, **kwargs)
+        return context.handle('assertWarns', callable_obj, args, kwargs)
 
     def _getAssertEqualityFunc(self, first, second):
         """Get a detailed comparison function for the types of the two args.
@@ -1083,15 +1098,15 @@
             expected_regex: Regex (re pattern object or string) expected
                     to be found in error message.
             callable_obj: Function to be called.
+            msg: Optional message used in case of failure. Can only be used
+                    when assertRaisesRegex is used as a context manager.
             args: Extra args.
             kwargs: Extra kwargs.
         """
         context = _AssertRaisesContext(expected_exception, self, callable_obj,
                                        expected_regex)
-        if callable_obj is None:
-            return context
-        with context:
-            callable_obj(*args, **kwargs)
+
+        return context.handle('assertRaisesRegex', callable_obj, args, kwargs)
 
     def assertWarnsRegex(self, expected_warning, expected_regex,
                          callable_obj=None, *args, **kwargs):
@@ -1105,15 +1120,14 @@
             expected_regex: Regex (re pattern object or string) expected
                     to be found in error message.
             callable_obj: Function to be called.
+            msg: Optional message used in case of failure. Can only be used
+                    when assertWarnsRegex is used as a context manager.
             args: Extra args.
             kwargs: Extra kwargs.
         """
         context = _AssertWarnsContext(expected_warning, self, callable_obj,
                                       expected_regex)
-        if callable_obj is None:
-            return context
-        with context:
-            callable_obj(*args, **kwargs)
+        return context.handle('assertWarnsRegex', callable_obj, args, kwargs)
 
     def assertRegex(self, text, expected_regex, msg=None):
         """Fail the test unless the text matches the regular expression."""
diff --git a/Lib/unittest/test/test_assertions.py b/Lib/unittest/test/test_assertions.py
index a1d20eb..d43fe5a 100644
--- a/Lib/unittest/test/test_assertions.py
+++ b/Lib/unittest/test/test_assertions.py
@@ -1,6 +1,7 @@
 import datetime
 import warnings
 import unittest
+from itertools import product
 
 
 class Test_Assertions(unittest.TestCase):
@@ -145,6 +146,14 @@
         self.testableTrue._formatMessage(one, '\uFFFD')
 
     def assertMessages(self, methodName, args, errors):
+        """
+        Check that methodName(*args) raises errors with the correct messages.
+        errors should be a list of 4 regexes that match the error when:
+          1) longMessage = False and no msg passed;
+          2) longMessage = False and msg passed;
+          3) longMessage = True and no msg passed;
+          4) longMessage = True and msg passed;
+        """
         def getMethod(i):
             useTestableFalse  = i < 2
             if useTestableFalse:
@@ -284,3 +293,67 @@
                             ["^unexpectedly identical: None$", "^oops$",
                              "^unexpectedly identical: None$",
                              "^unexpectedly identical: None : oops$"])
+
+
+    def assertMessagesCM(self, methodName, args, func, errors):
+        """
+        Check that the correct error messages are raised while executing:
+          with method(*args):
+              func()
+        *errors* should be a list of 4 regexes that match the error when:
+          1) longMessage = False and no msg passed;
+          2) longMessage = False and msg passed;
+          3) longMessage = True and no msg passed;
+          4) longMessage = True and msg passed;
+        """
+        p = product((self.testableFalse, self.testableTrue),
+                    ({}, {"msg": "oops"}))
+        for (cls, kwargs), err in zip(p, errors):
+            method = getattr(cls, methodName)
+            with self.assertRaisesRegex(cls.failureException, err):
+                with method(*args, **kwargs):
+                    func()
+
+    def testAssertRaises(self):
+        self.assertMessagesCM('assertRaises', (TypeError,), lambda: None,
+                              ['^TypeError not raised$', '^oops$',
+                               '^TypeError not raised$',
+                               '^TypeError not raised : oops$'])
+
+    def testAssertRaisesRegex(self):
+        # test error not raised
+        self.assertMessagesCM('assertRaisesRegex', (TypeError, 'unused regex'),
+                              lambda: None,
+                              ['^TypeError not raised$', '^oops$',
+                               '^TypeError not raised$',
+                               '^TypeError not raised : oops$'])
+        # test error raised but with wrong message
+        def raise_wrong_message():
+            raise TypeError('foo')
+        self.assertMessagesCM('assertRaisesRegex', (TypeError, 'regex'),
+                              raise_wrong_message,
+                              ['^"regex" does not match "foo"$', '^oops$',
+                               '^"regex" does not match "foo"$',
+                               '^"regex" does not match "foo" : oops$'])
+
+    def testAssertWarns(self):
+        self.assertMessagesCM('assertWarns', (UserWarning,), lambda: None,
+                              ['^UserWarning not triggered$', '^oops$',
+                               '^UserWarning not triggered$',
+                               '^UserWarning not triggered : oops$'])
+
+    def testAssertWarnsRegex(self):
+        # test error not raised
+        self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'unused regex'),
+                              lambda: None,
+                              ['^UserWarning not triggered$', '^oops$',
+                               '^UserWarning not triggered$',
+                               '^UserWarning not triggered : oops$'])
+        # test warning raised but with wrong message
+        def raise_wrong_message():
+            warnings.warn('foo')
+        self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'regex'),
+                              raise_wrong_message,
+                              ['^"regex" does not match "foo"$', '^oops$',
+                               '^"regex" does not match "foo"$',
+                               '^"regex" does not match "foo" : oops$'])
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index c80b7d1..e98a976 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -825,14 +825,20 @@
             self.retried += 1
 
         if authreq:
-            mo = AbstractBasicAuthHandler.rx.search(authreq)
-            if mo:
-                scheme, quote, realm = mo.groups()
-                if scheme.lower() == 'basic':
-                    response = self.retry_http_basic_auth(host, req, realm)
-                    if response and response.code != 401:
-                        self.retried = 0
-                    return response
+            scheme = authreq.split()[0]
+            if scheme.lower() != 'basic':
+                raise ValueError("AbstractBasicAuthHandler does not"
+                                 " support the following scheme: '%s'" %
+                                 scheme)
+            mo = AbstractBasicAuthHandler.rx.search(authreq)
+            if mo:
+                scheme, quote, realm = mo.groups()
+                if scheme.lower() == 'basic':
+                    response = self.retry_http_basic_auth(host, req, realm)
+                    if response and response.code != 401:
+                        self.retried = 0
+                    return response
 
     def retry_http_basic_auth(self, host, req, realm):
         user, pw = self.passwd.find_user_password(realm, host)
@@ -918,6 +924,9 @@
             scheme = authreq.split()[0]
             if scheme.lower() == 'digest':
                 return self.retry_http_digest_auth(req, authreq)
+            elif scheme.lower() != 'basic':
+                raise ValueError("AbstractDigestAuthHandler does not support"
+                                 " the following scheme: '%s'" % scheme)
 
     def retry_http_digest_auth(self, req, auth):
         token, challenge = auth.split(' ', 1)
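Note the asymmetry in the digest hunk: 'digest' is handled, any other scheme raises, except 'basic', which deliberately falls through so a chained HTTPBasicAuthHandler can still claim it. An illustrative reduction of the dispatch (not the actual code):

    def dispatch(scheme):
        s = scheme.lower()
        if s == 'digest':
            return "retry with digest credentials"
        elif s != 'basic':
            raise ValueError("AbstractDigestAuthHandler does not support"
                             " the following scheme: '%s'" % scheme)
        # 'basic' falls through (returns None) for other handlers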
diff --git a/Lib/xml/__init__.py b/Lib/xml/__init__.py
index deed983..bf6d8dd 100644
--- a/Lib/xml/__init__.py
+++ b/Lib/xml/__init__.py
@@ -18,24 +18,3 @@
 
 
 __all__ = ["dom", "parsers", "sax", "etree"]
-
-_MINIMUM_XMLPLUS_VERSION = (0, 8, 4)
-
-
-try:
-    import _xmlplus
-except ImportError:
-    pass
-else:
-    try:
-        v = _xmlplus.version_info
-    except AttributeError:
-        # _xmlplus is too old; ignore it
-        pass
-    else:
-        if v >= _MINIMUM_XMLPLUS_VERSION:
-            import sys
-            _xmlplus.__path__.extend(__path__)
-            sys.modules[__name__] = _xmlplus
-        else:
-            del v
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 7912368..04d126f 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -75,6 +75,7 @@
 # environment variables
 PY_CPPFLAGS=	-I. -IInclude -I$(srcdir)/Include $(CONFIGURE_CPPFLAGS) $(CPPFLAGS)
 PY_LDFLAGS=	$(CONFIGURE_LDFLAGS) $(LDFLAGS)
+NO_AS_NEEDED=	@NO_AS_NEEDED@
 LDLAST=		@LDLAST@
 SGI_ABI=	@SGI_ABI@
 CCSHARED=	@CCSHARED@
@@ -459,7 +460,7 @@
 	fi
 
 libpython3.so:	libpython$(LDVERSION).so
-	$(BLDSHARED) -o $@ -Wl,-hl$@ $^
+	$(BLDSHARED) $(NO_AS_NEEDED) -o $@ -Wl,-h$@ $^
 
 libpython$(LDVERSION).dylib: $(LIBRARY_OBJS)
 	 $(CC) -dynamiclib -Wl,-single_module $(PY_LDFLAGS) -undefined dynamic_lookup -Wl,-install_name,$(prefix)/lib/libpython$(LDVERSION).dylib -Wl,-compatibility_version,$(VERSION) -Wl,-current_version,$(VERSION) -o $@ $(LIBRARY_OBJS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
@@ -946,7 +947,7 @@
 		else	true; \
 		fi; \
 	done
-	@for i in $(srcdir)/Lib/*.py $(srcdir)/Lib/*.egg-info ; \
+	@for i in $(srcdir)/Lib/*.py $(srcdir)/Lib/*.egg-info $(srcdir)/Lib/*.cfg ; \
 	do \
 		if test -x $$i; then \
 			$(INSTALL_SCRIPT) $$i $(DESTDIR)$(LIBDEST); \
diff --git a/Misc/ACKS b/Misc/ACKS
index 3516b1c..506e5c9 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -50,6 +50,7 @@
 Jeff Balogh
 Matt Bandy
 Michael J. Barber
+Nicolas Bareil
 Chris Barker
 Nick Barnes
 Quentin Barnes
@@ -269,6 +270,8 @@
 Tim Everett
 Paul Everitt
 David Everly
+Daniel Evers
+Winston Ewert
 Greg Ewing
 Martijn Faassen
 Clovis Fabricio
@@ -369,6 +372,7 @@
 Lance Finn Helsten
 Jonathan Hendry
 James Henstridge
+Kasun Herath
 Chris Herborth
 Ivan Herman
 Jürgen Hermann
@@ -641,6 +645,7 @@
 Michal Nowikowski
 Steffen Daode Nurpmeso
 Nigel O'Brian
+John O'Connor
 Kevin O'Connor
 Tim O'Malley
 Pascal Oberndoerfer
@@ -987,3 +992,4 @@
 Tarek Ziadé
 Peter Åstrand
 Torsten Becker
+Pierre Carrier
diff --git a/Misc/NEWS b/Misc/NEWS
index 8cfce02..967e2ef 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -10,6 +10,22 @@
 Core and Builtins
 -----------------
 
+- Correct the lookup of __dir__ on objects. Among other things, this causes
+  errors other than AttributeError raised during the lookup to be propagated.
+
+- Issue #12060: Use sig_atomic_t type and volatile keyword in the signal
+  module. Patch written by Charles-François Natali.
+
+- Added the if_nameindex(), if_indextoname() and if_nametoindex() functions
+  to the socket module, as requested in issue #1746656.
+
+- Issue #12044: Fixed subprocess.Popen, when used as a context manager, to
+  wait for the process to end when exiting the context, so as to avoid
+  unintentionally leaving zombie processes around.
+
+- Issue #1195: Fix input() when it is interrupted by CTRL+d and then CTRL+c:
+  clear the end-of-file indicator after CTRL+d.
+
 - Issue #1856: Avoid crashes and lockups when daemon threads run while the
   interpreter is shutting down; instead, these threads are now killed when
   they try to take the GIL.
@@ -140,6 +156,129 @@
 Library
 -------
 
+- Issue #12049: Add RAND_bytes() and RAND_pseudo_bytes() functions to the ssl
+  module.
+
+- Issue #12125: fixed test failures under Solaris due to improper test cleanup.
+
+- Issue #6501: os.device_encoding() returns None on Windows if the application
+  has no console.
+
+- Issue #12132: Skip test_build_ext in case the xxmodule is not found.
+
+- Issue #12105: Add O_CLOEXEC to the os module.
+
+- Issue #12079: Decimal('Infinity').fma(Decimal('0'), (3.91224318126786e+19+0j))
+  now raises TypeError (reflecting the invalid type of the 3rd argument) rather
+  than Decimal.InvalidOperation.
+
+- Issue #12124: zipimport no longer keeps a reference to zlib.decompress(),
+  so that the zlib module can be unloaded.
+
+- Issue #12120, #12119: skip a test in packaging and distutils
+  if sys.dont_write_bytecode is set to True.
+
+- Issue #12065: connect_ex() on an SSL socket now returns the original errno
+  when the socket's timeout expires (it used to return None).
+
+- Issue #8809: The SMTP_SSL constructor and SMTP.starttls() now support
+  passing a ``context`` argument pointing to an ssl.SSLContext instance.
+  Patch by Kasun Herath.
+
+- Issue #11088: don't crash when using F5 to run a script in IDLE on MacOSX
+  with Tk 8.5.
+
+- Issue #9516: avoid errors in sysconfig when MACOSX_DEPLOYMENT_TARGET is set
+  in the shell.
+
+- Issue #8650: Make zlib module 64-bit clean. compress(), decompress() and
+  their incremental counterparts now raise OverflowError if given an input
+  larger than 4GB, instead of silently truncating the input and returning
+  an incorrect result.
+
+- Issue #12050: zlib.decompressobj().decompress() now clears the unconsumed_tail
+  attribute when called without a max_length argument.
+
+- Issue #12062: Fix a flushing bug when doing a certain type of I/O sequence
+  on a file opened in read+write mode (namely: reading, seeking a bit forward,
+  writing, then seeking before the previous write but still within buffered
+  data, and writing again).
+
+- Issue #9971: Write an optimized implementation of BufferedReader.readinto().
+  Patch by John O'Connor.
+
+- Issue #1028: Tk returns an invalid Unicode null in %A: with Tk < 8.5,
+  _tkinter.c:PythonCmd() raised UnicodeDecodeError, which caused IDLE to exit.
+  The value is now converted to a valid Unicode null in PythonCmd().
+
+- Issue #11799: urllib.request authentication handlers will raise a ValueError
+  when presented with an unsupported authentication scheme. Patch contributed
+  by Yuval Greenfield.
+
+- Issue #10419, #6011: build_scripts command of distutils handles correctly
+  non-ASCII path (path to the Python executable). Open and write the script in
+  binary mode, but ensure that the shebang is decodable from UTF-8 and from the
+  encoding of the script.
+
+- Issue #8498: In socket.accept(), allow specifying 0 as a backlog value in
+  order to accept exactly one connection.  Patch by Daniel Evers.
+
+- Issue #12011: signal.signal() and signal.siginterrupt() raise an OSError,
+  instead of a RuntimeError: OSError has an errno attribute.
+
+- Issue #3709: Add a flush_headers method to BaseHTTPRequestHandler which
+  manages the sending of headers to the output stream and the flushing of the
+  internal headers buffer. Patch contributed by Andrew Schaaf.
+
+- Issue #11743: Rewrite multiprocessing connection classes in pure Python.
+
+- Issue #11164: Stop trying to use _xmlplus in the xml module.
+
+- Issue #11888: Add log2 function to math module. Patch written by Mark
+  Dickinson.
+
+- Issue #12012: ssl.PROTOCOL_SSLv2 becomes optional.
+
+- Issue #8407: The signal handler writes the signal number as a single byte
+  instead of a nul byte into the wakeup file descriptor, so it is possible to
+  wait for more than one signal and know which signals were raised.
+
+- Issue #8407: Add pthread_kill(), sigpending() and sigwait() functions to the
+  signal module.
+
+- Issue #11927: SMTP_SSL now uses port 465 by default as documented.  Patch
+  by Kasun Herath.
+
+- Issue #12002: ftplib's abort() method raises TypeError.
+
+- Issue #11916: Add a number of MacOSX specific definitions to the errno module.
+  Patch by Pierre Carrier.
+
+- Issue #11999: fixed sporadic sync failures in mailbox.Maildir caused by its
+  trying to detect mtime changes by comparing to the system clock instead of
+  to the previous value of the mtime.
+
+- Issue #11072: added MLSD command (RFC-3659) support to ftplib.
+
+- Issue #8808: The IMAP4_SSL constructor now allows passing an SSLContext
+  parameter to control parameters of the secure channel.  Patch by Sijin
+  Joseph.
+
+- ntpath.samefile failed to notice that "a.txt" and "A.TXT" refer to the same
+  file on Windows XP, as noticed in issue #10684.
+
+- Issue #12000: When a SSL certificate has a subjectAltName without any
+  dNSName entry, ssl.match_hostname() should use the subject's commonName.
+  Patch by Nicolas Bareil.
+
+- Issue #10775: assertRaises, assertRaisesRegex, assertWarns, and
+  assertWarnsRegex now accept a keyword argument 'msg' when used as context
+  managers.  Initial patch by Winston Ewert.
+
+- Issue #10684: shutil.move used to delete a folder on case-insensitive
+  filesystems when the source and destination names were the same except
+  for the case.
+
 - Issue #11647: objects created using contextlib.contextmanager now support
   more than one call to the function when used as a decorator. Initial patch
   by Ysj Ray.
@@ -533,6 +672,8 @@
 Build
 -----
 
+- Issue #11347: Use --no-as-needed when linking libpython3.so.
+
 - Issue #11411: Fix 'make DESTDIR=' with a relative destination.
 
 - Issue #11268: Prevent Mac OS X Installer failure if Documentation
@@ -550,10 +691,19 @@
 Tools/Demos
 -----------
 
+- Issue #11996: libpython (gdb): replace the "py-bt" command with "py-bt-full"
+  and add a smarter "py-bt" command printing a classic Python traceback.
+
 - Issue #11179: Make ccbench work under Python 3.1 and 2.7 again.
 
-Extensions
-----------
+Extension Modules
+-----------------
+
+- Issue #12051: Fix segfault in json.dumps() while encoding highly-nested
+  objects using the C accelerations.
+
+- Issue #12017: Fix segfault in json.loads() while decoding highly-nested
+  objects using the C accelerations.
 
 - Issue #1838: Prevent segfault in ctypes, when _as_parameter_ on a class is set
   to an instance of the class.
@@ -561,6 +711,19 @@
 Tests
 -----
 
+- Issue #12096: Fix a race condition in test_threading.test_waitfor(). Patch
+  written by Charles-François Natali.
+
+- Issue #11614: import __hello__ prints "Hello World!". Patch written by
+  Andreas Stührk.
+
+- Issue #5723: Improve the json tests so that they are executed both with and
+  without the C accelerations.
+
+- Issue #12041: Make test_wait3 more robust.
+
+- Issue #11873: Change the regex in test_compileall to fix occasional failures
+  when the randomly generated temporary path happened to match the regex.
+
 - Issue #11958: Fix FTP tests for IPv6, bind to "::1" instead of "localhost".
   Patch written by Charles-Francois Natali.
 
@@ -874,6 +1037,9 @@
   and a non-zero offset, and an attempt to read past the end of file is made
   (IndexError is raised instead).  Patch by Ross Lagerwall.
 
+- Issue #10154, #10090: change the normalization of UTF-8 to "UTF-8" instead
+  of "UTF8" in the locale module, as the latter is not supported on MacOSX and
+  OpenBSD.
+
 - Issue #10907: Warn OS X 10.6 IDLE users to use ActiveState Tcl/Tk 8.5, rather
   than the currently problematic Apple-supplied one, when running with the
   64-/32-bit installer variant.
@@ -1081,6 +1247,8 @@
 Tests
 -----
 
+- Issue #11910: Fix test_heapq to skip the C tests when _heapq is missing.
+
 - Fix test_startfile to wait for child process to terminate before finishing.
 
 - Issue #10822: Fix test_posix:test_getgroups failure under Solaris.  Patch
diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c
index 386a880..3b8b7e9 100644
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -1,9 +1,9 @@
 /*
     An implementation of Buffered I/O as defined by PEP 3116 - "New I/O"
-    
+
     Classes defined here: BufferedIOBase, BufferedReader, BufferedWriter,
     BufferedRandom.
-    
+
     Written by Amaury Forgeot d'Arc and Antoine Pitrou
 */
 
@@ -198,7 +198,7 @@
     int readable;
     int writable;
     int deallocating;
-    
+
     /* True if this is a vanilla Buffered object (rather than a user derived
        class) *and* the raw stream is a vanilla FileIO object. */
     int fast_closed_checks;
@@ -237,7 +237,7 @@
 
 /*
     Implementation notes:
-    
+
     * BufferedReader, BufferedWriter and BufferedRandom try to share most
       methods (this is helped by the members `readable` and `writable`, which
       are initialized in the respective constructors)
@@ -255,7 +255,7 @@
     NOTE: we should try to maintain block alignment of reads and writes to the
     raw stream (according to the buffer size), but for now it is only done
     in read() and friends.
-    
+
 */
 
 /* These macros protect the buffered object against concurrent operations. */
@@ -596,7 +596,8 @@
 _bufferedreader_read_fast(buffered *self, Py_ssize_t);
 static PyObject *
 _bufferedreader_read_generic(buffered *self, Py_ssize_t);
-
+static Py_ssize_t
+_bufferedreader_raw_read(buffered *self, char *start, Py_ssize_t len);
 
 /*
  * Helpers
@@ -635,7 +636,7 @@
         if (!PyErr_Occurred())
             PyErr_Format(PyExc_IOError,
                          "Raw stream returned invalid position %" PY_PRIdOFF,
-			 (PY_OFF_T_COMPAT)n);
+                         (PY_OFF_T_COMPAT)n);
         return -1;
     }
     self->abs_pos = n;
@@ -668,7 +669,7 @@
         if (!PyErr_Occurred())
             PyErr_Format(PyExc_IOError,
                          "Raw stream returned invalid position %" PY_PRIdOFF,
-			 (PY_OFF_T_COMPAT)n);
+                         (PY_OFF_T_COMPAT)n);
         return -1;
     }
     self->abs_pos = n;
@@ -863,7 +864,7 @@
 
     if (!ENTER_BUFFERED(self))
         return NULL;
-    
+
     if (self->writable) {
         res = _bufferedwriter_flush_unlocked(self, 1);
         if (res == NULL)
@@ -912,23 +913,77 @@
 static PyObject *
 buffered_readinto(buffered *self, PyObject *args)
 {
+    Py_buffer buf;
+    Py_ssize_t n, written = 0, remaining;
     PyObject *res = NULL;
 
     CHECK_INITIALIZED(self)
-    
-    /* TODO: use raw.readinto() instead! */
+
+    if (!PyArg_ParseTuple(args, "w*:readinto", &buf))
+        return NULL;
+
+    n = Py_SAFE_DOWNCAST(READAHEAD(self), Py_off_t, Py_ssize_t);
+    if (n > 0) {
+        if (n >= buf.len) {
+            memcpy(buf.buf, self->buffer + self->pos, buf.len);
+            self->pos += buf.len;
+            res = PyLong_FromSsize_t(buf.len);
+            goto end_unlocked;
+        }
+        memcpy(buf.buf, self->buffer + self->pos, n);
+        self->pos += n;
+        written = n;
+    }
+
+    if (!ENTER_BUFFERED(self))
+        goto end_unlocked;
+
     if (self->writable) {
-        if (!ENTER_BUFFERED(self))
-            return NULL;
         res = _bufferedwriter_flush_unlocked(self, 0);
-        LEAVE_BUFFERED(self)
         if (res == NULL)
             goto end;
-        Py_DECREF(res);
+        Py_CLEAR(res);
     }
-    res = bufferediobase_readinto((PyObject *)self, args);
+
+    _bufferedreader_reset_buf(self);
+    self->pos = 0;
+
+    for (remaining = buf.len - written;
+         remaining > 0;
+         written += n, remaining -= n) {
+        /* If the number of remaining bytes is larger than the internal
+         * buffer size, copy directly into the caller's buffer. */
+        if (remaining > self->buffer_size) {
+            n = _bufferedreader_raw_read(self, (char *) buf.buf + written,
+                                         remaining);
+        }
+        else {
+            n = _bufferedreader_fill_buffer(self);
+            if (n > 0) {
+                if (n > remaining)
+                    n = remaining;
+                memcpy((char *) buf.buf + written,
+                       self->buffer + self->pos, n);
+                self->pos += n;
+                continue; /* short circuit */
+            }
+        }
+        if (n == 0 || (n == -2 && written > 0))
+            break;
+        if (n < 0) {
+            if (n == -2) {
+                Py_INCREF(Py_None);
+                res = Py_None;
+            }
+            goto end;
+        }
+    }
+    res = PyLong_FromSsize_t(written);
 
 end:
+    LEAVE_BUFFERED(self);
+end_unlocked:
+    PyBuffer_Release(&buf);
     return res;
 }
 
@@ -1573,6 +1628,7 @@
     {"read", (PyCFunction)buffered_read, METH_VARARGS},
     {"peek", (PyCFunction)buffered_peek, METH_VARARGS},
     {"read1", (PyCFunction)buffered_read1, METH_VARARGS},
+    {"readinto", (PyCFunction)buffered_readinto, METH_VARARGS},
     {"readline", (PyCFunction)buffered_readline, METH_VARARGS},
     {"seek", (PyCFunction)buffered_seek, METH_VARARGS},
     {"tell", (PyCFunction)buffered_tell, METH_NOARGS},
@@ -1838,7 +1894,7 @@
     avail = Py_SAFE_DOWNCAST(self->buffer_size - self->pos, Py_off_t, Py_ssize_t);
     if (buf.len <= avail) {
         memcpy(self->buffer + self->pos, buf.buf, buf.len);
-        if (!VALID_WRITE_BUFFER(self)) {
+        if (!VALID_WRITE_BUFFER(self) || self->write_pos > self->pos) {
             self->write_pos = self->pos;
         }
         ADJUST_POSITION(self, self->pos + buf.len);
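The new C readinto() above first drains whatever is buffered, then reads the remainder either straight into the caller's buffer (requests larger than the buffer) or via refills (smaller ones). Its observable behaviour, sketched from Python:

    import io

    raw = io.BytesIO(b"abcdefghij")
    reader = io.BufferedReader(raw, buffer_size=4)
    buf = bytearray(6)
    n = reader.readinto(buf)        # fills as much of buf as data allows
    print(n, bytes(buf[:n]))        # -> 6 b'abcdef'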
diff --git a/Modules/_json.c b/Modules/_json.c
index 9177094..a392c97 100644
--- a/Modules/_json.c
+++ b/Modules/_json.c
@@ -943,6 +943,7 @@
 
     Returns a new PyObject representation of the term.
     */
+    PyObject *res;
     Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr);
     Py_ssize_t length = PyUnicode_GET_SIZE(pystr);
     if (idx >= length) {
@@ -957,10 +958,20 @@
                 next_idx_ptr);
         case '{':
             /* object */
-            return _parse_object_unicode(s, pystr, idx + 1, next_idx_ptr);
+            if (Py_EnterRecursiveCall(" while decoding a JSON object "
+                                      "from a unicode string"))
+                return NULL;
+            res = _parse_object_unicode(s, pystr, idx + 1, next_idx_ptr);
+            Py_LeaveRecursiveCall();
+            return res;
         case '[':
             /* array */
-            return _parse_array_unicode(s, pystr, idx + 1, next_idx_ptr);
+            if (Py_EnterRecursiveCall(" while decoding a JSON array "
+                                      "from a unicode string"))
+                return NULL;
+            res = _parse_array_unicode(s, pystr, idx + 1, next_idx_ptr);
+            Py_LeaveRecursiveCall();
+            return res;
         case 'n':
             /* null */
             if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') {
@@ -1343,10 +1354,18 @@
         return _steal_list_append(rval, encoded);
     }
     else if (PyList_Check(obj) || PyTuple_Check(obj)) {
-        return encoder_listencode_list(s, rval, obj, indent_level);
+        if (Py_EnterRecursiveCall(" while encoding a JSON object"))
+            return -1;
+        rv = encoder_listencode_list(s, rval, obj, indent_level);
+        Py_LeaveRecursiveCall();
+        return rv;
     }
     else if (PyDict_Check(obj)) {
-        return encoder_listencode_dict(s, rval, obj, indent_level);
+        if (Py_EnterRecursiveCall(" while encoding a JSON object"))
+            return -1;
+        rv = encoder_listencode_dict(s, rval, obj, indent_level);
+        Py_LeaveRecursiveCall();
+        return rv;
     }
     else {
         PyObject *ident = NULL;
@@ -1372,7 +1391,12 @@
             Py_XDECREF(ident);
             return -1;
         }
+
+        if (Py_EnterRecursiveCall(" while encoding a JSON object"))
+            return -1;
         rv = encoder_listencode_obj(s, rval, newobj, indent_level);
+        Py_LeaveRecursiveCall();
+
         Py_DECREF(newobj);
         if (rv) {
             Py_XDECREF(ident);
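
With these recursion guards, a deeply nested document now exhausts the
interpreter's recursion limit cleanly instead of overflowing the C stack. A
quick sketch (the exception is RuntimeError on the 3.2/3.3 line,
RecursionError on later versions):

    import json

    deep = "[" * 100000 + "]" * 100000
    try:
        json.loads(deep)
    except (RecursionError, RuntimeError):
        print("recursion limit enforced instead of a segfault")
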
diff --git a/Modules/_multiprocessing/connection.h b/Modules/_multiprocessing/connection.h
deleted file mode 100644
index 002d5aa..0000000
--- a/Modules/_multiprocessing/connection.h
+++ /dev/null
@@ -1,527 +0,0 @@
-/*
- * Definition of a `Connection` type.
- * Used by `socket_connection.c` and `pipe_connection.c`.
- *
- * connection.h
- *
- * Copyright (c) 2006-2008, R Oudkerk --- see COPYING.txt
- */
-
-#ifndef CONNECTION_H
-#define CONNECTION_H
-
-/*
- * Read/write flags
- */
-
-#define READABLE 1
-#define WRITABLE 2
-
-#define CHECK_READABLE(self) \
-    if (!(self->flags & READABLE)) { \
-    PyErr_SetString(PyExc_IOError, "connection is write-only"); \
-    return NULL; \
-    }
-
-#define CHECK_WRITABLE(self) \
-    if (!(self->flags & WRITABLE)) { \
-    PyErr_SetString(PyExc_IOError, "connection is read-only"); \
-    return NULL; \
-    }
-
-/*
- * Allocation and deallocation
- */
-
-static PyObject *
-connection_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
-{
-    ConnectionObject *self;
-    HANDLE handle;
-    BOOL readable = TRUE, writable = TRUE;
-
-    static char *kwlist[] = {"handle", "readable", "writable", NULL};
-
-    if (!PyArg_ParseTupleAndKeywords(args, kwds, F_HANDLE "|ii", kwlist,
-                                     &handle, &readable, &writable))
-        return NULL;
-
-    if (handle == INVALID_HANDLE_VALUE || (Py_ssize_t)handle < 0) {
-        PyErr_Format(PyExc_IOError, "invalid handle %zd",
-                     (Py_ssize_t)handle);
-        return NULL;
-    }
-
-    if (!readable && !writable) {
-        PyErr_SetString(PyExc_ValueError,
-                        "either readable or writable must be true");
-        return NULL;
-    }
-
-    self = PyObject_New(ConnectionObject, type);
-    if (self == NULL)
-        return NULL;
-
-    self->weakreflist = NULL;
-    self->handle = handle;
-    self->flags = 0;
-
-    if (readable)
-        self->flags |= READABLE;
-    if (writable)
-        self->flags |= WRITABLE;
-    assert(self->flags >= 1 && self->flags <= 3);
-
-    return (PyObject*)self;
-}
-
-static void
-connection_dealloc(ConnectionObject* self)
-{
-    if (self->weakreflist != NULL)
-        PyObject_ClearWeakRefs((PyObject*)self);
-
-    if (self->handle != INVALID_HANDLE_VALUE) {
-        Py_BEGIN_ALLOW_THREADS
-        CLOSE(self->handle);
-        Py_END_ALLOW_THREADS
-    }
-    PyObject_Del(self);
-}
-
-/*
- * Functions for transferring buffers
- */
-
-static PyObject *
-connection_sendbytes(ConnectionObject *self, PyObject *args)
-{
-    Py_buffer pbuffer;
-    char *buffer;
-    Py_ssize_t length, offset=0, size=PY_SSIZE_T_MIN;
-    int res;
-
-    if (!PyArg_ParseTuple(args, F_RBUFFER "*|" F_PY_SSIZE_T F_PY_SSIZE_T,
-                          &pbuffer, &offset, &size))
-        return NULL;
-    buffer = pbuffer.buf;
-    length = pbuffer.len;
-
-    CHECK_WRITABLE(self); /* XXX release buffer in case of failure */
-
-    if (offset < 0) {
-        PyBuffer_Release(&pbuffer);
-        PyErr_SetString(PyExc_ValueError, "offset is negative");
-        return NULL;
-    }
-    if (length < offset) {
-        PyBuffer_Release(&pbuffer);
-        PyErr_SetString(PyExc_ValueError, "buffer length < offset");
-        return NULL;
-    }
-
-    if (size == PY_SSIZE_T_MIN) {
-        size = length - offset;
-    } else {
-        if (size < 0) {
-            PyBuffer_Release(&pbuffer);
-            PyErr_SetString(PyExc_ValueError, "size is negative");
-            return NULL;
-        }
-        if (offset + size > length) {
-            PyBuffer_Release(&pbuffer);
-            PyErr_SetString(PyExc_ValueError,
-                            "buffer length < offset + size");
-            return NULL;
-        }
-    }
-
-    res = conn_send_string(self, buffer + offset, size);
-
-    PyBuffer_Release(&pbuffer);
-    if (res < 0) {
-        if (PyErr_Occurred())
-            return NULL;
-        else
-            return mp_SetError(PyExc_IOError, res);
-    }
-
-    Py_RETURN_NONE;
-}
-
-static PyObject *
-connection_recvbytes(ConnectionObject *self, PyObject *args)
-{
-    char *freeme = NULL;
-    Py_ssize_t res, maxlength = PY_SSIZE_T_MAX;
-    PyObject *result = NULL;
-
-    if (!PyArg_ParseTuple(args, "|" F_PY_SSIZE_T, &maxlength))
-        return NULL;
-
-    CHECK_READABLE(self);
-
-    if (maxlength < 0) {
-        PyErr_SetString(PyExc_ValueError, "maxlength < 0");
-        return NULL;
-    }
-
-    res = conn_recv_string(self, self->buffer, CONNECTION_BUFFER_SIZE,
-                           &freeme, maxlength);
-
-    if (res < 0) {
-        if (res == MP_BAD_MESSAGE_LENGTH) {
-            if ((self->flags & WRITABLE) == 0) {
-                Py_BEGIN_ALLOW_THREADS
-                CLOSE(self->handle);
-                Py_END_ALLOW_THREADS
-                self->handle = INVALID_HANDLE_VALUE;
-            } else {
-                self->flags = WRITABLE;
-            }
-        }
-        mp_SetError(PyExc_IOError, res);
-    } else {
-        if (freeme == NULL) {
-            result = PyBytes_FromStringAndSize(self->buffer, res);
-        } else {
-            result = PyBytes_FromStringAndSize(freeme, res);
-            PyMem_Free(freeme);
-        }
-    }
-
-    return result;
-}
-
-static PyObject *
-connection_recvbytes_into(ConnectionObject *self, PyObject *args)
-{
-    char *freeme = NULL, *buffer = NULL;
-    Py_ssize_t res, length, offset = 0;
-    PyObject *result = NULL;
-    Py_buffer pbuf;
-
-    CHECK_READABLE(self);
-
-    if (!PyArg_ParseTuple(args, "w*|" F_PY_SSIZE_T,
-                          &pbuf, &offset))
-        return NULL;
-
-    buffer = pbuf.buf;
-    length = pbuf.len;
-
-    if (offset < 0) {
-        PyErr_SetString(PyExc_ValueError, "negative offset");
-        goto _error;
-    }
-
-    if (offset > length) {
-        PyErr_SetString(PyExc_ValueError, "offset too large");
-        goto _error;
-    }
-
-    res = conn_recv_string(self, buffer+offset, length-offset,
-                           &freeme, PY_SSIZE_T_MAX);
-
-    if (res < 0) {
-        if (res == MP_BAD_MESSAGE_LENGTH) {
-            if ((self->flags & WRITABLE) == 0) {
-                Py_BEGIN_ALLOW_THREADS
-                CLOSE(self->handle);
-                Py_END_ALLOW_THREADS
-                self->handle = INVALID_HANDLE_VALUE;
-            } else {
-                self->flags = WRITABLE;
-            }
-        }
-        mp_SetError(PyExc_IOError, res);
-    } else {
-        if (freeme == NULL) {
-            result = PyInt_FromSsize_t(res);
-        } else {
-            result = PyObject_CallFunction(BufferTooShort,
-                                           F_RBUFFER "#",
-                                           freeme, res);
-            PyMem_Free(freeme);
-            if (result) {
-                PyErr_SetObject(BufferTooShort, result);
-                Py_DECREF(result);
-            }
-            goto _error;
-        }
-    }
-
-_cleanup:
-    PyBuffer_Release(&pbuf);
-    return result;
-
-_error:
-    result = NULL;
-    goto _cleanup;
-}
-
-/*
- * Functions for transferring objects
- */
-
-static PyObject *
-connection_send_obj(ConnectionObject *self, PyObject *obj)
-{
-    char *buffer;
-    int res;
-    Py_ssize_t length;
-    PyObject *pickled_string = NULL;
-
-    CHECK_WRITABLE(self);
-
-    pickled_string = PyObject_CallFunctionObjArgs(pickle_dumps, obj,
-                                                  pickle_protocol, NULL);
-    if (!pickled_string)
-        goto failure;
-
-    if (PyBytes_AsStringAndSize(pickled_string, &buffer, &length) < 0)
-        goto failure;
-
-    res = conn_send_string(self, buffer, (int)length);
-
-    if (res < 0) {
-        mp_SetError(PyExc_IOError, res);
-        goto failure;
-    }
-
-    Py_XDECREF(pickled_string);
-    Py_RETURN_NONE;
-
-  failure:
-    Py_XDECREF(pickled_string);
-    return NULL;
-}
-
-static PyObject *
-connection_recv_obj(ConnectionObject *self)
-{
-    char *freeme = NULL;
-    Py_ssize_t res;
-    PyObject *temp = NULL, *result = NULL;
-
-    CHECK_READABLE(self);
-
-    res = conn_recv_string(self, self->buffer, CONNECTION_BUFFER_SIZE,
-                           &freeme, PY_SSIZE_T_MAX);
-
-    if (res < 0) {
-        if (res == MP_BAD_MESSAGE_LENGTH) {
-            if ((self->flags & WRITABLE) == 0) {
-                Py_BEGIN_ALLOW_THREADS
-                CLOSE(self->handle);
-                Py_END_ALLOW_THREADS
-                self->handle = INVALID_HANDLE_VALUE;
-            } else {
-                self->flags = WRITABLE;
-            }
-        }
-        mp_SetError(PyExc_IOError, res);
-    } else {
-        if (freeme == NULL) {
-            temp = PyBytes_FromStringAndSize(self->buffer, res);
-        } else {
-            temp = PyBytes_FromStringAndSize(freeme, res);
-            PyMem_Free(freeme);
-        }
-    }
-
-    if (temp)
-        result = PyObject_CallFunctionObjArgs(pickle_loads,
-                                              temp, NULL);
-    Py_XDECREF(temp);
-    return result;
-}
-
-/*
- * Other functions
- */
-
-static PyObject *
-connection_poll(ConnectionObject *self, PyObject *args)
-{
-    PyObject *timeout_obj = NULL;
-    double timeout = 0.0;
-    int res;
-
-    CHECK_READABLE(self);
-
-    if (!PyArg_ParseTuple(args, "|O", &timeout_obj))
-        return NULL;
-
-    if (timeout_obj == NULL) {
-        timeout = 0.0;
-    } else if (timeout_obj == Py_None) {
-        timeout = -1.0;                                 /* block forever */
-    } else {
-        timeout = PyFloat_AsDouble(timeout_obj);
-        if (PyErr_Occurred())
-            return NULL;
-        if (timeout < 0.0)
-            timeout = 0.0;
-    }
-
-    Py_BEGIN_ALLOW_THREADS
-    res = conn_poll(self, timeout, _save);
-    Py_END_ALLOW_THREADS
-
-    switch (res) {
-    case TRUE:
-        Py_RETURN_TRUE;
-    case FALSE:
-        Py_RETURN_FALSE;
-    default:
-        return mp_SetError(PyExc_IOError, res);
-    }
-}
-
-static PyObject *
-connection_fileno(ConnectionObject* self)
-{
-    if (self->handle == INVALID_HANDLE_VALUE) {
-        PyErr_SetString(PyExc_IOError, "handle is invalid");
-        return NULL;
-    }
-    return PyInt_FromLong((long)self->handle);
-}
-
-static PyObject *
-connection_close(ConnectionObject *self)
-{
-    if (self->handle != INVALID_HANDLE_VALUE) {
-        Py_BEGIN_ALLOW_THREADS
-        CLOSE(self->handle);
-        Py_END_ALLOW_THREADS
-        self->handle = INVALID_HANDLE_VALUE;
-    }
-
-    Py_RETURN_NONE;
-}
-
-static PyObject *
-connection_repr(ConnectionObject *self)
-{
-    static char *conn_type[] = {"read-only", "write-only", "read-write"};
-
-    assert(self->flags >= 1 && self->flags <= 3);
-    return FROM_FORMAT("<%s %s, handle %zd>",
-                       conn_type[self->flags - 1],
-                       CONNECTION_NAME, (Py_ssize_t)self->handle);
-}
-
-/*
- * Getters and setters
- */
-
-static PyObject *
-connection_closed(ConnectionObject *self, void *closure)
-{
-    return PyBool_FromLong((long)(self->handle == INVALID_HANDLE_VALUE));
-}
-
-static PyObject *
-connection_readable(ConnectionObject *self, void *closure)
-{
-    return PyBool_FromLong((long)(self->flags & READABLE));
-}
-
-static PyObject *
-connection_writable(ConnectionObject *self, void *closure)
-{
-    return PyBool_FromLong((long)(self->flags & WRITABLE));
-}
-
-/*
- * Tables
- */
-
-static PyMethodDef connection_methods[] = {
-    {"send_bytes", (PyCFunction)connection_sendbytes, METH_VARARGS,
-     "send the byte data from a readable buffer-like object"},
-    {"recv_bytes", (PyCFunction)connection_recvbytes, METH_VARARGS,
-     "receive byte data as a string"},
-    {"recv_bytes_into",(PyCFunction)connection_recvbytes_into,METH_VARARGS,
-     "receive byte data into a writeable buffer-like object\n"
-     "returns the number of bytes read"},
-
-    {"send", (PyCFunction)connection_send_obj, METH_O,
-     "send a (picklable) object"},
-    {"recv", (PyCFunction)connection_recv_obj, METH_NOARGS,
-     "receive a (picklable) object"},
-
-    {"poll", (PyCFunction)connection_poll, METH_VARARGS,
-     "whether there is any input available to be read"},
-    {"fileno", (PyCFunction)connection_fileno, METH_NOARGS,
-     "file descriptor or handle of the connection"},
-    {"close", (PyCFunction)connection_close, METH_NOARGS,
-     "close the connection"},
-
-    {NULL}  /* Sentinel */
-};
-
-static PyGetSetDef connection_getset[] = {
-    {"closed", (getter)connection_closed, NULL,
-     "True if the connection is closed", NULL},
-    {"readable", (getter)connection_readable, NULL,
-     "True if the connection is readable", NULL},
-    {"writable", (getter)connection_writable, NULL,
-     "True if the connection is writable", NULL},
-    {NULL}
-};
-
-/*
- * Connection type
- */
-
-PyDoc_STRVAR(connection_doc,
-             "Connection type whose constructor signature is\n\n"
-             "    Connection(handle, readable=True, writable=True).\n\n"
-             "The constructor does *not* duplicate the handle.");
-
-PyTypeObject CONNECTION_TYPE = {
-    PyVarObject_HEAD_INIT(NULL, 0)
-    /* tp_name           */ "_multiprocessing." CONNECTION_NAME,
-    /* tp_basicsize      */ sizeof(ConnectionObject),
-    /* tp_itemsize       */ 0,
-    /* tp_dealloc        */ (destructor)connection_dealloc,
-    /* tp_print          */ 0,
-    /* tp_getattr        */ 0,
-    /* tp_setattr        */ 0,
-    /* tp_reserved       */ 0,
-    /* tp_repr           */ (reprfunc)connection_repr,
-    /* tp_as_number      */ 0,
-    /* tp_as_sequence    */ 0,
-    /* tp_as_mapping     */ 0,
-    /* tp_hash           */ 0,
-    /* tp_call           */ 0,
-    /* tp_str            */ 0,
-    /* tp_getattro       */ 0,
-    /* tp_setattro       */ 0,
-    /* tp_as_buffer      */ 0,
-    /* tp_flags          */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE |
-                            Py_TPFLAGS_HAVE_WEAKREFS,
-    /* tp_doc            */ connection_doc,
-    /* tp_traverse       */ 0,
-    /* tp_clear          */ 0,
-    /* tp_richcompare    */ 0,
-    /* tp_weaklistoffset */ offsetof(ConnectionObject, weakreflist),
-    /* tp_iter           */ 0,
-    /* tp_iternext       */ 0,
-    /* tp_methods        */ connection_methods,
-    /* tp_members        */ 0,
-    /* tp_getset         */ connection_getset,
-    /* tp_base           */ 0,
-    /* tp_dict           */ 0,
-    /* tp_descr_get      */ 0,
-    /* tp_descr_set      */ 0,
-    /* tp_dictoffset     */ 0,
-    /* tp_init           */ 0,
-    /* tp_alloc          */ 0,
-    /* tp_new            */ connection_new,
-};
-
-#endif /* CONNECTION_H */
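
The C Connection types deleted here were replaced by a pure-Python
implementation (Lib/multiprocessing/connection.py); the public API is meant to
be unchanged. A short sketch of the surviving interface:

    from multiprocessing import Pipe

    parent, child = Pipe()
    child.send_bytes(b"ping")
    print(parent.recv_bytes())     # b'ping'
    parent.send({"answer": 42})    # picklable objects round-trip as before
    print(child.recv())            # {'answer': 42}
    print(parent.poll(0))          # False: nothing left to read
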
diff --git a/Modules/_multiprocessing/multiprocessing.c b/Modules/_multiprocessing/multiprocessing.c
index 7c4f52d..5d1cf56 100644
--- a/Modules/_multiprocessing/multiprocessing.c
+++ b/Modules/_multiprocessing/multiprocessing.c
@@ -16,7 +16,6 @@
 
 PyObject *create_win32_namespace(void);
 
-PyObject *pickle_dumps, *pickle_loads, *pickle_protocol;
 PyObject *ProcessError, *BufferTooShort;
 
 /*
@@ -49,16 +48,6 @@
     case MP_MEMORY_ERROR:
         PyErr_NoMemory();
         break;
-    case MP_END_OF_FILE:
-        PyErr_SetNone(PyExc_EOFError);
-        break;
-    case MP_EARLY_END_OF_FILE:
-        PyErr_SetString(PyExc_IOError,
-                        "got end of file during message");
-        break;
-    case MP_BAD_MESSAGE_LENGTH:
-        PyErr_SetString(PyExc_IOError, "bad message length");
-        break;
     case MP_EXCEPTION_HAS_BEEN_SET:
         break;
     default:
@@ -187,7 +176,7 @@
     if (PyObject_AsWriteBuffer(obj, &buffer, &buffer_len) < 0)
         return NULL;
 
-    return Py_BuildValue("N" F_PY_SSIZE_T,
+    return Py_BuildValue("Nn",
                          PyLong_FromVoidPtr(buffer), buffer_len);
 }
 
@@ -241,15 +230,6 @@
     if (!module)
         return NULL;
 
-    /* Get copy of objects from pickle */
-    temp = PyImport_ImportModule(PICKLE_MODULE);
-    if (!temp)
-        return NULL;
-    pickle_dumps = PyObject_GetAttrString(temp, "dumps");
-    pickle_loads = PyObject_GetAttrString(temp, "loads");
-    pickle_protocol = PyObject_GetAttrString(temp, "HIGHEST_PROTOCOL");
-    Py_XDECREF(temp);
-
     /* Get copy of BufferTooShort */
     temp = PyImport_ImportModule("multiprocessing");
     if (!temp)
@@ -257,12 +237,6 @@
     BufferTooShort = PyObject_GetAttrString(temp, "BufferTooShort");
     Py_XDECREF(temp);
 
-    /* Add connection type to module */
-    if (PyType_Ready(&ConnectionType) < 0)
-        return NULL;
-    Py_INCREF(&ConnectionType);
-    PyModule_AddObject(module, "Connection", (PyObject*)&ConnectionType);
-
 #if defined(MS_WINDOWS) ||                                              \
   (defined(HAVE_SEM_OPEN) && !defined(POSIX_SEMAPHORES_NOT_ENABLED))
     /* Add SemLock type to module */
@@ -286,13 +260,6 @@
 #endif
 
 #ifdef MS_WINDOWS
-    /* Add PipeConnection to module */
-    if (PyType_Ready(&PipeConnectionType) < 0)
-        return NULL;
-    Py_INCREF(&PipeConnectionType);
-    PyModule_AddObject(module, "PipeConnection",
-                       (PyObject*)&PipeConnectionType);
-
     /* Initialize win32 class and add to multiprocessing */
     temp = create_win32_namespace();
     if (!temp)
diff --git a/Modules/_multiprocessing/multiprocessing.h b/Modules/_multiprocessing/multiprocessing.h
index b9917c3..c303447 100644
--- a/Modules/_multiprocessing/multiprocessing.h
+++ b/Modules/_multiprocessing/multiprocessing.h
@@ -64,20 +64,6 @@
 
 
 /*
- * Make sure Py_ssize_t available
- */
-
-#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
-   typedef int Py_ssize_t;
-#  define PY_SSIZE_T_MAX INT_MAX
-#  define PY_SSIZE_T_MIN INT_MIN
-#  define F_PY_SSIZE_T "i"
-#  define PyInt_FromSsize_t(n) PyInt_FromLong((long)n)
-#else
-#  define F_PY_SSIZE_T "n"
-#endif
-
-/*
  * Format codes
  */
 
@@ -105,12 +91,6 @@
 #  define T_SEM_HANDLE T_POINTER
 #endif
 
-#if PY_VERSION_HEX >= 0x03000000
-#  define F_RBUFFER "y"
-#else
-#  define F_RBUFFER "s"
-#endif
-
 /*
  * Error codes which can be returned by functions called without GIL
  */
@@ -118,11 +98,8 @@
 #define MP_SUCCESS (0)
 #define MP_STANDARD_ERROR (-1)
 #define MP_MEMORY_ERROR (-1001)
-#define MP_END_OF_FILE (-1002)
-#define MP_EARLY_END_OF_FILE (-1003)
-#define MP_BAD_MESSAGE_LENGTH (-1004)
-#define MP_SOCKET_ERROR (-1005)
-#define MP_EXCEPTION_HAS_BEEN_SET (-1006)
+#define MP_SOCKET_ERROR (-1002)
+#define MP_EXCEPTION_HAS_BEEN_SET (-1003)
 
 PyObject *mp_SetError(PyObject *Type, int num);
 
@@ -130,57 +107,15 @@
  * Externs - not all will really exist on all platforms
  */
 
-extern PyObject *pickle_dumps;
-extern PyObject *pickle_loads;
-extern PyObject *pickle_protocol;
 extern PyObject *BufferTooShort;
 extern PyTypeObject SemLockType;
-extern PyTypeObject ConnectionType;
 extern PyTypeObject PipeConnectionType;
 extern HANDLE sigint_event;
 
 /*
- * Py3k compatibility
- */
-
-#if PY_VERSION_HEX >= 0x03000000
-#  define PICKLE_MODULE "pickle"
-#  define FROM_FORMAT PyUnicode_FromFormat
-#  define PyInt_FromLong PyLong_FromLong
-#  define PyInt_FromSsize_t PyLong_FromSsize_t
-#else
-#  define PICKLE_MODULE "cPickle"
-#  define FROM_FORMAT PyString_FromFormat
-#endif
-
-#ifndef PyVarObject_HEAD_INIT
-#  define PyVarObject_HEAD_INIT(type, size) PyObject_HEAD_INIT(type) size,
-#endif
-
-#ifndef Py_TPFLAGS_HAVE_WEAKREFS
-#  define Py_TPFLAGS_HAVE_WEAKREFS 0
-#endif
-
-/*
- * Connection definition
- */
-
-#define CONNECTION_BUFFER_SIZE 1024
-
-typedef struct {
-    PyObject_HEAD
-    HANDLE handle;
-    int flags;
-    PyObject *weakreflist;
-    char buffer[CONNECTION_BUFFER_SIZE];
-} ConnectionObject;
-
-/*
  * Miscellaneous
  */
 
-#define MAX_MESSAGE_LENGTH 0x7fffffff
-
 #ifndef MIN
 #  define MIN(x, y) ((x) < (y) ? x : y)
 #  define MAX(x, y) ((x) > (y) ? x : y)
diff --git a/Modules/_multiprocessing/pipe_connection.c b/Modules/_multiprocessing/pipe_connection.c
deleted file mode 100644
index 05dde0c..0000000
--- a/Modules/_multiprocessing/pipe_connection.c
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * A type which wraps a pipe handle in message oriented mode
- *
- * pipe_connection.c
- *
- * Copyright (c) 2006-2008, R Oudkerk --- see COPYING.txt
- */
-
-#include "multiprocessing.h"
-
-#define CLOSE(h) CloseHandle(h)
-
-/*
- * Send string to the pipe; assumes in message oriented mode
- */
-
-static Py_ssize_t
-conn_send_string(ConnectionObject *conn, char *string, size_t length)
-{
-    DWORD amount_written;
-    BOOL ret;
-
-    Py_BEGIN_ALLOW_THREADS
-    ret = WriteFile(conn->handle, string, length, &amount_written, NULL);
-    Py_END_ALLOW_THREADS
-
-    if (ret == 0 && GetLastError() == ERROR_NO_SYSTEM_RESOURCES) {
-        PyErr_Format(PyExc_ValueError, "Cannnot send %" PY_FORMAT_SIZE_T "d bytes over connection", length);
-        return MP_STANDARD_ERROR;
-    }
-
-    return ret ? MP_SUCCESS : MP_STANDARD_ERROR;
-}
-
-/*
- * Attempts to read into buffer, or if buffer too small into *newbuffer.
- *
- * Returns number of bytes read.  Assumes in message oriented mode.
- */
-
-static Py_ssize_t
-conn_recv_string(ConnectionObject *conn, char *buffer,
-                 size_t buflength, char **newbuffer, size_t maxlength)
-{
-    DWORD left, length, full_length, err;
-    BOOL ret;
-    *newbuffer = NULL;
-
-    Py_BEGIN_ALLOW_THREADS
-    ret = ReadFile(conn->handle, buffer, MIN(buflength, maxlength),
-                  &length, NULL);
-    Py_END_ALLOW_THREADS
-    if (ret)
-        return length;
-
-    err = GetLastError();
-    if (err != ERROR_MORE_DATA) {
-        if (err == ERROR_BROKEN_PIPE)
-            return MP_END_OF_FILE;
-        return MP_STANDARD_ERROR;
-    }
-
-    if (!PeekNamedPipe(conn->handle, NULL, 0, NULL, NULL, &left))
-        return MP_STANDARD_ERROR;
-
-    full_length = length + left;
-    if (full_length > maxlength)
-        return MP_BAD_MESSAGE_LENGTH;
-
-    *newbuffer = PyMem_Malloc(full_length);
-    if (*newbuffer == NULL)
-        return MP_MEMORY_ERROR;
-
-    memcpy(*newbuffer, buffer, length);
-
-    Py_BEGIN_ALLOW_THREADS
-    ret = ReadFile(conn->handle, *newbuffer+length, left, &length, NULL);
-    Py_END_ALLOW_THREADS
-    if (ret) {
-        assert(length == left);
-        return full_length;
-    } else {
-        PyMem_Free(*newbuffer);
-        return MP_STANDARD_ERROR;
-    }
-}
-
-/*
- * Check whether any data is available for reading
- */
-
-static int
-conn_poll(ConnectionObject *conn, double timeout, PyThreadState *_save)
-{
-    DWORD bytes, deadline, delay;
-    int difference, res;
-    BOOL block = FALSE;
-
-    if (!PeekNamedPipe(conn->handle, NULL, 0, NULL, &bytes, NULL))
-        return MP_STANDARD_ERROR;
-
-    if (timeout == 0.0)
-        return bytes > 0;
-
-    if (timeout < 0.0)
-        block = TRUE;
-    else
-        /* XXX does not check for overflow */
-        deadline = GetTickCount() + (DWORD)(1000 * timeout + 0.5);
-
-    Sleep(0);
-
-    for (delay = 1 ; ; delay += 1) {
-        if (!PeekNamedPipe(conn->handle, NULL, 0, NULL, &bytes, NULL))
-            return MP_STANDARD_ERROR;
-        else if (bytes > 0)
-            return TRUE;
-
-        if (!block) {
-            difference = deadline - GetTickCount();
-            if (difference < 0)
-                return FALSE;
-            if ((int)delay > difference)
-                delay = difference;
-        }
-
-        if (delay > 20)
-            delay = 20;
-
-        Sleep(delay);
-
-        /* check for signals */
-        Py_BLOCK_THREADS
-        res = PyErr_CheckSignals();
-        Py_UNBLOCK_THREADS
-
-        if (res)
-            return MP_EXCEPTION_HAS_BEEN_SET;
-    }
-}
-
-/*
- * "connection.h" defines the PipeConnection type using the definitions above
- */
-
-#define CONNECTION_NAME "PipeConnection"
-#define CONNECTION_TYPE PipeConnectionType
-
-#include "connection.h"
diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c
index c2cd914..6749f23 100644
--- a/Modules/_multiprocessing/semaphore.c
+++ b/Modules/_multiprocessing/semaphore.c
@@ -481,7 +481,7 @@
 static PyObject *
 semlock_count(SemLockObject *self)
 {
-    return PyInt_FromLong((long)self->count);
+    return PyLong_FromLong((long)self->count);
 }
 
 static PyObject *
@@ -505,7 +505,7 @@
        the number of waiting threads */
     if (sval < 0)
         sval = 0;
-    return PyInt_FromLong((long)sval);
+    return PyLong_FromLong((long)sval);
 #endif
 }
 
diff --git a/Modules/_multiprocessing/socket_connection.c b/Modules/_multiprocessing/socket_connection.c
deleted file mode 100644
index 7ebf338..0000000
--- a/Modules/_multiprocessing/socket_connection.c
+++ /dev/null
@@ -1,202 +0,0 @@
-/*
- * A type which wraps a socket
- *
- * socket_connection.c
- *
- * Copyright (c) 2006-2008, R Oudkerk --- see COPYING.txt
- */
-
-#include "multiprocessing.h"
-
-#ifdef MS_WINDOWS
-#  define WRITE(h, buffer, length) send((SOCKET)h, buffer, length, 0)
-#  define READ(h, buffer, length) recv((SOCKET)h, buffer, length, 0)
-#  define CLOSE(h) closesocket((SOCKET)h)
-#else
-#  define WRITE(h, buffer, length) write(h, buffer, length)
-#  define READ(h, buffer, length) read(h, buffer, length)
-#  define CLOSE(h) close(h)
-#endif
-
-/*
- * Send string to file descriptor
- */
-
-static Py_ssize_t
-_conn_sendall(HANDLE h, char *string, size_t length)
-{
-    char *p = string;
-    Py_ssize_t res;
-
-    while (length > 0) {
-        res = WRITE(h, p, length);
-        if (res < 0)
-            return MP_SOCKET_ERROR;
-        length -= res;
-        p += res;
-    }
-
-    return MP_SUCCESS;
-}
-
-/*
- * Receive string of exact length from file descriptor
- */
-
-static Py_ssize_t
-_conn_recvall(HANDLE h, char *buffer, size_t length)
-{
-    size_t remaining = length;
-    Py_ssize_t temp;
-    char *p = buffer;
-
-    while (remaining > 0) {
-        temp = READ(h, p, remaining);
-        if (temp <= 0) {
-            if (temp == 0)
-                return remaining == length ?
-                    MP_END_OF_FILE : MP_EARLY_END_OF_FILE;
-            else
-                return temp;
-        }
-        remaining -= temp;
-        p += temp;
-    }
-
-    return MP_SUCCESS;
-}
-
-/*
- * Send a string prepended by the string length in network byte order
- */
-
-static Py_ssize_t
-conn_send_string(ConnectionObject *conn, char *string, size_t length)
-{
-    Py_ssize_t res;
-    /* The "header" of the message is a 32 bit unsigned number (in
-       network order) which specifies the length of the "body".  If
-       the message is shorter than about 16kb then it is quicker to
-       combine the "header" and the "body" of the message and send
-       them at once. */
-    if (length < (16*1024)) {
-        char *message;
-
-        message = PyMem_Malloc(length+4);
-        if (message == NULL)
-            return MP_MEMORY_ERROR;
-
-        *(UINT32*)message = htonl((UINT32)length);
-        memcpy(message+4, string, length);
-        Py_BEGIN_ALLOW_THREADS
-        res = _conn_sendall(conn->handle, message, length+4);
-        Py_END_ALLOW_THREADS
-        PyMem_Free(message);
-    } else {
-        UINT32 lenbuff;
-
-        if (length > MAX_MESSAGE_LENGTH)
-            return MP_BAD_MESSAGE_LENGTH;
-
-        lenbuff = htonl((UINT32)length);
-        Py_BEGIN_ALLOW_THREADS
-        res = _conn_sendall(conn->handle, (char*)&lenbuff, 4) ||
-            _conn_sendall(conn->handle, string, length);
-        Py_END_ALLOW_THREADS
-    }
-    return res;
-}
-
-/*
- * Attempts to read into buffer, or failing that into *newbuffer
- *
- * Returns number of bytes read.
- */
-
-static Py_ssize_t
-conn_recv_string(ConnectionObject *conn, char *buffer,
-                 size_t buflength, char **newbuffer, size_t maxlength)
-{
-    int res;
-    UINT32 ulength;
-
-    *newbuffer = NULL;
-
-    Py_BEGIN_ALLOW_THREADS
-    res = _conn_recvall(conn->handle, (char*)&ulength, 4);
-    Py_END_ALLOW_THREADS
-    if (res < 0)
-        return res;
-
-    ulength = ntohl(ulength);
-    if (ulength > maxlength)
-        return MP_BAD_MESSAGE_LENGTH;
-
-    if (ulength <= buflength) {
-        Py_BEGIN_ALLOW_THREADS
-        res = _conn_recvall(conn->handle, buffer, (size_t)ulength);
-        Py_END_ALLOW_THREADS
-        return res < 0 ? res : ulength;
-    } else {
-        *newbuffer = PyMem_Malloc((size_t)ulength);
-        if (*newbuffer == NULL)
-            return MP_MEMORY_ERROR;
-        Py_BEGIN_ALLOW_THREADS
-        res = _conn_recvall(conn->handle, *newbuffer, (size_t)ulength);
-        Py_END_ALLOW_THREADS
-        return res < 0 ? (Py_ssize_t)res : (Py_ssize_t)ulength;
-    }
-}
-
-/*
- * Check whether any data is available for reading -- neg timeout blocks
- */
-
-static int
-conn_poll(ConnectionObject *conn, double timeout, PyThreadState *_save)
-{
-    int res;
-    fd_set rfds;
-
-    /*
-     * Verify the handle, issue 3321. Not required for windows.
-     */
-    #ifndef MS_WINDOWS
-        if (((int)conn->handle) < 0 || ((int)conn->handle) >= FD_SETSIZE) {
-            Py_BLOCK_THREADS
-            PyErr_SetString(PyExc_IOError, "handle out of range in select()");
-            Py_UNBLOCK_THREADS
-            return MP_EXCEPTION_HAS_BEEN_SET;
-        }
-    #endif
-
-    FD_ZERO(&rfds);
-    FD_SET((SOCKET)conn->handle, &rfds);
-
-    if (timeout < 0.0) {
-        res = select((int)conn->handle+1, &rfds, NULL, NULL, NULL);
-    } else {
-        struct timeval tv;
-        tv.tv_sec = (long)timeout;
-        tv.tv_usec = (long)((timeout - tv.tv_sec) * 1e6 + 0.5);
-        res = select((int)conn->handle+1, &rfds, NULL, NULL, &tv);
-    }
-
-    if (res < 0) {
-        return MP_SOCKET_ERROR;
-    } else if (FD_ISSET(conn->handle, &rfds)) {
-        return TRUE;
-    } else {
-        assert(res == 0);
-        return FALSE;
-    }
-}
-
-/*
- * "connection.h" defines the Connection type using defs above
- */
-
-#define CONNECTION_NAME "Connection"
-#define CONNECTION_TYPE ConnectionType
-
-#include "connection.h"
diff --git a/Modules/_multiprocessing/win32_functions.c b/Modules/_multiprocessing/win32_functions.c
index 452d608..12dc0cd 100644
--- a/Modules/_multiprocessing/win32_functions.c
+++ b/Modules/_multiprocessing/win32_functions.c
@@ -215,6 +215,164 @@
     Py_RETURN_NONE;
 }
 
+static PyObject *
+win32_closesocket(PyObject *self, PyObject *args)
+{
+    HANDLE handle;
+    int ret;
+
+    if (!PyArg_ParseTuple(args, F_HANDLE ":closesocket" , &handle))
+        return NULL;
+
+    Py_BEGIN_ALLOW_THREADS
+    ret = closesocket((SOCKET) handle);
+    Py_END_ALLOW_THREADS
+
+    if (ret)
+        return PyErr_SetExcFromWindowsErr(PyExc_IOError, WSAGetLastError());
+    Py_RETURN_NONE;
+}
+
+static PyObject *
+win32_recv(PyObject *self, PyObject *args)
+{
+    HANDLE handle;
+    int size, nread;
+    PyObject *buf;
+
+    if (!PyArg_ParseTuple(args, F_HANDLE "i:recv" , &handle, &size))
+        return NULL;
+
+    buf = PyBytes_FromStringAndSize(NULL, size);
+    if (!buf)
+        return NULL;
+
+    Py_BEGIN_ALLOW_THREADS
+    nread = recv((SOCKET) handle, PyBytes_AS_STRING(buf), size, 0);
+    Py_END_ALLOW_THREADS
+
+    if (nread < 0) {
+        Py_DECREF(buf);
+        return PyErr_SetExcFromWindowsErr(PyExc_IOError, WSAGetLastError());
+    }
+    _PyBytes_Resize(&buf, nread);
+    return buf;
+}
+
+static PyObject *
+win32_send(PyObject *self, PyObject *args)
+{
+    HANDLE handle;
+    Py_buffer buf;
+    int ret;
+
+    if (!PyArg_ParseTuple(args, F_HANDLE "y*:send" , &handle, &buf))
+        return NULL;
+
+    Py_BEGIN_ALLOW_THREADS
+    ret = send((SOCKET) handle, buf.buf, buf.len, 0);
+    Py_END_ALLOW_THREADS
+
+    PyBuffer_Release(&buf);
+    if (ret < 0)
+        return PyErr_SetExcFromWindowsErr(PyExc_IOError, WSAGetLastError());
+    return PyLong_FromLong(ret);
+}
+
+static PyObject *
+win32_WriteFile(PyObject *self, PyObject *args)
+{
+    HANDLE handle;
+    Py_buffer buf;
+    DWORD written;
+    BOOL ret;
+
+    if (!PyArg_ParseTuple(args, F_HANDLE "y*:WriteFile" , &handle, &buf))
+        return NULL;
+
+    Py_BEGIN_ALLOW_THREADS
+    ret = WriteFile(handle, buf.buf, buf.len, &written, NULL);
+    Py_END_ALLOW_THREADS
+
+    PyBuffer_Release(&buf);
+    if (!ret)
+        return PyErr_SetExcFromWindowsErr(PyExc_IOError, 0);
+    return PyLong_FromLong(written);
+}
+
+static PyObject *
+win32_ReadFile(PyObject *self, PyObject *args)
+{
+    HANDLE handle;
+    int size;
+    DWORD nread;
+    PyObject *buf;
+    BOOL ret;
+
+    if (!PyArg_ParseTuple(args, F_HANDLE "i:ReadFile" , &handle, &size))
+        return NULL;
+
+    buf = PyBytes_FromStringAndSize(NULL, size);
+    if (!buf)
+        return NULL;
+
+    Py_BEGIN_ALLOW_THREADS
+    ret = ReadFile(handle, PyBytes_AS_STRING(buf), size, &nread, NULL);
+    Py_END_ALLOW_THREADS
+
+    if (!ret && GetLastError() != ERROR_MORE_DATA) {
+        Py_DECREF(buf);
+        return PyErr_SetExcFromWindowsErr(PyExc_IOError, 0);
+    }
+    if (_PyBytes_Resize(&buf, nread))
+        return NULL;
+    return Py_BuildValue("NN", buf, PyBool_FromLong(ret));
+}
+
+static PyObject *
+win32_PeekNamedPipe(PyObject *self, PyObject *args)
+{
+    HANDLE handle;
+    int size = 0;
+    PyObject *buf = NULL;
+    DWORD nread, navail, nleft;
+    BOOL ret;
+
+    if (!PyArg_ParseTuple(args, F_HANDLE "|i:PeekNamedPipe" , &handle, &size))
+        return NULL;
+
+    if (size < 0) {
+        PyErr_SetString(PyExc_ValueError, "negative size");
+        return NULL;
+    }
+
+    if (size) {
+        buf = PyBytes_FromStringAndSize(NULL, size);
+        if (!buf)
+            return NULL;
+        Py_BEGIN_ALLOW_THREADS
+        ret = PeekNamedPipe(handle, PyBytes_AS_STRING(buf), size, &nread,
+                            &navail, &nleft);
+        Py_END_ALLOW_THREADS
+        if (!ret) {
+            Py_DECREF(buf);
+            return PyErr_SetExcFromWindowsErr(PyExc_IOError, 0);
+        }
+        if (_PyBytes_Resize(&buf, nread))
+            return NULL;
+        return Py_BuildValue("Nii", buf, navail, nleft);
+    }
+    else {
+        Py_BEGIN_ALLOW_THREADS
+        ret = PeekNamedPipe(handle, NULL, 0, NULL, &navail, &nleft);
+        Py_END_ALLOW_THREADS
+        if (!ret) {
+            return PyErr_SetExcFromWindowsErr(PyExc_IOError, 0);
+        }
+        return Py_BuildValue("ii", navail, nleft);
+    }
+}
+
 static PyMethodDef win32_methods[] = {
     WIN32_FUNCTION(CloseHandle),
     WIN32_FUNCTION(GetLastError),
@@ -223,8 +381,14 @@
     WIN32_FUNCTION(ConnectNamedPipe),
     WIN32_FUNCTION(CreateFile),
     WIN32_FUNCTION(CreateNamedPipe),
+    WIN32_FUNCTION(ReadFile),
+    WIN32_FUNCTION(PeekNamedPipe),
     WIN32_FUNCTION(SetNamedPipeHandleState),
     WIN32_FUNCTION(WaitNamedPipe),
+    WIN32_FUNCTION(WriteFile),
+    WIN32_FUNCTION(closesocket),
+    WIN32_FUNCTION(recv),
+    WIN32_FUNCTION(send),
     {NULL}
 };
 
@@ -244,6 +408,8 @@
     Py_INCREF(&Win32Type);
 
     WIN32_CONSTANT(F_DWORD, ERROR_ALREADY_EXISTS);
+    WIN32_CONSTANT(F_DWORD, ERROR_BROKEN_PIPE);
+    WIN32_CONSTANT(F_DWORD, ERROR_NO_SYSTEM_RESOURCES);
     WIN32_CONSTANT(F_DWORD, ERROR_PIPE_BUSY);
     WIN32_CONSTANT(F_DWORD, ERROR_PIPE_CONNECTED);
     WIN32_CONSTANT(F_DWORD, ERROR_SEM_TIMEOUT);
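
These wrappers give the pure-Python connection code direct access to the Win32
pipe and socket primitives. A Windows-only sketch, using msvcrt to turn an
anonymous-pipe fd into a HANDLE (_multiprocessing.win32 is a private
namespace, shown for illustration only):

    import msvcrt, os
    from _multiprocessing import win32

    r_fd, w_fd = os.pipe()               # anonymous pipe; the CRT fds wrap real HANDLEs
    r_h = msvcrt.get_osfhandle(r_fd)
    w_h = msvcrt.get_osfhandle(w_fd)

    print(win32.WriteFile(w_h, b"ping")) # 4: bytes written
    print(win32.PeekNamedPipe(r_h))      # (4, 0): bytes available, bytes left in message
    data, complete = win32.ReadFile(r_h, 4)
    print(data, complete)                # b'ping' True
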
diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c
index eb099c4..7f5c213 100644
--- a/Modules/_sqlite/cursor.c
+++ b/Modules/_sqlite/cursor.c
@@ -430,9 +430,14 @@
     if (cur->closed) {
         PyErr_SetString(pysqlite_ProgrammingError, "Cannot operate on a closed cursor.");
         return 0;
-    } else {
-        return pysqlite_check_thread(cur->connection) && pysqlite_check_connection(cur->connection);
     }
+
+    if (cur->locked) {
+        PyErr_SetString(pysqlite_ProgrammingError, "Recursive use of cursors not allowed.");
+        return 0;
+    }
+
+    return pysqlite_check_thread(cur->connection) && pysqlite_check_connection(cur->connection);
 }
 
 PyObject* _pysqlite_query_execute(pysqlite_Cursor* self, int multiple, PyObject* args)
@@ -455,9 +460,10 @@
     int allow_8bit_chars;
 
     if (!check_cursor(self)) {
-        return NULL;
+        goto error;
     }
 
+    self->locked = 1;
     self->reset = 0;
 
     /* Make shooting yourself in the foot with not utf-8 decodable 8-bit-strings harder */
@@ -470,12 +476,12 @@
     if (multiple) {
         /* executemany() */
         if (!PyArg_ParseTuple(args, "OO", &operation, &second_argument)) {
-            return NULL;
+            goto error;
         }
 
         if (!PyUnicode_Check(operation)) {
             PyErr_SetString(PyExc_ValueError, "operation parameter must be str");
-            return NULL;
+            goto error;
         }
 
         if (PyIter_Check(second_argument)) {
@@ -486,23 +492,23 @@
             /* sequence */
             parameters_iter = PyObject_GetIter(second_argument);
             if (!parameters_iter) {
-                return NULL;
+                goto error;
             }
         }
     } else {
         /* execute() */
         if (!PyArg_ParseTuple(args, "O|O", &operation, &second_argument)) {
-            return NULL;
+            goto error;
         }
 
         if (!PyUnicode_Check(operation)) {
             PyErr_SetString(PyExc_ValueError, "operation parameter must be str");
-            return NULL;
+            goto error;
         }
 
         parameters_list = PyList_New(0);
         if (!parameters_list) {
-            return NULL;
+            goto error;
         }
 
         if (second_argument == NULL) {
@@ -742,7 +748,8 @@
      * ROLLBACK could have happened */
     #ifdef SQLITE_VERSION_NUMBER
     #if SQLITE_VERSION_NUMBER >= 3002002
-    self->connection->inTransaction = !sqlite3_get_autocommit(self->connection->db);
+    if (self->connection && self->connection->db)
+        self->connection->inTransaction = !sqlite3_get_autocommit(self->connection->db);
     #endif
     #endif
 
@@ -750,6 +757,8 @@
     Py_XDECREF(parameters_iter);
     Py_XDECREF(parameters_list);
 
+    self->locked = 0;
+
     if (PyErr_Occurred()) {
         self->rowcount = -1L;
         return NULL;
diff --git a/Modules/_sqlite/cursor.h b/Modules/_sqlite/cursor.h
index 5d8b5c1..118ba38 100644
--- a/Modules/_sqlite/cursor.h
+++ b/Modules/_sqlite/cursor.h
@@ -42,6 +42,7 @@
     pysqlite_Statement* statement;
     int closed;
     int reset;
+    int locked;
     int initialized;
 
     /* the next row to be returned, NULL if no next row available */
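
The new locked flag turns re-entrant use of a cursor, previously a crash or
silent corruption, into a clean ProgrammingError. A minimal reproduction:

    import sqlite3

    con = sqlite3.connect(":memory:")
    cur = con.cursor()
    cur.execute("CREATE TABLE t (x)")

    def params():
        yield (1,)
        cur.execute("SELECT 1")          # re-enters cur while executemany holds it

    try:
        cur.executemany("INSERT INTO t (x) VALUES (?)", params())
    except sqlite3.ProgrammingError as exc:
        print(exc)                       # Recursive use of cursors not allowed.
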
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index 0ae02a1..3f631e3 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -63,8 +63,10 @@
 };
 
 enum py_ssl_version {
+#ifndef OPENSSL_NO_SSL2
     PY_SSL_VERSION_SSL2,
-    PY_SSL_VERSION_SSL3,
+#endif
+    PY_SSL_VERSION_SSL3=1,
     PY_SSL_VERSION_SSL23,
     PY_SSL_VERSION_TLS1
 };
@@ -1447,8 +1449,10 @@
         ctx = SSL_CTX_new(TLSv1_method());
     else if (proto_version == PY_SSL_VERSION_SSL3)
         ctx = SSL_CTX_new(SSLv3_method());
+#ifndef OPENSSL_NO_SSL2
     else if (proto_version == PY_SSL_VERSION_SSL2)
         ctx = SSL_CTX_new(SSLv2_method());
+#endif
     else if (proto_version == PY_SSL_VERSION_SSL23)
         ctx = SSL_CTX_new(SSLv23_method());
     else
@@ -1883,6 +1887,69 @@
 bound on the entropy contained in string.  See RFC 1750.");
 
 static PyObject *
+PySSL_RAND(int len, int pseudo)
+{
+    int ok;
+    PyObject *bytes;
+    unsigned long err;
+    const char *errstr;
+    PyObject *v;
+
+    bytes = PyBytes_FromStringAndSize(NULL, len);
+    if (bytes == NULL)
+        return NULL;
+    if (pseudo) {
+        ok = RAND_pseudo_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len);
+        if (ok == 0 || ok == 1)
+            return Py_BuildValue("NO", bytes, ok == 1 ? Py_True : Py_False);
+    }
+    else {
+        ok = RAND_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len);
+        if (ok == 1)
+            return bytes;
+    }
+    Py_DECREF(bytes);
+
+    err = ERR_get_error();
+    errstr = ERR_reason_error_string(err);
+    v = Py_BuildValue("(ks)", err, errstr);
+    if (v != NULL) {
+        PyErr_SetObject(PySSLErrorObject, v);
+        Py_DECREF(v);
+    }
+    return NULL;
+}
+
+static PyObject *
+PySSL_RAND_bytes(PyObject *self, PyObject *args)
+{
+    int len;
+    if (!PyArg_ParseTuple(args, "i:RAND_bytes", &len))
+        return NULL;
+    return PySSL_RAND(len, 0);
+}
+
+PyDoc_STRVAR(PySSL_RAND_bytes_doc,
+"RAND_bytes(n) -> bytes\n\
+\n\
+Generate n cryptographically strong pseudo-random bytes.");
+
+static PyObject *
+PySSL_RAND_pseudo_bytes(PyObject *self, PyObject *args)
+{
+    int len;
+    if (!PyArg_ParseTuple(args, "i:RAND_pseudo_bytes", &len))
+        return NULL;
+    return PySSL_RAND(len, 1);
+}
+
+PyDoc_STRVAR(PySSL_RAND_pseudo_bytes_doc,
+"RAND_pseudo_bytes(n) -> (bytes, is_cryptographic)\n\
+\n\
+Generate n pseudo-random bytes. is_cryptographic is True if the bytes\
+generated are cryptographically strong.");
+
+static PyObject *
 PySSL_RAND_status(PyObject *self)
 {
     return PyLong_FromLong(RAND_status());
@@ -1935,6 +2002,10 @@
 #ifdef HAVE_OPENSSL_RAND
     {"RAND_add",            PySSL_RAND_add, METH_VARARGS,
      PySSL_RAND_add_doc},
+    {"RAND_bytes",          PySSL_RAND_bytes, METH_VARARGS,
+     PySSL_RAND_bytes_doc},
+    {"RAND_pseudo_bytes",   PySSL_RAND_pseudo_bytes, METH_VARARGS,
+     PySSL_RAND_pseudo_bytes_doc},
     {"RAND_egd",            PySSL_RAND_egd, METH_VARARGS,
      PySSL_RAND_egd_doc},
     {"RAND_status",         (PyCFunction)PySSL_RAND_status, METH_NOARGS,
@@ -2107,8 +2178,10 @@
                             PY_SSL_CERT_REQUIRED);
 
     /* protocol versions */
+#ifndef OPENSSL_NO_SSL2
     PyModule_AddIntConstant(m, "PROTOCOL_SSLv2",
                             PY_SSL_VERSION_SSL2);
+#endif
     PyModule_AddIntConstant(m, "PROTOCOL_SSLv3",
                             PY_SSL_VERSION_SSL3);
     PyModule_AddIntConstant(m, "PROTOCOL_SSLv23",
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c
index 91b1b53..6879975 100644
--- a/Modules/_tkinter.c
+++ b/Modules/_tkinter.c
@@ -2022,7 +2022,19 @@
 
     for (i = 0; i < (argc - 1); i++) {
         PyObject *s = PyUnicode_FromString(argv[i + 1]);
-        if (!s || PyTuple_SetItem(arg, i, s)) {
+        if (!s) {
+            /* Is Tk leaking 0xC080 in %A - a "modified" utf-8 null? */
+            if (PyErr_ExceptionMatches(PyExc_UnicodeDecodeError) &&
+                !strcmp(argv[i + 1], "\xC0\x80")) {
+                PyErr_Clear();
+                /* Convert to "strict" utf-8 null */
+                s = PyUnicode_FromStringAndSize("\0", 1);
+            } else {
+                Py_DECREF(arg);
+                return PythonCmd_Error(interp);
+            }
+        }
+        if (PyTuple_SetItem(arg, i, s)) {
             Py_DECREF(arg);
             return PythonCmd_Error(interp);
         }
diff --git a/Modules/errnomodule.c b/Modules/errnomodule.c
index 86720af..86b0a01 100644
--- a/Modules/errnomodule.c
+++ b/Modules/errnomodule.c
@@ -84,6 +84,8 @@
      * The names and comments are borrowed from linux/include/errno.h,
      * which should be pretty all-inclusive.  However, the Solaris specific
      * names and comments are borrowed from sys/errno.h in Solaris.
+     * MacOSX specific names and comments are borrowed from sys/errno.h in
+     * MacOSX.
      */
 
 #ifdef ENODEV
@@ -848,6 +850,59 @@
     inscode(d, ds, de, "ENOTACTIVE", ENOTACTIVE, "Facility is not active");
 #endif
 
+    /* MacOSX specific errnos */
+#ifdef EAUTH
+    inscode(d, ds, de, "EAUTH", EAUTH, "Authentication error");
+#endif
+#ifdef EBADARCH
+    inscode(d, ds, de, "EBADARCH", EBADARCH, "Bad CPU type in executable");
+#endif
+#ifdef EBADEXEC
+    inscode(d, ds, de, "EBADEXEC", EBADEXEC, "Bad executable (or shared library)");
+#endif
+#ifdef EBADMACHO
+    inscode(d, ds, de, "EBADMACHO", EBADMACHO, "Malformed Mach-o file");
+#endif
+#ifdef EBADRPC
+    inscode(d, ds, de, "EBADRPC", EBADRPC, "RPC struct is bad");
+#endif
+#ifdef EDEVERR
+    inscode(d, ds, de, "EDEVERR", EDEVERR, "Device error");
+#endif
+#ifdef EFTYPE
+    inscode(d, ds, de, "EFTYPE", EFTYPE, "Inappropriate file type or format");
+#endif
+#ifdef ENEEDAUTH
+    inscode(d, ds, de, "ENEEDAUTH", ENEEDAUTH, "Need authenticator");
+#endif
+#ifdef ENOATTR
+    inscode(d, ds, de, "ENOATTR", ENOATTR, "Attribute not found");
+#endif
+#ifdef ENOPOLICY
+    inscode(d, ds, de, "ENOPOLICY", ENOPOLICY, "Policy not found");
+#endif
+#ifdef EPROCLIM
+    inscode(d, ds, de, "EPROCLIM", EPROCLIM, "Too many processes");
+#endif
+#ifdef EPROCUNAVAIL
+    inscode(d, ds, de, "EPROCUNAVAIL", EPROCUNAVAIL, "Bad procedure for program");
+#endif
+#ifdef EPROGMISMATCH
+    inscode(d, ds, de, "EPROGMISMATCH", EPROGMISMATCH, "Program version wrong");
+#endif
+#ifdef EPROGUNAVAIL
+    inscode(d, ds, de, "EPROGUNAVAIL", EPROGUNAVAIL, "RPC prog. not avail");
+#endif
+#ifdef EPWROFF
+    inscode(d, ds, de, "EPWROFF", EPWROFF, "Device power is off");
+#endif
+#ifdef ERPCMISMATCH
+    inscode(d, ds, de, "ERPCMISMATCH", ERPCMISMATCH, "RPC version wrong");
+#endif
+#ifdef ESHLIBVERS
+    inscode(d, ds, de, "ESHLIBVERS", ESHLIBVERS, "Shared library version mismatch");
+#endif
+
     Py_DECREF(de);
     return m;
 }
diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c
index 48c4391..83c47ce 100644
--- a/Modules/faulthandler.c
+++ b/Modules/faulthandler.c
@@ -53,8 +53,8 @@
     int exit;
     char *header;
     size_t header_len;
-    /* The main thread always hold this lock. It is only released when
-       faulthandler_thread() is interrupted until this thread exits, or at
+    /* The main thread always holds this lock. It is only released when
+       faulthandler_thread() is interrupted before this thread exits, or at
        Python exit. */
     PyThread_type_lock cancel_event;
     /* released by child thread when joined */
@@ -186,7 +186,7 @@
 {
     static char *kwlist[] = {"file", "all_threads", NULL};
     PyObject *file = NULL;
-    int all_threads = 0;
+    int all_threads = 1;
     PyThreadState *tstate;
     const char *errmsg;
     int fd;
@@ -218,18 +218,18 @@
 }
 
 
-/* Handler of SIGSEGV, SIGFPE, SIGABRT, SIGBUS and SIGILL signals.
+/* Handler for SIGSEGV, SIGFPE, SIGABRT, SIGBUS and SIGILL signals.
 
    Display the current Python traceback, restore the previous handler and call
    the previous handler.
 
-   On Windows, don't call explictly the previous handler, because Windows
+   On Windows, don't explicitly call the previous handler, because the Windows
    signal handler would not be called (for an unknown reason). The execution of
    the program continues at faulthandler_fatal_error() exit, but the same
    instruction will raise the same fault (signal), and so the previous handler
    will be called.
 
-   This function is signal safe and should only call signal safe functions. */
+   This function is signal-safe and should only call signal-safe functions. */
 
 static void
 faulthandler_fatal_error(int signum)
@@ -238,6 +238,7 @@
     unsigned int i;
     fault_handler_t *handler = NULL;
     PyThreadState *tstate;
+    int save_errno = errno;
 
     if (!fatal_error.enabled)
         return;
@@ -266,7 +267,7 @@
 
 #ifdef WITH_THREAD
     /* SIGSEGV, SIGFPE, SIGABRT, SIGBUS and SIGILL are synchronous signals and
-       so are delivered to the thread that caused the fault. Get the Python
+       are thus delivered to the thread that caused the fault. Get the Python
        thread state of the current thread.
 
        PyThreadState_Get() doesn't give the state of the thread that caused the
@@ -285,9 +286,10 @@
             _Py_DumpTraceback(fd, tstate);
     }
 
+    errno = save_errno;
 #ifdef MS_WINDOWS
     if (signum == SIGSEGV) {
-        /* don't call explictly the previous handler for SIGSEGV in this signal
+        /* don't explicitly call the previous handler for SIGSEGV in this signal
            handler, because the Windows signal handler would not be called */
         return;
     }
@@ -304,7 +306,7 @@
 {
     static char *kwlist[] = {"file", "all_threads", NULL};
     PyObject *file = NULL;
-    int all_threads = 0;
+    int all_threads = 1;
     unsigned int i;
     fault_handler_t *handler;
 #ifdef HAVE_SIGACTION
@@ -455,7 +457,7 @@
 static void
 cancel_dump_tracebacks_later(void)
 {
-    /* notify cancellation */
+    /* Notify cancellation */
     PyThread_release_lock(thread.cancel_event);
 
     /* Wait for thread to join */
@@ -578,7 +580,7 @@
     cancel_dump_tracebacks_later();
     Py_RETURN_NONE;
 }
-#endif /* FAULTHANDLER_LATER */
+#endif  /* FAULTHANDLER_LATER */
 
 #ifdef FAULTHANDLER_USER
 /* Handler for user signals (e.g. SIGUSR1).
@@ -593,6 +595,7 @@
 {
     user_signal_t *user;
     PyThreadState *tstate;
+    int save_errno = errno;
 
     user = &user_signals[signum];
     if (!user->enabled)
@@ -614,6 +617,7 @@
             return;
         _Py_DumpTraceback(user->fd, tstate);
     }
+    errno = save_errno;
 }
 
 static int
@@ -644,7 +648,7 @@
     static char *kwlist[] = {"signum", "file", "all_threads", NULL};
     int signum;
     PyObject *file = NULL;
-    int all_threads = 0;
+    int all_threads = 1;
     int fd;
     user_signal_t *user;
     _Py_sighandler_t previous;
@@ -777,7 +781,7 @@
 #if defined(MS_WINDOWS)
     /* For SIGSEGV, faulthandler_fatal_error() restores the previous signal
        handler and then gives back the execution flow to the program (without
-       calling explicitly the previous error handler). In a normal case, the
+       explicitly calling the previous error handler). In a normal case, the
        SIGSEGV was raised by the kernel because of a fault, and so if the
        program retries to execute the same instruction, the fault will be
        raised again.
@@ -801,22 +805,23 @@
        PowerPC. Use volatile to disable compile-time optimizations. */
     volatile int x = 1, y = 0, z;
     z = x / y;
-    /* if the division by zero didn't raise a SIGFPE, raise it manually */
+    /* If the division by zero didn't raise a SIGFPE (e.g. on PowerPC),
+       raise it manually. */
     raise(SIGFPE);
-    Py_RETURN_NONE;
+    /* This line is never reached, but we pretend to use z here to
+       silence a compiler warning about an unused result. */
+    return PyLong_FromLong(z);
 }
 
 static PyObject *
 faulthandler_sigabrt(PyObject *self, PyObject *args)
 {
-#if _MSC_VER
-    /* If Python is compiled in debug mode with Visual Studio, abort() opens
-       a popup asking the user how to handle the assertion. Use raise(SIGABRT)
-       instead. */
-    raise(SIGABRT);
-#else
-    abort();
+#ifdef _MSC_VER
+    /* Visual Studio: configure abort() not to display an error message or
+       open a popup asking to report the fault. */
+    _set_abort_behavior(0, _WRITE_ABORT_MSG | _CALL_REPORTFAULT);
 #endif
+    abort();
     Py_RETURN_NONE;
 }
 
@@ -849,7 +854,7 @@
 }
 
 #if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION)
-void*
+static void*
 stack_overflow(void *min_sp, void *max_sp, size_t *depth)
 {
     /* allocate 4096 bytes on the stack at each call */
@@ -912,7 +917,7 @@
 static PyMethodDef module_methods[] = {
     {"enable",
      (PyCFunction)faulthandler_enable, METH_VARARGS|METH_KEYWORDS,
-     PyDoc_STR("enable(file=sys.stderr, all_threads=False): "
+     PyDoc_STR("enable(file=sys.stderr, all_threads=True): "
                "enable the fault handler")},
     {"disable", (PyCFunction)faulthandler_disable_py, METH_NOARGS,
      PyDoc_STR("disable(): disable the fault handler")},
@@ -920,7 +925,7 @@
      PyDoc_STR("is_enabled()->bool: check if the handler is enabled")},
     {"dump_traceback",
      (PyCFunction)faulthandler_dump_traceback_py, METH_VARARGS|METH_KEYWORDS,
-     PyDoc_STR("dump_traceback(file=sys.stderr, all_threads=False): "
+     PyDoc_STR("dump_traceback(file=sys.stderr, all_threads=True): "
                "dump the traceback of the current thread, or of all threads "
                "if all_threads is True, into file")},
 #ifdef FAULTHANDLER_LATER
@@ -939,7 +944,7 @@
 #ifdef FAULTHANDLER_USER
     {"register",
      (PyCFunction)faulthandler_register, METH_VARARGS|METH_KEYWORDS,
-     PyDoc_STR("register(signum, file=sys.stderr, all_threads=False): "
+     PyDoc_STR("register(signum, file=sys.stderr, all_threads=True): "
                "register an handler for the signal 'signum': dump the "
                "traceback of the current thread, or of all threads if "
                "all_threads is True, into file")},
@@ -972,14 +977,14 @@
     {"_stack_overflow", (PyCFunction)faulthandler_stack_overflow, METH_NOARGS,
      PyDoc_STR("_stack_overflow(): recursive call to raise a stack overflow")},
 #endif
-    {NULL, NULL} /* terminator */
+    {NULL, NULL}  /* sentinel */
 };
 
 static struct PyModuleDef module_def = {
     PyModuleDef_HEAD_INIT,
     "faulthandler",
     module_doc,
-    0, /* non negative size to be able to unload the module */
+    0, /* non-negative size to be able to unload the module */
     module_methods,
     NULL,
     faulthandler_traverse,
@@ -993,8 +998,8 @@
     return PyModule_Create(&module_def);
 }
 
-/* Call faulthandler.enable() if PYTHONFAULTHANDLER environment variable is
-   defined, or if sys._xoptions has a 'faulthandler' key. */
+/* Call faulthandler.enable() if the PYTHONFAULTHANDLER environment variable
+   is defined, or if sys._xoptions has a 'faulthandler' key. */
 
 static int
 faulthandler_env_options(void)
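
Note the behavior change running through this file: all_threads now defaults
to True everywhere. A quick check:

    import faulthandler, sys

    faulthandler.enable()                         # dumps all threads by default now
    faulthandler.dump_traceback(file=sys.stderr)  # immediate dump, also all threads
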
diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c
index d173bff..cebb4ff 100644
--- a/Modules/mathmodule.c
+++ b/Modules/mathmodule.c
@@ -577,6 +577,61 @@
     }
 }
 
+/*
+   log2: log to base 2.
+
+   Uses an algorithm that should:
+
+     (a) produce exact results for powers of 2, and
+     (b) give a monotonic log2 (for positive finite floats),
+         assuming that the system log is monotonic.
+*/
+
+static double
+m_log2(double x)
+{
+    if (!Py_IS_FINITE(x)) {
+        if (Py_IS_NAN(x))
+            return x; /* log2(nan) = nan */
+        else if (x > 0.0)
+            return x; /* log2(+inf) = +inf */
+        else {
+            errno = EDOM;
+            return Py_NAN; /* log2(-inf) = nan, invalid-operation */
+        }
+    }
+
+    if (x > 0.0) {
+#ifdef HAVE_LOG2
+        return log2(x);
+#else
+        double m;
+        int e;
+        m = frexp(x, &e);
+        /* We want log2(m * 2**e) == log(m) / log(2) + e.  Care is needed when
+         * x is just greater than 1.0: in that case e is 1, log(m) is negative,
+         * and we get significant cancellation error from the addition of
+         * log(m) / log(2) to e.  The slight rewrite of the expression below
+         * avoids this problem.
+         */
+        if (x >= 1.0) {
+            return log(2.0 * m) / log(2.0) + (e - 1);
+        }
+        else {
+            return log(m) / log(2.0) + e;
+        }
+#endif
+    }
+    else if (x == 0.0) {
+        errno = EDOM;
+        return -Py_HUGE_VAL; /* log2(0) = -inf, divide-by-zero */
+    }
+    else {
+        errno = EDOM;
+        return Py_NAN; /* log2(x) = nan for x < 0, invalid-operation */
+    }
+}
+
 static double
 m_log10(double x)
 {
@@ -1623,6 +1678,15 @@
 If the base is not specified, returns the natural logarithm (base e) of x.");
 
 static PyObject *
+math_log2(PyObject *self, PyObject *arg)
+{
+    return loghelper(arg, m_log2, "log2");
+}
+
+PyDoc_STRVAR(math_log2_doc,
+"log2(x)\n\nReturn the base 2 logarithm of x.");
+
+static PyObject *
 math_log10(PyObject *self, PyObject *arg)
 {
     return loghelper(arg, m_log10, "log10");
@@ -1894,6 +1958,7 @@
     {"log",             math_log,       METH_VARARGS,   math_log_doc},
     {"log1p",           math_log1p,     METH_O,         math_log1p_doc},
     {"log10",           math_log10,     METH_O,         math_log10_doc},
+    {"log2",            math_log2,      METH_O,         math_log2_doc},
     {"modf",            math_modf,      METH_O,         math_modf_doc},
     {"pow",             math_pow,       METH_VARARGS,   math_pow_doc},
     {"radians",         math_radians,   METH_O,         math_radians_doc},
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index 396243e..add3b35 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -8495,6 +8495,9 @@
 device_encoding(PyObject *self, PyObject *args)
 {
     int fd;
+#if defined(MS_WINDOWS) || defined(MS_WIN64)
+    UINT cp;
+#endif
     if (!PyArg_ParseTuple(args, "i:device_encoding", &fd))
         return NULL;
     if (!_PyVerify_fd(fd) || !isatty(fd)) {
@@ -8502,16 +8505,16 @@
         return Py_None;
     }
 #if defined(MS_WINDOWS) || defined(MS_WIN64)
-    if (fd == 0) {
-        char buf[100];
-        sprintf(buf, "cp%d", GetConsoleCP());
-        return PyUnicode_FromString(buf);
-    }
-    if (fd == 1 || fd == 2) {
-        char buf[100];
-        sprintf(buf, "cp%d", GetConsoleOutputCP());
-        return PyUnicode_FromString(buf);
-    }
+    if (fd == 0)
+        cp = GetConsoleCP();
+    else if (fd == 1 || fd == 2)
+        cp = GetConsoleOutputCP();
+    else
+        cp = 0;
+    /* GetConsoleCP() and GetConsoleOutputCP() return 0 if the application
+       has no console */
+    if (cp != 0)
+        return PyUnicode_FromFormat("cp%u", (unsigned int)cp);
 #elif defined(CODESET)
     {
         char *codeset = nl_langinfo(CODESET);
@@ -9783,6 +9786,9 @@
 #ifdef PRIO_USER
     if (ins(d, "PRIO_USER", (long)PRIO_USER)) return -1;
 #endif
+#ifdef O_CLOEXEC
+    if (ins(d, "O_CLOEXEC", (long)O_CLOEXEC)) return -1;
+#endif
 /* posix - constants for *at functions */
 #ifdef AT_SYMLINK_NOFOLLOW
         if (ins(d, "AT_SYMLINK_NOFOLLOW", (long)AT_SYMLINK_NOFOLLOW)) return -1;
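
Note: both posixmodule.c changes are observable from Python; a hedged sketch
(O_CLOEXEC exists only where the platform defines it):

    import os
    import sys

    # "cpNNN" on a Windows console, the locale CODESET elsewhere,
    # None for any fd that is not a tty.
    print(os.device_encoding(sys.stdout.fileno()))

    if hasattr(os, 'O_CLOEXEC'):
        fd = os.open(os.devnull, os.O_RDONLY | os.O_CLOEXEC)
        os.close(fd)
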
diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c
index c8626ad..ff65f04 100644
--- a/Modules/signalmodule.c
+++ b/Modules/signalmodule.c
@@ -88,12 +88,12 @@
 static pid_t main_pid;
 #endif
 
-static struct {
-    int tripped;
+static volatile struct {
+    sig_atomic_t tripped;
     PyObject *func;
 } Handlers[NSIG];
 
-static sig_atomic_t wakeup_fd = -1;
+static volatile sig_atomic_t wakeup_fd = -1;
 
 /* Speed up sigcheck() when none tripped */
 static volatile sig_atomic_t is_tripped = 0;
@@ -176,6 +176,7 @@
 static void
 trip_signal(int sig_num)
 {
+    unsigned char byte;
     Handlers[sig_num].tripped = 1;
     if (is_tripped)
         return;
@@ -183,8 +184,10 @@
        cleared in PyErr_CheckSignals() before .tripped. */
     is_tripped = 1;
     Py_AddPendingCall(checksignals_witharg, NULL);
-    if (wakeup_fd != -1)
-        write(wakeup_fd, "\0", 1);
+    if (wakeup_fd != -1) {
+        byte = (unsigned char)sig_num;
+        write(wakeup_fd, &byte, 1);
+    }
 }
 
 static void
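
Note: trip_signal() now writes the signal number itself to the wakeup fd instead
of a NUL byte, so a reader can tell which signals fired. A POSIX-only sketch:

    import os
    import signal

    signal.signal(signal.SIGUSR1, lambda signum, frame: None)
    r, w = os.pipe()
    signal.set_wakeup_fd(w)
    os.kill(os.getpid(), signal.SIGUSR1)
    print(list(os.read(r, 16)))   # e.g. [10]: the signal number, not 0
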
@@ -321,7 +324,7 @@
     else
         func = signal_handler;
     if (PyOS_setsig(sig_num, func) == SIG_ERR) {
-        PyErr_SetFromErrno(PyExc_RuntimeError);
+        PyErr_SetFromErrno(PyExc_OSError);
         return NULL;
     }
     old_handler = Handlers[sig_num].func;
@@ -390,7 +393,7 @@
         return NULL;
     }
     if (siginterrupt(sig_num, flag)<0) {
-        PyErr_SetFromErrno(PyExc_RuntimeError);
+        PyErr_SetFromErrno(PyExc_OSError);
         return NULL;
     }
 
@@ -503,7 +506,7 @@
 Returns current value of given itimer.");
 #endif
 
-#ifdef PYPTHREAD_SIGMASK
+#if defined(PYPTHREAD_SIGMASK) || defined(HAVE_SIGWAIT)
 /* Convert an iterable to a sigset.
    Return 0 on success, return -1 and raise an exception on error. */
 
@@ -551,7 +554,9 @@
     Py_XDECREF(iterator);
     return result;
 }
+#endif
 
+#if defined(PYPTHREAD_SIGMASK) || defined(HAVE_SIGPENDING)
 static PyObject*
 sigset_to_set(sigset_t mask)
 {
@@ -585,7 +590,9 @@
     }
     return result;
 }
+#endif
 
+#ifdef PYPTHREAD_SIGMASK
 static PyObject *
 signal_pthread_sigmask(PyObject *self, PyObject *args)
 {
@@ -603,7 +610,7 @@
     err = pthread_sigmask(how, &mask, &previous);
     if (err != 0) {
         errno = err;
-        PyErr_SetFromErrno(PyExc_RuntimeError);
+        PyErr_SetFromErrno(PyExc_OSError);
         return NULL;
     }
 
@@ -621,6 +628,88 @@
 #endif   /* #ifdef PYPTHREAD_SIGMASK */
 
 
+#ifdef HAVE_SIGPENDING
+static PyObject *
+signal_sigpending(PyObject *self)
+{
+    int err;
+    sigset_t mask;
+    err = sigpending(&mask);
+    if (err)
+        return PyErr_SetFromErrno(PyExc_OSError);
+    return sigset_to_set(mask);
+}
+
+PyDoc_STRVAR(signal_sigpending_doc,
+"sigpending() -> list\n\
+\n\
+Examine pending signals.");
+#endif   /* #ifdef HAVE_SIGPENDING */
+
+
+#ifdef HAVE_SIGWAIT
+static PyObject *
+signal_sigwait(PyObject *self, PyObject *args)
+{
+    PyObject *signals;
+    sigset_t set;
+    int err, signum;
+
+    if (!PyArg_ParseTuple(args, "O:sigwait", &signals))
+        return NULL;
+
+    if (iterable_to_sigset(signals, &set))
+        return NULL;
+
+    err = sigwait(&set, &signum);
+    if (err) {
+        errno = err;
+        return PyErr_SetFromErrno(PyExc_OSError);
+    }
+
+    return PyLong_FromLong(signum);
+}
+
+PyDoc_STRVAR(signal_sigwait_doc,
+"sigwait(sigset) -> signum\n\
+\n\
+Wait for a signal.");
+#endif   /* #ifdef HAVE_SIGWAIT */
+
+
+#if defined(HAVE_PTHREAD_KILL) && defined(WITH_THREAD)
+static PyObject *
+signal_pthread_kill(PyObject *self, PyObject *args)
+{
+    long tid;
+    int signum;
+    int err;
+
+    if (!PyArg_ParseTuple(args, "li:pthread_kill", &tid, &signum))
+        return NULL;
+
+    err = pthread_kill((pthread_t)tid, signum);
+    if (err != 0) {
+        errno = err;
+        PyErr_SetFromErrno(PyExc_OSError);
+        return NULL;
+    }
+
+    /* the signal may have been sent to the current thread */
+    if (PyErr_CheckSignals())
+        return NULL;
+
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(signal_pthread_kill_doc,
+"pthread_kill(thread_id, signum)\n\
+\n\
+Send a signal to a thread.");
+#endif   /* #if defined(HAVE_PTHREAD_KILL) && defined(WITH_THREAD) */
+
+
+
 /* List of functions defined in the module */
 static PyMethodDef signal_methods[] = {
 #ifdef HAVE_ALARM
@@ -644,10 +733,22 @@
 #endif
     {"default_int_handler", signal_default_int_handler,
      METH_VARARGS, default_int_handler_doc},
+#if defined(HAVE_PTHREAD_KILL) && defined(WITH_THREAD)
+    {"pthread_kill",            (PyCFunction)signal_pthread_kill,
+     METH_VARARGS, signal_pthread_kill_doc},
+#endif
 #ifdef PYPTHREAD_SIGMASK
     {"pthread_sigmask",         (PyCFunction)signal_pthread_sigmask,
      METH_VARARGS, signal_pthread_sigmask_doc},
 #endif
+#ifdef HAVE_SIGPENDING
+    {"sigpending",              (PyCFunction)signal_sigpending,
+     METH_NOARGS, signal_sigpending_doc},
+#endif
+#ifdef HAVE_SIGWAIT
+    {"sigwait",                 (PyCFunction)signal_sigwait,
+     METH_VARARGS, signal_sigwait_doc},
+#endif
     {NULL,                      NULL}           /* sentinel */
 };
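
Note: a small sketch exercising the new functions together, assuming a Linux-like
platform where they all compile in (sigwait() returns immediately here because
the blocked signal is already pending; pthread_kill() works the same way with a
thread id from threading.get_ident()):

    import os
    import signal

    signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGUSR1])
    os.kill(os.getpid(), signal.SIGUSR1)
    print(signal.sigpending())               # the pending set contains SIGUSR1
    print(signal.sigwait([signal.SIGUSR1]))  # -> 10 (SIGUSR1) on Linux
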
 
diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c
index 1631363..8107b98 100644
--- a/Modules/socketmodule.c
+++ b/Modules/socketmodule.c
@@ -42,6 +42,9 @@
 - socket.inet_ntoa(packed IP) -> IP address string
 - socket.getdefaulttimeout() -> None | float
 - socket.setdefaulttimeout(None | float)
+- socket.if_nameindex() -> list of tuples (if_index, if_name)
+- socket.if_nametoindex(name) -> corresponding interface index
+- socket.if_indextoname(index) -> corresponding interface name
 - an Internet socket address is a pair (hostname, port)
   where hostname can be anything recognized by gethostbyname()
   (including the dd.dd.dd.dd notation) and port is in host byte order
@@ -133,6 +136,9 @@
 setsockopt(level, optname, value) -- set socket options\n\
 settimeout(None | float) -- set or clear the timeout\n\
 shutdown(how) -- shut down traffic in one or both directions\n\
+if_nameindex() -- return all network interface indices and names\n\
+if_nametoindex(name) -- return the corresponding interface index\n\
+if_indextoname(index) -- return the corresponding interface name\n\
 \n\
  [*] not available on all platforms!");
 
@@ -250,6 +256,14 @@
 #include <sys/types.h>
 #endif
 
+#ifdef HAVE_SYS_SOCKET_H
+#include <sys/socket.h>
+#endif
+
+#ifdef HAVE_NET_IF_H
+#include <net/if.h>
+#endif
+
 /* Generic socket object definitions and includes */
 #define PySocket_BUILDING_SOCKET
 #include "socketmodule.h"
@@ -2220,8 +2234,10 @@
     if (backlog == -1 && PyErr_Occurred())
         return NULL;
     Py_BEGIN_ALLOW_THREADS
-    if (backlog < 1)
-        backlog = 1;
+    /* To avoid problems on systems that don't allow a negative backlog
+     * (which doesn't make sense anyway) we force a minimum value of 0. */
+    if (backlog < 0)
+        backlog = 0;
     res = listen(s->sock_fd, backlog);
     Py_END_ALLOW_THREADS
     if (res < 0)
@@ -2234,8 +2250,9 @@
 "listen(backlog)\n\
 \n\
 Enable a server to accept connections.  The backlog argument must be at\n\
-least 1; it specifies the number of unaccepted connection that the system\n\
-will allow before refusing new connections.");
+least 0 (if it is lower, it is set to 0); it specifies the number of\n\
+unaccepted connections that the system will allow before refusing new\n\
+connections.");
 
 
 /*
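
Note: with the relaxed backlog handling, listen(0) is now a legal way to request
no queue of unaccepted connections; a quick sketch:

    import socket

    s = socket.socket()
    s.bind(('127.0.0.1', 0))
    s.listen(0)   # previously raised to 1; negative values now clamp to 0
    s.close()
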
@@ -4263,6 +4280,101 @@
 A value of None indicates that new socket objects have no timeout.\n\
 When the socket module is first imported, the default is None.");
 
+#ifdef HAVE_IF_NAMEINDEX
+/* Python API for getting interface indices and names */
+
+static PyObject *
+socket_if_nameindex(PyObject *self, PyObject *arg)
+{
+    PyObject *list;
+    int i;
+    struct if_nameindex *ni;
+
+    ni = if_nameindex();
+    if (ni == NULL) {
+        PyErr_SetFromErrno(socket_error);
+        return NULL;
+    }
+
+    list = PyList_New(0);
+    if (list == NULL) {
+        if_freenameindex(ni);
+        return NULL;
+    }
+
+    for (i = 0; ni[i].if_index != 0 && i < INT_MAX; i++) {
+        PyObject *ni_tuple = Py_BuildValue("IO&",
+                ni[i].if_index, PyUnicode_DecodeFSDefault, ni[i].if_name);
+
+        if (ni_tuple == NULL || PyList_Append(list, ni_tuple) == -1) {
+            Py_XDECREF(ni_tuple);
+            Py_DECREF(list);
+            if_freenameindex(ni);
+            return NULL;
+        }
+        Py_DECREF(ni_tuple);
+    }
+
+    if_freenameindex(ni);
+    return list;
+}
+
+PyDoc_STRVAR(if_nameindex_doc,
+"if_nameindex()\n\
+\n\
+Returns a list of network interface information (index, name) tuples.");
+
+static PyObject *
+socket_if_nametoindex(PyObject *self, PyObject *args)
+{
+    PyObject *oname;
+    unsigned long index;
+
+    if (!PyArg_ParseTuple(args, "O&:if_nametoindex",
+                          PyUnicode_FSConverter, &oname))
+        return NULL;
+
+    index = if_nametoindex(PyBytes_AS_STRING(oname));
+    Py_DECREF(oname);
+    if (index == 0) {
+        /* if_nametoindex() doesn't set errno */
+        PyErr_SetString(socket_error, "no interface with this name");
+        return NULL;
+    }
+
+    return PyLong_FromUnsignedLong(index);
+}
+
+PyDoc_STRVAR(if_nametoindex_doc,
+"if_nametoindex(if_name)\n\
+\n\
+Returns the interface index corresponding to the interface name if_name.");
+
+static PyObject *
+socket_if_indextoname(PyObject *self, PyObject *arg)
+{
+    unsigned long index;
+    char name[IF_NAMESIZE + 1];
+
+    index = PyLong_AsUnsignedLong(arg);
+    if (index == (unsigned long) -1 && PyErr_Occurred())
+        return NULL;
+
+    if (if_indextoname(index, name) == NULL) {
+        PyErr_SetFromErrno(socket_error);
+        return NULL;
+    }
+
+    return PyUnicode_DecodeFSDefault(name);
+}
+
+PyDoc_STRVAR(if_indextoname_doc,
+"if_indextoname(if_index)\n\
+\n\
+Returns the interface name corresponding to the interface index if_index.");
+
+#endif  /* HAVE_IF_NAMEINDEX */
+
 
 /* List of functions exported by this module. */
 
@@ -4319,6 +4431,14 @@
      METH_NOARGS, getdefaulttimeout_doc},
     {"setdefaulttimeout",       socket_setdefaulttimeout,
      METH_O, setdefaulttimeout_doc},
+#ifdef HAVE_IF_NAMEINDEX
+    {"if_nameindex", socket_if_nameindex,
+     METH_NOARGS, if_nameindex_doc},
+    {"if_nametoindex", socket_if_nametoindex,
+     METH_VARARGS, if_nametoindex_doc},
+    {"if_indextoname", socket_if_indextoname,
+     METH_O, if_indextoname_doc},
+#endif
     {NULL,                      NULL}            /* Sentinel */
 };
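
Note: where HAVE_IF_NAMEINDEX is detected, the three new functions round-trip; a
minimal consistency sketch:

    import socket

    for index, name in socket.if_nameindex():
        assert socket.if_nametoindex(name) == index
        assert socket.if_indextoname(index) == name
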
 
diff --git a/Modules/socketmodule.h b/Modules/socketmodule.h
index f064795..db44fd3 100644
--- a/Modules/socketmodule.h
+++ b/Modules/socketmodule.h
@@ -59,9 +59,12 @@
 #include <bluetooth.h>
 #endif
 
+#ifdef HAVE_NET_IF_H
+# include <net/if.h>
+#endif
+
 #ifdef HAVE_NETPACKET_PACKET_H
 # include <sys/ioctl.h>
-# include <net/if.h>
 # include <netpacket/packet.h>
 #endif
 
diff --git a/Modules/zipimport.c b/Modules/zipimport.c
index 4079261..de89a76 100644
--- a/Modules/zipimport.c
+++ b/Modules/zipimport.c
@@ -867,35 +867,33 @@
 
 /* Return the zlib.decompress function object, or NULL if zlib couldn't
    be imported. The function is cached when found, so subsequent calls
-   don't import zlib again. Returns a *borrowed* reference.
-   XXX This makes zlib.decompress immortal. */
+   don't import zlib again. */
 static PyObject *
 get_decompress_func(void)
 {
-    static PyObject *decompress = NULL;
+    static int importing_zlib = 0;
+    PyObject *zlib;
+    PyObject *decompress;
 
-    if (decompress == NULL) {
-        PyObject *zlib;
-        static int importing_zlib = 0;
-
-        if (importing_zlib != 0)
-            /* Someone has a zlib.py[co] in their Zip file;
-               let's avoid a stack overflow. */
-            return NULL;
-        importing_zlib = 1;
-        zlib = PyImport_ImportModuleNoBlock("zlib");
-        importing_zlib = 0;
-        if (zlib != NULL) {
-            decompress = PyObject_GetAttrString(zlib,
-                                                "decompress");
-            Py_DECREF(zlib);
-        }
-        else
-            PyErr_Clear();
-        if (Py_VerboseFlag)
-            PySys_WriteStderr("# zipimport: zlib %s\n",
-                zlib != NULL ? "available": "UNAVAILABLE");
+    if (importing_zlib != 0)
+        /* Someone has a zlib.py[co] in their Zip file;
+           let's avoid a stack overflow. */
+        return NULL;
+    importing_zlib = 1;
+    zlib = PyImport_ImportModuleNoBlock("zlib");
+    importing_zlib = 0;
+    if (zlib != NULL) {
+        decompress = PyObject_GetAttrString(zlib,
+                                            "decompress");
+        Py_DECREF(zlib);
     }
+    else {
+        PyErr_Clear();
+        decompress = NULL;
+    }
+    if (Py_VerboseFlag)
+        PySys_WriteStderr("# zipimport: zlib %s\n",
+            zlib != NULL ? "available": "UNAVAILABLE");
     return decompress;
 }
 
@@ -986,6 +984,7 @@
         goto error;
     }
     data = PyObject_CallFunction(decompress, "Oi", raw_data, -15);
+    Py_DECREF(decompress);
 error:
     Py_DECREF(raw_data);
     return data;
@@ -1196,7 +1195,7 @@
 get_module_code(ZipImporter *self, PyObject *fullname,
                 int *p_ispackage, PyObject **p_modpath)
 {
-    PyObject *code, *toc_entry, *subname;
+    PyObject *code = NULL, *toc_entry, *subname;
     PyObject *path, *fullpath;
     struct st_zip_searchorder *zso;
 
diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c
index 206ef0c..ba0e59c 100644
--- a/Modules/zlibmodule.c
+++ b/Modules/zlibmodule.c
@@ -116,7 +116,7 @@
 {
     PyObject *ReturnVal = NULL;
     Py_buffer pinput;
-    Byte *input, *output;
+    Byte *input, *output = NULL;
     unsigned int length;
     int level=Z_DEFAULT_COMPRESSION, err;
     z_stream zst;
@@ -127,20 +127,19 @@
 
     if (pinput.len > UINT_MAX) {
         PyErr_SetString(PyExc_OverflowError,
-            "size does not fit in an unsigned int");
-        return NULL;
+                        "Size does not fit in an unsigned int");
+        goto error;
     }
-    length = pinput.len;
     input = pinput.buf;
+    length = pinput.len;
 
     zst.avail_out = length + length/1000 + 12 + 1;
 
     output = (Byte*)malloc(zst.avail_out);
     if (output == NULL) {
-        PyBuffer_Release(&pinput);
         PyErr_SetString(PyExc_MemoryError,
                         "Can't allocate memory to compress data");
-        return NULL;
+        goto error;
     }
 
     /* Past the point of no return.  From here on out, we need to make sure
@@ -203,7 +202,7 @@
 static PyObject *
 PyZlib_decompress(PyObject *self, PyObject *args)
 {
-    PyObject *result_str;
+    PyObject *result_str = NULL;
     Py_buffer pinput;
     Byte *input;
     unsigned int length;
@@ -218,11 +217,11 @@
 
     if (pinput.len > UINT_MAX) {
         PyErr_SetString(PyExc_OverflowError,
-            "size does not fit in an unsigned int");
-        return NULL;
+                        "Size does not fit in an unsigned int");
+        goto error;
     }
-    length = pinput.len;
     input = pinput.buf;
+    length = pinput.len;
 
     if (r_strlen <= 0)
         r_strlen = 1;
@@ -230,10 +229,8 @@
     zst.avail_in = length;
     zst.avail_out = r_strlen;
 
-    if (!(result_str = PyBytes_FromStringAndSize(NULL, r_strlen))) {
-        PyBuffer_Release(&pinput);
-        return NULL;
-    }
+    if (!(result_str = PyBytes_FromStringAndSize(NULL, r_strlen)))
+        goto error;
 
     zst.zalloc = (alloc_func)NULL;
     zst.zfree = (free_func)Z_NULL;
@@ -423,22 +420,26 @@
 static PyObject *
 PyZlib_objcompress(compobject *self, PyObject *args)
 {
-    int err, inplen;
+    int err;
+    unsigned int inplen;
     Py_ssize_t length = DEFAULTALLOC;
-    PyObject *RetVal;
+    PyObject *RetVal = NULL;
     Py_buffer pinput;
     Byte *input;
     unsigned long start_total_out;
 
     if (!PyArg_ParseTuple(args, "y*:compress", &pinput))
         return NULL;
+    if (pinput.len > UINT_MAX) {
+        PyErr_SetString(PyExc_OverflowError,
+                        "Size does not fit in an unsigned int");
+        goto error_outer;
+    }
     input = pinput.buf;
     inplen = pinput.len;
 
-    if (!(RetVal = PyBytes_FromStringAndSize(NULL, length))) {
-        PyBuffer_Release(&pinput);
-        return NULL;
-    }
+    if (!(RetVal = PyBytes_FromStringAndSize(NULL, length)))
+        goto error_outer;
 
     ENTER_ZLIB(self);
 
@@ -487,6 +488,7 @@
 
  error:
     LEAVE_ZLIB(self);
+ error_outer:
     PyBuffer_Release(&pinput);
     return RetVal;
 }
@@ -505,9 +507,10 @@
 static PyObject *
 PyZlib_objdecompress(compobject *self, PyObject *args)
 {
-    int err, inplen, max_length = 0;
+    int err, max_length = 0;
+    unsigned int inplen;
     Py_ssize_t old_length, length = DEFAULTALLOC;
-    PyObject *RetVal;
+    PyObject *RetVal = NULL;
     Py_buffer pinput;
     Byte *input;
     unsigned long start_total_out;
@@ -515,22 +518,24 @@
     if (!PyArg_ParseTuple(args, "y*|i:decompress", &pinput,
                           &max_length))
         return NULL;
+    if (pinput.len > UINT_MAX) {
+        PyErr_SetString(PyExc_OverflowError,
+                        "Size does not fit in an unsigned int");
+        goto error_outer;
+    }
     input = pinput.buf;
     inplen = pinput.len;
     if (max_length < 0) {
-        PyBuffer_Release(&pinput);
         PyErr_SetString(PyExc_ValueError,
                         "max_length must be greater than zero");
-        return NULL;
+        goto error_outer;
     }
 
     /* limit amount of data allocated to max_length */
     if (max_length && length > max_length)
         length = max_length;
-    if (!(RetVal = PyBytes_FromStringAndSize(NULL, length))) {
-        PyBuffer_Release(&pinput);
-        return NULL;
-    }
+    if (!(RetVal = PyBytes_FromStringAndSize(NULL, length)))
+        goto error_outer;
 
     ENTER_ZLIB(self);
 
@@ -574,17 +579,22 @@
         Py_END_ALLOW_THREADS
     }
 
-    /* Not all of the compressed data could be accommodated in the output buffer
-       of specified size. Return the unconsumed tail in an attribute.*/
     if(max_length) {
+        /* Not all of the compressed data could be accommodated in a buffer of
+           the specified size. Return the unconsumed tail in an attribute. */
         Py_DECREF(self->unconsumed_tail);
         self->unconsumed_tail = PyBytes_FromStringAndSize((char *)self->zst.next_in,
                                                            self->zst.avail_in);
-        if(!self->unconsumed_tail) {
-            Py_DECREF(RetVal);
-            RetVal = NULL;
-            goto error;
-        }
+    }
+    else if (PyBytes_GET_SIZE(self->unconsumed_tail) > 0) {
+        /* All of the compressed data was consumed. Clear unconsumed_tail. */
+        Py_DECREF(self->unconsumed_tail);
+        self->unconsumed_tail = PyBytes_FromStringAndSize("", 0);
+    }
+    if (self->unconsumed_tail == NULL) {
+        Py_DECREF(RetVal);
+        RetVal = NULL;
+        goto error;
     }
 
     /* The end of the compressed data has been reached, so set the
@@ -619,6 +629,7 @@
 
  error:
     LEAVE_ZLIB(self);
+ error_outer:
     PyBuffer_Release(&pinput);
     return RetVal;
 }
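
Note: the decompressobj change fixes a stale unconsumed_tail, which is now reset
to b'' once all input has been consumed. A short sketch:

    import zlib

    data = zlib.compress(b'x' * 1000)
    d = zlib.decompressobj()

    part = d.decompress(data, 10)     # max_length: leftover input is kept
    assert len(part) == 10 and d.unconsumed_tail != b''

    d.decompress(d.unconsumed_tail)   # no max_length: everything is consumed
    assert d.unconsumed_tail == b''   # cleared by the change above
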
diff --git a/Objects/object.c b/Objects/object.c
index db7882a..d8e2ffb 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -1364,14 +1364,15 @@
 static PyObject *
 _dir_object(PyObject *obj)
 {
-    PyObject * result = NULL;
-    PyObject * dirfunc = PyObject_GetAttrString((PyObject*)obj->ob_type,
-                                                "__dir__");
+    PyObject *result = NULL;
+    static PyObject *dir_str = NULL;
+    PyObject *dirfunc = _PyObject_LookupSpecial(obj, "__dir__", &dir_str);
 
     assert(obj);
     if (dirfunc == NULL) {
+        if (PyErr_Occurred())
+            return NULL;
         /* use default implementation */
-        PyErr_Clear();
         if (PyModule_Check(obj))
             result = _specialized_dir_module(obj);
         else if (PyType_Check(obj))
@@ -1381,7 +1382,7 @@
     }
     else {
         /* use __dir__ */
-        result = PyObject_CallFunctionObjArgs(dirfunc, obj, NULL);
+        result = PyObject_CallFunctionObjArgs(dirfunc, NULL);
         Py_DECREF(dirfunc);
         if (result == NULL)
             return NULL;
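
Note: with _PyObject_LookupSpecial, __dir__ behaves like other special methods:
looked up on the type and called without an explicit self argument, so instance
attributes no longer shadow it. A sketch:

    class Point:
        def __dir__(self):
            return ['x', 'y']

    p = Point()
    print(dir(p))                     # ['x', 'y'] (dir() sorts the result)
    p.__dir__ = lambda: ['shadowed']  # ignored: lookup goes to type(p)
    print(dir(p))                     # still ['x', 'y']
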
diff --git a/PC/VC6/_multiprocessing.dsp b/PC/VC6/_multiprocessing.dsp
index 2dcaf83..e1b8470 100644
--- a/PC/VC6/_multiprocessing.dsp
+++ b/PC/VC6/_multiprocessing.dsp
@@ -97,18 +97,10 @@
 # End Source File

 # Begin Source File

 

-SOURCE=..\..\Modules\_multiprocessing\pipe_connection.c

-# End Source File

-# Begin Source File

-

 SOURCE=..\..\Modules\_multiprocessing\semaphore.c

 # End Source File

 # Begin Source File

 

-SOURCE=..\..\Modules\_multiprocessing\socket_connection.c

-# End Source File

-# Begin Source File

-

 SOURCE=..\..\Modules\_multiprocessing\win32_functions.c

 # End Source File

 # End Target

diff --git a/PC/VS8.0/_multiprocessing.vcproj b/PC/VS8.0/_multiprocessing.vcproj
index c2bbec4..8f1cafc 100644
--- a/PC/VS8.0/_multiprocessing.vcproj
+++ b/PC/VS8.0/_multiprocessing.vcproj
@@ -522,10 +522,6 @@
 				RelativePath="..\..\Modules\_multiprocessing\multiprocessing.h"

 				>

 			</File>

-			<File

-				RelativePath="..\..\Modules\_multiprocessing\connection.h"

-				>

-			</File>

 		</Filter>

 		<Filter

 			Name="Source Files"

@@ -535,18 +531,10 @@
 				>

 			</File>

 			<File

-				RelativePath="..\..\Modules\_multiprocessing\pipe_connection.c"

-				>

-			</File>

-			<File

 				RelativePath="..\..\Modules\_multiprocessing\semaphore.c"

 				>

 			</File>

 			<File

-				RelativePath="..\..\Modules\_multiprocessing\socket_connection.c"

-				>

-			</File>

-			<File

 				RelativePath="..\..\Modules\_multiprocessing\win32_functions.c"

 				>

 			</File>

diff --git a/PCbuild/_multiprocessing.vcproj b/PCbuild/_multiprocessing.vcproj
index 5d6337d..e9cd3a8 100644
--- a/PCbuild/_multiprocessing.vcproj
+++ b/PCbuild/_multiprocessing.vcproj
@@ -522,10 +522,6 @@
 				RelativePath="..\Modules\_multiprocessing\multiprocessing.h"

 				>

 			</File>

-			<File

-				RelativePath="..\Modules\_multiprocessing\connection.h"

-				>

-			</File>

 		</Filter>

 		<Filter

 			Name="Source Files"

@@ -535,18 +531,10 @@
 				>

 			</File>

 			<File

-				RelativePath="..\Modules\_multiprocessing\pipe_connection.c"

-				>

-			</File>

-			<File

 				RelativePath="..\Modules\_multiprocessing\semaphore.c"

 				>

 			</File>

 			<File

-				RelativePath="..\Modules\_multiprocessing\socket_connection.c"

-				>

-			</File>

-			<File

 				RelativePath="..\Modules\_multiprocessing\win32_functions.c"

 				>

 			</File>

diff --git a/Parser/myreadline.c b/Parser/myreadline.c
index 50802c3..b12d052 100644
--- a/Parser/myreadline.c
+++ b/Parser/myreadline.c
@@ -73,6 +73,7 @@
         }
 #endif /* MS_WINDOWS */
         if (feof(fp)) {
+            clearerr(fp);
             return -1; /* EOF */
         }
 #ifdef EINTR
diff --git a/Python/frozen.c b/Python/frozen.c
index 57d8257..ddf6224 100644
--- a/Python/frozen.c
+++ b/Python/frozen.c
@@ -12,14 +12,17 @@
    the appropriate bytes from M___main__.c. */
 
 static unsigned char M___hello__[] = {
-    99,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
-    0,64,0,0,0,115,10,0,0,0,100,1,0,90,1,0,
-    100,0,0,83,40,2,0,0,0,78,84,40,2,0,0,0,
-    117,4,0,0,0,84,114,117,101,117,11,0,0,0,105,110,
-    105,116,105,97,108,105,122,101,100,40,0,0,0,0,40,0,
-    0,0,0,40,0,0,0,0,117,7,0,0,0,102,108,97,
-    103,46,112,121,117,8,0,0,0,60,109,111,100,117,108,101,
-    62,1,0,0,0,115,0,0,0,0,
+    99,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,
+    0,64,0,0,0,115,20,0,0,0,100,2,0,90,1,0,
+    101,2,0,100,0,0,131,1,0,1,100,1,0,83,40,3,
+    0,0,0,117,12,0,0,0,72,101,108,108,111,32,119,111,
+    114,108,100,33,78,84,40,3,0,0,0,117,4,0,0,0,
+    84,114,117,101,117,11,0,0,0,105,110,105,116,105,97,108,
+    105,122,101,100,117,5,0,0,0,112,114,105,110,116,40,0,
+    0,0,0,40,0,0,0,0,40,0,0,0,0,117,7,0,
+    0,0,102,108,97,103,46,112,121,117,8,0,0,0,60,109,
+    111,100,117,108,101,62,1,0,0,0,115,2,0,0,0,6,
+    1,
 };
 
 #define SIZE (int)sizeof(M___hello__)
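
Note: the regenerated bytecode matches the updated Tools/freeze/flag.py further
below, so importing the frozen test module now has a visible side effect:

    import __hello__              # prints: Hello world!
    print(__hello__.initialized)  # True
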
diff --git a/Python/importdl.c b/Python/importdl.c
index 69bb711..9127d61 100644
--- a/Python/importdl.c
+++ b/Python/importdl.c
@@ -20,31 +20,36 @@
                                            const char *pathname, FILE *fp);
 #endif
 
-/* name should be ASCII only because the C language doesn't accept non-ASCII
-   identifiers, and dynamic modules are written in C. */
-
 PyObject *
 _PyImport_LoadDynamicModule(PyObject *name, PyObject *path, FILE *fp)
 {
-    PyObject *m;
+    PyObject *m = NULL;
 #ifndef MS_WINDOWS
     PyObject *pathbytes;
 #endif
+    PyObject *nameascii;
     char *namestr, *lastdot, *shortname, *packagecontext, *oldcontext;
     dl_funcptr p0;
     PyObject* (*p)(void);
     struct PyModuleDef *def;
 
-    namestr = _PyUnicode_AsString(name);
-    if (namestr == NULL)
-        return NULL;
-
     m = _PyImport_FindExtensionObject(name, path);
     if (m != NULL) {
         Py_INCREF(m);
         return m;
     }
 
+    /* name must be encodable to ASCII because a dynamic module must have a
+       function called "PyInit_NAME"; such modules are written in C, and the C
+       language doesn't accept non-ASCII identifiers. */
+    nameascii = PyUnicode_AsEncodedString(name, "ascii", NULL);
+    if (nameascii == NULL)
+        return NULL;
+
+    namestr = PyBytes_AS_STRING(nameascii);
+    if (namestr == NULL)
+        goto error;
+
     lastdot = strrchr(namestr, '.');
     if (lastdot == NULL) {
         packagecontext = NULL;
@@ -60,34 +65,33 @@
 #else
     pathbytes = PyUnicode_EncodeFSDefault(path);
     if (pathbytes == NULL)
-        return NULL;
+        goto error;
     p0 = _PyImport_GetDynLoadFunc(shortname,
                                   PyBytes_AS_STRING(pathbytes), fp);
     Py_DECREF(pathbytes);
 #endif
     p = (PyObject*(*)(void))p0;
     if (PyErr_Occurred())
-        return NULL;
+        goto error;
     if (p == NULL) {
         PyErr_Format(PyExc_ImportError,
                      "dynamic module does not define init function"
                      " (PyInit_%s)",
                      shortname);
-        return NULL;
+        goto error;
     }
     oldcontext = _Py_PackageContext;
     _Py_PackageContext = packagecontext;
     m = (*p)();
     _Py_PackageContext = oldcontext;
     if (m == NULL)
-        return NULL;
+        goto error;
 
     if (PyErr_Occurred()) {
-        Py_DECREF(m);
         PyErr_Format(PyExc_SystemError,
                      "initialization of %s raised unreported exception",
                      shortname);
-        return NULL;
+        goto error;
     }
 
     /* Remember pointer to module init function. */
@@ -101,12 +105,18 @@
         Py_INCREF(path);
 
     if (_PyImport_FixupExtensionObject(m, name, path) < 0)
-        return NULL;
+        goto error;
     if (Py_VerboseFlag)
         PySys_FormatStderr(
             "import %U # dynamically loaded from %R\n",
             name, path);
+    Py_DECREF(nameascii);
     return m;
+
+error:
+    Py_DECREF(nameascii);
+    Py_XDECREF(m);
+    return NULL;
 }
 
 #endif /* HAVE_DYNAMIC_LOADING */
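
Note: the new encode step is what ultimately rejects non-ASCII extension names,
since no C "PyInit_<name>" symbol could match them; the same UnicodeEncodeError
can be reproduced directly (illustration only):

    try:
        'mödule'.encode('ascii')
    except UnicodeEncodeError as exc:
        print(exc)   # the error _PyImport_LoadDynamicModule now propagates
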
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
index ebc4f1c..b55dc5b 100644
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -83,7 +83,7 @@
 int Py_VerboseFlag; /* Needed by import.c */
 int Py_QuietFlag; /* Needed by sysmodule.c */
 int Py_InteractiveFlag; /* Needed by Py_FdIsInteractive() below */
-int Py_InspectFlag; /* Needed to determine whether to exit at SystemError */
+int Py_InspectFlag; /* Needed to determine whether to exit at SystemExit */
 int Py_NoSiteFlag; /* Suppress 'import site' */
 int Py_BytesWarningFlag; /* Warn on str(bytes) and str(buffer) */
 int Py_DontWriteBytecodeFlag; /* Suppress writing bytecode files (*.py[co]) */
@@ -2144,7 +2144,7 @@
         if (tstate != NULL) {
             fputc('\n', stderr);
             fflush(stderr);
-            _Py_DumpTraceback(fd, tstate);
+            _Py_DumpTracebackThreads(fd, tstate->interp, tstate);
         }
         _PyFaulthandler_Fini();
     }
diff --git a/Tools/freeze/flag.py b/Tools/freeze/flag.py
index ea6d633..1cefa0a 100644
--- a/Tools/freeze/flag.py
+++ b/Tools/freeze/flag.py
@@ -1 +1,2 @@
 initialized = True
+print("Hello world!")
diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py
index 93b0528..f3cb1b0 100644
--- a/Tools/gdb/libpython.py
+++ b/Tools/gdb/libpython.py
@@ -931,6 +931,15 @@
 
         out.write(')')
 
+    def print_traceback(self):
+        if self.is_optimized_out():
+            sys.stdout.write('  (frame information optimized out)\n')
+            return
+        visited = set()
+        sys.stdout.write('  File "%s", line %i, in %s\n'
+                  % (self.co_filename.proxyval(visited),
+                     self.current_line_num(),
+                     self.co_name.proxyval(visited)))
+
 class PySetObjectPtr(PyObjectPtr):
     _typename = 'PySetObject'
 
@@ -1427,6 +1436,17 @@
         else:
             sys.stdout.write('#%i\n' % self.get_index())
 
+    def print_traceback(self):
+        if self.is_evalframeex():
+            pyop = self.get_pyop()
+            if pyop:
+                pyop.print_traceback()
+                sys.stdout.write('    %s\n' % pyop.current_line().strip())
+            else:
+                sys.stdout.write('  (unable to read python frame information)\n')
+        else:
+            sys.stdout.write('  (not a python frame)\n')
+
 class PyList(gdb.Command):
     '''List the current Python source code, if any
 
@@ -1551,6 +1571,24 @@
     PyUp()
     PyDown()
 
+class PyBacktraceFull(gdb.Command):
+    'Display the current python frame and all the frames within its call stack (if any)'
+    def __init__(self):
+        gdb.Command.__init__ (self,
+                              "py-bt-full",
+                              gdb.COMMAND_STACK,
+                              gdb.COMPLETE_NONE)
+
+
+    def invoke(self, args, from_tty):
+        frame = Frame.get_selected_python_frame()
+        while frame:
+            if frame.is_evalframeex():
+                frame.print_summary()
+            frame = frame.older()
+
+PyBacktraceFull()
+
 class PyBacktrace(gdb.Command):
     'Display the current python frame and all the frames within its call stack (if any)'
     def __init__(self):
@@ -1561,10 +1599,11 @@
 
 
     def invoke(self, args, from_tty):
+        sys.stdout.write('Traceback (most recent call first):\n')
         frame = Frame.get_selected_python_frame()
         while frame:
             if frame.is_evalframeex():
-                frame.print_summary()
+                frame.print_traceback()
             frame = frame.older()
 
 PyBacktrace()
diff --git a/Tools/msi/uuids.py b/Tools/msi/uuids.py
index 09da40a..64b9b9b 100644
--- a/Tools/msi/uuids.py
+++ b/Tools/msi/uuids.py
@@ -87,4 +87,7 @@
     '3.2.122' :'{4f3edfa6-cf70-469a-825f-e1206aa7f412}', # 3.2rc2
     '3.2.123' :'{90c673d7-8cfd-4969-9816-f7d70bad87f3}', # 3.2rc3
     '3.2.150' :'{b2042d5e-986d-44ec-aee3-afe4108ccc93}', # 3.2.0
+    '3.2.1121':'{4f90de4a-83dd-4443-b625-ca130ff361dd}', # 3.2.1rc1
+    '3.2.1122':'{dc5eb04d-ff8a-4bed-8f96-23942fd59e5f}', # 3.2.1rc2
+    '3.2.1150':'{34b2530c-6349-4292-9dc3-60bda4aed93c}', # 3.2.1
 }
diff --git a/Tools/scripts/crlf.py b/Tools/scripts/crlf.py
index 0622282..f231d29 100755
--- a/Tools/scripts/crlf.py
+++ b/Tools/scripts/crlf.py
@@ -8,16 +8,16 @@
         if os.path.isdir(filename):
             print(filename, "Directory!")
             continue
-        data = open(filename, "rb").read()
-        if '\0' in data:
+        with open(filename, "rb") as f:
+            data = f.read()
+        if b'\0' in data:
             print(filename, "Binary!")
             continue
-        newdata = data.replace("\r\n", "\n")
+        newdata = data.replace(b"\r\n", b"\n")
         if newdata != data:
             print(filename)
-            f = open(filename, "wb")
-            f.write(newdata)
-            f.close()
+            with open(filename, "wb") as f:
+                f.write(newdata)
 
 if __name__ == '__main__':
     main()
diff --git a/Tools/scripts/pysetup3 b/Tools/scripts/pysetup3
new file mode 100755
index 0000000..e6a908d
--- /dev/null
+++ b/Tools/scripts/pysetup3
@@ -0,0 +1,4 @@
+#!/usr/bin/env python3
+import sys
+from packaging.run import main
+sys.exit(main())
diff --git a/configure b/configure
index 6b4e83b..e1ab99b 100755
--- a/configure
+++ b/configure
@@ -1,14 +1,14 @@
 #! /bin/sh
 # From configure.in Revision.
 # Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.65 for python 3.3.
+# Generated by GNU Autoconf 2.67 for python 3.3.
 #
 # Report bugs to <http://bugs.python.org/>.
 #
 #
 # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
-# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation,
-# Inc.
+# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
 #
 #
 # This configure script is free software; the Free Software Foundation
@@ -320,7 +320,7 @@
       test -d "$as_dir" && break
     done
     test -z "$as_dirs" || eval "mkdir $as_dirs"
-  } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir"
+  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
 
 
 } # as_fn_mkdir_p
@@ -360,19 +360,19 @@
 fi # as_fn_arith
 
 
-# as_fn_error ERROR [LINENO LOG_FD]
-# ---------------------------------
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
 # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
 # provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with status $?, using 1 if that was 0.
+# script with STATUS, using 1 if that was 0.
 as_fn_error ()
 {
-  as_status=$?; test $as_status -eq 0 && as_status=1
-  if test "$3"; then
-    as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-    $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  if test "$4"; then
+    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
   fi
-  $as_echo "$as_me: error: $1" >&2
+  $as_echo "$as_me: error: $2" >&2
   as_fn_exit $as_status
 } # as_fn_error
 
@@ -534,7 +534,7 @@
 exec 6>&1
 
 # Name of the host.
-# hostname on some systems (SVR3.2, Linux) returns a bogus exit status,
+# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
 # so uname gets run too.
 ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
 
@@ -659,6 +659,7 @@
 EGREP
 GREP
 CPP
+NO_AS_NEEDED
 MAINCC
 CXX
 OBJEXT
@@ -831,8 +832,9 @@
   fi
 
   case $ac_option in
-  *=*)	ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
-  *)	ac_optarg=yes ;;
+  *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
+  *=)   ac_optarg= ;;
+  *)    ac_optarg=yes ;;
   esac
 
   # Accept the important Cygnus configure options, so we can diagnose typos.
@@ -877,7 +879,7 @@
     ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
     # Reject names that are not valid shell variable names.
     expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error "invalid feature name: $ac_useropt"
+      as_fn_error $? "invalid feature name: $ac_useropt"
     ac_useropt_orig=$ac_useropt
     ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
     case $ac_user_opts in
@@ -903,7 +905,7 @@
     ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
     # Reject names that are not valid shell variable names.
     expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error "invalid feature name: $ac_useropt"
+      as_fn_error $? "invalid feature name: $ac_useropt"
     ac_useropt_orig=$ac_useropt
     ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
     case $ac_user_opts in
@@ -1107,7 +1109,7 @@
     ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
     # Reject names that are not valid shell variable names.
     expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error "invalid package name: $ac_useropt"
+      as_fn_error $? "invalid package name: $ac_useropt"
     ac_useropt_orig=$ac_useropt
     ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
     case $ac_user_opts in
@@ -1123,7 +1125,7 @@
     ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
     # Reject names that are not valid shell variable names.
     expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error "invalid package name: $ac_useropt"
+      as_fn_error $? "invalid package name: $ac_useropt"
     ac_useropt_orig=$ac_useropt
     ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
     case $ac_user_opts in
@@ -1153,8 +1155,8 @@
   | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
     x_libraries=$ac_optarg ;;
 
-  -*) as_fn_error "unrecognized option: \`$ac_option'
-Try \`$0 --help' for more information."
+  -*) as_fn_error $? "unrecognized option: \`$ac_option'
+Try \`$0 --help' for more information"
     ;;
 
   *=*)
@@ -1162,7 +1164,7 @@
     # Reject names that are not valid shell variable names.
     case $ac_envvar in #(
       '' | [0-9]* | *[!_$as_cr_alnum]* )
-      as_fn_error "invalid variable name: \`$ac_envvar'" ;;
+      as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
     esac
     eval $ac_envvar=\$ac_optarg
     export $ac_envvar ;;
@@ -1180,13 +1182,13 @@
 
 if test -n "$ac_prev"; then
   ac_option=--`echo $ac_prev | sed 's/_/-/g'`
-  as_fn_error "missing argument to $ac_option"
+  as_fn_error $? "missing argument to $ac_option"
 fi
 
 if test -n "$ac_unrecognized_opts"; then
   case $enable_option_checking in
     no) ;;
-    fatal) as_fn_error "unrecognized options: $ac_unrecognized_opts" ;;
+    fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
     *)     $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
   esac
 fi
@@ -1209,7 +1211,7 @@
     [\\/$]* | ?:[\\/]* )  continue;;
     NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
   esac
-  as_fn_error "expected an absolute directory name for --$ac_var: $ac_val"
+  as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
 done
 
 # There might be people who depend on the old broken behavior: `$host'
@@ -1223,8 +1225,8 @@
 if test "x$host_alias" != x; then
   if test "x$build_alias" = x; then
     cross_compiling=maybe
-    $as_echo "$as_me: WARNING: If you wanted to set the --build type, don't use --host.
-    If a cross compiler is detected then cross compile mode will be used." >&2
+    $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host.
+    If a cross compiler is detected then cross compile mode will be used" >&2
   elif test "x$build_alias" != "x$host_alias"; then
     cross_compiling=yes
   fi
@@ -1239,9 +1241,9 @@
 ac_pwd=`pwd` && test -n "$ac_pwd" &&
 ac_ls_di=`ls -di .` &&
 ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
-  as_fn_error "working directory cannot be determined"
+  as_fn_error $? "working directory cannot be determined"
 test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
-  as_fn_error "pwd does not report name of working directory"
+  as_fn_error $? "pwd does not report name of working directory"
 
 
 # Find the source files, if location was not specified.
@@ -1280,11 +1282,11 @@
 fi
 if test ! -r "$srcdir/$ac_unique_file"; then
   test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
-  as_fn_error "cannot find sources ($ac_unique_file) in $srcdir"
+  as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
 fi
 ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
 ac_abs_confdir=`(
-	cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error "$ac_msg"
+	cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
 	pwd)`
 # When building in place, set srcdir=.
 if test "$ac_abs_confdir" = "$ac_pwd"; then
@@ -1324,7 +1326,7 @@
       --help=short        display options specific to this package
       --help=recursive    display the short help of all the included packages
   -V, --version           display version information and exit
-  -q, --quiet, --silent   do not print \`checking...' messages
+  -q, --quiet, --silent   do not print \`checking ...' messages
       --cache-file=FILE   cache test results in FILE [disabled]
   -C, --config-cache      alias for \`--cache-file=config.cache'
   -n, --no-create         do not create output files
@@ -1509,9 +1511,9 @@
 if $ac_init_version; then
   cat <<\_ACEOF
 python configure 3.3
-generated by GNU Autoconf 2.65
+generated by GNU Autoconf 2.67
 
-Copyright (C) 2009 Free Software Foundation, Inc.
+Copyright (C) 2010 Free Software Foundation, Inc.
 This configure script is free software; the Free Software Foundation
 gives unlimited permission to copy, distribute and modify it.
 _ACEOF
@@ -1560,6 +1562,52 @@
 
 } # ac_fn_c_try_compile
 
+# ac_fn_c_try_link LINENO
+# -----------------------
+# Try to link conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_link ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  rm -f conftest.$ac_objext conftest$ac_exeext
+  if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && {
+	 test -z "$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       } && test -s conftest$ac_exeext && {
+	 test "$cross_compiling" = yes ||
+	 $as_test_x conftest$ac_exeext
+       }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+	ac_retval=1
+fi
+  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
+  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
+  # interfere with the next link command; also delete a directory that is
+  # left behind by Apple's compiler.  We do this before executing the actions.
+  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_link
+
 # ac_fn_c_try_cpp LINENO
 # ----------------------
 # Try to preprocess conftest.$ac_ext, and return whether this succeeded.
@@ -1581,7 +1629,7 @@
     mv -f conftest.er1 conftest.err
   fi
   $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } >/dev/null && {
+  test $ac_status = 0; } > conftest.i && {
 	 test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" ||
 	 test ! -s conftest.err
        }; then :
@@ -1605,10 +1653,10 @@
 ac_fn_c_check_header_mongrel ()
 {
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
+  if eval "test \"\${$3+set}\"" = set; then :
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 fi
 eval ac_res=\$$3
@@ -1644,7 +1692,7 @@
 else
   ac_header_preproc=no
 fi
-rm -f conftest.err conftest.$ac_ext
+rm -f conftest.err conftest.i conftest.$ac_ext
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5
 $as_echo "$ac_header_preproc" >&6; }
 
@@ -1667,17 +1715,15 @@
 $as_echo "$as_me: WARNING: $2:     section \"Present But Cannot Be Compiled\"" >&2;}
     { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
-( cat <<\_ASBOX
-## -------------------------------------- ##
+( $as_echo "## -------------------------------------- ##
 ## Report this to http://bugs.python.org/ ##
-## -------------------------------------- ##
-_ASBOX
+## -------------------------------------- ##"
      ) | sed "s/^/$as_me: WARNING:     /" >&2
     ;;
 esac
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   eval "$3=\$ac_header_compiler"
@@ -1741,7 +1787,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -1763,52 +1809,6 @@
 
 } # ac_fn_c_check_header_compile
 
-# ac_fn_c_try_link LINENO
-# -----------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_link ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  rm -f conftest.$ac_objext conftest$ac_exeext
-  if { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && {
-	 test -z "$ac_c_werror_flag" ||
-	 test ! -s conftest.err
-       } && test -s conftest$ac_exeext && {
-	 test "$cross_compiling" = yes ||
-	 $as_test_x conftest$ac_exeext
-       }; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-	ac_retval=1
-fi
-  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
-  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
-  # interfere with the next link command; also delete a directory that is
-  # left behind by Apple's compiler.  We do this before executing the actions.
-  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
-  eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;}
-  as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_link
-
 # ac_fn_c_check_type LINENO TYPE VAR INCLUDES
 # -------------------------------------------
 # Tests whether TYPE exists after having included INCLUDES, setting cache
@@ -1818,7 +1818,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   eval "$3=no"
@@ -1872,7 +1872,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for uint$2_t" >&5
 $as_echo_n "checking for uint$2_t... " >&6; }
-if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   eval "$3=no"
@@ -1902,8 +1902,7 @@
 esac
 fi
 rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-       eval as_val=\$$3
-   if test "x$as_val" = x""no; then :
+       if eval test \"x\$"$3"\" = x"no"; then :
 
 else
   break
@@ -1926,7 +1925,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for int$2_t" >&5
 $as_echo_n "checking for int$2_t... " >&6; }
-if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   eval "$3=no"
@@ -1937,11 +1936,11 @@
        cat confdefs.h - <<_ACEOF >conftest.$ac_ext
 /* end confdefs.h.  */
 $ac_includes_default
+	     enum { N = $2 / 2 - 1 };
 int
 main ()
 {
-static int test_array [1 - 2 * !(enum { N = $2 / 2 - 1 };
-	     0 < ($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1))];
+static int test_array [1 - 2 * !(0 < ($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1))];
 test_array [0] = 0
 
   ;
@@ -1952,11 +1951,11 @@
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
 /* end confdefs.h.  */
 $ac_includes_default
+	        enum { N = $2 / 2 - 1 };
 int
 main ()
 {
-static int test_array [1 - 2 * !(enum { N = $2 / 2 - 1 };
-		($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1)
+static int test_array [1 - 2 * !(($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1)
 		 < ($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 2))];
 test_array [0] = 0
 
@@ -1977,8 +1976,7 @@
 rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
 fi
 rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-       eval as_val=\$$3
-   if test "x$as_val" = x""no; then :
+       if eval test \"x\$"$3"\" = x"no"; then :
 
 else
   break
@@ -2178,7 +2176,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -2246,7 +2244,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2.$3" >&5
 $as_echo_n "checking for $2.$3... " >&6; }
-if { as_var=$4; eval "test \"\${$as_var+set}\" = set"; }; then :
+if eval "test \"\${$4+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -2294,15 +2292,18 @@
 
 } # ac_fn_c_check_member
 
-# ac_fn_c_check_decl LINENO SYMBOL VAR
-# ------------------------------------
-# Tests whether SYMBOL is declared, setting cache variable VAR accordingly.
+# ac_fn_c_check_decl LINENO SYMBOL VAR INCLUDES
+# ---------------------------------------------
+# Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR
+# accordingly.
 ac_fn_c_check_decl ()
 {
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $2 is declared" >&5
-$as_echo_n "checking whether $2 is declared... " >&6; }
-if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
+  as_decl_name=`echo $2|sed 's/ *(.*//'`
+  as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'`
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5
+$as_echo_n "checking whether $as_decl_name is declared... " >&6; }
+if eval "test \"\${$3+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -2311,8 +2312,12 @@
 int
 main ()
 {
-#ifndef $2
-  (void) $2;
+#ifndef $as_decl_name
+#ifdef __cplusplus
+  (void) $as_decl_use;
+#else
+  (void) $as_decl_name;
+#endif
 #endif
 
   ;
@@ -2337,7 +2342,7 @@
 running configure, to aid debugging if configure makes a mistake.
 
 It was created by python $as_me 3.3, which was
-generated by GNU Autoconf 2.65.  Invocation command line was
+generated by GNU Autoconf 2.67.  Invocation command line was
 
   $ $0 $@
 
@@ -2447,11 +2452,9 @@
   {
     echo
 
-    cat <<\_ASBOX
-## ---------------- ##
+    $as_echo "## ---------------- ##
 ## Cache variables. ##
-## ---------------- ##
-_ASBOX
+## ---------------- ##"
     echo
     # The following way of writing the cache mishandles newlines in values,
 (
@@ -2485,11 +2488,9 @@
 )
     echo
 
-    cat <<\_ASBOX
-## ----------------- ##
+    $as_echo "## ----------------- ##
 ## Output variables. ##
-## ----------------- ##
-_ASBOX
+## ----------------- ##"
     echo
     for ac_var in $ac_subst_vars
     do
@@ -2502,11 +2503,9 @@
     echo
 
     if test -n "$ac_subst_files"; then
-      cat <<\_ASBOX
-## ------------------- ##
+      $as_echo "## ------------------- ##
 ## File substitutions. ##
-## ------------------- ##
-_ASBOX
+## ------------------- ##"
       echo
       for ac_var in $ac_subst_files
       do
@@ -2520,11 +2519,9 @@
     fi
 
     if test -s confdefs.h; then
-      cat <<\_ASBOX
-## ----------- ##
+      $as_echo "## ----------- ##
 ## confdefs.h. ##
-## ----------- ##
-_ASBOX
+## ----------- ##"
       echo
       cat confdefs.h
       echo
@@ -2579,7 +2576,12 @@
 ac_site_file1=NONE
 ac_site_file2=NONE
 if test -n "$CONFIG_SITE"; then
-  ac_site_file1=$CONFIG_SITE
+  # We do not want a PATH search for config.site.
+  case $CONFIG_SITE in #((
+    -*)  ac_site_file1=./$CONFIG_SITE;;
+    */*) ac_site_file1=$CONFIG_SITE;;
+    *)   ac_site_file1=./$CONFIG_SITE;;
+  esac
 elif test "x$prefix" != xNONE; then
   ac_site_file1=$prefix/share/config.site
   ac_site_file2=$prefix/etc/config.site
@@ -2594,7 +2596,11 @@
     { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
 $as_echo "$as_me: loading site script $ac_site_file" >&6;}
     sed 's/^/| /' "$ac_site_file" >&5
-    . "$ac_site_file"
+    . "$ac_site_file" \
+      || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "failed to load site script $ac_site_file
+See \`config.log' for more details" "$LINENO" 5 ; }
   fi
 done
 
@@ -2670,7 +2676,7 @@
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
   { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
 $as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
-  as_fn_error "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
+  as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
 fi
 ## -------------------- ##
 ## Main body of script. ##
@@ -2771,7 +2777,7 @@
 		UNIVERSALSDK=$enableval
 		if test ! -d "${UNIVERSALSDK}"
 		then
-			as_fn_error "--enable-universalsdk specifies non-existing SDK: ${UNIVERSALSDK}" "$LINENO" 5
+			as_fn_error $? "--enable-universalsdk specifies non-existing SDK: ${UNIVERSALSDK}" "$LINENO" 5
 		fi
 		;;
 	esac
@@ -3163,7 +3169,7 @@
 # If the user switches compilers, we can't believe the cache
 if test ! -z "$ac_cv_prog_CC" -a ! -z "$CC" -a "$CC" != "$ac_cv_prog_CC"
 then
-  as_fn_error "cached CC is different -- throw away $cache_file
+  as_fn_error $? "cached CC is different -- throw away $cache_file
 (it is also a good idea to do 'make clean' before compiling)" "$LINENO" 5
 fi
 
@@ -3473,8 +3479,8 @@
 
 test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error "no acceptable C compiler found in \$PATH
-See \`config.log' for more details." "$LINENO" 5; }
+as_fn_error $? "no acceptable C compiler found in \$PATH
+See \`config.log' for more details" "$LINENO" 5 ; }
 
 # Provide some information about the compiler.
 $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
@@ -3588,9 +3594,8 @@
 
 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "C compiler cannot create executables
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "C compiler cannot create executables
+See \`config.log' for more details" "$LINENO" 5 ; }
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
 $as_echo "yes" >&6; }
@@ -3632,8 +3637,8 @@
 else
   { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error "cannot compute suffix of executables: cannot compile and link
-See \`config.log' for more details." "$LINENO" 5; }
+as_fn_error $? "cannot compute suffix of executables: cannot compile and link
+See \`config.log' for more details" "$LINENO" 5 ; }
 fi
 rm -f conftest conftest$ac_cv_exeext
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
@@ -3690,9 +3695,9 @@
     else
 	{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error "cannot run C compiled programs.
+as_fn_error $? "cannot run C compiled programs.
 If you meant to cross compile, use \`--host'.
-See \`config.log' for more details." "$LINENO" 5; }
+See \`config.log' for more details" "$LINENO" 5 ; }
     fi
   fi
 fi
@@ -3743,8 +3748,8 @@
 
 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error "cannot compute suffix of object files: cannot compile
-See \`config.log' for more details." "$LINENO" 5; }
+as_fn_error $? "cannot compute suffix of object files: cannot compile
+See \`config.log' for more details" "$LINENO" 5 ; }
 fi
 rm -f conftest.$ac_cv_objext conftest.$ac_ext
 fi
@@ -4153,8 +4158,38 @@
 fi
 
 
-# checks for UNIX variants that set C preprocessor variables
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for -Wl,--no-as-needed" >&5
+$as_echo_n "checking for -Wl,--no-as-needed... " >&6; }
+save_LDFLAGS="$LDFLAGS"
+LDFLAGS="$LDFLAGS -Wl,--no-as-needed"
 
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  NO_AS_NEEDED="-Wl,--no-as-needed"
+   { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+  NO_AS_NEEDED=""
+   { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LDFLAGS="$save_LDFLAGS"
+
+
+
+# checks for UNIX variants that set C preprocessor variables
 ac_ext=c
 ac_cpp='$CPP $CPPFLAGS'
 ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
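
The block above tests whether the linker accepts -Wl,--no-as-needed by linking
an empty program with the flag temporarily added to LDFLAGS: GNU ld accepts
it, while other linkers (e.g. Darwin's) reject it, leaving NO_AS_NEEDED empty.
A standalone sketch of the same probe, with conftest.c as a throwaway file:

    save_LDFLAGS=$LDFLAGS
    LDFLAGS="$LDFLAGS -Wl,--no-as-needed"
    echo 'int main(void) { return 0; }' > conftest.c
    if ${CC:-cc} $LDFLAGS conftest.c -o conftest >/dev/null 2>&1; then
      NO_AS_NEEDED="-Wl,--no-as-needed"   # flag understood by this toolchain
    else
      NO_AS_NEEDED=""
    fi
    rm -f conftest conftest.c
    LDFLAGS=$save_LDFLAGS
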
@@ -4197,7 +4232,7 @@
   # Broken: fails on valid input.
 continue
 fi
-rm -f conftest.err conftest.$ac_ext
+rm -f conftest.err conftest.i conftest.$ac_ext
 
   # OK, works on sane cases.  Now check whether nonexistent headers
   # can be detected and how.
@@ -4213,11 +4248,11 @@
 ac_preproc_ok=:
 break
 fi
-rm -f conftest.err conftest.$ac_ext
+rm -f conftest.err conftest.i conftest.$ac_ext
 
 done
 # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.err conftest.$ac_ext
+rm -f conftest.i conftest.err conftest.$ac_ext
 if $ac_preproc_ok; then :
   break
 fi
@@ -4256,7 +4291,7 @@
   # Broken: fails on valid input.
 continue
 fi
-rm -f conftest.err conftest.$ac_ext
+rm -f conftest.err conftest.i conftest.$ac_ext
 
   # OK, works on sane cases.  Now check whether nonexistent headers
   # can be detected and how.
@@ -4272,18 +4307,18 @@
 ac_preproc_ok=:
 break
 fi
-rm -f conftest.err conftest.$ac_ext
+rm -f conftest.err conftest.i conftest.$ac_ext
 
 done
 # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.err conftest.$ac_ext
+rm -f conftest.i conftest.err conftest.$ac_ext
 if $ac_preproc_ok; then :
 
 else
   { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error "C preprocessor \"$CPP\" fails sanity check
-See \`config.log' for more details." "$LINENO" 5; }
+as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
+See \`config.log' for more details" "$LINENO" 5 ; }
 fi
 
 ac_ext=c
@@ -4344,7 +4379,7 @@
   done
 IFS=$as_save_IFS
   if test -z "$ac_cv_path_GREP"; then
-    as_fn_error "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+    as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
   fi
 else
   ac_cv_path_GREP=$GREP
@@ -4410,7 +4445,7 @@
   done
 IFS=$as_save_IFS
   if test -z "$ac_cv_path_EGREP"; then
-    as_fn_error "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+    as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
   fi
 else
   ac_cv_path_EGREP=$EGREP
@@ -4542,8 +4577,7 @@
   as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
 ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
 "
-eval as_val=\$$as_ac_Header
-   if test "x$as_val" = x""yes; then :
+if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
 _ACEOF
@@ -5167,16 +5201,22 @@
 esac
 ac_aux_dir=
 for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do
-  for ac_t in install-sh install.sh shtool; do
-    if test -f "$ac_dir/$ac_t"; then
-      ac_aux_dir=$ac_dir
-      ac_install_sh="$ac_aux_dir/$ac_t -c"
-      break 2
-    fi
-  done
+  if test -f "$ac_dir/install-sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install-sh -c"
+    break
+  elif test -f "$ac_dir/install.sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install.sh -c"
+    break
+  elif test -f "$ac_dir/shtool"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/shtool install -c"
+    break
+  fi
 done
 if test -z "$ac_aux_dir"; then
-  as_fn_error "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5
+  as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5
 fi
 
 # These three variables are undocumented and unsupported,
@@ -5514,7 +5554,7 @@
 		   ARCH_RUN_32BIT="/usr/bin/arch -i386 -ppc"
 
 		 else
-	           as_fn_error "proper usage is --with-universal-arch=32-bit|64-bit|all|intel|3-way" "$LINENO" 5
+	           as_fn_error $? "proper usage is --with-universal-arch=32-bit|64-bit|all|intel|3-way" "$LINENO" 5
 
 		 fi
 
@@ -6002,8 +6042,7 @@
 do :
   as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
 ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default"
-eval as_val=\$$as_ac_Header
-   if test "x$as_val" = x""yes; then :
+if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
 _ACEOF
@@ -6017,7 +6056,7 @@
   as_ac_Header=`$as_echo "ac_cv_header_dirent_$ac_hdr" | $as_tr_sh`
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_hdr that defines DIR" >&5
 $as_echo_n "checking for $ac_hdr that defines DIR... " >&6; }
-if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then :
+if eval "test \"\${$as_ac_Header+set}\"" = set; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -6044,8 +6083,7 @@
 eval ac_res=\$$as_ac_Header
 	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
 $as_echo "$ac_res" >&6; }
-eval as_val=\$$as_ac_Header
-   if test "x$as_val" = x""yes; then :
+if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_hdr" | $as_tr_cpp` 1
 _ACEOF
@@ -6222,6 +6260,34 @@
 fi
 
 
+# On Darwin (OS X) net/if.h requires sys/socket.h to be imported first.
+for ac_header in net/if.h
+do :
+  ac_fn_c_check_header_compile "$LINENO" "net/if.h" "ac_cv_header_net_if_h" "#include <stdio.h>
+#ifdef STDC_HEADERS
+# include <stdlib.h>
+# include <stddef.h>
+#else
+# ifdef HAVE_STDLIB_H
+#  include <stdlib.h>
+# endif
+#endif
+#ifdef HAVE_SYS_SOCKET_H
+# include <sys/socket.h>
+#endif
+
+"
+if test "x$ac_cv_header_net_if_h" = x""yes; then :
+  cat >>confdefs.h <<_ACEOF
+#define HAVE_NET_IF_H 1
+_ACEOF
+
+fi
+
+done
+
+
+
 # On Solaris, term.h requires curses.h
 for ac_header in term.h
 do :
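
The net/if.h check added above supplies an explicit prologue because on Darwin
that header only compiles after sys/socket.h has been included. A condensed
sketch of the conftest source the generated check effectively builds:

    printf '%s\n' '#include <stdio.h>' '#include <stdlib.h>' \
      '#include <sys/socket.h>' '#include <net/if.h>' \
      'int main(void) { return 0; }' > conftest.c   # socket.h first, as on Darwin
    ${CC:-cc} -c conftest.c >/dev/null 2>&1 && echo '#define HAVE_NET_IF_H 1'
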
@@ -6542,9 +6608,8 @@
   if test "$ac_cv_type_int" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (int)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (int)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_int=0
    fi
@@ -6576,9 +6641,8 @@
   if test "$ac_cv_type_long" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (long)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (long)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_long=0
    fi
@@ -6610,9 +6674,8 @@
   if test "$ac_cv_type_void_p" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (void *)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (void *)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_void_p=0
    fi
@@ -6644,9 +6707,8 @@
   if test "$ac_cv_type_short" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (short)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (short)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_short=0
    fi
@@ -6678,9 +6740,8 @@
   if test "$ac_cv_type_float" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (float)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (float)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_float=0
    fi
@@ -6712,9 +6773,8 @@
   if test "$ac_cv_type_double" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (double)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (double)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_double=0
    fi
@@ -6746,9 +6806,8 @@
   if test "$ac_cv_type_fpos_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (fpos_t)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (fpos_t)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_fpos_t=0
    fi
@@ -6780,9 +6839,8 @@
   if test "$ac_cv_type_size_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (size_t)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (size_t)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_size_t=0
    fi
@@ -6814,9 +6872,8 @@
   if test "$ac_cv_type_pid_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (pid_t)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (pid_t)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_pid_t=0
    fi
@@ -6875,9 +6932,8 @@
   if test "$ac_cv_type_long_long" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (long long)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (long long)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_long_long=0
    fi
@@ -6937,9 +6993,8 @@
   if test "$ac_cv_type_long_double" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (long double)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (long double)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_long_double=0
    fi
@@ -7000,9 +7055,8 @@
   if test "$ac_cv_type__Bool" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (_Bool)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (_Bool)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof__Bool=0
    fi
@@ -7049,9 +7103,8 @@
   if test "$ac_cv_type_uintptr_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (uintptr_t)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (uintptr_t)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_uintptr_t=0
    fi
@@ -7091,9 +7144,8 @@
   if test "$ac_cv_type_off_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (off_t)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (off_t)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_off_t=0
    fi
@@ -7154,9 +7206,8 @@
   if test "$ac_cv_type_time_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (time_t)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (time_t)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_time_t=0
    fi
@@ -7227,9 +7278,8 @@
   if test "$ac_cv_type_pthread_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (pthread_t)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (pthread_t)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_pthread_t=0
    fi
@@ -7316,7 +7366,7 @@
     		MACOSX_DEFAULT_ARCH="ppc"
     		;;
     	*)
-    		as_fn_error "Unexpected output of 'arch' on OSX" "$LINENO" 5
+    		as_fn_error $? "Unexpected output of 'arch' on OSX" "$LINENO" 5
     		;;
     	esac
     else
@@ -7328,7 +7378,7 @@
     		MACOSX_DEFAULT_ARCH="ppc64"
     		;;
     	*)
-    		as_fn_error "Unexpected output of 'arch' on OSX" "$LINENO" 5
+    		as_fn_error $? "Unexpected output of 'arch' on OSX" "$LINENO" 5
     		;;
     	esac
 
@@ -7354,7 +7404,7 @@
 $as_echo "yes" >&6; }
 	if test $enable_shared = "yes"
 	then
-		as_fn_error "Specifying both --enable-shared and --enable-framework is not supported, use only --enable-framework instead" "$LINENO" 5
+		as_fn_error $? "Specifying both --enable-shared and --enable-framework is not supported, use only --enable-framework instead" "$LINENO" 5
 	fi
 else
 	{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
@@ -8194,12 +8244,12 @@
   withval=$with_dbmliborder;
 if test x$with_dbmliborder = xyes
 then
-as_fn_error "proper usage is --with-dbmliborder=db1:db2:..." "$LINENO" 5
+as_fn_error $? "proper usage is --with-dbmliborder=db1:db2:..." "$LINENO" 5
 else
   for db in `echo $with_dbmliborder | sed 's/:/ /g'`; do
     if test x$db != xndbm && test x$db != xgdbm && test x$db != xbdb
     then
-      as_fn_error "proper usage is --with-dbmliborder=db1:db2:..." "$LINENO" 5
+      as_fn_error $? "proper usage is --with-dbmliborder=db1:db2:..." "$LINENO" 5
     fi
   done
 fi
@@ -9172,7 +9222,7 @@
 $as_echo "#define WITH_VALGRIND 1" >>confdefs.h
 
 else
-  as_fn_error "Valgrind support requested but headers not available" "$LINENO" 5
+  as_fn_error $? "Valgrind support requested but headers not available" "$LINENO" 5
 
 fi
 
@@ -9255,22 +9305,23 @@
  futimens futimes \
  gai_strerror getgroups getlogin getloadavg getpeername getpgid getpid \
  getpriority getresuid getresgid getpwent getspnam getspent getsid getwd \
+ if_nameindex \
  initgroups kill killpg lchmod lchown lockf linkat lstat lutimes mbrtowc mkdirat mkfifo \
  mkfifoat mknod mknodat mktime mremap nice openat pathconf pause plock poll \
  posix_fallocate posix_fadvise pread \
- pthread_init putenv pwrite readlink readlinkat readv realpath renameat \
+ pthread_init pthread_kill putenv pwrite readlink readlinkat readv realpath renameat \
  select sem_open sem_timedwait sem_getvalue sem_unlink sendfile setegid seteuid \
  setgid sethostname \
  setlocale setregid setreuid setresuid setresgid setsid setpgid setpgrp setpriority setuid setvbuf \
- sigaction sigaltstack siginterrupt sigrelse snprintf strftime strlcpy symlinkat sync \
+ sigaction sigaltstack siginterrupt sigpending \
+ sigrelse sigwait snprintf strftime strlcpy symlinkat sync \
  sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \
  truncate uname unlinkat unsetenv utimensat utimes waitid waitpid wait3 wait4 \
  wcscoll wcsftime wcsxfrm writev _getpty
 do :
   as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
 ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-eval as_val=\$$as_ac_var
-   if test "x$as_val" = x""yes; then :
+if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
 _ACEOF
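
The names newly added to this list (if_nameindex, pthread_kill, sigpending and
sigwait) back interfaces introduced in Python 3.3, such as
socket.if_nameindex() and the signal module's pthread_kill(), sigpending()
and sigwait(). Each name receives the usual link-only probe; a condensed,
hypothetical driver for just the new ones (library flags such as -lpthread
elided):

    for func in if_nameindex pthread_kill sigpending sigwait; do
      printf 'char %s ();\nint main (void) { return %s (); }\n' "$func" "$func" > conftest.c
      ${CC:-cc} conftest.c -o conftest >/dev/null 2>&1 &&
        echo "#define HAVE_$(echo $func | tr a-z A-Z) 1"
    done
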
@@ -10293,8 +10344,7 @@
 do :
   as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
 ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-eval as_val=\$$as_ac_var
-   if test "x$as_val" = x""yes; then :
+if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
 _ACEOF
@@ -10303,25 +10353,44 @@
 done
 
 
-for ac_func in dup2 getcwd strdup
-do :
-  as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
-ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-eval as_val=\$$as_ac_var
-   if test "x$as_val" = x""yes; then :
-  cat >>confdefs.h <<_ACEOF
-#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
-_ACEOF
+ac_fn_c_check_func "$LINENO" "dup2" "ac_cv_func_dup2"
+if test "x$ac_cv_func_dup2" = x""yes; then :
+  $as_echo "#define HAVE_DUP2 1" >>confdefs.h
 
 else
   case " $LIBOBJS " in
-  *" $ac_func.$ac_objext "* ) ;;
-  *) LIBOBJS="$LIBOBJS $ac_func.$ac_objext"
+  *" dup2.$ac_objext "* ) ;;
+  *) LIBOBJS="$LIBOBJS dup2.$ac_objext"
  ;;
 esac
 
 fi
-done
+
+ac_fn_c_check_func "$LINENO" "getcwd" "ac_cv_func_getcwd"
+if test "x$ac_cv_func_getcwd" = x""yes; then :
+  $as_echo "#define HAVE_GETCWD 1" >>confdefs.h
+
+else
+  case " $LIBOBJS " in
+  *" getcwd.$ac_objext "* ) ;;
+  *) LIBOBJS="$LIBOBJS getcwd.$ac_objext"
+ ;;
+esac
+
+fi
+
+ac_fn_c_check_func "$LINENO" "strdup" "ac_cv_func_strdup"
+if test "x$ac_cv_func_strdup" = x""yes; then :
+  $as_echo "#define HAVE_STRDUP 1" >>confdefs.h
+
+else
+  case " $LIBOBJS " in
+  *" strdup.$ac_objext "* ) ;;
+  *) LIBOBJS="$LIBOBJS strdup.$ac_objext"
+ ;;
+esac
+
+fi
 
 
 for ac_func in getpgrp
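
The unrolled dup2/getcwd/strdup checks above replace the old run-time loop:
the Autoconf 2.67 regeneration expands AC_REPLACE_FUNCS once per function. The
behaviour is unchanged; when a function is missing, its replacement object is
appended to LIBOBJS so the bundled portability source gets linked in. A sketch
of that fallback for one function:

    printf 'char getcwd ();\nint main (void) { return getcwd (); }\n' > conftest.c
    if ${CC:-cc} conftest.c -o conftest >/dev/null 2>&1; then
      echo '#define HAVE_GETCWD 1' >> confdefs.h
    else
      LIBOBJS="$LIBOBJS getcwd.$ac_objext"   # build the bundled getcwd.c instead
    fi
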
@@ -11534,7 +11603,7 @@
 then LIBM=$withval
      { $as_echo "$as_me:${as_lineno-$LINENO}: result: set LIBM=\"$withval\"" >&5
 $as_echo "set LIBM=\"$withval\"" >&6; }
-else as_fn_error "proper usage is --with-libm=STRING" "$LINENO" 5
+else as_fn_error $? "proper usage is --with-libm=STRING" "$LINENO" 5
 fi
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: default LIBM=\"$LIBM\"" >&5
@@ -11558,7 +11627,7 @@
 then LIBC=$withval
      { $as_echo "$as_me:${as_lineno-$LINENO}: result: set LIBC=\"$withval\"" >&5
 $as_echo "set LIBC=\"$withval\"" >&6; }
-else as_fn_error "proper usage is --with-libc=STRING" "$LINENO" 5
+else as_fn_error $? "proper usage is --with-libc=STRING" "$LINENO" 5
 fi
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: default LIBC=\"$LIBC\"" >&5
@@ -11808,8 +11877,7 @@
 do :
   as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
 ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-eval as_val=\$$as_ac_var
-   if test "x$as_val" = x""yes; then :
+if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
 _ACEOF
@@ -11817,12 +11885,11 @@
 fi
 done
 
-for ac_func in hypot lgamma log1p round tgamma
+for ac_func in hypot lgamma log1p log2 round tgamma
 do :
   as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
 ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-eval as_val=\$$as_ac_var
-   if test "x$as_val" = x""yes; then :
+if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
 _ACEOF
@@ -12085,7 +12152,7 @@
 15|30)
   ;;
 *)
-  as_fn_error "bad value $enable_big_digits for --enable-big-digits; value should be 15 or 30" "$LINENO" 5 ;;
+  as_fn_error $? "bad value $enable_big_digits for --enable-big-digits; value should be 15 or 30" "$LINENO" 5  ;;
 esac
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_big_digits" >&5
 $as_echo "$enable_big_digits" >&6; }
@@ -12136,9 +12203,8 @@
   if test "$ac_cv_type_wchar_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-{ as_fn_set_status 77
-as_fn_error "cannot compute sizeof (wchar_t)
-See \`config.log' for more details." "$LINENO" 5; }; }
+as_fn_error 77 "cannot compute sizeof (wchar_t)
+See \`config.log' for more details" "$LINENO" 5 ; }
    else
      ac_cv_sizeof_wchar_t=0
    fi
@@ -12507,8 +12573,8 @@
 
      ;; #(
    *)
-     as_fn_error "unknown endianness
- presetting ac_cv_c_bigendian=no (or yes) will help" "$LINENO" 5 ;;
+     as_fn_error $? "unknown endianness
+ presetting ac_cv_c_bigendian=no (or yes) will help" "$LINENO" 5  ;;
  esac
 
 
@@ -12769,7 +12835,7 @@
   have_readline=no
 
 fi
-rm -f conftest.err conftest.$ac_ext
+rm -f conftest.err conftest.i conftest.$ac_ext
 if test $have_readline = yes
 then
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -12943,7 +13009,7 @@
   have_readline=no
 
 fi
-rm -f conftest.err conftest.$ac_ext
+rm -f conftest.err conftest.i conftest.$ac_ext
 if test $have_readline = yes
 then
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -13873,6 +13939,7 @@
 
 ac_libobjs=
 ac_ltlibobjs=
+U=
 for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
   # 1. Remove the extension, and $U if already installed.
   ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
@@ -14035,19 +14102,19 @@
 (unset CDPATH) >/dev/null 2>&1 && unset CDPATH
 
 
-# as_fn_error ERROR [LINENO LOG_FD]
-# ---------------------------------
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
 # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
 # provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with status $?, using 1 if that was 0.
+# script with STATUS, using 1 if that was 0.
 as_fn_error ()
 {
-  as_status=$?; test $as_status -eq 0 && as_status=1
-  if test "$3"; then
-    as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-    $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  if test "$4"; then
+    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
   fi
-  $as_echo "$as_me: error: $1" >&2
+  $as_echo "$as_me: error: $2" >&2
   as_fn_exit $as_status
 } # as_fn_error
 
@@ -14243,7 +14310,7 @@
       test -d "$as_dir" && break
     done
     test -z "$as_dirs" || eval "mkdir $as_dirs"
-  } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir"
+  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
 
 
 } # as_fn_mkdir_p
@@ -14297,7 +14364,7 @@
 # values after options handling.
 ac_log="
 This file was extended by python $as_me 3.3, which was
-generated by GNU Autoconf 2.65.  Invocation command line was
+generated by GNU Autoconf 2.67.  Invocation command line was
 
   CONFIG_FILES    = $CONFIG_FILES
   CONFIG_HEADERS  = $CONFIG_HEADERS
@@ -14321,8 +14388,8 @@
 
 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
 # Files that config.status was made for.
-config_files="`echo $ac_config_files`"
-config_headers="`echo $ac_config_headers`"
+config_files="$ac_config_files"
+config_headers="$ac_config_headers"
 
 _ACEOF
 
@@ -14359,10 +14426,10 @@
 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
 ac_cs_version="\\
 python config.status 3.3
-configured by $0, generated by GNU Autoconf 2.65,
+configured by $0, generated by GNU Autoconf 2.67,
   with options \\"\$ac_cs_config\\"
 
-Copyright (C) 2009 Free Software Foundation, Inc.
+Copyright (C) 2010 Free Software Foundation, Inc.
 This config.status script is free software; the Free Software Foundation
 gives unlimited permission to copy, distribute and modify it."
 
@@ -14378,11 +14445,16 @@
 while test $# != 0
 do
   case $1 in
-  --*=*)
+  --*=?*)
     ac_option=`expr "X$1" : 'X\([^=]*\)='`
     ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
     ac_shift=:
     ;;
+  --*=)
+    ac_option=`expr "X$1" : 'X\([^=]*\)='`
+    ac_optarg=
+    ac_shift=:
+    ;;
   *)
     ac_option=$1
     ac_optarg=$2
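
Splitting "--*=*" into "--*=?*" and "--*=" lets config.status tell an option
with a value apart from one with an empty value, which the "missing file
argument" check a few lines below then rejects explicitly. A small
demonstration of the distinction:

    set -- --file=
    case $1 in
      --*=?*) echo "option with a value" ;;
      --*=)   echo "option with an empty value" ;;   # now a diagnosable case
    esac
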
@@ -14404,6 +14476,7 @@
     $ac_shift
     case $ac_optarg in
     *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    '') as_fn_error $? "missing file argument" ;;
     esac
     as_fn_append CONFIG_FILES " '$ac_optarg'"
     ac_need_defaults=false;;
@@ -14416,7 +14489,7 @@
     ac_need_defaults=false;;
   --he | --h)
     # Conflict between --help and --header
-    as_fn_error "ambiguous option: \`$1'
+    as_fn_error $? "ambiguous option: \`$1'
 Try \`$0 --help' for more information.";;
   --help | --hel | -h )
     $as_echo "$ac_cs_usage"; exit ;;
@@ -14425,7 +14498,7 @@
     ac_cs_silent=: ;;
 
   # This is an error.
-  -*) as_fn_error "unrecognized option: \`$1'
+  -*) as_fn_error $? "unrecognized option: \`$1'
 Try \`$0 --help' for more information." ;;
 
   *) as_fn_append ac_config_targets " $1"
@@ -14484,7 +14557,7 @@
     "Misc/python.pc") CONFIG_FILES="$CONFIG_FILES Misc/python.pc" ;;
     "Modules/ld_so_aix") CONFIG_FILES="$CONFIG_FILES Modules/ld_so_aix" ;;
 
-  *) as_fn_error "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
+  *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5 ;;
   esac
 done
 
@@ -14521,7 +14594,7 @@
 {
   tmp=./conf$$-$RANDOM
   (umask 077 && mkdir "$tmp")
-} || as_fn_error "cannot create a temporary directory in ." "$LINENO" 5
+} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
 
 # Set up the scripts for CONFIG_FILES section.
 # No need to generate them if there are no CONFIG_FILES.
@@ -14538,7 +14611,7 @@
 fi
 ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
 if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
-  ac_cs_awk_cr='\r'
+  ac_cs_awk_cr='\\r'
 else
   ac_cs_awk_cr=$ac_cr
 fi
@@ -14552,18 +14625,18 @@
   echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
   echo "_ACEOF"
 } >conf$$subs.sh ||
-  as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5
-ac_delim_num=`echo "$ac_subst_vars" | grep -c '$'`
+  as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
 ac_delim='%!_!# '
 for ac_last_try in false false false false false :; do
   . ./conf$$subs.sh ||
-    as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
 
   ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
   if test $ac_delim_n = $ac_delim_num; then
     break
   elif $ac_last_try; then
-    as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
   else
     ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
   fi
@@ -14652,20 +14725,28 @@
 else
   cat
 fi < "$tmp/subs1.awk" > "$tmp/subs.awk" \
-  || as_fn_error "could not setup config files machinery" "$LINENO" 5
+  || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
 _ACEOF
 
-# VPATH may cause trouble with some makes, so we remove $(srcdir),
-# ${srcdir} and @srcdir@ from VPATH if srcdir is ".", strip leading and
+# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
+# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
 # trailing colons and then remove the whole line if VPATH becomes empty
 # (actually we leave an empty line to preserve line numbers).
 if test "x$srcdir" = x.; then
-  ac_vpsub='/^[	 ]*VPATH[	 ]*=/{
-s/:*\$(srcdir):*/:/
-s/:*\${srcdir}:*/:/
-s/:*@srcdir@:*/:/
-s/^\([^=]*=[	 ]*\):*/\1/
+  ac_vpsub='/^[	 ]*VPATH[	 ]*=[	 ]*/{
+h
+s///
+s/^/:/
+s/[	 ]*$/:/
+s/:\$(srcdir):/:/g
+s/:\${srcdir}:/:/g
+s/:@srcdir@:/:/g
+s/^:*//
 s/:*$//
+x
+s/\(=[	 ]*\).*/\1/
+G
+s/\n//
 s/^[^=]*=[	 ]*$//
 }'
 fi
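
The rewritten sed script fixes the old one's mishandling of srcdir entries
that abut other VPATH entries: it saves the whole line in the hold space,
normalizes the value by wrapping it in colons, removes every
":$(srcdir):"-style entry globally, strips leftover colons, and splices the
"VPATH =" prefix back. A simplified demonstration (tab-and-space classes
reduced to plain spaces):

    printf 'VPATH = one:$(srcdir):two\n' | sed '/^VPATH *= */{
    h
    s///
    s/^/:/
    s/ *$/:/
    s/:\$(srcdir):/:/g
    s/^:*//
    s/:*$//
    x
    s/\(= *\).*/\1/
    G
    s/\n//
    }'
    # prints: VPATH = one:two
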
@@ -14693,7 +14774,7 @@
   if test -z "$ac_t"; then
     break
   elif $ac_last_try; then
-    as_fn_error "could not make $CONFIG_HEADERS" "$LINENO" 5
+    as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
   else
     ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
   fi
@@ -14778,7 +14859,7 @@
 _ACAWK
 _ACEOF
 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-  as_fn_error "could not setup config headers machinery" "$LINENO" 5
+  as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
 fi # test -n "$CONFIG_HEADERS"
 
 
@@ -14791,7 +14872,7 @@
   esac
   case $ac_mode$ac_tag in
   :[FHL]*:*);;
-  :L* | :C*:*) as_fn_error "invalid tag \`$ac_tag'" "$LINENO" 5;;
+  :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5 ;;
   :[FH]-) ac_tag=-:-;;
   :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
   esac
@@ -14819,7 +14900,7 @@
 	   [\\/$]*) false;;
 	   *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
 	   esac ||
-	   as_fn_error "cannot find input file: \`$ac_f'" "$LINENO" 5;;
+	   as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5 ;;
       esac
       case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
       as_fn_append ac_file_inputs " '$ac_f'"
@@ -14846,7 +14927,7 @@
 
     case $ac_tag in
     *:-:* | *:-) cat >"$tmp/stdin" \
-      || as_fn_error "could not create $ac_file" "$LINENO" 5 ;;
+      || as_fn_error $? "could not create $ac_file" "$LINENO" 5  ;;
     esac
     ;;
   esac
@@ -14977,22 +15058,22 @@
 $ac_datarootdir_hack
 "
 eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$tmp/subs.awk" >$tmp/out \
-  || as_fn_error "could not create $ac_file" "$LINENO" 5
+  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
 
 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
   { ac_out=`sed -n '/\${datarootdir}/p' "$tmp/out"`; test -n "$ac_out"; } &&
   { ac_out=`sed -n '/^[	 ]*datarootdir[	 ]*:*=/p' "$tmp/out"`; test -z "$ac_out"; } &&
   { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined.  Please make sure it is defined." >&5
+which seems to be undefined.  Please make sure it is defined" >&5
 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined.  Please make sure it is defined." >&2;}
+which seems to be undefined.  Please make sure it is defined" >&2;}
 
   rm -f "$tmp/stdin"
   case $ac_file in
   -) cat "$tmp/out" && rm -f "$tmp/out";;
   *) rm -f "$ac_file" && mv "$tmp/out" "$ac_file";;
   esac \
-  || as_fn_error "could not create $ac_file" "$LINENO" 5
+  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
  ;;
   :H)
   #
@@ -15003,19 +15084,19 @@
       $as_echo "/* $configure_input  */" \
       && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs"
     } >"$tmp/config.h" \
-      || as_fn_error "could not create $ac_file" "$LINENO" 5
+      || as_fn_error $? "could not create $ac_file" "$LINENO" 5
     if diff "$ac_file" "$tmp/config.h" >/dev/null 2>&1; then
       { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
 $as_echo "$as_me: $ac_file is unchanged" >&6;}
     else
       rm -f "$ac_file"
       mv "$tmp/config.h" "$ac_file" \
-	|| as_fn_error "could not create $ac_file" "$LINENO" 5
+	|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
     fi
   else
     $as_echo "/* $configure_input  */" \
       && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" \
-      || as_fn_error "could not create -" "$LINENO" 5
+      || as_fn_error $? "could not create -" "$LINENO" 5
   fi
  ;;
 
@@ -15035,7 +15116,7 @@
 ac_clean_files=$ac_clean_files_save
 
 test $ac_write_fail = 0 ||
-  as_fn_error "write failure creating $CONFIG_STATUS" "$LINENO" 5
+  as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
 
 
 # configure is writing to config.log, and then calls config.status.
@@ -15056,7 +15137,7 @@
   exec 5>>config.log
   # Use ||, not &&, to avoid exiting from the if with $? = 1, which
   # would make configure fail if this is the last instruction.
-  $ac_cs_success || as_fn_exit $?
+  $ac_cs_success || as_fn_exit 1
 fi
 if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
   { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
diff --git a/configure.in b/configure.in
index 146289f..3bf46b0 100644
--- a/configure.in
+++ b/configure.in
@@ -518,6 +518,18 @@
 fi
 
 
+AC_MSG_CHECKING([for -Wl,--no-as-needed])
+save_LDFLAGS="$LDFLAGS"
+LDFLAGS="$LDFLAGS -Wl,--no-as-needed"
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[]], [[]])],
+  [NO_AS_NEEDED="-Wl,--no-as-needed"
+   AC_MSG_RESULT([yes])],
+  [NO_AS_NEEDED=""
+   AC_MSG_RESULT([no])])
+LDFLAGS="$save_LDFLAGS"
+AC_SUBST(NO_AS_NEEDED)
+
+
 # checks for UNIX variants that set C preprocessor variables
 AC_USE_SYSTEM_EXTENSIONS
 
@@ -1286,6 +1298,23 @@
 AC_HEADER_DIRENT
 AC_HEADER_MAJOR
 
+# On Darwin (OS X) net/if.h requires sys/socket.h to be imported first.
+AC_CHECK_HEADERS([net/if.h], [], [],
+[#include <stdio.h>
+#ifdef STDC_HEADERS
+# include <stdlib.h>
+# include <stddef.h>
+#else
+# ifdef HAVE_STDLIB_H
+#  include <stdlib.h>
+# endif
+#endif
+#ifdef HAVE_SYS_SOCKET_H
+# include <sys/socket.h>
+#endif
+])
+
+
 # On Solaris, term.h requires curses.h
 AC_CHECK_HEADERS(term.h,,,[
 #ifdef HAVE_CURSES_H
@@ -2500,14 +2529,16 @@
  futimens futimes \
  gai_strerror getgroups getlogin getloadavg getpeername getpgid getpid \
  getpriority getresuid getresgid getpwent getspnam getspent getsid getwd \
+ if_nameindex \
  initgroups kill killpg lchmod lchown lockf linkat lstat lutimes mbrtowc mkdirat mkfifo \
  mkfifoat mknod mknodat mktime mremap nice openat pathconf pause plock poll \
  posix_fallocate posix_fadvise pread \
- pthread_init putenv pwrite readlink readlinkat readv realpath renameat \
+ pthread_init pthread_kill putenv pwrite readlink readlinkat readv realpath renameat \
  select sem_open sem_timedwait sem_getvalue sem_unlink sendfile setegid seteuid \
  setgid sethostname \
  setlocale setregid setreuid setresuid setresgid setsid setpgid setpgrp setpriority setuid setvbuf \
- sigaction sigaltstack siginterrupt sigrelse snprintf strftime strlcpy symlinkat sync \
+ sigaction sigaltstack siginterrupt sigpending \
+ sigrelse sigwait snprintf strftime strlcpy symlinkat sync \
  sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \
  truncate uname unlinkat unsetenv utimensat utimes waitid waitpid wait3 wait4 \
  wcscoll wcsftime wcsxfrm writev _getpty)
@@ -3355,7 +3386,7 @@
 LIBS="$LIBS $LIBM"
 
 AC_CHECK_FUNCS([acosh asinh atanh copysign erf erfc expm1 finite gamma])
-AC_CHECK_FUNCS([hypot lgamma log1p round tgamma])
+AC_CHECK_FUNCS([hypot lgamma log1p log2 round tgamma])
 AC_CHECK_DECLS([isinf, isnan, isfinite], [], [], [[#include <math.h>]])
 
 # On FreeBSD 6.2, it appears that tanh(-0.) returns 0. instead of
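
log2 joins the probed math functions because Python 3.3 adds math.log2(); on a
libm without it, the math module falls back to computing the value from other
log functions. A hypothetical standalone form of the link-only probe:

    printf 'char log2 ();\nint main (void) { return log2 (); }\n' > conftest.c
    ${CC:-cc} conftest.c -lm -o conftest >/dev/null 2>&1 &&
      echo '#define HAVE_LOG2 1'
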
diff --git a/pyconfig.h.in b/pyconfig.h.in
index 89565a3..95d71c9 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -389,6 +389,9 @@
 /* Define to 1 if you have the <ieeefp.h> header file. */
 #undef HAVE_IEEEFP_H
 
+/* Define to 1 if you have the `if_nameindex' function. */
+#undef HAVE_IF_NAMEINDEX
+
 /* Define if you have the 'inet_aton' function. */
 #undef HAVE_INET_ATON
 
@@ -476,6 +479,9 @@
 /* Define to 1 if you have the `log1p' function. */
 #undef HAVE_LOG1P
 
+/* Define to 1 if you have the `log2' function. */
+#undef HAVE_LOG2
+
 /* Define this if you have the type long double. */
 #undef HAVE_LONG_DOUBLE
 
@@ -491,7 +497,7 @@
 /* Define this if you have the makedev macro. */
 #undef HAVE_MAKEDEV
 
-/* Define this if mallopt can set malloc mmap threshold. */
+/* Define if mallopt can set malloc mmap threshold. */
 #undef HAVE_MALLOPT_MMAP_THRESHOLD
 
 /* Define to 1 if you have the `mbrtowc' function. */
@@ -533,6 +539,9 @@
 /* Define to 1 if you have the <netpacket/packet.h> header file. */
 #undef HAVE_NETPACKET_PACKET_H
 
+/* Define to 1 if you have the <net/if.h> header file. */
+#undef HAVE_NET_IF_H
+
 /* Define to 1 if you have the `nice' function. */
 #undef HAVE_NICE
 
@@ -587,6 +596,9 @@
 /* Define to 1 if you have the `pthread_init' function. */
 #undef HAVE_PTHREAD_INIT
 
+/* Define to 1 if you have the `pthread_kill' function. */
+#undef HAVE_PTHREAD_KILL
+
 /* Define to 1 if you have the `pthread_sigmask' function. */
 #undef HAVE_PTHREAD_SIGMASK
 
@@ -722,9 +734,15 @@
 /* Define to 1 if you have the <signal.h> header file. */
 #undef HAVE_SIGNAL_H
 
+/* Define to 1 if you have the `sigpending' function. */
+#undef HAVE_SIGPENDING
+
 /* Define to 1 if you have the `sigrelse' function. */
 #undef HAVE_SIGRELSE
 
+/* Define to 1 if you have the `sigwait' function. */
+#undef HAVE_SIGWAIT
+
 /* Define to 1 if you have the `snprintf' function. */
 #undef HAVE_SNPRINTF
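
These pyconfig.h.in lines are only templates; config.status rewrites each
matching "#undef" into a definition (via the generated defines.awk program
seen earlier in this diff). A sed one-liner conveying the effect for one of
the new symbols:

    sed 's|^#undef HAVE_SIGWAIT$|#define HAVE_SIGWAIT 1|' pyconfig.h.in | grep SIGWAIT
    # -> #define HAVE_SIGWAIT 1   (modelling the case where sigwait was found)
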
 
diff --git a/setup.py b/setup.py
index cbaf1ab..9d46425 100644
--- a/setup.py
+++ b/setup.py
@@ -1353,14 +1353,11 @@
         if platform == 'win32':
             multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
                                      '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
                                      '_multiprocessing/win32_functions.c'
                                    ]
 
         else:
             multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
                                    ]
             if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
                 sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):