initial import of the packaging package in the standard library
diff --git a/Lib/packaging/__init__.py b/Lib/packaging/__init__.py
new file mode 100644
index 0000000..93b6117
--- /dev/null
+++ b/Lib/packaging/__init__.py
@@ -0,0 +1,17 @@
+"""Support for packaging, distribution and installation of Python projects.
+
+Third-party tools can use parts of packaging as building blocks
+without causing the other modules to be imported:
+
+    import packaging.version
+    import packaging.metadata
+    import packaging.pypi.simple
+    import packaging.tests.pypi_server
+"""
+
+from logging import getLogger
+
+__all__ = ['__version__', 'logger']
+
+__version__ = "1.0a3"
+logger = getLogger('packaging')
diff --git a/Lib/packaging/_trove.py b/Lib/packaging/_trove.py
new file mode 100644
index 0000000..9a8719c
--- /dev/null
+++ b/Lib/packaging/_trove.py
@@ -0,0 +1,552 @@
+"""Temporary helper for create."""
+
+# XXX get the list from PyPI and cache it instead of hardcoding
+
+# XXX see if it would be more useful to store it as another structure
+# than a list of strings
+
+all_classifiers = [
+'Development Status :: 1 - Planning',
+'Development Status :: 2 - Pre-Alpha',
+'Development Status :: 3 - Alpha',
+'Development Status :: 4 - Beta',
+'Development Status :: 5 - Production/Stable',
+'Development Status :: 6 - Mature',
+'Development Status :: 7 - Inactive',
+'Environment :: Console',
+'Environment :: Console :: Curses',
+'Environment :: Console :: Framebuffer',
+'Environment :: Console :: Newt',
+'Environment :: Console :: svgalib',
+"Environment :: Handhelds/PDA's",
+'Environment :: MacOS X',
+'Environment :: MacOS X :: Aqua',
+'Environment :: MacOS X :: Carbon',
+'Environment :: MacOS X :: Cocoa',
+'Environment :: No Input/Output (Daemon)',
+'Environment :: Other Environment',
+'Environment :: Plugins',
+'Environment :: Web Environment',
+'Environment :: Web Environment :: Buffet',
+'Environment :: Web Environment :: Mozilla',
+'Environment :: Web Environment :: ToscaWidgets',
+'Environment :: Win32 (MS Windows)',
+'Environment :: X11 Applications',
+'Environment :: X11 Applications :: Gnome',
+'Environment :: X11 Applications :: GTK',
+'Environment :: X11 Applications :: KDE',
+'Environment :: X11 Applications :: Qt',
+'Framework :: BFG',
+'Framework :: Buildout',
+'Framework :: Chandler',
+'Framework :: CubicWeb',
+'Framework :: Django',
+'Framework :: IDLE',
+'Framework :: Paste',
+'Framework :: Plone',
+'Framework :: Pylons',
+'Framework :: Setuptools Plugin',
+'Framework :: Trac',
+'Framework :: TurboGears',
+'Framework :: TurboGears :: Applications',
+'Framework :: TurboGears :: Widgets',
+'Framework :: Twisted',
+'Framework :: ZODB',
+'Framework :: Zope2',
+'Framework :: Zope3',
+'Intended Audience :: Customer Service',
+'Intended Audience :: Developers',
+'Intended Audience :: Education',
+'Intended Audience :: End Users/Desktop',
+'Intended Audience :: Financial and Insurance Industry',
+'Intended Audience :: Healthcare Industry',
+'Intended Audience :: Information Technology',
+'Intended Audience :: Legal Industry',
+'Intended Audience :: Manufacturing',
+'Intended Audience :: Other Audience',
+'Intended Audience :: Religion',
+'Intended Audience :: Science/Research',
+'Intended Audience :: System Administrators',
+'Intended Audience :: Telecommunications Industry',
+'License :: Aladdin Free Public License (AFPL)',
+'License :: DFSG approved',
+'License :: Eiffel Forum License (EFL)',
+'License :: Free For Educational Use',
+'License :: Free For Home Use',
+'License :: Free for non-commercial use',
+'License :: Freely Distributable',
+'License :: Free To Use But Restricted',
+'License :: Freeware',
+'License :: Netscape Public License (NPL)',
+'License :: Nokia Open Source License (NOKOS)',
+'License :: OSI Approved',
+'License :: OSI Approved :: Academic Free License (AFL)',
+'License :: OSI Approved :: Apache Software License',
+'License :: OSI Approved :: Apple Public Source License',
+'License :: OSI Approved :: Artistic License',
+'License :: OSI Approved :: Attribution Assurance License',
+'License :: OSI Approved :: BSD License',
+'License :: OSI Approved :: Common Public License',
+'License :: OSI Approved :: Eiffel Forum License',
+'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)',
+'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)',
+'License :: OSI Approved :: GNU Affero General Public License v3',
+'License :: OSI Approved :: GNU Free Documentation License (FDL)',
+'License :: OSI Approved :: GNU General Public License (GPL)',
+'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
+'License :: OSI Approved :: IBM Public License',
+'License :: OSI Approved :: Intel Open Source License',
+'License :: OSI Approved :: ISC License (ISCL)',
+'License :: OSI Approved :: Jabber Open Source License',
+'License :: OSI Approved :: MIT License',
+'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)',
+'License :: OSI Approved :: Motosoto License',
+'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)',
+'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)',
+'License :: OSI Approved :: Nethack General Public License',
+'License :: OSI Approved :: Nokia Open Source License',
+'License :: OSI Approved :: Open Group Test Suite License',
+'License :: OSI Approved :: Python License (CNRI Python License)',
+'License :: OSI Approved :: Python Software Foundation License',
+'License :: OSI Approved :: Qt Public License (QPL)',
+'License :: OSI Approved :: Ricoh Source Code Public License',
+'License :: OSI Approved :: Sleepycat License',
+'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)',
+'License :: OSI Approved :: Sun Public License',
+'License :: OSI Approved :: University of Illinois/NCSA Open Source License',
+'License :: OSI Approved :: Vovida Software License 1.0',
+'License :: OSI Approved :: W3C License',
+'License :: OSI Approved :: X.Net License',
+'License :: OSI Approved :: zlib/libpng License',
+'License :: OSI Approved :: Zope Public License',
+'License :: Other/Proprietary License',
+'License :: Public Domain',
+'License :: Repoze Public License',
+'Natural Language :: Afrikaans',
+'Natural Language :: Arabic',
+'Natural Language :: Bengali',
+'Natural Language :: Bosnian',
+'Natural Language :: Bulgarian',
+'Natural Language :: Catalan',
+'Natural Language :: Chinese (Simplified)',
+'Natural Language :: Chinese (Traditional)',
+'Natural Language :: Croatian',
+'Natural Language :: Czech',
+'Natural Language :: Danish',
+'Natural Language :: Dutch',
+'Natural Language :: English',
+'Natural Language :: Esperanto',
+'Natural Language :: Finnish',
+'Natural Language :: French',
+'Natural Language :: German',
+'Natural Language :: Greek',
+'Natural Language :: Hebrew',
+'Natural Language :: Hindi',
+'Natural Language :: Hungarian',
+'Natural Language :: Icelandic',
+'Natural Language :: Indonesian',
+'Natural Language :: Italian',
+'Natural Language :: Japanese',
+'Natural Language :: Javanese',
+'Natural Language :: Korean',
+'Natural Language :: Latin',
+'Natural Language :: Latvian',
+'Natural Language :: Macedonian',
+'Natural Language :: Malay',
+'Natural Language :: Marathi',
+'Natural Language :: Norwegian',
+'Natural Language :: Panjabi',
+'Natural Language :: Persian',
+'Natural Language :: Polish',
+'Natural Language :: Portuguese',
+'Natural Language :: Portuguese (Brazilian)',
+'Natural Language :: Romanian',
+'Natural Language :: Russian',
+'Natural Language :: Serbian',
+'Natural Language :: Slovak',
+'Natural Language :: Slovenian',
+'Natural Language :: Spanish',
+'Natural Language :: Swedish',
+'Natural Language :: Tamil',
+'Natural Language :: Telugu',
+'Natural Language :: Thai',
+'Natural Language :: Turkish',
+'Natural Language :: Ukranian',
+'Natural Language :: Urdu',
+'Natural Language :: Vietnamese',
+'Operating System :: BeOS',
+'Operating System :: MacOS',
+'Operating System :: MacOS :: MacOS 9',
+'Operating System :: MacOS :: MacOS X',
+'Operating System :: Microsoft',
+'Operating System :: Microsoft :: MS-DOS',
+'Operating System :: Microsoft :: Windows',
+'Operating System :: Microsoft :: Windows :: Windows 3.1 or Earlier',
+'Operating System :: Microsoft :: Windows :: Windows 95/98/2000',
+'Operating System :: Microsoft :: Windows :: Windows CE',
+'Operating System :: Microsoft :: Windows :: Windows NT/2000',
+'Operating System :: OS/2',
+'Operating System :: OS Independent',
+'Operating System :: Other OS',
+'Operating System :: PalmOS',
+'Operating System :: PDA Systems',
+'Operating System :: POSIX',
+'Operating System :: POSIX :: AIX',
+'Operating System :: POSIX :: BSD',
+'Operating System :: POSIX :: BSD :: BSD/OS',
+'Operating System :: POSIX :: BSD :: FreeBSD',
+'Operating System :: POSIX :: BSD :: NetBSD',
+'Operating System :: POSIX :: BSD :: OpenBSD',
+'Operating System :: POSIX :: GNU Hurd',
+'Operating System :: POSIX :: HP-UX',
+'Operating System :: POSIX :: IRIX',
+'Operating System :: POSIX :: Linux',
+'Operating System :: POSIX :: Other',
+'Operating System :: POSIX :: SCO',
+'Operating System :: POSIX :: SunOS/Solaris',
+'Operating System :: Unix',
+'Programming Language :: Ada',
+'Programming Language :: APL',
+'Programming Language :: ASP',
+'Programming Language :: Assembly',
+'Programming Language :: Awk',
+'Programming Language :: Basic',
+'Programming Language :: C',
+'Programming Language :: C#',
+'Programming Language :: C++',
+'Programming Language :: Cold Fusion',
+'Programming Language :: Cython',
+'Programming Language :: Delphi/Kylix',
+'Programming Language :: Dylan',
+'Programming Language :: Eiffel',
+'Programming Language :: Emacs-Lisp',
+'Programming Language :: Erlang',
+'Programming Language :: Euler',
+'Programming Language :: Euphoria',
+'Programming Language :: Forth',
+'Programming Language :: Fortran',
+'Programming Language :: Haskell',
+'Programming Language :: Java',
+'Programming Language :: JavaScript',
+'Programming Language :: Lisp',
+'Programming Language :: Logo',
+'Programming Language :: ML',
+'Programming Language :: Modula',
+'Programming Language :: Objective C',
+'Programming Language :: Object Pascal',
+'Programming Language :: OCaml',
+'Programming Language :: Other',
+'Programming Language :: Other Scripting Engines',
+'Programming Language :: Pascal',
+'Programming Language :: Perl',
+'Programming Language :: PHP',
+'Programming Language :: Pike',
+'Programming Language :: Pliant',
+'Programming Language :: PL/SQL',
+'Programming Language :: PROGRESS',
+'Programming Language :: Prolog',
+'Programming Language :: Python',
+'Programming Language :: Python :: 2',
+'Programming Language :: Python :: 2.3',
+'Programming Language :: Python :: 2.4',
+'Programming Language :: Python :: 2.5',
+'Programming Language :: Python :: 2.6',
+'Programming Language :: Python :: 2.7',
+'Programming Language :: Python :: 3',
+'Programming Language :: Python :: 3.0',
+'Programming Language :: Python :: 3.1',
+'Programming Language :: Python :: 3.2',
+'Programming Language :: REBOL',
+'Programming Language :: Rexx',
+'Programming Language :: Ruby',
+'Programming Language :: Scheme',
+'Programming Language :: Simula',
+'Programming Language :: Smalltalk',
+'Programming Language :: SQL',
+'Programming Language :: Tcl',
+'Programming Language :: Unix Shell',
+'Programming Language :: Visual Basic',
+'Programming Language :: XBasic',
+'Programming Language :: YACC',
+'Programming Language :: Zope',
+'Topic :: Adaptive Technologies',
+'Topic :: Artistic Software',
+'Topic :: Communications',
+'Topic :: Communications :: BBS',
+'Topic :: Communications :: Chat',
+'Topic :: Communications :: Chat :: AOL Instant Messenger',
+'Topic :: Communications :: Chat :: ICQ',
+'Topic :: Communications :: Chat :: Internet Relay Chat',
+'Topic :: Communications :: Chat :: Unix Talk',
+'Topic :: Communications :: Conferencing',
+'Topic :: Communications :: Email',
+'Topic :: Communications :: Email :: Address Book',
+'Topic :: Communications :: Email :: Email Clients (MUA)',
+'Topic :: Communications :: Email :: Filters',
+'Topic :: Communications :: Email :: Mailing List Servers',
+'Topic :: Communications :: Email :: Mail Transport Agents',
+'Topic :: Communications :: Email :: Post-Office',
+'Topic :: Communications :: Email :: Post-Office :: IMAP',
+'Topic :: Communications :: Email :: Post-Office :: POP3',
+'Topic :: Communications :: Fax',
+'Topic :: Communications :: FIDO',
+'Topic :: Communications :: File Sharing',
+'Topic :: Communications :: File Sharing :: Gnutella',
+'Topic :: Communications :: File Sharing :: Napster',
+'Topic :: Communications :: Ham Radio',
+'Topic :: Communications :: Internet Phone',
+'Topic :: Communications :: Telephony',
+'Topic :: Communications :: Usenet News',
+'Topic :: Database',
+'Topic :: Database :: Database Engines/Servers',
+'Topic :: Database :: Front-Ends',
+'Topic :: Desktop Environment',
+'Topic :: Desktop Environment :: File Managers',
+'Topic :: Desktop Environment :: Gnome',
+'Topic :: Desktop Environment :: GNUstep',
+'Topic :: Desktop Environment :: K Desktop Environment (KDE)',
+'Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes',
+'Topic :: Desktop Environment :: PicoGUI',
+'Topic :: Desktop Environment :: PicoGUI :: Applications',
+'Topic :: Desktop Environment :: PicoGUI :: Themes',
+'Topic :: Desktop Environment :: Screen Savers',
+'Topic :: Desktop Environment :: Window Managers',
+'Topic :: Desktop Environment :: Window Managers :: Afterstep',
+'Topic :: Desktop Environment :: Window Managers :: Afterstep :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Applets',
+'Topic :: Desktop Environment :: Window Managers :: Blackbox',
+'Topic :: Desktop Environment :: Window Managers :: Blackbox :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: CTWM',
+'Topic :: Desktop Environment :: Window Managers :: CTWM :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Epplets',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR15',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR16',
+'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR17',
+'Topic :: Desktop Environment :: Window Managers :: Fluxbox',
+'Topic :: Desktop Environment :: Window Managers :: Fluxbox :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: FVWM',
+'Topic :: Desktop Environment :: Window Managers :: FVWM :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: IceWM',
+'Topic :: Desktop Environment :: Window Managers :: IceWM :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: MetaCity',
+'Topic :: Desktop Environment :: Window Managers :: MetaCity :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Oroborus',
+'Topic :: Desktop Environment :: Window Managers :: Oroborus :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Sawfish',
+'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes 0.30',
+'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes pre-0.30',
+'Topic :: Desktop Environment :: Window Managers :: Waimea',
+'Topic :: Desktop Environment :: Window Managers :: Waimea :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: Window Maker',
+'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Applets',
+'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Themes',
+'Topic :: Desktop Environment :: Window Managers :: XFCE',
+'Topic :: Desktop Environment :: Window Managers :: XFCE :: Themes',
+'Topic :: Documentation',
+'Topic :: Education',
+'Topic :: Education :: Computer Aided Instruction (CAI)',
+'Topic :: Education :: Testing',
+'Topic :: Games/Entertainment',
+'Topic :: Games/Entertainment :: Arcade',
+'Topic :: Games/Entertainment :: Board Games',
+'Topic :: Games/Entertainment :: First Person Shooters',
+'Topic :: Games/Entertainment :: Fortune Cookies',
+'Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)',
+'Topic :: Games/Entertainment :: Puzzle Games',
+'Topic :: Games/Entertainment :: Real Time Strategy',
+'Topic :: Games/Entertainment :: Role-Playing',
+'Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games',
+'Topic :: Games/Entertainment :: Simulation',
+'Topic :: Games/Entertainment :: Turn Based Strategy',
+'Topic :: Home Automation',
+'Topic :: Internet',
+'Topic :: Internet :: File Transfer Protocol (FTP)',
+'Topic :: Internet :: Finger',
+'Topic :: Internet :: Log Analysis',
+'Topic :: Internet :: Name Service (DNS)',
+'Topic :: Internet :: Proxy Servers',
+'Topic :: Internet :: WAP',
+'Topic :: Internet :: WWW/HTTP',
+'Topic :: Internet :: WWW/HTTP :: Browsers',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary',
+'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters',
+'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
+'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
+'Topic :: Internet :: WWW/HTTP :: Site Management',
+'Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking',
+'Topic :: Internet :: WWW/HTTP :: WSGI',
+'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
+'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
+'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
+'Topic :: Internet :: Z39.50',
+'Topic :: Multimedia',
+'Topic :: Multimedia :: Graphics',
+'Topic :: Multimedia :: Graphics :: 3D Modeling',
+'Topic :: Multimedia :: Graphics :: 3D Rendering',
+'Topic :: Multimedia :: Graphics :: Capture',
+'Topic :: Multimedia :: Graphics :: Capture :: Digital Camera',
+'Topic :: Multimedia :: Graphics :: Capture :: Scanners',
+'Topic :: Multimedia :: Graphics :: Capture :: Screen Capture',
+'Topic :: Multimedia :: Graphics :: Editors',
+'Topic :: Multimedia :: Graphics :: Editors :: Raster-Based',
+'Topic :: Multimedia :: Graphics :: Editors :: Vector-Based',
+'Topic :: Multimedia :: Graphics :: Graphics Conversion',
+'Topic :: Multimedia :: Graphics :: Presentation',
+'Topic :: Multimedia :: Graphics :: Viewers',
+'Topic :: Multimedia :: Sound/Audio',
+'Topic :: Multimedia :: Sound/Audio :: Analysis',
+'Topic :: Multimedia :: Sound/Audio :: Capture/Recording',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping',
+'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Writing',
+'Topic :: Multimedia :: Sound/Audio :: Conversion',
+'Topic :: Multimedia :: Sound/Audio :: Editors',
+'Topic :: Multimedia :: Sound/Audio :: MIDI',
+'Topic :: Multimedia :: Sound/Audio :: Mixers',
+'Topic :: Multimedia :: Sound/Audio :: Players',
+'Topic :: Multimedia :: Sound/Audio :: Players :: MP3',
+'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis',
+'Topic :: Multimedia :: Sound/Audio :: Speech',
+'Topic :: Multimedia :: Video',
+'Topic :: Multimedia :: Video :: Capture',
+'Topic :: Multimedia :: Video :: Conversion',
+'Topic :: Multimedia :: Video :: Display',
+'Topic :: Multimedia :: Video :: Non-Linear Editor',
+'Topic :: Office/Business',
+'Topic :: Office/Business :: Financial',
+'Topic :: Office/Business :: Financial :: Accounting',
+'Topic :: Office/Business :: Financial :: Investment',
+'Topic :: Office/Business :: Financial :: Point-Of-Sale',
+'Topic :: Office/Business :: Financial :: Spreadsheet',
+'Topic :: Office/Business :: Groupware',
+'Topic :: Office/Business :: News/Diary',
+'Topic :: Office/Business :: Office Suites',
+'Topic :: Office/Business :: Scheduling',
+'Topic :: Other/Nonlisted Topic',
+'Topic :: Printing',
+'Topic :: Religion',
+'Topic :: Scientific/Engineering',
+'Topic :: Scientific/Engineering :: Artificial Intelligence',
+'Topic :: Scientific/Engineering :: Astronomy',
+'Topic :: Scientific/Engineering :: Atmospheric Science',
+'Topic :: Scientific/Engineering :: Bio-Informatics',
+'Topic :: Scientific/Engineering :: Chemistry',
+'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)',
+'Topic :: Scientific/Engineering :: GIS',
+'Topic :: Scientific/Engineering :: Human Machine Interfaces',
+'Topic :: Scientific/Engineering :: Image Recognition',
+'Topic :: Scientific/Engineering :: Information Analysis',
+'Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator',
+'Topic :: Scientific/Engineering :: Mathematics',
+'Topic :: Scientific/Engineering :: Medical Science Apps.',
+'Topic :: Scientific/Engineering :: Physics',
+'Topic :: Scientific/Engineering :: Visualization',
+'Topic :: Security',
+'Topic :: Security :: Cryptography',
+'Topic :: Sociology',
+'Topic :: Sociology :: Genealogy',
+'Topic :: Sociology :: History',
+'Topic :: Software Development',
+'Topic :: Software Development :: Assemblers',
+'Topic :: Software Development :: Bug Tracking',
+'Topic :: Software Development :: Build Tools',
+'Topic :: Software Development :: Code Generators',
+'Topic :: Software Development :: Compilers',
+'Topic :: Software Development :: Debuggers',
+'Topic :: Software Development :: Disassemblers',
+'Topic :: Software Development :: Documentation',
+'Topic :: Software Development :: Embedded Systems',
+'Topic :: Software Development :: Internationalization',
+'Topic :: Software Development :: Interpreters',
+'Topic :: Software Development :: Libraries',
+'Topic :: Software Development :: Libraries :: Application Frameworks',
+'Topic :: Software Development :: Libraries :: Java Libraries',
+'Topic :: Software Development :: Libraries :: Perl Modules',
+'Topic :: Software Development :: Libraries :: PHP Classes',
+'Topic :: Software Development :: Libraries :: Pike Modules',
+'Topic :: Software Development :: Libraries :: pygame',
+'Topic :: Software Development :: Libraries :: Python Modules',
+'Topic :: Software Development :: Libraries :: Ruby Modules',
+'Topic :: Software Development :: Libraries :: Tcl Extensions',
+'Topic :: Software Development :: Localization',
+'Topic :: Software Development :: Object Brokering',
+'Topic :: Software Development :: Object Brokering :: CORBA',
+'Topic :: Software Development :: Pre-processors',
+'Topic :: Software Development :: Quality Assurance',
+'Topic :: Software Development :: Testing',
+'Topic :: Software Development :: Testing :: Traffic Generation',
+'Topic :: Software Development :: User Interfaces',
+'Topic :: Software Development :: Version Control',
+'Topic :: Software Development :: Version Control :: CVS',
+'Topic :: Software Development :: Version Control :: RCS',
+'Topic :: Software Development :: Version Control :: SCCS',
+'Topic :: Software Development :: Widget Sets',
+'Topic :: System',
+'Topic :: System :: Archiving',
+'Topic :: System :: Archiving :: Backup',
+'Topic :: System :: Archiving :: Compression',
+'Topic :: System :: Archiving :: Mirroring',
+'Topic :: System :: Archiving :: Packaging',
+'Topic :: System :: Benchmark',
+'Topic :: System :: Boot',
+'Topic :: System :: Boot :: Init',
+'Topic :: System :: Clustering',
+'Topic :: System :: Console Fonts',
+'Topic :: System :: Distributed Computing',
+'Topic :: System :: Emulators',
+'Topic :: System :: Filesystems',
+'Topic :: System :: Hardware',
+'Topic :: System :: Hardware :: Hardware Drivers',
+'Topic :: System :: Hardware :: Mainframes',
+'Topic :: System :: Hardware :: Symmetric Multi-processing',
+'Topic :: System :: Installation/Setup',
+'Topic :: System :: Logging',
+'Topic :: System :: Monitoring',
+'Topic :: System :: Networking',
+'Topic :: System :: Networking :: Firewalls',
+'Topic :: System :: Networking :: Monitoring',
+'Topic :: System :: Networking :: Monitoring :: Hardware Watchdog',
+'Topic :: System :: Networking :: Time Synchronization',
+'Topic :: System :: Operating System',
+'Topic :: System :: Operating System Kernels',
+'Topic :: System :: Operating System Kernels :: BSD',
+'Topic :: System :: Operating System Kernels :: GNU Hurd',
+'Topic :: System :: Operating System Kernels :: Linux',
+'Topic :: System :: Power (UPS)',
+'Topic :: System :: Recovery Tools',
+'Topic :: System :: Shells',
+'Topic :: System :: Software Distribution',
+'Topic :: System :: Systems Administration',
+'Topic :: System :: Systems Administration :: Authentication/Directory',
+'Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP',
+'Topic :: System :: Systems Administration :: Authentication/Directory :: NIS',
+'Topic :: System :: System Shells',
+'Topic :: Terminals',
+'Topic :: Terminals :: Serial',
+'Topic :: Terminals :: Telnet',
+'Topic :: Terminals :: Terminal Emulators/X Terminals',
+'Topic :: Text Editors',
+'Topic :: Text Editors :: Documentation',
+'Topic :: Text Editors :: Emacs',
+'Topic :: Text Editors :: Integrated Development Environments (IDE)',
+'Topic :: Text Editors :: Text Processing',
+'Topic :: Text Editors :: Word Processors',
+'Topic :: Text Processing',
+'Topic :: Text Processing :: Filters',
+'Topic :: Text Processing :: Fonts',
+'Topic :: Text Processing :: General',
+'Topic :: Text Processing :: Indexing',
+'Topic :: Text Processing :: Linguistic',
+'Topic :: Text Processing :: Markup',
+'Topic :: Text Processing :: Markup :: HTML',
+'Topic :: Text Processing :: Markup :: LaTeX',
+'Topic :: Text Processing :: Markup :: SGML',
+'Topic :: Text Processing :: Markup :: VRML',
+'Topic :: Text Processing :: Markup :: XML',
+'Topic :: Utilities',
+]
diff --git a/Lib/packaging/command/__init__.py b/Lib/packaging/command/__init__.py
new file mode 100644
index 0000000..6a37850
--- /dev/null
+++ b/Lib/packaging/command/__init__.py
@@ -0,0 +1,56 @@
+"""Subpackage containing all standard commands."""
+
+from packaging.errors import PackagingModuleError
+from packaging.util import resolve_name
+
+__all__ = ['get_command_names', 'set_command', 'get_command_class',
+           'STANDARD_COMMANDS']
+
# Registry mapping command name -> implementation.  Values start out as
# dotted-path strings ("package.module.class") and are replaced by the
# resolved class object the first time get_command_class() looks them up,
# so that importing this subpackage does not import every command module.
_COMMANDS = {
    'check': 'packaging.command.check.check',
    'test': 'packaging.command.test.test',
    'build': 'packaging.command.build.build',
    'build_py': 'packaging.command.build_py.build_py',
    'build_ext': 'packaging.command.build_ext.build_ext',
    'build_clib': 'packaging.command.build_clib.build_clib',
    'build_scripts': 'packaging.command.build_scripts.build_scripts',
    'clean': 'packaging.command.clean.clean',
    'install_dist': 'packaging.command.install_dist.install_dist',
    'install_lib': 'packaging.command.install_lib.install_lib',
    'install_headers': 'packaging.command.install_headers.install_headers',
    'install_scripts': 'packaging.command.install_scripts.install_scripts',
    'install_data': 'packaging.command.install_data.install_data',
    'install_distinfo':
        'packaging.command.install_distinfo.install_distinfo',
    'sdist': 'packaging.command.sdist.sdist',
    'bdist': 'packaging.command.bdist.bdist',
    'bdist_dumb': 'packaging.command.bdist_dumb.bdist_dumb',
    'bdist_wininst': 'packaging.command.bdist_wininst.bdist_wininst',
    'register': 'packaging.command.register.register',
    'upload': 'packaging.command.upload.upload',
    'upload_docs': 'packaging.command.upload_docs.upload_docs'}

# Names of the built-in commands, captured at import time (set_command()
# may add third-party commands to _COMMANDS afterwards).
STANDARD_COMMANDS = set(_COMMANDS)
+
+
def get_command_names():
    """Return the names of all registered commands, sorted alphabetically."""
    return sorted(_COMMANDS.keys())
+
+
def set_command(location):
    # Register (or override) a command in the global registry.  *location*
    # is a dotted path ("package.module.class") that is resolved to the
    # actual class; the registry key is the name the class itself reports.
    cls = resolve_name(location)
    # XXX we want to do the duck-type checking here
    _COMMANDS[cls.get_command_name()] = cls
+
+
def get_command_class(name):
    """Return the registered command class for *name*.

    Dotted-path entries in _COMMANDS are resolved to classes on first use
    and cached back into the registry, so command modules are imported
    lazily.

    Raises PackagingModuleError if *name* is not a registered command.
    """
    try:
        cls = _COMMANDS[name]
    except KeyError:
        raise PackagingModuleError("Invalid command %s" % name)
    # Resolve lazily-registered dotted paths outside the try block, so a
    # KeyError raised while importing the command module is not
    # misreported as an unknown command name.
    if isinstance(cls, str):
        cls = resolve_name(cls)
        _COMMANDS[name] = cls
    return cls
diff --git a/Lib/packaging/command/bdist.py b/Lib/packaging/command/bdist.py
new file mode 100644
index 0000000..4338a97
--- /dev/null
+++ b/Lib/packaging/command/bdist.py
@@ -0,0 +1,141 @@
+"""Create a built (binary) distribution.
+
+If a --formats option was given on the command line, this command will
+call the corresponding bdist_* commands; if the option was absent, a
+bdist_* command depending on the current platform will be called.
+"""
+
+import os
+
+from packaging import util
+from packaging.command.cmd import Command
+from packaging.errors import PackagingPlatformError, PackagingOptionError
+
+
def show_formats():
    """Print list of available formats (arguments to "--format" option).
    """
    from packaging.fancy_getopt import FancyGetopt
    # Build one pseudo-option entry per format, reusing the description
    # stored in the bdist.format_command table.
    entries = [("formats=" + fmt, None, bdist.format_command[fmt][1])
               for fmt in bdist.format_commands]
    pretty_printer = FancyGetopt(entries)
    pretty_printer.print_help("List of available distribution formats:")
+
+
class bdist(Command):
    """Create a built (binary) distribution.

    Dispatches to one bdist_* subcommand per requested format (see
    format_command below); the module docstring explains how the list of
    formats is chosen.
    """

    description = "create a built (binary) distribution"

    user_options = [('bdist-base=', 'b',
                     "temporary directory for creating built distributions"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % util.get_platform()),
                    ('formats=', None,
                     "formats for distribution (comma-separated list)"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in "
                     "[default: dist]"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('owner=', 'u',
                     "Owner name used when creating a tar file"
                     " [default: current user]"),
                    ('group=', 'g',
                     "Group name used when creating a tar file"
                     " [default: current group]"),
                   ]

    boolean_options = ['skip-build']

    help_options = [
        ('help-formats', None,
         "lists available distribution formats", show_formats),
        ]

    # This is of course very simplistic.  The various UNIX family operating
    # systems have their specific formats, but they are out of scope for us;
    # bdist_dumb is, well, dumb; it's more a building block for other
    # packaging tools than a real end-user binary format.
    default_format = {'posix': 'gztar',
                      'nt': 'zip',
                      'os2': 'zip'}

    # Establish the preferred order (for the --help-formats option).
    format_commands = ['gztar', 'bztar', 'ztar', 'tar',
                       'wininst', 'zip', 'msi']

    # And the real information: format -> (command name, description).
    format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file"),
                      'bztar': ('bdist_dumb', "bzip2'ed tar file"),
                      'ztar':  ('bdist_dumb', "compressed tar file"),
                      'tar':   ('bdist_dumb', "tar file"),
                      'wininst': ('bdist_wininst',
                                  "Windows executable installer"),
                      'zip':   ('bdist_dumb', "ZIP file"),
                      'msi':   ('bdist_msi',  "Microsoft Installer")
                      }

    def initialize_options(self):
        self.bdist_base = None
        self.plat_name = None
        self.formats = None
        self.dist_dir = None
        self.skip_build = False
        self.group = None
        self.owner = None

    def finalize_options(self):
        # have to finalize 'plat_name' before 'bdist_base'
        if self.plat_name is None:
            if self.skip_build:
                self.plat_name = util.get_platform()
            else:
                self.plat_name = self.get_finalized_command('build').plat_name

        # 'bdist_base' -- parent of per-built-distribution-format
        # temporary directories (eg. we'll probably have
        # "build/bdist.<plat>/dumb", etc.)
        if self.bdist_base is None:
            build_base = self.get_finalized_command('build').build_base
            self.bdist_base = os.path.join(build_base,
                                           'bdist.' + self.plat_name)

        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise PackagingPlatformError(
                    "don't know how to create built distributions "
                    "on platform %s" % os.name)

        if self.dist_dir is None:
            self.dist_dir = "dist"

    def run(self):
        # Figure out which sub-commands we need to run, one per format.
        commands = []
        for format in self.formats:
            try:
                commands.append(self.format_command[format][0])
            except KeyError:
                raise PackagingOptionError("invalid format '%s'" % format)

        # Reinitialize and run each command.
        for i, cmd_name in enumerate(commands):
            sub_cmd = self.get_reinitialized_command(cmd_name)

            # passing the owner and group names for tar archiving
            if cmd_name == 'bdist_dumb':
                sub_cmd.owner = self.owner
                sub_cmd.group = self.group

            # If we're going to need to run this command again, tell it to
            # keep its temporary files around so subsequent runs go faster.
            if cmd_name in commands[i + 1:]:
                sub_cmd.keep_temp = True
            self.run_command(cmd_name)
diff --git a/Lib/packaging/command/bdist_dumb.py b/Lib/packaging/command/bdist_dumb.py
new file mode 100644
index 0000000..f74b720
--- /dev/null
+++ b/Lib/packaging/command/bdist_dumb.py
@@ -0,0 +1,137 @@
+"""Create a "dumb" built distribution.
+
+A dumb distribution is just an archive meant to be unpacked under
+sys.prefix or sys.exec_prefix.
+"""
+
+import os
+
+from shutil import rmtree
+from sysconfig import get_python_version
+from packaging.util import get_platform
+from packaging.command.cmd import Command
+from packaging.errors import PackagingPlatformError
+from packaging import logger
+
+class bdist_dumb(Command):
+    """Create an archive of the files installed by 'install_dist'.
+
+    The archive is meant to be unpacked directly under sys.prefix or
+    sys.exec_prefix; no installer logic is embedded.
+    """
+
+    description = 'create a "dumb" built distribution'
+
+    # NOTE(review): 'bdist-dir=' and 'dist-dir=' both declare the short
+    # option 'd'; the later registration conflicts with the earlier one.
+    # Confirm against the option parser before relying on '-d'.
+    user_options = [('bdist-dir=', 'd',
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('format=', 'f',
+                     "archive format to create (tar, ztar, gztar, zip)"),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('relative', None,
+                     "build the archive using relative paths"
+                     "(default: false)"),
+                    ('owner=', 'u',
+                     "Owner name used when creating a tar file"
+                     " [default: current user]"),
+                    ('group=', 'g',
+                     "Group name used when creating a tar file"
+                     " [default: current group]"),
+                   ]
+
+    boolean_options = ['keep-temp', 'skip-build', 'relative']
+
+    # Archive format used when --format is not given, keyed by os.name.
+    default_format = { 'posix': 'gztar',
+                       'nt': 'zip',
+                       'os2': 'zip' }
+
+
+    def initialize_options(self):
+        """Set all user options to their pre-parsing defaults."""
+        self.bdist_dir = None
+        self.plat_name = None
+        self.format = None
+        self.keep_temp = False
+        self.dist_dir = None
+        self.skip_build = False
+        self.relative = False
+        self.owner = None
+        self.group = None
+
+    def finalize_options(self):
+        """Compute final option values.
+
+        Derives bdist_dir from the 'bdist' command, chooses a default
+        archive format for this platform (raising
+        PackagingPlatformError if os.name is unknown), and inherits
+        dist_dir and plat_name from 'bdist'.
+        """
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'dumb')
+
+        if self.format is None:
+            try:
+                self.format = self.default_format[os.name]
+            except KeyError:
+                raise PackagingPlatformError(("don't know how to create dumb built distributions " +
+                       "on platform %s") % os.name)
+
+        self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
+
+    def run(self):
+        """Build, pseudo-install into bdist_dir, then archive the tree."""
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.get_reinitialized_command('install_dist',
+                                                 reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        logger.info("installing to %s", self.bdist_dir)
+        self.run_command('install_dist')
+
+        # And make an archive relative to the root of the
+        # pseudo-installation tree.
+        archive_basename = "%s.%s" % (self.distribution.get_fullname(),
+                                      self.plat_name)
+
+        # OS/2 objects to any ":" characters in a filename (such as when
+        # a timestamp is used in a version) so change them to hyphens.
+        if os.name == "os2":
+            archive_basename = archive_basename.replace(":", "-")
+
+        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            # A relative archive cannot represent two different roots, so
+            # refuse when extension modules split base and platbase.
+            if (self.distribution.has_ext_modules() and
+                (install.install_base != install.install_platbase)):
+                raise PackagingPlatformError(
+                    "can't make a dumb built distribution where base and "
+                    "platbase are different (%r, %r)" %
+                    (install.install_base, install.install_platbase))
+            else:
+                archive_root = os.path.join(
+                    self.bdist_dir,
+                    self._ensure_relative(install.install_base))
+
+        # Make the archive
+        filename = self.make_archive(pseudoinstall_root,
+                                     self.format, root_dir=archive_root,
+                                     owner=self.owner, group=self.group)
+        # Tag the archive with the Python version only when it contains
+        # version-specific extension modules.
+        if self.distribution.has_ext_modules():
+            pyversion = get_python_version()
+        else:
+            pyversion = 'any'
+        self.distribution.dist_files.append(('bdist_dumb', pyversion,
+                                             filename))
+
+        if not self.keep_temp:
+            if self.dry_run:
+                logger.info('removing %s', self.bdist_dir)
+            else:
+                rmtree(self.bdist_dir)
+
+    def _ensure_relative(self, path):
+        # Strip a leading path separator (keeping any drive letter) so
+        # the path can be re-rooted under another directory with join.
+        # Originally copied from distutils' dir_util, since removed.
+        drive, path = os.path.splitdrive(path)
+        if path[0:1] == os.sep:
+            path = drive + path[1:]
+        return path
diff --git a/Lib/packaging/command/bdist_msi.py b/Lib/packaging/command/bdist_msi.py
new file mode 100644
index 0000000..493f8b3
--- /dev/null
+++ b/Lib/packaging/command/bdist_msi.py
@@ -0,0 +1,740 @@
+"""Create a Microsoft Installer (.msi) binary distribution."""
+
+# Copyright (C) 2005, 2006 Martin von Löwis
+# Licensed to PSF under a Contributor Agreement.
+
+import sys
+import os
+import msilib
+
+
+from sysconfig import get_python_version
+from shutil import rmtree
+from packaging.command.cmd import Command
+from packaging.version import NormalizedVersion
+from packaging.errors import PackagingOptionError
+from packaging import logger as log
+from packaging.util import get_platform
+from msilib import schema, sequence, text
+from msilib import Directory, Feature, Dialog, add_data
+
+class MSIVersion(NormalizedVersion):
+    """
+    MSI ProductVersion must be strictly numeric.
+    MSIVersion disallows prerelease and postrelease versions.
+    """
+    def __init__(self, *args, **kwargs):
+        super(MSIVersion, self).__init__(*args, **kwargs)
+        if not self.is_final:
+            raise ValueError("ProductVersion must be strictly numeric")
+
+class PyDialog(Dialog):
+    """Dialog class with a fixed layout: controls at the top, then a ruler,
+    then a list of buttons: back, next, cancel. Optionally a bitmap at the
+    left."""
+    def __init__(self, *args, **kw):
+        """Dialog(database, name, x, y, w, h, attributes, title, first,
+        default, cancel, bitmap=true)"""
+        Dialog.__init__(self, *args)
+        # Reserve the bottom 36 units for the button row and draw a
+        # separator line just above it.
+        ruler = self.h - 36
+        #if kw.get("bitmap", True):
+        #    self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
+        self.line("BottomLine", 0, ruler, self.w, 0)
+
+    def title(self, title):
+        "Set the title text of the dialog at the top."
+        # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
+        # text, in VerdanaBold10
+        self.text("Title", 15, 10, 320, 60, 0x30003,
+                  r"{\VerdanaBold10}%s" % title)
+
+    def back(self, title, next, name = "Back", active = 1):
+        """Add a back button with a given title, the tab-next button,
+        its name in the Control table, possibly initially disabled.
+
+        Return the button, so that events can be associated"""
+        if active:
+            flags = 3 # Visible|Enabled
+        else:
+            flags = 1 # Visible
+        return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next)
+
+    def cancel(self, title, next, name = "Cancel", active = 1):
+        """Add a cancel button with a given title, the tab-next button,
+        its name in the Control table, possibly initially disabled.
+
+        Return the button, so that events can be associated"""
+        if active:
+            flags = 3 # Visible|Enabled
+        else:
+            flags = 1 # Visible
+        return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
+
+    def next(self, title, next, name = "Next", active = 1):
+        """Add a Next button with a given title, the tab-next button,
+        its name in the Control table, possibly initially disabled.
+
+        Return the button, so that events can be associated"""
+        if active:
+            flags = 3 # Visible|Enabled
+        else:
+            flags = 1 # Visible
+        return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
+
+    def xbutton(self, name, title, next, xpos):
+        """Add a button with a given title, the tab-next button,
+        its name in the Control table, giving its x position; the
+        y-position is aligned with the other buttons.
+
+        Return the button, so that events can be associated"""
+        return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
+
+class bdist_msi(Command):
+
+    description = "create a Microsoft Installer (.msi) binary distribution"
+
+    user_options = [('bdist-dir=', None,
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('target-version=', None,
+                     "require a specific python version" +
+                     " on the target system"),
+                    ('no-target-compile', 'c',
+                     "do not compile .py to .pyc on the target system"),
+                    ('no-target-optimize', 'o',
+                     "do not compile .py to .pyo (optimized)"
+                     "on the target system"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('install-script=', None,
+                     "basename of installation script to be run after"
+                     "installation or before deinstallation"),
+                    ('pre-install-script=', None,
+                     "Fully qualified filename of a script to be run before "
+                     "any files are installed.  This script need not be in the "
+                     "distribution"),
+                   ]
+
+    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
+                       'skip-build']
+
+    # Python versions offered as installable targets in the MSI UI when
+    # no specific --target-version is requested.
+    all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
+                    '2.5', '2.6', '2.7', '2.8', '2.9',
+                    '3.0', '3.1', '3.2', '3.3', '3.4',
+                    '3.5', '3.6', '3.7', '3.8', '3.9']
+    # Sentinel version key for the "Python from another location" feature.
+    other_version = 'X'
+
+    def initialize_options(self):
+        """Set all user options to their pre-parsing defaults."""
+        self.bdist_dir = None
+        self.plat_name = None
+        self.keep_temp = False
+        self.no_target_compile = False
+        self.no_target_optimize = False
+        self.target_version = None
+        self.dist_dir = None
+        self.skip_build = False
+        self.install_script = None
+        self.pre_install_script = None
+        # List of Python versions the installer will target; computed in
+        # finalize_options.
+        self.versions = None
+
+    def finalize_options(self):
+        """Compute final option values and validate them.
+
+        Determines bdist_dir, the list of target Python versions, and
+        inherits dist_dir/plat_name from 'bdist'.  Raises
+        PackagingOptionError for an impossible target version, for the
+        unimplemented pre-install-script feature, or when
+        install_script is not among the distribution's scripts.
+        """
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'msi')
+        short_version = get_python_version()
+        # A distribution with extension modules is version-specific, so
+        # default the target to the running interpreter's version.
+        if (not self.target_version) and self.distribution.has_ext_modules():
+            self.target_version = short_version
+        if self.target_version:
+            self.versions = [self.target_version]
+            if not self.skip_build and self.distribution.has_ext_modules()\
+               and self.target_version != short_version:
+                raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \
+                      " option must be specified" % (short_version,))
+        else:
+            # Pure-Python project: offer every known version in the UI.
+            self.versions = list(self.all_versions)
+
+        self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
+
+        if self.pre_install_script:
+            raise PackagingOptionError("the pre-install-script feature is not yet implemented")
+
+        if self.install_script:
+            for script in self.distribution.scripts:
+                if self.install_script == os.path.basename(script):
+                    break
+            else:
+                raise PackagingOptionError("install_script '%s' not found in scripts" % \
+                      self.install_script)
+        # Filled in by add_files once the script's File table key is known.
+        self.install_script_key = None
+
+
+    def run(self):
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.get_reinitialized_command('install_dist',
+                                                 reinit_subcommands=True)
+        install.prefix = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        install_lib = self.get_reinitialized_command('install_lib')
+        # we do not want to include pyc or pyo files
+        install_lib.compile = False
+        install_lib.optimize = 0
+
+        if self.distribution.has_ext_modules():
+            # If we are building an installer for a Python version other
+            # than the one we are currently running, then we need to ensure
+            # our build_lib reflects the other Python version rather than ours.
+            # Note that for target_version!=sys.version, we must have skipped the
+            # build step, so there is no issue with enforcing the build of this
+            # version.
+            target_version = self.target_version
+            if not target_version:
+                assert self.skip_build, "Should have already checked this"
+                target_version = sys.version[0:3]
+            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+            build = self.get_finalized_command('build')
+            build.build_lib = os.path.join(build.build_base,
+                                           'lib' + plat_specifier)
+
+        log.info("installing to %s", self.bdist_dir)
+        install.ensure_finalized()
+
+        # avoid warning of 'install_lib' about installing
+        # into a directory not in sys.path
+        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+        install.run()
+
+        del sys.path[0]
+
+        self.mkpath(self.dist_dir)
+        fullname = self.distribution.get_fullname()
+        installer_name = self.get_installer_filename(fullname)
+        installer_name = os.path.abspath(installer_name)
+        if os.path.exists(installer_name): os.unlink(installer_name)
+
+        metadata = self.distribution.metadata
+        author = metadata.author
+        if not author:
+            author = metadata.maintainer
+        if not author:
+            author = "UNKNOWN"
+        version = MSIVersion(metadata.get_version())
+        # Prefix ProductName with Python x.y, so that
+        # it sorts together with the other Python packages
+        # in Add-Remove-Programs (APR)
+        fullname = self.distribution.get_fullname()
+        if self.target_version:
+            product_name = "Python %s %s" % (self.target_version, fullname)
+        else:
+            product_name = "Python %s" % (fullname)
+        self.db = msilib.init_database(installer_name, schema,
+                product_name, msilib.gen_uuid(),
+                str(version), author)
+        msilib.add_tables(self.db, sequence)
+        props = [('DistVersion', version)]
+        email = metadata.author_email or metadata.maintainer_email
+        if email:
+            props.append(("ARPCONTACT", email))
+        if metadata.url:
+            props.append(("ARPURLINFOABOUT", metadata.url))
+        if props:
+            add_data(self.db, 'Property', props)
+
+        self.add_find_python()
+        self.add_files()
+        self.add_scripts()
+        self.add_ui()
+        self.db.Commit()
+
+        if hasattr(self.distribution, 'dist_files'):
+            tup = 'bdist_msi', self.target_version or 'any', fullname
+            self.distribution.dist_files.append(tup)
+
+        if not self.keep_temp:
+            log.info("removing temporary build directory %s", self.bdist_dir)
+            if not self.dry_run:
+                rmtree(self.bdist_dir)
+
+    def add_files(self):
+        """Populate the MSI File/Directory tables from the install tree.
+
+        One Feature is created per target Python version (plus the
+        'other location' sentinel); the whole bdist_dir tree is walked
+        once per feature, with files after the first recorded in the
+        DuplicateFile table instead of being stored again in the CAB.
+        """
+        db = self.db
+        cab = msilib.CAB("distfiles")
+        rootdir = os.path.abspath(self.bdist_dir)
+
+        root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
+        f = Feature(db, "Python", "Python", "Everything",
+                    0, 1, directory="TARGETDIR")
+
+        items = [(f, root, '')]
+        for version in self.versions + [self.other_version]:
+            target = "TARGETDIR" + version
+            name = default = "Python" + version
+            desc = "Everything"
+            if version is self.other_version:
+                title = "Python from another location"
+                # level 2: not selected by default in the feature tree
+                level = 2
+            else:
+                title = "Python %s from registry" % version
+                level = 1
+            f = Feature(db, name, title, desc, 1, level, directory=target)
+            dir = Directory(db, cab, root, rootdir, target, default)
+            items.append((f, dir, version))
+        db.Commit()
+
+        # Walk the installed tree once per feature; 'seen' maps absolute
+        # file path -> File table key so later features reuse the entry.
+        seen = {}
+        for feature, dir, version in items:
+            todo = [dir]
+            while todo:
+                dir = todo.pop()
+                for file in os.listdir(dir.absolute):
+                    afile = os.path.join(dir.absolute, file)
+                    if os.path.isdir(afile):
+                        short = "%s|%s" % (dir.make_short(file), file)
+                        default = file + version
+                        newdir = Directory(db, cab, dir, file, default, short)
+                        todo.append(newdir)
+                    else:
+                        if not dir.component:
+                            dir.start_component(dir.logical, feature, 0)
+                        if afile not in seen:
+                            key = seen[afile] = dir.add_file(file)
+                            if file==self.install_script:
+                                if self.install_script_key:
+                                    raise PackagingOptionError(
+                                          "Multiple files with name %s" % file)
+                                # remember the formatted File key so
+                                # add_scripts can run this script later
+                                self.install_script_key = '[#%s]' % key
+                        else:
+                            key = seen[afile]
+                            add_data(self.db, "DuplicateFile",
+                                [(key + version, dir.component, key, None, dir.logical)])
+            db.Commit()
+        cab.commit(db)
+
+    def add_find_python(self):
+        """Adds code to the installer to compute the location of Python.
+
+        Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
+        registry for each version of Python.
+
+        Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
+        else from PYTHON.MACHINE.X.Y.
+
+        Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
+
+        # Sequence numbers for the generated custom actions; each version
+        # consumes 4 slots starting here (must stay below 500, see assert).
+        start = 402
+        for ver in self.versions:
+            install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
+            machine_reg = "python.machine." + ver
+            user_reg = "python.user." + ver
+            machine_prop = "PYTHON.MACHINE." + ver
+            user_prop = "PYTHON.USER." + ver
+            machine_action = "PythonFromMachine" + ver
+            user_action = "PythonFromUser" + ver
+            exe_action = "PythonExe" + ver
+            target_dir_prop = "TARGETDIR" + ver
+            exe_prop = "PYTHON" + ver
+            if msilib.Win64:
+                # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
+                Type = 2+16
+            else:
+                Type = 2
+            # RegLocator roots: 2 = per-machine hive, 1 = per-user hive
+            # (per the Windows Installer RegLocator table).
+            add_data(self.db, "RegLocator",
+                    [(machine_reg, 2, install_path, None, Type),
+                     (user_reg, 1, install_path, None, Type)])
+            add_data(self.db, "AppSearch",
+                    [(machine_prop, machine_reg),
+                     (user_prop, user_reg)])
+            # Custom action type 51 (+256, no UI) assigns a property value.
+            add_data(self.db, "CustomAction",
+                    [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
+                     (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
+                     (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
+                    ])
+            # Machine first, then user: the user-level value, when present,
+            # overwrites TARGETDIRX.Y since it runs later.
+            add_data(self.db, "InstallExecuteSequence",
+                    [(machine_action, machine_prop, start),
+                     (user_action, user_prop, start + 1),
+                     (exe_action, None, start + 2),
+                    ])
+            add_data(self.db, "InstallUISequence",
+                    [(machine_action, machine_prop, start),
+                     (user_action, user_prop, start + 1),
+                     (exe_action, None, start + 2),
+                    ])
+            add_data(self.db, "Condition",
+                    [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
+            start += 4
+            assert start < 500
+
+    def add_scripts(self):
+        """Register the post-install script (and, eventually, the
+        pre-install script) as MSI custom actions.
+
+        The install script is run once per selected Python feature via a
+        type-50 custom action using that version's PYTHONX.Y property.
+        """
+        if self.install_script:
+            # Sequence numbers for the per-version actions start here.
+            start = 6800
+            for ver in self.versions + [self.other_version]:
+                install_action = "install_script." + ver
+                exe_prop = "PYTHON" + ver
+                add_data(self.db, "CustomAction",
+                        [(install_action, 50, exe_prop, self.install_script_key)])
+                # "&Python%s=3" means: run only when that feature is
+                # selected for local installation.
+                add_data(self.db, "InstallExecuteSequence",
+                        [(install_action, "&Python%s=3" % ver, start)])
+                start += 1
+        # XXX pre-install scripts are currently refused in finalize_options()
+        #     but if this feature is completed, it will also need to add
+        #     entries for each version as the above code does
+        if self.pre_install_script:
+            scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
+            with open(scriptfn, "w") as f:
+                # The batch file will be executed with [PYTHON], so that %1
+                # is the path to the Python interpreter; %0 will be the path
+                # of the batch file.
+                # rem ="""
+                # %1 %0
+                # exit
+                # """
+                # <actual script>
+                f.write('rem ="""\n%1 %0\nexit\n"""\n')
+                with open(self.pre_install_script) as fp:
+                    f.write(fp.read())
+            add_data(self.db, "Binary",
+                     [("PreInstall", msilib.Binary(scriptfn)),
+                     ])
+            add_data(self.db, "CustomAction",
+                     [("PreInstall", 2, "PreInstall", None),
+                     ])
+            add_data(self.db, "InstallExecuteSequence",
+                     [("PreInstall", "NOT Installed", 450),
+                     ])
+
+    def add_ui(self):
+        db = self.db
+        x = y = 50
+        w = 370
+        h = 300
+        title = "[ProductName] Setup"
+
+        # see "Dialog Style Bits"
+        modal = 3      # visible | modal
+        modeless = 1   # visible
+
+        # UI customization properties
+        add_data(db, "Property",
+                 # See "DefaultUIFont Property"
+                 [("DefaultUIFont", "DlgFont8"),
+                  # See "ErrorDialog Style Bit"
+                  ("ErrorDialog", "ErrorDlg"),
+                  ("Progress1", "Install"),   # modified in maintenance type dlg
+                  ("Progress2", "installs"),
+                  ("MaintenanceForm_Action", "Repair"),
+                  # possible values: ALL, JUSTME
+                  ("WhichUsers", "ALL")
+                 ])
+
+        # Fonts, see "TextStyle Table"
+        add_data(db, "TextStyle",
+                 [("DlgFont8", "Tahoma", 9, None, 0),
+                  ("DlgFontBold8", "Tahoma", 8, None, 1), #bold
+                  ("VerdanaBold10", "Verdana", 10, None, 1),
+                  ("VerdanaRed9", "Verdana", 9, 255, 0),
+                 ])
+
+        # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
+        # Numbers indicate sequence; see sequence.py for how these action integrate
+        add_data(db, "InstallUISequence",
+                 [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
+                  ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
+                  # In the user interface, assume all-users installation if privileged.
+                  ("SelectFeaturesDlg", "Not Installed", 1230),
+                  # XXX no support for resume installations yet
+                  #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
+                  ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
+                  ("ProgressDlg", None, 1280)])
+
+        add_data(db, 'ActionText', text.ActionText)
+        add_data(db, 'UIText', text.UIText)
+        #####################################################################
+        # Standard dialogs: FatalError, UserExit, ExitDialog
+        fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
+                     "Finish", "Finish", "Finish")
+        fatal.title("[ProductName] Installer ended prematurely")
+        fatal.back("< Back", "Finish", active = 0)
+        fatal.cancel("Cancel", "Back", active = 0)
+        fatal.text("Description1", 15, 70, 320, 80, 0x30003,
+                   "[ProductName] setup ended prematurely because of an error.  Your system has not been modified.  To install this program at a later time, please run the installation again.")
+        fatal.text("Description2", 15, 155, 320, 20, 0x30003,
+                   "Click the Finish button to exit the Installer.")
+        c=fatal.next("Finish", "Cancel", name="Finish")
+        c.event("EndDialog", "Exit")
+
+        user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
+                     "Finish", "Finish", "Finish")
+        user_exit.title("[ProductName] Installer was interrupted")
+        user_exit.back("< Back", "Finish", active = 0)
+        user_exit.cancel("Cancel", "Back", active = 0)
+        user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
+                   "[ProductName] setup was interrupted.  Your system has not been modified.  "
+                   "To install this program at a later time, please run the installation again.")
+        user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
+                   "Click the Finish button to exit the Installer.")
+        c = user_exit.next("Finish", "Cancel", name="Finish")
+        c.event("EndDialog", "Exit")
+
+        exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
+                             "Finish", "Finish", "Finish")
+        exit_dialog.title("Completing the [ProductName] Installer")
+        exit_dialog.back("< Back", "Finish", active = 0)
+        exit_dialog.cancel("Cancel", "Back", active = 0)
+        exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
+                   "Click the Finish button to exit the Installer.")
+        c = exit_dialog.next("Finish", "Cancel", name="Finish")
+        c.event("EndDialog", "Return")
+
+        #####################################################################
+        # Required dialog: FilesInUse, ErrorDlg
+        inuse = PyDialog(db, "FilesInUse",
+                         x, y, w, h,
+                         19,                # KeepModeless|Modal|Visible
+                         title,
+                         "Retry", "Retry", "Retry", bitmap=False)
+        inuse.text("Title", 15, 6, 200, 15, 0x30003,
+                   r"{\DlgFontBold8}Files in Use")
+        inuse.text("Description", 20, 23, 280, 20, 0x30003,
+               "Some files that need to be updated are currently in use.")
+        inuse.text("Text", 20, 55, 330, 50, 3,
+                   "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
+        inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
+                      None, None, None)
+        c=inuse.back("Exit", "Ignore", name="Exit")
+        c.event("EndDialog", "Exit")
+        c=inuse.next("Ignore", "Retry", name="Ignore")
+        c.event("EndDialog", "Ignore")
+        c=inuse.cancel("Retry", "Exit", name="Retry")
+        c.event("EndDialog","Retry")
+
+        # See "Error Dialog". See "ICE20" for the required names of the controls.
+        error = Dialog(db, "ErrorDlg",
+                       50, 10, 330, 101,
+                       65543,       # Error|Minimize|Modal|Visible
+                       title,
+                       "ErrorText", None, None)
+        error.text("ErrorText", 50,9,280,48,3, "")
+        #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
+        error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
+        error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
+        error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
+        error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
+        error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
+        error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
+        error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
+
+        #####################################################################
+        # Global "Query Cancel" dialog
+        cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
+                        "No", "No", "No")
+        cancel.text("Text", 48, 15, 194, 30, 3,
+                    "Are you sure you want to cancel [ProductName] installation?")
+        #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
+        #               "py.ico", None, None)
+        c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
+        c.event("EndDialog", "Exit")
+
+        c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
+        c.event("EndDialog", "Return")
+
+        #####################################################################
+        # Global "Wait for costing" dialog
+        costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
+                         "Return", "Return", "Return")
+        costing.text("Text", 48, 15, 194, 30, 3,
+                     "Please wait while the installer finishes determining your disk space requirements.")
+        c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
+        c.event("EndDialog", "Exit")
+
+        #####################################################################
+        # Preparation dialog: no user input except cancellation
+        prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
+                        "Cancel", "Cancel", "Cancel")
+        prep.text("Description", 15, 70, 320, 40, 0x30003,
+                  "Please wait while the Installer prepares to guide you through the installation.")
+        prep.title("Welcome to the [ProductName] Installer")
+        c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
+        c.mapping("ActionText", "Text")
+        c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
+        c.mapping("ActionData", "Text")
+        prep.back("Back", None, active=0)
+        prep.next("Next", None, active=0)
+        c=prep.cancel("Cancel", None)
+        c.event("SpawnDialog", "CancelDlg")
+
+        #####################################################################
+        # Feature (Python directory) selection
+        seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
+                        "Next", "Next", "Cancel")
+        seldlg.title("Select Python Installations")
+
+        seldlg.text("Hint", 15, 30, 300, 20, 3,
+                    "Select the Python locations where %s should be installed."
+                    % self.distribution.get_fullname())
+
+        seldlg.back("< Back", None, active=0)
+        c = seldlg.next("Next >", "Cancel")
+        order = 1
+        c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
+        for version in self.versions + [self.other_version]:
+            order += 1
+            c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
+                    "FEATURE_SELECTED AND &Python%s=3" % version,
+                    ordering=order)
+        c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
+        c.event("EndDialog", "Return", ordering=order + 2)
+        c = seldlg.cancel("Cancel", "Features")
+        c.event("SpawnDialog", "CancelDlg")
+
+        c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
+                           "FEATURE", None, "PathEdit", None)
+        c.event("[FEATURE_SELECTED]", "1")
+        ver = self.other_version
+        install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
+        dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
+
+        c = seldlg.text("Other", 15, 200, 300, 15, 3,
+                        "Provide an alternate Python location")
+        c.condition("Enable", install_other_cond)
+        c.condition("Show", install_other_cond)
+        c.condition("Disable", dont_install_other_cond)
+        c.condition("Hide", dont_install_other_cond)
+
+        c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
+                           "TARGETDIR" + ver, None, "Next", None)
+        c.condition("Enable", install_other_cond)
+        c.condition("Show", install_other_cond)
+        c.condition("Disable", dont_install_other_cond)
+        c.condition("Hide", dont_install_other_cond)
+
+        #####################################################################
+        # Disk cost
+        cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
+                        "OK", "OK", "OK", bitmap=False)
+        cost.text("Title", 15, 6, 200, 15, 0x30003,
+                  "{\DlgFontBold8}Disk Space Requirements")
+        cost.text("Description", 20, 20, 280, 20, 0x30003,
+                  "The disk space required for the installation of the selected features.")
+        cost.text("Text", 20, 53, 330, 60, 3,
+                  "The highlighted volumes (if any) do not have enough disk space "
+              "available for the currently selected features.  You can either "
+              "remove some files from the highlighted volumes, or choose to "
+              "install less features onto local drive(s), or select different "
+              "destination drive(s).")
+        cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
+                     None, "{120}{70}{70}{70}{70}", None, None)
+        cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
+
+        #####################################################################
+        # WhichUsers Dialog. Only available on NT, and for privileged users.
+        # This must be run before FindRelatedProducts, because that will
+        # take into account whether the previous installation was per-user
+        # or per-machine. We currently don't support going back to this
+        # dialog after "Next" was selected; to support this, we would need to
+        # find how to reset the ALLUSERS property, and how to re-run
+        # FindRelatedProducts.
+        # On Windows9x, the ALLUSERS property is ignored on the command line
+        # and in the Property table, but the installer fails, according to
+        # the documentation, if a dialog attempts to set ALLUSERS.
+        whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
+                            "AdminInstall", "Next", "Cancel")
+        whichusers.title("Select whether to install [ProductName] for all users of this computer.")
+        # A radio group with two options: allusers, justme
+        g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
+                                  "WhichUsers", "", "Next")
+        g.add("ALL", 0, 5, 150, 20, "Install for all users")
+        g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
+
+        whichusers.back("Back", None, active=0)
+
+        c = whichusers.next("Next >", "Cancel")
+        c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
+        c.event("EndDialog", "Return", ordering = 2)
+
+        c = whichusers.cancel("Cancel", "AdminInstall")
+        c.event("SpawnDialog", "CancelDlg")
+
+        #####################################################################
+        # Installation Progress dialog (modeless)
+        progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
+                            "Cancel", "Cancel", "Cancel", bitmap=False)
+        progress.text("Title", 20, 15, 200, 15, 0x30003,
+                      "{\DlgFontBold8}[Progress1] [ProductName]")
+        progress.text("Text", 35, 65, 300, 30, 3,
+                      "Please wait while the Installer [Progress2] [ProductName]. "
+                      "This may take several minutes.")
+        progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
+
+        c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
+        c.mapping("ActionText", "Text")
+
+        #c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
+        #c.mapping("ActionData", "Text")
+
+        c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
+                           None, "Progress done", None, None)
+        c.mapping("SetProgress", "Progress")
+
+        progress.back("< Back", "Next", active=False)
+        progress.next("Next >", "Cancel", active=False)
+        progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
+
+        ###################################################################
+        # Maintenance type: repair/uninstall
+        maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
+                         "Next", "Next", "Cancel")
+        maint.title("Welcome to the [ProductName] Setup Wizard")
+        maint.text("BodyText", 15, 63, 330, 42, 3,
+                   "Select whether you want to repair or remove [ProductName].")
+        g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
+                            "MaintenanceForm_Action", "", "Next")
+        #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
+        g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
+        g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
+
+        maint.back("< Back", None, active=False)
+        c=maint.next("Finish", "Cancel")
+        # Change installation: Change progress dialog to "Change", then ask
+        # for feature selection
+        #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
+        #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
+
+        # Reinstall: Change progress dialog to "Repair", then invoke reinstall
+        # Also set list of reinstalled features to "ALL"
+        c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
+        c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
+        c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
+        c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
+
+        # Uninstall: Change progress to "Remove", then invoke uninstall
+        # Also set list of removed features to "ALL"
+        c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
+        c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
+        c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
+        c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
+
+        # Close dialog when maintenance action scheduled
+        c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
+        #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
+
+        maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
+
+    def get_installer_filename(self, fullname):
+        # Factored out to allow overriding in subclasses
+        if self.target_version:
+            base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
+                                            self.target_version)
+        else:
+            base_name = "%s.%s.msi" % (fullname, self.plat_name)
+        installer_name = os.path.join(self.dist_dir, base_name)
+        return installer_name
diff --git a/Lib/packaging/command/bdist_wininst.py b/Lib/packaging/command/bdist_wininst.py
new file mode 100644
index 0000000..dbb74ea
--- /dev/null
+++ b/Lib/packaging/command/bdist_wininst.py
@@ -0,0 +1,342 @@
+"""Create an executable installer for Windows."""
+
+# FIXME synchronize bytes/str use with same file in distutils
+
+import sys
+import os
+
+from shutil import rmtree
+from sysconfig import get_python_version
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError, PackagingPlatformError
+from packaging import logger
+from packaging.util import get_platform
+
+
+class bdist_wininst(Command):
+
+    description = "create an executable installer for Windows"
+
+    user_options = [('bdist-dir=', None,
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('target-version=', None,
+                     "require a specific python version" +
+                     " on the target system"),
+                    ('no-target-compile', 'c',
+                     "do not compile .py to .pyc on the target system"),
+                    ('no-target-optimize', 'o',
+                     "do not compile .py to .pyo (optimized)"
+                     "on the target system"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('bitmap=', 'b',
+                     "bitmap to use for the installer instead of python-powered logo"),
+                    ('title=', 't',
+                     "title to display on the installer background instead of default"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('install-script=', None,
+                     "basename of installation script to be run after"
+                     "installation or before deinstallation"),
+                    ('pre-install-script=', None,
+                     "Fully qualified filename of a script to be run before "
+                     "any files are installed.  This script need not be in the "
+                     "distribution"),
+                    ('user-access-control=', None,
+                     "specify Vista's UAC handling - 'none'/default=no "
+                     "handling, 'auto'=use UAC if target Python installed for "
+                     "all users, 'force'=always use UAC"),
+                   ]
+
+    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
+                       'skip-build']
+
+    def initialize_options(self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.keep_temp = False
+        self.no_target_compile = False
+        self.no_target_optimize = False
+        self.target_version = None
+        self.dist_dir = None
+        self.bitmap = None
+        self.title = None
+        self.skip_build = False
+        self.install_script = None
+        self.pre_install_script = None
+        self.user_access_control = None
+
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            if self.skip_build and self.plat_name:
+                # If build is skipped and plat_name is overridden, bdist will
+                # not see the correct 'plat_name' - so set that up manually.
+                bdist = self.distribution.get_command_obj('bdist')
+                bdist.plat_name = self.plat_name
+                # next the command will be initialized using that name
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'wininst')
+        if not self.target_version:
+            self.target_version = ""
+        if not self.skip_build and self.distribution.has_ext_modules():
+            short_version = get_python_version()
+            if self.target_version and self.target_version != short_version:
+                raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \
+                      " option must be specified" % (short_version,))
+            self.target_version = short_version
+
+        self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
+
+        if self.install_script:
+            for script in self.distribution.scripts:
+                if self.install_script == os.path.basename(script):
+                    break
+            else:
+                raise PackagingOptionError("install_script '%s' not found in scripts" % \
+                      self.install_script)
+
+    def run(self):
+        if (sys.platform != "win32" and
+            (self.distribution.has_ext_modules() or
+             self.distribution.has_c_libraries())):
+            raise PackagingPlatformError \
+                  ("distribution contains extensions and/or C libraries; "
+                   "must be compiled on a Windows 32 platform")
+
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.get_reinitialized_command('install',
+                                                 reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+        install.plat_name = self.plat_name
+
+        install_lib = self.get_reinitialized_command('install_lib')
+        # we do not want to include pyc or pyo files
+        install_lib.compile = False
+        install_lib.optimize = 0
+
+        if self.distribution.has_ext_modules():
+            # If we are building an installer for a Python version other
+            # than the one we are currently running, then we need to ensure
+            # our build_lib reflects the other Python version rather than ours.
+            # Note that for target_version!=sys.version, we must have skipped the
+            # build step, so there is no issue with enforcing the build of this
+            # version.
+            target_version = self.target_version
+            if not target_version:
+                assert self.skip_build, "Should have already checked this"
+                target_version = sys.version[0:3]
+            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+            build = self.get_finalized_command('build')
+            build.build_lib = os.path.join(build.build_base,
+                                           'lib' + plat_specifier)
+
+        # Use a custom scheme for the zip-file, because we have to decide
+        # at installation time which scheme to use.
+        for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
+            value = key.upper()
+            if key == 'headers':
+                value = value + '/Include/$dist_name'
+            setattr(install,
+                    'install_' + key,
+                    value)
+
+        logger.info("installing to %s", self.bdist_dir)
+        install.ensure_finalized()
+
+        # avoid warning of 'install_lib' about installing
+        # into a directory not in sys.path
+        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+        install.run()
+
+        del sys.path[0]
+
+        # And make an archive relative to the root of the
+        # pseudo-installation tree.
+        from tempfile import NamedTemporaryFile
+        archive_basename = NamedTemporaryFile().name
+        fullname = self.distribution.get_fullname()
+        arcname = self.make_archive(archive_basename, "zip",
+                                    root_dir=self.bdist_dir)
+        # create an exe containing the zip-file
+        self.create_exe(arcname, fullname, self.bitmap)
+        if self.distribution.has_ext_modules():
+            pyversion = get_python_version()
+        else:
+            pyversion = 'any'
+        self.distribution.dist_files.append(('bdist_wininst', pyversion,
+                                             self.get_installer_filename(fullname)))
+        # remove the zip-file again
+        logger.debug("removing temporary file '%s'", arcname)
+        os.remove(arcname)
+
+        if not self.keep_temp:
+            if self.dry_run:
+                logger.info('removing %s', self.bdist_dir)
+            else:
+                rmtree(self.bdist_dir)
+
+    def get_inidata(self):
+        # Return data describing the installation.
+
+        lines = []
+        metadata = self.distribution.metadata
+
+        # Write the [metadata] section.
+        lines.append("[metadata]")
+
+        # 'info' will be displayed in the installer's dialog box,
+        # describing the items to be installed.
+        info = (metadata.long_description or '') + '\n'
+
+        # Escape newline characters
+        def escape(s):
+            return s.replace("\n", "\\n")
+
+        for name in ["author", "author_email", "description", "maintainer",
+                     "maintainer_email", "name", "url", "version"]:
+            data = getattr(metadata, name, "")
+            if data:
+                info = info + ("\n    %s: %s" % \
+                               (name.capitalize(), escape(data)))
+                lines.append("%s=%s" % (name, escape(data)))
+
+        # The [setup] section contains entries controlling
+        # the installer runtime.
+        lines.append("\n[Setup]")
+        if self.install_script:
+            lines.append("install_script=%s" % self.install_script)
+        lines.append("info=%s" % escape(info))
+        lines.append("target_compile=%d" % (not self.no_target_compile))
+        lines.append("target_optimize=%d" % (not self.no_target_optimize))
+        if self.target_version:
+            lines.append("target_version=%s" % self.target_version)
+        if self.user_access_control:
+            lines.append("user_access_control=%s" % self.user_access_control)
+
+        title = self.title or self.distribution.get_fullname()
+        lines.append("title=%s" % escape(title))
+        import time
+        import packaging
+        build_info = "Built %s with packaging-%s" % \
+                     (time.ctime(time.time()), packaging.__version__)
+        lines.append("build_info=%s" % build_info)
+        return "\n".join(lines)
+
+    def create_exe(self, arcname, fullname, bitmap=None):
+        import struct
+
+        self.mkpath(self.dist_dir)
+
+        cfgdata = self.get_inidata()
+
+        installer_name = self.get_installer_filename(fullname)
+        logger.info("creating %s", installer_name)
+
+        if bitmap:
+            with open(bitmap, "rb") as fp:
+                bitmapdata = fp.read()
+            bitmaplen = len(bitmapdata)
+        else:
+            bitmaplen = 0
+
+        with open(installer_name, "wb") as file:
+            file.write(self.get_exe_bytes())
+            if bitmap:
+                file.write(bitmapdata)
+
+            # Convert cfgdata from unicode to ascii, mbcs encoded
+            if isinstance(cfgdata, str):
+                cfgdata = cfgdata.encode("mbcs")
+
+            # Append the pre-install script
+            cfgdata = cfgdata + "\0"
+            if self.pre_install_script:
+                with open(self.pre_install_script) as fp:
+                    script_data = fp.read()
+                cfgdata = cfgdata + script_data + "\n\0"
+            else:
+                # empty pre-install script
+                cfgdata = cfgdata + "\0"
+            file.write(cfgdata)
+
+            # The 'magic number' 0x1234567B is used to make sure that the
+            # binary layout of 'cfgdata' is what the wininst.exe binary
+            # expects.  If the layout changes, increment that number, make
+            # the corresponding changes to the wininst.exe sources, and
+            # recompile them.
+            header = struct.pack("<iii",
+                                 0x1234567B,       # tag
+                                 len(cfgdata),     # length
+                                 bitmaplen,        # number of bytes in bitmap
+                                 )
+            file.write(header)
+            with open(arcname, "rb") as fp:
+                file.write(fp.read())
+
+    def get_installer_filename(self, fullname):
+        # Factored out to allow overriding in subclasses
+        if self.target_version:
+            # if we create an installer for a specific python version,
+            # it's better to include this in the name
+            installer_name = os.path.join(self.dist_dir,
+                                          "%s.%s-py%s.exe" %
+                                           (fullname, self.plat_name, self.target_version))
+        else:
+            installer_name = os.path.join(self.dist_dir,
+                                          "%s.%s.exe" % (fullname, self.plat_name))
+        return installer_name
+
+    def get_exe_bytes(self):
+        from packaging.compiler.msvccompiler import get_build_version
+        # If a target-version other than the current version has been
+        # specified, then using the MSVC version from *this* build is no good.
+        # Without actually finding and executing the target version and parsing
+        # its sys.version, we just hard-code our knowledge of old versions.
+        # NOTE: Possible alternative is to allow "--target-version" to
+        # specify a Python executable rather than a simple version string.
+        # We can then execute this program to obtain any info we need, such
+        # as the real sys.version string for the build.
+        cur_version = get_python_version()
+        if self.target_version and self.target_version != cur_version:
+            # If the target version is *later* than us, then we assume they
+            # use what we use
+            # string compares seem wrong, but are what sysconfig.py itself uses
+            if self.target_version > cur_version:
+                bv = get_build_version()
+            else:
+                if self.target_version < "2.4":
+                    bv = 6.0
+                else:
+                    bv = 7.1
+        else:
+            # for current version - use authoritative check.
+            bv = get_build_version()
+
+        # wininst-x.y.exe is in the same directory as this file
+        directory = os.path.dirname(__file__)
+        # we must use a wininst-x.y.exe built with the same C compiler
+        # used for python.  XXX What about mingw, borland, and so on?
+
+        # if plat_name starts with "win" but is not "win32"
+        # we want to strip "win" and leave the rest (e.g. -amd64)
+        # for all other cases, we don't want any suffix
+        if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
+            sfix = self.plat_name[3:]
+        else:
+            sfix = ''
+
+        filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, sfix))
+        with open(filename, "rb") as fp:
+            return fp.read()
diff --git a/Lib/packaging/command/build.py b/Lib/packaging/command/build.py
new file mode 100644
index 0000000..6580fd1
--- /dev/null
+++ b/Lib/packaging/command/build.py
@@ -0,0 +1,151 @@
+"""Main build command, which calls the other build_* commands."""
+
+import sys
+import os
+
+from packaging.util import get_platform
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError
+from packaging.compiler import show_compilers
+
+
+class build(Command):
+
+    description = "build everything needed to install"
+
+    user_options = [
+        ('build-base=', 'b',
+         "base directory for build library"),
+        ('build-purelib=', None,
+         "build directory for platform-neutral distributions"),
+        ('build-platlib=', None,
+         "build directory for platform-specific distributions"),
+        ('build-lib=', None,
+         "build directory for all distribution (defaults to either " +
+         "build-purelib or build-platlib"),
+        ('build-scripts=', None,
+         "build directory for scripts"),
+        ('build-temp=', 't',
+         "temporary build directory"),
+        ('plat-name=', 'p',
+         "platform name to build for, if supported "
+         "(default: %s)" % get_platform()),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ('debug', 'g',
+         "compile extensions and libraries with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('executable=', 'e',
+         "specify final destination interpreter path (build.py)"),
+        ('use-2to3', None,
+         "use 2to3 to make source python 3.x compatible"),
+        ('convert-2to3-doctests', None,
+         "use 2to3 to convert doctests in seperate text files"),
+        ('use-2to3-fixers', None,
+         "list additional fixers opted for during 2to3 conversion"),
+        ]
+
+    boolean_options = ['debug', 'force']
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options(self):
+        self.build_base = 'build'
+        # these are decided only after 'build_base' has its final value
+        # (unless overridden by the user or client)
+        self.build_purelib = None
+        self.build_platlib = None
+        self.build_lib = None
+        self.build_temp = None
+        self.build_scripts = None
+        self.compiler = None
+        self.plat_name = None
+        self.debug = None
+        self.force = False
+        self.executable = None
+        self.use_2to3 = False
+        self.convert_2to3_doctests = None
+        self.use_2to3_fixers = None
+
+    def finalize_options(self):
+        if self.plat_name is None:
+            self.plat_name = get_platform()
+        else:
+            # plat-name only supported for windows (other platforms are
+            # supported via ./configure flags, if at all).  Avoid misleading
+            # other platforms.
+            if os.name != 'nt':
+                raise PackagingOptionError(
+                            "--plat-name only supported on Windows (try "
+                            "using './configure --help' on your platform)")
+
+        plat_specifier = ".%s-%s" % (self.plat_name, sys.version[0:3])
+
+        # Make it so Python 2.x and Python 2.x with --with-pydebug don't
+        # share the same build directories. Doing so confuses the build
+        # process for C modules
+        if hasattr(sys, 'gettotalrefcount'):
+            plat_specifier += '-pydebug'
+
+        # 'build_purelib' and 'build_platlib' just default to 'lib' and
+        # 'lib.<plat>' under the base build directory.  We only use one of
+        # them for a given distribution, though --
+        if self.build_purelib is None:
+            self.build_purelib = os.path.join(self.build_base, 'lib')
+        if self.build_platlib is None:
+            self.build_platlib = os.path.join(self.build_base,
+                                              'lib' + plat_specifier)
+
+        # 'build_lib' is the actual directory that we will use for this
+        # particular module distribution -- if user didn't supply it, pick
+        # one of 'build_purelib' or 'build_platlib'.
+        if self.build_lib is None:
+            if self.distribution.ext_modules:
+                self.build_lib = self.build_platlib
+            else:
+                self.build_lib = self.build_purelib
+
+        # 'build_temp' -- temporary directory for compiler turds,
+        # "build/temp.<plat>"
+        if self.build_temp is None:
+            self.build_temp = os.path.join(self.build_base,
+                                           'temp' + plat_specifier)
+        if self.build_scripts is None:
+            self.build_scripts = os.path.join(self.build_base,
+                                              'scripts-' + sys.version[0:3])
+
+        if self.executable is None:
+            self.executable = os.path.normpath(sys.executable)
+
+    def run(self):
+        # Run all relevant sub-commands.  This will be some subset of:
+        #  - build_py      - pure Python modules
+        #  - build_clib    - standalone C libraries
+        #  - build_ext     - Python extension modules
+        #  - build_scripts - Python scripts
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+    # -- Predicates for the sub-command list ---------------------------
+
+    def has_pure_modules(self):
+        return self.distribution.has_pure_modules()
+
+    def has_c_libraries(self):
+        return self.distribution.has_c_libraries()
+
+    def has_ext_modules(self):
+        return self.distribution.has_ext_modules()
+
+    def has_scripts(self):
+        return self.distribution.has_scripts()
+
+    sub_commands = [('build_py', has_pure_modules),
+                    ('build_clib', has_c_libraries),
+                    ('build_ext', has_ext_modules),
+                    ('build_scripts', has_scripts),
+                   ]
diff --git a/Lib/packaging/command/build_clib.py b/Lib/packaging/command/build_clib.py
new file mode 100644
index 0000000..4a24996
--- /dev/null
+++ b/Lib/packaging/command/build_clib.py
@@ -0,0 +1,198 @@
+"""Build C/C++ libraries.
+
+This command is useful to build libraries that are included in the
+distribution and needed by extension modules.
+"""
+
+# XXX this module has *lots* of code ripped-off quite transparently from
+# build_ext.py -- not surprisingly really, as the work required to build
+# a static library from a collection of C source files is not really all
+# that different from what's required to build a shared object file from
+# a collection of C source files.  Nevertheless, I haven't done the
+# necessary refactoring to account for the overlap in code between the
+# two modules, mainly because a number of subtle details changed in the
+# cut 'n paste.  Sigh.
+
+import os
+from packaging.command.cmd import Command
+from packaging.errors import PackagingSetupError
+from packaging.compiler import customize_compiler
+from packaging import logger
+
+
def show_compilers():
    """Print the list of available compilers (used by --help-compiler)."""
    # Imported lazily to avoid pulling in the compiler package at
    # module-import time.
    from packaging.compiler import show_compilers as _show_compilers
    _show_compilers()
+
+
class build_clib(Command):

    description = "build C/C++ libraries used by extension modules"

    user_options = [
        ('build-clib=', 'b',
         "directory to build C/C++ libraries to"),
        ('build-temp=', 't',
         "directory to put temporary build by-products"),
        ('debug', 'g',
         "compile with debugging information"),
        ('force', 'f',
         "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c',
         "specify the compiler type"),
        ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None,
         "list available compilers", show_compilers),
        ]

    def initialize_options(self):
        """Set every option to its 'undefined' default value."""
        self.build_clib = None
        self.build_temp = None

        # List of (lib_name, build_info) tuples to build; taken from the
        # distribution in finalize_options().
        self.libraries = None

        # Compilation options common to all libraries
        self.include_dirs = None
        self.define = None
        self.undef = None
        self.debug = None
        self.force = False
        self.compiler = None

    def finalize_options(self):
        """Inherit defaults from 'build' and validate the library list."""
        # This might be confusing: both build-clib and build-temp default
        # to build-temp as defined by the "build" command.  This is because
        # I think that C libraries are really just temporary build
        # by-products, at least from the point of view of building Python
        # extensions -- but I want to keep my options open.
        self.set_undefined_options('build',
                                   ('build_temp', 'build_clib'),
                                   ('build_temp', 'build_temp'),
                                   'compiler', 'debug', 'force')

        self.libraries = self.distribution.libraries
        if self.libraries:
            self.check_library_list(self.libraries)

        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        if isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        # XXX same as for build_ext -- what about 'self.define' and
        # 'self.undef' ?

    def run(self):
        """Compile the sources of each library and archive the objects."""
        if not self.libraries:
            return

        # Yech -- this is cut 'n pasted from build_ext.py!
        # NOTE: 'self.compiler' changes here from a compiler *name* (or
        # None) to a compiler *object*, mirroring build_ext's historic wart.
        from packaging.compiler import new_compiler
        self.compiler = new_compiler(compiler=self.compiler,
                                     dry_run=self.dry_run,
                                     force=self.force)
        customize_compiler(self.compiler)

        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name, value) tuples
            for name, value in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)

        self.build_libraries(self.libraries)

    def check_library_list(self, libraries):
        """Ensure that the list of libraries is valid.

        `libraries` is presumably provided as a command option 'libraries'.
        This method checks that it is a list of 2-tuples, where the tuples
        are (library_name, build_info_dict).

        Raise PackagingSetupError if the structure is invalid anywhere;
        just returns otherwise.
        """
        if not isinstance(libraries, list):
            raise PackagingSetupError(
                "'libraries' option must be a list of tuples")

        for lib in libraries:
            # Fixed: the original condition used 'and', which could never
            # be true for a tuple of the wrong length (or a non-tuple of
            # length 2), letting malformed entries slip through and fail
            # later with a confusing unpacking error.
            if not isinstance(lib, tuple) or len(lib) != 2:
                raise PackagingSetupError(
                    "each element of 'libraries' must be a 2-tuple")

            name, build_info = lib

            if not isinstance(name, str):
                raise PackagingSetupError(
                    "first element of each tuple in 'libraries' "
                    "must be a string (the library name)")
            if '/' in name or (os.sep != '/' and os.sep in name):
                raise PackagingSetupError(("bad library name '%s': " +
                       "may not contain directory separators") % lib[0])

            if not isinstance(build_info, dict):
                raise PackagingSetupError(
                    "second element of each tuple in 'libraries' "
                    "must be a dictionary (build info)")

    def get_library_names(self):
        """Return the names of the libraries to build, or None."""
        # Assume the library list is valid -- 'check_library_list()' is
        # called from 'finalize_options()', so it should be!
        if not self.libraries:
            return None
        return [lib_name for lib_name, build_info in self.libraries]

    def _get_sources(self, lib_name, build_info):
        # Validate and return (as a new list) the 'sources' entry of one
        # library's build info; shared by get_source_files() and
        # build_libraries(), which previously duplicated this check.
        sources = build_info.get('sources')
        if sources is None or not isinstance(sources, (list, tuple)):
            raise PackagingSetupError(("in 'libraries' option (library '%s'), "
                   "'sources' must be present and must be "
                   "a list of source filenames") % lib_name)
        return list(sources)

    def get_source_files(self):
        """Return every source file of every library."""
        self.check_library_list(self.libraries)
        filenames = []
        for lib_name, build_info in self.libraries:
            filenames.extend(self._get_sources(lib_name, build_info))
        return filenames

    def build_libraries(self, libraries):
        """Compile and archive each (lib_name, build_info) pair."""
        for lib_name, build_info in libraries:
            sources = self._get_sources(lib_name, build_info)

            logger.info("building '%s' library", lib_name)

            # First, compile the source code to object files in the library
            # directory.  (This should probably change to putting object
            # files in a temporary build directory.)
            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            objects = self.compiler.compile(sources,
                                            output_dir=self.build_temp,
                                            macros=macros,
                                            include_dirs=include_dirs,
                                            debug=self.debug)

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive.  Whatever.)
            self.compiler.create_static_lib(objects, lib_name,
                                            output_dir=self.build_clib,
                                            debug=self.debug)
diff --git a/Lib/packaging/command/build_ext.py b/Lib/packaging/command/build_ext.py
new file mode 100644
index 0000000..9b71041
--- /dev/null
+++ b/Lib/packaging/command/build_ext.py
@@ -0,0 +1,666 @@
+"""Build extension modules."""
+
+# FIXME Is this module limited to C extensions or do C++ extensions work too?
+# The docstring of this module said that C++ was not supported, but other
+# comments contradict that.
+
+import os
+import re
+import sys
+import logging
+import sysconfig
+
+from packaging.util import get_platform
+from packaging.command.cmd import Command
+from packaging.errors import (CCompilerError, CompileError, PackagingError,
+                               PackagingPlatformError, PackagingSetupError)
+from packaging.compiler import customize_compiler, show_compilers
+from packaging.util import newer_group
+from packaging.compiler.extension import Extension
+from packaging import logger
+
import site
# Presumably kept for parity with distutils, where user-site support can be
# conditional; in this package it is always available -- TODO confirm.
HAS_USER_SITE = True

if os.name == 'nt':
    # MSVC_VERSION selects the proper .lib directory layout in
    # build_ext.finalize_options() below.
    from packaging.compiler.msvccompiler import get_build_version
    MSVC_VERSION = int(get_build_version())

# An extension name is just a dot-separated list of Python NAMEs (ie.
# the same as a fully-qualified module name).
extension_name_re = re.compile \
    (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
+
+
class build_ext(Command):

    description = "build C/C++ extension modules (compile/link to build directory)"

    # XXX thoughts on how to deal with complex command-line options like
    # these, i.e. how to make it so fancy_getopt can suck them off the
    # command line and make it look like setup.py defined the appropriate
    # lists of tuples of what-have-you.
    #   - each command needs a callback to process its command-line options
    #   - Command.__init__() needs access to its share of the whole
    #     command line (must ultimately come from
    #     Distribution.parse_command_line())
    #   - it then calls the current command class' option-parsing
    #     callback to deal with weird options like -D, which have to
    #     parse the option text and churn out some custom data
    #     structure
    #   - that data structure (in this case, a list of 2-tuples)
    #     will then be present in the command object by the time
    #     we get to finalize_options() (i.e. the constructor
    #     takes care of both command-line and client options
    #     in between initialize_options() and finalize_options())

    # Help-text suffix shared by the path-list options below.
    sep_by = " (separated by '%s')" % os.pathsep
    user_options = [
        ('build-lib=', 'b',
         "directory for compiled extension modules"),
        ('build-temp=', 't',
         "directory for temporary files (build by-products)"),
        ('plat-name=', 'p',
         "platform name to cross-compile for, if supported "
         "(default: %s)" % get_platform()),
        ('inplace', 'i',
         "ignore build-lib and put compiled extensions into the source " +
         "directory alongside your pure Python modules"),
        ('include-dirs=', 'I',
         "list of directories to search for header files" + sep_by),
        ('define=', 'D',
         "C preprocessor macros to define"),
        ('undef=', 'U',
         "C preprocessor macros to undefine"),
        ('libraries=', 'l',
         "external C libraries to link with"),
        ('library-dirs=', 'L',
         "directories to search for external C libraries" + sep_by),
        ('rpath=', 'R',
         "directories to search for shared C libraries at runtime"),
        ('link-objects=', 'O',
         "extra explicit link objects to include in the link"),
        ('debug', 'g',
         "compile/link with debugging information"),
        ('force', 'f',
         "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c',
         "specify the compiler type"),
        ('swig-opts=', None,
         "list of SWIG command-line options"),
        ('swig=', None,
         "path to the SWIG executable"),
        ]

    boolean_options = ['inplace', 'debug', 'force']

    # The --user option only makes sense when user site-packages exist.
    if HAS_USER_SITE:
        user_options.append(('user', None,
                             "add user include, library and rpath"))
        boolean_options.append('user')

    help_options = [
        ('help-compiler', None,
         "list available compilers", show_compilers),
        ]
+
+    def initialize_options(self):
+        self.extensions = None
+        self.build_lib = None
+        self.plat_name = None
+        self.build_temp = None
+        self.inplace = False
+        self.package = None
+
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.libraries = None
+        self.library_dirs = None
+        self.rpath = None
+        self.link_objects = None
+        self.debug = None
+        self.force = None
+        self.compiler = None
+        self.swig = None
+        self.swig_opts = None
+        if HAS_USER_SITE:
+            self.user = None
+
    def finalize_options(self):
        """Fill in defaults and normalize all options.

        Inherits build_lib/build_temp/compiler/debug/force/plat_name from
        the 'build' command, validates 'ext_modules', normalizes the
        string-valued path/macro options into lists, and appends the
        platform-specific include and library search directories.
        """
        self.set_undefined_options('build',
                                   'build_lib', 'build_temp', 'compiler',
                                   'debug', 'force', 'plat_name')

        if self.package is None:
            self.package = self.distribution.ext_package

        # Ensure that the list of extensions is valid, i.e. it is a list of
        # Extension objects.
        self.extensions = self.distribution.ext_modules
        if self.extensions:
            if not isinstance(self.extensions, (list, tuple)):
                type_name = (self.extensions is None and 'None'
                            or type(self.extensions).__name__)
                raise PackagingSetupError(
                    "'ext_modules' must be a sequence of Extension instances,"
                    " not %s" % (type_name,))
            for i, ext in enumerate(self.extensions):
                if isinstance(ext, Extension):
                    continue                # OK! (assume type-checking done
                                            # by Extension constructor)
                type_name = (ext is None and 'None' or type(ext).__name__)
                raise PackagingSetupError(
                    "'ext_modules' item %d must be an Extension instance,"
                    " not %s" % (i, type_name))

        # Make sure Python's include directories (for Python.h, pyconfig.h,
        # etc.) are in the include search path.
        py_include = sysconfig.get_path('include')
        plat_py_include = sysconfig.get_path('platinclude')
        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        if isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        # Put the Python "system" include dir at the end, so that
        # any local include dirs take precedence.
        self.include_dirs.append(py_include)
        if plat_py_include != py_include:
            self.include_dirs.append(plat_py_include)

        if isinstance(self.libraries, str):
            self.libraries = [self.libraries]

        # Life is easier if we're not forever checking for None, so
        # simplify these options to empty lists if unset
        if self.libraries is None:
            self.libraries = []
        if self.library_dirs is None:
            self.library_dirs = []
        elif isinstance(self.library_dirs, str):
            self.library_dirs = self.library_dirs.split(os.pathsep)

        if self.rpath is None:
            self.rpath = []
        elif isinstance(self.rpath, str):
            self.rpath = self.rpath.split(os.pathsep)

        # for extensions under windows use different directories
        # for Release and Debug builds.
        # also Python's library directory must be appended to library_dirs
        if os.name == 'nt':
            # the 'libs' directory is for binary installs - we assume that
            # must be the *native* platform.  But we don't really support
            # cross-compiling via a binary install anyway, so we let it go.
            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
            if self.debug:
                self.build_temp = os.path.join(self.build_temp, "Debug")
            else:
                self.build_temp = os.path.join(self.build_temp, "Release")

            # Append the source distribution include and library directories,
            # this allows distutils on windows to work in the source tree
            self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
            if MSVC_VERSION == 9:
                # Use the .lib files for the correct architecture
                if self.plat_name == 'win32':
                    suffix = ''
                else:
                    # win-amd64 or win-ia64
                    suffix = self.plat_name[4:]
                new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
                if suffix:
                    new_lib = os.path.join(new_lib, suffix)
                self.library_dirs.append(new_lib)

            elif MSVC_VERSION == 8:
                self.library_dirs.append(os.path.join(sys.exec_prefix,
                                         'PC', 'VS8.0'))
            elif MSVC_VERSION == 7:
                self.library_dirs.append(os.path.join(sys.exec_prefix,
                                         'PC', 'VS7.1'))
            else:
                self.library_dirs.append(os.path.join(sys.exec_prefix,
                                         'PC', 'VC6'))

        # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
        # import libraries in its "Config" subdirectory
        if os.name == 'os2':
            self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))

        # for extensions under Cygwin and AtheOS Python's library directory must be
        # appended to library_dirs
        if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
            if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
                # building third party extensions
                self.library_dirs.append(os.path.join(sys.prefix, "lib",
                                  "python" + sysconfig.get_python_version(),
                                                      "config"))
            else:
                # building python standard extensions
                self.library_dirs.append(os.curdir)

        # for extensions under Linux or Solaris with a shared Python library,
        # Python's library directory must be appended to library_dirs
        # NOTE(review): the next call discards its result and duplicates the
        # condition two lines below -- it looks redundant; confirm and remove.
        sysconfig.get_config_var('Py_ENABLE_SHARED')
        if ((sys.platform.startswith('linux') or sys.platform.startswith('gnu')
             or sys.platform.startswith('sunos'))
            and sysconfig.get_config_var('Py_ENABLE_SHARED')):
            if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
                # building third party extensions
                self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
            else:
                # building python standard extensions
                self.library_dirs.append(os.curdir)

        # The argument parsing will result in self.define being a string, but
        # it has to be a list of 2-tuples.  All the preprocessor symbols
        # specified by the 'define' option will be set to '1'.  Multiple
        # symbols can be separated with commas.

        if self.define:
            defines = self.define.split(',')
            self.define = [(symbol, '1') for symbol in defines]

        # The option for macros to undefine is also a string from the
        # option parsing, but has to be a list.  Multiple symbols can also
        # be separated with commas here.
        if self.undef:
            self.undef = self.undef.split(',')

        if self.swig_opts is None:
            self.swig_opts = []
        else:
            self.swig_opts = self.swig_opts.split(' ')

        # Finally add the user include and library directories if requested
        if HAS_USER_SITE and self.user:
            user_include = os.path.join(site.USER_BASE, "include")
            user_lib = os.path.join(site.USER_BASE, "lib")
            if os.path.isdir(user_include):
                self.include_dirs.append(user_include)
            if os.path.isdir(user_lib):
                self.library_dirs.append(user_lib)
                self.rpath.append(user_lib)
+
    def run(self):
        """Set up the compiler object and build every extension module."""
        from packaging.compiler import new_compiler

        # 'self.extensions', as supplied by setup.py, is a list of
        # Extension instances.  See the documentation for Extension (in
        # distutils.extension) for details.
        if not self.extensions:
            return

        # If we were asked to build any C/C++ libraries, make sure that the
        # directory where we put them is in the library search path for
        # linking extensions.
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.libraries.extend(build_clib.get_library_names() or [])
            self.library_dirs.append(build_clib.build_clib)

        # Temporary kludge until we remove the verbose arguments and use
        # logging everywhere
        verbose = logger.getEffectiveLevel() >= logging.DEBUG

        # Setup the CCompiler object that we'll use to do all the
        # compiling and linking
        self.compiler_obj = new_compiler(compiler=self.compiler,
                                         verbose=verbose,
                                         dry_run=self.dry_run,
                                         force=self.force)

        customize_compiler(self.compiler_obj)
        # If we are cross-compiling, init the compiler now (if we are not
        # cross-compiling, init would not hurt, but people may rely on
        # late initialization of compiler even if they shouldn't...)
        if os.name == 'nt' and self.plat_name != get_platform():
            self.compiler_obj.initialize(self.plat_name)

        # And make sure that any compile/link-related options (which might
        # come from the command line or from the setup script) are set in
        # that CCompiler object -- that way, they automatically apply to
        # all compiling and linking done here.
        if self.include_dirs is not None:
            self.compiler_obj.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for name, value in self.define:
                self.compiler_obj.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler_obj.undefine_macro(macro)
        if self.libraries is not None:
            self.compiler_obj.set_libraries(self.libraries)
        if self.library_dirs is not None:
            self.compiler_obj.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            self.compiler_obj.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            self.compiler_obj.set_link_objects(self.link_objects)

        # Now actually compile and link everything.
        self.build_extensions()
+
+    def get_source_files(self):
+        filenames = []
+
+        # Wouldn't it be neat if we knew the names of header files too...
+        for ext in self.extensions:
+            filenames.extend(ext.sources)
+
+        return filenames
+
+    def get_outputs(self):
+        # And build the list of output (built) filenames.  Note that this
+        # ignores the 'inplace' flag, and assumes everything goes in the
+        # "build" tree.
+        outputs = []
+        for ext in self.extensions:
+            outputs.append(self.get_ext_fullpath(ext.name))
+        return outputs
+
+    def build_extensions(self):
+        for ext in self.extensions:
+            try:
+                self.build_extension(ext)
+            except (CCompilerError, PackagingError, CompileError) as e:
+                if not ext.optional:
+                    raise
+                logger.warning('%s: building extension %r failed: %s',
+                               self.get_command_name(), ext.name, e)
+
    def build_extension(self, ext):
        """Compile and link one Extension, skipping it when up to date.

        Raises PackagingSetupError if the extension's 'sources' attribute
        is missing or is not a list/tuple.
        """
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise PackagingSetupError(("in 'ext_modules' option (extension '%s'), " +
                   "'sources' must be present and must be " +
                   "a list of source filenames") % ext.name)
        sources = list(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        # Rebuild only when forced or when any source/dependency is newer
        # than the previously built file.
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            logger.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            logger.info("building '%s' extension", ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources, ext)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command-line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            # a 1-tuple in the macros list means "undefine this macro"
            macros.append((undef,))

        objects = self.compiler_obj.compile(sources,
                                            output_dir=self.build_temp,
                                            macros=macros,
                                            include_dirs=ext.include_dirs,
                                            debug=self.debug,
                                            extra_postargs=extra_args,
                                            depends=ext.depends)

        # XXX -- this is a Vile HACK!
        #
        # The setup.py script for Python on Unix needs to be able to
        # get this list so it can perform all the clean up needed to
        # avoid keeping object files around when cleaning out a failed
        # build of an extension module.  Since Packaging does not
        # track dependencies, we have to get rid of intermediates to
        # ensure all the intermediates will be properly re-built.
        #
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler_obj.detect_language(sources)

        self.compiler_obj.link_shared_object(
            objects, ext_path,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language)
+
+
+    def swig_sources(self, sources, extension):
+        """Walk the list of source files in 'sources', looking for SWIG
+        interface (.i) files.  Run SWIG on all that are found, and
+        return a modified 'sources' list with SWIG source files replaced
+        by the generated C (or C++) files.
+        """
+        new_sources = []
+        swig_sources = []
+        swig_targets = {}
+
+        # XXX this drops generated C/C++ files into the source tree, which
+        # is fine for developers who want to distribute the generated
+        # source -- but there should be an option to put SWIG output in
+        # the temp dir.
+
+        if ('-c++' in self.swig_opts or '-c++' in extension.swig_opts):
+            target_ext = '.cpp'
+        else:
+            target_ext = '.c'
+
+        for source in sources:
+            base, ext = os.path.splitext(source)
+            if ext == ".i":             # SWIG interface file
+                new_sources.append(base + '_wrap' + target_ext)
+                swig_sources.append(source)
+                swig_targets[source] = new_sources[-1]
+            else:
+                new_sources.append(source)
+
+        if not swig_sources:
+            return new_sources
+
+        swig = self.swig or self.find_swig()
+        swig_cmd = [swig, "-python"]
+        swig_cmd.extend(self.swig_opts)
+
+        # Do not override commandline arguments
+        if not self.swig_opts:
+            for o in extension.swig_opts:
+                swig_cmd.append(o)
+
+        for source in swig_sources:
+            target = swig_targets[source]
+            logger.info("swigging %s to %s", source, target)
+            self.spawn(swig_cmd + ["-o", target, source])
+
+        return new_sources
+
+    def find_swig(self):
+        """Return the name of the SWIG executable.  On Unix, this is
+        just "swig" -- it should be in the PATH.  Tries a bit harder on
+        Windows.
+        """
+
+        if os.name == "posix":
+            return "swig"
+        elif os.name == "nt":
+
+            # Look for SWIG in its standard installation directory on
+            # Windows (or so I presume!).  If we find it there, great;
+            # if not, act like Unix and assume it's in the PATH.
+            for vers in ("1.3", "1.2", "1.1"):
+                fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+                if os.path.isfile(fn):
+                    return fn
+            else:
+                return "swig.exe"
+
+        elif os.name == "os2":
+            # assume swig available in the PATH.
+            return "swig.exe"
+
+        else:
+            raise PackagingPlatformError(("I don't know how to find (much less run) SWIG "
+                   "on platform '%s'") % os.name)
+
+    # -- Name generators -----------------------------------------------
+    # (extension names, filenames, whatever)
+    def get_ext_fullpath(self, ext_name):
+        """Returns the path of the filename for a given extension.
+
+        The file is located in `build_lib` or directly in the package
+        (inplace option).
+        """
+        fullname = self.get_ext_fullname(ext_name)
+        modpath = fullname.split('.')
+        filename = self.get_ext_filename(modpath[-1])
+
+        if not self.inplace:
+            # no further work needed
+            # returning :
+            #   build_dir/package/path/filename
+            filename = os.path.join(*modpath[:-1]+[filename])
+            return os.path.join(self.build_lib, filename)
+
+        # the inplace option requires to find the package directory
+        # using the build_py command for that
+        package = '.'.join(modpath[0:-1])
+        build_py = self.get_finalized_command('build_py')
+        package_dir = os.path.abspath(build_py.get_package_dir(package))
+
+        # returning
+        #   package_dir/filename
+        return os.path.join(package_dir, filename)
+
+    def get_ext_fullname(self, ext_name):
+        """Returns the fullname of a given extension name.
+
+        Adds the `package.` prefix"""
+        if self.package is None:
+            return ext_name
+        else:
+            return self.package + '.' + ext_name
+
+    def get_ext_filename(self, ext_name):
+        r"""Convert the name of an extension (eg. "foo.bar") into the name
+        of the file from which it will be loaded (eg. "foo/bar.so", or
+        "foo\bar.pyd").
+        """
+        ext_path = ext_name.split('.')
+        # OS/2 has an 8 character module (extension) limit :-(
+        if os.name == "os2":
+            ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
+        # extensions in debug_mode are named 'module_d.pyd' under windows
+        so_ext = sysconfig.get_config_var('SO')
+        if os.name == 'nt' and self.debug:
+            return os.path.join(*ext_path) + '_d' + so_ext
+        return os.path.join(*ext_path) + so_ext
+
+    def get_export_symbols(self, ext):
+        """Return the list of symbols that a shared extension has to
+        export.  This either uses 'ext.export_symbols' or, if it's not
+        provided, "init" + module_name.  Only relevant on Windows, where
+        the .pyd file (DLL) must export the module "init" function.
+        """
+        initfunc_name = "init" + ext.name.split('.')[-1]
+        if initfunc_name not in ext.export_symbols:
+            ext.export_symbols.append(initfunc_name)
+        return ext.export_symbols
+
+    def get_libraries(self, ext):
+        """Return the list of libraries to link against when building a
+        shared extension.  On most platforms, this is just 'ext.libraries';
+        on Windows and OS/2, we add the Python library (eg. python20.dll).
+        """
+        # The python library is always needed on Windows.  For MSVC, this
+        # is redundant, since the library is mentioned in a pragma in
+        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
+        # to need it mentioned explicitly, though, so that's what we do.
+        # Append '_d' to the python import library on debug builds.
+        if sys.platform == "win32":
+            from packaging.compiler.msvccompiler import MSVCCompiler
+            if not isinstance(self.compiler_obj, MSVCCompiler):
+                template = "python%d%d"
+                if self.debug:
+                    template = template + '_d'
+                pythonlib = (template %
+                       (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+                # don't extend ext.libraries, it may be shared with other
+                # extensions, it is a reference to the original list
+                return ext.libraries + [pythonlib]
+            else:
+                return ext.libraries
+        elif sys.platform == "os2emx":
+            # EMX/GCC requires the python library explicitly, and I
+            # believe VACPP does as well (though not confirmed) - AIM Apr01
+            template = "python%d%d"
+            # debug versions of the main DLL aren't supported, at least
+            # not at this time - AIM Apr01
+            #if self.debug:
+            #    template = template + '_d'
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib]
+        elif sys.platform[:6] == "cygwin":
+            template = "python%d.%d"
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib]
+        elif sys.platform[:6] == "atheos":
+            template = "python%d.%d"
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # Get SHLIBS from Makefile
+            extra = []
+            for lib in sysconfig.get_config_var('SHLIBS').split():
+                if lib.startswith('-l'):
+                    extra.append(lib[2:])
+                else:
+                    extra.append(lib)
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib, "m"] + extra
+
+        elif sys.platform == 'darwin':
+            # Don't use the default code below
+            return ext.libraries
+
+        else:
+            if sysconfig.get_config_var('Py_ENABLE_SHARED'):
+                template = "python%d.%d"
+                pythonlib = (template %
+                             (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+                return ext.libraries + [pythonlib]
+            else:
+                return ext.libraries
diff --git a/Lib/packaging/command/build_py.py b/Lib/packaging/command/build_py.py
new file mode 100644
index 0000000..360f4c9
--- /dev/null
+++ b/Lib/packaging/command/build_py.py
@@ -0,0 +1,410 @@
+"""Build pure Python modules (just copy to build directory)."""
+
+import os
+import sys
+from glob import glob
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError, PackagingFileError
+from packaging.util import convert_path
+from packaging.compat import Mixin2to3
+
+# marking public APIs
+__all__ = ['build_py']
+
class build_py(Command, Mixin2to3):
    """Copy pure Python modules and package data to the build directory,
    optionally byte-compiling them and running 2to3 on the copies."""

    description = "build pure Python modules (copy to build directory)"

    user_options = [
        ('build-lib=', 'd', "directory to build (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('use-2to3', None,
         "use 2to3 to make source python 3.x compatible"),
        ('convert-2to3-doctests', None,
         "use 2to3 to convert doctests in separate text files"),
        ('use-2to3-fixers', None,
         "list additional fixers opted for during 2to3 conversion"),
        ]

    boolean_options = ['compile', 'force']
    negative_opt = {'no-compile': 'compile'}

    def initialize_options(self):
        """Set default values for all the options this command supports."""
        self.build_lib = None
        self.py_modules = None
        self.package = None
        self.package_data = None
        self.package_dir = None
        self.compile = False
        self.optimize = 0
        self.force = None
        self._updated_files = []
        self._doctests_2to3 = []
        self.use_2to3 = False
        self.convert_2to3_doctests = None
        self.use_2to3_fixers = None

    def finalize_options(self):
        """Fill in unset options from the 'build' command and the
        distribution, and validate the 'optimize' value."""
        self.set_undefined_options('build',
                                   'use_2to3', 'use_2to3_fixers',
                                   'convert_2to3_doctests', 'build_lib',
                                   'force')

        # Get the distribution options that are aliases for build_py
        # options -- list of packages and list of modules.
        self.packages = self.distribution.packages
        self.py_modules = self.distribution.py_modules
        self.package_data = self.distribution.package_data
        self.package_dir = None
        if self.distribution.package_dir is not None:
            self.package_dir = convert_path(self.distribution.package_dir)
        self.data_files = self.get_data_files()

        # Ick, copied straight from install_lib.py (fancy_getopt needs a
        # type system!  Hell, *everything* needs a type system!!!)
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise PackagingOptionError("optimize must be 0, 1, or 2")

    def run(self):
        """Copy modules and package data to the build directory, run
        2to3 on the updated copies if requested, then byte-compile."""
        # XXX copy_file by default preserves atime and mtime.  IMHO this is
        # the right thing to do, but perhaps it should be an option -- in
        # particular, a site administrator might want installed files to
        # reflect the time of installation rather than the last
        # modification time before the installed release.

        # XXX copy_file by default preserves mode, which appears to be the
        # wrong thing to do: if a file is read-only in the working
        # directory, we want it to be installed read/write so that the next
        # installation of the same module distribution can overwrite it
        # without problems.  (This might be a Unix-specific issue.)  Thus
        # we turn off 'preserve_mode' when copying to the build directory,
        # since the build directory is supposed to be exactly what the
        # installation will look like (ie. we preserve mode when
        # installing).

        # Two options control which modules will be installed: 'packages'
        # and 'py_modules'.  The former lets us work with whole packages, not
        # specifying individual modules at all; the latter is for
        # specifying modules one-at-a-time.

        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        if self.use_2to3 and self._updated_files:
            self.run_2to3(self._updated_files, self._doctests_2to3,
                                            self.use_2to3_fixers)

        self.byte_compile(self.get_outputs(include_bytecode=False))

    # -- Top-level worker functions ------------------------------------

    def get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples.

        Helper function for `finalize_options()`.
        """
        data = []
        if not self.packages:
            return data
        for package in self.packages:
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = 0
            if src_dir:
                plen = len(src_dir) + 1

            # Strip directory from globbed filenames
            filenames = [
                file[plen:] for file in self.find_data_files(package, src_dir)
                ]
            data.append((package, src_dir, build_dir, filenames))
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'.

        Helper function for `get_data_files()`.
        """
        globs = (self.package_data.get('', [])
                 + self.package_data.get(package, []))
        files = []
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            filelist = glob(os.path.join(src_dir, convert_path(pattern)))
            # Files that match more than one pattern are only added once
            files.extend(fn for fn in filelist if fn not in files)
        return files

    def build_package_data(self):
        """Copy data files into build directory.

        Helper function for `run()`.
        """
        # FIXME add tests for this method
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                srcfile = os.path.join(src_dir, filename)
                self.mkpath(os.path.dirname(target))
                outf, copied = self.copy_file(srcfile,
                               target, preserve_mode=False)
                # fixed: the distribution attribute is
                # 'convert_2to3_doctests' (a list of paths); the former
                # spelling 'convert_2to3.doctests' raised AttributeError
                if copied and srcfile in self.distribution.convert_2to3_doctests:
                    self._doctests_2to3.append(outf)

    # XXX - this should be moved to the Distribution class as it is not
    # only needed for build_py. It also has no dependencies on this class.
    def get_package_dir(self, package):
        """Return the directory, relative to the top of the source
           distribution, where package 'package' should be found
           (at least according to the 'package_dir' option, if any)."""

        path = package.split('.')
        if self.package_dir is not None:
            path.insert(0, self.package_dir)

        if len(path) > 0:
            return os.path.join(*path)

        return ''

    def check_package(self, package, package_dir):
        """Helper function for `find_package_modules()` and `find_modules()'.

        Check that 'package_dir' exists and is a directory, and that a
        non-root package contains an __init__.py file.  Return the path
        to __init__.py if found, else None (a warning is logged).
        """
        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise PackagingFileError(
                      "package directory '%s' does not exist" % package_dir)
            if not os.path.isdir(package_dir):
                raise PackagingFileError(
                       "supposed package directory '%s' exists, "
                       "but is not a directory" % package_dir)

        # Require __init__.py for all but the "root package"
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py
            else:
                logger.warning(("package init file '%s' not found " +
                                "(or not a regular file)"), init_py)

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None

    def check_module(self, module, module_file):
        """Return True if 'module_file' exists, else log a warning and
        return False."""
        if not os.path.isfile(module_file):
            logger.warning("file %s (for module %s) not found",
                           module_file, module)
            return False
        else:
            return True

    def find_package_modules(self, package, package_dir):
        """Return (package, module, filename) tuples for all .py files
        found in 'package_dir', excluding the setup script itself."""
        self.check_package(package, package_dir)
        module_files = glob(os.path.join(package_dir, "*.py"))
        modules = []
        if self.distribution.script_name is not None:
            setup_script = os.path.abspath(self.distribution.script_name)
        else:
            setup_script = None

        for f in module_files:
            abs_f = os.path.abspath(f)
            if abs_f != setup_script:
                module = os.path.splitext(os.path.basename(f))[0]
                modules.append((package, module, f))
            else:
                logger.debug("excluding %s", setup_script)
        return modules

    def find_modules(self):
        """Finds individually-specified Python modules, ie. those listed by
        module name in 'self.py_modules'.  Returns a list of tuples (package,
        module_base, filename): 'package' is a tuple of the path through
        package-space to the module; 'module_base' is the bare (no
        packages, no dots) module name, and 'filename' is the path to the
        ".py" file (relative to the distribution root) that implements the
        module.
        """
        # Map package names to tuples of useful info about the package:
        #    (package_dir, checked)
        # package_dir - the directory where we'll find source files for
        #   this package
        # checked - true if we have checked that the package directory
        #   is valid (exists, contains __init__.py, ... ?)
        packages = {}

        # List of (package, module, filename) tuples to return
        modules = []

        # We treat modules-in-packages almost the same as toplevel modules,
        # just the "package" for a toplevel is empty (either an empty
        # string or empty list, depending on context).  Differences:
        #   - don't check for __init__.py in directory for empty package
        for module in self.py_modules:
            path = module.split('.')
            package = '.'.join(path[0:-1])
            module_base = path[-1]

            try:
                package_dir, checked = packages[package]
            except KeyError:
                package_dir = self.get_package_dir(package)
                checked = False

            if not checked:
                init_py = self.check_package(package, package_dir)
                packages[package] = (package_dir, True)
                if init_py:
                    modules.append((package, "__init__", init_py))

            # XXX perhaps we should also check for just .pyc files
            # (so greedy closed-source bastards can distribute Python
            # modules too)
            module_file = os.path.join(package_dir, module_base + ".py")
            if not self.check_module(module, module_file):
                continue

            modules.append((package, module_base, module_file))

        return modules

    def find_all_modules(self):
        """Compute the list of all modules that will be built, whether
        they are specified one-module-at-a-time ('self.py_modules') or
        by whole packages ('self.packages').  Return a list of tuples
        (package, module, module_file), just like 'find_modules()' and
        'find_package_modules()' do."""
        modules = []
        if self.py_modules:
            modules.extend(self.find_modules())
        if self.packages:
            for package in self.packages:
                package_dir = self.get_package_dir(package)
                m = self.find_package_modules(package, package_dir)
                modules.extend(m)
        return modules

    def get_source_files(self):
        """Return all source files handled by this command: module
        sources plus package data files."""
        sources = [module[-1] for module in self.find_all_modules()]
        sources += [
            os.path.join(src_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames]
        return sources

    def get_module_outfile(self, build_dir, package, module):
        """Return the output path of 'module' from package 'package'
        (a sequence of path components) under 'build_dir'."""
        outfile_path = [build_dir] + list(package) + [module + ".py"]
        return os.path.join(*outfile_path)

    def get_outputs(self, include_bytecode=True):
        """Return the list of files this command creates: copied module
        sources (plus .pyc/.pyo when 'include_bytecode') and data files."""
        modules = self.find_all_modules()
        outputs = []
        for package, module, module_file in modules:
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            outputs.append(filename)
            if include_bytecode:
                if self.compile:
                    outputs.append(filename + "c")
                if self.optimize > 0:
                    outputs.append(filename + "o")

        outputs += [
            os.path.join(build_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames]

        return outputs

    def build_module(self, module, module_file, package):
        """Copy one module source file to its location under build_lib.

        'package' may be a dotted string, list or tuple of path parts.
        Returns the (outfile, copied) pair from copy_file().
        """
        if isinstance(package, str):
            package = package.split('.')
        elif not isinstance(package, (list, tuple)):
            raise TypeError(
                  "'package' must be a string (dot-separated), list, or tuple")

        # Now put the module source file into the "build" area -- this is
        # easy, we just copy it somewhere under self.build_lib (the build
        # directory for Python source).
        outfile = self.get_module_outfile(self.build_lib, package, module)
        outdir = os.path.dirname(outfile)
        self.mkpath(outdir)
        return self.copy_file(module_file, outfile, preserve_mode=False)

    def build_modules(self):
        """Build (copy) every individually-specified module."""
        modules = self.find_modules()
        for package, module, module_file in modules:

            # Now "build" the module -- ie. copy the source file to
            # self.build_lib (the build directory for Python source).
            # (Actually, it gets copied to the directory for this package
            # under self.build_lib.)
            self.build_module(module, module_file, package)

    def build_packages(self):
        """Build (copy) every module of every listed package."""
        for package in self.packages:

            # Get list of (package, module, module_file) tuples based on
            # scanning the package directory.  'package' is only included
            # in the tuple so that 'find_modules()' and
            # 'find_package_tuples()' have a consistent interface; it's
            # ignored here (apart from a sanity check).  Also, 'module' is
            # the *unqualified* module name (ie. no dots, no package -- we
            # already know its package!), and 'module_file' is the path to
            # the .py file, relative to the current directory
            # (ie. including 'package_dir').
            package_dir = self.get_package_dir(package)
            modules = self.find_package_modules(package, package_dir)

            # Now loop over the modules we found, "building" each one (just
            # copy it to self.build_lib).
            for package_, module, module_file in modules:
                assert package == package_
                self.build_module(module, module_file, package)

    def byte_compile(self, files):
        """Byte-compile 'files' under build_lib according to the
        'compile' and 'optimize' options; no-op when bytecode writing
        is disabled for the interpreter."""
        if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode:
            logger.warning('%s: byte-compiling is disabled, skipping.',
                           self.get_command_name())
            return

        from packaging.util import byte_compile
        prefix = self.build_lib
        if prefix[-1] != os.sep:
            prefix = prefix + os.sep

        # XXX this code is essentially the same as the 'byte_compile()
        # method of the "install_lib" command, except for the determination
        # of the 'prefix' string.  Hmmm.

        if self.compile:
            byte_compile(files, optimize=0,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
        if self.optimize > 0:
            byte_compile(files, optimize=self.optimize,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
diff --git a/Lib/packaging/command/build_scripts.py b/Lib/packaging/command/build_scripts.py
new file mode 100644
index 0000000..7fba0e5
--- /dev/null
+++ b/Lib/packaging/command/build_scripts.py
@@ -0,0 +1,132 @@
+"""Build scripts (copy to build dir and fix up shebang line)."""
+
+import os
+import re
+import sysconfig
+
+from packaging.command.cmd import Command
+from packaging.util import convert_path, newer
+from packaging import logger
+from packaging.compat import Mixin2to3
+
+
+# check if Python is called on the first line with this expression
+first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
+
class build_scripts(Command, Mixin2to3):
    """Copy scripts to the build directory, adjusting shebang lines.

    Scripts whose first line matches 'first_line_re' get that line
    rewritten to point at the destination Python interpreter.
    """

    description = "build scripts (copy and fix up shebang line)"

    user_options = [
        ('build-dir=', 'd', "directory to build (copy) to"),
        # fixed: help string was missing its closing parenthesis
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('executable=', 'e', "specify final destination interpreter path"),
        ]

    boolean_options = ['force']

    def initialize_options(self):
        """Set default values for all the options this command supports."""
        self.build_dir = None
        self.scripts = None
        self.force = None
        self.executable = None
        self.outfiles = None
        self.use_2to3 = False
        self.convert_2to3_doctests = None
        self.use_2to3_fixers = None

    def finalize_options(self):
        """Fill in unset options from the 'build' command and the
        distribution."""
        self.set_undefined_options('build',
                                   ('build_scripts', 'build_dir'),
                                   'use_2to3', 'use_2to3_fixers',
                                   'convert_2to3_doctests', 'force',
                                   'executable')
        self.scripts = self.distribution.scripts

    def get_source_files(self):
        """Return the list of script source files."""
        return self.scripts

    def run(self):
        """Copy the scripts, then run 2to3 on the copies if requested."""
        if not self.scripts:
            return
        copied_files = self.copy_scripts()
        if self.use_2to3 and copied_files:
            self._run_2to3(copied_files, fixers=self.use_2to3_fixers)

    def copy_scripts(self):
        """Copy each script listed in 'self.scripts'; if it's marked as a
        Python script in the Unix way (first line matches 'first_line_re',
        ie. starts with "#!" and contains "python"), then adjust the first
        line to refer to the current Python interpreter as we copy.
        """
        self.mkpath(self.build_dir)
        outfiles = []
        for script in self.scripts:
            adjust = False
            script = convert_path(script)
            outfile = os.path.join(self.build_dir, os.path.basename(script))
            outfiles.append(outfile)

            if not self.force and not newer(script, outfile):
                logger.debug("not copying %s (up-to-date)", script)
                continue

            # Always open the file, but ignore failures in dry-run mode --
            # that way, we'll get accurate feedback if we can read the
            # script.
            try:
                f = open(script, "r")
            except IOError:
                if not self.dry_run:
                    raise
                f = None
            else:
                first_line = f.readline()
                if not first_line:
                    # fixed: close the handle before skipping the script;
                    # it used to be leaked here
                    f.close()
                    logger.warning('%s: %s is an empty file (skipping)',
                                   self.get_command_name(), script)
                    continue

                match = first_line_re.match(first_line)
                if match:
                    adjust = True
                    post_interp = match.group(1) or ''

            if adjust:
                logger.info("copying and adjusting %s -> %s", script,
                         self.build_dir)
                if not self.dry_run:
                    outf = open(outfile, "w")
                    if not sysconfig.is_python_build():
                        outf.write("#!%s%s\n" %
                                   (self.executable,
                                    post_interp))
                    else:
                        # building Python itself: point at the interpreter
                        # inside the build tree
                        outf.write("#!%s%s\n" %
                                   (os.path.join(
                            sysconfig.get_config_var("BINDIR"),
                           "python%s%s" % (sysconfig.get_config_var("VERSION"),
                                           sysconfig.get_config_var("EXE"))),
                                    post_interp))
                    outf.writelines(f.readlines())
                    outf.close()
                if f:
                    f.close()
            else:
                if f:
                    f.close()
                self.copy_file(script, outfile)

        if os.name == 'posix':
            # make the copied scripts world-readable and executable
            for file in outfiles:
                if self.dry_run:
                    logger.info("changing mode of %s", file)
                else:
                    oldmode = os.stat(file).st_mode & 0o7777
                    newmode = (oldmode | 0o555) & 0o7777
                    if newmode != oldmode:
                        logger.info("changing mode of %s from %o to %o",
                                 file, oldmode, newmode)
                        os.chmod(file, newmode)
        return outfiles
diff --git a/Lib/packaging/command/check.py b/Lib/packaging/command/check.py
new file mode 100644
index 0000000..94c4a97
--- /dev/null
+++ b/Lib/packaging/command/check.py
@@ -0,0 +1,88 @@
+"""Check PEP compliance of metadata."""
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingSetupError
+from packaging.util import resolve_name
+
class check(Command):
    """Check that the distribution's metadata complies with the PEPs."""

    description = "check PEP compliance of metadata"

    user_options = [('metadata', 'm', 'Verify metadata'),
                    ('all', 'a', 'runs extended set of checks'),
                    ('strict', 's',
                     'Will exit with an error if a check fails')]

    boolean_options = ['metadata', 'all', 'strict']

    def initialize_options(self):
        """Sets default values for options."""
        self.all = False
        self.metadata = True
        self.strict = False
        self._warnings = []

    def finalize_options(self):
        pass

    def warn(self, msg, *args):
        """Wrapper around logging that also remembers messages."""
        # XXX we could use a special handler for this, but would need to test
        # if it works even if the logger has a too high level
        self._warnings.append((msg, args))
        # fixed: a separator was missing between the command name and the
        # message, producing e.g. "checkmissing required metadata: ..."
        return logger.warning(self.get_command_name() + ': ' + msg, *args)

    def run(self):
        """Runs the command."""
        # perform the various tests
        if self.metadata:
            self.check_metadata()
        if self.all:
            self.check_restructuredtext()
            self.check_hooks_resolvable()

        # let's raise an error in strict mode, if we have at least
        # one warning
        if self.strict and len(self._warnings) > 0:
            msg = '\n'.join(msg % args for msg, args in self._warnings)
            raise PackagingSetupError(msg)

    def check_metadata(self):
        """Ensures that all required elements of metadata are supplied.

        name, version, URL, author

        Warns if any are missing.
        """
        missing, warnings = self.distribution.metadata.check(strict=True)
        if missing != []:
            self.warn('missing required metadata: %s', ', '.join(missing))
        for warning in warnings:
            self.warn(warning)

    def check_restructuredtext(self):
        """Checks if the long string fields are reST-compliant."""
        missing, warnings = self.distribution.metadata.check(restructuredtext=True)
        if self.distribution.metadata.docutils_support:
            for warning in warnings:
                line = warning[-1].get('line')
                if line is None:
                    warning = warning[1]
                else:
                    warning = '%s (line %s)' % (warning[1], line)
                self.warn(warning)
        elif self.strict:
            raise PackagingSetupError('The docutils package is needed.')

    def check_hooks_resolvable(self):
        """Warn about configured pre/post-hooks that cannot be imported."""
        for options in self.distribution.command_options.values():
            for hook_kind in ("pre_hook", "post_hook"):
                # fixed: 'break' made an absent pre_hook skip the check of
                # post_hook as well; the two kinds are independent
                if hook_kind not in options:
                    continue
                for hook_name in options[hook_kind][1].values():
                    try:
                        resolve_name(hook_name)
                    except ImportError:
                        self.warn('name %r cannot be resolved', hook_name)
diff --git a/Lib/packaging/command/clean.py b/Lib/packaging/command/clean.py
new file mode 100644
index 0000000..4f60f4e
--- /dev/null
+++ b/Lib/packaging/command/clean.py
@@ -0,0 +1,76 @@
+"""Clean up temporary files created by the build command."""
+
+# Contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>
+
+import os
+from shutil import rmtree
+from packaging.command.cmd import Command
+from packaging import logger
+
class clean(Command):
    """Remove temporary files created by the 'build' command."""

    description = "clean up temporary files from 'build' command"
    user_options = [
        ('build-base=', 'b',
         "base build directory (default: 'build.build-base')"),
        ('build-lib=', None,
         "build directory for all modules (default: 'build.build-lib')"),
        ('build-temp=', 't',
         "temporary build directory (default: 'build.build-temp')"),
        ('build-scripts=', None,
         "build directory for scripts (default: 'build.build-scripts')"),
        ('bdist-base=', None,
         "temporary directory for built distributions"),
        ('all', 'a',
         "remove all build output, not just temporary by-products")
    ]

    boolean_options = ['all']

    def initialize_options(self):
        # everything defaults to None so finalize_options() can tell
        # which options were left unset and copy them from build/bdist
        self.build_base = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.bdist_base = None
        self.all = None

    def finalize_options(self):
        # inherit directory locations from the 'build' and 'bdist' commands
        self.set_undefined_options('build', 'build_base', 'build_lib',
                                   'build_scripts', 'build_temp')
        self.set_undefined_options('bdist', 'bdist_base')

    def _remove_tree(self, directory, missing_log):
        """Remove *directory* (honoring dry-run) if it exists; otherwise
        report its absence through *missing_log* (a logger method)."""
        if os.path.exists(directory):
            # log the removal whether or not it is actually performed,
            # so real runs are no longer silent about what they delete
            logger.info('removing %s', directory)
            if not self.dry_run:
                rmtree(directory)
        else:
            missing_log("'%s' does not exist -- can't clean it",
                        directory)

    def run(self):
        # remove the build/temp.<plat> directory (unless it's already
        # gone); its absence is only worth a debug message
        self._remove_tree(self.build_temp, logger.debug)

        if self.all:
            # remove the remaining build directories; here a missing
            # directory is surprising enough to warrant a warning
            for directory in (self.build_lib,
                              self.bdist_base,
                              self.build_scripts):
                self._remove_tree(directory, logger.warning)

        # just for the heck of it, try to remove the base build directory:
        # we might have emptied it right now, but if not we don't care
        if not self.dry_run:
            try:
                os.rmdir(self.build_base)
                logger.info("removing '%s'", self.build_base)
            except OSError:
                pass
diff --git a/Lib/packaging/command/cmd.py b/Lib/packaging/command/cmd.py
new file mode 100644
index 0000000..fa56aa6
--- /dev/null
+++ b/Lib/packaging/command/cmd.py
@@ -0,0 +1,440 @@
+"""Base class for commands."""
+
+import os
+import re
+from shutil import copyfile, move, make_archive
+from packaging import util
+from packaging import logger
+from packaging.errors import PackagingOptionError
+
+
class Command:
    """Abstract base class for defining command classes, the "worker bees"
    of the Packaging.  A useful analogy for command classes is to think of
    them as subroutines with local variables called "options".  The options
    are "declared" in 'initialize_options()' and "defined" (given their
    final values, aka "finalized") in 'finalize_options()', both of which
    must be defined by every command class.  The distinction between the
    two is necessary because option values might come from the outside
    world (command line, config file, ...), and any options dependent on
    other options must be computed *after* these outside influences have
    been processed -- hence 'finalize_options()'.  The "body" of the
    subroutine, where it does all its work based on the values of its
    options, is the 'run()' method, which must also be implemented by every
    command class.
    """

    # 'sub_commands' formalizes the notion of a "family" of commands,
    # eg. "install_dist" as the parent with sub-commands "install_lib",
    # "install_headers", etc.  The parent of a family of commands
    # defines 'sub_commands' as a class attribute; it's a list of
    #    (command_name : string, predicate : unbound_method | string | None)
    # tuples, where 'predicate' is a method of the parent command that
    # determines whether the corresponding command is applicable in the
    # current situation.  (Eg. "install_headers" is only applicable if
    # we have any C header files to install.)  If 'predicate' is None,
    # that command is always applicable.
    #
    # 'sub_commands' is usually defined at the *end* of a class, because
    # predicates can be unbound methods, so they must already have been
    # defined.  The canonical example is the "install_dist" command.
    sub_commands = []

    # Pre and post command hooks are run just before or just after the command
    # itself. They are simple functions that receive the command instance. They
    # are specified as callable objects or dotted strings (for lazy loading).
    pre_hook = None
    post_hook = None

    # -- Creation/initialization methods -------------------------------

    def __init__(self, dist):
        """Create and initialize a new Command object.  Most importantly,
        invokes the 'initialize_options()' method, which is the real
        initializer and depends on the actual command being instantiated.
        """
        # late import because of mutual dependence between these classes
        from packaging.dist import Distribution

        if not isinstance(dist, Distribution):
            raise TypeError("dist must be a Distribution instance")
        if self.__class__ is Command:
            raise RuntimeError("Command is an abstract class")

        self.distribution = dist
        self.initialize_options()

        # Per-command versions of the global flags, so that the user can
        # customize Packaging's behaviour command-by-command and let some
        # commands fall back on the Distribution's behaviour.  None means
        # "not defined, check self.distribution's copy", while 0 or 1 mean
        # false and true (duh).  Note that this means figuring out the real
        # value of each flag is a touch complicated -- hence "self._dry_run"
        # will be handled by a property, below.
        # XXX This needs to be fixed. [I changed it to a property--does that
        #     "fix" it?]
        self._dry_run = None

        # Some commands define a 'self.force' option to ignore file
        # timestamps, but methods defined *here* assume that
        # 'self.force' exists for all commands.  So define it here
        # just to be safe.
        self.force = None

        # The 'help' flag is just used for command line parsing, so
        # none of that complicated bureaucracy is needed.
        self.help = False

        # 'finalized' records whether or not 'finalize_options()' has been
        # called.  'finalize_options()' itself should not pay attention to
        # this flag: it is the business of 'ensure_finalized()', which
        # always calls 'finalize_options()', to respect/update it.
        self.finalized = False

    # XXX A more explicit way to customize dry_run would be better.
    @property
    def dry_run(self):
        # fall back on the Distribution's flag when no per-command
        # value has been set
        if self._dry_run is None:
            return getattr(self.distribution, 'dry_run')
        else:
            return self._dry_run

    def ensure_finalized(self):
        """Call 'finalize_options()' once, recording that it has run."""
        if not self.finalized:
            self.finalize_options()
        self.finalized = True

    # Subclasses must define:
    #   initialize_options()
    #     provide default values for all options; may be customized by
    #     setup script, by options from config file(s), or by command-line
    #     options
    #   finalize_options()
    #     decide on the final values for all options; this is called
    #     after all possible intervention from the outside world
    #     (command line, option file, etc.) has been processed
    #   run()
    #     run the command: do whatever it is we're here to do,
    #     controlled by the command's various option values

    def initialize_options(self):
        """Set default values for all the options that this command
        supports.  Note that these defaults may be overridden by other
        commands, by the setup script, by config files, or by the
        command line.  Thus, this is not the place to code dependencies
        between options; generally, 'initialize_options()' implementations
        are just a bunch of "self.foo = None" assignments.

        This method must be implemented by all command classes.
        """
        raise RuntimeError(
            "abstract method -- subclass %s must override" % self.__class__)

    def finalize_options(self):
        """Set final values for all the options that this command supports.
        This is always called as late as possible, ie.  after any option
        assignments from the command line or from other commands have been
        done.  Thus, this is the place to code option dependencies: if
        'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
        long as 'foo' still has the same value it was assigned in
        'initialize_options()'.

        This method must be implemented by all command classes.
        """
        raise RuntimeError(
            "abstract method -- subclass %s must override" % self.__class__)

    def dump_options(self, header=None, indent=""):
        """Log this command's options and their current values."""
        if header is None:
            header = "command options for '%s':" % self.get_command_name()
        logger.info(indent + header)
        indent = indent + "  "
        negative_opt = getattr(self, 'negative_opt', ())
        for option, _, _ in self.user_options:
            if option in negative_opt:
                continue
            # map the command-line spelling to the attribute name
            option = option.replace('-', '_')
            if option[-1] == "=":
                option = option[:-1]
            value = getattr(self, option)
            logger.info(indent + "%s = %s", option, value)

    def run(self):
        """A command's raison d'etre: carry out the action it exists to
        perform, controlled by the options initialized in
        'initialize_options()', customized by other commands, the setup
        script, the command line and config files, and finalized in
        'finalize_options()'.  All terminal output and filesystem
        interaction should be done by 'run()'.

        This method must be implemented by all command classes.
        """
        raise RuntimeError(
            "abstract method -- subclass %s must override" % self.__class__)

    # -- External interface --------------------------------------------
    # (called by outsiders)

    def get_source_files(self):
        """Return the list of files that are used as inputs to this command,
        i.e. the files used to generate the output files.  The result is used
        by the `sdist` command in determining the set of default files.

        Command classes should implement this method if they operate on files
        from the source tree.
        """
        return []

    def get_outputs(self):
        """Return the list of files that would be produced if this command
        were actually run.  Not affected by the "dry-run" flag or whether
        any other commands have been run.

        Command classes should implement this method if they produce any
        output files that get consumed by another command.  e.g., `build_ext`
        returns the list of built extension modules, but not any temporary
        files used in the compilation process.
        """
        return []

    # -- Option validation methods -------------------------------------
    # (these are very handy in writing the 'finalize_options()' method)
    #
    # NB. the general philosophy here is to ensure that a particular option
    # value meets certain type and value constraints.  If not, we try to
    # force it into conformance (eg. if we expect a list but have a string,
    # split the string on comma and/or whitespace).  If we can't force the
    # option into conformance, raise PackagingOptionError.  Thus, command
    # classes need do nothing more than (eg.)
    #   self.ensure_string_list('foo')
    # and they can be guaranteed that thereafter, self.foo will be
    # a list of strings.

    def _ensure_stringlike(self, option, what, default=None):
        """Return the option's value, substituting 'default' when it is None.

        Raise PackagingOptionError when the value is neither None nor a str.
        """
        val = getattr(self, option)
        if val is None:
            setattr(self, option, default)
            return default
        elif not isinstance(val, str):
            raise PackagingOptionError("'%s' must be a %s (got `%s`)" %
                                       (option, what, val))
        return val

    def ensure_string(self, option, default=None):
        """Ensure that 'option' is a string; if not defined, set it to
        'default'.
        """
        self._ensure_stringlike(option, "string", default)

    def ensure_string_list(self, option):
        r"""Ensure that 'option' is a list of strings.  If 'option' is
        currently a string, we split it either on /,\s*/ or /\s+/, so
        "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
        ["foo", "bar", "baz"].
        """
        val = getattr(self, option)
        if val is None:
            # undefined is acceptable; nothing to normalize
            return
        elif isinstance(val, str):
            setattr(self, option, re.split(r',\s*|\s+', val))
        else:
            if isinstance(val, list):
                # checks if all elements are str
                ok = True
                for element in val:
                    if not isinstance(element, str):
                        ok = False
                        break
            else:
                ok = False

            if not ok:
                raise PackagingOptionError(
                    "'%s' must be a list of strings (got %r)" % (option, val))

    def _ensure_tested_string(self, option, tester,
                              what, error_fmt, default=None):
        """Ensure a string-like option also satisfies 'tester', else raise."""
        val = self._ensure_stringlike(option, what, default)
        if val is not None and not tester(val):
            raise PackagingOptionError(
                ("error in '%s' option: " + error_fmt) % (option, val))

    def ensure_filename(self, option):
        """Ensure that 'option' is the name of an existing file."""
        self._ensure_tested_string(option, os.path.isfile,
                                   "filename",
                                   "'%s' does not exist or is not a file")

    def ensure_dirname(self, option):
        """Ensure that 'option' is the name of an existing directory."""
        self._ensure_tested_string(option, os.path.isdir,
                                   "directory name",
                                   "'%s' does not exist or is not a directory")

    # -- Convenience methods for commands ------------------------------

    @classmethod
    def get_command_name(cls):
        """Return the 'command_name' class attribute, or the class name."""
        if hasattr(cls, 'command_name'):
            return cls.command_name
        else:
            return cls.__name__

    def set_undefined_options(self, src_cmd, *options):
        """Set values of undefined options from another command.

        Undefined options are options set to None, which is the convention
        used to indicate that an option has not been changed between
        'initialize_options()' and 'finalize_options()'.  This method is
        usually called from 'finalize_options()' for options that depend on
        some other command rather than another option of the same command,
        typically subcommands.

        The 'src_cmd' argument is the other command from which option values
        will be taken (a command object will be created for it if necessary);
        the remaining positional arguments are strings that give the name of
        the option to set. If the name is different on the source and target
        command, you can pass a tuple with '(name_on_source, name_on_dest)' so
        that 'self.name_on_dest' will be set from 'src_cmd.name_on_source'.
        """
        src_cmd_obj = self.distribution.get_command_obj(src_cmd)
        src_cmd_obj.ensure_finalized()
        for obj in options:
            if isinstance(obj, tuple):
                src_option, dst_option = obj
            else:
                src_option, dst_option = obj, obj
            # only copy when the destination is still undefined
            if getattr(self, dst_option) is None:
                setattr(self, dst_option,
                        getattr(src_cmd_obj, src_option))

    def get_finalized_command(self, command, create=True):
        """Wrapper around Distribution's 'get_command_obj()' method: find
        (create if necessary and 'create' is true) the command object for
        'command', call its 'ensure_finalized()' method, and return the
        finalized command object.
        """
        cmd_obj = self.distribution.get_command_obj(command, create)
        cmd_obj.ensure_finalized()
        return cmd_obj

    def get_reinitialized_command(self, command, reinit_subcommands=False):
        """Wrapper around Distribution's 'get_reinitialized_command()'."""
        return self.distribution.get_reinitialized_command(
            command, reinit_subcommands)

    def run_command(self, command):
        """Run some other command: uses the 'run_command()' method of
        Distribution, which creates and finalizes the command object if
        necessary and then invokes its 'run()' method.
        """
        self.distribution.run_command(command)

    def get_sub_commands(self):
        """Determine the sub-commands that are relevant in the current
        distribution (ie., that need to be run).  This is based on the
        'sub_commands' class attribute: each tuple in that list may include
        a method that we call to determine if the subcommand needs to be
        run for the current distribution.  Return a list of command names.
        """
        commands = []
        for sub_command in self.sub_commands:
            if len(sub_command) == 2:
                cmd_name, method = sub_command
                if method is None or method(self):
                    commands.append(cmd_name)
            else:
                commands.append(sub_command)
        return commands

    # -- External world manipulation -----------------------------------

    def execute(self, func, args, msg=None, level=1):
        """Run 'func(*args)' through 'util.execute', honoring dry-run."""
        util.execute(func, args, msg, dry_run=self.dry_run)

    def mkpath(self, name, mode=0o777, dry_run=None, verbose=0):
        """Create a directory, including missing ancestor directories.

        In dry-run mode, only log the directories that would be created.
        """
        if dry_run is None:
            dry_run = self.dry_run
        name = os.path.normpath(name)
        if os.path.isdir(name) or name == '':
            return
        if dry_run:
            # log one line per path component that would be created
            head = ''
            for part in name.split(os.sep):
                logger.info("created directory %s%s", head, part)
                head += part + os.sep
            return
        os.makedirs(name, mode)

    def copy_file(self, infile, outfile,
                  preserve_mode=True, preserve_times=True, link=None, level=1):
        """Copy a file respecting verbose, dry-run and force flags.  (The
        former two default to whatever is in the Distribution object, and
        the latter defaults to false for commands that don't define it.)"""
        # NOTE(review): preserve_mode, preserve_times and link are
        # currently ignored by this implementation
        if self.dry_run:
            # XXX add a comment
            return
        if os.path.isdir(outfile):
            outfile = os.path.join(outfile, os.path.split(infile)[-1])
        copyfile(infile, outfile)
        return outfile, None  # XXX

    def copy_tree(self, infile, outfile, preserve_mode=True,
                  preserve_times=True, preserve_symlinks=False, level=1):
        """Copy an entire directory tree respecting verbose, dry-run,
        and force flags.
        """
        if self.dry_run:
            return  # see if we want to display something


        return util.copy_tree(infile, outfile, preserve_mode, preserve_times,
            preserve_symlinks, not self.force, dry_run=self.dry_run)

    def move_file(self, src, dst, level=1):
        """Move a file respecting the dry-run flag."""
        if self.dry_run:
            return  # XXX log ?
        return move(src, dst)

    def spawn(self, cmd, search_path=True, level=1):
        """Spawn an external command respecting dry-run flag."""
        from packaging.util import spawn
        spawn(cmd, search_path, dry_run=self.dry_run)

    def make_archive(self, base_name, format, root_dir=None, base_dir=None,
                     owner=None, group=None):
        """Create an archive via shutil.make_archive, honoring dry-run."""
        return make_archive(base_name, format, root_dir,
                            base_dir, dry_run=self.dry_run,
                            owner=owner, group=group)

    def make_file(self, infiles, outfile, func, args,
                  exec_msg=None, skip_msg=None, level=1):
        """Special case of 'execute()' for operations that process one or
        more input files and generate one output file.  Works just like
        'execute()', except the operation is skipped and a different
        message printed if 'outfile' already exists and is newer than all
        files listed in 'infiles'.  If the command defined 'self.force',
        and it is true, then the command is unconditionally run -- does no
        timestamp checks.
        """
        if skip_msg is None:
            skip_msg = "skipping %s (inputs unchanged)" % outfile

        # Allow 'infiles' to be a single string
        if isinstance(infiles, str):
            infiles = (infiles,)
        elif not isinstance(infiles, (list, tuple)):
            raise TypeError(
                "'infiles' must be a string, or a list or tuple of strings")

        if exec_msg is None:
            exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))

        # If 'outfile' must be regenerated (either because it doesn't
        # exist, is out-of-date, or the 'force' flag is true) then
        # perform the action that presumably regenerates it
        if self.force or util.newer_group(infiles, outfile):
            self.execute(func, args, exec_msg, level)

        # Otherwise, print the "skip" message
        else:
            logger.debug(skip_msg)
diff --git a/Lib/packaging/command/command_template b/Lib/packaging/command/command_template
new file mode 100644
index 0000000..a12d32b
--- /dev/null
+++ b/Lib/packaging/command/command_template
@@ -0,0 +1,35 @@
"""Do X and Y."""

# This file is a skeleton for writing a new command: copy it, rename the
# class, and fill in the placeholders.  It is deliberately NOT valid
# Python as-is (see the bare 'self. = None' lines below).

from packaging import logger
from packaging.command.cmd import Command


class x(Command):

    # Brief (40-50 characters) description of the command
    description = ""

    # List of option tuples: long name, short name (None if no short
    # name), and help string.
    user_options = [
        ('', '',  # long option, short option (one letter) or None
         ""),  # help text
        ]

    def initialize_options(self):
        # declare every option with a None default; real values are
        # computed later in finalize_options()
        self. = None
        self. = None
        self. = None

    def finalize_options(self):
        if self.x is None:
            self.x = ...

    def run(self):
        ...
        logger.info(...)

        if not self.dry_run:
            ...

        self.execute(..., dry_run=self.dry_run)
diff --git a/Lib/packaging/command/config.py b/Lib/packaging/command/config.py
new file mode 100644
index 0000000..a5feb91
--- /dev/null
+++ b/Lib/packaging/command/config.py
@@ -0,0 +1,351 @@
+"""Prepare the build.
+
+This module provides config, a (mostly) empty command class
+that exists mainly to be sub-classed by specific module distributions and
+applications.  The idea is that while every "config" command is different,
+at least they're all named the same, and users always see "config" in the
+list of standard commands.  Also, this is a good place to put common
+configure-like tasks: "try to compile this C code", or "figure out where
+this header file lives".
+"""
+
+import os
+import re
+
+from packaging.command.cmd import Command
+from packaging.errors import PackagingExecError
+from packaging.compiler import customize_compiler
+from packaging import logger
+
# map language name -> extension used for generated test source files
LANG_EXT = {'c': '.c', 'c++': '.cxx'}
+
+class config(Command):
+
    description = "prepare the build"

    # each entry is (long option, short option or None, help text)
    # NOTE(review): 'define=' and 'undef=' are accepted here but
    # initialize_options() creates no matching attributes -- TODO confirm
    user_options = [
        ('compiler=', None,
         "specify the compiler type"),
        ('cc=', None,
         "specify the compiler executable"),
        ('include-dirs=', 'I',
         "list of directories to search for header files"),
        ('define=', 'D',
         "C preprocessor macros to define"),
        ('undef=', 'U',
         "C preprocessor macros to undefine"),
        ('libraries=', 'l',
         "external C libraries to link with"),
        ('library-dirs=', 'L',
         "directories to search for external C libraries"),

        ('noisy', None,
         "show every action (compile, link, run, ...) taken"),
        ('dump-source', None,
         "dump generated source files before attempting to compile them"),
        ]
+
+
+    # The three standard command methods: since the "config" command
+    # does nothing by default, these are empty.
+
def initialize_options(self):
    """Set every option to its pre-configuration default."""
    # compiler-related options all start out undefined
    self.compiler = None
    self.cc = None
    self.include_dirs = None
    self.libraries = None
    self.library_dirs = None

    # be as verbose as possible until told otherwise
    self.noisy = True
    self.dump_source = True

    # temporary files created along the way, cleaned up by _clean()
    self.temp_files = []
+
def finalize_options(self):
    """Normalize the search-path options.

    String values become lists (directory lists are split on
    os.pathsep); unset values get an empty-list default, except
    include_dirs which falls back to the distribution's setting.
    """
    include = self.include_dirs
    if include is None:
        include = self.distribution.include_dirs or []
    elif isinstance(include, str):
        include = include.split(os.pathsep)
    self.include_dirs = include

    libs = self.libraries
    if libs is None:
        libs = []
    elif isinstance(libs, str):
        libs = [libs]
    self.libraries = libs

    lib_dirs = self.library_dirs
    if lib_dirs is None:
        lib_dirs = []
    elif isinstance(lib_dirs, str):
        lib_dirs = lib_dirs.split(os.pathsep)
    self.library_dirs = lib_dirs
+
def run(self):
    """Do nothing; subclasses implement the actual configure checks."""
+
+
+    # Utility methods for actual "config" commands.  The interfaces are
+    # loosely based on Autoconf macros of similar names.  Sub-classes
+    # may use these freely.
+
def _check_compiler(self):
    """Make sure 'self.compiler' is a CCompiler instance, creating and
    configuring a fresh one when it is not.
    """
    # Imported lazily: pulling in the compiler machinery is expensive
    # and only needed when a check is actually performed.
    from packaging.compiler.ccompiler import CCompiler
    from packaging.compiler import new_compiler
    if isinstance(self.compiler, CCompiler):
        return
    self.compiler = new_compiler(compiler=self.compiler,
                                 dry_run=self.dry_run, force=True)
    customize_compiler(self.compiler)
    if self.include_dirs:
        self.compiler.set_include_dirs(self.include_dirs)
    if self.libraries:
        self.compiler.set_libraries(self.libraries)
    if self.library_dirs:
        self.compiler.set_library_dirs(self.library_dirs)
+
+
def _gen_temp_sourcefile(self, body, headers, lang):
    """Write a test source file and return its name.

    The file is named '_configtest' plus the extension registered for
    'lang' in LANG_EXT; it contains one '#include' line per entry of
    'headers' (if any) followed by 'body', terminated by a newline.
    """
    filename = "_configtest" + LANG_EXT[lang]
    # context manager guarantees the handle is closed even if a write
    # fails (the previous open/close pair leaked it on error)
    with open(filename, "w") as file:
        if headers:
            for header in headers:
                file.write("#include <%s>\n" % header)
            file.write("\n")
        file.write(body)
        # endswith() also handles an empty body, which used to raise
        # IndexError on body[-1]
        if not body.endswith("\n"):
            file.write("\n")
    return filename
+
+    def _preprocess(self, body, headers, include_dirs, lang):
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        out = "_configtest.i"
+        self.temp_files.extend((src, out))
+        self.compiler.preprocess(src, out, include_dirs=include_dirs)
+        return src, out
+
+    def _compile(self, body, headers, include_dirs, lang):
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        if self.dump_source:
+            dump_file(src, "compiling '%s':" % src)
+        obj = self.compiler.object_filenames([src])[0]
+        self.temp_files.extend((src, obj))
+        self.compiler.compile([src], include_dirs=include_dirs)
+        return src, obj
+
+    def _link(self, body, headers, include_dirs, libraries, library_dirs,
+              lang):
+        src, obj = self._compile(body, headers, include_dirs, lang)
+        prog = os.path.splitext(os.path.basename(src))[0]
+        self.compiler.link_executable([obj], prog,
+                                      libraries=libraries,
+                                      library_dirs=library_dirs,
+                                      target_lang=lang)
+
+        if self.compiler.exe_extension is not None:
+            prog = prog + self.compiler.exe_extension
+        self.temp_files.append(prog)
+
+        return src, obj, prog
+
def _clean(self, *filenames):
    """Delete the given files, or every recorded temp file if none given."""
    if not filenames:
        # consume the accumulated temporary files in one go
        filenames, self.temp_files = self.temp_files, []
    logger.info("removing: %s", ' '.join(filenames))
    for name in filenames:
        try:
            os.remove(name)
        except OSError:
            # best effort: the file may never have been created
            pass
+
+
+    # XXX these ignore the dry-run flag: what to do, what to do? even if
+    # you want a dry-run build, you still need some sort of configuration
+    # info.  My inclination is to make it up to the real config command to
+    # consult 'dry_run', and assume a default (minimal) configuration if
+    # true.  The problem with trying to do it here is that you'd have to
+    # return either true or false from all the 'try' methods, neither of
+    # which is correct.
+
+    # XXX need access to the header search path and maybe default macros.
+
def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
    """Build a source file from 'body' (a string containing lines of
    C/C++ code) and 'headers' (a list of header files to include) and
    feed it to the preprocessor.  Return True when preprocessing
    succeeds, False on any error.  ('body' probably isn't of much use,
    but what the heck.)
    """
    from packaging.compiler.ccompiler import CompileError
    self._check_compiler()
    try:
        self._preprocess(body, headers, include_dirs, lang)
        succeeded = True
    except CompileError:
        succeeded = False
    self._clean()
    return succeeded
+
def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
               lang="c"):
    """Construct a source file (just like 'try_cpp()'), run it through
    the preprocessor, and return true if any line of the output matches
    'pattern'.  'pattern' should either be a compiled regex object or a
    string containing a regex.  If both 'body' and 'headers' are None,
    preprocesses an empty file -- which can be useful to determine the
    symbols the preprocessor and compiler set by default.
    """
    self._check_compiler()
    src, out = self._preprocess(body, headers, include_dirs, lang)

    if isinstance(pattern, str):
        pattern = re.compile(pattern)

    match = False
    # iterate the file directly and use a context manager so the handle
    # is closed even if searching raises (the manual readline loop left
    # the file open on error)
    with open(out) as file:
        for line in file:
            if pattern.search(line):
                match = True
                break

    self._clean()
    return match
+
+    def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
+        """Try to compile a source file built from 'body' and 'headers'.
+        Return true on success, false otherwise.
+        """
+        from packaging.compiler.ccompiler import CompileError
+        self._check_compiler()
+        try:
+            self._compile(body, headers, include_dirs, lang)
+            ok = True
+        except CompileError:
+            ok = False
+
+        logger.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+    def try_link(self, body, headers=None, include_dirs=None, libraries=None,
+                 library_dirs=None, lang="c"):
+        """Try to compile and link a source file, built from 'body' and
+        'headers', to executable form.  Return true on success, false
+        otherwise.
+        """
+        from packaging.compiler.ccompiler import CompileError, LinkError
+        self._check_compiler()
+        try:
+            self._link(body, headers, include_dirs,
+                       libraries, library_dirs, lang)
+            ok = True
+        except (CompileError, LinkError):
+            ok = False
+
+        logger.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+    def try_run(self, body, headers=None, include_dirs=None, libraries=None,
+                library_dirs=None, lang="c"):
+        """Try to compile, link to an executable, and run a program
+        built from 'body' and 'headers'.  Return true on success, false
+        otherwise.
+        """
+        from packaging.compiler.ccompiler import CompileError, LinkError
+        self._check_compiler()
+        try:
+            src, obj, exe = self._link(body, headers, include_dirs,
+                                       libraries, library_dirs, lang)
+            self.spawn([exe])
+            ok = True
+        except (CompileError, LinkError, PackagingExecError):
+            ok = False
+
+        logger.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+
+    # -- High-level methods --------------------------------------------
+    # (these are the ones that are actually likely to be useful
+    # when implementing a real-world config command!)
+
+    def check_func(self, func, headers=None, include_dirs=None,
+                   libraries=None, library_dirs=None, decl=False, call=False):
+
+        """Determine if function 'func' is available by constructing a
+        source file that refers to 'func', and compiles and links it.
+        If everything succeeds, returns true; otherwise returns false.
+
+        The constructed source file starts out by including the header
+        files listed in 'headers'.  If 'decl' is true, it then declares
+        'func' (as "int func()"); you probably shouldn't supply 'headers'
+        and set 'decl' true in the same call, or you might get errors about
+        a conflicting declarations for 'func'.  Finally, the constructed
+        'main()' function either references 'func' or (if 'call' is true)
+        calls it.  'libraries' and 'library_dirs' are used when
+        linking.
+        """
+
+        self._check_compiler()
+        body = []
+        if decl:
+            body.append("int %s ();" % func)
+        body.append("int main () {")
+        if call:
+            body.append("  %s();" % func)
+        else:
+            body.append("  %s;" % func)
+        body.append("}")
+        body = "\n".join(body) + "\n"
+
+        return self.try_link(body, headers, include_dirs,
+                             libraries, library_dirs)
+
+    def check_lib(self, library, library_dirs=None, headers=None,
+                  include_dirs=None, other_libraries=[]):
+        """Determine if 'library' is available to be linked against,
+        without actually checking that any particular symbols are provided
+        by it.  'headers' will be used in constructing the source file to
+        be compiled, but the only effect of this is to check if all the
+        header files listed are available.  Any libraries listed in
+        'other_libraries' will be included in the link, in case 'library'
+        has symbols that depend on other libraries.
+        """
+        self._check_compiler()
+        return self.try_link("int main (void) { }",
+                             headers, include_dirs,
+                             [library]+other_libraries, library_dirs)
+
+    def check_header(self, header, include_dirs=None, library_dirs=None,
+                     lang="c"):
+        """Determine if the system header file named by 'header_file'
+        exists and can be found by the preprocessor; return true if so,
+        false otherwise.
+        """
+        return self.try_cpp(body="/* No body */", headers=[header],
+                            include_dirs=include_dirs)
+
+
def dump_file(filename, head=None):
    """Log the content of a file with logger.info.

    If *head* is not None, it is logged before the file content;
    otherwise the file name itself is logged first.
    """
    logger.info(head if head is not None else filename)
    with open(filename) as fp:
        logger.info(fp.read())
diff --git a/Lib/packaging/command/install_data.py b/Lib/packaging/command/install_data.py
new file mode 100644
index 0000000..9ca6279
--- /dev/null
+++ b/Lib/packaging/command/install_data.py
@@ -0,0 +1,79 @@
+"""Install platform-independent data files."""
+
+# Contributed by Bastian Kleineidam
+
+import os
+from shutil import Error
+from sysconfig import get_paths, format_value
+from packaging import logger
+from packaging.util import convert_path
+from packaging.command.cmd import Command
+
+
class install_data(Command):
    """Install platform-independent data files declared by the distribution.

    Each entry of the distribution's data_files mapping associates a source
    file with a destination path that may contain {category} placeholders,
    expanded via sysconfig paths.
    """

    description = "install platform-independent data files"

    user_options = [
        ('install-dir=', 'd',
         "base directory for installing data files "
         "(default: installation base dir)"),
        ('root=', None,
         "install everything relative to this alternate root directory"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ]

    boolean_options = ['force']

    def initialize_options(self):
        """Set the initial values for all options."""
        self.install_dir = None
        self.root = None
        self.force = False
        self.outfiles = []
        self.data_files_out = []
        self.data_files = self.distribution.data_files
        self.warn_dir = True

    def finalize_options(self):
        """Pull unset options from the main install_dist command."""
        self.set_undefined_options('install_dist',
                                   ('install_data', 'install_dir'),
                                   'root', 'force')

    def run(self):
        """Copy each data file to its (category-expanded) destination."""
        self.mkpath(self.install_dir)
        for source, raw_destination in self.data_files.items():
            destination = convert_path(self.expand_categories(raw_destination))
            dir_dest = os.path.abspath(os.path.dirname(destination))

            self.mkpath(dir_dest)
            try:
                out = self.copy_file(source, dir_dest)[0]
            except Error as e:
                logger.warning('%s: %s', self.get_command_name(), e)
                out = destination

            self.outfiles.append(out)
            self.data_files_out.append((source, destination))

    def expand_categories(self, path_with_categories):
        """Substitute {category} placeholders using sysconfig paths."""
        local_vars = get_paths()
        local_vars['distribution.name'] = self.distribution.metadata['Name']
        # expanded twice -- presumably so substitutions that introduce new
        # placeholders get resolved on the second pass (TODO confirm)
        expanded_path = format_value(path_with_categories, local_vars)
        expanded_path = format_value(expanded_path, local_vars)
        if '{' in expanded_path and '}' in expanded_path:
            logger.warning(
                '%s: unable to expand %s, some categories may be missing',
                self.get_command_name(), path_with_categories)
        return expanded_path

    def get_source_files(self):
        """Return the source paths (the data_files mapping keys)."""
        return list(self.data_files)

    def get_inputs(self):
        """Return the input files of this command."""
        return list(self.data_files)

    def get_outputs(self):
        """Return the files actually written by run()."""
        return self.outfiles

    def get_resources_out(self):
        """Return (source, destination) pairs recorded by run()."""
        return self.data_files_out
diff --git a/Lib/packaging/command/install_dist.py b/Lib/packaging/command/install_dist.py
new file mode 100644
index 0000000..dfe6df2
--- /dev/null
+++ b/Lib/packaging/command/install_dist.py
@@ -0,0 +1,625 @@
+"""Main install command, which calls the other install_* commands."""
+
+import sys
+import os
+
+import sysconfig
+from sysconfig import get_config_vars, get_paths, get_path, get_config_var
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingPlatformError
+from packaging.util import write_file
+from packaging.util import convert_path, change_root, get_platform
+from packaging.errors import PackagingOptionError
+
+
+HAS_USER_SITE = True
+
+
class install_dist(Command):
    """Install everything from the build directory.

    This command computes the final installation directories according to
    the selected scheme (prefix, home, user site or explicitly supplied
    directories) and then delegates the actual copying to the install_*
    sub-commands listed in 'sub_commands'.
    """

    description = "install everything from build directory"

    user_options = [
        # Select installation scheme and set base director(y|ies)
        ('prefix=', None,
         "installation prefix"),
        ('exec-prefix=', None,
         "(Unix only) prefix for platform-specific files"),
        ('home=', None,
         "(Unix only) home directory to install under"),

        # Or just set the base director(y|ies)
        ('install-base=', None,
         "base installation directory (instead of --prefix or --home)"),
        ('install-platbase=', None,
         "base installation directory for platform-specific files " +
         "(instead of --exec-prefix or --home)"),
        ('root=', None,
         "install everything relative to this alternate root directory"),

        # Or explicitly set the installation scheme
        ('install-purelib=', None,
         "installation directory for pure Python module distributions"),
        ('install-platlib=', None,
         "installation directory for non-pure module distributions"),
        ('install-lib=', None,
         "installation directory for all module distributions " +
         "(overrides --install-purelib and --install-platlib)"),

        ('install-headers=', None,
         "installation directory for C/C++ headers"),
        ('install-scripts=', None,
         "installation directory for Python scripts"),
        ('install-data=', None,
         "installation directory for data files"),

        # Byte-compilation options -- see install_lib.py for details, as
        # these are duplicated from there (but only install_lib does
        # anything with them).
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        ('optimize=', 'O',
         'also compile with optimization: -O1 for "python -O", '
         '-O2 for "python -OO", and -O0 to disable [default: -O0]'),

        # Miscellaneous control options
        ('force', 'f',
         "force installation (overwrite any existing files)"),
        ('skip-build', None,
         "skip rebuilding everything (for testing/debugging)"),

        # Where to install documentation (eventually!)
        #('doc-format=', None, "format of documentation to generate"),
        #('install-man=', None, "directory for Unix man pages"),
        #('install-html=', None, "directory for HTML documentation"),
        #('install-info=', None, "directory for GNU info files"),

        # XXX use a name that makes clear this is the old format
        ('record=', None,
         "filename in which to record a list of installed files "
         "(not PEP 376-compliant)"),
        ('resources=', None,
         "data files mapping"),

        # .dist-info related arguments, read by install_dist_info
        ('no-distinfo', None,
         "do not create a .dist-info directory"),
        ('installer=', None,
         "the name of the installer"),
        # help text was truncated ("(i.e.") in the original
        ('requested', None,
         "generate a REQUESTED file"),
        ('no-requested', None,
         "do not generate a REQUESTED file"),
        ('no-record', None,
         "do not generate a RECORD file"),
        ]

    boolean_options = ['compile', 'force', 'skip-build', 'no-distinfo',
                       'requested', 'no-record']

    if HAS_USER_SITE:
        user_options.append(
            ('user', None,
             "install in user site-packages directory [%s]" %
             get_path('purelib', '%s_user' % os.name)))

        boolean_options.append('user')

    negative_opt = {'no-compile': 'compile', 'no-requested': 'requested'}

    def initialize_options(self):
        """Set the initial values for all options."""
        # High-level options: these select both an installation base
        # and scheme.
        self.prefix = None
        self.exec_prefix = None
        self.home = None
        if HAS_USER_SITE:
            self.user = False

        # These select only the installation base; it's up to the user to
        # specify the installation scheme (currently, that means supplying
        # the --install-{platlib,purelib,scripts,data} options).
        self.install_base = None
        self.install_platbase = None
        self.root = None

        # These options are the actual installation directories; if not
        # supplied by the user, they are filled in using the installation
        # scheme implied by prefix/exec-prefix/home and the contents of
        # that installation scheme.
        self.install_purelib = None     # for pure module distributions
        self.install_platlib = None     # non-pure (dists w/ extensions)
        self.install_headers = None     # for C/C++ headers
        self.install_lib = None         # set to either purelib or platlib
        self.install_scripts = None
        self.install_data = None
        if HAS_USER_SITE:
            self.install_userbase = get_config_var('userbase')
            self.install_usersite = get_path('purelib', '%s_user' % os.name)

        self.compile = None
        self.optimize = None

        # These two are for putting non-packagized distributions into their
        # own directory and creating a .pth file if it makes sense.
        # 'extra_path' comes from the setup file; 'install_path_file' can
        # be turned off if it makes no sense to install a .pth file.  (But
        # better to install it uselessly than to guess wrong and not
        # install it when it's necessary and would be used!)  Currently,
        # 'install_path_file' is always true unless some outsider meddles
        # with it.
        self.extra_path = None
        self.install_path_file = True

        # 'force' forces installation, even if target files are not
        # out-of-date.  'skip_build' skips running the "build" command,
        # handy if you know it's not necessary.  'warn_dir' (which is *not*
        # a user option, it's just there so the bdist_* commands can turn
        # it off) determines whether we warn about installing to a
        # directory not in sys.path.
        self.force = False
        self.skip_build = False
        self.warn_dir = True

        # These are only here as a conduit from the 'build' command to the
        # 'install_*' commands that do the real work.  ('build_base' isn't
        # actually used anywhere, but it might be useful in future.)  They
        # are not user options, because if the user told the install
        # command where the build directory is, that wouldn't affect the
        # build command.
        self.build_base = None
        self.build_lib = None

        # Not defined yet because we don't know anything about
        # documentation yet.
        #self.install_man = None
        #self.install_html = None
        #self.install_info = None

        self.record = None
        self.resources = None

        # .dist-info related options
        self.no_distinfo = None
        self.installer = None
        self.requested = None
        self.no_record = None
        self.no_resources = None

    # -- Option finalizing methods -------------------------------------
    # (This is rather more involved than for most commands,
    # because this is where the policy for installing third-
    # party Python modules on various platforms given a wide
    # array of user input is decided.  Yes, it's quite complex!)

    def finalize_options(self):
        """Validate option combinations and compute the install dirs."""
        # This method (and its helpers, like 'finalize_unix()',
        # 'finalize_other()', and 'select_scheme()') is where the default
        # installation directories for modules, extension modules, and
        # anything else we care to install from a Python module
        # distribution.  Thus, this code makes a pretty important policy
        # statement about how third-party stuff is added to a Python
        # installation!  Note that the actual work of installation is done
        # by the relatively simple 'install_*' commands; they just take
        # their orders from the installation directory options determined
        # here.

        # Check for errors/inconsistencies in the options; first, stuff
        # that's wrong on any platform.

        if ((self.prefix or self.exec_prefix or self.home) and
            (self.install_base or self.install_platbase)):
            raise PackagingOptionError(
                "must supply either prefix/exec-prefix/home or "
                "install-base/install-platbase -- not both")

        if self.home and (self.prefix or self.exec_prefix):
            raise PackagingOptionError(
                "must supply either home or prefix/exec-prefix -- not both")

        if HAS_USER_SITE and self.user and (
                self.prefix or self.exec_prefix or self.home or
                self.install_base or self.install_platbase):
            raise PackagingOptionError(
                "can't combine user with prefix/exec_prefix/home or "
                "install_base/install_platbase")

        # Next, stuff that's wrong (or dubious) only on certain platforms.
        if os.name != "posix":
            if self.exec_prefix:
                logger.warning(
                    '%s: exec-prefix option ignored on this platform',
                    self.get_command_name())
                self.exec_prefix = None

        # Now the interesting logic -- so interesting that we farm it out
        # to other methods.  The goal of these methods is to set the final
        # values for the install_{lib,scripts,data,...}  options, using as
        # input a heady brew of prefix, exec_prefix, home, install_base,
        # install_platbase, user-supplied versions of
        # install_{purelib,platlib,lib,scripts,data,...}, and the
        # INSTALL_SCHEME dictionary above.  Phew!

        self.dump_dirs("pre-finalize_{unix,other}")

        if os.name == 'posix':
            self.finalize_unix()
        else:
            self.finalize_other()

        self.dump_dirs("post-finalize_{unix,other}()")

        # Expand configuration variables, tilde, etc. in self.install_base
        # and self.install_platbase -- that way, we can use $base or
        # $platbase in the other installation directories and not worry
        # about needing recursive variable expansion (shudder).

        py_version = sys.version.split()[0]
        # 'py_version[:3]' would truncate two-digit minor versions
        # (e.g. "3.10" -> "3.1"), so split on the dots instead
        py_version_short = '.'.join(py_version.split('.')[:2])
        prefix, exec_prefix, srcdir, projectbase = get_config_vars(
            'prefix', 'exec_prefix', 'srcdir', 'projectbase')

        metadata = self.distribution.metadata
        self.config_vars = {
            'dist_name': metadata['Name'],
            'dist_version': metadata['Version'],
            'dist_fullname': metadata.get_fullname(),
            'py_version': py_version,
            'py_version_short': py_version_short,
            'py_version_nodot': py_version_short.replace('.', ''),
            'sys_prefix': prefix,
            'prefix': prefix,
            'sys_exec_prefix': exec_prefix,
            'exec_prefix': exec_prefix,
            'srcdir': srcdir,
            'projectbase': projectbase,
            }

        if HAS_USER_SITE:
            self.config_vars['userbase'] = self.install_userbase
            self.config_vars['usersite'] = self.install_usersite

        self.expand_basedirs()

        self.dump_dirs("post-expand_basedirs()")

        # Now define config vars for the base directories so we can expand
        # everything else.
        self.config_vars['base'] = self.install_base
        self.config_vars['platbase'] = self.install_platbase

        # Expand "~" and configuration variables in the installation
        # directories.
        self.expand_dirs()

        self.dump_dirs("post-expand_dirs()")

        # Create directories in the home dir:
        if HAS_USER_SITE and self.user:
            self.create_home_path()

        # Pick the actual directory to install all modules to: either
        # install_purelib or install_platlib, depending on whether this
        # module distribution is pure or not.  Of course, if the user
        # already specified install_lib, use their selection.
        if self.install_lib is None:
            if self.distribution.ext_modules:  # has extensions: non-pure
                self.install_lib = self.install_platlib
            else:
                self.install_lib = self.install_purelib

        # Convert directories from Unix /-separated syntax to the local
        # convention.
        self.convert_paths('lib', 'purelib', 'platlib',
                           'scripts', 'data', 'headers')
        if HAS_USER_SITE:
            self.convert_paths('userbase', 'usersite')

        # Well, we're not actually fully completely finalized yet: we still
        # have to deal with 'extra_path', which is the hack for allowing
        # non-packagized module distributions (hello, Numerical Python!) to
        # get their own directories.
        self.handle_extra_path()
        self.install_libbase = self.install_lib  # needed for .pth file
        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)

        # If a new root directory was supplied, make all the installation
        # dirs relative to it.
        if self.root is not None:
            self.change_roots('libbase', 'lib', 'purelib', 'platlib',
                              'scripts', 'data', 'headers')

        self.dump_dirs("after prepending root")

        # Find out the build directories, ie. where to install from.
        self.set_undefined_options('build', 'build_base', 'build_lib')

        # Punt on doc directories for now -- after all, we're punting on
        # documentation completely!

        if self.no_distinfo is None:
            self.no_distinfo = False

    def finalize_unix(self):
        """Finalize options for posix platforms."""
        if self.install_base is not None or self.install_platbase is not None:
            if ((self.install_lib is None and
                 self.install_purelib is None and
                 self.install_platlib is None) or
                self.install_headers is None or
                self.install_scripts is None or
                self.install_data is None):
                raise PackagingOptionError(
                    "install-base or install-platbase supplied, but "
                    "installation scheme is incomplete")
            return

        if HAS_USER_SITE and self.user:
            if self.install_userbase is None:
                raise PackagingPlatformError(
                    "user base directory is not specified")
            self.install_base = self.install_platbase = self.install_userbase
            self.select_scheme("posix_user")
        elif self.home is not None:
            self.install_base = self.install_platbase = self.home
            self.select_scheme("posix_home")
        else:
            if self.prefix is None:
                if self.exec_prefix is not None:
                    raise PackagingOptionError(
                        "must not supply exec-prefix without prefix")

                self.prefix = os.path.normpath(sys.prefix)
                self.exec_prefix = os.path.normpath(sys.exec_prefix)

            else:
                if self.exec_prefix is None:
                    self.exec_prefix = self.prefix

            self.install_base = self.prefix
            self.install_platbase = self.exec_prefix
            self.select_scheme("posix_prefix")

    def finalize_other(self):
        """Finalize options for non-posix platforms"""
        if HAS_USER_SITE and self.user:
            if self.install_userbase is None:
                raise PackagingPlatformError(
                    "user base directory is not specified")
            self.install_base = self.install_platbase = self.install_userbase
            self.select_scheme(os.name + "_user")
        elif self.home is not None:
            self.install_base = self.install_platbase = self.home
            self.select_scheme("posix_home")
        else:
            if self.prefix is None:
                self.prefix = os.path.normpath(sys.prefix)

            self.install_base = self.install_platbase = self.prefix
            try:
                self.select_scheme(os.name)
            except KeyError:
                raise PackagingPlatformError(
                    "no support for installation on '%s'" % os.name)

    def dump_dirs(self, msg):
        """Dump the list of user options."""
        logger.debug(msg + ":")
        for opt in self.user_options:
            opt_name = opt[0]
            if opt_name[-1] == "=":
                opt_name = opt_name[0:-1]
            if opt_name in self.negative_opt:
                opt_name = self.negative_opt[opt_name]
                opt_name = opt_name.replace('-', '_')
                val = not getattr(self, opt_name)
            else:
                opt_name = opt_name.replace('-', '_')
                val = getattr(self, opt_name)
            logger.debug("  %s: %s", opt_name, val)

    def select_scheme(self, name):
        """Set the install directories by applying the install schemes."""
        # it's the caller's problem if they supply a bad name!
        scheme = get_paths(name, expand=False)
        for key, value in scheme.items():
            if key == 'platinclude':
                key = 'headers'
                value = os.path.join(value, self.distribution.metadata['Name'])
            attrname = 'install_' + key
            if hasattr(self, attrname):
                if getattr(self, attrname) is None:
                    setattr(self, attrname, value)

    def _expand_attrs(self, attrs):
        """Expand "~" and config vars in the named attributes, in place."""
        for attr in attrs:
            val = getattr(self, attr)
            if val is not None:
                if os.name == 'posix' or os.name == 'nt':
                    val = os.path.expanduser(val)
                # see if we want to push this work in sysconfig XXX
                val = sysconfig._subst_vars(val, self.config_vars)
                setattr(self, attr, val)

    def expand_basedirs(self):
        """Call `os.path.expanduser` on install_{base,platbase} and root."""
        self._expand_attrs(['install_base', 'install_platbase', 'root'])

    def expand_dirs(self):
        """Call `os.path.expanduser` on install dirs."""
        self._expand_attrs(['install_purelib', 'install_platlib',
                            'install_lib', 'install_headers',
                            'install_scripts', 'install_data'])

    def convert_paths(self, *names):
        """Call `convert_path` over `names`."""
        for name in names:
            attr = "install_" + name
            setattr(self, attr, convert_path(getattr(self, attr)))

    def handle_extra_path(self):
        """Set `path_file` and `extra_dirs` using `extra_path`."""
        if self.extra_path is None:
            self.extra_path = self.distribution.extra_path

        if self.extra_path is not None:
            if isinstance(self.extra_path, str):
                self.extra_path = self.extra_path.split(',')

            if len(self.extra_path) == 1:
                path_file = extra_dirs = self.extra_path[0]
            elif len(self.extra_path) == 2:
                path_file, extra_dirs = self.extra_path
            else:
                raise PackagingOptionError(
                    "'extra_path' option must be a list, tuple, or "
                    "comma-separated string with 1 or 2 elements")

            # convert to local form in case Unix notation used (as it
            # should be in setup scripts)
            extra_dirs = convert_path(extra_dirs)
        else:
            path_file = None
            extra_dirs = ''

        # XXX should we warn if path_file and not extra_dirs? (in which
        # case the path file would be harmless but pointless)
        self.path_file = path_file
        self.extra_dirs = extra_dirs

    def change_roots(self, *names):
        """Change the install directories pointed by name using root."""
        for name in names:
            attr = "install_" + name
            setattr(self, attr, change_root(self.root, getattr(self, attr)))

    def create_home_path(self):
        """Create directories under ~."""
        # guard fixed: the old "HAS_USER_SITE and not self.user" test fell
        # through (and crashed on missing attrs) when HAS_USER_SITE is false
        if not (HAS_USER_SITE and self.user):
            return
        home = convert_path(os.path.expanduser("~"))
        for name, path in self.config_vars.items():
            if path.startswith(home) and not os.path.isdir(path):
                os.makedirs(path, 0o700)

    # -- Command execution methods -------------------------------------

    def run(self):
        """Runs the command."""
        # Obviously have to build before we can install
        if not self.skip_build:
            self.run_command('build')
            # If we built for any other platform, we can't install.
            build_plat = self.distribution.get_command_obj('build').plat_name
            # check warn_dir - it is a clue that the 'install_dist' is happening
            # internally, and not to sys.path, so we don't check the platform
            # matches what we are running.
            if self.warn_dir and build_plat != get_platform():
                raise PackagingPlatformError("Can't install when "
                                             "cross-compiling")

        # Run all sub-commands (at least those that need to be run)
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        if self.path_file:
            self.create_path_file()

        # write list of installed files, if requested.
        if self.record:
            outputs = self.get_outputs()
            if self.root:               # strip any package prefix
                root_len = len(self.root)
                outputs = [output[root_len:] for output in outputs]
            self.execute(write_file,
                         (self.record, outputs),
                         "writing list of installed files to '%s'" %
                         self.record)

        normpath, normcase = os.path.normpath, os.path.normcase
        sys_path = [normcase(normpath(p)) for p in sys.path]
        install_lib = normcase(normpath(self.install_lib))
        if (self.warn_dir and
            not (self.path_file and self.install_path_file) and
            install_lib not in sys_path):
            logger.debug(("modules installed to '%s', which is not in "
                       "Python's module search path (sys.path) -- "
                       "you'll have to change the search path yourself"),
                       self.install_lib)

    def create_path_file(self):
        """Creates the .pth file"""
        filename = os.path.join(self.install_libbase,
                                self.path_file + ".pth")
        if self.install_path_file:
            self.execute(write_file,
                         (filename, [self.extra_dirs]),
                         "creating %s" % filename)
        else:
            logger.warning('%s: path file %r not created',
                           self.get_command_name(), filename)

    # -- Reporting methods ---------------------------------------------

    def get_outputs(self):
        """Assembles the outputs of all the sub-commands."""
        outputs = []
        for cmd_name in self.get_sub_commands():
            cmd = self.get_finalized_command(cmd_name)
            # Add the contents of cmd.get_outputs(), ensuring
            # that outputs doesn't contain duplicate entries
            for filename in cmd.get_outputs():
                if filename not in outputs:
                    outputs.append(filename)

        if self.path_file and self.install_path_file:
            outputs.append(os.path.join(self.install_libbase,
                                        self.path_file + ".pth"))

        return outputs

    def get_inputs(self):
        """Returns the inputs of all the sub-commands"""
        # XXX gee, this looks familiar ;-(
        inputs = []
        for cmd_name in self.get_sub_commands():
            cmd = self.get_finalized_command(cmd_name)
            inputs.extend(cmd.get_inputs())

        return inputs

    # -- Predicates for sub-command list -------------------------------

    def has_lib(self):
        """Returns true if the current distribution has any Python
        modules to install."""
        return (self.distribution.has_pure_modules() or
                self.distribution.has_ext_modules())

    def has_headers(self):
        """Returns true if the current distribution has any headers to
        install."""
        return self.distribution.has_headers()

    def has_scripts(self):
        """Returns true if the current distribution has any scripts to
        install."""
        return self.distribution.has_scripts()

    def has_data(self):
        """Returns true if the current distribution has any data to
        install."""
        return self.distribution.has_data_files()

    # 'sub_commands': a list of commands this command might have to run to
    # get its work done.  See cmd.py for more info.
    sub_commands = [('install_lib', has_lib),
                    ('install_headers', has_headers),
                    ('install_scripts', has_scripts),
                    ('install_data', has_data),
                    # keep install_distinfo last, as it needs the record
                    # with files to be completely generated
                    ('install_distinfo', lambda self: not self.no_distinfo),
                   ]
diff --git a/Lib/packaging/command/install_distinfo.py b/Lib/packaging/command/install_distinfo.py
new file mode 100644
index 0000000..41fe734
--- /dev/null
+++ b/Lib/packaging/command/install_distinfo.py
@@ -0,0 +1,175 @@
+"""Create the PEP 376-compliant .dist-info directory."""
+
+# Forked from the former install_egg_info command by Josip Djolonga
+
+import csv
+import os
+import re
+import hashlib
+
+from packaging.command.cmd import Command
+from packaging import logger
+from shutil import rmtree
+
+
class install_distinfo(Command):
    """Create and populate the PEP 376 .dist-info directory.

    Writes METADATA, INSTALLER and (optionally) REQUESTED, RESOURCES
    and RECORD inside a '<name>-<version>.dist-info' directory under
    the install_lib directory.
    """

    description = 'create a .dist-info directory for the distribution'

    user_options = [
        ('distinfo-dir=', None,
         "directory where the .dist-info directory will be installed"),
        ('installer=', None,
         "the name of the installer"),
        ('requested', None,
         "generate a REQUESTED file"),
        ('no-requested', None,
         "do not generate a REQUESTED file"),
        ('no-record', None,
         "do not generate a RECORD file"),
        ('no-resources', None,
         "do not generate a RESOURCES list installed file")
    ]

    boolean_options = ['requested', 'no-record', 'no-resources']

    negative_opt = {'no-requested': 'requested'}

    def initialize_options(self):
        self.distinfo_dir = None
        self.installer = None
        self.requested = None
        self.no_record = None
        self.no_resources = None

    def finalize_options(self):
        self.set_undefined_options('install_dist',
                                   'installer', 'requested', 'no_record')

        self.set_undefined_options('install_lib',
                                   ('install_dir', 'distinfo_dir'))

        if self.installer is None:
            # FIXME distutils or packaging?
            # + document default in the option help text above and in install
            self.installer = 'distutils'
        if self.requested is None:
            self.requested = True
        if self.no_record is None:
            self.no_record = False
        if self.no_resources is None:
            self.no_resources = False

        metadata = self.distribution.metadata

        # directory name per PEP 376: escaped name + escaped version
        basename = "%s-%s.dist-info" % (
            to_filename(safe_name(metadata['Name'])),
            to_filename(safe_version(metadata['Version'])))

        self.distinfo_dir = os.path.join(self.distinfo_dir, basename)
        self.outputs = []

    def run(self):
        # FIXME dry-run should be used at a finer level, so that people get
        # useful logging output and can have an idea of what the command would
        # have done
        if not self.dry_run:
            target = self.distinfo_dir

            # start from a clean directory, whatever was there before
            if os.path.isdir(target) and not os.path.islink(target):
                rmtree(target)
            elif os.path.exists(target):
                self.execute(os.unlink, (self.distinfo_dir,),
                             "removing " + target)

            self.execute(os.makedirs, (target,), "creating " + target)

            metadata_path = os.path.join(self.distinfo_dir, 'METADATA')
            logger.info('creating %s', metadata_path)
            self.distribution.metadata.write(metadata_path)
            self.outputs.append(metadata_path)

            installer_path = os.path.join(self.distinfo_dir, 'INSTALLER')
            logger.info('creating %s', installer_path)
            with open(installer_path, 'w') as f:
                f.write(self.installer)
            self.outputs.append(installer_path)

            if self.requested:
                # REQUESTED is an empty marker file (PEP 376)
                requested_path = os.path.join(self.distinfo_dir, 'REQUESTED')
                logger.info('creating %s', requested_path)
                open(requested_path, 'w').close()
                self.outputs.append(requested_path)

            if not self.no_resources:
                install_data = self.get_finalized_command('install_data')
                if install_data.get_resources_out() != []:
                    resources_path = os.path.join(self.distinfo_dir,
                                                  'RESOURCES')
                    logger.info('creating %s', resources_path)
                    # csv.writer needs a text-mode file in Python 3;
                    # opening in 'wb' would make writerow raise TypeError
                    with open(resources_path, 'w', encoding='utf-8') as f:
                        writer = csv.writer(f, delimiter=',',
                                            lineterminator=os.linesep,
                                            quotechar='"')
                        for row in install_data.get_resources_out():
                            writer.writerow(row)
                    self.outputs.append(resources_path)

            if not self.no_record:
                record_path = os.path.join(self.distinfo_dir, 'RECORD')
                logger.info('creating %s', record_path)
                with open(record_path, 'w', encoding='utf-8') as f:
                    writer = csv.writer(f, delimiter=',',
                                        lineterminator=os.linesep,
                                        quotechar='"')

                    install = self.get_finalized_command('install_dist')

                    for fpath in install.get_outputs():
                        if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
                            # do not put size and md5 hash, as in PEP-376
                            writer.writerow((fpath, '', ''))
                        else:
                            size = os.path.getsize(fpath)
                            # hash the raw bytes: text mode would fail on
                            # binary files (e.g. extension modules) and give
                            # an encoding-dependent digest otherwise
                            with open(fpath, 'rb') as fp:
                                digest = hashlib.md5()
                                digest.update(fp.read())
                            md5sum = digest.hexdigest()
                            writer.writerow((fpath, md5sum, size))

                    # add the RECORD file itself
                    writer.writerow((record_path, '', ''))
                    self.outputs.append(record_path)

    def get_outputs(self):
        """Return the files created by this command."""
        return self.outputs
+
+
+# The following functions are taken from setuptools' pkg_resources module.
+
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name.

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    illegal_run = re.compile('[^A-Za-z0-9.]+')
    return illegal_run.sub('-', name)
+
+
def safe_version(version):
    """Convert an arbitrary string to a standard version string.

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    dotted = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', dotted)
+
+
def to_filename(name):
    """Convert a project or version name to its filename-escaped form.

    Any '-' characters are currently replaced with '_'.
    """
    return name.translate(str.maketrans('-', '_'))
diff --git a/Lib/packaging/command/install_headers.py b/Lib/packaging/command/install_headers.py
new file mode 100644
index 0000000..e043d6b
--- /dev/null
+++ b/Lib/packaging/command/install_headers.py
@@ -0,0 +1,43 @@
+"""Install C/C++ header files to the Python include directory."""
+
+from packaging.command.cmd import Command
+
+
+# XXX force is never used
class install_headers(Command):
    """Copy the distribution's C/C++ header files to the install directory."""

    description = "install C/C++ header files"

    user_options = [
        ('install-dir=', 'd',
         "directory to install header files to"),
        ('force', 'f',
         "force installation (overwrite existing files)"),
    ]

    boolean_options = ['force']

    def initialize_options(self):
        self.install_dir = None
        self.force = False
        self.outfiles = []

    def finalize_options(self):
        self.set_undefined_options('install_dist',
                                   ('install_headers', 'install_dir'),
                                   'force')

    def run(self):
        headers = self.distribution.headers
        if not headers:
            return

        self.mkpath(self.install_dir)
        for header in headers:
            installed = self.copy_file(header, self.install_dir)[0]
            self.outfiles.append(installed)

    def get_inputs(self):
        return self.distribution.headers or []

    def get_outputs(self):
        return self.outfiles
diff --git a/Lib/packaging/command/install_lib.py b/Lib/packaging/command/install_lib.py
new file mode 100644
index 0000000..5ff9cee
--- /dev/null
+++ b/Lib/packaging/command/install_lib.py
@@ -0,0 +1,222 @@
+"""Install all modules (extensions and pure Python)."""
+
+import os
+import sys
+import logging
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.errors import PackagingOptionError
+
+
# Extension for Python source files.
PYTHON_SOURCE_EXTENSION = (os.extsep + "py") if hasattr(os, 'extsep') else ".py"
+
class install_lib(Command):
    """Install all modules (extensions and pure Python).

    The entire contents of the build directory are copied into the
    install directory, and .py files are optionally byte-compiled.
    """

    description = "install all modules (extensions and pure Python)"

    # The byte-compilation options are a tad confusing.  Here are the
    # possible scenarios:
    #   1) no compilation at all (--no-compile --no-optimize)
    #   2) compile .pyc only (--compile --no-optimize; default)
    #   3) compile .pyc and "level 1" .pyo (--compile --optimize)
    #   4) compile "level 1" .pyo only (--no-compile --optimize)
    #   5) compile .pyc and "level 2" .pyo (--compile --optimize-more)
    #   6) compile "level 2" .pyo only (--no-compile --optimize-more)
    #
    # The UI for this is two options, 'compile' and 'optimize'.
    # 'compile' is strictly boolean, and only decides whether to
    # generate .pyc files.  'optimize' is three-way (0, 1, or 2), and
    # decides both whether to generate .pyo files and what level of
    # optimization to use.

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
        ('build-dir=', 'b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('skip-build', None, "skip the build steps"),
        ]

    boolean_options = ['force', 'compile', 'skip-build']
    negative_opt = {'no-compile': 'compile'}

    def initialize_options(self):
        # let the 'install_dist' command dictate our installation directory
        self.install_dir = None
        self.build_dir = None
        self.force = False
        self.compile = None
        self.optimize = None
        self.skip_build = None

    def finalize_options(self):
        # Get all the information we need to install pure Python modules
        # from the umbrella 'install_dist' command -- build (source) directory,
        # install (target) directory, and whether to compile .py files.
        self.set_undefined_options('install_dist',
                                   ('build_lib', 'build_dir'),
                                   ('install_lib', 'install_dir'),
                                   'force', 'compile', 'optimize', 'skip_build')

        if self.compile is None:
            self.compile = True
        if self.optimize is None:
            self.optimize = 0

        # 'optimize' may arrive as a string from the command line;
        # normalize it to one of the three supported integer levels
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                if self.optimize not in (0, 1, 2):
                    raise AssertionError
            except (ValueError, AssertionError):
                raise PackagingOptionError("optimize must be 0, 1, or 2")

    def run(self):
        # Make sure we have built everything we need first
        self.build()

        # Install everything: simply dump the entire contents of the build
        # directory to the installation directory (that's the beauty of
        # having a build directory!)
        outfiles = self.install()

        # (Optionally) compile .py to .pyc
        if outfiles is not None and self.distribution.has_pure_modules():
            self.byte_compile(outfiles)

    # -- Top-level worker functions ------------------------------------
    # (called from 'run()')

    def build(self):
        """Run the build commands this one depends on, unless skipped."""
        if not self.skip_build:
            if self.distribution.has_pure_modules():
                self.run_command('build_py')
            if self.distribution.has_ext_modules():
                self.run_command('build_ext')

    def install(self):
        """Copy the build tree into place and return the installed files.

        Returns None (after a warning) when the build directory is missing.
        """
        if os.path.isdir(self.build_dir):
            outfiles = self.copy_tree(self.build_dir, self.install_dir)
        else:
            logger.warning(
                '%s: %r does not exist -- no Python modules to install',
                self.get_command_name(), self.build_dir)
            return None
        return outfiles

    def byte_compile(self, files):
        """Byte-compile *files* according to 'compile' and 'optimize'."""
        # plain attribute access: sys.dont_write_bytecode always exists,
        # so getattr with no default was just a roundabout spelling
        if sys.dont_write_bytecode:
            # XXX do we want this?  because a Python runs without bytecode
            # doesn't mean that the *dists should not contain bytecode
            #--or does it?
            logger.warning('%s: byte-compiling is disabled, skipping.',
                           self.get_command_name())
            return

        from packaging.util import byte_compile

        # Get the "--root" directory supplied to the "install_dist" command,
        # and use it as a prefix to strip off the purported filename
        # encoded in bytecode files.  This is far from complete, but it
        # should at least generate usable bytecode in RPM distributions.
        install_root = self.get_finalized_command('install_dist').root

        # Temporary kludge until we remove the verbose arguments and use
        # logging everywhere
        verbose = logger.getEffectiveLevel() >= logging.DEBUG

        if self.compile:
            # pass verbose here too, for consistency with the call below
            byte_compile(files, optimize=0,
                         force=self.force, prefix=install_root,
                         verbose=verbose,
                         dry_run=self.dry_run)
        if self.optimize > 0:
            byte_compile(files, optimize=self.optimize,
                         force=self.force, prefix=install_root,
                         verbose=verbose,
                         dry_run=self.dry_run)

    # -- Utility methods -----------------------------------------------

    def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
        """Map *build_cmd*'s outputs from its build dir into *output_dir*."""
        if not has_any:
            return []

        build_cmd = self.get_finalized_command(build_cmd)
        build_files = build_cmd.get_outputs()
        build_dir = getattr(build_cmd, cmd_option)

        prefix_len = len(build_dir) + len(os.sep)
        outputs = []
        for file in build_files:
            outputs.append(os.path.join(output_dir, file[prefix_len:]))

        return outputs

    def _bytecode_filenames(self, py_filenames):
        """Return the .pyc/.pyo names that compilation would create."""
        bytecode_files = []
        for py_file in py_filenames:
            # Since build_py handles package data installation, the
            # list of outputs can contain more than just .py files.
            # Make sure we only report bytecode for the .py files.
            ext = os.path.splitext(os.path.normcase(py_file))[1]
            if ext != PYTHON_SOURCE_EXTENSION:
                continue
            if self.compile:
                bytecode_files.append(py_file + "c")
            if self.optimize > 0:
                bytecode_files.append(py_file + "o")

        return bytecode_files

    # -- External interface --------------------------------------------
    # (called by outsiders)

    def get_outputs(self):
        """Return the list of files that would be installed if this command
        were actually run.  Not affected by the "dry-run" flag or whether
        modules have actually been built yet.
        """
        pure_outputs = \
            self._mutate_outputs(self.distribution.has_pure_modules(),
                                 'build_py', 'build_lib',
                                 self.install_dir)
        if self.compile:
            bytecode_outputs = self._bytecode_filenames(pure_outputs)
        else:
            bytecode_outputs = []

        ext_outputs = \
            self._mutate_outputs(self.distribution.has_ext_modules(),
                                 'build_ext', 'build_lib',
                                 self.install_dir)

        return pure_outputs + bytecode_outputs + ext_outputs

    def get_inputs(self):
        """Get the list of files that are input to this command, ie. the
        files that get installed as they are named in the build tree.
        The files in this list correspond one-to-one to the output
        filenames returned by 'get_outputs()'.
        """
        inputs = []

        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            inputs.extend(build_py.get_outputs())

        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            inputs.extend(build_ext.get_outputs())

        return inputs
diff --git a/Lib/packaging/command/install_scripts.py b/Lib/packaging/command/install_scripts.py
new file mode 100644
index 0000000..cfacbe2
--- /dev/null
+++ b/Lib/packaging/command/install_scripts.py
@@ -0,0 +1,59 @@
+"""Install scripts."""
+
+# Contributed by Bastian Kleineidam
+
+import os
+from packaging.command.cmd import Command
+from packaging import logger
+
class install_scripts(Command):
    """Copy built scripts into place and make them executable on POSIX."""

    description = "install scripts (Python or otherwise)"

    user_options = [
        ('install-dir=', 'd', "directory to install scripts to"),
        ('build-dir=', 'b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('skip-build', None, "skip the build steps"),
    ]

    boolean_options = ['force', 'skip-build']

    def initialize_options(self):
        self.install_dir = None
        self.force = False
        self.build_dir = None
        self.skip_build = None
        # initialized here so get_outputs() is safe to call even when
        # run() was never invoked (it previously raised AttributeError)
        self.outfiles = []

    def finalize_options(self):
        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
        self.set_undefined_options('install_dist',
                                   ('install_scripts', 'install_dir'),
                                   'force', 'skip_build')

    def run(self):
        if not self.skip_build:
            self.run_command('build_scripts')

        if not os.path.exists(self.build_dir):
            # nothing was built, so there is nothing to install
            self.outfiles = []
            return

        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
        if os.name == 'posix':
            # Set the executable bits (owner, group, and world) on
            # all the scripts we just installed.
            for file in self.get_outputs():
                if self.dry_run:
                    logger.info("changing mode of %s", file)
                else:
                    mode = (os.stat(file).st_mode | 0o555) & 0o7777
                    logger.info("changing mode of %s to %o", file, mode)
                    os.chmod(file, mode)

    def get_inputs(self):
        return self.distribution.scripts or []

    def get_outputs(self):
        return self.outfiles or []
diff --git a/Lib/packaging/command/register.py b/Lib/packaging/command/register.py
new file mode 100644
index 0000000..962afdc
--- /dev/null
+++ b/Lib/packaging/command/register.py
@@ -0,0 +1,282 @@
+"""Register a release with a project index."""
+
+# Contributed by Richard Jones
+
+import io
+import getpass
+import urllib.error
+import urllib.parse
+import urllib.request
+
+from packaging import logger
+from packaging.util import (read_pypirc, generate_pypirc, DEFAULT_REPOSITORY,
+                            DEFAULT_REALM, get_pypirc_path)
+from packaging.command.cmd import Command
+
class register(Command):
    """Register a release with a package index (PyPI by default)."""

    description = "register a release with PyPI"
    user_options = [
        ('repository=', 'r',
         "repository URL [default: %s]" % DEFAULT_REPOSITORY),
        ('show-response', None,
         "display full response text from server"),
        ('list-classifiers', None,
         "list valid Trove classifiers"),
        ('strict', None,
         "stop the registration if the metadata is not fully compliant")
        ]

    boolean_options = ['show-response', 'list-classifiers', 'strict']

    def initialize_options(self):
        self.repository = None
        self.realm = None
        self.show_response = False
        self.list_classifiers = False
        self.strict = False

    def finalize_options(self):
        if self.repository is None:
            self.repository = DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = DEFAULT_REALM

    def run(self):
        self._set_config()

        # Check the package metadata
        check = self.distribution.get_command_obj('check')
        if check.strict != self.strict and not check.all:
            # If check was already run but with different options,
            # re-run it
            check.strict = self.strict
            check.all = True
            self.distribution.have_run.pop('check', None)
            self.run_command('check')

        if self.dry_run:
            self.verify_metadata()
        elif self.list_classifiers:
            self.classifiers()
        else:
            self.send_metadata()

    def _set_config(self):
        '''Read the configuration file and set attributes.

        Sets username, password, repository, realm and has_config from
        .pypirc when a matching section exists.
        '''
        config = read_pypirc(self.repository, self.realm)
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']
            self.has_config = True
        else:
            if self.repository not in ('pypi', DEFAULT_REPOSITORY):
                raise ValueError('%s not found in .pypirc' % self.repository)
            if self.repository == 'pypi':
                self.repository = DEFAULT_REPOSITORY
            self.has_config = False

    def classifiers(self):
        '''Fetch the list of classifiers from the server.'''
        url = self.repository + '?:action=list_classifiers'
        response = urllib.request.urlopen(url)
        logger.info(response.read())

    def verify_metadata(self):
        '''Send the metadata to the package index server to be checked.'''
        # send the info to the server and report the result
        code, result = self.post_to_server(self.build_post_data('verify'))
        logger.info('server response (%s): %s', code, result)

    def send_metadata(self):
        ''' Send the metadata to the package index server.

            Well, do the following:
            1. figure who the user is, and then
            2. send the data as a Basic auth'ed POST.

            First we try to read the username/password from $HOME/.pypirc,
            which is a ConfigParser-formatted file with a section
            [distutils] containing username and password entries (both
            in clear text). Eg:

                [distutils]
                index-servers =
                    pypi

                [pypi]
                username: fred
                password: sekrit

            Otherwise, to figure who the user is, we offer the user three
            choices:

             1. use existing login,
             2. register as a new user, or
             3. set the password to a random string and email the user.

        '''
        # TODO factor registration out into another method
        # TODO use print to print, not logging

        # see if we can short-cut and get the username/password from the
        # config
        if self.has_config:
            choice = '1'
            username = self.username
            password = self.password
        else:
            choice = 'x'
            username = password = ''

        # get the user's login info
        choices = '1 2 3 4'.split()
        while choice not in choices:
            logger.info('''\
We need to know who you are, so please choose either:
 1. use your existing login,
 2. register as a new user,
 3. have the server generate a new password for you (and email it to you), or
 4. quit
Your selection [default 1]: ''')

            choice = input()
            if not choice:
                choice = '1'
            elif choice not in choices:
                print('Please choose one of the four options!')

        if choice == '1':
            # get the username and password
            while not username:
                username = input('Username: ')
            while not password:
                password = getpass.getpass('Password: ')

            # set up the authentication
            auth = urllib.request.HTTPPasswordMgr()
            host = urllib.parse.urlparse(self.repository)[1]
            auth.add_password(self.realm, host, username, password)
            # send the info to the server and report the result
            code, result = self.post_to_server(self.build_post_data('submit'),
                auth)
            logger.info('Server response (%s): %s', code, result)

            # possibly save the login
            if code == 200:
                if self.has_config:
                    # sharing the password in the distribution instance
                    # so the upload command can reuse it
                    self.distribution.password = password
                else:
                    logger.info(
                        'I can store your PyPI login so future submissions '
                        'will be faster.\n(the login will be stored in %s)',
                        get_pypirc_path())
                    choice = 'X'
                    while choice.lower() not in 'yn':
                        choice = input('Save your login (y/N)?')
                        if not choice:
                            choice = 'n'
                    if choice.lower() == 'y':
                        generate_pypirc(username, password)

        elif choice == '2':
            # register a new user: prompt until name, matching passwords
            # and an email address have been supplied
            data = {':action': 'user'}
            data['name'] = data['password'] = data['email'] = ''
            data['confirm'] = None
            while not data['name']:
                data['name'] = input('Username: ')
            while data['password'] != data['confirm']:
                while not data['password']:
                    data['password'] = getpass.getpass('Password: ')
                while not data['confirm']:
                    data['confirm'] = getpass.getpass(' Confirm: ')
                if data['password'] != data['confirm']:
                    data['password'] = ''
                    data['confirm'] = None
                    print("Password and confirm don't match!")
            while not data['email']:
                data['email'] = input('   EMail: ')
            code, result = self.post_to_server(data)
            if code != 200:
                logger.info('server response (%s): %s', code, result)
            else:
                logger.info('you will receive an email shortly; follow the '
                            'instructions in it to complete registration.')
        elif choice == '3':
            # ask the server to mail a password reset link
            data = {':action': 'password_reset'}
            data['email'] = ''
            while not data['email']:
                data['email'] = input('Your email address: ')
            code, result = self.post_to_server(data)
            logger.info('server response (%s): %s', code, result)

    def build_post_data(self, action):
        # figure the data to send - the metadata plus some additional
        # information used by the package server
        data = self.distribution.metadata.todict()
        data[':action'] = action
        return data

    # XXX to be refactored with upload.upload_file
    def post_to_server(self, data, auth=None):
        '''Post a query to the server, and return a string response.'''
        if 'name' in data:
            logger.info('Registering %s to %s', data['name'], self.repository)
        # Build up the MIME payload for the urllib2 POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = io.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, (tuple, list)):
                value = [value]

            for item in value:
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"' % key)
                body.write("\n\n")
                body.write(item)
                if item and item[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        # urllib.request requires bytes for the POST body in Python 3;
        # encoding here also makes Content-length correct for non-ASCII
        # metadata (len of str counts characters, not bytes)
        body = body.getvalue().encode('utf-8')

        # build the Request
        headers = {
            'Content-type':
                'multipart/form-data; boundary=%s; charset=utf-8' % boundary,
            'Content-length': str(len(body))
        }
        req = urllib.request.Request(self.repository, body, headers)

        # handle HTTP and include the Basic Auth handler
        opener = urllib.request.build_opener(
            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
        )
        data = ''
        try:
            result = opener.open(req)
        except urllib.error.HTTPError as e:
            if self.show_response:
                data = e.fp.read()
            result = e.code, e.msg
        except urllib.error.URLError as e:
            result = 500, str(e)
        else:
            if self.show_response:
                data = result.read()
            result = 200, 'OK'
        if self.show_response:
            dashes = '-' * 75
            logger.info('%s%s%s', dashes, data, dashes)

        return result
diff --git a/Lib/packaging/command/sdist.py b/Lib/packaging/command/sdist.py
new file mode 100644
index 0000000..a28019b
--- /dev/null
+++ b/Lib/packaging/command/sdist.py
@@ -0,0 +1,375 @@
+"""Create a source distribution."""
+
+import os
+import sys
+import re
+from io import StringIO
+from glob import glob
+from shutil import get_archive_formats, rmtree
+
+from packaging import logger
+from packaging.util import resolve_name
+from packaging.errors import (PackagingPlatformError, PackagingOptionError,
+                              PackagingModuleError, PackagingFileError)
+from packaging.command import get_command_names
+from packaging.command.cmd import Command
+from packaging.manifest import Manifest
+
+
def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from packaging.fancy_getopt import FancyGetopt
    # one (long-option, short-option, description) entry per archive format
    format_entries = [('formats=' + name, None, description)
                      for name, description in get_archive_formats()]
    format_entries.sort()
    FancyGetopt(format_entries).print_help(
        "List of available source distribution formats:")
+
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile('\\\w\n', re.M)
+_COMMENTED_LINE = re.compile('^#.*\n$|^\w*\n$', re.M)
+
+
class sdist(Command):
    """Command that builds a source distribution archive.

    The set of distributed files is accumulated in a Manifest object
    (self.filelist); it is filled from a default file set, the
    distribution's 'extra_files' template and optional user-supplied
    manifest builder callables, then pruned and archived.
    """

    description = "create a source distribution (tarball, zip file, etc.)"

    user_options = [
        ('manifest=', 'm',
         "name of manifest file [default: MANIFEST]"),
        ('use-defaults', None,
         "include the default file set in the manifest "
         "[default; disable with --no-defaults]"),
        ('no-defaults', None,
         "don't include the default file set"),
        ('prune', None,
         "specifically exclude files/directories that should not be "
         "distributed (build tree, RCS/CVS dirs, etc.) "
         "[default; disable with --no-prune]"),
        ('no-prune', None,
         "don't automatically exclude anything"),
        ('manifest-only', 'o',
         "just regenerate the manifest and then stop "),
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-temp', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ('dist-dir=', 'd',
         "directory to put the source distribution archive(s) in "
         "[default: dist]"),
        ('check-metadata', None,
         "Ensure that all required elements of metadata "
         "are supplied. Warn if any missing. [default]"),
        ('owner=', 'u',
         "Owner name used when creating a tar file [default: current user]"),
        ('group=', 'g',
         "Group name used when creating a tar file [default: current group]"),
        ('manifest-builders=', None,
         "manifest builders (comma-separated list)"),
        ]

    boolean_options = ['use-defaults', 'prune',
                       'manifest-only', 'keep-temp', 'check-metadata']

    help_options = [
        ('help-formats', None,
         "list available distribution formats", show_formats),
        ]

    negative_opt = {'no-defaults': 'use-defaults',
                    'no-prune': 'prune'}

    # default archive format, keyed by os.name
    default_format = {'posix': 'gztar',
                      'nt': 'zip'}

    def initialize_options(self):
        """Set all options to their default (unset) values."""
        self.manifest = None
        # 'use_defaults': if true, we will include the default file set
        # in the manifest
        self.use_defaults = True
        self.prune = True
        self.manifest_only = False
        self.formats = None
        self.keep_temp = False
        self.dist_dir = None

        self.archive_files = None
        self.metadata_check = True
        self.owner = None
        self.group = None
        self.filelist = None
        self.manifest_builders = None

    def _check_archive_formats(self, formats):
        """Return the first format in *formats* not supported by shutil,
        or None if all of them are supported.
        """
        supported_formats = [name for name, desc in get_archive_formats()]
        for format in formats:
            if format not in supported_formats:
                return format
        return None

    def finalize_options(self):
        """Compute defaults, validate the requested archive formats and
        resolve manifest builders given as dotted names into callables.
        """
        if self.manifest is None:
            self.manifest = "MANIFEST"

        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise PackagingPlatformError("don't know how to create source "
                       "distributions on platform %s" % os.name)

        bad_format = self._check_archive_formats(self.formats)
        if bad_format:
            raise PackagingOptionError("unknown archive format '%s'" \
                        % bad_format)

        if self.dist_dir is None:
            self.dist_dir = "dist"

        if self.filelist is None:
            self.filelist = Manifest()

        if self.manifest_builders is None:
            self.manifest_builders = []
        else:
            if isinstance(self.manifest_builders, str):
                self.manifest_builders = self.manifest_builders.split(',')
            builders = []
            for builder in self.manifest_builders:
                builder = builder.strip()
                if builder == '':
                    continue
                try:
                    # resolve the dotted name into the actual callable
                    builder = resolve_name(builder)
                except ImportError as e:
                    raise PackagingModuleError(e)

                builders.append(builder)

            self.manifest_builders = builders

    def run(self):
        """Build the file list and, unless --manifest-only, the archives."""
        # 'filelist' contains the list of files that will make up the
        # manifest
        self.filelist.clear()

        # Check the package metadata
        if self.metadata_check:
            self.run_command('check')

        # Do whatever it takes to get the list of files to process
        # (process the manifest template, read an existing manifest,
        # whatever).  File list is accumulated in 'self.filelist'.
        self.get_file_list()

        # If user just wanted us to regenerate the manifest, stop now.
        if self.manifest_only:
            return

        # Otherwise, go ahead and create the source distribution tarball,
        # or zipfile, or whatever.
        self.make_distribution()

    def get_file_list(self):
        """Figure out the list of files to include in the source
        distribution, and put it in 'self.filelist'.  This might involve
        reading the manifest template (and writing the manifest), or just
        reading the manifest, or just using the default file set -- it all
        depends on the user's options.
        """
        template_exists = len(self.distribution.extra_files) > 0
        if not template_exists:
            logger.warning('%s: using default file list',
                           self.get_command_name())
        self.filelist.findall()

        if self.use_defaults:
            self.add_defaults()
        if template_exists:
            template = '\n'.join(self.distribution.extra_files)
            self.filelist.read_template(StringIO(template))

        # call manifest builders, if any.
        for builder in self.manifest_builders:
            builder(self.distribution, self.filelist)

        if self.prune:
            self.prune_file_list()

        self.filelist.write(self.manifest)

    def add_defaults(self):
        """Add all the default files to self.filelist:
          - README or README.txt
          - test/test*.py
          - all pure Python modules mentioned in setup script
          - all files pointed by package_data (build_py)
          - all files defined in data_files.
          - all files defined as scripts.
          - all C sources listed as part of extensions or C libraries
            in the setup script (doesn't catch C headers!)
        Warns if (README or README.txt) or setup.py are missing; everything
        else is optional.
        """
        standards = [('README', 'README.txt')]
        for fn in standards:
            # a tuple entry means "any one of these alternatives"
            if isinstance(fn, tuple):
                alts = fn
                got_it = False
                for fn in alts:
                    if os.path.exists(fn):
                        got_it = True
                        self.filelist.append(fn)
                        break

                if not got_it:
                    logger.warning(
                        '%s: standard file not found: should have one of %s',
                        self.get_command_name(), ', '.join(alts))
            else:
                if os.path.exists(fn):
                    self.filelist.append(fn)
                else:
                    logger.warning('%s: standard file %r not found',
                                   self.get_command_name(), fn)

        optional = ['test/test*.py', 'setup.cfg']
        for pattern in optional:
            files = [f for f in glob(pattern) if os.path.isfile(f)]
            if files:
                self.filelist.extend(files)

        # let each command contribute its own source files
        for cmd_name in get_command_names():
            try:
                cmd_obj = self.get_finalized_command(cmd_name)
            except PackagingOptionError:
                pass
            else:
                self.filelist.extend(cmd_obj.get_source_files())

    def prune_file_list(self):
        """Prune off branches that might slip into the file list as created
        by 'read_template()', but really don't belong there:
          * the build tree (typically "build")
          * the release tree itself (only an issue if we ran "sdist"
            previously with --keep-temp, or it aborted)
          * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
        """
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()

        self.filelist.exclude_pattern(None, prefix=build.build_base)
        self.filelist.exclude_pattern(None, prefix=base_dir)

        # pruning out vcs directories
        # both separators are used under win32
        if sys.platform == 'win32':
            seps = r'/|\\'
        else:
            seps = '/'

        vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
                    '_darcs']
        vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
        self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)

    def make_release_tree(self, base_dir, files):
        """Create the directory tree that will become the source
        distribution archive.  All directories implied by the filenames in
        'files' are created under 'base_dir', and then we hard link or copy
        (if hard linking is unavailable) those files into place.
        Essentially, this duplicates the developer's source tree, but in a
        directory named after the distribution, containing only the files
        to be distributed.
        """
        # Create all the directories under 'base_dir' necessary to
        # put 'files' there; the 'mkpath()' is just so we don't die
        # if the manifest happens to be empty.
        self.mkpath(base_dir)
        self.create_tree(base_dir, files, dry_run=self.dry_run)

        # And walk over the list of files, either making a hard link (if
        # os.link exists) to each one that doesn't already exist in its
        # corresponding location under 'base_dir', or copying each file
        # that's out-of-date in 'base_dir'.  (Usually, all files will be
        # out-of-date, because by default we blow away 'base_dir' when
        # we're done making the distribution archives.)

        if hasattr(os, 'link'):        # can make hard links on this system
            link = 'hard'
            msg = "making hard links in %s..." % base_dir
        else:                           # nope, have to copy
            link = None
            msg = "copying files to %s..." % base_dir

        if not files:
            logger.warning("no files to distribute -- empty manifest?")
        else:
            logger.info(msg)

        # files declared in metadata requires_files must be shipped
        for file in self.distribution.metadata.requires_files:
            if file not in files:
                msg = "'%s' must be included explicitly in 'extra_files'" \
                        % file
                raise PackagingFileError(msg)

        for file in files:
            if not os.path.isfile(file):
                logger.warning("'%s' not a regular file -- skipping", file)
            else:
                dest = os.path.join(base_dir, file)
                self.copy_file(file, dest, link=link)

        self.distribution.metadata.write(os.path.join(base_dir, 'PKG-INFO'))

    def make_distribution(self):
        """Create the source distribution(s).  First, we create the release
        tree with 'make_release_tree()'; then, we create all required
        archive files (according to 'self.formats') from the release tree.
        Finally, we clean up by blowing away the release tree (unless
        'self.keep_temp' is true).  The list of archive files created is
        stored so it can be retrieved later by 'get_archive_files()'.
        """
        # Don't warn about missing metadata here -- should be (and is!)
        # done elsewhere.
        base_dir = self.distribution.get_fullname()
        base_name = os.path.join(self.dist_dir, base_dir)

        self.make_release_tree(base_dir, self.filelist.files)
        archive_files = []              # remember names of files we create
        # tar archive must be created last to avoid overwrite and remove
        if 'tar' in self.formats:
            self.formats.append(self.formats.pop(self.formats.index('tar')))

        for fmt in self.formats:
            file = self.make_archive(base_name, fmt, base_dir=base_dir,
                                     owner=self.owner, group=self.group)
            archive_files.append(file)
            self.distribution.dist_files.append(('sdist', '', file))

        self.archive_files = archive_files

        if not self.keep_temp:
            if self.dry_run:
                logger.info('removing %s', base_dir)
            else:
                rmtree(base_dir)

    def get_archive_files(self):
        """Return the list of archive files created when the command
        was run, or None if the command hasn't run yet.
        """
        return self.archive_files

    def create_tree(self, base_dir, files, mode=0o777, verbose=1,
                    dry_run=False):
        """Create under *base_dir* every directory needed to hold *files*."""
        need_dir = set()
        for file in files:
            need_dir.add(os.path.join(base_dir, os.path.dirname(file)))

        # Now create them
        for dir in sorted(need_dir):
            self.mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
diff --git a/Lib/packaging/command/test.py b/Lib/packaging/command/test.py
new file mode 100644
index 0000000..7f9015b
--- /dev/null
+++ b/Lib/packaging/command/test.py
@@ -0,0 +1,81 @@
+"""Run the project's test suite."""
+
+import os
+import sys
+import logging
+import unittest
+
+from packaging import logger
+from packaging.command.cmd import Command
+from packaging.database import get_distribution
+from packaging.errors import PackagingOptionError
+from packaging.util import resolve_name
+
+
class test(Command):
    """Command that builds the project and runs its test suite."""

    description = "run the project's test suite"

    user_options = [
        ('suite=', 's',
         "test suite to run (for example: 'some_module.test_suite')"),
        ('runner=', None,
         "test runner to be called."),
        ('tests-require=', None,
         "list of distributions required to run the test suite."),
    ]

    def initialize_options(self):
        """Set all options to their default (unset) values."""
        self.suite = None
        self.runner = None
        self.tests_require = []

    def finalize_options(self):
        """Warn about missing test dependencies and check that some way of
        running the tests (suite, runner or discovery) is available.
        """
        self.build_lib = self.get_finalized_command("build").build_lib
        for requirement in self.tests_require:
            if get_distribution(requirement) is None:
                logger.warning("test dependency %s is not installed, "
                               "tests may fail", requirement)
        if (not self.suite and not self.runner and
            self.get_ut_with_discovery() is None):
            raise PackagingOptionError(
                "no test discovery available, please give a 'suite' or "
                "'runner' option or install unittest2")

    def get_ut_with_discovery(self):
        """Return a unittest-like module supporting test discovery
        (unittest itself or unittest2), or None if none is available.
        """
        if hasattr(unittest.TestLoader, "discover"):
            return unittest
        else:
            try:
                import unittest2
                return unittest2
            except ImportError:
                return None

    def run(self):
        """Build the project, put the build dir on sys.path, run the tests
        and restore sys.path afterwards.
        """
        prev_syspath = sys.path[:]
        try:
            # build release
            build = self.get_reinitialized_command('build')
            self.run_command('build')
            sys.path.insert(0, build.build_lib)

            # Temporary kludge until we remove the verbose arguments and use
            # logging everywhere
            logger = logging.getLogger('packaging')
            # NOTE(review): any effective level >= DEBUG (i.e. nearly every
            # configuration) makes this True, so verbosity is almost always
            # 2; '<=' may have been intended -- confirm before changing
            verbose = logger.getEffectiveLevel() >= logging.DEBUG
            verbosity = verbose + 1

            # run the tests
            if self.runner:
                resolve_name(self.runner)()
            elif self.suite:
                runner = unittest.TextTestRunner(verbosity=verbosity)
                runner.run(resolve_name(self.suite)())
            elif self.get_ut_with_discovery():
                ut = self.get_ut_with_discovery()
                test_suite = ut.TestLoader().discover(os.curdir)
                runner = ut.TextTestRunner(verbosity=verbosity)
                runner.run(test_suite)
        finally:
            sys.path[:] = prev_syspath
diff --git a/Lib/packaging/command/upload.py b/Lib/packaging/command/upload.py
new file mode 100644
index 0000000..df265c9
--- /dev/null
+++ b/Lib/packaging/command/upload.py
@@ -0,0 +1,201 @@
+"""Upload a distribution to a project index."""
+
+import os
+import socket
+import logging
+import platform
+import urllib.parse
+from io import BytesIO
+from base64 import standard_b64encode
+from hashlib import md5
+from urllib.error import HTTPError
+from urllib.request import urlopen, Request
+
+from packaging import logger
+from packaging.errors import PackagingOptionError
+from packaging.util import (spawn, read_pypirc, DEFAULT_REPOSITORY,
+                            DEFAULT_REALM)
+from packaging.command.cmd import Command
+
+
class upload(Command):
    """Command that uploads built distribution files to a project index."""

    description = "upload distribution to PyPI"

    user_options = [
        ('repository=', 'r',
         "repository URL [default: %s]" % DEFAULT_REPOSITORY),
        ('show-response', None,
         "display full response text from server"),
        ('sign', 's',
         "sign files to upload using gpg"),
        ('identity=', 'i',
         "GPG identity used to sign files"),
        ('upload-docs', None,
         "upload documentation too"),
        ]

    boolean_options = ['show-response', 'sign']

    def initialize_options(self):
        """Set all options to their default (unset) values."""
        self.repository = None
        self.realm = None
        self.show_response = False
        self.username = ''
        self.password = ''
        self.sign = False
        self.identity = None
        self.upload_docs = False

    def finalize_options(self):
        """Apply defaults, read the pypirc config and validate option
        combinations (--identity requires --sign).
        """
        if self.repository is None:
            self.repository = DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = DEFAULT_REALM
        if self.identity and not self.sign:
            raise PackagingOptionError(
                "Must use --sign for --identity to have meaning")
        config = read_pypirc(self.repository, self.realm)
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']

        # getting the password from the distribution
        # if previously set by the register command
        if not self.password and self.distribution.password:
            self.password = self.distribution.password

    def run(self):
        """Upload every file built by earlier commands, then the docs if
        --upload-docs was given.
        """
        if not self.distribution.dist_files:
            raise PackagingOptionError(
                "No dist file created in earlier command")
        for command, pyversion, filename in self.distribution.dist_files:
            self.upload_file(command, pyversion, filename)
        if self.upload_docs:
            upload_docs = self.get_finalized_command("upload_docs")
            upload_docs.repository = self.repository
            upload_docs.username = self.username
            upload_docs.password = self.password
            upload_docs.run()

    # XXX to be refactored with register.post_to_server
    def upload_file(self, command, pyversion, filename):
        """Sign (if requested) and POST one distribution file to the index.

        *command* is the name of the command that built the file (sent as
        the 'filetype' field), *pyversion* the target Python version and
        *filename* the path of the archive to upload.
        """
        # Makes sure the repository URL is compliant
        scheme, netloc, url, params, query, fragments = \
            urllib.parse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if scheme not in ('http', 'https'):
            raise AssertionError("unsupported scheme " + scheme)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the metadata in case we need to
        # register a new release
        with open(filename, 'rb') as f:
            content = f.read()

        data = self.distribution.metadata.todict()

        # extra upload infos
        data[':action'] = 'file_upload'
        # 'protcol_version' (sic): the misspelled field name is part of the
        # PyPI upload API and must not be corrected here
        data['protcol_version'] = '1'
        data['content'] = (os.path.basename(filename), content)
        data['filetype'] = command
        data['pyversion'] = pyversion
        data['md5_digest'] = md5(content).hexdigest()

        if command == 'bdist_dumb':
            data['comment'] = 'built for %s' % platform.platform(terse=True)

        if self.sign:
            with open(filename + '.asc') as fp:
                sig = fp.read()
            # stored as a (filename, content) pair like 'content' above so
            # the file-field handling below can unpack it; the previous code
            # wrapped the pair in a list, which broke that unpacking
            data['gpg_signature'] = (
                os.path.basename(filename) + ".asc", sig)

        # set up the authentication
        # The exact encoding of the authentication string is debated.
        # Anyway PyPI only accepts ascii for both username or password.
        user_pass = (self.username + ":" + self.password).encode('ascii')
        auth = b"Basic " + standard_b64encode(user_pass)

        # Build up the MIME payload for the POST data
        boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\n--' + boundary
        end_boundary = sep_boundary + b'--'
        body = BytesIO()

        # fields whose value is a (filename, bytes) pair
        file_fields = ('content', 'gpg_signature')

        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, tuple):
                value = [value]

            content_dispo = '\nContent-Disposition: form-data; name="%s"' % key

            if key in file_fields:
                filename_, file_content = value
                body.write(sep_boundary)
                body.write(content_dispo.encode('utf-8'))
                body.write((';filename="%s"' % filename_).encode('utf-8'))
                body.write(b"\n\n")
                body.write(file_content)
            else:
                for item in value:
                    item = str(item).encode('utf-8')
                    body.write(sep_boundary)
                    body.write(content_dispo.encode('utf-8'))
                    body.write(b"\n\n")
                    body.write(item)
                    if item.endswith(b'\r'):
                        # write an extra newline (lurve Macs)
                        body.write(b'\n')

        body.write(end_boundary)
        body.write(b"\n")
        body = body.getvalue()

        logger.info("Submitting %s to %s", filename, self.repository)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' %
                        boundary.decode('ascii'),
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.code
            reason = result.msg
        except socket.error as e:
            logger.error(e)
            return
        except HTTPError as e:
            # HTTPError doubles as a response object, so keep it around for
            # --show-response; previously 'result' was left unbound here,
            # causing a NameError below
            result = e
            status = e.code
            reason = e.msg

        if status == 200:
            logger.info('Server response (%s): %s', status, reason)
        else:
            logger.error('Upload failed (%s): %s', status, reason)

        if self.show_response and logger.isEnabledFor(logging.INFO):
            sep = '-' * 75
            logger.info('%s\n%s\n%s', sep, result.read().decode(), sep)
diff --git a/Lib/packaging/command/upload_docs.py b/Lib/packaging/command/upload_docs.py
new file mode 100644
index 0000000..29ea6e9
--- /dev/null
+++ b/Lib/packaging/command/upload_docs.py
@@ -0,0 +1,173 @@
+"""Upload HTML documentation to a project index."""
+
+import os
+import base64
+import socket
+import zipfile
+import logging
+import http.client
+import urllib.parse
+from io import BytesIO
+
+from packaging import logger
+from packaging.util import read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM
+from packaging.errors import PackagingFileError
+from packaging.command.cmd import Command
+
+
def zip_dir(directory):
    """Recursively compress the contents of *directory* into a zip archive
    held in a BytesIO object, which is returned.
    """
    destination = BytesIO()
    with zipfile.ZipFile(destination, "w") as archive:
        for base, dirs, filenames in os.walk(directory):
            # archive names are relative to the top of *directory*
            prefix = base[len(directory):].lstrip(os.path.sep)
            for filename in filenames:
                source_path = os.path.join(base, filename)
                archive.write(source_path, os.path.join(prefix, filename))
    return destination
+
+
+# grabbed from
+#    http://code.activestate.com/recipes/
+#    146306-http-client-to-post-using-multipartform-data/
+# TODO factor this out for use by install and command/upload
+
def encode_multipart(fields, files, boundary=None):
    """
    *fields* is a sequence of (name: str, value: str) elements for regular
    form fields, *files* is a sequence of (name: str, filename: str, value:
    bytes) elements for data to be uploaded as files.

    Returns (content_type: bytes, body: bytes) ready for http.client.HTTP.
    """
    if boundary is None:
        boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
    elif not isinstance(boundary, bytes):
        raise TypeError('boundary is not bytes but %r' % type(boundary))

    lines = []

    # regular form fields: header, blank line, utf-8 encoded value
    for name, value in fields:
        disposition = 'Content-Disposition: form-data; name="%s"' % name
        lines.append(b'--' + boundary)
        lines.append(disposition.encode('utf-8'))
        lines.append(b'')
        lines.append(value.encode('utf-8'))

    # file fields: same shape, with a filename and raw bytes as the value
    for name, filename, value in files:
        disposition = ('Content-Disposition: form-data; name="%s"; '
                       'filename="%s"' % (name, filename))
        lines.append(b'--' + boundary)
        lines.append(disposition.encode('utf-8'))
        lines.append(b'')
        lines.append(value)

    lines.append(b'--' + boundary + b'--')
    lines.append(b'')

    body = b'\r\n'.join(lines)
    content_type = b'multipart/form-data; boundary=' + boundary
    return content_type, body
+
+
class upload_docs(Command):
    """Command that zips the built HTML docs and POSTs them to the index."""

    description = "upload HTML documentation to PyPI"

    user_options = [
        ('repository=', 'r',
         "repository URL [default: %s]" % DEFAULT_REPOSITORY),
        ('show-response', None,
         "display full response text from server"),
        ('upload-dir=', None,
         "directory to upload"),
        ]

    def initialize_options(self):
        """Set all options to their default (unset) values."""
        self.repository = None
        self.realm = None
        self.show_response = False
        self.upload_dir = None
        self.username = ''
        self.password = ''

    def finalize_options(self):
        """Apply defaults, locate the docs directory and read the pypirc
        configuration for credentials.
        """
        if self.repository is None:
            self.repository = DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = DEFAULT_REALM
        if self.upload_dir is None:
            build = self.get_finalized_command('build')
            # prefer <build_base>/docs, fall back to <build_base>/doc
            self.upload_dir = os.path.join(build.build_base, "docs")
            if not os.path.isdir(self.upload_dir):
                self.upload_dir = os.path.join(build.build_base, "doc")
        logger.info('Using upload directory %s', self.upload_dir)
        self.verify_upload_dir(self.upload_dir)
        config = read_pypirc(self.repository, self.realm)
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']

    def verify_upload_dir(self, upload_dir):
        """Check that *upload_dir* is a directory containing an index.html;
        raise PackagingFileError otherwise.
        """
        self.ensure_dirname('upload_dir')
        index_location = os.path.join(upload_dir, "index.html")
        if not os.path.exists(index_location):
            mesg = "No 'index.html' found in docs directory (%s)"
            raise PackagingFileError(mesg % upload_dir)

    def run(self):
        """Zip the docs directory and POST it to the repository."""
        name = self.distribution.metadata['Name']
        version = self.distribution.metadata['Version']
        zip_file = zip_dir(self.upload_dir)

        fields = [(':action', 'doc_upload'),
                  ('name', name), ('version', version)]
        files = [('content', name, zip_file.getvalue())]
        content_type, body = encode_multipart(fields, files)

        credentials = self.username + ':' + self.password
        auth = b"Basic " + base64.encodebytes(credentials.encode()).strip()

        logger.info("Submitting documentation to %s", self.repository)

        scheme, netloc, url, params, query, fragments = urllib.parse.urlparse(
            self.repository)
        if scheme == "http":
            conn = http.client.HTTPConnection(netloc)
        elif scheme == "https":
            conn = http.client.HTTPSConnection(netloc)
        else:
            raise AssertionError("unsupported scheme %r" % scheme)

        try:
            conn.connect()
            conn.putrequest("POST", url)
            conn.putheader('Content-type', content_type)
            conn.putheader('Content-length', str(len(body)))
            conn.putheader('Authorization', auth)
            conn.endheaders()
            conn.send(body)

        except socket.error as e:
            logger.error(e)
            return

        r = conn.getresponse()

        if r.status == 200:
            logger.info('Server response (%s): %s', r.status, r.reason)
        elif r.status == 301:
            # the index redirects to the canonical documentation URL
            location = r.getheader('Location')
            if location is None:
                location = 'http://packages.python.org/%s/' % name
            logger.info('Upload successful. Visit %s', location)
        else:
            logger.error('Upload failed (%s): %s', r.status, r.reason)

        if self.show_response and logger.isEnabledFor(logging.INFO):
            sep = '-' * 75
            logger.info('%s\n%s\n%s', sep, r.read().decode('utf-8'), sep)
diff --git a/Lib/packaging/command/wininst-10.0-amd64.exe b/Lib/packaging/command/wininst-10.0-amd64.exe
new file mode 100644
index 0000000..11f98cd
--- /dev/null
+++ b/Lib/packaging/command/wininst-10.0-amd64.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-10.0.exe b/Lib/packaging/command/wininst-10.0.exe
new file mode 100644
index 0000000..8ac6e19
--- /dev/null
+++ b/Lib/packaging/command/wininst-10.0.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-6.0.exe b/Lib/packaging/command/wininst-6.0.exe
new file mode 100644
index 0000000..f57c855
--- /dev/null
+++ b/Lib/packaging/command/wininst-6.0.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-7.1.exe b/Lib/packaging/command/wininst-7.1.exe
new file mode 100644
index 0000000..1433bc1
--- /dev/null
+++ b/Lib/packaging/command/wininst-7.1.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-8.0.exe b/Lib/packaging/command/wininst-8.0.exe
new file mode 100644
index 0000000..7403bfa
--- /dev/null
+++ b/Lib/packaging/command/wininst-8.0.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-9.0-amd64.exe b/Lib/packaging/command/wininst-9.0-amd64.exe
new file mode 100644
index 0000000..11d8011
--- /dev/null
+++ b/Lib/packaging/command/wininst-9.0-amd64.exe
Binary files differ
diff --git a/Lib/packaging/command/wininst-9.0.exe b/Lib/packaging/command/wininst-9.0.exe
new file mode 100644
index 0000000..dadb31d
--- /dev/null
+++ b/Lib/packaging/command/wininst-9.0.exe
Binary files differ
diff --git a/Lib/packaging/compat.py b/Lib/packaging/compat.py
new file mode 100644
index 0000000..a82efd3
--- /dev/null
+++ b/Lib/packaging/compat.py
@@ -0,0 +1,57 @@
+"""Compatibility helpers.
+
+This module provides classes, variables and imports which are used to
+support packaging across Python 2.x and 3.x.
+"""
+
+from packaging import logger
+
+
+# XXX Having two classes with the same name is not a good thing.
+# XXX 2to3-related code should move from util to this module
+
+# TODO Move common code here: PY3 (bool indicating if we're on 3.x), any, etc.
+
+try:
+    from packaging.util import Mixin2to3 as _Mixin2to3
+    _CONVERT = True
+    _KLASS = _Mixin2to3
+except ImportError:
+    _CONVERT = False
+    _KLASS = object
+
+__all__ = ['Mixin2to3']
+
+
class Mixin2to3(_KLASS):
    """Base class providing a 2to3 conversion hook.

    When packaging.util.Mixin2to3 could be imported (_CONVERT is true),
    _run_2to3 delegates to it to convert code and doctests; otherwise
    _run_2to3 is a no-op, so callers never have to check for 2to3
    availability themselves.
    """
    if _CONVERT:

        def _run_2to3(self, files, doctests=None, fixers=None):
            """Run 2to3 conversion on the given files and doctests.

            - First, the code in `files` is converted.
            - Second, the doctests embedded in `files` are converted.
            - Third, the doctests in the separate `doctests` text files
              are converted.

            `fixers`, if given and non-empty, restricts the fixer names
            used for the conversion.
            """
            # Use None sentinels instead of mutable default arguments
            # ([] shared across calls); normalize here.
            if doctests is None:
                doctests = []
            if fixers:
                self.fixer_names = fixers

            logger.info('converting Python code')
            _KLASS.run_2to3(self, files)

            logger.info('converting doctests in Python files')
            _KLASS.run_2to3(self, files, doctests_only=True)

            if doctests:
                logger.info('converting doctest in text files')
                _KLASS.run_2to3(self, doctests, doctests_only=True)
    else:
        # 2to3 support is unavailable: nothing to do.

        def _run_2to3(self, files, doctests=None, fixers=None):
            """No-op fallback used when 2to3 support is unavailable."""
            pass
diff --git a/Lib/packaging/compiler/__init__.py b/Lib/packaging/compiler/__init__.py
new file mode 100644
index 0000000..b25cc73
--- /dev/null
+++ b/Lib/packaging/compiler/__init__.py
@@ -0,0 +1,282 @@
+"""Compiler abstraction model used by packaging.
+
+An abstract base class is defined in the ccompiler submodule, and
+concrete implementations suitable for various platforms are defined in
+the other submodules.  The extension module is also placed in this
+package.
+
+In general, code should not instantiate compiler classes directly but
+use the new_compiler and customize_compiler functions provided in this
+module.
+
+The compiler system has a registration API: get_default_compiler,
+set_compiler, show_compilers.
+"""
+
+import os
+import sys
+import re
+
+import sysconfig
+from packaging.util import resolve_name
+from packaging.errors import PackagingPlatformError
+
+
def customize_compiler(compiler):
    """Do any platform-specific customization of a CCompiler instance.

    Mainly needed on Unix, so we can plug in the information that
    varies across Unices and is stored in Python's Makefile.

    Only the 'unix' compiler is customized; any other compiler is left
    untouched.  Environment variables (CC, CXX, LDSHARED, CPP, LDFLAGS,
    CFLAGS, CPPFLAGS, AR, ARFLAGS) override the values recorded at
    Python build time.
    """
    if compiler.name == "unix":
        # Start from the values recorded in Python's Makefile at build
        # time ('SO' is the shared library suffix, e.g. '.so').
        cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags = (
            sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
                                      'CCSHARED', 'LDSHARED', 'SO', 'AR',
                                      'ARFLAGS'))

        # The order of the checks below matters: e.g. LDFLAGS and CFLAGS
        # both append to ldshared, and CPPFLAGS appends to cpp, cflags
        # and ldshared after those.
        if 'CC' in os.environ:
            cc = os.environ['CC']
        if 'CXX' in os.environ:
            cxx = os.environ['CXX']
        if 'LDSHARED' in os.environ:
            ldshared = os.environ['LDSHARED']
        if 'CPP' in os.environ:
            cpp = os.environ['CPP']
        else:
            cpp = cc + " -E"           # not always
        if 'LDFLAGS' in os.environ:
            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
        if 'CFLAGS' in os.environ:
            # NOTE: a user-supplied CFLAGS replaces the Makefile CFLAGS
            # entirely; only OPT is kept.
            cflags = opt + ' ' + os.environ['CFLAGS']
            ldshared = ldshared + ' ' + os.environ['CFLAGS']
        if 'CPPFLAGS' in os.environ:
            cpp = cpp + ' ' + os.environ['CPPFLAGS']
            cflags = cflags + ' ' + os.environ['CPPFLAGS']
            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
        if 'AR' in os.environ:
            ar = os.environ['AR']
        if 'ARFLAGS' in os.environ:
            archiver = ar + ' ' + os.environ['ARFLAGS']
        else:
            if ar_flags is not None:
                archiver = ar + ' ' + ar_flags
            else:
                # see if its the proper default value
                # mmm I don't want to backport the makefile
                archiver = ar + ' rc'

        cc_cmd = cc + ' ' + cflags
        compiler.set_executables(
            preprocessor=cpp,
            compiler=cc_cmd,
            compiler_so=cc_cmd + ' ' + ccshared,
            compiler_cxx=cxx,
            linker_so=ldshared,
            linker_exe=cc,
            archiver=archiver)

        compiler.shared_lib_extension = so_ext
+
+
# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
# type for that platform. Keys are interpreted as re match
# patterns. Order is important; platform mappings are preferred over
# OS names.
# (Consumed by get_default_compiler below, which tries each pattern
# against sys.platform and os.name with re.match.)
# NOTE(review): 'emx' has no entry in _COMPILERS, so new_compiler()
# on os2emx would raise PackagingPlatformError -- confirm intended.
_default_compilers = (

    # Platform string mappings

    # on a cygwin built python we can use gcc like an ordinary UNIXish
    # compiler
    ('cygwin.*', 'unix'),
    ('os2emx', 'emx'),

    # OS name mappings
    ('posix', 'unix'),
    ('nt', 'msvc'),

    )
+
def get_default_compiler(osname=None, platform=None):
    """Return the name of the default compiler for the given platform.

    `osname` should be one of the standard Python OS names (i.e. the
    ones returned by os.name) and `platform` the common value returned
    by sys.platform for the platform in question.  They default to
    os.name and sys.platform respectively.

    The first entry of _default_compilers whose pattern matches either
    value wins; 'unix' is the fallback.
    """
    if osname is None:
        osname = os.name
    if platform is None:
        platform = sys.platform
    for pattern, compiler_name in _default_compilers:
        # A pattern may describe either a sys.platform string or an
        # os.name value; try both.
        if re.match(pattern, platform) or re.match(pattern, osname):
            return compiler_name
    # Nothing matched: assume a traditional Unix toolchain.
    return 'unix'
+
+
# compiler mapping
# XXX useful to expose them? (i.e. get_compiler_names)
# Values start out as dotted names and are lazily replaced by the
# imported classes (see show_compilers, new_compiler and set_compiler).
_COMPILERS = {
    'unix': 'packaging.compiler.unixccompiler.UnixCCompiler',
    'msvc': 'packaging.compiler.msvccompiler.MSVCCompiler',
    'cygwin': 'packaging.compiler.cygwinccompiler.CygwinCCompiler',
    'mingw32': 'packaging.compiler.cygwinccompiler.Mingw32CCompiler',
    'bcpp': 'packaging.compiler.bcppcompiler.BCPPCompiler',
}
+
def set_compiler(location):
    """Add or replace a compiler, given the dotted name of its class."""
    compiler_class = resolve_name(location)
    # XXX we want to check the class here
    _COMPILERS[compiler_class.name] = compiler_class
+
+
def show_compilers():
    """Print list of available compilers (used by the "--help-compiler"
    options to "build", "build_ext", "build_clib").
    """
    from packaging.fancy_getopt import FancyGetopt

    entries = []
    for name in _COMPILERS:
        klass = _COMPILERS[name]
        if isinstance(klass, str):
            # Dotted name not resolved yet: import the class and cache
            # it back into the registry for later callers.
            klass = resolve_name(klass)
            _COMPILERS[name] = klass
        entries.append(("compiler=" + name, None, klass.description))

    entries.sort()
    FancyGetopt(entries).print_help("List of available compilers:")
+
+
def new_compiler(plat=None, compiler=None, verbose=0, dry_run=False,
                 force=False):
    """Generate an instance of some CCompiler subclass for the supplied
    platform/compiler combination.  'plat' defaults to 'os.name'
    (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
    for that platform.  Currently only 'posix' and 'nt' are supported, and
    the default compilers are "traditional Unix interface" (UnixCCompiler
    class) and Visual C++ (MSVCCompiler class).  Note that it's perfectly
    possible to ask for a Unix compiler object under Windows, and a
    Microsoft compiler object under Unix -- if you supply a value for
    'compiler', 'plat' is ignored.
    """
    if plat is None:
        plat = os.name

    try:
        if compiler is None:
            compiler = get_default_compiler(plat)
        cls = _COMPILERS[compiler]
    except KeyError:
        # Unknown compiler name: turn the lookup failure into a
        # platform error with a readable message.
        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
        if compiler is not None:
            msg += " with '%s' compiler" % compiler
        raise PackagingPlatformError(msg)

    if isinstance(cls, str):
        # Lazily resolve the dotted name and cache the class.
        cls = resolve_name(cls)
        _COMPILERS[compiler] = cls

    # XXX The None is necessary to preserve backwards compatibility
    # with classes that expect verbose to be the first positional
    # argument.
    return cls(None, dry_run, force)
+
+
def gen_preprocess_options(macros, include_dirs):
    """Generate C pre-processor options (-D, -U, -I) as used by at least
    two types of compilers: the typical Unix compiler and Visual C++.

    'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
    means undefine (-U) macro 'name', and (name,value) means define (-D)
    macro 'name' to 'value'.  'include_dirs' is just a list of directory
    names to be added to the header file search path (-I).  Returns a list
    of command-line options suitable for either Unix compilers or Visual
    C++.  Raises TypeError for malformed macro definitions.
    """
    # XXX it would be nice (mainly aesthetic, and so we don't generate
    # stupid-looking command lines) to go over 'macros' and eliminate
    # redundant definitions/undefinitions; most (all?) compilers only
    # honor the last -D/-U mention of a macro anyway, so it's not
    # essential, and weeding out redundancies probably belongs in
    # CCompiler since the data structures are inherited from it.

    pp_opts = []
    for macro in macros:
        # Bug fix: the original check read
        #   not isinstance(macro, tuple) and 1 <= len(macro) <= 2
        # which (missing parentheses) never rejected tuples of the
        # wrong length and crashed on non-sized values; malformed
        # macros such as ('A', '1', 'x') were silently dropped.
        if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
            # %-format with a 1-tuple so that tuple-valued 'macro'
            # doesn't get splatted into the format string.
            raise TypeError(
                "bad macro definition '%s': each element of 'macros' "
                "list must be a 1- or 2-tuple" % (macro,))

        if len(macro) == 1:         # undefine this macro
            pp_opts.append("-U%s" % macro[0])
        else:                       # len(macro) == 2
            if macro[1] is None:    # define with no explicit value
                pp_opts.append("-D%s" % macro[0])
            else:
                # XXX *don't* need to be clever about quoting the
                # macro value here, because we're going to avoid the
                # shell at all costs when we spawn the command!
                pp_opts.append("-D%s=%s" % macro)

    for dir in include_dirs:
        pp_opts.append("-I%s" % dir)

    return pp_opts
+
+
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
    """Generate linker options for searching library directories and
    linking with specific libraries.

    'libraries' and 'library_dirs' are, respectively, lists of library names
    (not filenames!) and search directories.  Returns a list of command-line
    options suitable for use with some compiler (depending on the two format
    strings passed in).
    """
    lib_opts = [compiler.library_dir_option(dir) for dir in library_dirs]

    for dir in runtime_library_dirs:
        # A compiler may render one runtime dir as several options.
        opt = compiler.runtime_library_dir_option(dir)
        if isinstance(opt, list):
            lib_opts.extend(opt)
        else:
            lib_opts.append(opt)

    # Deliberately keep duplicate library mentions: "-lfoo -lbar -lfoo"
    # is sometimes required to resolve all symbols.

    for lib in libraries:
        lib_dir, lib_name = os.path.split(lib)
        if lib_dir == '':
            # Plain library name: let the compiler spell the option.
            lib_opts.append(compiler.library_option(lib))
            continue
        # A directory component means "link this exact file".
        lib_file = compiler.find_library_file([lib_dir], lib_name)
        if lib_file is None:
            compiler.warn("no library file corresponding to "
                          "'%s' found (skipping)" % lib)
        else:
            lib_opts.append(lib_file)

    return lib_opts
diff --git a/Lib/packaging/compiler/bcppcompiler.py b/Lib/packaging/compiler/bcppcompiler.py
new file mode 100644
index 0000000..63b6d8b
--- /dev/null
+++ b/Lib/packaging/compiler/bcppcompiler.py
@@ -0,0 +1,356 @@
+"""CCompiler implementation for the Borland C++ compiler."""
+
+# This implementation by Lyle Johnson, based on the original msvccompiler.py
+# module and using the directions originally published by Gordon Williams.
+
+# XXX looks like there's a LOT of overlap between these two classes:
+# someone should sit down and factor out the common code as
+# WindowsCCompiler!  --GPW
+
+import os
+
+from packaging.errors import (PackagingExecError, CompileError, LibError,
+                              LinkError, UnknownFileError)
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_preprocess_options
+from packaging.file_util import write_file
+from packaging.dep_util import newer
+from packaging import logger
+
+
class BCPPCompiler(CCompiler) :
    """Concrete class that implements an interface to the Borland C/C++
    compiler, as defined by the CCompiler abstract class.
    """

    name = 'bcpp'
    description = 'Borland C++ Compiler'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = _c_extensions + _cpp_extensions
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'


    def __init__(self, verbose=0, dry_run=False, force=False):
        """Set up the Borland tool names and default option lists."""
        CCompiler.__init__(self, verbose, dry_run, force)

        # These executables are assumed to all be in the path.
        # Borland doesn't seem to use any special registry settings to
        # indicate their installation locations.

        self.cc = "bcc32.exe"
        self.linker = "ilink32.exe"
        self.lib = "tlib.exe"

        self.preprocess_options = None
        self.compile_options = ['/tWM', '/O2', '/q', '/g0']
        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']

        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_static = []
        self.ldflags_exe = ['/Gn', '/q', '/x']
        self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r']


    # -- Worker methods ------------------------------------------------

    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=False,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile 'sources' to object files, returning their names.

        '.rc' resource files are compiled with brcc32; '.res' files are
        passed through unchanged.  Raises CompileError on failure.
        """
        macros, objects, extra_postargs, pp_opts, build = \
                self._setup_compile(output_dir, macros, include_dirs, sources,
                                    depends, extra_postargs)
        compile_opts = extra_preargs or []
        compile_opts.append('-c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Not scheduled for rebuild (up to date); skip it.
                continue
            # XXX why do the normpath here?
            src = os.path.normpath(src)
            obj = os.path.normpath(obj)
            # XXX _setup_compile() did a mkpath() too but before the normpath.
            # Is it possible to skip the normpath?
            self.mkpath(os.path.dirname(obj))

            if ext == '.res':
                # This is already a binary file -- skip it.
                continue # the 'for' loop
            if ext == '.rc':
                # This needs to be compiled to a .res file -- do it now.
                try:
                    self.spawn(["brcc32", "-fo", obj, src])
                except PackagingExecError as msg:
                    raise CompileError(msg)
                continue # the 'for' loop

            # The next two are both for the real compiler.
            if ext in self._c_extensions:
                input_opt = ""
            elif ext in self._cpp_extensions:
                input_opt = "-P"
            else:
                # Unknown file type -- no extra options.  The compiler
                # will probably fail, but let it just in case this is a
                # file the compiler recognizes even if we don't.
                input_opt = ""

            output_opt = "-o" + obj

            # Compiler command line syntax is: "bcc32 [options] file(s)".
            # Note that the source file names must appear at the end of
            # the command line.
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] +
                           extra_postargs + [src])
            except PackagingExecError as msg:
                raise CompileError(msg)

        return objects


    def create_static_lib(self, objects, output_libname, output_dir=None,
                          debug=False, target_lang=None):
        """Link 'objects' into a static library using tlib.

        Raises LibError if tlib fails; skips the link when the library
        is already up to date.
        """
        objects, output_dir = self._fix_object_args(objects, output_dir)
        output_filename = \
            self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = [output_filename, '/u'] + objects
            if debug:
                pass                    # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except PackagingExecError as msg:
                raise LibError(msg)
        else:
            logger.debug("skipping %s (up-to-date)", output_filename)


    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=False, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link objects into an executable or DLL with ilink32.

        The ilink32 command line is highly positional (objects, output
        name, map file, libraries, .def file, resources are separated by
        comma arguments) -- do not reorder the ld_args construction
        below.  Raises LinkError on failure.
        """

        # XXX this ignores 'build_temp'!  should follow the lead of
        # msvccompiler.py

        objects, output_dir = self._fix_object_args(objects, output_dir)
        libraries, library_dirs, runtime_library_dirs = \
            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)

        if runtime_library_dirs:
            logger.warning("don't know what to do with "
                           "'runtime_library_dirs': %r", runtime_library_dirs)

        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):

            # Figure out linker args based on type of target.
            # c0w32/c0d32 are Borland's startup objects for Windows
            # executables and DLLs respectively.
            if target_desc == CCompiler.EXECUTABLE:
                startup_obj = 'c0w32'
                if debug:
                    ld_args = self.ldflags_exe_debug[:]
                else:
                    ld_args = self.ldflags_exe[:]
            else:
                startup_obj = 'c0d32'
                if debug:
                    ld_args = self.ldflags_shared_debug[:]
                else:
                    ld_args = self.ldflags_shared[:]


            # Create a temporary exports file for use by the linker
            if export_symbols is None:
                def_file = ''
            else:
                head, tail = os.path.split(output_filename)
                modname, ext = os.path.splitext(tail)
                temp_dir = os.path.dirname(objects[0]) # preserve tree structure
                def_file = os.path.join(temp_dir, '%s.def' % modname)
                contents = ['EXPORTS']
                for sym in (export_symbols or []):
                    contents.append('  %s=_%s' % (sym, sym))
                self.execute(write_file, (def_file, contents),
                             "writing %s" % def_file)

            # Borland C++ has problems with '/' in paths
            objects2 = [os.path.normpath(o) for o in objects]
            # split objects in .obj and .res files
            # Borland C++ needs them at different positions in the command line
            objects = [startup_obj]
            resources = []
            for file in objects2:
                base, ext = os.path.splitext(os.path.normcase(file))
                if ext == '.res':
                    resources.append(file)
                else:
                    objects.append(file)


            for l in library_dirs:
                ld_args.append("/L%s" % os.path.normpath(l))
            ld_args.append("/L.") # we sometimes use relative paths

            # list of object files
            ld_args.extend(objects)

            # XXX the command line syntax for Borland C++ is a bit wonky;
            # certain filenames are jammed together in one big string, but
            # comma-delimited.  This doesn't mesh too well with the
            # Unix-centric attitude (with a DOS/Windows quoting hack) of
            # 'spawn()', so constructing the argument list is a bit
            # awkward.  Note that doing the obvious thing and jamming all
            # the filenames and commas into one argument would be wrong,
            # because 'spawn()' would quote any filenames with spaces in
            # them.  Arghghh!.  Apparently it works fine as coded...

            # name of dll/exe file
            ld_args.extend((',',output_filename))
            # no map file and start libraries
            ld_args.append(',,')

            for lib in libraries:
                # see if we find it and if there is a bcpp specific lib
                # (xxx_bcpp.lib)
                libfile = self.find_library_file(library_dirs, lib, debug)
                if libfile is None:
                    ld_args.append(lib)
                    # probably a BCPP internal library -- don't warn
                else:
                    # full name which prefers bcpp_xxx.lib over xxx.lib
                    ld_args.append(libfile)

            # some default libraries
            ld_args.append('import32')
            ld_args.append('cw32mt')

            # def file for export symbols
            ld_args.extend((',',def_file))
            # add resource files
            ld_args.append(',')
            ld_args.extend(resources)


            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except PackagingExecError as msg:
                raise LinkError(msg)

        else:
            logger.debug("skipping %s (up-to-date)", output_filename)

    # -- Miscellaneous methods -----------------------------------------


    def find_library_file(self, dirs, lib, debug=False):
        """Return the path of the best matching library file, or None.

        Preference order: '<lib>_bcpp.lib' over '<lib>.lib', and with
        'debug' set, '<lib>_d*' variants over both.
        """
        # List of effective library names to try, in order of preference:
        # xxx_bcpp.lib is better than xxx.lib
        # and xxx_d.lib is better than xxx.lib if debug is set
        #
        # The "_bcpp" suffix is to handle a Python installation for people
        # with multiple compilers (primarily Packaging hackers, I suspect
        # ;-).  The idea is they'd have one static library for each
        # compiler they care about, since (almost?) every Windows compiler
        # seems to have a different format for static libraries.
        if debug:
            dlib = (lib + "_d")
            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
        else:
            try_names = (lib + "_bcpp", lib)

        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # NOTE: this 'else' always runs when the loop didn't return
            # (there is no 'break'), so it's just the fall-through case.
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # overwrite the one from CCompiler to support rc and res-files
    def object_filenames(self, source_filenames, strip_dir=False,
                         output_dir=''):
        """Map source filenames to the object filenames they produce.

        '.rc' maps to '.res', '.res' is kept as-is, everything else gets
        the standard '.obj' extension.  Raises UnknownFileError for
        unrecognized extensions.
        """
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            base, ext = os.path.splitext(os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc','.res']):
                raise UnknownFileError("unknown file type '%s' (from '%s')" % \
                      (ext, src_name))
            if strip_dir:
                base = os.path.basename(base)
            if ext == '.res':
                # these can go unchanged
                obj_names.append(os.path.join(output_dir, base + ext))
            elif ext == '.rc':
                # these need to be compiled to .res-files
                obj_names.append(os.path.join(output_dir, base + '.res'))
            else:
                obj_names.append(os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names


    def preprocess(self, source, output_file=None, macros=None,
                   include_dirs=None, extra_preargs=None,
                   extra_postargs=None):
        """Run cpp32 on 'source', writing to 'output_file' if given.

        Preprocessing happens only when forced or when the target is
        missing/out of date.  Raises CompileError on failure.
        """
        _, macros, include_dirs = \
            self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = ['cpp32.exe'] + pp_opts
        if output_file is not None:
            pp_args.append('-o' + output_file)
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # We need to preprocess: either we're being forced to, or the
        # source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except PackagingExecError as msg:
                # XXX(review): print() looks like leftover debugging --
                # other methods only raise; consider using logger instead.
                print(msg)
                raise CompileError(msg)
diff --git a/Lib/packaging/compiler/ccompiler.py b/Lib/packaging/compiler/ccompiler.py
new file mode 100644
index 0000000..551c5dc
--- /dev/null
+++ b/Lib/packaging/compiler/ccompiler.py
@@ -0,0 +1,868 @@
+"""Abstract base class for compilers.
+
+This modules contains CCompiler, an abstract base class that defines the
+interface for the compiler abstraction model used by packaging.
+"""
+
+import os
+import sys
+from shutil import move
+from packaging import logger
+from packaging.util import split_quoted, execute, newer_group, spawn
+from packaging.errors import (CompileError, LinkError, UnknownFileError)
+from packaging.compiler import gen_preprocess_options
+
+
class CCompiler:
    """Abstract base class to define the interface that must be implemented
    by real compiler classes.  Also has some utility methods used by
    several compiler classes.

    The basic idea behind a compiler abstraction class is that each
    instance can be used for all the compile/link steps in building a
    single project.  Thus, attributes common to all of those compile and
    link steps -- include directories, macros to define, libraries to link
    against, etc. -- are attributes of the compiler instance.  To allow for
    variability in how individual files are treated, most of those
    attributes may be varied on a per-compilation or per-link basis.
    """

    # 'name' is a class attribute that identifies this class.  It
    # keeps code that wants to know what kind of compiler it's dealing with
    # from having to import all possible compiler classes just to do an
    # 'isinstance'.
    name = None
    description = None

    # XXX things not handled by this compiler abstraction model:
    #   * client can't provide additional options for a compiler,
    #     e.g. warning, optimization, debugging flags.  Perhaps this
    #     should be the domain of concrete compiler abstraction classes
    #     (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
    #     class should have methods for the common ones.
    #   * can't completely override the include or library search
    #     path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
    #     I'm not sure how widely supported this is even by Unix
    #     compilers, much less on other platforms.  And I'm even less
    #     sure how useful it is; maybe for cross-compiling, but
    #     support for that is a ways off.  (And anyways, cross
    #     compilers probably have a dedicated binary with the
    #     right paths compiled in.  I hope.)
    #   * can't do really freaky things with the library list/library
    #     dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
    #     different versions of libfoo.a in different locations.  I
    #     think this is useless without the ability to null out the
    #     library search path anyways.


    # Subclasses that rely on the standard filename generation methods
    # implemented below should override these; see the comment near
    # those methods ('object_filenames()' et. al.) for details:
    src_extensions = None               # list of strings
    obj_extension = None                # string
    static_lib_extension = None
    shared_lib_extension = None         # string
    static_lib_format = None            # format string
    shared_lib_format = None            # prob. same as static_lib_format
    exe_extension = None                # string

    # Default language settings. language_map is used to detect a source
    # file or Extension target language, checking source filenames.
    # language_order is used to detect the language precedence, when deciding
    # what language to use when mixing source types. For example, if some
    # extension has two files with ".c" extension, and one with ".cpp", it
    # is still linked as c++.
    language_map = {".c": "c",
                    ".cc": "c++",
                    ".cpp": "c++",
                    ".cxx": "c++",
                    ".m": "objc",
                   }
    # Highest-precedence language first: any C++ source makes the link C++.
    language_order = ["c++", "objc", "c"]
+
+    def __init__(self, verbose=0, dry_run=False, force=False):
+        self.dry_run = dry_run
+        self.force = force
+        self.verbose = verbose
+
+        # 'output_dir': a common output directory for object, library,
+        # shared object, and shared library files
+        self.output_dir = None
+
+        # 'macros': a list of macro definitions (or undefinitions).  A
+        # macro definition is a 2-tuple (name, value), where the value is
+        # either a string or None (no explicit value).  A macro
+        # undefinition is a 1-tuple (name,).
+        self.macros = []
+
+        # 'include_dirs': a list of directories to search for include files
+        self.include_dirs = []
+
+        # 'libraries': a list of libraries to include in any link
+        # (library names, not filenames: eg. "foo" not "libfoo.a")
+        self.libraries = []
+
+        # 'library_dirs': a list of directories to search for libraries
+        self.library_dirs = []
+
+        # 'runtime_library_dirs': a list of directories to search for
+        # shared libraries/objects at runtime
+        self.runtime_library_dirs = []
+
+        # 'objects': a list of object files (or similar, such as explicitly
+        # named library files) to include on any link
+        self.objects = []
+
+        for key, value in self.executables.items():
+            self.set_executable(key, value)
+
+    def set_executables(self, **args):
+        """Define the executables (and options for them) that will be run
+        to perform the various stages of compilation.  The exact set of
+        executables that may be specified here depends on the compiler
+        class (via the 'executables' class attribute), but most will have:
+          compiler      the C/C++ compiler
+          linker_so     linker used to create shared objects and libraries
+          linker_exe    linker used to create binary executables
+          archiver      static library creator
+
+        On platforms with a command line (Unix, DOS/Windows), each of these
+        is a string that will be split into executable name and (optional)
+        list of arguments.  (Splitting the string is done similarly to how
+        Unix shells operate: words are delimited by spaces, but quotes and
+        backslashes can override this.  See
+        'distutils.util.split_quoted()'.)
+        """
+
+        # Note that some CCompiler implementation classes will define class
+        # attributes 'cpp', 'cc', etc. with hard-coded executable names;
+        # this is appropriate when a compiler class is for exactly one
+        # compiler/OS combination (eg. MSVCCompiler).  Other compiler
+        # classes (UnixCCompiler, in particular) are driven by information
+        # discovered at run-time, since there are many different ways to do
+        # basically the same things with Unix C compilers.
+
+        for key, value in args.items():
+            if key not in self.executables:
+                raise ValueError("unknown executable '%s' for class %s" % \
+                      (key, self.__class__.__name__))
+            self.set_executable(key, value)
+
+    def set_executable(self, key, value):
+        if isinstance(value, str):
+            setattr(self, key, split_quoted(value))
+        else:
+            setattr(self, key, value)
+
+    def _find_macro(self, name):
+        i = 0
+        for defn in self.macros:
+            if defn[0] == name:
+                return i
+            i = i + 1
+        return None
+
+    def _check_macro_definitions(self, definitions):
+        """Ensures that every element of 'definitions' is a valid macro
+        definition, ie. either (name,value) 2-tuple or a (name,) tuple.  Do
+        nothing if all definitions are OK, raise TypeError otherwise.
+        """
+        for defn in definitions:
+            if not (isinstance(defn, tuple) and
+                    (len(defn) == 1 or
+                     (len(defn) == 2 and
+                      (isinstance(defn[1], str) or defn[1] is None))) and
+                    isinstance(defn[0], str)):
+                raise TypeError(("invalid macro definition '%s': " % defn) + \
+                      "must be tuple (string,), (string, string), or " + \
+                      "(string, None)")
+
+
+    # -- Bookkeeping methods -------------------------------------------
+
+    def define_macro(self, name, value=None):
+        """Define a preprocessor macro for all compilations driven by this
+        compiler object.  The optional parameter 'value' should be a
+        string; if it is not supplied, then the macro will be defined
+        without an explicit value and the exact outcome depends on the
+        compiler used (XXX true? does ANSI say anything about this?)
+        """
+        # Delete from the list of macro definitions/undefinitions if
+        # already there (so that this one will take precedence).
+        i = self._find_macro(name)
+        if i is not None:
+            del self.macros[i]
+
+        defn = (name, value)
+        self.macros.append(defn)
+
+    def undefine_macro(self, name):
+        """Undefine a preprocessor macro for all compilations driven by
+        this compiler object.  If the same macro is defined by
+        'define_macro()' and undefined by 'undefine_macro()' the last call
+        takes precedence (including multiple redefinitions or
+        undefinitions).  If the macro is redefined/undefined on a
+        per-compilation basis (ie. in the call to 'compile()'), then that
+        takes precedence.
+        """
+        # Delete from the list of macro definitions/undefinitions if
+        # already there (so that this one will take precedence).
+        i = self._find_macro(name)
+        if i is not None:
+            del self.macros[i]
+
+        undefn = (name,)
+        self.macros.append(undefn)
+
+    def add_include_dir(self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        header files.  The compiler is instructed to search directories in
+        the order in which they are supplied by successive calls to
+        'add_include_dir()'.
+        """
+        self.include_dirs.append(dir)
+
+    def set_include_dirs(self, dirs):
+        """Set the list of directories that will be searched to 'dirs' (a
+        list of strings).  Overrides any preceding calls to
+        'add_include_dir()'; subsequence calls to 'add_include_dir()' add
+        to the list passed to 'set_include_dirs()'.  This does not affect
+        any list of standard include directories that the compiler may
+        search by default.
+        """
+        self.include_dirs = dirs[:]
+
+    def add_library(self, libname):
+        """Add 'libname' to the list of libraries that will be included in
+        all links driven by this compiler object.  Note that 'libname'
+        should *not* be the name of a file containing a library, but the
+        name of the library itself: the actual filename will be inferred by
+        the linker, the compiler, or the compiler class (depending on the
+        platform).
+
+        The linker will be instructed to link against libraries in the
+        order they were supplied to 'add_library()' and/or
+        'set_libraries()'.  It is perfectly valid to duplicate library
+        names; the linker will be instructed to link against libraries as
+        many times as they are mentioned.
+        """
+        self.libraries.append(libname)
+
+    def set_libraries(self, libnames):
+        """Set the list of libraries to be included in all links driven by
+        this compiler object to 'libnames' (a list of strings).  This does
+        not affect any standard system libraries that the linker may
+        include by default.
+        """
+        self.libraries = libnames[:]
+
+
+    def add_library_dir(self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        libraries specified to 'add_library()' and 'set_libraries()'.  The
+        linker will be instructed to search for libraries in the order they
+        are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
+        """
+        self.library_dirs.append(dir)
+
+    def set_library_dirs(self, dirs):
+        """Set the list of library search directories to 'dirs' (a list of
+        strings).  This does not affect any standard library search path
+        that the linker may search by default.
+        """
+        self.library_dirs = dirs[:]
+
+    def add_runtime_library_dir(self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        shared libraries at runtime.
+        """
+        self.runtime_library_dirs.append(dir)
+
+    def set_runtime_library_dirs(self, dirs):
+        """Set the list of directories to search for shared libraries at
+        runtime to 'dirs' (a list of strings).  This does not affect any
+        standard search path that the runtime linker may search by
+        default.
+        """
+        self.runtime_library_dirs = dirs[:]
+
+    def add_link_object(self, object):
+        """Add 'object' to the list of object files (or analogues, such as
+        explicitly named library files or the output of "resource
+        compilers") to be included in every link driven by this compiler
+        object.
+        """
+        self.objects.append(object)
+
+    def set_link_objects(self, objects):
+        """Set the list of object files (or analogues) to be included in
+        every link to 'objects'.  This does not affect any standard object
+        files that the linker may include by default (such as system
+        libraries).
+        """
+        self.objects = objects[:]
+
+
+    # -- Private utility methods --------------------------------------
+    # (here for the convenience of subclasses)
+
+    # Helper method to prep compiler in subclass compile() methods
+    def _setup_compile(self, outdir, macros, incdirs, sources, depends,
+                       extra):
+        """Process arguments and decide which source files to compile."""
+        if outdir is None:
+            outdir = self.output_dir
+        elif not isinstance(outdir, str):
+            raise TypeError("'output_dir' must be a string or None")
+
+        if macros is None:
+            macros = self.macros
+        elif isinstance(macros, list):
+            macros = macros + (self.macros or [])
+        else:
+            raise TypeError("'macros' (if supplied) must be a list of tuples")
+
+        if incdirs is None:
+            incdirs = self.include_dirs
+        elif isinstance(incdirs, (list, tuple)):
+            incdirs = list(incdirs) + (self.include_dirs or [])
+        else:
+            raise TypeError(
+                "'include_dirs' (if supplied) must be a list of strings")
+
+        if extra is None:
+            extra = []
+
+        # Get the list of expected output (object) files
+        objects = self.object_filenames(sources,
+                                        strip_dir=False,
+                                        output_dir=outdir)
+        assert len(objects) == len(sources)
+
+        pp_opts = gen_preprocess_options(macros, incdirs)
+
+        build = {}
+        for i in range(len(sources)):
+            src = sources[i]
+            obj = objects[i]
+            ext = os.path.splitext(src)[1]
+            self.mkpath(os.path.dirname(obj))
+            build[obj] = (src, ext)
+
+        return macros, objects, extra, pp_opts, build
+
+    def _get_cc_args(self, pp_opts, debug, before):
+        # works for unixccompiler, emxccompiler, cygwinccompiler
+        cc_args = pp_opts + ['-c']
+        if debug:
+            cc_args[:0] = ['-g']
+        if before:
+            cc_args[:0] = before
+        return cc_args
+
+    def _fix_compile_args(self, output_dir, macros, include_dirs):
+        """Typecheck and fix-up some of the arguments to the 'compile()'
+        method, and return fixed-up values.  Specifically: if 'output_dir'
+        is None, replaces it with 'self.output_dir'; ensures that 'macros'
+        is a list, and augments it with 'self.macros'; ensures that
+        'include_dirs' is a list, and augments it with 'self.include_dirs'.
+        Guarantees that the returned values are of the correct type,
+        i.e. for 'output_dir' either string or None, and for 'macros' and
+        'include_dirs' either list or None.
+        """
+        if output_dir is None:
+            output_dir = self.output_dir
+        elif not isinstance(output_dir, str):
+            raise TypeError("'output_dir' must be a string or None")
+
+        if macros is None:
+            macros = self.macros
+        elif isinstance(macros, list):
+            macros = macros + (self.macros or [])
+        else:
+            raise TypeError("'macros' (if supplied) must be a list of tuples")
+
+        if include_dirs is None:
+            include_dirs = self.include_dirs
+        elif isinstance(include_dirs, (list, tuple)):
+            include_dirs = list(include_dirs) + (self.include_dirs or [])
+        else:
+            raise TypeError(
+                "'include_dirs' (if supplied) must be a list of strings")
+
+        return output_dir, macros, include_dirs
+
+    def _fix_object_args(self, objects, output_dir):
+        """Typecheck and fix up some arguments supplied to various methods.
+        Specifically: ensure that 'objects' is a list; if output_dir is
+        None, replace with self.output_dir.  Return fixed versions of
+        'objects' and 'output_dir'.
+        """
+        if not isinstance(objects, (list, tuple)):
+            raise TypeError("'objects' must be a list or tuple of strings")
+        objects = list(objects)
+
+        if output_dir is None:
+            output_dir = self.output_dir
+        elif not isinstance(output_dir, str):
+            raise TypeError("'output_dir' must be a string or None")
+
+        return objects, output_dir
+
+    def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+        """Typecheck and fix up some of the arguments supplied to the
+        'link_*' methods.  Specifically: ensure that all arguments are
+        lists, and augment them with their permanent versions
+        (eg. 'self.libraries' augments 'libraries').  Return a tuple with
+        fixed versions of all arguments.
+        """
+        if libraries is None:
+            libraries = self.libraries
+        elif isinstance(libraries, (list, tuple)):
+            libraries = list(libraries) + (self.libraries or [])
+        else:
+            raise TypeError(
+                "'libraries' (if supplied) must be a list of strings")
+
+        if library_dirs is None:
+            library_dirs = self.library_dirs
+        elif isinstance(library_dirs, (list, tuple)):
+            library_dirs = list(library_dirs) + (self.library_dirs or [])
+        else:
+            raise TypeError(
+                "'library_dirs' (if supplied) must be a list of strings")
+
+        if runtime_library_dirs is None:
+            runtime_library_dirs = self.runtime_library_dirs
+        elif isinstance(runtime_library_dirs, (list, tuple)):
+            runtime_library_dirs = (list(runtime_library_dirs) +
+                                    (self.runtime_library_dirs or []))
+        else:
+            raise TypeError("'runtime_library_dirs' (if supplied) "
+                            "must be a list of strings")
+
+        return libraries, library_dirs, runtime_library_dirs
+
+    def _need_link(self, objects, output_file):
+        """Return true if we need to relink the files listed in 'objects'
+        to recreate 'output_file'.
+        """
+        if self.force:
+            return True
+        else:
+            if self.dry_run:
+                newer = newer_group(objects, output_file, missing='newer')
+            else:
+                newer = newer_group(objects, output_file)
+            return newer
+
+    def detect_language(self, sources):
+        """Detect the language of a given file, or list of files. Uses
+        language_map, and language_order to do the job.
+        """
+        if not isinstance(sources, list):
+            sources = [sources]
+        lang = None
+        index = len(self.language_order)
+        for source in sources:
+            base, ext = os.path.splitext(source)
+            extlang = self.language_map.get(ext)
+            try:
+                extindex = self.language_order.index(extlang)
+                if extindex < index:
+                    lang = extlang
+                    index = extindex
+            except ValueError:
+                pass
+        return lang
+
+    # -- Worker methods ------------------------------------------------
+    # (must be implemented by subclasses)
+
    def preprocess(self, source, output_file=None, macros=None,
                   include_dirs=None, extra_preargs=None, extra_postargs=None):
        """Preprocess a single C/C++ source file, named in 'source'.

        Output will be written to file named 'output_file', or stdout if
        'output_file' not supplied.  'macros' is a list of macro
        definitions as for 'compile()', which will augment the macros set
        with 'define_macro()' and 'undefine_macro()'.  'include_dirs' is a
        list of directory names that will be added to the default list.

        Abstract hook: concrete compiler classes override this; the base
        implementation does nothing.  Raises PreprocessError on failure.
        """
        pass
+
    def compile(self, sources, output_dir=None, macros=None,
                include_dirs=None, debug=False, extra_preargs=None,
                extra_postargs=None, depends=None):
        """Compile one or more source files.

        'sources' must be a list of filenames, most likely C/C++
        files, but in reality anything that can be handled by a
        particular compiler and compiler class (eg. MSVCCompiler can
        handle resource files in 'sources').  Return a list of object
        filenames, one per source filename in 'sources'.  Depending on
        the implementation, not all source files will necessarily be
        compiled, but all corresponding object filenames will be
        returned.

        If 'output_dir' is given, object files will be put under it, while
        retaining their original path component.  That is, "foo/bar.c"
        normally compiles to "foo/bar.o" (for a Unix implementation); if
        'output_dir' is "build", then it would compile to
        "build/foo/bar.o".

        'macros', if given, must be a list of macro definitions.  A macro
        definition is either a (name, value) 2-tuple or a (name,) 1-tuple.
        The former defines a macro; if the value is None, the macro is
        defined without an explicit value.  The 1-tuple case undefines a
        macro.  Later definitions/redefinitions/undefinitions take
        precedence.

        'include_dirs', if given, must be a list of strings, the
        directories to add to the default include file search path for this
        compilation only.

        'debug' is a boolean; if true, the compiler will be instructed to
        output debug symbols in (or alongside) the object file(s).

        'extra_preargs' and 'extra_postargs' are implementation-dependent.
        On platforms that have the notion of a command line (e.g. Unix,
        DOS/Windows), they are most likely lists of strings: extra
        command-line arguments to prepend/append to the compiler command
        line.  On other platforms, consult the implementation class
        documentation.  In any event, they are intended as an escape hatch
        for those occasions when the abstract compiler framework doesn't
        cut the mustard.

        'depends', if given, is a list of filenames that all targets
        depend on.  If a source file is older than any file in
        depends, then the source file will be recompiled.  This
        supports dependency tracking, but only at a coarse
        granularity.

        Raises CompileError on failure.
        """
        # A concrete compiler class can either override this method
        # entirely or implement _compile().

        macros, objects, extra_postargs, pp_opts, build = \
                self._setup_compile(output_dir, macros, include_dirs, sources,
                                    depends, extra_postargs)
        cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)

        # Sources absent from 'build' (nothing to do) are skipped; their
        # object filenames are still part of the returned list.
        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)

        # Return *all* object filenames, not just the ones we just built.
        return objects
+
    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile 'src' to produce 'obj'.

        Hook for concrete compiler classes that do not override
        'compile()' itself; the base implementation does nothing.
        """
        pass
+
    def create_static_lib(self, objects, output_libname, output_dir=None,
                          debug=False, target_lang=None):
        """Link a bunch of stuff together to create a static library file.
        The "bunch of stuff" consists of the list of object files supplied
        as 'objects', the extra object files supplied to
        'add_link_object()' and/or 'set_link_objects()', the libraries
        supplied to 'add_library()' and/or 'set_libraries()', and the
        libraries supplied as 'libraries' (if any).

        'output_libname' should be a library name, not a filename; the
        filename will be inferred from the library name.  'output_dir' is
        the directory where the library file will be put.

        'debug' is a boolean; if true, debugging information will be
        included in the library (note that on most platforms, it is the
        compile step where this matters: the 'debug' flag is included here
        just for consistency).

        'target_lang' is the target language for which the given objects
        are being compiled. This allows specific linkage time treatment of
        certain languages.

        Abstract hook: concrete compiler classes override this; the base
        implementation does nothing.  Raises LibError on failure.
        """
        pass
+
    # values for target_desc parameter in link(); they select what kind
    # of output the concrete linker implementation should produce
    SHARED_OBJECT = "shared_object"
    SHARED_LIBRARY = "shared_library"
    EXECUTABLE = "executable"
+
    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=False, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link a bunch of stuff together to create an executable or
        shared library file.

        'target_desc' is one of the SHARED_OBJECT, SHARED_LIBRARY or
        EXECUTABLE class constants.  The "bunch of stuff" consists of the
        list of object files supplied as 'objects'.  'output_filename'
        should be a filename.  If 'output_dir' is supplied,
        'output_filename' is relative to it (i.e. 'output_filename' can
        provide directory components if needed).

        'libraries' is a list of libraries to link against.  These are
        library names, not filenames, since they're translated into
        filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
        on Unix and "foo.lib" on DOS/Windows).  However, they can include a
        directory component, which means the linker will look in that
        specific directory rather than searching all the normal locations.

        'library_dirs', if supplied, should be a list of directories to
        search for libraries that were specified as bare library names
        (ie. no directory component).  These are on top of the system
        default and those supplied to 'add_library_dir()' and/or
        'set_library_dirs()'.  'runtime_library_dirs' is a list of
        directories that will be embedded into the shared library and used
        to search for other shared libraries that *it* depends on at
        run-time.  (This may only be relevant on Unix.)

        'export_symbols' is a list of symbols that the shared library will
        export.  (This appears to be relevant only on Windows.)

        'debug' is as for 'compile()' and 'create_static_lib()', with the
        slight distinction that it actually matters on most platforms (as
        opposed to 'create_static_lib()', which includes a 'debug' flag
        mostly for form's sake).

        'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
        of course that they supply command-line arguments for the
        particular linker being used).

        'target_lang' is the target language for which the given objects
        are being compiled. This allows specific linkage time treatment of
        certain languages.

        Abstract: concrete compiler classes must override this.
        Raises LinkError on failure.
        """
        raise NotImplementedError
+
+
+    # Old 'link_*()' methods, rewritten to use the new 'link()' method.
+
+    def link_shared_lib(self, objects, output_libname, output_dir=None,
+                        libraries=None, library_dirs=None,
+                        runtime_library_dirs=None, export_symbols=None,
+                        debug=False, extra_preargs=None, extra_postargs=None,
+                        build_temp=None, target_lang=None):
+        self.link(CCompiler.SHARED_LIBRARY, objects,
+                  self.library_filename(output_libname, lib_type='shared'),
+                  output_dir,
+                  libraries, library_dirs, runtime_library_dirs,
+                  export_symbols, debug,
+                  extra_preargs, extra_postargs, build_temp, target_lang)
+
+    def link_shared_object(self, objects, output_filename, output_dir=None,
+                           libraries=None, library_dirs=None,
+                           runtime_library_dirs=None, export_symbols=None,
+                           debug=False, extra_preargs=None, extra_postargs=None,
+                           build_temp=None, target_lang=None):
+        self.link(CCompiler.SHARED_OBJECT, objects,
+                  output_filename, output_dir,
+                  libraries, library_dirs, runtime_library_dirs,
+                  export_symbols, debug,
+                  extra_preargs, extra_postargs, build_temp, target_lang)
+
+    def link_executable(self, objects, output_progname, output_dir=None,
+                        libraries=None, library_dirs=None,
+                        runtime_library_dirs=None, debug=False,
+                        extra_preargs=None, extra_postargs=None,
+                        target_lang=None):
+        self.link(CCompiler.EXECUTABLE, objects,
+                  self.executable_filename(output_progname), output_dir,
+                  libraries, library_dirs, runtime_library_dirs, None,
+                  debug, extra_preargs, extra_postargs, None, target_lang)
+
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function; there is
+    # no appropriate default implementation so subclasses should
+    # implement all of these.
+
    def library_dir_option(self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for libraries.

        Abstract: must be implemented by concrete compiler classes.
        """
        raise NotImplementedError
+
    def runtime_library_dir_option(self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for runtime libraries.

        Abstract: must be implemented by concrete compiler classes.
        """
        raise NotImplementedError
+
    def library_option(self, lib):
        """Return the compiler option to add 'lib' to the list of libraries
        linked into the shared library or executable.

        Abstract: must be implemented by concrete compiler classes.
        """
        raise NotImplementedError
+
+    def has_function(self, funcname, includes=None, include_dirs=None,
+                     libraries=None, library_dirs=None):
+        """Return a boolean indicating whether funcname is supported on
+        the current platform.  The optional arguments can be used to
+        augment the compilation environment.
+        """
+
+        # this can't be included at module scope because it tries to
+        # import math which might not be available at that point - maybe
+        # the necessary logic should just be inlined?
+        import tempfile
+        if includes is None:
+            includes = []
+        if include_dirs is None:
+            include_dirs = []
+        if libraries is None:
+            libraries = []
+        if library_dirs is None:
+            library_dirs = []
+        fd, fname = tempfile.mkstemp(".c", funcname, text=True)
+        f = os.fdopen(fd, "w")
+        try:
+            for incl in includes:
+                f.write("""#include "%s"\n""" % incl)
+            f.write("""\
+main (int argc, char **argv) {
+    %s();
+}
+""" % funcname)
+        finally:
+            f.close()
+        try:
+            objects = self.compile([fname], include_dirs=include_dirs)
+        except CompileError:
+            return False
+
+        try:
+            self.link_executable(objects, "a.out",
+                                 libraries=libraries,
+                                 library_dirs=library_dirs)
+        except (LinkError, TypeError):
+            return False
+        return True
+
    def find_library_file(self, dirs, lib, debug=False):
        """Search the specified list of directories for a static or shared
        library file 'lib' and return the full path to that file.  If
        'debug' is true, look for a debugging version (if that makes sense on
        the current platform).  Return None if 'lib' wasn't found in any of
        the specified directories.

        There is no portable default implementation, so concrete compiler
        subclasses must override this method.
        """
        raise NotImplementedError
+
+    # -- Filename generation methods -----------------------------------
+
+    # The default implementation of the filename generating methods are
+    # prejudiced towards the Unix/DOS/Windows view of the world:
+    #   * object files are named by replacing the source file extension
+    #     (eg. .c/.cpp -> .o/.obj)
+    #   * library files (shared or static) are named by plugging the
+    #     library name and extension into a format string, eg.
+    #     "lib%s.%s" % (lib_name, ".a") for Unix static libraries
+    #   * executables are named by appending an extension (possibly
+    #     empty) to the program name: eg. progname + ".exe" for
+    #     Windows
+    #
+    # To reduce redundant code, these methods expect to find
+    # several attributes in the current object (presumably defined
+    # as class attributes):
+    #   * src_extensions -
+    #     list of C/C++ source file extensions, eg. ['.c', '.cpp']
+    #   * obj_extension -
+    #     object file extension, eg. '.o' or '.obj'
+    #   * static_lib_extension -
+    #     extension for static library files, eg. '.a' or '.lib'
+    #   * shared_lib_extension -
+    #     extension for shared library/object files, eg. '.so', '.dll'
+    #   * static_lib_format -
+    #     format string for generating static library filenames,
+    #     eg. 'lib%s.%s' or '%s.%s'
+    #   * shared_lib_format
+    #     format string for generating shared library filenames
+    #     (probably same as static_lib_format, since the extension
+    #     is one of the intended parameters to the format string)
+    #   * exe_extension -
+    #     extension for executable files, eg. '' or '.exe'
+
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            base, ext = os.path.splitext(src_name)
+            base = os.path.splitdrive(base)[1]  # Chop off the drive
+            base = base[os.path.isabs(base):]  # If abs, chop off leading /
+            if ext not in self.src_extensions:
+                raise UnknownFileError("unknown file type '%s' (from '%s')" %
+                                       (ext, src_name))
+            if strip_dir:
+                base = os.path.basename(base)
+            obj_names.append(os.path.join(output_dir,
+                                          base + self.obj_extension))
+        return obj_names
+
+    def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
+        assert output_dir is not None
+        if strip_dir:
+            basename = os.path.basename(basename)
+        return os.path.join(output_dir, basename + self.shared_lib_extension)
+
+    def executable_filename(self, basename, strip_dir=False, output_dir=''):
+        assert output_dir is not None
+        if strip_dir:
+            basename = os.path.basename(basename)
+        return os.path.join(output_dir, basename + (self.exe_extension or ''))
+
+    def library_filename(self, libname, lib_type='static',     # or 'shared'
+                         strip_dir=False, output_dir=''):
+        assert output_dir is not None
+        if lib_type not in ("static", "shared", "dylib"):
+            raise ValueError(
+                "'lib_type' must be 'static', 'shared' or 'dylib'")
+        fmt = getattr(self, lib_type + "_lib_format")
+        ext = getattr(self, lib_type + "_lib_extension")
+
+        dir, base = os.path.split(libname)
+        filename = fmt % (base, ext)
+        if strip_dir:
+            dir = ''
+
+        return os.path.join(output_dir, dir, filename)
+
+
+    # -- Utility methods -----------------------------------------------
+
    def execute(self, func, args, msg=None, level=1):
        # Delegate to the module-level 'execute' helper, forwarding this
        # compiler's dry_run flag; 'level' is accepted for backward
        # compatibility but not forwarded.
        execute(func, args, msg, self.dry_run)
+
    def spawn(self, cmd):
        # Run the external command 'cmd' via the module-level 'spawn'
        # helper, honoring this compiler's dry_run flag.
        spawn(cmd, dry_run=self.dry_run)
+
+    def move_file(self, src, dst):
+        logger.info("moving %r to %r", src, dst)
+        if self.dry_run:
+            return
+        return move(src, dst)
+
+    def mkpath(self, name, mode=0o777):
+        name = os.path.normpath(name)
+        if os.path.isdir(name) or name == '':
+            return
+        if self.dry_run:
+            head = ''
+            for part in name.split(os.sep):
+                logger.info("created directory %s%s", head, part)
+                head += part + os.sep
+            return
+        os.makedirs(name, mode)
diff --git a/Lib/packaging/compiler/cygwinccompiler.py b/Lib/packaging/compiler/cygwinccompiler.py
new file mode 100644
index 0000000..7bfa611
--- /dev/null
+++ b/Lib/packaging/compiler/cygwinccompiler.py
@@ -0,0 +1,355 @@
+"""CCompiler implementations for Cygwin and mingw32 versions of GCC.
+
+This module contains the CygwinCCompiler class, a subclass of
+UnixCCompiler that handles the Cygwin port of the GNU C compiler to
+Windows, and the Mingw32CCompiler class which handles the mingw32 port
+of GCC (same as cygwin in no-cygwin mode).
+"""
+
+# problems:
+#
+# * if you use a msvc compiled python version (1.5.2)
+#   1. you have to insert a __GNUC__ section in its config.h
+#   2. you have to generate a import library for its dll
+#      - create a def-file for python??.dll
+#      - create a import library using
+#             dlltool --dllname python15.dll --def python15.def \
+#                       --output-lib libpython15.a
+#
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+#
+# * We put export_symbols in a def-file, and don't use
+#   --export-all-symbols because it didn't work reliably in some
+#   tested configurations.  And because other Windows compilers also
+#   need their symbols specified, this is no serious problem.
+#
+# tested configurations:
+#
+# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
+#   (after patching python's config.h and for C++ some other include files)
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
+#   (ld doesn't support -shared, so we use dllwrap)
+# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
+#   - its dllwrap doesn't work, there is a bug in binutils 2.10.90
+#     see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
+#   - using gcc -mdll instead of dllwrap doesn't work without -static because
+#     it tries to link against dlls instead of their import libraries. (If
+#     it finds the dll first.)
+#     By specifying -static we force ld to link against the import libraries,
+#     this is windows standard and there are normally not the necessary symbols
+#     in the dlls.
+#   *** only the version of June 2000 shows these problems
+# * cygwin gcc 3.2/ld 2.13.90 works
+#   (ld supports -shared)
+# * mingw gcc 3.2/ld 2.13 works
+#   (ld supports -shared)
+
+
+import os
+import sys
+import copy
+
+from packaging import logger
+from packaging.compiler.unixccompiler import UnixCCompiler
+from packaging.util import write_file
+from packaging.errors import PackagingExecError, CompileError, UnknownFileError
+from packaging.util import get_compiler_versions
+import sysconfig
+
+
def get_msvcr(version=None):
    """Return the names of the MSVC runtime libraries to link with.

    'version' is a Python version banner to inspect; it defaults to
    sys.version.  If the banner shows Python was built with MSVC 7.0 or
    later, a one-element list naming the matching msvcr* runtime is
    returned; for a non-MSVC build an empty list is returned (previously
    this function implicitly returned None, which broke callers doing
    list.extend on the result).  An unrecognized MSVC version raises
    ValueError.
    """
    if version is None:
        version = sys.version
    msc_pos = version.find('MSC v.')
    if msc_pos == -1:
        # not built with Visual Studio: no runtime library to add
        return []
    msc_ver = version[msc_pos + 6:msc_pos + 10]
    runtimes = {
        '1300': ['msvcr70'],   # MSVC 7.0
        '1310': ['msvcr71'],   # MSVC 7.1
        '1400': ['msvcr80'],   # VS2005 / MSVC 8.0
        '1500': ['msvcr90'],   # VS2008 / MSVC 9.0
    }
    try:
        return runtimes[msc_ver]
    except KeyError:
        raise ValueError("Unknown MS Compiler version %s " % msc_ver)
+
+
class CygwinCCompiler(UnixCCompiler):
    """Handles the Cygwin port of the GNU C compiler to Windows.

    Drives gcc/g++ in -mcygwin mode; linking of DLLs goes through either
    dllwrap or gcc itself depending on the detected binutils version.
    """
    name = 'cygwin'
    description = 'Cygwin port of GNU C Compiler for Win32'
    # Windows-flavored filename conventions used by the base class's
    # filename-generation methods
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".dll"
    static_lib_format = "lib%s%s"
    shared_lib_format = "%s%s"
    exe_extension = ".exe"

    def __init__(self, verbose=0, dry_run=False, force=False):
        # Probe pyconfig.h for GCC compatibility and the installed
        # gcc/ld/dllwrap versions, then pick linker and options
        # accordingly.

        UnixCCompiler.__init__(self, verbose, dry_run, force)

        status, details = check_config_h()
        logger.debug("Python's GCC status: %s (details: %s)", status, details)
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler. "
                "Reason: %s. "
                "Compiling may fail because of undefined preprocessor macros."
                % details)

        self.gcc_version, self.ld_version, self.dllwrap_version = \
            get_compiler_versions()
        logger.debug(self.name + ": gcc %s, ld %s, dllwrap %s\n",
                     self.gcc_version,
                     self.ld_version,
                     self.dllwrap_version)

        # NOTE(review): the version checks below are plain string
        # comparisons; they happen to order these particular values
        # correctly but are not general version comparisons.

        # ld_version >= "2.10.90" and < "2.13" should also be able to use
        # gcc -mdll instead of dllwrap
        # Older dllwraps had own version numbers, newer ones use the
        # same as the rest of binutils ( also ld )
        # dllwrap 2.10.90 is buggy
        if self.ld_version >= "2.10.90":
            self.linker_dll = "gcc"
        else:
            self.linker_dll = "dllwrap"

        # ld_version >= "2.13" support -shared so use it instead of
        # -mdll -static
        if self.ld_version >= "2.13":
            shared_option = "-shared"
        else:
            shared_option = "-mdll -static"

        # Hard-code GCC because that's what this is all about.
        # XXX optimization, warnings etc. should be customizable.
        self.set_executables(compiler='gcc -mcygwin -O -Wall',
                             compiler_so='gcc -mcygwin -mdll -O -Wall',
                             compiler_cxx='g++ -mcygwin -O -Wall',
                             linker_exe='gcc -mcygwin',
                             linker_so=('%s -mcygwin %s' %
                                        (self.linker_dll, shared_option)))

        # cygwin and mingw32 need different sets of libraries
        if self.gcc_version == "2.91.57":
            # cygwin shouldn't need msvcrt, but without the dlls will crash
            # (gcc version 2.91.57) -- perhaps something about initialization
            self.dll_libraries=["msvcrt"]
            self.warn(
                "Consider upgrading to a newer version of gcc")
        else:
            # Include the appropriate MSVC runtime library if Python was built
            # with MSVC 7.0 or later.
            # NOTE(review): get_msvcr() has no return statement when
            # 'MSC v.' is absent from sys.version, so dll_libraries may
            # end up None here; link() then calls extend() on it --
            # confirm this cannot happen on a non-MSVC build.
            self.dll_libraries = get_msvcr()

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile the source by spawning GCC and windres if needed."""
        if ext == '.rc' or ext == '.res':
            # gcc needs '.res' and '.rc' compiled to object files !!!
            try:
                self.spawn(["windres", "-i", src, "-o", obj])
            except PackagingExecError as msg:
                raise CompileError(msg)
        else: # for other files use the C-compiler
            try:
                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                           extra_postargs)
            except PackagingExecError as msg:
                raise CompileError(msg)

    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=False, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link the objects.

        Export symbols are handled by writing a .def file next to the
        object files and passing it to the linker, instead of forwarding
        export_symbols to UnixCCompiler.link.
        """
        # use separate copies, so we can modify the lists
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        # Additional libraries
        libraries.extend(self.dll_libraries)

        # handle export symbols by creating a def-file
        # with executables this only works with gcc/ld as linker
        if ((export_symbols is not None) and
            (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
            # (The linker doesn't do anything if output is up-to-date.
            # So it would probably better to check if we really need this,
            # but for this we had to insert some unchanged parts of
            # UnixCCompiler, and this is not what we want.)

            # we want to put some files in the same directory as the
            # object files are, build_temp doesn't help much
            # where are the object files
            temp_dir = os.path.dirname(objects[0])
            # name of dll to give the helper files the same base name
            dll_name, dll_extension = os.path.splitext(
                os.path.basename(output_filename))

            # generate the filenames for these files
            def_file = os.path.join(temp_dir, dll_name + ".def")
            lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")

            # Generate .def file
            contents = [
                "LIBRARY %s" % os.path.basename(output_filename),
                "EXPORTS"]
            for sym in export_symbols:
                contents.append(sym)
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

            # next add options for def-file and to creating import libraries

            # dllwrap uses different options than gcc/ld
            if self.linker_dll == "dllwrap":
                extra_preargs.extend(("--output-lib", lib_file))
                # for dllwrap we have to use a special option
                extra_preargs.extend(("--def", def_file))
            # we use gcc/ld here and can be sure ld is >= 2.9.10
            else:
                # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
                #extra_preargs.extend(("-Wl,--out-implib,%s" % lib_file))
                # for gcc/ld the def-file is specified as any object files
                objects.append(def_file)

        #end: if ((export_symbols is not None) and
        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):

        # who wants symbols and a many times larger output file
        # should explicitly switch the debug mode on
        # otherwise we let dllwrap/ld strip the output file
        # (On my machine: 10KB < stripped_file < ??100KB
        #   unstripped_file = stripped_file + XXX KB
        #  ( XXX=254 for a typical python extension))
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(self, target_desc, objects, output_filename,
                           output_dir, libraries, library_dirs,
                           runtime_library_dirs,
                           None, # export_symbols, we do this in our def-file
                           debug, extra_preargs, extra_postargs, build_temp,
                           target_lang)

    # -- Miscellaneous methods -----------------------------------------

    def object_filenames(self, source_filenames, strip_dir=False,
                         output_dir=''):
        """Adds supports for rc and res files.

        Windows resource files (.rc/.res) are compiled to objects named
        'base + ext + obj_extension' so their origin stays visible;
        everything else follows the default naming scheme.
        """
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            base, ext = os.path.splitext(os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc','.res']):
                raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src_name))
            if strip_dir:
                base = os.path.basename (base)
            if ext in ('.res', '.rc'):
                # these need to be compiled to object files
                obj_names.append (os.path.join(output_dir,
                                              base + ext + self.obj_extension))
            else:
                obj_names.append (os.path.join(output_dir,
                                               base + self.obj_extension))
        return obj_names
+
+# the same as cygwin plus some additional parameters
class Mingw32CCompiler(CygwinCCompiler):
    """Handles the Mingw32 port of the GNU C compiler to Windows.

    Same driver as CygwinCCompiler, but built with -mno-cygwin flags and
    MinGW-specific link options.
    """
    name = 'mingw32'
    description = 'MinGW32 compiler'

    def __init__(self, verbose=0, dry_run=False, force=False):

        CygwinCCompiler.__init__(self, verbose, dry_run, force)

        # ld_version >= "2.13" support -shared so use it instead of
        # -mdll -static
        if self.ld_version >= "2.13":
            shared_option = "-shared"
        else:
            shared_option = "-mdll -static"

        # A real mingw32 doesn't need to specify a different entry point,
        # but cygwin 2.91.57 in no-cygwin-mode needs it.
        if self.gcc_version <= "2.91.57":
            entry_point = '--entry _DllMain@12'
        else:
            entry_point = ''

        self.set_executables(compiler='gcc -mno-cygwin -O -Wall',
                             compiler_so='gcc -mno-cygwin -mdll -O -Wall',
                             compiler_cxx='g++ -mno-cygwin -O -Wall',
                             linker_exe='gcc -mno-cygwin',
                             linker_so='%s -mno-cygwin %s %s'
                                        % (self.linker_dll, shared_option,
                                           entry_point))
        # Maybe we should also append -mthreads, but then the finished
        # dlls need another dll (mingwm10.dll see Mingw32 docs)
        # (-mthreads: Support thread-safe exception handling on `Mingw32')

        # Include the appropriate MSVC runtime library if Python was built
        # with MSVC 7.0 or later.  The previous code first assigned [] and
        # immediately overwrote it with get_msvcr(), which can return None
        # on non-MSVC builds; guard with 'or []' so later
        # libraries.extend(self.dll_libraries) cannot fail.
        self.dll_libraries = get_msvcr() or []
+
+# Because these compilers aren't configured in Python's pyconfig.h file by
+# default, we should at least warn the user if they are using an unmodified
+# version.
+
# Status constants returned by check_config_h(); the second member of
# its result tuple is a human-readable explanation.
CONFIG_H_OK = "ok"
CONFIG_H_NOTOK = "not ok"
CONFIG_H_UNCERTAIN = "uncertain"

def check_config_h():
    """Check if the current Python installation appears amenable to building
    extensions with GCC.

    Return a (status, details) tuple where 'status' is CONFIG_H_OK,
    CONFIG_H_NOTOK or CONFIG_H_UNCERTAIN and 'details' is a
    human-readable string explaining the situation.

    There are two ways to conclude "OK": either 'sys.version' contains
    the string "GCC" (implying that this Python was built with GCC), or
    the installed "pyconfig.h" contains the string "__GNUC__".
    """
    # XXX since this function also checks sys.version, it's not strictly a
    # "pyconfig.h" check -- should probably be renamed...

    # A GCC-built Python is assumed to ship a GCC-compatible pyconfig.h.
    if "GCC" in sys.version:
        return CONFIG_H_OK, "sys.version mentions 'GCC'"

    # otherwise look for __GNUC__ directly in pyconfig.h
    fn = sysconfig.get_config_h_filename()
    try:
        with open(fn) as config_h:
            mentions_gnuc = "__GNUC__" in config_h.read()
    except IOError as exc:
        return (CONFIG_H_UNCERTAIN,
                "couldn't read '%s': %s" % (fn, exc.strerror))
    if mentions_gnuc:
        return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
    return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
diff --git a/Lib/packaging/compiler/extension.py b/Lib/packaging/compiler/extension.py
new file mode 100644
index 0000000..66f6e9a
--- /dev/null
+++ b/Lib/packaging/compiler/extension.py
@@ -0,0 +1,121 @@
+"""Class representing C/C++ extension modules."""
+
+from packaging import logger
+
+# This class is really only used by the "build_ext" command, so it might
+# make sense to put it in distutils.command.build_ext.  However, that
+# module is already big enough, and I want to make this class a bit more
+# complex to simplify some common cases ("foo" module in "foo.c") and do
+# better error-checking ("foo.c" actually exists).
+#
+# Also, putting this in build_ext.py means every setup script would have to
+# import that large-ish module (indirectly, through distutils.core) in
+# order to do anything.
+
+
class Extension:
    """Describe a C/C++ extension module and everything needed to build it.

    A plain data holder used by the "build_ext" command; it performs only
    light validation.  All list-valued attributes default to new empty
    lists (never a shared mutable default).

    Instance attributes:
      name : string
        the full name of the extension, including any packages -- ie.
        *not* a filename or pathname, but Python dotted name
      sources : [string]
        list of source filenames, relative to the distribution root, in
        Unix (slash-separated) form; may be C, C++, SWIG (.i),
        platform-specific resource files, or whatever else the
        "build_ext" command recognizes
      include_dirs : [string]
        directories to search for C/C++ header files (Unix form)
      define_macros : [(name : string, value : string|None)]
        macros to define; value None means "#define FOO" with no
        particular value (-DFOO on the Unix compiler command line)
      undef_macros : [string]
        macros to undefine explicitly
      library_dirs : [string]
        directories to search for C/C++ libraries at link time
      libraries : [string]
        library names (not filenames or paths) to link against
      runtime_library_dirs : [string]
        directories to search for shared libraries at run time
      extra_objects : [string]
        extra files to link with (object files not implied by 'sources',
        static libraries, binary resource files, ...)
      extra_compile_args : [string]
        extra platform/compiler-specific arguments for compiling
      extra_link_args : [string]
        extra platform/compiler-specific arguments for linking
      export_symbols : [string]
        symbols to export from a shared extension; not needed for
        typical Python extensions, which export exactly one symbol
      swig_opts : [string]
        extra options passed to SWIG for .i sources
      depends : [string]
        files the extension depends on
      language : string
        extension language (i.e. "c", "c++", "objc"); detected from the
        source extensions if not provided
      optional : boolean
        if true, a build failure in this extension does not abort the
        build process; the failing extension is simply not installed
    """

    # **kw absorbs unknown options so that a warning is emitted instead
    # of a TypeError, keeping older setup scripts working.
    def __init__(self, name, sources, include_dirs=None, define_macros=None,
                 undef_macros=None, library_dirs=None, libraries=None,
                 runtime_library_dirs=None, extra_objects=None,
                 extra_compile_args=None, extra_link_args=None,
                 export_symbols=None, swig_opts=None, depends=None,
                 language=None, optional=None, **kw):
        # AssertionError is raised explicitly (not via 'assert') so the
        # validation survives python -O.
        if not isinstance(name, str):
            raise AssertionError("'name' must be a string")
        if not (isinstance(sources, list) and
                all(isinstance(v, str) for v in sources)):
            raise AssertionError("'sources' must be a list of strings")

        self.name = name
        self.sources = sources
        self.include_dirs = include_dirs or []
        self.define_macros = define_macros or []
        self.undef_macros = undef_macros or []
        self.library_dirs = library_dirs or []
        self.libraries = libraries or []
        self.runtime_library_dirs = runtime_library_dirs or []
        self.extra_objects = extra_objects or []
        self.extra_compile_args = extra_compile_args or []
        self.extra_link_args = extra_link_args or []
        self.export_symbols = export_symbols or []
        self.swig_opts = swig_opts or []
        self.depends = depends or []
        self.language = language
        self.optional = optional

        # If there are unknown keyword options, warn about them
        if kw:
            options = ', '.join(sorted(repr(option) for option in kw))
            logger.warning(
                'unknown arguments given to Extension: %s', options)
diff --git a/Lib/packaging/compiler/msvc9compiler.py b/Lib/packaging/compiler/msvc9compiler.py
new file mode 100644
index 0000000..d304446
--- /dev/null
+++ b/Lib/packaging/compiler/msvc9compiler.py
@@ -0,0 +1,720 @@
+"""CCompiler implementation for the Microsoft Visual Studio 2008 compiler.
+
+The MSVCCompiler class is compatible with VS 2005 and VS 2008.  Legacy
+support for older versions of VS is in the msvccompiler module.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+#   finding DevStudio (through the registry)
+# ported to VS2005 and VS 2008 by Christian Heimes
+import os
+import subprocess
+import sys
+import re
+
+from packaging.errors import (PackagingExecError, PackagingPlatformError,
+                              CompileError, LibError, LinkError)
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_lib_options
+from packaging import logger
+from packaging.util import get_platform
+
+import winreg
+
# Short aliases for the winreg API, kept for historical compatibility with
# the old distutils code that supported both _winreg and win32api.
RegOpenKeyEx = winreg.OpenKeyEx
RegEnumKey = winreg.EnumKey
RegEnumValue = winreg.EnumValue
RegError = winreg.error

# Registry roots searched (in order) when looking up compiler settings.
HKEYS = (winreg.HKEY_USERS,
         winreg.HKEY_CURRENT_USER,
         winreg.HKEY_LOCAL_MACHINE,
         winreg.HKEY_CLASSES_ROOT)

# Registry paths; VS_BASE is a template filled with the VS version (e.g. 9.0).
VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
NET_BASE = r"Software\Microsoft\.NETFramework"

# A map keyed by get_platform() return values to values accepted by
# 'vcvarsall.bat'.  Note a cross-compile may combine these (eg, 'x86_amd64' is
# the param to cross-compile on x86 targeting amd64.)
PLAT_TO_VCVARS = {
    'win32' : 'x86',
    'win-amd64' : 'amd64',
    'win-ia64' : 'ia64',
}
+
+
class Reg:
    """Helper class to read values from the Windows registry."""

    @classmethod
    def get_value(cls, path, key):
        """Return the registry value *key* found under *path*.

        Each root in HKEYS is searched in order; KeyError is raised if the
        value exists under none of them.
        """
        for base in HKEYS:
            d = cls.read_values(base, path)
            if d and key in d:
                return d[key]
        raise KeyError(key)

    @classmethod
    def read_keys(cls, base, key):
        """Return list of registry keys, or None if *key* can't be opened."""
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        L = []
        i = 0
        while True:
            try:
                k = RegEnumKey(handle, i)
            except RegError:
                # RegEnumKey raises when the index runs past the last subkey.
                break
            L.append(k)
            i += 1
        return L

    @classmethod
    def read_values(cls, base, key):
        """Return dict of registry keys and values.

        All names are converted to lowercase.  Returns None if *key*
        can't be opened.
        """
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        d = {}
        i = 0
        while True:
            try:
                # value_type (REG_SZ, REG_DWORD, ...) is not needed here.
                name, value, value_type = RegEnumValue(handle, i)
            except RegError:
                break
            name = name.lower()
            d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
            i += 1
        return d

    @staticmethod
    def convert_mbcs(s):
        """Decode *s* from mbcs if it is a bytes object; otherwise return it
        unchanged."""
        dec = getattr(s, "decode", None)
        if dec is not None:
            try:
                s = dec("mbcs")
            except UnicodeError:
                pass
        return s
+
class MacroExpander:
    """Expand $(Name) macros in strings, using values read from the
    registry for the given Visual Studio version."""

    def __init__(self, version):
        self.macros = {}
        self.vsbase = VS_BASE % version
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        """Record the registry value *key* under *path* as macro $(macro)."""
        self.macros["$(%s)" % macro] = Reg.get_value(path, key)

    def load_macros(self, version):
        """Populate the macro table; raises PackagingPlatformError if the
        required Visual Studio installation cannot be found."""
        self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
        self.set_macro("FrameworkDir", NET_BASE, "installroot")
        try:
            if version >= 8.0:
                self.set_macro("FrameworkSDKDir", NET_BASE,
                               "sdkinstallrootv2.0")
            else:
                raise KeyError("sdkinstallrootv2.0")
        except KeyError:
            raise PackagingPlatformError(
            """Python was built with Visual Studio 2008;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2008 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")

        if version >= 9.0:
            self.set_macro("FrameworkVersion", self.vsbase, "clr version")
            self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
        else:
            p = r"Software\Microsoft\NET Framework Setup\Product"
            for base in HKEYS:
                try:
                    h = RegOpenKeyEx(base, p)
                except RegError:
                    continue
                key = RegEnumKey(h, 0)
                # Bug fix: this used to call Reg.get_value(base, path), but
                # get_value's signature is (path, key) -- passing a registry
                # root as the path could never succeed.  read_values returns
                # the name->value dict (names lowercased) that the
                # d["version"] lookup below expects.
                d = Reg.read_values(base, r"%s\%s" % (p, key))
                self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        """Return *s* with every known $(Name) macro replaced."""
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s
+
def get_build_version(version_string=None):
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.

    *version_string* defaults to sys.version; passing an explicit string
    makes the parsing logic reusable and testable.
    """
    if version_string is None:
        version_string = sys.version
    prefix = "MSC v."
    i = version_string.find(prefix)
    if i == -1:
        return 6
    i = i + len(prefix)
    # The token after the prefix looks like "1500"; e.g. 1500 -> 9.0,
    # 1310 -> 7.1 (internal MSC version is the marketing version + 6).
    s, rest = version_string[i:].split(" ", 1)
    majorVersion = int(s[:-2]) - 6
    minorVersion = int(s[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if majorVersion == 6:
        minorVersion = 0
    if majorVersion >= 6:
        return majorVersion + minorVersion
    # else we don't know what version of the compiler this is
    return None
+
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.
    """
    # Normalizing first makes variants such as /a and /a/ compare equal;
    # the set gives O(1) duplicate detection while the list keeps order.
    seen = set()
    result = []
    for path in paths:
        normalized = os.path.normpath(path)
        if normalized not in seen:
            seen.add(normalized)
            result.append(normalized)
    return result
+
def removeDuplicates(variable):
    """Remove duplicate values of an environment variable.

    The first occurrence of each entry is kept; later repeats are dropped.
    """
    unique_entries = []
    for entry in variable.split(os.pathsep):
        if entry not in unique_entries:
            unique_entries.append(entry)
    return os.pathsep.join(unique_entries)
+
def find_vcvarsall(version):
    """Find the vcvarsall.bat file

    At first it tries to find the productdir of VS 2008 in the registry. If
    that fails it falls back to the VS90COMNTOOLS env var.
    """
    vsbase = VS_BASE % version
    try:
        productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, "productdir")
    except KeyError:
        logger.debug("Unable to find productdir in registry")
        productdir = None

    if not productdir or not os.path.isdir(productdir):
        # Registry lookup failed or pointed nowhere; derive the VC
        # directory from the VS*0COMNTOOLS environment variable
        # (e.g. VS90COMNTOOLS for version 9.0).
        toolskey = "VS%0.f0COMNTOOLS" % version
        toolsdir = os.environ.get(toolskey)
        if not toolsdir or not os.path.isdir(toolsdir):
            logger.debug("env var %s is not set or invalid", toolskey)
        else:
            # The tools dir is <VS root>\Common7\Tools; VC is two levels up.
            productdir = os.path.abspath(
                os.path.join(toolsdir, os.pardir, os.pardir, "VC"))
            if not os.path.isdir(productdir):
                logger.debug("%s is not a valid directory", productdir)
                return None

    if not productdir:
        logger.debug("no productdir found")
        return None

    vcvarsall = os.path.join(productdir, "vcvarsall.bat")
    if not os.path.isfile(vcvarsall):
        logger.debug("unable to find vcvarsall.bat")
        return None
    return vcvarsall
+
def query_vcvarsall(version, arch="x86"):
    """Launch vcvarsall.bat and read the settings from its environment
    """
    vcvarsall = find_vcvarsall(version)
    if vcvarsall is None:
        raise PackagingPlatformError("Unable to find vcvarsall.bat")

    # Environment variables we want to capture from the batch file.
    interesting = {"include", "lib", "libpath", "path"}
    result = {}

    logger.debug("calling 'vcvarsall.bat %s' (version=%s)", arch, version)
    # Run the batch file, then 'set' to dump the resulting environment.
    popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)

    stdout, stderr = popen.communicate()
    if popen.wait() != 0:
        raise PackagingPlatformError(stderr.decode("mbcs"))

    for line in stdout.decode("mbcs").split("\n"):
        line = Reg.convert_mbcs(line)
        if '=' not in line:
            continue
        key, value = line.strip().split('=', 1)
        key = key.lower()
        if key not in interesting:
            continue
        if value.endswith(os.pathsep):
            value = value[:-1]
        result[key] = removeDuplicates(value)

    if len(result) != len(interesting):
        raise ValueError(str(list(result)))

    return result
+
# More globals
# Version of the MSVC compiler that built the running Python, computed once
# at import time.  This module only supports VS 2005 (8.0) and later.
# NOTE(review): get_build_version() can return None for unknown compilers,
# which would make the '< 8.0' comparison raise TypeError on Python 3 --
# presumably unreachable in practice, but worth confirming.
VERSION = get_build_version()
if VERSION < 8.0:
    raise PackagingPlatformError("VC %0.1f is not supported by this module" % VERSION)
# MACROS = MacroExpander(VERSION)
+
class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    name = 'msvc'
    description = 'Microsoft Visual C++'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=False, force=False):
        CCompiler.__init__(self, verbose, dry_run, force)
        self.__version = VERSION
        self.__root = r"Software\Microsoft\VisualStudio"
        # self.__macros = MACROS
        self.__paths = []
        # target platform (.plat_name is consistent with 'bdist')
        self.plat_name = None
        self.__arch = None  # deprecated name
        # Tool discovery is deferred to initialize() so a compiler object
        # can be created without a working MSVC installation.
        self.initialized = False

    def initialize(self, plat_name=None):
        """Locate the build tools and set up search paths and options.

        Must be called exactly once before any compile/link method.
        Raises PackagingPlatformError if no usable compiler installation
        is found for *plat_name* (one of 'win32', 'win-amd64', 'win-ia64').
        """
        # multi-init means we would need to check platform same each time...
        assert not self.initialized, "don't init multiple times"
        if plat_name is None:
            plat_name = get_platform()
        # sanity check for platforms to prevent obscure errors later.
        ok_plats = 'win32', 'win-amd64', 'win-ia64'
        if plat_name not in ok_plats:
            raise PackagingPlatformError("--plat-name must be one of %s" %
                                         (ok_plats,))

        if ("DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ
                and self.find_exe("cl.exe")):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
            # to cross compile, you use 'x86_amd64'.
            # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
            # compile use 'x86' (ie, it runs the x86 compiler directly)
            # No idea how itanium handles this, if at all.
            if plat_name == get_platform() or plat_name == 'win32':
                # native build or cross-compile to win32
                plat_spec = PLAT_TO_VCVARS[plat_name]
            else:
                # cross compile from win32 -> some 64bit
                plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \
                            PLAT_TO_VCVARS[plat_name]

            vc_env = query_vcvarsall(VERSION, plat_spec)

            # Bug fix: query_vcvarsall already returns str values (decoded
            # from mbcs); the previous .encode('mbcs') calls produced bytes,
            # which made str.split fail and cannot be assigned to os.environ
            # under Python 3.
            self.__paths = vc_env['path'].split(os.pathsep)
            os.environ['lib'] = vc_env['lib']
            os.environ['include'] = vc_env['include']

            if len(self.__paths) == 0:
                # Bug fix: this message used to interpolate the undefined
                # attribute self.__product, raising AttributeError instead
                # of the intended error.
                raise PackagingPlatformError("Python was built with VC %s, "
                       "and extensions need to be built with the same "
                       "version of the compiler, but it isn't installed."
                       % self.__version)

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")   # resource compiler
            self.mc = self.find_exe("mc.exe")   # message compiler
            #self.set_path_env_var('lib')
            #self.set_path_env_var('include')

        # extend the MSVC path with the current path
        try:
            for p in os.environ['path'].split(';'):
                self.__paths.append(p)
        except KeyError:
            pass
        self.__paths = normalize_and_reduce_paths(self.__paths)
        os.environ['path'] = ";".join(self.__paths)

        self.preprocess_options = None
        if self.__arch == "x86":
            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3',
                                    '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
                                          '/Z7', '/D_DEBUG']
        else:
            # Win64
            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GS-',
                                    '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
                                          '/GS-', '/Z7', '/D_DEBUG']

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG', '/pdb:None'
                ]
        self.ldflags_static = ['/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames(self,
                         source_filenames,
                         strip_dir=False,
                         output_dir=''):
        """Return the object (or .res) file names for *source_filenames*.

        Copied from ccompiler.py, extended to return .res as 'object'-file
        for .rc input file.  Raises CompileError for unknown extensions.
        """
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            base, ext = os.path.splitext(src_name)
            base = os.path.splitdrive(base)[1]  # Chop off the drive
            base = base[os.path.isabs(base):]   # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename(base)
            if ext in self._rc_extensions:
                obj_names.append(os.path.join(output_dir,
                                              base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append(os.path.join(output_dir,
                                              base + self.res_extension))
            else:
                obj_names.append(os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names

    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=False,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile *sources* to object files, returning their names.

        .rc and .mc sources are run through the resource and message
        compilers respectively; C/C++ sources go through cl.exe.
        """
        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts +
                               [output_opt] + [input_opt])
                except PackagingExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] +
                               ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] +
                               ["/fo" + obj] + [rc_file])

                except PackagingExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile %s to %s"
                                   % (src, obj))

            output_opt = "/Fo" + obj
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] +
                           extra_postargs)
            except PackagingExecError as msg:
                raise CompileError(msg)

        return objects

    def create_static_lib(self,
                          objects,
                          output_libname,
                          output_dir=None,
                          debug=False,
                          target_lang=None):
        """Run lib.exe to bundle *objects* into a static library."""
        if not self.initialized:
            self.initialize()
        objects, output_dir = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname,
                                                output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass  # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except PackagingExecError as msg:
                raise LibError(msg)
        else:
            logger.debug("skipping %s (up-to-date)", output_filename)

    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=False, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Run link.exe and embed the generated manifest with mt.exe."""
        if not self.initialized:
            self.initialize()
        objects, output_dir = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs,
                                        runtime_library_dirs)
        libraries, library_dirs, runtime_library_dirs = fixed_args

        if runtime_library_dirs:
            self.warn("don't know what to do with 'runtime_library_dirs': "
                      + str(runtime_library_dirs))

        lib_opts = gen_lib_options(self,
                                   library_dirs, runtime_library_dirs,
                                   libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            if target_desc == CCompiler.EXECUTABLE:
                # executables don't take the /DLL switch (first element)
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in (export_symbols or []):
                export_opts.append("/EXPORT:" + sym)

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            build_temp = os.path.dirname(objects[0])
            if export_symbols is not None:
                dll_name, dll_ext = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    build_temp,
                    self.library_filename(dll_name))
                ld_args.append('/IMPLIB:' + implib_file)

            # Embedded manifests are recommended - see MSDN article titled
            # "How to: Embed a Manifest Inside a C/C++ Application"
            # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
            # Ask the linker to generate the manifest in the temp dir, so
            # we can embed it later.
            temp_manifest = os.path.join(
                    build_temp,
                    os.path.basename(output_filename) + ".manifest")
            ld_args.append('/MANIFESTFILE:' + temp_manifest)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except PackagingExecError as msg:
                raise LinkError(msg)

            # embed the manifest
            # XXX - this is somewhat fragile - if mt.exe fails, distutils
            # will still consider the DLL up-to-date, but it will not have a
            # manifest.  Maybe we should link to a temp file?  OTOH, that
            # implies a build environment error that shouldn't go undetected.
            if target_desc == CCompiler.EXECUTABLE:
                mfid = 1
            else:
                mfid = 2
                self._remove_visual_c_ref(temp_manifest)
            out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
            try:
                self.spawn(['mt.exe', '-nologo', '-manifest',
                            temp_manifest, out_arg])
            except PackagingExecError as msg:
                raise LinkError(msg)
        else:
            logger.debug("skipping %s (up-to-date)", output_filename)

    def _remove_visual_c_ref(self, manifest_file):
        """Strip Visual C runtime references from *manifest_file* in place.

        I/O errors are deliberately ignored (best-effort cleanup).
        """
        try:
            # Remove references to the Visual C runtime, so they will
            # fall through to the Visual C dependency of Python.exe.
            # This way, when installed for a restricted user (e.g.
            # runtimes are not in WinSxS folder, but in Python's own
            # folder), the runtimes do not need to be in every folder
            # with .pyd's.
            with open(manifest_file) as manifest_f:
                manifest_buf = manifest_f.read()
            pattern = re.compile(
                r"""<assemblyIdentity.*?name=("|')Microsoft\."""\
                r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
                re.DOTALL)
            manifest_buf = re.sub(pattern, "", manifest_buf)
            # Bug fix: raw string; '\s' in a plain string literal is an
            # invalid escape sequence.
            pattern = r"<dependentAssembly>\s*</dependentAssembly>"
            manifest_buf = re.sub(pattern, "", manifest_buf)
            with open(manifest_file, 'w') as manifest_f:
                manifest_f.write(manifest_buf)
        except IOError:
            pass

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        """Return the linker option for an extra library search dir."""
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        """MSVC has no runtime library search path; always raises."""
        raise PackagingPlatformError(
              "don't know how to set runtime library search path for MSVC++")

    def library_option(self, lib):
        """Return the linker argument naming library *lib*."""
        return self.library_filename(lib)

    def find_library_file(self, dirs, lib, debug=False):
        """Search *dirs* for *lib*; return its path or None if not found.

        Prefer a debugging library if found (and requested), but deal
        with it if we don't have one.
        """
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        # Oops, didn't find it in *any* of 'dirs'
        return None

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in os.environ['Path'].split(';'):
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        return exe
diff --git a/Lib/packaging/compiler/msvccompiler.py b/Lib/packaging/compiler/msvccompiler.py
new file mode 100644
index 0000000..97f76bb
--- /dev/null
+++ b/Lib/packaging/compiler/msvccompiler.py
@@ -0,0 +1,636 @@
+"""CCompiler implementation for old Microsoft Visual Studio compilers.
+
+For a compiler compatible with VS 2005 and 2008, use msvc9compiler.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+#   finding DevStudio (through the registry)
+
+
+import sys
+import os
+
+from packaging.errors import (PackagingExecError, PackagingPlatformError,
+                              CompileError, LibError, LinkError)
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_lib_options
+from packaging import logger
+
+_can_read_reg = False
+try:
+    import winreg
+
+    _can_read_reg = True
+    hkey_mod = winreg
+
+    RegOpenKeyEx = winreg.OpenKeyEx
+    RegEnumKey = winreg.EnumKey
+    RegEnumValue = winreg.EnumValue
+    RegError = winreg.error
+
+except ImportError:
+    try:
+        import win32api
+        import win32con
+        _can_read_reg = True
+        hkey_mod = win32con
+
+        RegOpenKeyEx = win32api.RegOpenKeyEx
+        RegEnumKey = win32api.RegEnumKey
+        RegEnumValue = win32api.RegEnumValue
+        RegError = win32api.error
+
+    except ImportError:
+        logger.warning(
+            "can't read registry to find the necessary compiler setting;\n"
+            "make sure that Python modules _winreg, win32api or win32con "
+            "are installed.")
+
+if _can_read_reg:
+    HKEYS = (hkey_mod.HKEY_USERS,
+             hkey_mod.HKEY_CURRENT_USER,
+             hkey_mod.HKEY_LOCAL_MACHINE,
+             hkey_mod.HKEY_CLASSES_ROOT)
+
+
+def read_keys(base, key):
+    """Return list of registry keys."""
+
+    # Returns None (not []) when *key* cannot be opened under *base*.
+    try:
+        handle = RegOpenKeyEx(base, key)
+    except RegError:
+        return None
+    L = []
+    i = 0
+    while True:
+        try:
+            k = RegEnumKey(handle, i)
+        except RegError:
+            # RegEnumKey raises once the index runs past the last subkey.
+            break
+        L.append(k)
+        i = i + 1
+    return L
+
+
+def read_values(base, key):
+    """Return dict of registry keys and values.
+
+    All names are converted to lowercase.
+    """
+    # Returns None (not {}) when *key* cannot be opened under *base*.
+    try:
+        handle = RegOpenKeyEx(base, key)
+    except RegError:
+        return None
+    d = {}
+    i = 0
+    while True:
+        try:
+            # The third element (registry value type) is unpacked but
+            # unused; note it shadows the 'type' builtin in this scope.
+            name, value, type = RegEnumValue(handle, i)
+        except RegError:
+            break
+        name = name.lower()
+        d[convert_mbcs(name)] = convert_mbcs(value)
+        i = i + 1
+    return d
+
+
+def convert_mbcs(s):
+    # Best-effort conversion of registry data; objects without an 'encode'
+    # method (ints, bytes, ...) pass through unchanged.
+    # NOTE(review): on Python 3, str.encode("mbcs") yields bytes -- confirm
+    # callers expect bytes for string-valued registry entries.
+    enc = getattr(s, "encode", None)
+    if enc is not None:
+        try:
+            s = enc("mbcs")
+        except UnicodeError:
+            pass
+    return s
+
+
+class MacroExpander:
+
+    def __init__(self, version):
+        self.macros = {}
+        self.load_macros(version)
+
+    def set_macro(self, macro, path, key):
+        for base in HKEYS:
+            d = read_values(base, path)
+            if d:
+                self.macros["$(%s)" % macro] = d[key]
+                break
+
+    def load_macros(self, version):
+        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
+        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
+        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
+        net = r"Software\Microsoft\.NETFramework"
+        self.set_macro("FrameworkDir", net, "installroot")
+        try:
+            if version > 7.0:
+                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
+            else:
+                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
+        except KeyError:
+            raise PackagingPlatformError(
+"""Python was built with Visual Studio 2003; extensions must be built with
+a compiler than can generate compatible binaries. Visual Studio 2003 was
+not found on this system. If you have Cygwin installed, you can try
+compiling with MingW32, by passing "-c mingw32" to setup.py.""")
+# XXX update this comment for setup.cfg
+
+        p = r"Software\Microsoft\NET Framework Setup\Product"
+        for base in HKEYS:
+            try:
+                h = RegOpenKeyEx(base, p)
+            except RegError:
+                continue
+            key = RegEnumKey(h, 0)
+            d = read_values(base, r"%s\%s" % (p, key))
+            self.macros["$(FrameworkVersion)"] = d["version"]
+
+    def sub(self, s):
+        for k, v in self.macros.items():
+            s = s.replace(k, v)
+        return s
+
+
+def get_build_version():
+    """Return the version of MSVC that was used to build Python.
+
+    For Python 2.3 and up, the version number is included in
+    sys.version.  For earlier versions, assume the compiler is MSVC 6.
+    """
+
+    # sys.version embeds e.g. "[MSC v.1310 32 bit (Intel)]"; "13" - 6 -> 7,
+    # and the third digit supplies the minor version (7.1).
+    prefix = "MSC v."
+    i = sys.version.find(prefix)
+    if i == -1:
+        return 6
+    i = i + len(prefix)
+    s, rest = sys.version[i:].split(" ", 1)
+    majorVersion = int(s[:-2]) - 6
+    minorVersion = int(s[2:3]) / 10.0
+    # I don't think paths are affected by minor version in version 6
+    if majorVersion == 6:
+        minorVersion = 0
+    if majorVersion >= 6:
+        return majorVersion + minorVersion
+    # else we don't know what version of the compiler this is
+    return None
+
+
+def get_build_architecture():
+    """Return the processor architecture.
+
+    Possible results are "Intel", "Itanium", or "AMD64".
+    """
+
+    # Parsed from sys.version, e.g. "... 32 bit (Intel)]".
+    prefix = " bit ("
+    i = sys.version.find(prefix)
+    if i == -1:
+        return "Intel"
+    j = sys.version.find(")", i)
+    return sys.version[i+len(prefix):j]
+
+
+def normalize_and_reduce_paths(paths):
+    """Return a list of normalized paths with duplicates removed.
+
+    The current order of paths is maintained.
+    """
+    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
+    reduced_paths = []
+    for p in paths:
+        np = os.path.normpath(p)
+        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
+        if np not in reduced_paths:
+            reduced_paths.append(np)
+    return reduced_paths
+
+
+class MSVCCompiler(CCompiler):
+    """Concrete class that implements an interface to Microsoft Visual C++,
+       as defined by the CCompiler abstract class."""
+
+    name = 'msvc'
+    description = "Microsoft Visual C++"
+
+    # Just set this so CCompiler's constructor doesn't barf.  We currently
+    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+    # as it really isn't necessary for this sort of single-compiler class.
+    # Would be nice to have a consistent interface with UnixCCompiler,
+    # though, so it's worth thinking about.
+    executables = {}
+
+    # Private class data (need to distinguish C from C++ source for compiler)
+    _c_extensions = ['.c']
+    _cpp_extensions = ['.cc', '.cpp', '.cxx']
+    _rc_extensions = ['.rc']
+    _mc_extensions = ['.mc']
+
+    # Needed for the filename generation methods provided by the
+    # base class, CCompiler.
+    src_extensions = (_c_extensions + _cpp_extensions +
+                      _rc_extensions + _mc_extensions)
+    res_extension = '.res'
+    obj_extension = '.obj'
+    static_lib_extension = '.lib'
+    shared_lib_extension = '.dll'
+    static_lib_format = shared_lib_format = '%s%s'
+    exe_extension = '.exe'
+
+    def __init__(self, verbose=0, dry_run=False, force=False):
+        CCompiler.__init__(self, verbose, dry_run, force)
+        # Version and architecture of the compiler Python itself was
+        # built with, derived from sys.version (see helpers above).
+        self.__version = get_build_version()
+        self.__arch = get_build_architecture()
+        if self.__arch == "Intel":
+            # x86
+            if self.__version >= 7:
+                self.__root = r"Software\Microsoft\VisualStudio"
+                self.__macros = MacroExpander(self.__version)
+            else:
+                self.__root = r"Software\Microsoft\Devstudio"
+            self.__product = "Visual Studio version %s" % self.__version
+        else:
+            # Win64. Assume this was built with the platform SDK
+            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)
+
+        # Expensive environment/registry probing is deferred to initialize().
+        self.initialized = False
+
+    def initialize(self):
+        """Locate the compiler binaries and build the compile/link flags.
+
+        Run lazily (from compile/create_static_lib/link) because it
+        probes the environment and the registry and may raise.
+        """
+        self.__paths = []
+        if ("DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and
+            self.find_exe("cl.exe")):
+            # Assume that the SDK set up everything alright; don't try to be
+            # smarter
+            self.cc = "cl.exe"
+            self.linker = "link.exe"
+            self.lib = "lib.exe"
+            self.rc = "rc.exe"
+            self.mc = "mc.exe"
+        else:
+            # Fall back to the registry-recorded Visual Studio paths.
+            self.__paths = self.get_msvc_paths("path")
+
+            if len(self.__paths) == 0:
+                raise PackagingPlatformError("Python was built with %s "
+                    "and extensions need to be built with the same "
+                    "version of the compiler, but it isn't installed." %
+                    self.__product)
+
+            self.cc = self.find_exe("cl.exe")
+            self.linker = self.find_exe("link.exe")
+            self.lib = self.find_exe("lib.exe")
+            self.rc = self.find_exe("rc.exe")   # resource compiler
+            self.mc = self.find_exe("mc.exe")   # message compiler
+            self.set_path_env_var('lib')
+            self.set_path_env_var('include')
+
+        # extend the MSVC path with the current path
+        try:
+            for p in os.environ['path'].split(';'):
+                self.__paths.append(p)
+        except KeyError:
+            pass
+        self.__paths = normalize_and_reduce_paths(self.__paths)
+        os.environ['path'] = ';'.join(self.__paths)
+
+        self.preprocess_options = None
+        if self.__arch == "Intel":
+            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GX',
+                                    '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
+                                          '/Z7', '/D_DEBUG']
+        else:
+            # Win64
+            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GS-',
+                                    '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
+                                          '/Z7', '/D_DEBUG']
+
+        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
+        if self.__version >= 7:
+            self.ldflags_shared_debug = [
+                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
+                ]
+        else:
+            # Older linkers also need /pdb:None in debug builds.
+            self.ldflags_shared_debug = [
+                '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
+                ]
+        self.ldflags_static = [ '/nologo']
+
+        self.initialized = True
+
+    # -- Worker methods ------------------------------------------------
+
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
+        """Map source filenames to the object/resource files they produce.
+
+        .rc and .mc sources yield .res files; everything else yields .obj.
+        Raises CompileError for unknown source extensions.
+        """
+        # Copied from ccompiler.py, extended to return .res as 'object'-file
+        # for .rc input file
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            base, ext = os.path.splitext(src_name)
+            base = os.path.splitdrive(base)[1]  # Chop off the drive
+            base = base[os.path.isabs(base):]  # If abs, chop off leading /
+            if ext not in self.src_extensions:
+                # Better to raise an exception instead of silently continuing
+                # and later complain about sources and targets having
+                # different lengths
+                raise CompileError("Don't know how to compile %s" % src_name)
+            if strip_dir:
+                base = os.path.basename(base)
+            if ext in self._rc_extensions:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.res_extension))
+            elif ext in self._mc_extensions:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.res_extension))
+            else:
+                obj_names.append(os.path.join(output_dir,
+                                              base + self.obj_extension))
+        return obj_names
+
+    def compile(self, sources,
+                output_dir=None, macros=None, include_dirs=None, debug=False,
+                extra_preargs=None, extra_postargs=None, depends=None):
+        """Compile 'sources' and return the list of object files.
+
+        C/C++ sources go through cl.exe; .rc sources through rc.exe and
+        .mc sources through mc.exe then rc.exe (producing .res files).
+        Raises CompileError on failure or unknown source type.
+        """
+        if not self.initialized:
+            self.initialize()
+        macros, objects, extra_postargs, pp_opts, build = \
+                self._setup_compile(output_dir, macros, include_dirs, sources,
+                                    depends, extra_postargs)
+
+        compile_opts = extra_preargs or []
+        compile_opts.append('/c')
+        if debug:
+            compile_opts.extend(self.compile_options_debug)
+        else:
+            compile_opts.extend(self.compile_options)
+
+        for obj in objects:
+            try:
+                src, ext = build[obj]
+            except KeyError:
+                # Object is up to date / not scheduled for rebuild.
+                continue
+            if debug:
+                # pass the full pathname to MSVC in debug mode,
+                # this allows the debugger to find the source file
+                # without asking the user to browse for it
+                src = os.path.abspath(src)
+
+            if ext in self._c_extensions:
+                input_opt = "/Tc" + src
+            elif ext in self._cpp_extensions:
+                input_opt = "/Tp" + src
+            elif ext in self._rc_extensions:
+                # compile .RC to .RES file
+                input_opt = src
+                output_opt = "/fo" + obj
+                try:
+                    self.spawn([self.rc] + pp_opts +
+                               [output_opt] + [input_opt])
+                except PackagingExecError as msg:
+                    raise CompileError(msg)
+                continue
+            elif ext in self._mc_extensions:
+
+                # Compile .MC to .RC file to .RES file.
+                #   * '-h dir' specifies the directory for the
+                #     generated include file
+                #   * '-r dir' specifies the target directory of the
+                #     generated RC file and the binary message resource
+                #     it includes
+                #
+                # For now (since there are no options to change this),
+                # we use the source-directory for the include file and
+                # the build directory for the RC file and message
+                # resources. This works at least for win32all.
+
+                h_dir = os.path.dirname(src)
+                rc_dir = os.path.dirname(obj)
+                try:
+                    # first compile .MC to .RC and .H file
+                    self.spawn([self.mc] +
+                               ['-h', h_dir, '-r', rc_dir] + [src])
+                    base, _ = os.path.splitext(os.path.basename(src))
+                    rc_file = os.path.join(rc_dir, base + '.rc')
+                    # then compile .RC to .RES file
+                    self.spawn([self.rc] +
+                                ["/fo" + obj] + [rc_file])
+
+                except PackagingExecError as msg:
+                    raise CompileError(msg)
+                continue
+            else:
+                # how to handle this file?
+                raise CompileError(
+                    "Don't know how to compile %s to %s" %
+                    (src, obj))
+
+            # Only C/C++ sources reach this point (.rc/.mc 'continue' above).
+            output_opt = "/Fo" + obj
+            try:
+                self.spawn([self.cc] + compile_opts + pp_opts +
+                           [input_opt, output_opt] +
+                           extra_postargs)
+            except PackagingExecError as msg:
+                raise CompileError(msg)
+
+        return objects
+
+    def create_static_lib(self, objects, output_libname, output_dir=None,
+                          debug=False, target_lang=None):
+        """Run lib.exe over 'objects' to produce a static .lib file.
+
+        Skips the link when the output is newer than all objects.
+        Raises LibError if lib.exe fails.
+        """
+        if not self.initialized:
+            self.initialize()
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        output_filename = \
+            self.library_filename(output_libname, output_dir=output_dir)
+
+        if self._need_link(objects, output_filename):
+            lib_args = objects + ['/OUT:' + output_filename]
+            if debug:
+                pass                    # XXX what goes here?
+            try:
+                self.spawn([self.lib] + lib_args)
+            except PackagingExecError as msg:
+                raise LibError(msg)
+
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    def link(self, target_desc, objects, output_filename, output_dir=None,
+             libraries=None, library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=False, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+        """Run link.exe to produce an executable or shared library.
+
+        Skips the link when the output is newer than all objects.
+        Raises LinkError if link.exe fails.
+        """
+        if not self.initialized:
+            self.initialize()
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        libraries, library_dirs, runtime_library_dirs = \
+            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+        if runtime_library_dirs:
+            # See runtime_library_dir_option: MSVC has no rpath equivalent.
+            self.warn("don't know what to do with 'runtime_library_dirs': %s"
+                      % (runtime_library_dirs,))
+
+        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
+                                   libraries)
+        if output_dir is not None:
+            output_filename = os.path.join(output_dir, output_filename)
+
+        if self._need_link(objects, output_filename):
+
+            # Executables drop the leading '/DLL' from the shared flags.
+            if target_desc == CCompiler.EXECUTABLE:
+                if debug:
+                    ldflags = self.ldflags_shared_debug[1:]
+                else:
+                    ldflags = self.ldflags_shared[1:]
+            else:
+                if debug:
+                    ldflags = self.ldflags_shared_debug
+                else:
+                    ldflags = self.ldflags_shared
+
+            export_opts = []
+            for sym in (export_symbols or []):
+                export_opts.append("/EXPORT:" + sym)
+
+            ld_args = (ldflags + lib_opts + export_opts +
+                       objects + ['/OUT:' + output_filename])
+
+            # The MSVC linker generates .lib and .exp files, which cannot be
+            # suppressed by any linker switches. The .lib files may even be
+            # needed! Make sure they are generated in the temporary build
+            # directory. Since they have different names for debug and release
+            # builds, they can go into the same directory.
+            if export_symbols is not None:
+                dll_name, dll_ext = os.path.splitext(
+                    os.path.basename(output_filename))
+                implib_file = os.path.join(
+                    os.path.dirname(objects[0]),
+                    self.library_filename(dll_name))
+                ld_args.append('/IMPLIB:' + implib_file)
+
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+
+            self.mkpath(os.path.dirname(output_filename))
+            try:
+                self.spawn([self.linker] + ld_args)
+            except PackagingExecError as msg:
+                raise LinkError(msg)
+
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options() function, in
+    # ccompiler.py.
+
+    def library_dir_option(self, dir):
+        # MSVC spelling of "add this directory to the library search path".
+        return "/LIBPATH:" + dir
+
+    def runtime_library_dir_option(self, dir):
+        # MSVC has no rpath-style runtime search path; always an error.
+        raise PackagingPlatformError("don't know how to set runtime library search path for MSVC++")
+
+    def library_option(self, lib):
+        # Libraries are referenced by filename (e.g. "foo.lib"), not "-lfoo".
+        return self.library_filename(lib)
+
+    def find_library_file(self, dirs, lib, debug=False):
+        """Search 'dirs' for 'lib'; return the full path or None.
+
+        When 'debug' is true, a "<lib>_d" debugging variant is preferred
+        over the plain library within each directory.
+        """
+        # Prefer a debugging library if found (and requested), but deal
+        # with it if we don't have one.
+        if debug:
+            try_names = [lib + "_d", lib]
+        else:
+            try_names = [lib]
+        for dir in dirs:
+            for name in try_names:
+                libfile = os.path.join(dir, self.library_filename(name))
+                if os.path.exists(libfile):
+                    return libfile
+        else:
+            # Oops, didn't find it in *any* of 'dirs'
+            # (NOTE: the loop has no 'break', so this 'else' always runs
+            # when no file was returned; it is equivalent to a plain return.)
+            return None
+
+    # Helper methods for using the MSVC registry settings
+
+    def find_exe(self, exe):
+        """Return path to an MSVC executable program.
+
+        Tries to find the program in several places: first, one of the
+        MSVC program search paths from the registry; next, the directories
+        in the PATH environment variable.  If any of those work, return an
+        absolute path that is known to exist.  If none of them work, just
+        return the original program name, 'exe'.
+        """
+
+        # Registry-derived VS tool directories take precedence over PATH.
+        for p in self.__paths:
+            fn = os.path.join(os.path.abspath(p), exe)
+            if os.path.isfile(fn):
+                return fn
+
+        # didn't find it; try existing path
+        for p in os.environ['Path'].split(';'):
+            fn = os.path.join(os.path.abspath(p), exe)
+            if os.path.isfile(fn):
+                return fn
+
+        return exe
+
+    def get_msvc_paths(self, path, platform='x86'):
+        """Get a list of devstudio directories (include, lib or path).
+
+        Return a list of strings.  The list will be empty if unable to
+        access the registry or appropriate registry keys not found.
+        """
+
+        if not _can_read_reg:
+            return []
+
+        # Registry value names are e.g. "include dirs", "library dirs".
+        path = path + " dirs"
+        if self.__version >= 7:
+            key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
+                   % (self.__root, self.__version))
+        else:
+            key = (r"%s\6.0\Build System\Components\Platforms"
+                   r"\Win32 (%s)\Directories" % (self.__root, platform))
+
+        for base in HKEYS:
+            d = read_values(base, key)
+            if d:
+                # VS7+ stores values containing $(Macro) placeholders.
+                if self.__version >= 7:
+                    return self.__macros.sub(d[path]).split(";")
+                else:
+                    return d[path].split(";")
+        # MSVC 6 seems to create the registry entries we need only when
+        # the GUI is run.
+        if self.__version == 6:
+            for base in HKEYS:
+                if read_values(base, r"%s\6.0" % self.__root) is not None:
+                    self.warn("It seems you have Visual Studio 6 installed, "
+                        "but the expected registry settings are not present.\n"
+                        "You must at least run the Visual Studio GUI once "
+                        "so that these entries are created.")
+                    break
+        return []
+
+    def set_path_env_var(self, name):
+        """Set environment variable 'name' to an MSVC path type value.
+
+        This is equivalent to a SET command prior to execution of spawned
+        commands.
+        """
+
+        # The registry calls the 'lib' value "library dirs".
+        if name == "lib":
+            p = self.get_msvc_paths("library")
+        else:
+            p = self.get_msvc_paths(name)
+        if p:
+            os.environ[name] = ';'.join(p)
+
+
+if get_build_version() >= 8.0:
+    logger.debug("importing new compiler from distutils.msvc9compiler")
+    OldMSVCCompiler = MSVCCompiler
+    from packaging.compiler.msvc9compiler import MSVCCompiler
+    # get_build_architecture not really relevant now we support cross-compile
+    from packaging.compiler.msvc9compiler import MacroExpander
diff --git a/Lib/packaging/compiler/unixccompiler.py b/Lib/packaging/compiler/unixccompiler.py
new file mode 100644
index 0000000..8c24c0f
--- /dev/null
+++ b/Lib/packaging/compiler/unixccompiler.py
@@ -0,0 +1,339 @@
+"""CCompiler implementation for Unix compilers.
+
+This module contains the UnixCCompiler class, a subclass of CCompiler
+that handles the "typical" Unix-style command-line C compiler:
+  * macros defined with -Dname[=value]
+  * macros undefined with -Uname
+  * include search directories specified with -Idir
+  * libraries specified with -lllib
+  * library search directories specified with -Ldir
+  * compile handled by 'cc' (or similar) executable with -c option:
+    compiles .c to .o
+  * link static library handled by 'ar' command (possibly with 'ranlib')
+  * link shared library handled by 'cc -shared'
+"""
+
+import os, sys
+
+from packaging.util import newer
+from packaging.compiler.ccompiler import CCompiler
+from packaging.compiler import gen_preprocess_options, gen_lib_options
+from packaging.errors import (PackagingExecError, CompileError,
+                               LibError, LinkError)
+from packaging import logger
+import sysconfig
+
+
+# XXX Things not currently handled:
+#   * optimization/debug/warning flags; we just use whatever's in Python's
+#     Makefile and live with it.  Is this adequate?  If not, we might
+#     have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
+#     SunCCompiler, and I suspect down that road lies madness.
+#   * even if we don't know a warning flag from an optimization flag,
+#     we need some way for outsiders to feed preprocessor/compiler/linker
+#     flags in to us -- eg. a sysadmin might want to mandate certain flags
+#     via a site config file, or a user might want to set something for
+#     compiling this module distribution only via the setup.py command
+#     line, whatever.  As long as these options come from something on the
+#     current system, they can be as system-dependent as they like, and we
+#     should just happily stuff them into the preprocessor/compiler/linker
+#     options and carry on.
+
+def _darwin_compiler_fixup(compiler_so, cc_args):
+    """
+    This function will strip '-isysroot PATH' and '-arch ARCH' from the
+    compile flags if the user has specified one of them in
+    extra_compile_flags.
+
+    This is needed because '-arch ARCH' adds another architecture to the
+    build, without a way to remove an architecture. Furthermore GCC will
+    barf if multiple '-isysroot' arguments are present.
+    """
+    stripArch = stripSysroot = False
+
+    # Work on a copy; the caller's list must not be mutated.
+    compiler_so = list(compiler_so)
+    kernel_version = os.uname()[2] # 8.4.3
+    major_version = int(kernel_version.split('.')[0])
+
+    if major_version < 8:
+        # OSX before 10.4.0, these don't support -arch and -isysroot at
+        # all.
+        stripArch = stripSysroot = True
+    else:
+        stripArch = '-arch' in cc_args
+        stripSysroot = '-isysroot' in cc_args
+
+    if stripArch or 'ARCHFLAGS' in os.environ:
+        # Remove every '-arch ARCH' pair (there may be several).
+        while True:
+            try:
+                index = compiler_so.index('-arch')
+                # Strip this argument and the next one:
+                del compiler_so[index:index+2]
+            except ValueError:
+                break
+
+    if 'ARCHFLAGS' in os.environ and not stripArch:
+        # User specified different -arch flags in the environ,
+        # see also the sysconfig
+        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
+
+    if stripSysroot:
+        try:
+            index = compiler_so.index('-isysroot')
+            # Strip this argument and the next one:
+            del compiler_so[index:index+2]
+        except ValueError:
+            pass
+
+    # Check if the SDK that is used during compilation actually exists,
+    # the universal build requires the usage of a universal SDK and not all
+    # users have that installed by default.
+    sysroot = None
+    if '-isysroot' in cc_args:
+        idx = cc_args.index('-isysroot')
+        sysroot = cc_args[idx+1]
+    elif '-isysroot' in compiler_so:
+        idx = compiler_so.index('-isysroot')
+        sysroot = compiler_so[idx+1]
+
+    if sysroot and not os.path.isdir(sysroot):
+        logger.warning(
+            "compiling with an SDK that doesn't seem to exist: %r;\n"
+            "please check your Xcode installation", sysroot)
+
+    return compiler_so
+
+class UnixCCompiler(CCompiler):
+    """CCompiler for the typical Unix cc/ar command-line toolchain."""
+
+    name = 'unix'
+    description = 'Standard UNIX-style compiler'
+
+    # These are used by CCompiler in two places: the constructor sets
+    # instance attributes 'preprocessor', 'compiler', etc. from them, and
+    # 'set_executable()' allows any of these to be set.  The defaults here
+    # are pretty generic; they will probably have to be set by an outsider
+    # (eg. using information discovered by the sysconfig about building
+    # Python extensions).
+    executables = {'preprocessor' : None,
+                   'compiler'     : ["cc"],
+                   'compiler_so'  : ["cc"],
+                   'compiler_cxx' : ["cc"],
+                   'linker_so'    : ["cc", "-shared"],
+                   'linker_exe'   : ["cc"],
+                   'archiver'     : ["ar", "-cr"],
+                   'ranlib'       : None,
+                  }
+
+    if sys.platform[:6] == "darwin":
+        executables['ranlib'] = ["ranlib"]
+
+    # Needed for the filename generation methods provided by the base
+    # class, CCompiler.  NB. whoever instantiates/uses a particular
+    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
+    # reasonable common default here, but it's not necessarily used on all
+    # Unices!
+
+    src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
+    obj_extension = ".o"
+    static_lib_extension = ".a"
+    shared_lib_extension = ".so"
+    dylib_lib_extension = ".dylib"
+    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
+    if sys.platform == "cygwin":
+        exe_extension = ".exe"
+
+    def preprocess(self, source,
+                   output_file=None, macros=None, include_dirs=None,
+                   extra_preargs=None, extra_postargs=None):
+        """Preprocess 'source' to 'output_file' (stdout when None).
+
+        Raises CompileError if the preprocessor fails.
+        """
+        ignore, macros, include_dirs = \
+            self._fix_compile_args(None, macros, include_dirs)
+        pp_opts = gen_preprocess_options(macros, include_dirs)
+        pp_args = self.preprocessor + pp_opts
+        if output_file:
+            pp_args.extend(('-o', output_file))
+        if extra_preargs:
+            pp_args[:0] = extra_preargs
+        if extra_postargs:
+            pp_args.extend(extra_postargs)
+        pp_args.append(source)
+
+        # We need to preprocess: either we're being forced to, or we're
+        # generating output to stdout, or there's a target output file and
+        # the source file is newer than the target (or the target doesn't
+        # exist).
+        if self.force or output_file is None or newer(source, output_file):
+            if output_file:
+                self.mkpath(os.path.dirname(output_file))
+            try:
+                self.spawn(pp_args)
+            except PackagingExecError as msg:
+                raise CompileError(msg)
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        # Compile a single source file to 'obj'; raises CompileError on
+        # failure.
+        compiler_so = self.compiler_so
+        if sys.platform == 'darwin':
+            # Strip conflicting -arch/-isysroot flags on OS X (see helper).
+            compiler_so = _darwin_compiler_fixup(compiler_so, cc_args + extra_postargs)
+        try:
+            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
+                       extra_postargs)
+        except PackagingExecError as msg:
+            raise CompileError(msg)
+
+    def create_static_lib(self, objects, output_libname,
+                          output_dir=None, debug=False, target_lang=None):
+        """Archive 'objects' into a static library, then run ranlib if set.
+
+        Raises LibError if ranlib fails.
+        """
+        # NOTE(review): the archiver spawn itself is not wrapped in
+        # try/except, unlike the ranlib call below -- confirm intentional.
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+
+        output_filename = \
+            self.library_filename(output_libname, output_dir=output_dir)
+
+        if self._need_link(objects, output_filename):
+            self.mkpath(os.path.dirname(output_filename))
+            self.spawn(self.archiver +
+                       [output_filename] +
+                       objects + self.objects)
+
+            # Not many Unices required ranlib anymore -- SunOS 4.x is, I
+            # think the only major Unix that does.  Maybe we need some
+            # platform intelligence here to skip ranlib if it's not
+            # needed -- or maybe Python's configure script took care of
+            # it for us, hence the check for leading colon.
+            if self.ranlib:
+                try:
+                    self.spawn(self.ranlib + [output_filename])
+                except PackagingExecError as msg:
+                    raise LibError(msg)
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    def link(self, target_desc, objects,
+             output_filename, output_dir=None, libraries=None,
+             library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=False, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        libraries, library_dirs, runtime_library_dirs = \
+            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
+                                   libraries)
+        if type(output_dir) not in (str, type(None)):
+            raise TypeError("'output_dir' must be a string or None")
+        if output_dir is not None:
+            output_filename = os.path.join(output_dir, output_filename)
+
+        if self._need_link(objects, output_filename):
+            ld_args = (objects + self.objects +
+                       lib_opts + ['-o', output_filename])
+            if debug:
+                ld_args[:0] = ['-g']
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+            self.mkpath(os.path.dirname(output_filename))
+            try:
+                if target_desc == CCompiler.EXECUTABLE:
+                    linker = self.linker_exe[:]
+                else:
+                    linker = self.linker_so[:]
+                if target_lang == "c++" and self.compiler_cxx:
+                    # skip over environment variable settings if /usr/bin/env
+                    # is used to set up the linker's environment.
+                    # This is needed on OSX. Note: this assumes that the
+                    # normal and C++ compiler have the same environment
+                    # settings.
+                    i = 0
+                    if os.path.basename(linker[0]) == "env":
+                        i = 1
+                        while '=' in linker[i]:
+                            i = i + 1
+
+                    linker[i] = self.compiler_cxx[i]
+
+                if sys.platform == 'darwin':
+                    linker = _darwin_compiler_fixup(linker, ld_args)
+
+                self.spawn(linker + ld_args)
+            except PackagingExecError as msg:
+                raise LinkError(msg)
+        else:
+            logger.debug("skipping %s (up-to-date)", output_filename)
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function, in
+    # ccompiler.py.
+
+    def library_dir_option(self, dir):
+        return "-L" + dir
+
+    def _is_gcc(self, compiler_name):
+        return "gcc" in compiler_name or "g++" in compiler_name
+
+    def runtime_library_dir_option(self, dir):
+        # XXX Hackish, at the very least.  See Python bug #445902:
+        # http://sourceforge.net/tracker/index.php
+        #   ?func=detail&aid=445902&group_id=5470&atid=105470
+        # Linkers on different platforms need different options to
+        # specify that directories need to be added to the list of
+        # directories searched for dependencies when a dynamic library
+        # is sought.  GCC on GNU systems (Linux, FreeBSD, ...) has to
+        # be told to pass the -R option through to the linker, whereas
+        # other compilers and gcc on other systems just know this.
+        # Other compilers may need something slightly different.  At
+        # this time, there's no way to determine this information from
+        # the configuration data stored in the Python installation, so
+        # we use this hack.
+
+        compiler = os.path.basename(sysconfig.get_config_var("CC"))
+        if sys.platform[:6] == "darwin":
+            # MacOSX's linker doesn't understand the -R flag at all
+            return "-L" + dir
+        elif sys.platform[:5] == "hp-ux":
+            if self._is_gcc(compiler):
+                return ["-Wl,+s", "-L" + dir]
+            return ["+s", "-L" + dir]
+        elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
+            return ["-rpath", dir]
+        elif self._is_gcc(compiler):
+            # gcc on non-GNU systems does not need -Wl, but can
+            # use it anyway.  Since distutils has always passed in
+            # -Wl whenever gcc was used in the past it is probably
+            # safest to keep doing so.
+            if sysconfig.get_config_var("GNULD") == "yes":
+                # GNU ld needs an extra option to get a RUNPATH
+                # instead of just an RPATH.
+                return "-Wl,--enable-new-dtags,-R" + dir
+            else:
+                return "-Wl,-R" + dir
+        elif sys.platform[:3] == "aix":
+            return "-blibpath:" + dir
+        else:
+            # No idea how --enable-new-dtags would be passed on to
+            # ld if this system was using GNU ld.  Don't know if a
+            # system like this even exists.
+            return "-R" + dir
+
+    def library_option(self, lib):
+        return "-l" + lib
+
+    def find_library_file(self, dirs, lib, debug=False):
+        shared_f = self.library_filename(lib, lib_type='shared')
+        dylib_f = self.library_filename(lib, lib_type='dylib')
+        static_f = self.library_filename(lib, lib_type='static')
+
+        for dir in dirs:
+            shared = os.path.join(dir, shared_f)
+            dylib = os.path.join(dir, dylib_f)
+            static = os.path.join(dir, static_f)
+            # We're second-guessing the linker here, with not much hard
+            # data to go on: GCC seems to prefer the shared library, so I'm
+            # assuming that *all* Unix C compilers do.  And of course I'm
+            # ignoring even GCC's "-static" option.  So sue me.
+            if os.path.exists(dylib):
+                return dylib
+            elif os.path.exists(shared):
+                return shared
+            elif os.path.exists(static):
+                return static
+
+        # Oops, didn't find it in *any* of 'dirs'
+        return None
diff --git a/Lib/packaging/config.py b/Lib/packaging/config.py
new file mode 100644
index 0000000..9239f4a
--- /dev/null
+++ b/Lib/packaging/config.py
@@ -0,0 +1,357 @@
+"""Utilities to find and read config files used by packaging."""
+
+import os
+import sys
+import logging
+
+from shlex import split
+from configparser import RawConfigParser
+from packaging import logger
+from packaging.errors import PackagingOptionError
+from packaging.compiler.extension import Extension
+from packaging.util import check_environ, iglob, resolve_name, strtobool
+from packaging.compiler import set_compiler
+from packaging.command import set_command
+from packaging.markers import interpret
+
+
+def _pop_values(values_dct, key):
+    """Remove values from the dictionary and convert them as a list"""
+    vals_str = values_dct.pop(key, '')
+    if not vals_str:
+        return
+    fields = []
+    for field in vals_str.split(os.linesep):
+        tmp_vals = field.split('--')
+        if len(tmp_vals) == 2 and not interpret(tmp_vals[1]):
+            continue
+        fields.append(tmp_vals[0])
+    # Get bash options like `gcc -print-file-name=libgcc.a` XXX bash options?
+    vals = split(' '.join(fields))
+    if vals:
+        return vals
+
+
+def _rel_path(base, path):
+    assert path.startswith(base)
+    return path[len(base):].lstrip('/')
+
+
def get_resources_dests(resources_root, rules):
    """Find destination paths for resource files.

    *rules* is an iterable of (base, suffix, dest) glob triples rooted at
    *resources_root*; a dest of None removes previously matched files.
    Returns a dict mapping each resource file (relative to the root) to
    its destination path.
    """
    destinations = {}
    for base, suffix, dest in rules:
        for abs_base in iglob(os.path.join(resources_root, base)):
            for abs_path in iglob(os.path.join(abs_base, suffix)):
                resource_file = _rel_path(resources_root, abs_path)
                if dest is None:
                    # an exclusion rule: drop the entry if it was matched
                    destinations.pop(resource_file, None)
                else:
                    destinations[resource_file] = os.path.join(
                        dest, _rel_path(abs_base, abs_path))
    return destinations
+
+
class Config:
    """Read configuration files and fill in a Distribution instance.

    Values come from up to three files located by find_config_files():
    the system-wide packaging.cfg, the per-user pydistutils config and
    the project's setup.cfg.
    """

    def __init__(self, dist):
        # the packaging Distribution object being configured
        self.dist = dist
        # callable named by [global] setup_hook in setup.cfg; it is given
        # the parsed file content (a dict of section dicts)
        self.setup_hook = None

    def run_hook(self, config):
        """Call the setup hook, if one was loaded, with the parsed config."""
        if self.setup_hook is None:
            return
        # the hook gets only the config
        self.setup_hook(config)

    def find_config_files(self):
        """Find as many configuration files as should be processed for this
        platform, and return a list of filenames in the order in which they
        should be parsed.  The filenames returned are guaranteed to exist
        (modulo nasty race conditions).

        There are three possible config files: packaging.cfg in the
        Packaging installation directory (ie. where the top-level
        Packaging __inst__.py file lives), a file in the user's home
        directory named .pydistutils.cfg on Unix and pydistutils.cfg
        on Windows/Mac; and setup.cfg in the current directory.

        The file in the user's home directory can be disabled with the
        --no-user-cfg option.
        """
        files = []
        check_environ()

        # Where to look for the system-wide Packaging config file
        sys_dir = os.path.dirname(sys.modules['packaging'].__file__)

        # Look for the system config file
        sys_file = os.path.join(sys_dir, "packaging.cfg")
        if os.path.isfile(sys_file):
            files.append(sys_file)

        # What to call the per-user config file
        if os.name == 'posix':
            user_filename = ".pydistutils.cfg"
        else:
            user_filename = "pydistutils.cfg"

        # And look for the user config file
        if self.dist.want_user_cfg:
            user_file = os.path.join(os.path.expanduser('~'), user_filename)
            if os.path.isfile(user_file):
                files.append(user_file)

        # All platforms support local setup.cfg
        local_file = "setup.cfg"
        if os.path.isfile(local_file):
            files.append(local_file)

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("using config files: %s", ', '.join(files))
        return files

    def _convert_metadata(self, name, value):
        # converts a value found in setup.cfg into a valid metadata
        # XXX not implemented yet: values pass through unchanged
        return value

    def _multiline(self, value):
        """Split a multi-line option value into a list of non-empty,
        stripped lines."""
        value = [v for v in
                 [v.strip() for v in value.split('\n')]
                 if v != '']
        return value

    def _read_setup_cfg(self, parser, cfg_filename):
        """Copy values parsed from setup.cfg onto self.dist.

        Handles the [global], [metadata], [files] and [extension=*]
        sections; raises PackagingOptionError on invalid combinations.
        """
        cfg_directory = os.path.dirname(os.path.abspath(cfg_filename))
        content = {}
        for section in parser.sections():
            content[section] = dict(parser.items(section))

        # global:setup_hook is called *first*
        if 'global' in content:
            if 'setup_hook' in content['global']:
                setup_hook = content['global']['setup_hook']
                try:
                    self.setup_hook = resolve_name(setup_hook)
                except ImportError as e:
                    logger.warning('could not import setup_hook: %s',
                            e.args[0])
                else:
                    self.run_hook(content)

        metadata = self.dist.metadata

        # setting the metadata values
        if 'metadata' in content:
            for key, value in content['metadata'].items():
                key = key.replace('_', '-')
                if metadata.is_multi_field(key):
                    value = self._multiline(value)

                if key == 'project-url':
                    value = [(label.strip(), url.strip())
                             for label, url in
                             [v.split(',') for v in value]]

                if key == 'description-file':
                    if 'description' in content['metadata']:
                        msg = ("description and description-file' are "
                               "mutually exclusive")
                        raise PackagingOptionError(msg)

                    if isinstance(value, list):
                        filenames = value
                    else:
                        filenames = value.split()

                    # concatenate all referenced files
                    value = ''
                    for filename in filenames:
                        # will raise if file not found
                        with open(filename) as description_file:
                            value += description_file.read().strip() + '\n'
                        # add filename as a required file
                        if filename not in metadata.requires_files:
                            metadata.requires_files.append(filename)
                    value = value.strip()
                    key = 'description'

                if metadata.is_metadata_field(key):
                    metadata[key] = self._convert_metadata(key, value)

        if 'files' in content:
            files = content['files']
            # NOTE(review): packages_root is stored as a string here but
            # indexed like a dict below when a package uses 'dir:name'
            # syntax — confirm the intended type upstream
            self.dist.package_dir = files.pop('packages_root', None)

            files = dict((key, self._multiline(value)) for key, value in
                         files.items())

            self.dist.packages = []

            packages = files.get('packages', [])
            if isinstance(packages, str):
                packages = [packages]

            for package in packages:
                if ':' in package:
                    dir_, package = package.split(':')
                    self.dist.package_dir[package] = dir_
                self.dist.packages.append(package)

            self.dist.py_modules = files.get('modules', [])
            if isinstance(self.dist.py_modules, str):
                self.dist.py_modules = [self.dist.py_modules]
            self.dist.scripts = files.get('scripts', [])
            if isinstance(self.dist.scripts, str):
                self.dist.scripts = [self.dist.scripts]

            self.dist.package_data = {}
            for data in files.get('package_data', []):
                data = data.split('=')
                if len(data) != 2:
                    continue  # XXX error should never pass silently
                key, value = data
                self.dist.package_data[key.strip()] = value.strip()

            self.dist.data_files = []
            for data in files.get('data_files', []):
                data = data.split('=')
                if len(data) != 2:
                    continue
                key, value = data
                values = [v.strip() for v in value.split(',')]
                self.dist.data_files.append((key, values))

            # manifest template
            self.dist.extra_files = files.get('extra_files', [])

            resources = []
            for rule in files.get('resources', []):
                glob, destination = rule.split('=', 1)
                rich_glob = glob.strip().split(' ', 1)
                if len(rich_glob) == 2:
                    prefix, suffix = rich_glob
                else:
                    assert len(rich_glob) == 1
                    prefix = ''
                    suffix = glob
                # strip before comparing: the split above leaves the
                # destination padded with spaces, so '<exclude>' never
                # matched; and when it did match the old code crashed
                # calling .strip() on None
                destination = destination.strip()
                if destination == '<exclude>':
                    destination = None
                resources.append(
                    (prefix.strip(), suffix.strip(), destination))
            if resources:
                # compute the destinations once, after all rules are
                # known, instead of once per rule (same final result)
                self.dist.data_files = get_resources_dests(
                    cfg_directory, resources)

        ext_modules = self.dist.ext_modules
        for section_key in content:
            labels = section_key.split('=')
            if len(labels) == 2 and labels[0] == 'extension':
                # labels[1] not used from now but should be implemented
                # for extension build dependency
                values_dct = content[section_key]
                ext_modules.append(Extension(
                    values_dct.pop('name'),
                    _pop_values(values_dct, 'sources'),
                    _pop_values(values_dct, 'include_dirs'),
                    _pop_values(values_dct, 'define_macros'),
                    _pop_values(values_dct, 'undef_macros'),
                    _pop_values(values_dct, 'library_dirs'),
                    _pop_values(values_dct, 'libraries'),
                    _pop_values(values_dct, 'runtime_library_dirs'),
                    _pop_values(values_dct, 'extra_objects'),
                    _pop_values(values_dct, 'extra_compile_args'),
                    _pop_values(values_dct, 'extra_link_args'),
                    _pop_values(values_dct, 'export_symbols'),
                    _pop_values(values_dct, 'swig_opts'),
                    _pop_values(values_dct, 'depends'),
                    values_dct.pop('language', None),
                    values_dct.pop('optional', None),
                    **values_dct))

    def parse_config_files(self, filenames=None):
        """Parse the given files (default: find_config_files()) and set
        Distribution options from their sections."""
        if filenames is None:
            filenames = self.find_config_files()

        logger.debug("Distribution.parse_config_files():")

        parser = RawConfigParser()

        for filename in filenames:
            logger.debug("  reading %s", filename)
            parser.read(filename)

            if os.path.split(filename)[-1] == 'setup.cfg':
                self._read_setup_cfg(parser, filename)

            for section in parser.sections():
                if section == 'global':
                    if parser.has_option('global', 'compilers'):
                        self._load_compilers(parser.get('global', 'compilers'))

                    if parser.has_option('global', 'commands'):
                        self._load_commands(parser.get('global', 'commands'))

                options = parser.options(section)
                opt_dict = self.dist.get_option_dict(section)

                for opt in options:
                    if opt == '__name__':
                        continue
                    val = parser.get(section, opt)
                    opt = opt.replace('-', '_')

                    if opt == 'sub_commands':
                        val = self._multiline(val)
                        if isinstance(val, str):
                            val = [val]

                    # Hooks use a suffix system to prevent being overriden
                    # by a config file processed later (i.e. a hook set in
                    # the user config file cannot be replaced by a hook
                    # set in a project config file, unless they have the
                    # same suffix).
                    if (opt.startswith("pre_hook.") or
                        opt.startswith("post_hook.")):
                        # split only once so a suffix containing a dot
                        # does not raise ValueError
                        hook_type, alias = opt.split(".", 1)
                        hook_dict = opt_dict.setdefault(
                            hook_type, (filename, {}))[1]
                        hook_dict[alias] = val
                    else:
                        opt_dict[opt] = filename, val

            # Make the RawConfigParser forget everything (so we retain
            # the original filenames that options come from)
            parser.__init__()

        # If there was a "global" section in the config file, use it
        # to set Distribution options.
        if 'global' in self.dist.command_options:
            for opt, (src, val) in self.dist.command_options['global'].items():
                alias = self.dist.negative_opt.get(opt)
                try:
                    if alias:
                        setattr(self.dist, alias, not strtobool(val))
                    elif opt == 'dry_run':  # FIXME ugh!
                        setattr(self.dist, opt, strtobool(val))
                    else:
                        setattr(self.dist, opt, val)
                except ValueError as msg:
                    raise PackagingOptionError(msg)

    def _load_compilers(self, compilers):
        """Register every compiler named in a multi-line option value."""
        compilers = self._multiline(compilers)
        if isinstance(compilers, str):
            compilers = [compilers]
        for compiler in compilers:
            set_compiler(compiler.strip())

    def _load_commands(self, commands):
        """Register every command named in a multi-line option value."""
        commands = self._multiline(commands)
        if isinstance(commands, str):
            commands = [commands]
        for command in commands:
            set_command(command.strip())
diff --git a/Lib/packaging/create.py b/Lib/packaging/create.py
new file mode 100644
index 0000000..837d0b6
--- /dev/null
+++ b/Lib/packaging/create.py
@@ -0,0 +1,693 @@
+#!/usr/bin/env python
+"""Interactive helper used to create a setup.cfg file.
+
+This script will generate a packaging configuration file by looking at
+the current directory and asking the user questions.  It is intended to
+be called as
+
+  pysetup create
+
+or
+
+  python3.3 -m packaging.create
+"""
+
+#  Original code by Sean Reifschneider <jafo@tummy.com>
+
+#  Original TODO list:
+#  Look for a license file and automatically add the category.
+#  When a .c file is found during the walk, can we add it as an extension?
+#  Ask if there is a maintainer different that the author
+#  Ask for the platform (can we detect this via "import win32" or something?)
+#  Ask for the dependencies.
+#  Ask for the Requires-Dist
+#  Ask for the Provides-Dist
+#  Ask for a description
+#  Detect scripts (not sure how.  #! outside of package?)
+
+import os
+import imp
+import sys
+import glob
+import re
+import shutil
+import sysconfig
+from configparser import RawConfigParser
+from textwrap import dedent
+from hashlib import md5
+from functools import cmp_to_key
+# importing this with an underscore as it should be replaced by the
+# dict form or another structures for all purposes
+from packaging._trove import all_classifiers as _CLASSIFIERS_LIST
+from packaging.version import is_valid_version
+
+_FILENAME = 'setup.cfg'
+_DEFAULT_CFG = '.pypkgcreate'
+
+_helptext = {
+    'name': '''
+The name of the program to be packaged, usually a single word composed
+of lower-case characters such as "python", "sqlalchemy", or "CherryPy".
+''',
+    'version': '''
+Version number of the software, typically 2 or 3 numbers separated by dots
+such as "1.00", "0.6", or "3.02.01".  "0.1.0" is recommended for initial
+development.
+''',
+    'summary': '''
+A one-line summary of what this project is or does, typically a sentence 80
+characters or less in length.
+''',
+    'author': '''
+The full name of the author (typically you).
+''',
+    'author_email': '''
+E-mail address of the project author (typically you).
+''',
+    'do_classifier': '''
+Trove classifiers are optional identifiers that allow you to specify the
+intended audience by saying things like "Beta software with a text UI
+for Linux under the PSF license.  However, this can be a somewhat involved
+process.
+''',
+    'packages': '''
+You can provide a package name contained in your project.
+''',
+    'modules': '''
+You can provide a python module contained in your project.
+''',
+    'extra_files': '''
+You can provide extra files/dirs contained in your project.
+It has to follow the template syntax. XXX add help here.
+''',
+
+    'home_page': '''
+The home page for the project, typically starting with "http://".
+''',
+    'trove_license': '''
+Optionally you can specify a license.  Type a string that identifies a common
+license, and then you can select a list of license specifiers.
+''',
+    'trove_generic': '''
+Optionally, you can set other trove identifiers for things such as the
+human language, programming language, user interface, etc...
+''',
+    'setup.py found': '''
+The setup.py script will be executed to retrieve the metadata.
+A wizard will be run if you answer "n",
+''',
+}
+
+PROJECT_MATURITY = ['Development Status :: 1 - Planning',
+                    'Development Status :: 2 - Pre-Alpha',
+                    'Development Status :: 3 - Alpha',
+                    'Development Status :: 4 - Beta',
+                    'Development Status :: 5 - Production/Stable',
+                    'Development Status :: 6 - Mature',
+                    'Development Status :: 7 - Inactive']
+
+# XXX everything needs docstrings and tests (both low-level tests of various
+# methods and functional tests of running the script)
+
+
def load_setup():
    """Execute the project's setup.py script.

    The file is loaded unconditionally — even if it was imported before —
    because the caller monkey-patches its setup() function to capture the
    keyword arguments.
    """
    module_info = (".py", "r", imp.PY_SOURCE)
    with open("setup.py") as f:
        imp.load_module("setup", f, "setup.py", module_info)
+
+
def ask_yn(question, default=None, helptext=None):
    """Repeat *question* until the user answers yes or no.

    Returns 'y' or 'n' (the lowercased first letter of the answer).
    """
    question += ' (y/n)'
    while True:
        reply = ask(question, default, helptext, required=True)
        if reply and reply[0].lower() in 'yn':
            return reply[0].lower()
        print('\nERROR: You must select "Y" or "N".\n')
+
+
def ask(question, default=None, helptext=None, required=True,
        lengthy=False, multiline=False):
    """Prompt the user on stdin and return the (stripped) line typed.

    A '?' answer prints *helptext* and re-asks; an empty answer returns
    *default* when one is set, and is refused while *required* is true.
    """
    if default and len(question) + len(default) > 70:
        # long prompts put the default on its own line
        prompt = '%s\n    [%s]: ' % (question, default)
    elif default:
        prompt = '%s [%s]: ' % (question, default)
    else:
        prompt = '%s: ' % (question,)
    if lengthy or multiline:
        prompt += '\n   > '

    helptext = (helptext or 'No additional help available.').strip("\n")

    while True:
        sys.stdout.write(prompt)
        sys.stdout.flush()
        line = sys.stdin.readline().strip()
        if line == '?':
            print('=' * 70)
            print(helptext)
            print('=' * 70)
        elif default and not line:
            return default
        elif not line and required:
            print('*' * 70)
            print('This value cannot be empty.')
            print('===========================')
            if helptext:
                print(helptext)
            print('*' * 70)
        else:
            return line
+
+
def convert_yn_to_bool(yn, yes=True, no=False):
    """Return *yes* for an answer starting with y/Y, *no* otherwise."""
    return yes if yn.lower().startswith('y') else no
+
+
+def _build_classifiers_dict(classifiers):
+    d = {}
+    for key in classifiers:
+        subDict = d
+        for subkey in key.split(' :: '):
+            if not subkey in subDict:
+                subDict[subkey] = {}
+            subDict = subDict[subkey]
+    return d
+
# nested mapping of all known Trove classifiers, one dict level per
# ' :: ' component, e.g. CLASSIFIERS['Development Status']['4 - Beta']
CLASSIFIERS = _build_classifiers_dict(_CLASSIFIERS_LIST)
+
+
+def _build_licences(classifiers):
+    res = []
+    for index, item in enumerate(classifiers):
+        if not item.startswith('License :: '):
+            continue
+        res.append((index, item.split(' :: ')[-1].lower()))
+    return res
+
# (index into _CLASSIFIERS_LIST, lowercased license name) pairs used to
# match free-text license answers against Trove license classifiers
LICENCES = _build_licences(_CLASSIFIERS_LIST)
+
+
+class MainProgram:
+    """Make a project setup configuration file (setup.cfg)."""
+
+    def __init__(self):
+        self.configparser = None
+        self.classifiers = set()
+        self.data = {'name': '',
+                     'version': '1.0.0',
+                     'classifier': self.classifiers,
+                     'packages': [],
+                     'modules': [],
+                     'platform': [],
+                     'resources': [],
+                     'extra_files': [],
+                     'scripts': [],
+                     }
+        self._load_defaults()
+
+    def __call__(self):
+        setupcfg_defined = False
+        if self.has_setup_py() and self._prompt_user_for_conversion():
+            setupcfg_defined = self.convert_py_to_cfg()
+        if not setupcfg_defined:
+            self.define_cfg_values()
+        self._write_cfg()
+
+    def has_setup_py(self):
+        """Test for the existance of a setup.py file."""
+        return os.path.exists('setup.py')
+
+    def define_cfg_values(self):
+        self.inspect()
+        self.query_user()
+
+    def _lookup_option(self, key):
+        if not self.configparser.has_option('DEFAULT', key):
+            return None
+        return self.configparser.get('DEFAULT', key)
+
+    def _load_defaults(self):
+        # Load default values from a user configuration file
+        self.configparser = RawConfigParser()
+        # TODO replace with section in distutils config file
+        default_cfg = os.path.expanduser(os.path.join('~', _DEFAULT_CFG))
+        self.configparser.read(default_cfg)
+        self.data['author'] = self._lookup_option('author')
+        self.data['author_email'] = self._lookup_option('author_email')
+
+    def _prompt_user_for_conversion(self):
+        # Prompt the user about whether they would like to use the setup.py
+        # conversion utility to generate a setup.cfg or generate the setup.cfg
+        # from scratch
+        answer = ask_yn(('A legacy setup.py has been found.\n'
+                         'Would you like to convert it to a setup.cfg?'),
+                        default="y",
+                        helptext=_helptext['setup.py found'])
+        return convert_yn_to_bool(answer)
+
+    def _dotted_packages(self, data):
+        packages = sorted(data)
+        modified_pkgs = []
+        for pkg in packages:
+            pkg = pkg.lstrip('./')
+            pkg = pkg.replace('/', '.')
+            modified_pkgs.append(pkg)
+        return modified_pkgs
+
    def _write_cfg(self):
        """Write self.data out as a fresh setup.cfg.

        An existing setup.cfg is first moved to setup.cfg.old; if that
        backup already exists the method prints an error and writes
        nothing, so a previous backup is never clobbered.
        """
        if os.path.exists(_FILENAME):
            if os.path.exists('%s.old' % _FILENAME):
                print("ERROR: %(name)s.old backup exists, please check that "
                      "current %(name)s is correct and remove %(name)s.old" %
                      {'name': _FILENAME})
                return
            shutil.move(_FILENAME, '%s.old' % _FILENAME)

        with open(_FILENAME, 'w') as fp:
            fp.write('[metadata]\n')
            # simple string entries
            for name in ('name', 'version', 'summary', 'download_url'):
                fp.write('%s = %s\n' % (name, self.data.get(name, 'UNKNOWN')))
            # optional string entries
            if 'keywords' in self.data and self.data['keywords']:
                fp.write('keywords = %s\n' % ' '.join(self.data['keywords']))
            for name in ('home_page', 'author', 'author_email',
                         'maintainer', 'maintainer_email', 'description-file'):
                if name in self.data and self.data[name]:
                    fp.write('%s = %s\n' % (name, self.data[name]))
            if 'description' in self.data:
                # continuation lines are prefixed with '       |' so the
                # multi-line description survives configparser parsing
                fp.write(
                    'description = %s\n'
                    % '\n       |'.join(self.data['description'].split('\n')))
            # multiple use string entries: one value per indented line
            for name in ('platform', 'supported-platform', 'classifier',
                         'requires-dist', 'provides-dist', 'obsoletes-dist',
                         'requires-external'):
                if not(name in self.data and self.data[name]):
                    continue
                fp.write('%s = ' % name)
                # lstrip so the first value sits on the 'name = ' line
                fp.write(''.join('    %s\n' % val
                                 for val in self.data[name]).lstrip())
            fp.write('\n[files]\n')
            for name in ('packages', 'modules', 'scripts',
                         'package_data', 'extra_files'):
                if not(name in self.data and self.data[name]):
                    continue
                fp.write('%s = %s\n'
                         % (name, '\n    '.join(self.data[name]).strip()))
            fp.write('\nresources =\n')
            for src, dest in self.data['resources']:
                fp.write('    %s = %s\n' % (src, dest))
            fp.write('\n')

        # world-readable file; it carries no secrets
        os.chmod(_FILENAME, 0o644)
        print('Wrote "%s".' % _FILENAME)
+
    def convert_py_to_cfg(self):
        """Generate a setup.cfg from an existing setup.py.

        It only exports the distutils metadata (setuptools specific metadata
        is not currently supported).
        """
        data = self.data

        def setup_mock(**attrs):
            """Mock the setup(**attrs) in order to retrieve metadata."""
            # use the distutils v1 processing to correctly parse metadata.
            #XXX we could also use the setuptools distribution ???
            from distutils.dist import Distribution
            dist = Distribution(attrs)
            dist.parse_config_files()

            # 1. retrieve metadata fields that are quite similar in
            # PEP 314 and PEP 345; each pair is (distutils attribute name,
            # setup.cfg field name)
            labels = (('name',) * 2,
                      ('version',) * 2,
                      ('author',) * 2,
                      ('author_email',) * 2,
                      ('maintainer',) * 2,
                      ('maintainer_email',) * 2,
                      ('description', 'summary'),
                      ('long_description', 'description'),
                      ('url', 'home_page'),
                      ('platforms', 'platform'),
                      # backport only for 2.5+
                      ('provides', 'provides-dist'),
                      ('obsoletes', 'obsoletes-dist'),
                      ('requires', 'requires-dist'))

            get = lambda lab: getattr(dist.metadata, lab.replace('-', '_'))
            # only copy fields that have a truthy value
            data.update((new, get(old)) for old, new in labels if get(old))

            # 2. retrieve data that requires special processing
            data['classifier'].update(dist.get_classifiers() or [])
            data['scripts'].extend(dist.scripts or [])
            data['packages'].extend(dist.packages or [])
            data['modules'].extend(dist.py_modules or [])
            # 2.1 data_files -> resources
            if dist.data_files:
                # NOTE(review): this tests data_files[1] to decide whether
                # the value is a flat list of file names; checking the first
                # element looks intended, and a single ('dir', files) tuple
                # is also wrongly wrapped by the len < 2 test -- confirm
                if len(dist.data_files) < 2 or \
                   isinstance(dist.data_files[1], str):
                    dist.data_files = [('', dist.data_files)]
                # add tokens in the destination paths
                vars = {'distribution.name': data['name']}
                path_tokens = list(sysconfig.get_paths(vars=vars).items())

                def length_comparison(x, y):
                    # old-style cmp function: order (token, path) pairs by
                    # the length of the path component
                    len_x = len(x[1])
                    len_y = len(y[1])
                    if len_x == len_y:
                        return 0
                    elif len_x < len_y:
                        return -1
                    else:
                        return 1

                # sort tokens to use the longest one first
                path_tokens.sort(key=cmp_to_key(length_comparison))
                for dest, srcs in (dist.data_files or []):
                    dest = os.path.join(sys.prefix, dest)
                    for tok, path in path_tokens:
                        if dest.startswith(path):
                            # replace the sysconfig path prefix by a {token}
                            dest = ('{%s}' % tok) + dest[len(path):]
                            files = [('/ '.join(src.rsplit('/', 1)), dest)
                                     for src in srcs]
                            data['resources'].extend(files)
                            # NOTE(review): 'continue' keeps matching shorter
                            # tokens against the already-rewritten dest;
                            # 'break' after the first (longest) match looks
                            # intended -- confirm
                            continue
            # 2.2 package_data -> extra_files
            package_dirs = dist.package_dir or {}
            for package, extras in iter(dist.package_data.items()) or []:
                package_dir = package_dirs.get(package, package)
                files = [os.path.join(package_dir, f) for f in extras]
                data['extra_files'].extend(files)

            # Use README file if its content is the description
            if "description" in data:
                # compare whitespace-insensitive, case-insensitive digests
                # ('\s' is a non-raw regex escape; works, but r'\s' would be
                # cleaner)
                ref = md5(re.sub('\s', '',
                                 self.data['description']).lower().encode())
                ref = ref.digest()
                for readme in glob.glob('README*'):
                    with open(readme) as fp:
                        contents = fp.read()
                    val = md5(re.sub('\s', '',
                                     contents.lower()).encode()).digest()
                    if val == ref:
                        # the description duplicates a README: reference the
                        # file instead of inlining the text
                        del data['description']
                        data['description-file'] = readme
                        break

        # apply monkey patch to distutils (v1) and setuptools (if needed)
        # (abort the feature if distutils v1 has been killed)
        try:
            from distutils import core
            core.setup  # make sure it's not d2 masquerading as d1
        except (ImportError, AttributeError):
            return
        saved_setups = [(core, core.setup)]
        core.setup = setup_mock
        try:
            import setuptools
        except ImportError:
            pass
        else:
            saved_setups.append((setuptools, setuptools.setup))
            setuptools.setup = setup_mock
        # get metadata by executing the setup.py with the patched setup(...)
        success = False  # for python < 2.4
        try:
            load_setup()
            success = True
        finally:  # revert monkey patches
            for patched_module, original_setup in saved_setups:
                patched_module.setup = original_setup
        if not self.data:
            raise ValueError('Unable to load metadata from setup.py')
        return success
+
+    def inspect_file(self, path):
+        with open(path, 'r') as fp:
+            for _ in range(10):
+                line = fp.readline()
+                m = re.match(r'^#!.*python((?P<major>\d)(\.\d+)?)?$', line)
+                if m:
+                    if m.group('major') == '3':
+                        self.classifiers.add(
+                            'Programming Language :: Python :: 3')
+                    else:
+                        self.classifiers.add(
+                        'Programming Language :: Python :: 2')
+
+    def inspect(self):
+        """Inspect the current working diretory for a name and version.
+
+        This information is harvested in where the directory is named
+        like [name]-[version].
+        """
+        dir_name = os.path.basename(os.getcwd())
+        self.data['name'] = dir_name
+        match = re.match(r'(.*)-(\d.+)', dir_name)
+        if match:
+            self.data['name'] = match.group(1)
+            self.data['version'] = match.group(2)
+            # TODO Needs tested!
+            if not is_valid_version(self.data['version']):
+                msg = "Invalid version discovered: %s" % self.data['version']
+                raise RuntimeError(msg)
+
    def query_user(self):
        """Interactively ask the user for all core metadata fields.

        Prompts for name, version, summary, author information and home
        page, then either auto-detects the file list or asks for modules,
        packages and extra files one by one, and finally offers to set
        trove classifiers.
        """
        self.data['name'] = ask('Project name', self.data['name'],
              _helptext['name'])

        self.data['version'] = ask('Current version number',
              self.data.get('version'), _helptext['version'])
        self.data['summary'] = ask('Package summary',
              self.data.get('summary'), _helptext['summary'],
              lengthy=True)
        self.data['author'] = ask('Author name',
              self.data.get('author'), _helptext['author'])
        self.data['author_email'] = ask('Author e-mail address',
              self.data.get('author_email'), _helptext['author_email'])
        self.data['home_page'] = ask('Project Home Page',
              self.data.get('home_page'), _helptext['home_page'],
              required=False)

        if ask_yn('Do you want me to automatically build the file list '
              'with everything I can find in the current directory ? '
              'If you say no, you will have to define them manually.') == 'y':
            self._find_files()
        else:
            # manual file list: modules, then packages, then extra files
            while ask_yn('Do you want to add a single module ?'
                        ' (you will be able to add full packages next)',
                    helptext=_helptext['modules']) == 'y':
                self._set_multi('Module name', 'modules')

            while ask_yn('Do you want to add a package ?',
                    helptext=_helptext['packages']) == 'y':
                self._set_multi('Package name', 'packages')

            while ask_yn('Do you want to add an extra file ?',
                        helptext=_helptext['extra_files']) == 'y':
                self._set_multi('Extra file/dir name', 'extra_files')

        if ask_yn('Do you want to set Trove classifiers?',
                  helptext=_helptext['do_classifier']) == 'y':
            self.set_classifier()
+
    def _find_files(self):
        # we are looking for python modules and packages,
        # other stuff are added as regular files
        pkgs = self.data['packages']
        modules = self.data['modules']
        extra_files = self.data['extra_files']

        def is_package(path):
            # a directory is a package when it contains an __init__.py
            return os.path.exists(os.path.join(path, '__init__.py'))

        curdir = os.getcwd()
        scanned = []
        # relative-path prefixes and suffixes that are never collected
        # (build artifacts, hidden entries, editor backups, bytecode)
        _pref = ['lib', 'include', 'dist', 'build', '.', '~']
        _suf = ['.pyc']

        def to_skip(path):
            # NOTE(review): the first walk below calls this with a bare
            # directory name rather than a path under curdir; relative()
            # then drops len(curdir)+1 leading characters from it, which
            # can empty or mangle the name -- confirm whether the full
            # path should be passed instead
            path = relative(path)

            for pref in _pref:
                if path.startswith(pref):
                    return True

            for suf in _suf:
                if path.endswith(suf):
                    return True

            return False

        def relative(path):
            # strip the leading curdir plus the path separator
            return path[len(curdir) + 1:]

        def dotted(path):
            # turn a path below curdir into a dotted module name
            res = relative(path).replace(os.path.sep, '.')
            if res.endswith('.py'):
                res = res[:-len('.py')]
            return res

        # first pass: packages
        for root, dirs, files in os.walk(curdir):
            if to_skip(root):
                continue
            for dir_ in sorted(dirs):
                if to_skip(dir_):
                    continue
                fullpath = os.path.join(root, dir_)
                dotted_name = dotted(fullpath)
                if is_package(fullpath) and dotted_name not in pkgs:
                    pkgs.append(dotted_name)
                    scanned.append(fullpath)

        # modules and extra files
        for root, dirs, files in os.walk(curdir):
            if to_skip(root):
                continue

            # anything under an already-recorded package was handled above
            if any(root.startswith(path) for path in scanned):
                continue

            for file in sorted(files):
                fullpath = os.path.join(root, file)
                if to_skip(fullpath):
                    continue
                # single module?
                if os.path.splitext(file)[-1] == '.py':
                    modules.append(dotted(fullpath))
                else:
                    extra_files.append(relative(fullpath))
+
+    def _set_multi(self, question, name):
+        existing_values = self.data[name]
+        value = ask(question, helptext=_helptext[name]).strip()
+        if value not in existing_values:
+            existing_values.append(value)
+
+    def set_classifier(self):
+        self.set_maturity_status(self.classifiers)
+        self.set_license(self.classifiers)
+        self.set_other_classifier(self.classifiers)
+
+    def set_other_classifier(self, classifiers):
+        if ask_yn('Do you want to set other trove identifiers', 'n',
+                  _helptext['trove_generic']) != 'y':
+            return
+        self.walk_classifiers(classifiers, [CLASSIFIERS], '')
+
+    def walk_classifiers(self, classifiers, trovepath, desc):
+        trove = trovepath[-1]
+
+        if not trove:
+            return
+
+        for key in sorted(trove):
+            if len(trove[key]) == 0:
+                if ask_yn('Add "%s"' % desc[4:] + ' :: ' + key, 'n') == 'y':
+                    classifiers.add(desc[4:] + ' :: ' + key)
+                continue
+
+            if ask_yn('Do you want to set items under\n   "%s" (%d sub-items)'
+                      % (key, len(trove[key])), 'n',
+                      _helptext['trove_generic']) == 'y':
+                self.walk_classifiers(classifiers, trovepath + [trove[key]],
+                                      desc + ' :: ' + key)
+
+    def set_license(self, classifiers):
+        while True:
+            license = ask('What license do you use',
+                          helptext=_helptext['trove_license'], required=False)
+            if not license:
+                return
+
+            license_words = license.lower().split(' ')
+            found_list = []
+
+            for index, licence in LICENCES:
+                for word in license_words:
+                    if word in licence:
+                        found_list.append(index)
+                        break
+
+            if len(found_list) == 0:
+                print('ERROR: Could not find a matching license for "%s"' %
+                      license)
+                continue
+
+            question = 'Matching licenses:\n\n'
+
+            for index, list_index in enumerate(found_list):
+                question += '   %s) %s\n' % (index + 1,
+                                             _CLASSIFIERS_LIST[list_index])
+
+            question += ('\nType the number of the license you wish to use or '
+                         '? to try again:')
+            choice = ask(question, required=False)
+
+            if choice == '?':
+                continue
+            if choice == '':
+                return
+
+            try:
+                index = found_list[int(choice) - 1]
+            except ValueError:
+                print("ERROR: Invalid selection, type a number from the list "
+                      "above.")
+
+            classifiers.add(_CLASSIFIERS_LIST[index])
+
+    def set_maturity_status(self, classifiers):
+        maturity_name = lambda mat: mat.split('- ')[-1]
+        maturity_question = '''\
+            Please select the project status:
+
+            %s
+
+            Status''' % '\n'.join('%s - %s' % (i, maturity_name(n))
+                                  for i, n in enumerate(PROJECT_MATURITY))
+        while True:
+            choice = ask(dedent(maturity_question), required=False)
+
+            if choice:
+                try:
+                    choice = int(choice) - 1
+                    key = PROJECT_MATURITY[choice]
+                    classifiers.add(key)
+                    return
+                except (IndexError, ValueError):
+                    print("ERROR: Invalid selection, type a single digit "
+                          "number.")
+
+
def main():
    """Main entry point: run the interactive metadata wizard."""
    program = MainProgram()
    # # uncomment when implemented
    # if not program.load_existing_setup_script():
    #     program.inspect_directory()
    #     program.query_user()
    #     program.update_config_file()
    # program.write_setup_script()
    # packaging.util.cfg_to_args()
    # run the interactive program (its __call__ drives the whole flow)
    program()


if __name__ == '__main__':
    main()
diff --git a/Lib/packaging/database.py b/Lib/packaging/database.py
new file mode 100644
index 0000000..087a6ec
--- /dev/null
+++ b/Lib/packaging/database.py
@@ -0,0 +1,627 @@
+"""PEP 376 implementation."""
+
+import io
+import os
+import re
+import csv
+import sys
+import zipimport
+from hashlib import md5
+from packaging import logger
+from packaging.errors import PackagingError
+from packaging.version import suggest_normalized_version, VersionPredicate
+from packaging.metadata import Metadata
+
+
__all__ = [
    'Distribution', 'EggInfoDistribution', 'distinfo_dirname',
    'get_distributions', 'get_distribution', 'get_file_users',
    'provides_distribution', 'obsoletes_distribution',
    'enable_cache', 'disable_cache', 'clear_cache',
]


# TODO update docs

# metadata files that may live inside a .dist-info directory (PEP 376)
DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES')

# Cache
_cache_name = {}  # maps names to Distribution instances
_cache_name_egg = {}  # maps names to EggInfoDistribution instances
_cache_path = {}  # maps paths to Distribution instances
_cache_path_egg = {}  # maps paths to EggInfoDistribution instances
_cache_generated = False  # indicates if .dist-info distributions are cached
_cache_generated_egg = False  # indicates if .dist-info and .egg are cached
_cache_enabled = True  # toggled by enable_cache/disable_cache
+
+
def enable_cache():
    """Turn the internal caching back on.

    The cache contents are left untouched in any case; use
    :func:`clear_cache` to empty them.
    """
    global _cache_enabled
    _cache_enabled = True
+
+
def disable_cache():
    """Turn the internal caching off.

    The cache contents are left untouched in any case; use
    :func:`clear_cache` to empty them.
    """
    global _cache_enabled
    _cache_enabled = False
+
+
def clear_cache():
    """Empty the internal caches and mark them as not generated."""
    global _cache_name, _cache_name_egg, _cache_path, _cache_path_egg, \
        _cache_generated, _cache_generated_egg

    _cache_name, _cache_name_egg = {}, {}
    _cache_path, _cache_path_egg = {}, {}
    _cache_generated = _cache_generated_egg = False
+
+
+def _yield_distributions(include_dist, include_egg, paths=sys.path):
+    """
+    Yield .dist-info and .egg(-info) distributions, based on the arguments
+
+    :parameter include_dist: yield .dist-info distributions
+    :parameter include_egg: yield .egg(-info) distributions
+    """
+    for path in paths:
+        realpath = os.path.realpath(path)
+        if not os.path.isdir(realpath):
+            continue
+        for dir in os.listdir(realpath):
+            dist_path = os.path.join(realpath, dir)
+            if include_dist and dir.endswith('.dist-info'):
+                yield Distribution(dist_path)
+            elif include_egg and (dir.endswith('.egg-info') or
+                                  dir.endswith('.egg')):
+                yield EggInfoDistribution(dist_path)
+
+
def _generate_cache(use_egg_info=False, paths=sys.path):
    """Scan *paths* and populate the module-level distribution caches.

    Does nothing when everything requested has already been cached.
    """
    global _cache_generated, _cache_generated_egg

    if _cache_generated_egg or (_cache_generated and not use_egg_info):
        # all requested flavours are cached already
        return

    scan_dist = not _cache_generated
    scan_egg = use_egg_info

    for dist in _yield_distributions(scan_dist, scan_egg, paths):
        if isinstance(dist, Distribution):
            _cache_path[dist.path] = dist
            _cache_name.setdefault(dist.name, []).append(dist)
        else:
            _cache_path_egg[dist.path] = dist
            _cache_name_egg.setdefault(dist.name, []).append(dist)

    if scan_dist:
        _cache_generated = True
    if scan_egg:
        _cache_generated_egg = True
+
+
class Distribution:
    """Created with the *path* of the ``.dist-info`` directory provided to the
    constructor. It reads the metadata contained in ``METADATA`` when it is
    instantiated."""

    name = ''
    """The name of the distribution."""

    version = ''
    """The version of the distribution."""

    metadata = None
    """A :class:`packaging.metadata.Metadata` instance loaded with
    the distribution's ``METADATA`` file."""

    requested = False
    """A boolean that indicates whether the ``REQUESTED`` metadata file is
    present (in other words, whether the package was installed by user
    request or it was installed as a dependency)."""

    def __init__(self, path):
        # reuse the metadata parsed by an earlier instantiation of the
        # same path when caching is enabled
        if _cache_enabled and path in _cache_path:
            self.metadata = _cache_path[path].metadata
        else:
            metadata_path = os.path.join(path, 'METADATA')
            self.metadata = Metadata(path=metadata_path)

        self.name = self.metadata['Name']
        self.version = self.metadata['Version']
        self.path = path

        if _cache_enabled and not path in _cache_path:
            _cache_path[path] = self

    def __repr__(self):
        return '<Distribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def _get_records(self, local=False):
        """Yield (path, checksum, size) triples parsed from RECORD."""
        with self.get_distinfo_file('RECORD') as record:
            record_reader = csv.reader(record, delimiter=',')
            # pad rows shorter than three fields with None so each one
            # unpacks into exactly (path, checksum, size)
            for row in record_reader:
                path, checksum, size = (row[:] +
                                        [None for i in range(len(row), 3)])
                if local:
                    # turn the stored '/'-separated relative path into an
                    # absolute path using the local separator
                    path = path.replace('/', os.sep)
                    path = os.path.join(sys.prefix, path)
                yield path, checksum, size

    def get_resource_path(self, relative_path):
        """Return the install destination recorded for *relative_path*.

        Looks the path up in the ``RESOURCES`` file and raises KeyError
        when no such resource is installed.
        """
        with self.get_distinfo_file('RESOURCES') as resources_file:
            resources_reader = csv.reader(resources_file, delimiter=',')
            for relative, destination in resources_reader:
                if relative == relative_path:
                    return destination
        raise KeyError(
            'no resource file with relative path %r is installed' %
            relative_path)

    def list_installed_files(self, local=False):
        """
        Iterates over the ``RECORD`` entries and returns a tuple
        ``(path, md5, size)`` for each line. If *local* is ``True``,
        the returned path is transformed into a local absolute path.
        Otherwise the raw value from RECORD is returned.

        A local absolute path is an absolute path in which occurrences of
        ``'/'`` have been replaced by the system separator given by ``os.sep``.

        :parameter local: flag to say if the path should be returned a local
                          absolute path

        :type local: boolean
        :returns: iterator of (path, md5, size)
        """
        return self._get_records(local)

    def uses(self, path):
        """
        Returns ``True`` if path is listed in ``RECORD``. *path* can be a local
        absolute path or a relative ``'/'``-separated path.

        :rtype: boolean
        """
        for p, checksum, size in self._get_records():
            local_absolute = os.path.join(sys.prefix, p)
            if path == p or path == local_absolute:
                return True
        return False

    def get_distinfo_file(self, path, binary=False):
        """
        Returns a file located under the ``.dist-info`` directory. Returns a
        ``file`` instance for the file pointed by *path*.

        :parameter path: a ``'/'``-separated path relative to the
                         ``.dist-info`` directory or an absolute path;
                         If *path* is an absolute path and doesn't start
                         with the ``.dist-info`` directory path,
                         a :class:`PackagingError` is raised
        :type path: string
        :parameter binary: If *binary* is ``True``, opens the file in read-only
                           binary mode (``rb``), otherwise opens it in
                           read-only mode (``r``).
        :rtype: file object
        """
        open_flags = 'r'
        if binary:
            open_flags += 'b'

        # Check if it is an absolute path  # XXX use relpath, add tests
        if path.find(os.sep) >= 0:
            # it's an absolute path?
            # keep only the dist-info directory name and the file name,
            # and make sure the former matches this distribution
            distinfo_dirname, path = path.split(os.sep)[-2:]
            if distinfo_dirname != self.path.split(os.sep)[-1]:
                raise PackagingError(
                    'dist-info file %r does not belong to the %r %s '
                    'distribution' % (path, self.name, self.version))

        # The file must be relative
        if path not in DIST_FILES:
            raise PackagingError('invalid path for a dist-info file: %r' %
                                 path)

        path = os.path.join(self.path, path)
        return open(path, open_flags)

    def list_distinfo_files(self, local=False):
        """
        Iterates over the ``RECORD`` entries and returns paths for each line if
        the path is pointing to a file located in the ``.dist-info`` directory
        or one of its subdirectories.

        :parameter local: If *local* is ``True``, each returned path is
                          transformed into a local absolute path. Otherwise the
                          raw value from ``RECORD`` is returned.
        :type local: boolean
        :returns: iterator of paths
        """
        for path, checksum, size in self._get_records(local):
            yield path

    def __eq__(self, other):
        return isinstance(other, Distribution) and self.path == other.path

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__
+
+
class EggInfoDistribution:
    """Created with the *path* of the ``.egg-info`` directory or file provided
    to the constructor. It reads the metadata contained in the file itself, or
    if the given path happens to be a directory, the metadata is read from the
    file ``PKG-INFO`` under that directory."""

    name = ''
    """The name of the distribution."""

    version = ''
    """The version of the distribution."""

    metadata = None
    """A :class:`packaging.metadata.Metadata` instance loaded with
    the distribution's ``METADATA`` file."""

    # One requirement line from setuptools' requires.txt: a project name,
    # an optional first version constraint, further optional
    # comma-separated constraints, and an optional [extras] suffix.
    _REQUIREMENT = re.compile(
        r'(?P<name>[-A-Za-z0-9_.]+)\s*'
        r'(?P<first>(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)?\s*'
        r'(?P<rest>(?:\s*,\s*(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)*)\s*'
        r'(?P<extras>\[.*\])?')

    def __init__(self, path):
        """Read metadata (and setuptools requirements, if any) from *path*.

        *path* must end with ``.egg`` (zipped or unzipped egg) or
        ``.egg-info`` (directory or single file); anything else raises
        ValueError.
        """
        self.path = path
        if _cache_enabled and path in _cache_path_egg:
            # reuse the metadata parsed for this path earlier
            self.metadata = _cache_path_egg[path].metadata
            self.name = self.metadata['Name']
            self.version = self.metadata['Version']
            return

        # reused from Distribute's pkg_resources
        def yield_lines(strs):
            """Yield non-empty/non-comment lines of a string or sequence."""
            if isinstance(strs, str):
                for s in strs.splitlines():
                    s = s.strip()
                    # skip blank lines/comments
                    if s and not s.startswith('#'):
                        yield s
            else:
                for ss in strs:
                    for s in yield_lines(ss):
                        yield s

        requires = None

        if path.endswith('.egg'):
            if os.path.isdir(path):
                # unzipped egg: metadata lives under EGG-INFO/
                meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
                self.metadata = Metadata(path=meta_path)
                try:
                    req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
                    with open(req_path, 'r') as fp:
                        requires = fp.read()
                except IOError:
                    requires = None
            else:
                # zipped egg: read the metadata out of the zip file
                # FIXME handle the case where zipfile is not available
                zipf = zipimport.zipimporter(path)
                fileobj = io.StringIO(
                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
                self.metadata = Metadata(fileobj=fileobj)
                try:
                    # decode to str so that yield_lines below can split it;
                    # get_data returns bytes, which the old code passed on
                    # unchanged and broke the requirement parsing
                    requires = zipf.get_data(
                        'EGG-INFO/requires.txt').decode('utf8')
                except IOError:
                    requires = None
            self.name = self.metadata['Name']
            self.version = self.metadata['Version']

        elif path.endswith('.egg-info'):
            if os.path.isdir(path):
                # read requires.txt from the .egg-info directory *before*
                # repointing path at the PKG-INFO file inside it; the old
                # code joined 'requires.txt' onto the PKG-INFO file path,
                # so the requirements were never found
                try:
                    with open(os.path.join(path, 'requires.txt'), 'r') as fp:
                        requires = fp.read()
                except IOError:
                    requires = None
                path = os.path.join(path, 'PKG-INFO')
            self.metadata = Metadata(path=path)
            # use the canonical 'Name' key, matching the other branches
            self.name = self.metadata['Name']
            self.version = self.metadata['Version']

        else:
            raise ValueError('path must end with .egg-info or .egg, got %r' %
                             path)

        if requires is not None:
            if self.metadata['Metadata-Version'] == '1.1':
                # we can't have 1.1 metadata *and* Setuptools requires
                for field in ('Obsoletes', 'Requires', 'Provides'):
                    del self.metadata[field]

        reqs = []

        if requires is not None:
            for line in yield_lines(requires):
                if line.startswith('['):
                    logger.warning(
                        'extensions in requires.txt are not supported '
                        '(used by %r %s)', self.name, self.version)
                    break
                else:
                    match = self._REQUIREMENT.match(line.strip())
                    if not match:
                        # this happens when we encounter extras; since they
                        # are written at the end of the file we just exit
                        break
                    else:
                        if match.group('extras'):
                            # pass the format string and both arguments
                            # straight to the logger; the old code built a
                            # tuple and passed it as the message, which
                            # broke the log-record formatting
                            logger.warning(
                                'extra requirements are not supported '
                                '(used by %r %s)', self.name, self.version)
                        name = match.group('name')
                        version = None
                        if match.group('first'):
                            version = match.group('first')
                            if match.group('rest'):
                                version += match.group('rest')
                            version = version.replace(' ', '')  # trim spaces
                        if version is None:
                            reqs.append(name)
                        else:
                            reqs.append('%s (%s)' % (name, version))

            if len(reqs) > 0:
                self.metadata['Requires-Dist'] += reqs

        if _cache_enabled:
            _cache_path_egg[self.path] = self

    def __repr__(self):
        return '<EggInfoDistribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def list_installed_files(self, local=False):
        """Return a list of (path, md5, size) for the installed files.

        A single-file egg yields one entry; a directory is walked
        recursively.  If *local* is true, '/' in self.path is first
        replaced by the platform separator.
        """

        def _md5(path):
            with open(path, 'rb') as f:
                content = f.read()
            return md5(content).hexdigest()

        def _size(path):
            return os.stat(path).st_size

        path = self.path
        if local:
            path = path.replace('/', os.sep)

        # XXX What about scripts and data files ?
        if os.path.isfile(path):
            return [(path, _md5(path), _size(path))]
        else:
            # (the unreachable trailing ``return []`` was removed)
            files = []
            for root, dir, files_ in os.walk(path):
                for item in files_:
                    item = os.path.join(root, item)
                    files.append((item, _md5(item), _size(item)))
            return files

    def uses(self, path):
        # eggs carry no RECORD file, so file ownership cannot be checked
        return False

    def __eq__(self, other):
        return (isinstance(other, EggInfoDistribution) and
                self.path == other.path)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__
+
+
def distinfo_dirname(name, version):
    """Return the directory name of a ``.dist-info`` directory.

    Any ``'-'`` characters in *name* are replaced with ``'_'`` (other than
    the one in ``'dist-info'`` and the one separating the name from the
    version number).  *version* is normalized with
    :func:`suggest_normalized_version`; when it cannot be normalized, the
    raw string is kept, because this is a lookup procedure and something
    must be returned in any case.

    :type name: string
    :type version: string
    :returns: directory name, ``'<escaped name>-<normalized version>.dist-info'``
    :rtype: string
    """
    escaped_name = name.replace('-', '_')
    normalized = suggest_normalized_version(version)
    if normalized is None:
        # unable to achieve normality; fall back to the raw version string
        normalized = version
    return '%s-%s.dist-info' % (escaped_name, normalized)
+
+
def get_distributions(use_egg_info=False, paths=None):
    """
    Provides an iterator that looks for ``.dist-info`` directories in
    ``sys.path`` and returns :class:`Distribution` instances for each one of
    them. If the parameter *use_egg_info* is ``True``, then the ``.egg-info``
    files and directories are iterated as well.

    :parameter paths: list of paths to scan; defaults to ``sys.path``.
        (The default is resolved at call time; the original def-time
        default ``paths=sys.path`` froze the value of ``sys.path`` as it
        was at import.)
    :rtype: iterator of :class:`Distribution` and :class:`EggInfoDistribution`
            instances
    """
    if paths is None:
        paths = sys.path

    if not _cache_enabled:
        for dist in _yield_distributions(True, use_egg_info, paths):
            yield dist
    else:
        _generate_cache(use_egg_info, paths)

        for dist in _cache_path.values():
            yield dist

        if use_egg_info:
            for dist in _cache_path_egg.values():
                yield dist
+
+
def get_distribution(name, use_egg_info=False, paths=None):
    """
    Scans all elements in *paths* (default: ``sys.path``) and looks for all
    directories ending with ``.dist-info``. Returns a :class:`Distribution`
    corresponding to the ``.dist-info`` directory that contains the
    ``METADATA`` that matches *name* for the *name* metadata field.
    If no distribution exists with the given *name* and the parameter
    *use_egg_info* is set to ``True``, then all files and directories ending
    with ``.egg-info`` are scanned. A :class:`EggInfoDistribution` instance is
    returned if one is found that has metadata that matches *name* for the
    *name* metadata field.

    This function only returns the first result found, as no more than one
    value is expected. If the directory is not found, ``None`` is returned.

    :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None
    """
    # idiom fix: compare to None with 'is', not '=='
    if paths is None:
        paths = sys.path

    if not _cache_enabled:
        for dist in _yield_distributions(True, use_egg_info, paths):
            if dist.name == name:
                return dist
        return None
    else:
        _generate_cache(use_egg_info, paths)

        if name in _cache_name:
            return _cache_name[name][0]
        elif use_egg_info and name in _cache_name_egg:
            return _cache_name_egg[name][0]
        else:
            return None
+
+
def obsoletes_distribution(name, version=None, use_egg_info=False):
    """
    Iterate over all distributions and yield those that obsolete *name*.

    If a *version* is provided, it is used to filter the results.
    If the argument *use_egg_info* is set to ``True``, then ``.egg-info``
    distributions are considered as well.

    :type name: string
    :type version: string
    :rtype: iterator of distribution instances
    """
    for dist in get_distributions(use_egg_info):
        entries = (dist.metadata['Obsoletes-Dist'] +
                   dist.metadata['Obsoletes'])
        for obs in entries:
            fields = obs.split(' ', 1)
            if version is None or len(fields) == 1:
                # no version filtering possible: compare names only
                if fields[0] == name:
                    yield dist
                    break
                continue
            try:
                predicate = VersionPredicate(obs)
            except ValueError:
                raise PackagingError(
                    'distribution %r has ill-formed obsoletes field: '
                    '%r' % (dist.name, obs))
            if fields[0] == name and predicate.match(version):
                yield dist
                break
+
+
def provides_distribution(name, version=None, use_egg_info=False):
    """
    Iterate over all distributions and yield those that provide *name*.

    Scans all elements in ``sys.path`` and looks for all directories ending
    with ``.dist-info``, yielding a :class:`Distribution` for each one whose
    ``Provides-Dist``/``Provides`` metadata matches *name*. If a *version*
    is provided, it is used to filter the results. If the argument
    *use_egg_info* is set to ``True``, then all files and directories ending
    with ``.egg-info`` are considered as well, and
    :class:`EggInfoDistribution` instances may be yielded too.

    (Note: this is a generator; the original docstring incorrectly claimed
    it returned a single result or ``None``.)

    :parameter version: a version specifier that indicates the version
                        required, conforming to the format in ``PEP-345``

    :type name: string
    :type version: string
    :raises PackagingError: if *name* and *version* do not form a valid
                            predicate, or a distribution has an invalid
                            Provides field
    """
    predicate = None
    # idiom fix: 'version is not None' instead of 'not version is None'
    if version is not None:
        try:
            predicate = VersionPredicate(name + ' (' + version + ')')
        except ValueError:
            raise PackagingError('invalid name or version: %r, %r' %
                                 (name, version))

    for dist in get_distributions(use_egg_info):
        provided = dist.metadata['Provides-Dist'] + dist.metadata['Provides']

        for p in provided:
            p_components = p.rsplit(' ', 1)
            if len(p_components) == 1 or predicate is None:
                if name == p_components[0]:
                    yield dist
                    break
            else:
                p_name, p_ver = p_components
                if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')':
                    raise PackagingError(
                        'distribution %r has invalid Provides field: %r' %
                        (dist.name, p))
                p_ver = p_ver[1:-1]  # trim off the parenthesis
                if p_name == name and predicate.match(p_ver):
                    yield dist
                    break
+
+
def get_file_users(path):
    """
    Iterate over all distributions, yielding those that use *path*.

    :parameter path: can be a local absolute path or a relative
                     ``'/'``-separated path.
    :type path: string
    :rtype: iterator of :class:`Distribution` instances
    """
    yield from (dist for dist in get_distributions() if dist.uses(path))
diff --git a/Lib/packaging/depgraph.py b/Lib/packaging/depgraph.py
new file mode 100644
index 0000000..48ea3d9
--- /dev/null
+++ b/Lib/packaging/depgraph.py
@@ -0,0 +1,270 @@
+"""Class and functions dealing with dependencies between distributions.
+
+This module provides a DependencyGraph class to represent the
+dependencies between distributions.  Auxiliary functions can generate a
+graph, find reverse dependencies, and print a graph in DOT format.
+"""
+
+import sys
+
+from io import StringIO
+from packaging.errors import PackagingError
+from packaging.version import VersionPredicate, IrrationalVersionError
+
+__all__ = ['DependencyGraph', 'generate_graph', 'dependent_dists',
+           'graph_to_dot']
+
+
class DependencyGraph:
    """
    Represents a dependency graph between distributions.

    The dependency relationships are stored in an ``adjacency_list`` that maps
    distributions to a list of ``(other, label)`` tuples where  ``other``
    is a distribution and the edge is labeled with ``label`` (i.e. the version
    specifier, if such was provided). Also, for more efficient traversal, for
    every distribution ``x``, a list of predecessors is kept in
    ``reverse_list[x]``. An edge from distribution ``a`` to
    distribution ``b`` means that ``a`` depends on ``b``. If any missing
    dependencies are found, they are stored in ``missing``, which is a
    dictionary that maps distributions to a list of requirements that were not
    provided by any other distributions.
    """

    def __init__(self):
        self.adjacency_list = {}
        self.reverse_list = {}
        self.missing = {}

    def add_distribution(self, distribution):
        """Add the *distribution* to the graph.

        :type distribution: :class:`packaging.database.Distribution` or
                            :class:`packaging.database.EggInfoDistribution`
        """
        self.adjacency_list[distribution] = []
        self.reverse_list[distribution] = []
        self.missing[distribution] = []

    def add_edge(self, x, y, label=None):
        """Add an edge from distribution *x* to distribution *y* with the given
        *label*.

        :type x: :class:`packaging.database.Distribution` or
                 :class:`packaging.database.EggInfoDistribution`
        :type y: :class:`packaging.database.Distribution` or
                 :class:`packaging.database.EggInfoDistribution`
        :type label: ``str`` or ``None``
        """
        self.adjacency_list[x].append((y, label))
        # multiple edges are allowed, so be careful
        if x not in self.reverse_list[y]:
            self.reverse_list[y].append(x)

    def add_missing(self, distribution, requirement):
        """
        Add a missing *requirement* for the given *distribution*.

        :type distribution: :class:`packaging.database.Distribution` or
                            :class:`packaging.database.EggInfoDistribution`
        :type requirement: ``str``
        """
        self.missing[distribution].append(requirement)

    def _repr_dist(self, dist):
        # one-line form used by repr_node and __repr__
        return '%s %s' % (dist.name, dist.metadata['Version'])

    def repr_node(self, dist, level=1):
        """Print only a subgraph rooted at *dist*, one indented line per
        edge, recursing into each dependency."""
        output = [self._repr_dist(dist)]
        for other, label in self.adjacency_list[dist]:
            # note: the original rebound 'dist' here, shadowing the
            # parameter; use a distinct name instead
            dist_repr = self._repr_dist(other)
            if label is not None:
                dist_repr = '%s [%s]' % (dist_repr, label)
            output.append('    ' * level + dist_repr)
            suboutput = self.repr_node(other, level + 1)
            subs = suboutput.split('\n')
            output.extend(subs[1:])
        return '\n'.join(output)

    def __repr__(self):
        """Representation of the graph: one repr_node per distribution."""
        return '\n'.join(self.repr_node(dist)
                         for dist in self.adjacency_list)
+
+
def graph_to_dot(graph, f, skip_disconnected=True):
    """Write a DOT output for the graph to the provided file *f*.

    If *skip_disconnected* is set to ``True``, then all distributions
    that have no outgoing edges are omitted; otherwise they are grouped
    in a ``subgraph disconnected`` section at the end.

    :type f: has to support ``file``-like operations
    :type skip_disconnected: ``bool``
    """
    disconnected = []

    f.write("digraph dependencies {\n")
    for dist, adjs in graph.adjacency_list.items():
        if len(adjs) == 0 and not skip_disconnected:
            # collect edge-less nodes for the subgraph written below
            disconnected.append(dist)
        for other, label in adjs:
            # idiom fix: 'label is not None' instead of 'not label is None'
            if label is not None:
                f.write('"%s" -> "%s" [label="%s"]\n' %
                                            (dist.name, other.name, label))
            else:
                f.write('"%s" -> "%s"\n' % (dist.name, other.name))
    if not skip_disconnected and len(disconnected) > 0:
        f.write('subgraph disconnected {\n')
        f.write('label = "Disconnected"\n')
        f.write('bgcolor = red\n')

        for dist in disconnected:
            f.write('"%s"' % dist.name)
            f.write('\n')
        f.write('}\n')
    f.write('}\n')
+
+
def generate_graph(dists):
    """Generates a dependency graph from the given distributions.

    :parameter dists: a list of distributions
    :type dists: list of :class:`packaging.database.Distribution` and
                 :class:`packaging.database.EggInfoDistribution` instances
    :rtype: a :class:`DependencyGraph` instance
    :raises PackagingError: if a distribution has an ill-formed
                            Provides(-Dist) field
    """
    graph = DependencyGraph()
    provided = {}  # maps names to lists of (version, dist) tuples

    # first, build the graph and find out the provides
    for dist in dists:
        graph.add_distribution(dist)
        # a distribution implicitly provides its own name and version
        provides = (dist.metadata['Provides-Dist'] +
                    dist.metadata['Provides'] +
                    ['%s (%s)' % (dist.name, dist.metadata['Version'])])

        for p in provides:
            comps = p.strip().rsplit(" ", 1)
            name = comps[0]
            version = None
            if len(comps) == 2:
                version = comps[1]
                if len(version) < 3 or version[0] != '(' or version[-1] != ')':
                    # bug fix: the two string literals were concatenated
                    # without a separating space ("ill formedprovides")
                    raise PackagingError('Distribution %s has ill formed '
                                         'provides field: %s' % (dist.name, p))
                version = version[1:-1]  # trim off parenthesis
            provided.setdefault(name, []).append((version, dist))

    # now make the edges
    for dist in dists:
        requires = dist.metadata['Requires-Dist'] + dist.metadata['Requires']
        for req in requires:
            try:
                predicate = VersionPredicate(req)
            except IrrationalVersionError:
                # XXX compat-mode if cannot read the version
                name = req.split()[0]
                predicate = VersionPredicate(name)

            name = predicate.name

            if name not in provided:
                graph.add_missing(dist, req)
            else:
                matched = False
                for version, provider in provided[name]:
                    try:
                        match = predicate.match(version)
                    except IrrationalVersionError:
                        # XXX small compat-mode: accept an unparsable
                        # provided version when it is a single token.
                        # Bug fix: the original compared the split list
                        # itself to 1 ('version.split(" ") == 1'), which
                        # was always False.
                        match = (version is not None and
                                 len(version.split(' ')) == 1)

                    if match:
                        graph.add_edge(dist, provider, req)
                        matched = True
                        break
                if not matched:
                    graph.add_missing(dist, req)
    return graph
+
+
def dependent_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    dependent on *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    :raises ValueError: if *dist* is not a member of *dists*
    """
    if dist not in dists:
        raise ValueError('The given distribution is not a member of the list')
    graph = generate_graph(dists)

    dep = [dist]  # dependent distributions
    fringe = graph.reverse_list[dist]  # list of nodes we should inspect

    while fringe:
        node = fringe.pop()
        # bug fix: a node could be queued twice before being processed,
        # which produced duplicate entries in the result; skip it here
        if node in dep:
            continue
        dep.append(node)
        for prev in graph.reverse_list[node]:
            if prev not in dep:
                fringe.append(prev)

    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
    return dep
+
+
def main():
    """Command-line entry point: print the dependency graph of the
    installed distributions, or write it to a DOT file.

    With no arguments the graph is printed to stdout; with ``-d``/``--dot``
    (and an optional filename, default ``depgraph.dot``) it is written in
    DOT format.  Exits with status 1 on error or unknown options.
    """
    from packaging.database import get_distributions
    tempout = StringIO()
    try:
        # capture stderr while scanning so warnings emitted during the
        # database walk do not interleave with our own output
        old = sys.stderr
        sys.stderr = tempout
        try:
            dists = list(get_distributions(use_egg_info=True))
            graph = generate_graph(dists)
        finally:
            sys.stderr = old
    except Exception as e:
        tempout.seek(0)
        # rebinds tempout from StringIO to its str content
        tempout = tempout.read()
        print('Could not generate the graph\n%s\n%s\n' % (tempout, e))
        sys.exit(1)

    # report unsatisfied requirements collected during graph generation
    for dist, reqs in graph.missing.items():
        if len(reqs) > 0:
            print("Warning: Missing dependencies for %s:" % dist.name,
                  ", ".join(reqs))
    # XXX replace with argparse
    if len(sys.argv) == 1:
        print('Dependency graph:')
        print('    ' + repr(graph).replace('\n', '\n    '))
        sys.exit(0)
    elif len(sys.argv) > 1 and sys.argv[1] in ('-d', '--dot'):
        if len(sys.argv) > 2:
            filename = sys.argv[2]
        else:
            filename = 'depgraph.dot'

        with open(filename, 'w') as f:
            graph_to_dot(graph, f, True)
        tempout.seek(0)
        tempout = tempout.read()
        print(tempout)
        print('Dot file written at "%s"' % filename)
        sys.exit(0)
    else:
        print('Supported option: -d [filename]')
        sys.exit(1)


if __name__ == '__main__':
    main()
diff --git a/Lib/packaging/dist.py b/Lib/packaging/dist.py
new file mode 100644
index 0000000..6065e78
--- /dev/null
+++ b/Lib/packaging/dist.py
@@ -0,0 +1,819 @@
+"""Class representing the distribution being built/installed/etc."""
+
+import os
+import re
+
+from packaging.errors import (PackagingOptionError, PackagingArgError,
+                              PackagingModuleError, PackagingClassError)
+from packaging.fancy_getopt import FancyGetopt
+from packaging.util import strtobool, resolve_name
+from packaging import logger
+from packaging.metadata import Metadata
+from packaging.config import Config
+from packaging.command import get_command_class, STANDARD_COMMANDS
+
# Regex to define acceptable Packaging command names.  This is not *quite*
# the same as a Python NAME -- leading underscores are not allowed.  The
# similarity is no coincidence: the default naming scheme is to look for a
# Python module named after the command.
command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')

USAGE = """\
usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
   or: %(script)s --help [cmd1 cmd2 ...]
   or: %(script)s --help-commands
   or: %(script)s cmd --help
"""


def gen_usage(script_name):
    """Return the top-level usage message, with the base name of
    *script_name* substituted in."""
    return USAGE % {'script': os.path.basename(script_name)}
+
+
+class Distribution:
+    """The core of the Packaging.  Most of the work hiding behind 'setup'
+    is really done within a Distribution instance, which farms the work out
+    to the Packaging commands specified on the command line.
+
+    Setup scripts will almost never instantiate Distribution directly,
+    unless the 'setup()' function is totally inadequate to their needs.
+    However, it is conceivable that a setup script might wish to subclass
+    Distribution for some specialized purpose, and then pass the subclass
+    to 'setup()' as the 'distclass' keyword argument.  If so, it is
+    necessary to respect the expectations that 'setup' has of Distribution.
+    See the code for 'setup()', in run.py, for details.
+    """
+
    # 'global_options' describes the command-line options that may be
    # supplied to the setup script prior to any actual commands.
    # Eg. "./setup.py -n" or "./setup.py --dry-run" both take advantage of
    # these global options.  This list should be kept to a bare minimum,
    # since every global option is also valid as a command option -- and we
    # don't want to pollute the commands with too many options that they
    # have minimal control over.
    # Each entry is (long option, short option or None, help text).
    global_options = [
        ('dry-run', 'n', "don't actually do anything"),
        ('help', 'h', "show detailed help message"),
        ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
    ]

    # 'common_usage' is a short (2-3 line) string describing the common
    # usage of the setup script.
    common_usage = """\
Common commands: (see '--help-commands' for more)

  setup.py build      will build the package underneath 'build/'
  setup.py install    will install the package
"""
+
+    # options that are not propagated to the commands
+    display_options = [
+        ('help-commands', None,
+         "list all available commands"),
+        ('name', None,
+         "print package name"),
+        ('version', 'V',
+         "print package version"),
+        ('fullname', None,
+         "print <package name>-<version>"),
+        ('author', None,
+         "print the author's name"),
+        ('author-email', None,
+         "print the author's email address"),
+        ('maintainer', None,
+         "print the maintainer's name"),
+        ('maintainer-email', None,
+         "print the maintainer's email address"),
+        ('contact', None,
+         "print the maintainer's name if known, else the author's"),
+        ('contact-email', None,
+         "print the maintainer's email address if known, else the author's"),
+        ('url', None,
+         "print the URL for this package"),
+        ('license', None,
+         "print the license of the package"),
+        ('licence', None,
+         "alias for --license"),
+        ('description', None,
+         "print the package description"),
+        ('long-description', None,
+         "print the long package description"),
+        ('platforms', None,
+         "print the list of platforms"),
+        ('classifier', None,
+         "print the list of classifiers"),
+        ('keywords', None,
+         "print the list of keywords"),
+        ('provides', None,
+         "print the list of packages/modules provided"),
+        ('requires', None,
+         "print the list of packages/modules required"),
+        ('obsoletes', None,
+         "print the list of packages/modules made obsolete"),
+        ('use-2to3', None,
+         "use 2to3 to make source python 3.x compatible"),
+        ('convert-2to3-doctests', None,
+         "use 2to3 to convert doctests in seperate text files"),
+        ]
+    display_option_names = [x[0].replace('-', '_') for x in display_options]
+
+    # negative options are options that exclude other options
+    negative_opt = {}
+
    # -- Creation/initialization methods -------------------------------
    def __init__(self, attrs=None):
        """Construct a new Distribution instance: initialize all the
        attributes of a Distribution, and then use 'attrs' (a dictionary
        mapping attribute names to values) to assign some of those
        attributes their "real" values.  (Any attributes not mentioned in
        'attrs' will be assigned to some null value: 0, None, an empty list
        or dictionary, etc.)  Most importantly, initialize the
        'command_obj' attribute to the empty dictionary; this will be
        filled in with real command objects by 'parse_command_line()'.

        Note that initialization order matters: the Config object is
        created before the attrs overrides are applied, and
        'finalize_options()' runs last.
        """

        # Default values for our command-line options
        self.dry_run = False
        self.help = False
        for attr in self.display_option_names:
            setattr(self, attr, False)

        # Store the configuration
        self.config = Config(self)

        # Store the distribution metadata (name, version, author, and so
        # forth) in a separate object -- we're getting to have enough
        # information here (and enough command-line options) that it's
        # worth it.
        self.metadata = Metadata()

        # 'cmdclass' maps command names to class objects, so we
        # can 1) quickly figure out which class to instantiate when
        # we need to create a new command object, and 2) have a way
        # for the setup script to override command classes
        self.cmdclass = {}

        # 'script_name' and 'script_args' are usually set to sys.argv[0]
        # and sys.argv[1:], but they can be overridden when the caller is
        # not necessarily a setup script run from the command line.
        self.script_name = None
        self.script_args = None

        # 'command_options' is where we store command options between
        # parsing them (from config files, the command line, etc.) and when
        # they are actually needed -- ie. when the command in question is
        # instantiated.  It is a dictionary of dictionaries of 2-tuples:
        #   command_options = { command_name : { option : (source, value) } }
        self.command_options = {}

        # 'dist_files' is the list of (command, pyversion, file) that
        # have been created by any dist commands run so far. This is
        # filled regardless of whether the run is dry or not. pyversion
        # gives sysconfig.get_python_version() if the dist file is
        # specific to a Python version, 'any' if it is good for all
        # Python versions on the target platform, and '' for a source
        # file. pyversion should not be used to specify minimum or
        # maximum required Python versions; use the metainfo for that
        # instead.
        self.dist_files = []

        # These options are really the business of various commands, rather
        # than of the Distribution itself.  We provide aliases for them in
        # Distribution as a convenience to the developer.
        self.packages = []
        self.package_data = {}
        self.package_dir = None
        self.py_modules = []
        self.libraries = []
        self.headers = []
        self.ext_modules = []
        self.ext_package = None
        self.include_dirs = []
        self.extra_path = None
        self.scripts = []
        self.data_files = {}
        self.password = ''
        self.use_2to3 = False
        self.convert_2to3_doctests = []
        self.extra_files = []

        # And now initialize bookkeeping stuff that can't be supplied by
        # the caller at all.  'command_obj' maps command names to
        # Command instances -- that's how we enforce that every command
        # class is a singleton.
        self.command_obj = {}

        # 'have_run' maps command names to boolean values; it keeps track
        # of whether we have actually run a particular command, to make it
        # cheap to "run" a command whenever we think we might need to -- if
        # it's already been done, no need for expensive filesystem
        # operations, we just check the 'have_run' dictionary and carry on.
        # It's only safe to query 'have_run' for a command class that has
        # been instantiated -- a false value will be inserted when the
        # command object is created, and replaced with a true value when
        # the command is successfully run.  Thus it's probably best to use
        # '.get()' rather than a straight lookup.
        self.have_run = {}

        # Now we'll use the attrs dictionary (ultimately, keyword args from
        # the setup script) to possibly override any or all of these
        # distribution options.

        if attrs is not None:
            # Pull out the set of command options and work on them
            # specifically.  Note that this order guarantees that aliased
            # command options will override any supplied redundantly
            # through the general options dictionary.
            options = attrs.get('options')
            if options is not None:
                # NOTE: this mutates the caller's attrs dict
                del attrs['options']
                for command, cmd_options in options.items():
                    opt_dict = self.get_option_dict(command)
                    for opt, val in cmd_options.items():
                        opt_dict[opt] = ("setup script", val)

            # Now work on the rest of the attributes.  Any attribute that's
            # not already defined is invalid!
            # Metadata fields take precedence over plain instance
            # attributes, then anything unknown is only warned about.
            for key, val in attrs.items():
                if self.metadata.is_metadata_field(key):
                    self.metadata[key] = val
                elif hasattr(self, key):
                    setattr(self, key, val)
                else:
                    logger.warning(
                        'unknown argument given to Distribution: %r', key)

        # no-user-cfg is handled before other command line args
        # because other args override the config files, and this
        # one is needed before we can load the config files.
        # If attrs['script_args'] wasn't passed, assume false.
        #
        # This also make sure we just look at the global options
        self.want_user_cfg = True

        if self.script_args is not None:
            # scan only the leading option arguments: stop at the first
            # token that does not start with '-' (i.e. the first command)
            for arg in self.script_args:
                if not arg.startswith('-'):
                    break
                if arg == '--no-user-cfg':
                    self.want_user_cfg = False
                    break

        self.finalize_options()
+
+    def get_option_dict(self, command):
+        """Get the option dictionary for a given command.  If that
+        command's option dictionary hasn't been created yet, then create it
+        and return the new dictionary; otherwise, return the existing
+        option dictionary.
+        """
+        d = self.command_options.get(command)
+        if d is None:
+            d = self.command_options[command] = {}
+        return d
+
    def get_fullname(self):
        # Delegates to the metadata object -- presumably returns
        # '<name>-<version>'; see Metadata.get_fullname to confirm.
        return self.metadata.get_fullname()
+
+    def dump_option_dicts(self, header=None, commands=None, indent=""):
+        from pprint import pformat
+
+        if commands is None:             # dump all command option dicts
+            commands = sorted(self.command_options)
+
+        if header is not None:
+            logger.info(indent + header)
+            indent = indent + "  "
+
+        if not commands:
+            logger.info(indent + "no commands known yet")
+            return
+
+        for cmd_name in commands:
+            opt_dict = self.command_options.get(cmd_name)
+            if opt_dict is None:
+                logger.info(indent + "no option dict for %r command",
+                            cmd_name)
+            else:
+                logger.info(indent + "option dict for %r command:", cmd_name)
+                out = pformat(opt_dict)
+                for line in out.split('\n'):
+                    logger.info(indent + "  " + line)
+
    # -- Config file finding/parsing methods ---------------------------
    # XXX to be removed
    def parse_config_files(self, filenames=None):
        # Thin delegation to the Config object created in __init__.
        return self.config.parse_config_files(filenames)

    def find_config_files(self):
        # Thin delegation to the Config object created in __init__.
        return self.config.find_config_files()
+
+    # -- Command-line parsing methods ----------------------------------
+
    def parse_command_line(self):
        """Parse the setup script's command line, taken from the
        'script_args' instance attribute (which defaults to 'sys.argv[1:]'
        -- see 'setup()' in run.py).  This list is first processed for
        "global options" -- options that set attributes of the Distribution
        instance.  Then, it is alternately scanned for Packaging commands
        and options for that command.  Each new command terminates the
        options for the previous command.  The allowed options for a
        command are determined by the 'user_options' attribute of the
        command class -- thus, we have to be able to load command classes
        in order to parse the command line.  Any error in that 'options'
        attribute raises PackagingGetoptError; any error on the
        command line raises PackagingArgError.  If no Packaging commands
        were found on the command line, raises PackagingArgError.  Return
        true if command line was successfully parsed and we should carry
        on with executing commands; false if no errors but we shouldn't
        execute commands (currently, this only happens if user asks for
        help).
        """
        # NOTE(review): a stale comment about a "Macintosh dialog" used to
        # live here (inherited from distutils); parsing below is purely
        # getopt-based.
        toplevel_options = self._get_toplevel_options()

        # We have to parse the command line a bit at a time -- global
        # options, then the first command, then its options, and so on --
        # because each command will be handled by a different class, and
        # the options that are valid for a particular class aren't known
        # until we have loaded the command class, which doesn't happen
        # until we know what the command is.

        self.commands = []
        parser = FancyGetopt(toplevel_options + self.display_options)
        parser.set_negative_aliases(self.negative_opt)
        # accept the British spelling as an alias
        parser.set_aliases({'licence': 'license'})
        # getopt stores parsed global option values as attributes of
        # 'self' (the object= argument) and returns the leftover args
        args = parser.getopt(args=self.script_args, object=self)
        option_order = parser.get_option_order()

        # for display options we return immediately (returns None, falsy)
        if self.handle_display_options(option_order):
            return

        # consume one command (plus its options) per iteration;
        # _parse_command_opts returns None if it displayed help
        while args:
            args = self._parse_command_opts(parser, args)
            if args is None:            # user asked for help (and got it)
                return

        # Handle the cases of --help as a "global" option, ie.
        # "setup.py --help" and "setup.py --help command ...".  For the
        # former, we show global options (--dry-run, etc.)
        # and display-only options (--name, --version, etc.); for the
        # latter, we omit the display-only options and show help for
        # each command listed on the command line.
        if self.help:
            self._show_help(parser,
                            display_options=len(self.commands) == 0,
                            commands=self.commands)
            return

        # truthy: caller should proceed to run the parsed commands
        return 1
+
+    def _get_toplevel_options(self):
+        """Return the non-display options recognized at the top level.
+
+        This includes options that are recognized *only* at the top
+        level as well as options recognized for commands.
+        """
+        return self.global_options
+
    def _parse_command_opts(self, parser, args):
        """Parse the command-line options for a single command.
        'parser' must be a FancyGetopt instance; 'args' must be the list
        of arguments, starting with the current command (whose options
        we are about to parse).  Returns a new version of 'args' with
        the next command at the front of the list; will be the empty
        list if there are no more commands on the command line.  Returns
        None if the user asked for help on this command.
        """
        # Pull the current command from the head of the command line
        command = args[0]
        if not command_re.match(command):
            raise SystemExit("invalid command name %r" % command)
        self.commands.append(command)

        # Dig up the command class that implements this command, so we
        # 1) know that it's a valid command, and 2) know which options
        # it takes.
        try:
            cmd_class = get_command_class(command)
        except PackagingModuleError as msg:
            raise PackagingArgError(msg)

        # XXX We want to push this in packaging.command
        #
        # Require that the command class be derived from Command -- want
        # to be sure that the basic "command" interface is implemented.
        for meth in ('initialize_options', 'finalize_options', 'run'):
            if hasattr(cmd_class, meth):
                continue
            raise PackagingClassError(
                'command %r must implement %r' % (cmd_class, meth))

        # Also make sure that the command object provides a list of its
        # known options.
        if not (hasattr(cmd_class, 'user_options') and
                isinstance(cmd_class.user_options, list)):
            raise PackagingClassError(
                "command class %s must provide "
                "'user_options' attribute (a list of tuples)" % cmd_class)

        # If the command class has a list of negative alias options,
        # merge it in with the global negative aliases.  Work on a copy
        # so the shared 'self.negative_opt' dict is never mutated.
        negative_opt = self.negative_opt
        if hasattr(cmd_class, 'negative_opt'):
            negative_opt = negative_opt.copy()
            negative_opt.update(cmd_class.negative_opt)

        # Check for help_options in command class.  They have a different
        # format (tuple of four: long option, short option, description,
        # callable) so we need to preprocess them here.
        if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
            help_options = cmd_class.help_options[:]
        else:
            help_options = []

        # All commands support the global options too, just by adding
        # in 'global_options'.
        parser.set_option_table(self.global_options +
                                cmd_class.user_options +
                                help_options)
        parser.set_negative_aliases(negative_opt)
        # 'opts' carries the parsed values as attributes; 'args' now
        # starts at the next command (or is empty)
        args, opts = parser.getopt(args[1:])
        if hasattr(opts, 'help') and opts.help:
            # "setup.py command --help": show help for this command only
            self._show_help(parser, display_options=False,
                            commands=[cmd_class])
            return

        if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
            help_option_found = False
            for help_option, short, desc, func in cmd_class.help_options:
                if hasattr(opts, help_option.replace('-', '_')):
                    help_option_found = True
                    if hasattr(func, '__call__'):
                        func()
                    else:
                        raise PackagingClassError(
                            "invalid help function %r for help option %r: "
                            "must be a callable object (function, etc.)"
                            % (func, help_option))

            if help_option_found:
                # a command-specific help option was handled: stop parsing
                return

        # Put the options from the command line into their official
        # holding pen, the 'command_options' dictionary.  Each value is
        # tagged with its source ("command line") for later error
        # reporting in _set_command_options().
        opt_dict = self.get_option_dict(command)
        for name, value in vars(opts).items():
            opt_dict[name] = ("command line", value)

        return args
+
+    def finalize_options(self):
+        """Set final values for all the options on the Distribution
+        instance, analogous to the .finalize_options() method of Command
+        objects.
+        """
+        if getattr(self, 'convert_2to3_doctests', None):
+            self.convert_2to3_doctests = [os.path.join(p)
+                                for p in self.convert_2to3_doctests]
+        else:
+            self.convert_2to3_doctests = []
+
+    def _show_help(self, parser, global_options=True, display_options=True,
+                   commands=[]):
+        """Show help for the setup script command line in the form of
+        several lists of command-line options.  'parser' should be a
+        FancyGetopt instance; do not expect it to be returned in the
+        same state, as its option table will be reset to make it
+        generate the correct help text.
+
+        If 'global_options' is true, lists the global options:
+        --dry-run, etc.  If 'display_options' is true, lists
+        the "display-only" options: --name, --version, etc.  Finally,
+        lists per-command help for every command name or command class
+        in 'commands'.
+        """
+        # late import because of mutual dependence between these modules
+        from packaging.command.cmd import Command
+
+        if global_options:
+            if display_options:
+                options = self._get_toplevel_options()
+            else:
+                options = self.global_options
+            parser.set_option_table(options)
+            parser.print_help(self.common_usage + "\nGlobal options:")
+            print('')
+
+        if display_options:
+            parser.set_option_table(self.display_options)
+            parser.print_help(
+                "Information display options (just display " +
+                "information, ignore any commands)")
+            print('')
+
+        for command in self.commands:
+            if isinstance(command, type) and issubclass(command, Command):
+                cls = command
+            else:
+                cls = get_command_class(command)
+            if (hasattr(cls, 'help_options') and
+                isinstance(cls.help_options, list)):
+                parser.set_option_table(cls.user_options + cls.help_options)
+            else:
+                parser.set_option_table(cls.user_options)
+            parser.print_help("Options for %r command:" % cls.__name__)
+            print('')
+
+        print(gen_usage(self.script_name))
+
+    def handle_display_options(self, option_order):
+        """If there were any non-global "display-only" options
+        (--help-commands or the metadata display options) on the command
+        line, display the requested info and return true; else return
+        false.
+        """
+        # User just wants a list of commands -- we'll print it out and stop
+        # processing now (ie. if they ran "setup --help-commands foo bar",
+        # we ignore "foo bar").
+        if self.help_commands:
+            self.print_commands()
+            print('')
+            print(gen_usage(self.script_name))
+            return 1
+
+        # If user supplied any of the "display metadata" options, then
+        # display that metadata in the order in which the user supplied the
+        # metadata options.
+        any_display_options = False
+        is_display_option = set()
+        for option in self.display_options:
+            is_display_option.add(option[0])
+
+        for opt, val in option_order:
+            if val and opt in is_display_option:
+                opt = opt.replace('-', '_')
+                value = self.metadata[opt]
+                if opt in ('keywords', 'platform'):
+                    print(','.join(value))
+                elif opt in ('classifier', 'provides', 'requires',
+                             'obsoletes'):
+                    print('\n'.join(value))
+                else:
+                    print(value)
+                any_display_options = True
+
+        return any_display_options
+
+    def print_command_list(self, commands, header, max_length):
+        """Print a subset of the list of all commands -- used by
+        'print_commands()'.
+        """
+        print(header + ":")
+
+        for cmd in commands:
+            cls = self.cmdclass.get(cmd) or get_command_class(cmd)
+            description = getattr(cls, 'description',
+                                  '(no description available)')
+
+            print("  %-*s  %s" % (max_length, cmd, description))
+
+    def _get_command_groups(self):
+        """Helper function to retrieve all the command class names divided
+        into standard commands (listed in
+        packaging2.command.STANDARD_COMMANDS) and extra commands (given in
+        self.cmdclass and not standard commands).
+        """
+        extra_commands = [cmd for cmd in self.cmdclass
+                          if cmd not in STANDARD_COMMANDS]
+        return STANDARD_COMMANDS, extra_commands
+
+    def print_commands(self):
+        """Print out a help message listing all available commands with a
+        description of each.  The list is divided into standard commands
+        (listed in packaging2.command.STANDARD_COMMANDS) and extra commands
+        (given in self.cmdclass and not standard commands).  The
+        descriptions come from the command class attribute
+        'description'.
+        """
+        std_commands, extra_commands = self._get_command_groups()
+        max_length = 0
+        for cmd in (std_commands + extra_commands):
+            if len(cmd) > max_length:
+                max_length = len(cmd)
+
+        self.print_command_list(std_commands,
+                                "Standard commands",
+                                max_length)
+        if extra_commands:
+            print()
+            self.print_command_list(extra_commands,
+                                    "Extra commands",
+                                    max_length)
+
+    # -- Command class/object methods ----------------------------------
+
+    def get_command_obj(self, command, create=True):
+        """Return the command object for 'command'.  Normally this object
+        is cached on a previous call to 'get_command_obj()'; if no command
+        object for 'command' is in the cache, then we either create and
+        return it (if 'create' is true) or return None.
+        """
+        cmd_obj = self.command_obj.get(command)
+        if not cmd_obj and create:
+            logger.debug("Distribution.get_command_obj(): " \
+                         "creating %r command object", command)
+
+            cls = get_command_class(command)
+            cmd_obj = self.command_obj[command] = cls(self)
+            self.have_run[command] = 0
+
+            # Set any options that were supplied in config files
+            # or on the command line.  (NB. support for error
+            # reporting is lame here: any errors aren't reported
+            # until 'finalize_options()' is called, which means
+            # we won't report the source of the error.)
+            options = self.command_options.get(command)
+            if options:
+                self._set_command_options(cmd_obj, options)
+
+        return cmd_obj
+
+    def _set_command_options(self, command_obj, option_dict=None):
+        """Set the options for 'command_obj' from 'option_dict'.  Basically
+        this means copying elements of a dictionary ('option_dict') to
+        attributes of an instance ('command').
+
+        'command_obj' must be a Command instance.  If 'option_dict' is not
+        supplied, uses the standard option dictionary for this command
+        (from 'self.command_options').
+        """
+        command_name = command_obj.get_command_name()
+        if option_dict is None:
+            option_dict = self.get_option_dict(command_name)
+
+        logger.debug("  setting options for %r command:", command_name)
+
+        for option, (source, value) in option_dict.items():
+            logger.debug("    %s = %s (from %s)", option, value, source)
+            try:
+                bool_opts = [x.replace('-', '_')
+                             for x in command_obj.boolean_options]
+            except AttributeError:
+                bool_opts = []
+            try:
+                neg_opt = command_obj.negative_opt
+            except AttributeError:
+                neg_opt = {}
+
+            try:
+                is_string = isinstance(value, str)
+                if option in neg_opt and is_string:
+                    setattr(command_obj, neg_opt[option], not strtobool(value))
+                elif option in bool_opts and is_string:
+                    setattr(command_obj, option, strtobool(value))
+                elif hasattr(command_obj, option):
+                    setattr(command_obj, option, value)
+                else:
+                    raise PackagingOptionError(
+                        "error in %s: command %r has no such option %r" %
+                        (source, command_name, option))
+            except ValueError as msg:
+                raise PackagingOptionError(msg)
+
    def get_reinitialized_command(self, command, reinit_subcommands=False):
        """Reinitializes a command to the state it was in when first
        returned by 'get_command_obj()': ie., initialized but not yet
        finalized.  This provides the opportunity to sneak option
        values in programmatically, overriding or supplementing
        user-supplied values from the config files and command line.
        You'll have to re-finalize the command object (by calling
        'finalize_options()' or 'ensure_finalized()') before using it for
        real.

        'command' should be a command name (string) or command object.  If
        'reinit_subcommands' is true, also reinitializes the command's
        sub-commands, as declared by the 'sub_commands' class attribute (if
        it has one).  See the "install_dist" command for an example.  Only
        reinitializes the sub-commands that actually matter, ie. those
        whose test predicates return true.

        Returns the reinitialized command object.
        """
        # late import because of mutual dependence between these modules
        from packaging.command.cmd import Command
        # accept either a command name or an already-built command object
        if not isinstance(command, Command):
            command_name = command
            command = self.get_command_obj(command_name)
        else:
            command_name = command.get_command_name()

        if not command.finalized:
            # never finalized: already in its freshly-initialized state
            return command
        command.initialize_options()
        self.have_run[command_name] = 0
        command.finalized = False
        # re-apply options from config files / command line
        self._set_command_options(command)

        if reinit_subcommands:
            for sub in command.get_sub_commands():
                self.get_reinitialized_command(sub, reinit_subcommands)

        return command
+
+    # -- Methods that operate on the Distribution ----------------------
+
+    def run_commands(self):
+        """Run each command that was seen on the setup script command line.
+        Uses the list of commands found and cache of command objects
+        created by 'get_command_obj()'.
+        """
+        for cmd in self.commands:
+            self.run_command(cmd)
+
+    # -- Methods that operate on its Commands --------------------------
+
+    def run_command(self, command, options=None):
+        """Do whatever it takes to run a command (including nothing at all,
+        if the command has already been run).  Specifically: if we have
+        already created and run the command named by 'command', return
+        silently without doing anything.  If the command named by 'command'
+        doesn't even have a command object yet, create one.  Then invoke
+        'run()' on that command object (or an existing one).
+        """
+        # Already been here, done that? then return silently.
+        if self.have_run.get(command):
+            return
+
+        if options is not None:
+            self.command_options[command] = options
+
+        cmd_obj = self.get_command_obj(command)
+        cmd_obj.ensure_finalized()
+        self.run_command_hooks(cmd_obj, 'pre_hook')
+        logger.info("running %s", command)
+        cmd_obj.run()
+        self.run_command_hooks(cmd_obj, 'post_hook')
+        self.have_run[command] = 1
+
+    def run_command_hooks(self, cmd_obj, hook_kind):
+        """Run hooks registered for that command and phase.
+
+        *cmd_obj* is a finalized command object; *hook_kind* is either
+        'pre_hook' or 'post_hook'.
+        """
+        if hook_kind not in ('pre_hook', 'post_hook'):
+            raise ValueError('invalid hook kind: %r' % hook_kind)
+
+        hooks = getattr(cmd_obj, hook_kind, None)
+
+        if hooks is None:
+            return
+
+        for hook in hooks.values():
+            if isinstance(hook, str):
+                try:
+                    hook_obj = resolve_name(hook)
+                except ImportError as e:
+                    raise PackagingModuleError(e)
+            else:
+                hook_obj = hook
+
+            if not hasattr(hook_obj, '__call__'):
+                raise PackagingOptionError('hook %r is not callable' % hook)
+
+            logger.info('running %s %s for command %s',
+                        hook_kind, hook, cmd_obj.get_command_name())
+            hook_obj(cmd_obj)
+
+    # -- Distribution query methods ------------------------------------
+    def has_pure_modules(self):
+        return len(self.packages or self.py_modules or []) > 0
+
+    def has_ext_modules(self):
+        return self.ext_modules and len(self.ext_modules) > 0
+
+    def has_c_libraries(self):
+        return self.libraries and len(self.libraries) > 0
+
+    def has_modules(self):
+        return self.has_pure_modules() or self.has_ext_modules()
+
+    def has_headers(self):
+        return self.headers and len(self.headers) > 0
+
+    def has_scripts(self):
+        return self.scripts and len(self.scripts) > 0
+
+    def has_data_files(self):
+        return self.data_files and len(self.data_files) > 0
+
+    def is_pure(self):
+        return (self.has_pure_modules() and
+                not self.has_ext_modules() and
+                not self.has_c_libraries())
diff --git a/Lib/packaging/errors.py b/Lib/packaging/errors.py
new file mode 100644
index 0000000..8924a2d
--- /dev/null
+++ b/Lib/packaging/errors.py
@@ -0,0 +1,142 @@
+"""Exceptions used throughout the package.
+
+Submodules of packaging may raise exceptions defined in this module as
+well as standard exceptions; in particular, SystemExit is usually raised
+for errors that are obviously the end-user's fault (e.g. bad
+command-line arguments).
+"""
+
+
class PackagingError(Exception):
    """Base class for all exceptions raised by the packaging package."""


class PackagingModuleError(PackagingError):
    """A needed module could not be loaded, or an expected class was
    missing from a module (in particular, command modules and
    classes)."""


class PackagingClassError(PackagingError):
    """A command class (or possibly a distribution class, if anyone
    feels a need to subclass Distribution) does not hold up its end of
    the bargain, i.e. does not implement some part of the "command"
    interface."""


class PackagingGetoptError(PackagingError):
    """The option table provided to 'fancy_getopt()' is bogus."""


class PackagingArgError(PackagingError):
    """Raised by fancy_getopt in response to getopt.error, i.e. an
    error in command line usage."""


class PackagingFileError(PackagingError):
    """A filesystem problem: expected file not found, etc.  Typically
    for problems detected before IOError or OSError could be raised."""


class PackagingOptionError(PackagingError):
    """Syntactic/semantic error in command options: mutually
    conflicting or inconsistent options, badly-spelled values, etc.
    No distinction is made between option values originating in the
    setup script, the command line, config files, or elsewhere -- but
    if we *know* something originated in the setup script,
    PackagingSetupError is raised instead."""


class PackagingSetupError(PackagingError):
    """An error that can be definitely blamed on the setup script,
    such as invalid keyword arguments to 'setup()'."""


class PackagingPlatformError(PackagingError):
    """We don't know how to do something on the current platform,
    though we do know how on some other platform -- e.g. trying to
    compile C files on a platform not supported by a CCompiler
    subclass."""


class PackagingExecError(PackagingError):
    """A problem executing an external program (such as the C
    compiler, when compiling C files)."""


class PackagingInternalError(PackagingError):
    """Internal inconsistency or impossibility (obviously, this should
    never be seen if the code is working!)."""


class PackagingTemplateError(PackagingError):
    """Syntax error in a file list template."""


class PackagingByteCompileError(PackagingError):
    """Byte compile error."""


class PackagingPyPIError(PackagingError):
    """Any problem occurring while using the indexes."""
+
+
# Exception classes used by the CCompiler implementation classes
class CCompilerError(Exception):
    """Base class for compile/link failures."""


class PreprocessError(CCompilerError):
    """Failed to preprocess one or more C/C++ files."""


class CompileError(CCompilerError):
    """Failed to compile one or more C/C++ source files."""


class LibError(CCompilerError):
    """Failed to create a static library from one or more C/C++ object
    files."""


class LinkError(CCompilerError):
    """Failed to link one or more C/C++ object files into an
    executable or shared library file."""


class UnknownFileError(CCompilerError):
    """Attempted to process an unknown file type."""
+
+
class MetadataMissingError(PackagingError):
    """A required metadata field is missing."""


class MetadataConflictError(PackagingError):
    """Attempted to read or write conflicting metadata fields."""


class MetadataUnrecognizedVersionError(PackagingError):
    """Unknown metadata version number."""


class IrrationalVersionError(Exception):
    """This is an irrational version."""


class HugeMajorVersionNumError(IrrationalVersionError):
    """An irrational version because the major version number is huge
    (often because a year or date was used).

    See `error_on_huge_major_num` option in `NormalizedVersion` for details.
    This guard can be disabled by setting that option False.
    """


class InstallationException(Exception):
    """Base exception for installation scripts."""


class InstallationConflict(InstallationException):
    """Raised when a conflict is detected during installation."""
diff --git a/Lib/packaging/fancy_getopt.py b/Lib/packaging/fancy_getopt.py
new file mode 100644
index 0000000..0490864
--- /dev/null
+++ b/Lib/packaging/fancy_getopt.py
@@ -0,0 +1,451 @@
+"""Command line parsing machinery.
+
The FancyGetopt class is a wrapper around the getopt module that
+provides the following additional features:
+  * short and long options are tied together
+  * options have help strings, so fancy_getopt could potentially
+    create a complete usage summary
+  * options set attributes of a passed-in object.
+
+It is used under the hood by the command classes.  Do not use directly.
+"""
+
+import getopt
+import re
+import sys
+import string
+import textwrap
+
+from packaging.errors import PackagingGetoptError, PackagingArgError
+
# Much like command_re in packaging.core, this is close to but not quite
# the same as a Python NAME: in the spirit of most GNU utilities, '-' is
# used in place of '_'.  (The spirit of LISP lives on!)
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile('^{0}$'.format(longopt_pat))

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile('^({0})=!({0})$'.format(longopt_pat))
+
+
class FancyGetopt:
    """Wrapper around the standard 'getopt()' module that provides some
    handy extra functionality:
      * short and long options are tied together
      * options have help strings, and help text can be assembled
        from them
      * options set attributes of a passed-in object
      * boolean options can have "negative aliases" -- eg. if
        --quiet is the "negative alias" of --verbose, then "--quiet"
        on the command line sets 'verbose' to false
    """

    def __init__(self, option_table=None):
        # The option table is (currently) a list of 3- or 4-tuples:
        #   (long_option, short_option, help_string [, repeatable])
        # If an option takes an argument, its long_option should have '='
        # appended; short_option should just be a single character, no ':'
        # in any case.  If a long_option doesn't have a corresponding
        # short_option, short_option should be None.  All option tuples
        # must have long options.
        self.option_table = option_table

        # 'option_index' maps long option names to entries in the option
        # table (ie. those tuples).
        self.option_index = {}
        if self.option_table:
            self._build_index()

        # 'alias' records alias options; {'foo': 'bar'} means
        # --foo is an alias for --bar
        self.alias = {}

        # 'negative_alias' keeps track of options that are the boolean
        # opposite of some other option
        self.negative_alias = {}

        # These keep track of the information in the option table.  We
        # don't actually populate these structures until we're ready to
        # parse the command line, since the 'option_table' passed in here
        # isn't necessarily the final word.
        self.short_opts = []
        self.long_opts = []
        self.short2long = {}
        self.attr_name = {}
        self.takes_arg = {}

        # 'option_order' is filled up in 'getopt()'; it records the
        # original order of options (and their values) on the command line,
        # but expands short options, converts aliases, etc.
        self.option_order = []

    def _build_index(self):
        """Rebuild 'option_index' from 'option_table'."""
        self.option_index.clear()
        for option in self.option_table:
            self.option_index[option[0]] = option

    def set_option_table(self, option_table):
        """Replace the option table and rebuild the index."""
        self.option_table = option_table
        self._build_index()

    def add_option(self, long_option, short_option=None, help_string=None):
        """Append one option to the table.

        Raises PackagingGetoptError if 'long_option' is already defined.
        """
        if long_option in self.option_index:
            raise PackagingGetoptError(
                  "option conflict: already an option '%s'" % long_option)
        else:
            option = (long_option, short_option, help_string)
            self.option_table.append(option)
            self.option_index[long_option] = option

    def has_option(self, long_option):
        """Return true if the option table for this parser has an
        option with long name 'long_option'."""
        return long_option in self.option_index

    def _check_alias_dict(self, aliases, what):
        """Check that both sides of every alias pair name a known option."""
        assert isinstance(aliases, dict)
        for alias, opt in aliases.items():
            if alias not in self.option_index:
                raise PackagingGetoptError(
                      ("invalid %s '%s': "
                       "option '%s' not defined") % (what, alias, alias))
            if opt not in self.option_index:
                raise PackagingGetoptError(
                      ("invalid %s '%s': "
                       "aliased option '%s' not defined") % (what, alias, opt))

    def set_aliases(self, alias):
        """Set the aliases for this option parser."""
        self._check_alias_dict(alias, "alias")
        self.alias = alias

    def set_negative_aliases(self, negative_alias):
        """Set the negative aliases for this option parser.

        'negative_alias' should be a dictionary mapping option names to
        option names, both the key and value must already be defined
        in the option table.
        """
        self._check_alias_dict(negative_alias, "negative alias")
        self.negative_alias = negative_alias

    def _grok_option_table(self):
        """Populate the various data structures that keep tabs on the
        option table.  Called by 'getopt()' before it can do anything
        worthwhile.
        """
        self.long_opts = []
        self.short_opts = []
        self.short2long.clear()
        self.repeat = {}

        for option in self.option_table:
            # 'long_opt' was misleadingly named 'integer' here before,
            # although it is always a string
            if len(option) == 3:
                long_opt, short, help = option
                repeat = 0
            elif len(option) == 4:
                long_opt, short, help, repeat = option
            else:
                # the option table is part of the code, so simply
                # assert that it is correct
                raise ValueError("invalid option tuple: %r" % option)

            # Type- and value-check the option names
            if not isinstance(long_opt, str) or len(long_opt) < 2:
                raise PackagingGetoptError(
                      ("invalid long option '%s': "
                       "must be a string of length >= 2") % long_opt)

            if (not ((short is None) or
                     (isinstance(short, str) and len(short) == 1))):
                raise PackagingGetoptError(
                      ("invalid short option '%s': "
                       "must be a single character or None") % short)

            # NOTE(review): 'repeat' is keyed by the long option *with* any
            # trailing '=', while the lookup in getopt() uses the attribute
            # name; the two only coincide for plain boolean options such as
            # 'verbose' -- TODO confirm intent
            self.repeat[long_opt] = repeat
            self.long_opts.append(long_opt)

            if long_opt[-1] == '=':             # option takes an argument?
                if short:
                    short = short + ':'
                long_opt = long_opt[0:-1]
                self.takes_arg[long_opt] = 1
            else:
                # Is option a "negative alias" for some other option (eg.
                # "quiet" == "!verbose")?
                alias_to = self.negative_alias.get(long_opt)
                if alias_to is not None:
                    if self.takes_arg[alias_to]:
                        raise PackagingGetoptError(
                              ("invalid negative alias '%s': "
                               "aliased option '%s' takes a value")
                              % (long_opt, alias_to))

                    self.long_opts[-1] = long_opt   # XXX redundant?!

                # both branches of the old code set this to 0
                self.takes_arg[long_opt] = 0

            # If this is an alias option, make sure its "takes arg" flag is
            # the same as the option it's aliased to.
            alias_to = self.alias.get(long_opt)
            if alias_to is not None:
                if self.takes_arg[long_opt] != self.takes_arg[alias_to]:
                    # fixed: the message was missing its closing parenthesis
                    raise PackagingGetoptError(
                          ("invalid alias '%s': inconsistent with "
                           "aliased option '%s' (one of them takes a value, "
                           "the other doesn't)") % (long_opt, alias_to))

            # Now enforce some bondage on the long option name, so we can
            # later translate it to an attribute name on some object.  Have
            # to do this a bit late to make sure we've removed any trailing
            # '='.
            if not longopt_re.match(long_opt):
                # fixed: the message was missing its closing parenthesis
                raise PackagingGetoptError(
                      ("invalid long option name '%s' "
                       "(must be letters, numbers, hyphens only)") % long_opt)

            self.attr_name[long_opt] = long_opt.replace('-', '_')
            if short:
                self.short_opts.append(short)
                self.short2long[short[0]] = long_opt

    def getopt(self, args=None, object=None):
        """Parse command-line options in args. Store as attributes on object.

        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
        'object' is None or not supplied, creates a new OptionDummy
        object, stores option values there, and returns a tuple (args,
        object).  If 'object' is supplied, it is modified in place and
        'getopt()' just returns 'args'; in both cases, the returned
        'args' is a modified copy of the passed-in 'args' list, which
        is left untouched.
        """
        if args is None:
            args = sys.argv[1:]
        if object is None:
            object = OptionDummy()
            created_object = True
        else:
            created_object = False

        self._grok_option_table()

        # NOTE(review): joined with spaces, so the getopt short-option
        # string contains ' ' characters; harmless for normal input but
        # looks unintentional -- TODO confirm
        short_opts = ' '.join(self.short_opts)

        try:
            opts, args = getopt.getopt(args, short_opts, self.long_opts)
        except getopt.error as msg:
            raise PackagingArgError(msg)

        for opt, val in opts:
            if len(opt) == 2 and opt[0] == '-':   # it's a short option
                opt = self.short2long[opt[1]]
            else:
                assert len(opt) > 2 and opt[:2] == '--'
                opt = opt[2:]

            alias = self.alias.get(opt)
            if alias:
                opt = alias

            if not self.takes_arg[opt]:     # boolean option?
                assert val == '', "boolean option can't have value"
                alias = self.negative_alias.get(opt)
                if alias:
                    opt = alias
                    val = 0
                else:
                    val = 1

            attr = self.attr_name[opt]
            # The only repeating option at the moment is 'verbose'.
            # It has a negative option -q quiet, which should set verbose = 0.
            if val and self.repeat.get(attr) is not None:
                val = getattr(object, attr, 0) + 1
            setattr(object, attr, val)
            self.option_order.append((opt, val))

        if created_object:
            return args, object
        else:
            return args

    def get_option_order(self):
        """Return the list of (option, value) tuples processed by the
        previous run of 'getopt()'.
        """
        if self.option_order is None:
            # NOTE(review): unreachable with the current __init__, which
            # initializes option_order to []; kept as a guard.  The dead
            # duplicate "return self.option_order" after this if/else was
            # removed.
            raise RuntimeError("'getopt()' hasn't been called yet")
        return self.option_order

    def generate_help(self, header=None):
        """Generate help text (a list of strings, one per suggested line of
        output) from the option table for this FancyGetopt object.
        """
        # Blithely assume the option table is good: probably wouldn't call
        # 'generate_help()' unless you've already called 'getopt()'.

        # First pass: determine maximum length of long option names
        max_opt = 0
        for option in self.option_table:
            long_opt = option[0]
            short = option[1]
            opt_len = len(long_opt)
            if long_opt[-1] == '=':
                opt_len = opt_len - 1
            if short is not None:
                opt_len = opt_len + 5       # " (-x)" where short == 'x'
            if opt_len > max_opt:
                max_opt = opt_len

        opt_width = max_opt + 2 + 2 + 2     # room for indent + dashes + gutter

        # Typical help block looks like this:
        #   --foo       controls foonabulation
        # with the explanation wrapped at the line width and continuation
        # lines indented to the explanation column.  Options with short
        # names get " (-x)" appended (already counted in max_opt above).
        # Important parameters:
        #   - 2 spaces before option block start lines
        #   - 2 dashes for each long option name
        #   - min. 2 spaces between option and explanation (gutter)
        #   - 5 characters (incl. space) for short option name

        # Now generate lines of help text.  (If 80 columns were good enough
        # for Jesus, then 78 columns are good enough for me!)
        line_width = 78
        text_width = line_width - opt_width
        big_indent = ' ' * opt_width
        if header:
            lines = [header]
        else:
            lines = ['Option summary:']

        for option in self.option_table:
            long_opt, short, help = option[:3]
            # guard against help_string=None (add_option's default), which
            # used to crash textwrap.wrap
            text = textwrap.wrap(help or '', text_width)

            # Case 1: no short option at all (makes life easy)
            if short is None:
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, long_opt, text[0]))
                else:
                    lines.append("  --%-*s  " % (max_opt, long_opt))

            # Case 2: we have a short option, so we have to include it
            # just after the long option
            else:
                opt_names = "%s (-%s)" % (long_opt, short)
                if text:
                    lines.append("  --%-*s  %s" %
                                 (max_opt, opt_names, text[0]))
                else:
                    # fixed: "%-*s" needs a width argument; the original
                    # passed only opt_names and raised TypeError
                    lines.append("  --%-*s" % (max_opt, opt_names))

            for line in text[1:]:
                lines.append(big_indent + line)

        return lines

    def print_help(self, header=None, file=None):
        """Write the generated help text to *file* (default sys.stdout)."""
        if file is None:
            file = sys.stdout
        for line in self.generate_help(header):
            file.write(line + "\n")
+
+
def fancy_getopt(options, negative_opt, object, args):
    """Parse 'args' with a one-shot FancyGetopt instance.

    Convenience wrapper: builds the parser from 'options', registers the
    negative aliases in 'negative_opt' and runs 'getopt()'.
    """
    wrapper = FancyGetopt(options)
    wrapper.set_negative_aliases(negative_opt)
    return wrapper.getopt(args, object)
+
+
+WS_TRANS = str.maketrans(string.whitespace, ' ' * len(string.whitespace))
+
+
def wrap_text(text, width):
    """Split *text* into lines of no more than *width* characters each.

    Breaks preferably at runs of spaces or hyphens, hard-splitting any
    single chunk longer than *width*.  *text* is a str and *width* an
    int.  Returns a list of str.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    # normalize: expand tabs, then map every whitespace kind to ' '
    normalized = text.expandtabs().translate(
        str.maketrans(string.whitespace, ' ' * len(string.whitespace)))

    # split into words, runs of spaces and runs of hyphens, dropping the
    # empty strings that ' - ' sequences produce
    chunks = [chunk for chunk in re.split(r'( +|-+)', normalized) if chunk]
    lines = []

    while chunks:
        cur_line = []                  # chunks composing the current line
        cur_len = 0                    # its length so far

        while chunks:
            chunk_len = len(chunks[0])
            if cur_len + chunk_len > width:
                # line is full -- drop a trailing all-space chunk
                if cur_line and cur_line[-1][0] == ' ':
                    del cur_line[-1]
                break
            cur_line.append(chunks.pop(0))
            cur_len += chunk_len

        if chunks:
            # a single chunk wider than 'width': hard-split it at the
            # line width
            if cur_len == 0:
                cur_line.append(chunks[0][:width])
                chunks[0] = chunks[0][width:]

            # whitespace at the start of the next line is discarded
            # (each chunk is either all-space or space-free, per the
            # re.split above)
            if chunks[0][0] == ' ':
                del chunks[0]

        lines.append(''.join(cur_line))

    return lines
+
+
class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__(self, options=()):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' will be initialized to None.

        The default is an (immutable) empty tuple; the mutable-list
        default it replaces was a latent shared-state hazard.
        """
        for opt in options:
            setattr(self, opt, None)
diff --git a/Lib/packaging/install.py b/Lib/packaging/install.py
new file mode 100644
index 0000000..3904727
--- /dev/null
+++ b/Lib/packaging/install.py
@@ -0,0 +1,483 @@
+"""Building blocks for installers.
+
+When used as a script, this module installs a release thanks to info
+obtained from an index (e.g. PyPI), with dependencies.
+
+This is a higher-level module built on packaging.database and
+packaging.pypi.
+"""
+
import errno
import logging
import os
import shutil
import stat
import subprocess
import sys
import tempfile
from sysconfig import get_config_var
+
+from packaging import logger
+from packaging.dist import Distribution
+from packaging.util import (_is_archive_file, ask, get_install_method,
+                            egginfo_to_distinfo)
+from packaging.pypi import wrapper
+from packaging.version import get_version_predicate
+from packaging.database import get_distributions, get_distribution
+from packaging.depgraph import generate_graph
+
+from packaging.errors import (PackagingError, InstallationException,
+                              InstallationConflict, CCompilerError)
+from packaging.pypi.errors import ProjectNotFound, ReleaseNotFound
+
# Public API of this module; the '_'-prefixed helpers are internal.
__all__ = ['install_dists', 'install_from_infos', 'get_infos', 'remove',
           'install', 'install_local_project']
+
+
+def _move_files(files, destination):
+    """Move the list of files in the destination folder, keeping the same
+    structure.
+
+    Return a list of tuple (old, new) emplacement of files
+
+    :param files: a list of files to move.
+    :param destination: the destination directory to put on the files.
+                        if not defined, create a new one, using mkdtemp
+    """
+    if not destination:
+        destination = tempfile.mkdtemp()
+
+    for old in files:
+        # not using os.path.join() because basename() might not be
+        # unique in destination
+        new = "%s%s" % (destination, old)
+
+        # try to make the paths.
+        try:
+            os.makedirs(os.path.dirname(new))
+        except OSError as e:
+            if e.errno == errno.EEXIST:
+                pass
+            else:
+                raise e
+        os.rename(old, new)
+        yield old, new
+
+
def _run_distutils_install(path):
    """Install with a plain-distutils setup.py (backward compat).

    Runs "setup.py install --record=..." in a child interpreter, then
    converts the egg-info metadata to PEP 376 dist-info.
    Raises ValueError if no RECORD file was produced.
    """
    record_file = os.path.join(path, 'RECORD')
    # argument list instead of a shell string: paths containing spaces
    # or shell metacharacters are passed through intact
    subprocess.call([sys.executable, 'setup.py', 'install',
                     '--record=' + record_file])
    if not os.path.exists(record_file):
        raise ValueError('failed to install')
    else:
        egginfo_to_distinfo(record_file, remove_egginfo=True)
+
+
def _run_setuptools_install(path):
    """Install with a setuptools setup.py (backward compat).

    Like _run_distutils_install but passes
    --single-version-externally-managed so setuptools does a flat,
    non-egg install.  Raises ValueError if no RECORD file was produced.
    """
    record_file = os.path.join(path, 'RECORD')
    # argument list instead of a shell string: paths containing spaces
    # or shell metacharacters are passed through intact
    subprocess.call([sys.executable, 'setup.py', 'install',
                     '--record=' + record_file,
                     '--single-version-externally-managed'])
    if not os.path.exists(record_file):
        raise ValueError('failed to install')
    else:
        egginfo_to_distinfo(record_file, remove_egginfo=True)
+
+
def _run_packaging_install(path):
    """Run the packaging 'install_dist' command from setup.cfg.

    NOTE(review): *path* is unused here; the caller is expected to have
    chdir'ed into the source directory first -- TODO confirm.
    """
    # XXX check for a valid setup.cfg?
    dist = Distribution()
    dist.parse_config_files()
    try:
        dist.run_command('install_dist')
    except (IOError, os.error, PackagingError, CCompilerError) as exc:
        raise SystemExit("error: " + str(exc))
+
+
+def _install_dist(dist, path):
+    """Install a distribution into a path.
+
+    This:
+
+    * unpack the distribution
+    * copy the files in "path"
+    * determine if the distribution is packaging or distutils1.
+    """
+    where = dist.unpack()
+
+    if where is None:
+        raise ValueError('Cannot locate the unpacked archive')
+
+    return _run_install_from_archive(where)
+
+
def install_local_project(path):
    """Install a distribution from a local source directory or archive.

    A directory is installed straight from its sources; an archive is
    unpacked into a temporary directory first.  Anything else only logs
    a warning.
    """
    path = os.path.abspath(path)
    if os.path.isdir(path):
        logger.info('installing from source directory: %s', path)
        _run_install_from_dir(path)
    elif _is_archive_file(path):
        logger.info('installing from archive: %s', path)
        unpacked = tempfile.mkdtemp()
        shutil.unpack_archive(path, unpacked)
        _run_install_from_archive(unpacked)
    else:
        logger.warning('no projects to install')
+
+
def _run_install_from_archive(source_dir):
    """Install from an unpacked archive directory.

    The real sources live in a single top-level directory inside the
    unpacked archive, so descend into the first subdirectory found.
    """
    # XXX need a better way
    for entry in os.listdir(source_dir):
        candidate = os.path.join(source_dir, entry)
        if os.path.isdir(candidate):
            source_dir = candidate
            break
    return _run_install_from_dir(source_dir)
+
+
# Dispatch table: maps the project kind detected by get_install_method()
# to the helper that performs the installation.
install_methods = {
    'packaging': _run_packaging_install,
    'setuptools': _run_setuptools_install,
    'distutils': _run_distutils_install}
+
+
def _run_install_from_dir(source_dir):
    """Run the appropriate installer on *source_dir*.

    Changes into the directory for the duration of the install and
    always restores the previous working directory.
    """
    old_dir = os.getcwd()
    os.chdir(source_dir)
    try:
        # resolve the installer once (the original looked it up twice);
        # inside the try so a failed lookup still restores the cwd
        func = install_methods[get_install_method(source_dir)]
        return func(source_dir)
    finally:
        os.chdir(old_dir)
+
+
def install_dists(dists, path, paths=sys.path):
    """Install all distributions provided in dists, with the given prefix.

    If an error occurs while installing one of the distributions, uninstall
    all the installed distributions (in the context of this function).

    Return a list of installed dists.

    :param dists: distributions to install
    :param path: base path to install distribution in
    :param paths: list of paths (defaults to sys.path) to look for info
    """
    if not path:
        path = tempfile.mkdtemp()

    installed_dists = []
    for dist in dists:
        logger.info('installing %s %s', dist.name, dist.version)
        try:
            _install_dist(dist, path)
            installed_dists.append(dist)
        except Exception as e:
            logger.info('failed: %s', e)

            # revert everything installed so far, then re-raise with the
            # original traceback intact ("raise e" used to reset it)
            for installed_dist in installed_dists:
                logger.info('reverting %s', installed_dist)
                _remove_dist(installed_dist, paths)
            raise
    return installed_dists
+
+
def install_from_infos(install_path=None, install=[], remove=[], conflicts=[],
                       paths=sys.path):
    """Install and remove the given distributions.

    The function signature is made to be compatible with the one of get_infos.
    The aim of this script is to provide a way to install/remove what's asked,
    and to roll back if needed.

    So, it's not possible to be in an inconsistent state; it will be either
    installed or uninstalled, not half-installed.

    The process follows those steps:

        1. Move all distributions that will be removed to a temporary location
        2. Install all the distributions that will be installed in a temp. loc.
        3. If the installation fails, roll back (eg. move back) those
           distributions, or remove what has been installed.
        4. Else, move the distributions to the right locations, and remove for
           real the distributions that need to be removed.

    :param install_path: the installation path where we want to install the
                         distributions.
    :param install: list of distributions that will be installed; install_path
                    must be provided if this list is not empty.
    :param remove: list of distributions that will be removed.
    :param conflicts: list of conflicting distributions, eg. that will be in
                      conflict once the install and remove distributions are
                      processed.
    :param paths: list of paths (defaults to sys.path) to look for info

    (The mutable default arguments are kept for signature compatibility;
    none of them is ever mutated here.)
    """
    # first of all, if we have conflicts, stop here.
    if conflicts:
        raise InstallationConflict(conflicts)

    if install and not install_path:
        raise ValueError("Distributions are to be installed but `install_path`"
                         " is not provided.")

    # before removing the files, move them away; if any error occurs,
    # they can be put back in place.
    temp_files = {}  # maps dist -> list of (old, new) file paths
    temp_dir = None
    if remove:
        temp_dir = tempfile.mkdtemp()
        for dist in remove:
            files = dist.list_installed_files()
            # _move_files is a generator: exhaust it *now* so the files
            # really are moved aside before the installation is
            # attempted (storing the raw generator, as before, moved
            # nothing until the cleanup loops ran)
            temp_files[dist] = list(_move_files(files, temp_dir))
    try:
        if install:
            install_dists(install, install_path, paths)
    except:
        # if an error occurs, put back the files in the right place.
        for files in temp_files.values():
            for old, new in files:
                shutil.move(new, old)
        if temp_dir:
            shutil.rmtree(temp_dir)
        # now re-raising
        raise

    # the removal succeeded and nothing needs rolling back: delete the
    # moved-aside copies for good
    for files in temp_files.values():
        for old, new in files:
            os.remove(new)
    if temp_dir:
        shutil.rmtree(temp_dir)
+
+
+def _get_setuptools_deps(release):
+    # NotImplementedError
+    pass
+
+
def get_infos(requirements, index=None, installed=None, prefer_final=True):
    """Return the information on what's going to be installed and upgraded.

    :param requirements: is a *string* containing the requirements for this
                         project (for instance "FooBar 1.1" or "BarBaz (<1.2)")
    :param index: If an index is specified, use this one, otherwise, use
                  :class index.ClientWrapper: to get project metadata.
    :param installed: a list of already installed distributions.
    :param prefer_final: when picking up the releases, prefer a "final" one
                         over a beta/alpha/etc one.

    The results are returned in a dict, containing all the operations
    needed to install the given requirements::

        >>> get_infos("FooBar (<=1.2)")
        {'install': [<FooBar 1.1>], 'remove': [], 'conflict': []}

    Conflict contains all the conflicting distributions, if there is a
    conflict.
    """
    # this function does several things:
    # 1. get a release specified by the requirements
    # 2. gather its metadata, using setuptools compatibility if needed
    # 3. compare this tree with what is currently installed on the system,
    #    return the requirements of what is missing
    # 4. do that recursively and merge back the results
    # 5. return a dict containing information about what is needed to install
    #    or remove
    # NOTE(review): 'prefer_final' is never used below -- TODO confirm.

    if not installed:
        logger.info('reading installed distributions')
        installed = list(get_distributions(use_egg_info=True))

    infos = {'install': [], 'remove': [], 'conflict': []}
    # Is a compatible version of the project already installed ?
    predicate = get_version_predicate(requirements)
    found = False

    # check that the project isn't already installed
    for installed_project in installed:
        # is it a compatible project ?
        if predicate.name.lower() != installed_project.name.lower():
            continue
        found = True
        logger.info('found %s %s', installed_project.name,
                    installed_project.metadata['version'])

        # if we already have something installed, check it matches the
        # requirements
        if predicate.match(installed_project.metadata['version']):
            return infos
        break

    if not found:
        logger.info('project not installed')

    if not index:
        index = wrapper.ClientWrapper()

    # NOTE(review): dead in practice -- 'installed' was already filled in
    # above unless get_distributions() yielded nothing, and this would
    # then rebind it to a generator instead of a list -- TODO confirm.
    if not installed:
        installed = get_distributions(use_egg_info=True)

    # Get all the releases that match the requirements
    try:
        release = index.get_release(requirements)
    except (ReleaseNotFound, ProjectNotFound):
        raise InstallationException('Release not found: "%s"' % requirements)

    if release is None:
        logger.info('could not find a matching project')
        return infos

    metadata = release.fetch_metadata()

    # we need to build setuptools deps if any
    if 'requires_dist' not in metadata:
        metadata['requires_dist'] = _get_setuptools_deps(release)

    # build the dependency graph with local and required dependencies
    dists = list(installed)
    dists.append(release)
    depgraph = generate_graph(dists)

    # Get what the missing deps are
    dists = depgraph.missing[release]
    if dists:
        logger.info("missing dependencies found, retrieving metadata")
        # we have missing deps
        for dist in dists:
            _update_infos(infos, get_infos(dist, index, installed))

    # Fill in the infos
    existing = [d for d in installed if d.name == release.name]
    if existing:
        infos['remove'].append(existing[0])
        infos['conflict'].extend(depgraph.reverse_list[existing[0]])
    infos['install'].append(release)
    return infos
+
+
+def _update_infos(infos, new_infos):
+    """extends the lists contained in the `info` dict with those contained
+    in the `new_info` one
+    """
+    for key, value in infos.items():
+        if key in new_infos:
+            infos[key].extend(new_infos[key])
+
+
def _remove_dist(dist, paths=sys.path):
    """Uninstall *dist* by name (thin wrapper around remove())."""
    remove(dist.name, paths)
+
+
def remove(project_name, paths=sys.path, auto_confirm=True):
    """Removes a single project from the installation.

    :param project_name: name of the installed distribution to remove
    :param paths: list of paths (defaults to sys.path) searched for the
                  distribution's metadata
    :param auto_confirm: when true, delete without prompting
    :raises PackagingError: if the distribution is not found
    """
    dist = get_distribution(project_name, use_egg_info=True, paths=paths)
    if dist is None:
        raise PackagingError('Distribution "%s" not found' % project_name)
    # list_installed_files() yields (path, md5, size) triples
    files = dist.list_installed_files(local=True)
    rmdirs = []
    rmfiles = []
    tmp = tempfile.mkdtemp(prefix=project_name + '-uninstall')
    try:
        for file_, md5, size in files:
            if os.path.isfile(file_):
                dirname, filename = os.path.split(file_)
                tmpfile = os.path.join(tmp, filename)
                # NOTE(review): after a successful rename, file_ no longer
                # exists, so the finally clause immediately renames it
                # back -- the net effect is only a *check* that the file
                # can be moved; the real deletion happens below.  TODO
                # confirm this is the intent.
                try:
                    os.rename(file_, tmpfile)
                finally:
                    if not os.path.isfile(file_):
                        os.rename(tmpfile, file_)
                if file_ not in rmfiles:
                    rmfiles.append(file_)
                if dirname not in rmdirs:
                    rmdirs.append(dirname)
    finally:
        shutil.rmtree(tmp)

    logger.info('removing %r: ', project_name)

    for file_ in rmfiles:
        logger.info('  %s', file_)

    # Taken from the pip project
    if auto_confirm:
        response = 'y'
    else:
        response = ask('Proceed (y/n)? ', ('y', 'n'))

    if response == 'y':
        file_count = 0
        for file_ in rmfiles:
            os.remove(file_)
            file_count += 1

        dir_count = 0
        for dirname in rmdirs:
            if not os.path.exists(dirname):
                # could have been removed already
                continue

            files_count = 0
            for root, dir, files in os.walk(dirname):
                files_count += len(files)

            if files_count > 0:
                # directory still holds files not owned by this dist:
                # leave it alone
                # XXX Warning
                continue

            # empty dirs with only empty dirs
            if os.stat(dirname).st_mode & stat.S_IWUSR:
                # XXX Add a callable in shutil.rmtree to count
                # the number of deleted elements
                shutil.rmtree(dirname)
                dir_count += 1

        # removing the top path
        # XXX count it ?
        if os.path.exists(dist.path):
            shutil.rmtree(dist.path)

        logger.info('success: removed %d files and %d dirs',
                    file_count, dir_count)
+
+
def install(project):
    """Install *project* (a requirement string) with its dependencies.

    Looks the project up in the index, computes the install/remove
    operations, and applies them under the 'base' sysconfig path.
    Conflicts are logged rather than raised.
    """
    logger.info('getting information about %r', project)
    try:
        info = get_infos(project)
    except InstallationException:
        # message typo fixed ("cound" -> "could")
        logger.info('could not find %r', project)
        return

    if info['install'] == []:
        logger.info('nothing to install')
        return

    install_path = get_config_var('base')
    try:
        install_from_infos(install_path,
                           info['install'], info['remove'], info['conflict'])

    except InstallationConflict as e:
        if logger.isEnabledFor(logging.INFO):
            projects = ['%s %s' % (p.name, p.version) for p in e.args[0]]
            logger.info('%r conflicts with %s', project, ','.join(projects))
+
+
def _main(**attrs):
    """Script entry point: install the requirement named on the command
    line (first argument) unless 'script_args' was passed explicitly."""
    if 'script_args' not in attrs:
        # sys is already imported at module level; the redundant local
        # "import sys" that shadowed it was removed
        attrs['requirements'] = sys.argv[1]
    get_infos(**attrs)
+
+if __name__ == '__main__':
+    _main()
diff --git a/Lib/packaging/manifest.py b/Lib/packaging/manifest.py
new file mode 100644
index 0000000..a379853
--- /dev/null
+++ b/Lib/packaging/manifest.py
@@ -0,0 +1,372 @@
+"""Class representing the list of files in a distribution.
+
+The Manifest class can be used to:
+
+ - read or write a MANIFEST file
+ - read a template file and find out the file list
+"""
+# XXX todo: document + add tests
+import re
+import os
+import fnmatch
+
+from packaging import logger
+from packaging.util import write_file, convert_path
+from packaging.errors import (PackagingTemplateError,
+                              PackagingInternalError)
+
+__all__ = ['Manifest']
+
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile('\\\w*\n', re.M)
+_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
+
+
class Manifest(object):
    """A list of files built by exploring the filesystem and filtered by
    applying various patterns to what we find there.
    """

    def __init__(self):
        # 'allfiles' caches the full filesystem walk (built lazily by
        # findall); 'files' is the selected subset
        self.allfiles = None
        self.files = []

    #
    # Public API
    #

    def findall(self, dir=os.curdir):
        """Walk *dir* and cache every file found under it."""
        self.allfiles = _findall(dir)

    def append(self, item):
        """Add a single filename to the list."""
        self.files.append(item)

    def extend(self, items):
        """Add a sequence of filenames to the list."""
        self.files.extend(items)

    def sort(self):
        """Sort the file list by (directory, basename) components.

        Not a strict lexical sort!
        """
        self.files = [os.path.join(*path_tuple) for path_tuple in
                      sorted(os.path.split(path) for path in self.files)]

    def clear(self):
        """Clear all collected files."""
        self.files = []
        if self.allfiles is not None:
            self.allfiles = []

    def remove_duplicates(self):
        """Remove adjacent duplicates; assumes the list has been sorted."""
        for i in range(len(self.files) - 1, 0, -1):
            if self.files[i] == self.files[i - 1]:
                del self.files[i]

    def read_template(self, path_or_file):
        """Read and parse a manifest template file.

        'path_or_file' can be a path or a file-like object.

        Updates the list accordingly.
        """
        if isinstance(path_or_file, str):
            f = open(path_or_file)
        else:
            f = path_or_file

        try:
            content = f.read()
            # first, let's unwrap collapsed lines
            content = _COLLAPSE_PATTERN.sub('', content)
            # next, let's remove commented lines and empty lines
            content = _COMMENTED_LINE.sub('', content)

            # now we have our cleaned up lines
            lines = [line.strip() for line in content.split('\n')]
        finally:
            f.close()

        for line in lines:
            if line == '':
                continue
            try:
                self._process_template_line(line)
            except PackagingTemplateError as msg:
                logger.warning("%s, %s", path_or_file, msg)

    def write(self, path):
        """Write the file list to the manifest file named by *path*.

        If the file already exists and does not start with the generated
        header below, it is assumed to be manually maintained and is
        left untouched.
        """
        if os.path.isfile(path):
            with open(path) as fp:
                first_line = fp.readline()

            if first_line != '# file GENERATED by packaging, do NOT edit\n':
                logger.info("not writing to manually maintained "
                            "manifest file %r", path)
                return

        self.sort()
        self.remove_duplicates()
        content = self.files[:]
        content.insert(0, '# file GENERATED by packaging, do NOT edit')
        logger.info("writing manifest file %r", path)
        write_file(path, content)

    def read(self, path):
        """Read the manifest file named by *path* and fill in the file
        list from it.
        """
        logger.info("reading manifest file %r", path)
        with open(path) as manifest:
            for line in manifest:
                # strip the newline and skip blank lines and comments
                # (such as the generated-file header written by write());
                # appending raw lines would leave '\n'-terminated names
                # in self.files
                line = line.strip()
                if line and not line.startswith('#'):
                    self.append(line)

    def exclude_pattern(self, pattern, anchor=True, prefix=None,
                        is_regex=False):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for '_include_pattern()', below.
        The list 'self.files' is modified in place. Return True if files are
        found.
        """
        files_found = False
        pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)
        # iterate backwards so deleting entries does not disturb the
        # indices still to be visited
        for i in range(len(self.files) - 1, -1, -1):
            if pattern_re.search(self.files[i]):
                del self.files[i]
                files_found = True

        return files_found

    #
    # Private API
    #

    def _parse_template_line(self, line):
        # Split one template line into (action, patterns, dir,
        # dir_pattern); which of the last three is set depends on the
        # action.  A line with a single word is an implicit 'include'.
        words = line.split()
        if len(words) == 1:
            # no action given, let's use the default 'include'
            words.insert(0, 'include')

        action = words[0]
        patterns = dir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise PackagingTemplateError(
                      "%r expects <pattern1> <pattern2> ..." % action)

            patterns = [convert_path(word) for word in words[1:]]

        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise PackagingTemplateError(
                      "%r expects <dir> <pattern1> <pattern2> ..." % action)

            dir = convert_path(words[1])
            patterns = [convert_path(word) for word in words[2:]]

        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise PackagingTemplateError(
                     "%r expects a single <dir_pattern>" % action)

            dir_pattern = convert_path(words[1])

        else:
            raise PackagingTemplateError("unknown action %r" % action)

        return action, patterns, dir, dir_pattern

    def _process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        action, patterns, dir, dir_pattern = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=True):
                    logger.warning("no files found matching %r", pattern)

        elif action == 'exclude':
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=True):
                    logger.warning("no previously-included files "
                                   "found matching %r", pattern)

        elif action == 'global-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=False):
                    logger.warning("no files found matching %r "
                                   "anywhere in distribution", pattern)

        elif action == 'global-exclude':
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=False):
                    logger.warning("no previously-included files "
                                   "matching %r found anywhere in "
                                   "distribution", pattern)

        elif action == 'recursive-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, prefix=dir):
                    logger.warning("no files found matching %r "
                                   "under directory %r", pattern, dir)

        elif action == 'recursive-exclude':
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    logger.warning("no previously-included files "
                                   "matching %r found under directory %r",
                                   pattern, dir)

        elif action == 'graft':
            if not self._include_pattern(None, prefix=dir_pattern):
                logger.warning("no directories found matching %r",
                               dir_pattern)

        elif action == 'prune':
            if not self.exclude_pattern(None, prefix=dir_pattern):
                logger.warning("no previously-included directories found "
                               "matching %r", dir_pattern)
        else:
            raise PackagingInternalError(
                "this cannot happen: invalid action %r" % action)

    def _include_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Select strings (presumably filenames) from 'self.allfiles' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?'  match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        """
        files_found = False
        pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.files.append(name)
                files_found = True

        return files_found
+
+
+#
+# Utility functions
+#
def _findall(dir=os.curdir):
    """Find all files under 'dir' and return the list of full filenames
    (relative to 'dir').

    Regular files are collected; directories are traversed iteratively,
    except symlinked directories, which are skipped to avoid loops.
    """
    from stat import S_ISREG, S_ISDIR

    results = []
    stack = [dir]
    pop = stack.pop
    push = stack.append

    while stack:
        dir = pop()
        names = os.listdir(dir)

        for name in names:
            if dir != os.curdir:        # avoid the dreaded "./" syndrome
                fullname = os.path.join(dir, name)
            else:
                fullname = name

            # Avoid excess stat calls -- just one will do, thank you!
            mode = os.stat(fullname).st_mode
            if S_ISREG(mode):
                results.append(fullname)
            elif S_ISDIR(mode):
                # S_ISLNK() on an os.stat() result is always false because
                # stat() follows symlinks; use islink() to really avoid
                # recursing into symlinked directories.
                if not os.path.islink(fullname):
                    push(fullname)

    return results
+
+
def _glob_to_re(pattern):
    """Translate a shell-like glob pattern to a regular expression.

    Return a string containing the regex.  Differs from
    'fnmatch.translate()' in that '*' does not match "special characters"
    (which are platform-specific).
    """
    pattern_re = fnmatch.translate(pattern)

    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
    # and by extension they shouldn't match such "special characters" under
    # any OS.  So change all non-escaped dots in the RE to match any
    # character except the special characters.
    # XXX currently the "special characters" are just slash -- i.e. this is
    # Unix-only.
    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^/]', pattern_re)

    return pattern_re


def _translate_pattern(pattern, anchor=True, prefix=None, is_regex=False):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression.

    Return the compiled regex.  If 'is_regex' true,
    then 'pattern' is directly compiled to a regex (if it's a string)
    or just returned as-is (assumes it's a regex object).
    """
    if is_regex:
        if isinstance(pattern, str):
            return re.compile(pattern)
        else:
            return pattern

    # fnmatch.translate() wraps its result in a grouping/anchoring harness
    # (e.g. '(?s:...)\Z' on recent Pythons).  Determine what that harness
    # looks like so it can be stripped and re-added around composed
    # patterns, instead of assuming a fixed-length suffix -- slicing a
    # fixed length off produced unbalanced (uncompilable) regexes once
    # translate() changed its output format.
    start, _, end = _glob_to_re('_').partition('_')

    if pattern:
        pattern_re = _glob_to_re(pattern)
        assert pattern_re.startswith(start) and pattern_re.endswith(end)
    else:
        pattern_re = ''

    if prefix is not None:
        # 'anchor' is ignored: match prefix, a path separator, anything,
        # then the pattern
        prefix_re = _glob_to_re(prefix)
        assert prefix_re.startswith(start) and prefix_re.endswith(end)
        prefix_re = prefix_re[len(start):len(prefix_re) - len(end)]
        sep = os.sep
        if os.sep == '\\':
            sep = r'\\'
        pattern_re = pattern_re[len(start):len(pattern_re) - len(end)]
        pattern_re = r'%s\A%s%s.*%s%s' % (start, prefix_re, sep,
                                          pattern_re, end)
    else:                               # no prefix -- respect anchor flag
        if anchor:
            if pattern_re:
                pattern_re = r'%s\A%s' % (start, pattern_re[len(start):])
            else:
                # empty pattern anchored at the start matches everything
                pattern_re = r'\A'

    return re.compile(pattern_re)
diff --git a/Lib/packaging/markers.py b/Lib/packaging/markers.py
new file mode 100644
index 0000000..4bbac7e
--- /dev/null
+++ b/Lib/packaging/markers.py
@@ -0,0 +1,187 @@
+"""Parser for the environment markers micro-language defined in PEP 345."""
+
+import sys
+import platform
+import os
+
+from tokenize import tokenize, NAME, OP, STRING, ENDMARKER, ENCODING
+from io import BytesIO
+
+__all__ = ['interpret']
+
+
# allowed operators
# maps each marker comparison operator to a binary predicate; 'in' and
# 'not in' test membership, the others delegate to rich comparisons
_OPERATORS = {'==': lambda x, y: x == y,
              '!=': lambda x, y: x != y,
              '>': lambda x, y: x > y,
              '>=': lambda x, y: x >= y,
              '<': lambda x, y: x < y,
              '<=': lambda x, y: x <= y,
              'in': lambda x, y: x in y,
              'not in': lambda x, y: x not in y}


def _operate(operation, x, y):
    # apply the comparison named by *operation* to the operands (x, y);
    # raises KeyError for an unknown operator name
    return _OPERATORS[operation](x, y)
+
+
# restricted set of variables usable in a marker expression, mapped to
# their value in the current environment
_VARS = {'sys.platform': sys.platform,
         # build 'major.minor' from version_info: sys.version[:3] would
         # truncate '3.10' and later to '3.1'
         'python_version': '%s.%s' % sys.version_info[:2],
         'python_full_version': sys.version.split(' ', 1)[0],
         'os.name': os.name,
         'platform.version': platform.version(),
         'platform.machine': platform.machine(),
         'platform.python_implementation': platform.python_implementation()}
+
+
class _Operation:
    """One 'left op right' comparison parsed out of a marker.

    The parser fills in 'left', 'op' and 'right' as raw token strings;
    calling the instance validates them, resolves names against the
    execution context (falling back to _VARS) and applies the operator.
    """

    def __init__(self, execution_context=None):
        self.left = None
        self.op = None
        self.right = None
        self.execution_context = (
            {} if execution_context is None else execution_context)

    def __repr__(self):
        return '%s %s %s' % (self.left, self.op, self.right)

    def _get_var(self, name):
        # the execution context overrides the environment-derived value
        try:
            return self.execution_context[name]
        except KeyError:
            return _VARS[name]

    def _is_string(self, value):
        # a quoted literal: at least two chars, same quote on both ends
        if value is None or len(value) < 2:
            return False
        return value[0] == value[-1] and value[0] in '"\''

    def _is_name(self, value):
        return value in _VARS

    def _convert(self, value):
        # resolve known names; anything else is treated as a literal
        if value in _VARS:
            return self._get_var(value)
        return value.strip('"\'')

    def _check_name(self, value):
        if value not in _VARS:
            raise NameError(value)

    def _nonsense_op(self):
        raise SyntaxError('This operation is not supported : "%s"' % self)

    def __call__(self):
        # exactly one side must be a quoted literal and the other a
        # known variable name
        left_is_str = self._is_string(self.left)
        right_is_str = self._is_string(self.right)
        if left_is_str == right_is_str:
            self._nonsense_op()
        self._check_name(self.right if left_is_str else self.left)

        if self.op not in _OPERATORS:
            raise TypeError('Operator not supported "%s"' % self.op)

        return _operate(self.op, self._convert(self.left),
                        self._convert(self.right))
+
+
class _OR:
    """Lazy 'or' of two callables; 'right' is attached later by the parser."""

    def __init__(self, left, right=None):
        self.left = left
        self.right = right

    def filled(self):
        # True once both operands have been supplied
        return self.right is not None

    def __repr__(self):
        return 'OR(%r, %r)' % (self.left, self.right)

    def __call__(self):
        # short-circuits like the 'or' it represents
        return self.left() or self.right()
+
+
class _AND:
    """Lazy 'and' of two callables; 'right' is attached later by the parser."""

    def __init__(self, left, right=None):
        self.left = left
        self.right = right

    def filled(self):
        # True once both operands have been supplied
        return self.right is not None

    def __repr__(self):
        return 'AND(%r, %r)' % (self.left, self.right)

    def __call__(self):
        # short-circuits like the 'and' it represents
        return self.left() and self.right()
+
+
def interpret(marker, execution_context=None):
    """Interpret a marker and return a result depending on environment.

    *marker* is an environment-marker expression (PEP 345), e.g.
    "os.name == 'posix' and python_version < '3'"; *execution_context*
    optionally overrides the predefined variable values.  The marker is
    tokenized with the stdlib tokenizer and parsed into _Operation nodes
    optionally wrapped in _AND/_OR; returns True only if every top-level
    node evaluates to true.
    """
    marker = marker.strip().encode()
    # 'ops' collects the top-level nodes; 'op_starting' is True when the
    # next token opens a new comparison
    ops = []
    op_starting = True
    for token in tokenize(BytesIO(marker).readline):
        # Unpack token
        # NOTE(review): tokenize yields TokenInfo(type, string, start,
        # end, line); the names 'rowcol', 'line' and 'logical_line' below
        # do not match those fields -- only toktype/tokval are used, so
        # this is harmless, but confirm before using the other names.
        toktype, tokval, rowcol, line, logical_line = token
        if toktype not in (NAME, OP, STRING, ENDMARKER, ENCODING):
            raise SyntaxError('Type not supported "%s"' % tokval)

        if op_starting:
            # open a fresh comparison, attaching it as the right operand
            # of a pending and/or node when one is waiting for it
            op = _Operation(execution_context)
            if len(ops) > 0:
                last = ops[-1]
                if isinstance(last, (_OR, _AND)) and not last.filled():
                    last.right = op
                else:
                    ops.append(op)
            else:
                ops.append(op)
            op_starting = False
        else:
            op = ops[-1]

        # 'and'/'or' (or end of input) closes the current comparison and
        # wraps the last node into the corresponding boolean node
        if (toktype == ENDMARKER or
            (toktype == NAME and tokval in ('and', 'or'))):
            if toktype == NAME and tokval == 'and':
                ops.append(_AND(ops.pop()))
            elif toktype == NAME and tokval == 'or':
                ops.append(_OR(ops.pop()))
            op_starting = True
            continue

        # descend into the right-hand comparison of a pending and/or
        if isinstance(op, (_OR, _AND)) and op.right is not None:
            op = op.right

        if ((toktype in (NAME, STRING) and tokval not in ('in', 'not'))
            or (toktype == OP and tokval == '.')):
            # operand token: grow the left side until an operator has
            # been seen, then grow the right side (dotted names such as
            # 'os.name' arrive as several tokens and are concatenated)
            if op.op is None:
                if op.left is None:
                    op.left = tokval
                else:
                    op.left += tokval
            else:
                if op.right is None:
                    op.right = tokval
                else:
                    op.right += tokval
        elif toktype == OP or tokval in ('in', 'not'):
            # operator token; 'not' followed by 'in' merges into 'not in'
            if tokval == 'in' and op.op == 'not':
                op.op = 'not in'
            else:
                op.op = tokval

    # the marker is the conjunction of the collected top-level nodes
    for op in ops:
        if not op():
            return False
    return True
diff --git a/Lib/packaging/metadata.py b/Lib/packaging/metadata.py
new file mode 100644
index 0000000..8abbe38
--- /dev/null
+++ b/Lib/packaging/metadata.py
@@ -0,0 +1,552 @@
+"""Implementation of the Metadata for Python packages PEPs.
+
+Supports all metadata formats (1.0, 1.1, 1.2).
+"""
+
+import re
+import logging
+
+from io import StringIO
+from email import message_from_file
+from packaging import logger
+from packaging.markers import interpret
+from packaging.version import (is_valid_predicate, is_valid_version,
+                               is_valid_versions)
+from packaging.errors import (MetadataMissingError,
+                              MetadataConflictError,
+                              MetadataUnrecognizedVersionError)
+
try:
    # docutils is installed
    from docutils.utils import Reporter
    from docutils.parsers.rst import Parser
    from docutils import frontend
    from docutils import nodes

    class SilentReporter(Reporter):
        """A docutils Reporter that records messages instead of emitting
        them; used by Metadata._check_rst_data to validate reST text."""

        def __init__(self, source, report_level, halt_level, stream=None,
                     debug=0, encoding='ascii', error_handler='replace'):
            # collected (level, message, children, kwargs) tuples
            self.messages = []
            Reporter.__init__(self, source, report_level, halt_level, stream,
                              debug, encoding, error_handler)

        def system_message(self, level, message, *children, **kwargs):
            # NOTE(review): unlike the base implementation this returns
            # None; _check_rst_data compensates by catching the resulting
            # AttributeError -- confirm before changing either side.
            self.messages.append((level, message, children, kwargs))

    _HAS_DOCUTILS = True
except ImportError:
    # docutils is not installed
    _HAS_DOCUTILS = False
+
+# public API of this module
+__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
+
+# Encoding used for the PKG-INFO files
+PKG_INFO_ENCODING = 'utf-8'
+
+# preferred version. Hopefully will be changed
+# to 1.2 once PEP 345 is supported everywhere
+PKG_INFO_PREFERRED_VERSION = '1.0'
+
+_LINE_PREFIX = re.compile('\n       \|')
+_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'License')
+
+_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'License', 'Classifier', 'Download-URL', 'Obsoletes',
+               'Provides', 'Requires')
+
+_314_MARKERS = ('Obsoletes', 'Provides', 'Requires')
+
+_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+               'Supported-Platform', 'Summary', 'Description',
+               'Keywords', 'Home-page', 'Author', 'Author-email',
+               'Maintainer', 'Maintainer-email', 'License',
+               'Classifier', 'Download-URL', 'Obsoletes-Dist',
+               'Project-URL', 'Provides-Dist', 'Requires-Dist',
+               'Requires-Python', 'Requires-External')
+
+_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
+                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
+                'Maintainer-email', 'Project-URL')
+
+_ALL_FIELDS = set()
+_ALL_FIELDS.update(_241_FIELDS)
+_ALL_FIELDS.update(_314_FIELDS)
+_ALL_FIELDS.update(_345_FIELDS)
+
+
def _version2fieldlist(version):
    """Return the tuple of fields defined by metadata *version*."""
    fields = {'1.0': _241_FIELDS,
              '1.1': _314_FIELDS,
              '1.2': _345_FIELDS}.get(version)
    if fields is None:
        raise MetadataUnrecognizedVersionError(version)
    return fields
+
+
def _best_version(fields):
    """Detect the best metadata version depending on the fields used."""
    def _has_marker(keys, markers):
        # does any version-specific marker field appear in *keys*?
        return any(marker in keys for marker in markers)

    keys = list(fields)
    possible_versions = ['1.0', '1.1', '1.2']

    # eliminate every version that does not define one of the used fields
    for key in keys:
        if key not in _241_FIELDS and '1.0' in possible_versions:
            possible_versions.remove('1.0')
        if key not in _314_FIELDS and '1.1' in possible_versions:
            possible_versions.remove('1.1')
        if key not in _345_FIELDS and '1.2' in possible_versions:
            possible_versions.remove('1.2')

    # possible_versions now contains only qualified versions
    if not possible_versions:
        raise MetadataConflictError('Unknown metadata set')
    if len(possible_versions) == 1:
        return possible_versions[0]   # found !

    # let's see if one unique marker is found
    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
    if is_1_1 and is_1_2:
        raise MetadataConflictError('You used incompatible 1.1 and 1.2 fields')

    # we have the choice, either 1.0, or 1.2:
    #   - 1.0 has a broken Summary field but works with all tools
    #   - 1.1 is to avoid
    #   - 1.2 fixes Summary but is not widespread yet
    if not is_1_1 and not is_1_2:
        # no specific marker found: fall back on the preferred version
        # when it is still in the running
        if PKG_INFO_PREFERRED_VERSION in possible_versions:
            return PKG_INFO_PREFERRED_VERSION
    if is_1_1:
        return '1.1'

    # default when 1.0 is disqualified
    return '1.2'
+
+
# mapping of attribute-style names (as used in setup scripts and the
# mapping API) to their PKG-INFO field names
_ATTR2FIELD = {
    'metadata_version': 'Metadata-Version',
    'name': 'Name',
    'version': 'Version',
    'platform': 'Platform',
    'supported_platform': 'Supported-Platform',
    'summary': 'Summary',
    'description': 'Description',
    'keywords': 'Keywords',
    'home_page': 'Home-page',
    'author': 'Author',
    'author_email': 'Author-email',
    'maintainer': 'Maintainer',
    'maintainer_email': 'Maintainer-email',
    'license': 'License',
    'classifier': 'Classifier',
    'download_url': 'Download-URL',
    'obsoletes_dist': 'Obsoletes-Dist',
    'provides_dist': 'Provides-Dist',
    'requires_dist': 'Requires-Dist',
    'requires_python': 'Requires-Python',
    'requires_external': 'Requires-External',
    'requires': 'Requires',
    'provides': 'Provides',
    'obsoletes': 'Obsoletes',
    'project_url': 'Project-URL',
}
+
# field categories that drive parsing and serialization behaviour
_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
_VERSIONS_FIELDS = ('Requires-Python',)
_VERSION_FIELDS = ('Version',)
_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
               'Requires', 'Provides', 'Obsoletes-Dist',
               'Provides-Dist', 'Requires-Dist', 'Requires-External',
               'Project-URL', 'Supported-Platform')
_LISTTUPLEFIELDS = ('Project-URL',)

_ELEMENTSFIELD = ('Keywords',)

_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')


class NoDefault:
    """Marker object used for clean representation"""
    def __repr__(self):
        return '<NoDefault>'

# missing-value sentinel (the former '_MISSING = object()' assignment was
# dead code: it was immediately rebound to this NoDefault instance)
_MISSING = NoDefault()
+
+
+class Metadata:
+    """The metadata of a release.
+
+    Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
+    instantiate the class with one of these arguments (or none):
+    - *path*, the path to a METADATA file
+    - *fileobj* give a file-like object with METADATA as content
+    - *mapping* is a dict-like object
+    """
+    # TODO document that execution_context and platform_dependent are used
+    # to filter on query, not when setting a key
+    # also document the mapping API and UNKNOWN default key
+
    def __init__(self, path=None, platform_dependent=False,
                 execution_context=None, fileobj=None, mapping=None):
        # *path*, *fileobj* and *mapping* are mutually exclusive sources
        # for the initial field values; see the class docstring.
        self._fields = {}               # field name -> value(s)
        self.requires_files = []
        self.docutils_support = _HAS_DOCUTILS
        self.platform_dependent = platform_dependent
        self.execution_context = execution_context
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        if path is not None:
            self.read(path)
        elif fileobj is not None:
            self.read_file(fileobj)
        elif mapping is not None:
            self.update(mapping)
+
    def _set_best_version(self):
        # recompute Metadata-Version from the set of fields currently used
        self._fields['Metadata-Version'] = _best_version(self._fields)
+
    def _write_field(self, file, name, value):
        # emit one RFC 822 style "Name: value" line
        file.write('%s: %s\n' % (name, value))
+
    def __getitem__(self, name):
        # dict-style read access; delegates to get()
        return self.get(name)
+
    def __setitem__(self, name, value):
        # dict-style write access; delegates to set()
        return self.set(name, value)
+
    def __delitem__(self, name):
        # dict-style deletion, accepting attribute or field names; the
        # metadata version is recomputed afterwards
        field_name = self._convert_name(name)
        try:
            del self._fields[field_name]
        except KeyError:
            # re-raise under the name the caller actually used
            raise KeyError(name)
        self._set_best_version()
+
    def __contains__(self, name):
        # accept both field names ('Home-page') and attribute names
        # ('home_page')
        return (name in self._fields or
                self._convert_name(name) in self._fields)
+
    def _convert_name(self, name):
        """Map an attribute-style name to its field name ('home_page' ->
        'Home-page'); valid field names pass through unchanged, unknown
        names are returned lowercased with '-' replaced by '_'."""
        if name in _ALL_FIELDS:
            return name
        name = name.replace('-', '_').lower()
        return _ATTR2FIELD.get(name, name)
+
    def _default_value(self, name):
        # list-valued fields default to an empty list, everything else
        # to the 'UNKNOWN' placeholder
        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
            return []
        return 'UNKNOWN'
+
    def _check_rst_data(self, data):
        """Return warnings when the provided data has syntax errors.

        Parses *data* as reStructuredText with a SilentReporter and
        returns the list of collected (level, message, children, kwargs)
        tuples.  Only called when docutils is available.
        """
        source_path = StringIO()
        parser = Parser()
        settings = frontend.OptionParser().get_default_values()
        settings.tab_width = 4
        settings.pep_references = None
        settings.rfc_references = None
        reporter = SilentReporter(source_path,
                          settings.report_level,
                          settings.halt_level,
                          stream=settings.warning_stream,
                          debug=settings.debug,
                          encoding=settings.error_encoding,
                          error_handler=settings.error_encoding_error_handler)

        document = nodes.document(settings, reporter, source=source_path)
        document.note_source(source_path, -1)
        try:
            parser.parse(data, document)
        except AttributeError:
            # presumably triggered by SilentReporter.system_message
            # returning None where docutils expects a node; record that
            # parsing could not finish
            reporter.messages.append((-1, 'Could not finish the parsing.',
                                      '', {}))

        return reporter.messages
+
    def _platform(self, value):
        # Split an optional trailing environment marker off *value* and
        # evaluate it; returns (keep, value).  Markers are only honoured
        # when platform_dependent filtering is enabled.
        if not self.platform_dependent or ';' not in value:
            return True, value
        # NOTE(review): a value containing more than one ';' would make
        # this unpacking raise ValueError -- confirm inputs are clean.
        value, marker = value.split(';')
        return interpret(marker, self.execution_context), value
+
    def _remove_line_prefix(self, value):
        # undo the '\n       |' continuation-line encoding applied to the
        # Description field by write_file()
        return _LINE_PREFIX.sub('\n', value)
+
+    #
+    # Public API
+    #
    def get_fullname(self):
        """Return the distribution name with version, e.g. 'spam-1.0'."""
        return '%s-%s' % (self['Name'], self['Version'])
+
    def is_metadata_field(self, name):
        """Return True if *name* (field or attribute form) is a valid
        metadata key."""
        name = self._convert_name(name)
        return name in _ALL_FIELDS
+
    def is_multi_field(self, name):
        """Return True if *name* is a field holding a list of values."""
        name = self._convert_name(name)
        return name in _LISTFIELDS
+
+    def read(self, filepath):
+        """Read the metadata values from a file path."""
+        with open(filepath, 'r', encoding='ascii') as fp:
+            self.read_file(fp)
+
    def read_file(self, fileob):
        """Read the metadata values from a file object.

        The content is parsed as an RFC 822 message; only the fields
        defined by the detected Metadata-Version are considered.
        """
        msg = message_from_file(fileob)
        self._fields['Metadata-Version'] = msg['metadata-version']

        for field in _version2fieldlist(self['Metadata-Version']):
            if field in _LISTFIELDS:
                # we can have multiple lines
                values = msg.get_all(field)
                if field in _LISTTUPLEFIELDS and values is not None:
                    # e.g. Project-URL: stored as tuples, written
                    # comma-joined
                    values = [tuple(value.split(',')) for value in values]
                self.set(field, values)
            else:
                # single line
                value = msg[field]
                if value is not None and value != 'UNKNOWN':
                    self.set(field, value)
+
+    def write(self, filepath):
+        """Write the metadata fields to filepath."""
+        with open(filepath, 'w') as fp:
+            self.write_file(fp)
+
    def write_file(self, fileobject):
        """Write the PKG-INFO format data to a file object."""
        self._set_best_version()
        for field in _version2fieldlist(self['Metadata-Version']):
            values = self.get(field)
            if field in _ELEMENTSFIELD:
                # Keywords: a single comma-joined line
                self._write_field(fileobject, field, ','.join(values))
                continue
            if field not in _LISTFIELDS:
                if field == 'Description':
                    # mark continuation lines with '       |';
                    # _remove_line_prefix() reverses this on read
                    values = values.replace('\n', '\n       |')
                values = [values]

            if field in _LISTTUPLEFIELDS:
                # e.g. Project-URL tuples are written comma-joined
                values = [','.join(value) for value in values]

            for value in values:
                self._write_field(fileobject, field, value)
+
+    def update(self, other=None, **kwargs):
+        """Set metadata values from the given iterable `other` and kwargs.
+
+        Behavior is like `dict.update`: If `other` has a ``keys`` method,
+        they are looped over and ``self[key]`` is assigned ``other[key]``.
+        Else, ``other`` is an iterable of ``(key, value)`` iterables.
+
+        Keys that don't match a metadata field or that have an empty value are
+        dropped.
+        """
+        def _set(key, value):
+            if key in _ATTR2FIELD and value:
+                self.set(self._convert_name(key), value)
+
+        if other is None:
+            pass
+        elif hasattr(other, 'keys'):
+            for k in other.keys():
+                _set(k, other[k])
+        else:
+            for k, v in other:
+                _set(k, v)
+
+        if kwargs:
+            self.update(kwargs)
+
+    def set(self, name, value):
+        """Control then set a metadata field.
+
+        Values are coerced to the shape expected for the field, checked
+        (warnings only) and stored; Metadata-Version is recomputed.
+        """
+        name = self._convert_name(name)
+
+        # coerce values for list-valued fields: a comma-separated string
+        # becomes a list for "elements" fields (and Platform), a bare
+        # string becomes a one-item list for other list fields, and any
+        # other non-sequence value becomes an empty list
+        if ((name in _ELEMENTSFIELD or name == 'Platform') and
+            not isinstance(value, (list, tuple))):
+            if isinstance(value, str):
+                value = [v.strip() for v in value.split(',')]
+            else:
+                value = []
+        elif (name in _LISTFIELDS and
+              not isinstance(value, (list, tuple))):
+            if isinstance(value, str):
+                value = [value]
+            else:
+                value = []
+
+        # validation only produces warnings, so skip the work entirely
+        # when the logger would drop them anyway
+        if logger.isEnabledFor(logging.WARNING):
+            if name in _PREDICATE_FIELDS and value is not None:
+                for v in value:
+                    # check that the values are valid predicates
+                    if not is_valid_predicate(v.split(';')[0]):
+                        logger.warning(
+                            '%r is not a valid predicate (field %r)',
+                            v, name)
+            # FIXME this rejects UNKNOWN, is that right?
+            elif name in _VERSIONS_FIELDS and value is not None:
+                if not is_valid_versions(value):
+                    logger.warning('%r is not a valid version (field %r)',
+                                   value, name)
+            elif name in _VERSION_FIELDS and value is not None:
+                if not is_valid_version(value):
+                    logger.warning('%r is not a valid version (field %r)',
+                                   value, name)
+
+        if name in _UNICODEFIELDS:
+            if name == 'Description':
+                # strip the "       |" continuation prefix added on write
+                value = self._remove_line_prefix(value)
+
+        self._fields[name] = value
+        self._set_best_version()
+
+    def get(self, name, default=_MISSING):
+        """Get a metadata field.
+
+        Missing fields return *default* (or a per-field default value);
+        list fields are filtered against the current platform.
+        """
+        name = self._convert_name(name)
+        if name not in self._fields:
+            if default is _MISSING:
+                # _MISSING is a sentinel so that None can be a real default
+                default = self._default_value(name)
+            return default
+        if name in _UNICODEFIELDS:
+            value = self._fields[name]
+            return value
+        elif name in _LISTFIELDS:
+            value = self._fields[name]
+            if value is None:
+                return []
+            res = []
+            for val in value:
+                # drop entries whose platform marker does not match
+                valid, val = self._platform(val)
+                if not valid:
+                    continue
+                if name not in _LISTTUPLEFIELDS:
+                    res.append(val)
+                else:
+                    # That's for Project-URL
+                    res.append((val[0], val[1]))
+            return res
+
+        elif name in _ELEMENTSFIELD:
+            valid, value = self._platform(self._fields[name])
+            if not valid:
+                return []
+            if isinstance(value, str):
+                return value.split(',')
+        # reached for scalar fields, and as a fallthrough for non-string
+        # elements fields (the platform check then runs a second time)
+        valid, value = self._platform(self._fields[name])
+        if not valid:
+            return None
+        return value
+
+    def check(self, strict=False, restructuredtext=False):
+        """Check if the metadata is compliant.
+
+        If strict is True, raise MetadataMissingError when Name or Version
+        are missing; otherwise missing fields are only reported.  Return a
+        (missing, warnings) tuple of lists.
+        """
+        # XXX should check the versions (if the file was loaded)
+        missing, warnings = [], []
+
+        for attr in ('Name', 'Version'):  # required by PEP 345
+            if attr not in self:
+                missing.append(attr)
+
+        if strict and missing != []:
+            msg = 'missing required metadata: %s' % ', '.join(missing)
+            raise MetadataMissingError(msg)
+
+        # Home-page and Author are only reported missing, never raised,
+        # even in strict mode
+        for attr in ('Home-page', 'Author'):
+            if attr not in self:
+                missing.append(attr)
+
+        if _HAS_DOCUTILS and restructuredtext:
+            warnings.extend(self._check_rst_data(self['Description']))
+
+        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
+        if self['Metadata-Version'] != '1.2':
+            return missing, warnings
+
+        def is_valid_predicates(value):
+            for v in value:
+                if not is_valid_predicate(v.split(';')[0]):
+                    return False
+            return True
+
+        for fields, controller in ((_PREDICATE_FIELDS, is_valid_predicates),
+                                   (_VERSIONS_FIELDS, is_valid_versions),
+                                   (_VERSION_FIELDS, is_valid_version)):
+            for field in fields:
+                value = self.get(field, None)
+                if value is not None and not controller(value):
+                    warnings.append('Wrong value for %r: %s' % (field, value))
+
+        return missing, warnings
+
+    def todict(self):
+        """Return fields as a dict.
+
+        Field names will be converted to use the underscore-lowercase style
+        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
+        """
+        data = {
+            'metadata_version': self['Metadata-Version'],
+            'name': self['Name'],
+            'version': self['Version'],
+            'summary': self['Summary'],
+            'home_page': self['Home-page'],
+            'author': self['Author'],
+            'author_email': self['Author-email'],
+            'license': self['License'],
+            'description': self['Description'],
+            'keywords': self['Keywords'],
+            'platform': self['Platform'],
+            'classifier': self['Classifier'],
+            'download_url': self['Download-URL'],
+        }
+
+        if self['Metadata-Version'] == '1.2':
+            data['requires_dist'] = self['Requires-Dist']
+            data['requires_python'] = self['Requires-Python']
+            data['requires_external'] = self['Requires-External']
+            data['provides_dist'] = self['Provides-Dist']
+            data['obsoletes_dist'] = self['Obsoletes-Dist']
+            data['project_url'] = [','.join(url) for url in
+                                   self['Project-URL']]
+
+        elif self['Metadata-Version'] == '1.1':
+            data['provides'] = self['Provides']
+            data['requires'] = self['Requires']
+            data['obsoletes'] = self['Obsoletes']
+
+        return data
+
+    # Mapping API
+
+    def keys(self):
+        return _version2fieldlist(self['Metadata-Version'])
+
+    def __iter__(self):
+        for key in self.keys():
+            yield key
+
+    def values(self):
+        return [self[key] for key in list(self.keys())]
+
+    def items(self):
+        return [(key, self[key]) for key in list(self.keys())]
diff --git a/Lib/packaging/pypi/__init__.py b/Lib/packaging/pypi/__init__.py
new file mode 100644
index 0000000..5660c50
--- /dev/null
+++ b/Lib/packaging/pypi/__init__.py
@@ -0,0 +1,9 @@
+"""Low-level and high-level APIs to interact with project indexes."""
+
+__all__ = ['simple',
+           'xmlrpc',
+           'dist',
+           'errors',
+           'mirrors']
+
+from packaging.pypi.dist import ReleaseInfo, ReleasesList, DistInfo
diff --git a/Lib/packaging/pypi/base.py b/Lib/packaging/pypi/base.py
new file mode 100644
index 0000000..305fca9
--- /dev/null
+++ b/Lib/packaging/pypi/base.py
@@ -0,0 +1,48 @@
+"""Base class for index crawlers."""
+
+from packaging.pypi.dist import ReleasesList
+
+
+class BaseClient:
+    """Base class containing common methods for the index crawlers/clients"""
+
+    def __init__(self, prefer_final, prefer_source):
+        self._prefer_final = prefer_final
+        self._prefer_source = prefer_source
+        self._index = self
+
+    def _get_prefer_final(self, prefer_final=None):
+        """Return the prefer_final internal parameter or the specified one if
+        provided"""
+        if prefer_final:
+            return prefer_final
+        else:
+            return self._prefer_final
+
+    def _get_prefer_source(self, prefer_source=None):
+        """Return the prefer_source internal parameter or the specified one if
+        provided"""
+        if prefer_source:
+            return prefer_source
+        else:
+            return self._prefer_source
+
+    def _get_project(self, project_name):
+        """Return an project instance, create it if necessary"""
+        return self._projects.setdefault(project_name.lower(),
+                    ReleasesList(project_name, index=self._index))
+
+    def download_distribution(self, requirements, temp_path=None,
+                              prefer_source=None, prefer_final=None):
+        """Download a distribution from the last release according to the
+        requirements.
+
+        If temp_path is provided, download to this path, otherwise, create a
+        temporary location for the download and return it.
+        """
+        prefer_final = self._get_prefer_final(prefer_final)
+        prefer_source = self._get_prefer_source(prefer_source)
+        release = self.get_release(requirements, prefer_final)
+        if release:
+            dist = release.get_distribution(prefer_source=prefer_source)
+            return dist.download(temp_path)
diff --git a/Lib/packaging/pypi/dist.py b/Lib/packaging/pypi/dist.py
new file mode 100644
index 0000000..16510df
--- /dev/null
+++ b/Lib/packaging/pypi/dist.py
@@ -0,0 +1,547 @@
+"""Classes representing releases and distributions retrieved from indexes.
+
+A project (= unique name) can have several releases (= versions) and
+each release can have several distributions (= sdist and bdists).
+
+Release objects contain metadata-related information (see PEP 376);
+distribution objects contain download-related information.
+"""
+
+import sys
+import mimetypes
+import re
+import tempfile
+import urllib.request
+import urllib.parse
+import urllib.error
+import urllib.parse
+import hashlib
+from shutil import unpack_archive
+
+from packaging.errors import IrrationalVersionError
+from packaging.version import (suggest_normalized_version, NormalizedVersion,
+                               get_version_predicate)
+from packaging.metadata import Metadata
+from packaging.pypi.errors import (HashDoesNotMatch, UnsupportedHashName,
+                                   CantParseArchiveName)
+
+
+__all__ = ['ReleaseInfo', 'DistInfo', 'ReleasesList', 'get_infos_from_url']
+
+# archive extensions recognized by get_infos_from_url
+EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz .egg".split()
+# matches the "#md5=<hex digest>" fragment appended to download URLs
+MD5_HASH = re.compile(r'^.*#md5=([a-f0-9]+)$')
+# distribution types a release can hold
+DIST_TYPES = ['bdist', 'sdist']
+
+
+class IndexReference:
+    """Mixin used to store the index reference"""
+    def set_index(self, index=None):
+        # the index (crawler/client) this object was retrieved from
+        self._index = index
+
+
+class ReleaseInfo(IndexReference):
+    """Represent a release of a project (a project with a specific version).
+    The release contain the _metadata informations related to this specific
+    version, and is also a container for distribution related informations.
+
+    See the DistInfo class for more information about distributions.
+    """
+
+    def __init__(self, name, version, metadata=None, hidden=False,
+                 index=None, **kwargs):
+        """
+        :param name: the name of the distribution
+        :param version: the version of the distribution
+        :param metadata: the metadata fields of the release.
+        :type metadata: dict
+        :param kwargs: optional arguments for a new distribution.
+        """
+        self.set_index(index)
+        self.name = name
+        self._version = None
+        self.version = version
+        if metadata:
+            self.metadata = Metadata(mapping=metadata)
+        else:
+            self.metadata = None
+        self.dists = {}
+        self.hidden = hidden
+
+        if 'dist_type' in kwargs:
+            dist_type = kwargs.pop('dist_type')
+            self.add_distribution(dist_type, **kwargs)
+
+    def set_version(self, version):
+        try:
+            self._version = NormalizedVersion(version)
+        except IrrationalVersionError:
+            suggestion = suggest_normalized_version(version)
+            if suggestion:
+                self.version = suggestion
+            else:
+                raise IrrationalVersionError(version)
+
+    def get_version(self):
+        return self._version
+
+    version = property(get_version, set_version)
+
+    def fetch_metadata(self):
+        """If the metadata is not set, use the indexes to get it"""
+        if not self.metadata:
+            self._index.get_metadata(self.name, str(self.version))
+        return self.metadata
+
+    @property
+    def is_final(self):
+        """proxy to version.is_final"""
+        return self.version.is_final
+
+    def fetch_distributions(self):
+        if self.dists is None:
+            self._index.get_distributions(self.name, str(self.version))
+            if self.dists is None:
+                self.dists = {}
+        return self.dists
+
+    def add_distribution(self, dist_type='sdist', python_version=None,
+                         **params):
+        """Add distribution informations to this release.
+        If distribution information is already set for this distribution type,
+        add the given url paths to the distribution. This can be useful while
+        some of them fails to download.
+
+        :param dist_type: the distribution type (eg. "sdist", "bdist", etc.)
+        :param params: the fields to be passed to the distribution object
+                       (see the :class:DistInfo constructor).
+        """
+        if dist_type not in DIST_TYPES:
+            raise ValueError(dist_type)
+        if dist_type in self.dists:
+            self.dists[dist_type].add_url(**params)
+        else:
+            self.dists[dist_type] = DistInfo(self, dist_type,
+                                             index=self._index, **params)
+        if python_version:
+            self.dists[dist_type].python_version = python_version
+
+    def get_distribution(self, dist_type=None, prefer_source=True):
+        """Return a distribution.
+
+        If dist_type is set, find first for this distribution type, and just
+        act as an alias of __get_item__.
+
+        If prefer_source is True, search first for source distribution, and if
+        not return one existing distribution.
+        """
+        if len(self.dists) == 0:
+            raise LookupError()
+        if dist_type:
+            return self[dist_type]
+        if prefer_source:
+            if "sdist" in self.dists:
+                dist = self["sdist"]
+            else:
+                dist = next(self.dists.values())
+            return dist
+
+    def unpack(self, path=None, prefer_source=True):
+        """Unpack the distribution to the given path.
+
+        If not destination is given, creates a temporary location.
+
+        Returns the location of the extracted files (root).
+        """
+        return self.get_distribution(prefer_source=prefer_source)\
+                   .unpack(path=path)
+
+    def download(self, temp_path=None, prefer_source=True):
+        """Download the distribution, using the requirements.
+
+        If more than one distribution match the requirements, use the last
+        version.
+        Download the distribution, and put it in the temp_path. If no temp_path
+        is given, creates and return one.
+
+        Returns the complete absolute path to the downloaded archive.
+        """
+        return self.get_distribution(prefer_source=prefer_source)\
+                   .download(path=temp_path)
+
+    def set_metadata(self, metadata):
+        if not self.metadata:
+            self.metadata = Metadata()
+        self.metadata.update(metadata)
+
+    def __getitem__(self, item):
+        """distributions are available using release["sdist"]"""
+        return self.dists[item]
+
+    def _check_is_comparable(self, other):
+        if not isinstance(other, ReleaseInfo):
+            raise TypeError("cannot compare %s and %s"
+                % (type(self).__name__, type(other).__name__))
+        elif self.name != other.name:
+            raise TypeError("cannot compare %s and %s"
+                % (self.name, other.name))
+
+    def __repr__(self):
+        return "<%s %s>" % (self.name, self.version)
+
+    def __eq__(self, other):
+        self._check_is_comparable(other)
+        return self.version == other.version
+
+    def __lt__(self, other):
+        self._check_is_comparable(other)
+        return self.version < other.version
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __gt__(self, other):
+        return not (self.__lt__(other) or self.__eq__(other))
+
+    def __le__(self, other):
+        return self.__eq__(other) or self.__lt__(other)
+
+    def __ge__(self, other):
+        return self.__eq__(other) or self.__gt__(other)
+
+    # See http://docs.python.org/reference/datamodel#object.__hash__
+    __hash__ = object.__hash__
+
+
+class DistInfo(IndexReference):
+    """Represents a distribution retrieved from an index (sdist, bdist, ...)
+    """
+
+    def __init__(self, release, dist_type=None, url=None, hashname=None,
+                 hashval=None, is_external=True, python_version=None,
+                 index=None):
+        """Create a new instance of DistInfo.
+
+        :param release: a DistInfo class is relative to a release.
+        :param dist_type: the type of the dist (eg. source, bin-*, etc.)
+        :param url: URL where we found this distribution
+        :param hashname: the name of the hash we want to use. Refer to the
+                         hashlib.new documentation for more information.
+        :param hashval: the hash value.
+        :param is_external: we need to know if the provided url comes from
+                            an index browsing, or from an external resource.
+
+        """
+        self.set_index(index)
+        self.release = release
+        self.dist_type = dist_type
+        self.python_version = python_version
+        self._unpacked_dir = None
+        # set the downloaded path to None by default. The goal here
+        # is to not download distributions multiple times
+        self.downloaded_location = None
+        # We store urls in dict, because we need to have a bit more infos
+        # than the simple URL. It will be used later to find the good url to
+        # use.
+        # We have two _url* attributes: _url and urls. urls contains a list
+        # of dict for the different urls, and _url contains the choosen url, in
+        # order to dont make the selection process multiple times.
+        self.urls = []
+        self._url = None
+        self.add_url(url, hashname, hashval, is_external)
+
+    def add_url(self, url=None, hashname=None, hashval=None, is_external=True):
+        """Add a new url to the list of urls"""
+        if hashname is not None:
+            try:
+                hashlib.new(hashname)
+            except ValueError:
+                raise UnsupportedHashName(hashname)
+        if not url in [u['url'] for u in self.urls]:
+            self.urls.append({
+                'url': url,
+                'hashname': hashname,
+                'hashval': hashval,
+                'is_external': is_external,
+            })
+            # reset the url selection process
+            self._url = None
+
+    @property
+    def url(self):
+        """Pick up the right url for the list of urls in self.urls"""
+        # We return internal urls over externals.
+        # If there is more than one internal or external, return the first
+        # one.
+        if self._url is None:
+            if len(self.urls) > 1:
+                internals_urls = [u for u in self.urls \
+                                  if u['is_external'] == False]
+                if len(internals_urls) >= 1:
+                    self._url = internals_urls[0]
+            if self._url is None:
+                self._url = self.urls[0]
+        return self._url
+
+    @property
+    def is_source(self):
+        """return if the distribution is a source one or not"""
+        return self.dist_type == 'sdist'
+
+    def download(self, path=None):
+        """Download the distribution to a path, and return it.
+
+        If the path is given in path, use this, otherwise, generates a new one
+        Return the download location.
+        """
+        if path is None:
+            path = tempfile.mkdtemp()
+
+        # if we do not have downloaded it yet, do it.
+        if self.downloaded_location is None:
+            url = self.url['url']
+            archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]
+            filename, headers = urllib.request.urlretrieve(url,
+                                                   path + "/" + archive_name)
+            self.downloaded_location = filename
+            self._check_md5(filename)
+        return self.downloaded_location
+
+    def unpack(self, path=None):
+        """Unpack the distribution to the given path.
+
+        If not destination is given, creates a temporary location.
+
+        Returns the location of the extracted files (root).
+        """
+        if not self._unpacked_dir:
+            if path is None:
+                path = tempfile.mkdtemp()
+
+            filename = self.download(path)
+            content_type = mimetypes.guess_type(filename)[0]
+            unpack_archive(filename, path)
+            self._unpacked_dir = path
+
+        return path
+
+    def _check_md5(self, filename):
+        """Check that the md5 checksum of the given file matches the one in
+        url param"""
+        hashname = self.url['hashname']
+        expected_hashval = self.url['hashval']
+        if not None in (expected_hashval, hashname):
+            with open(filename, 'rb') as f:
+                hashval = hashlib.new(hashname)
+                hashval.update(f.read())
+
+            if hashval.hexdigest() != expected_hashval:
+                raise HashDoesNotMatch("got %s instead of %s"
+                    % (hashval.hexdigest(), expected_hashval))
+
+    def __repr__(self):
+        if self.release is None:
+            return "<? ? %s>" % self.dist_type
+
+        return "<%s %s %s>" % (
+            self.release.name, self.release.version, self.dist_type or "")
+
+
+class ReleasesList(IndexReference):
+    """A container of Release.
+
+    Provides useful methods and facilities to sort and filter releases.
+    """
+    def __init__(self, name, releases=None, contains_hidden=False, index=None):
+        self.set_index(index)
+        self.releases = []
+        self.name = name
+        self.contains_hidden = contains_hidden
+        if releases:
+            self.add_releases(releases)
+
+    def fetch_releases(self):
+        self._index.get_releases(self.name)
+        return self.releases
+
+    def filter(self, predicate):
+        """Filter and return a subset of releases matching the given predicate.
+        """
+        return ReleasesList(self.name, [release for release in self.releases
+                                        if predicate.match(release.version)],
+                                        index=self._index)
+
+    def get_last(self, requirements, prefer_final=None):
+        """Return the "last" release, that satisfy the given predicates.
+
+        "last" is defined by the version number of the releases, you also could
+        set prefer_final parameter to True or False to change the order results
+        """
+        predicate = get_version_predicate(requirements)
+        releases = self.filter(predicate)
+        if len(releases) == 0:
+            return None
+        releases.sort_releases(prefer_final, reverse=True)
+        return releases[0]
+
+    def add_releases(self, releases):
+        """Add releases in the release list.
+
+        :param: releases is a list of ReleaseInfo objects.
+        """
+        for r in releases:
+            self.add_release(release=r)
+
+    def add_release(self, version=None, dist_type='sdist', release=None,
+                    **dist_args):
+        """Add a release to the list.
+
+        The release can be passed in the `release` parameter, and in this case,
+        it will be crawled to extract the useful informations if necessary, or
+        the release informations can be directly passed in the `version` and
+        `dist_type` arguments.
+
+        Other keywords arguments can be provided, and will be forwarded to the
+        distribution creation (eg. the arguments of the DistInfo constructor).
+        """
+        if release:
+            if release.name.lower() != self.name.lower():
+                raise ValueError("%s is not the same project as %s" %
+                                 (release.name, self.name))
+            version = str(release.version)
+
+            if not version in self.get_versions():
+                # append only if not already exists
+                self.releases.append(release)
+            for dist in release.dists.values():
+                for url in dist.urls:
+                    self.add_release(version, dist.dist_type, **url)
+        else:
+            matches = [r for r in self.releases
+                       if str(r.version) == version and r.name == self.name]
+            if not matches:
+                release = ReleaseInfo(self.name, version, index=self._index)
+                self.releases.append(release)
+            else:
+                release = matches[0]
+
+            release.add_distribution(dist_type=dist_type, **dist_args)
+
+    def sort_releases(self, prefer_final=False, reverse=True, *args, **kwargs):
+        """Sort the results with the given properties.
+
+        The `prefer_final` argument can be used to specify if final
+        distributions (eg. not dev, bet or alpha) would be prefered or not.
+
+        Results can be inverted by using `reverse`.
+
+        Any other parameter provided will be forwarded to the sorted call. You
+        cannot redefine the key argument of "sorted" here, as it is used
+        internally to sort the releases.
+        """
+
+        sort_by = []
+        if prefer_final:
+            sort_by.append("is_final")
+        sort_by.append("version")
+
+        self.releases.sort(
+            key=lambda i: tuple(getattr(i, arg) for arg in sort_by),
+            reverse=reverse, *args, **kwargs)
+
+    def get_release(self, version):
+        """Return a release from its version."""
+        matches = [r for r in self.releases if str(r.version) == version]
+        if len(matches) != 1:
+            raise KeyError(version)
+        return matches[0]
+
+    def get_versions(self):
+        """Return a list of releases versions contained"""
+        return [str(r.version) for r in self.releases]
+
+    def __getitem__(self, key):
+        return self.releases[key]
+
+    def __len__(self):
+        return len(self.releases)
+
+    def __repr__(self):
+        string = 'Project "%s"' % self.name
+        if self.get_versions():
+            string += ' versions: %s' % ', '.join(self.get_versions())
+        return '<%s>' % string
+
+
+def get_infos_from_url(url, probable_dist_name=None, is_external=True):
+    """Get useful information from an URL.
+
+    Return a dict of (name, version, url, hashtype, hash, is_external),
+    or None if the archive extension is not one of EXTENSIONS.
+
+    :param url: complete url of the distribution
+    :param probable_dist_name: A probable name of the project.
+    :param is_external: Tell if the url comes from an index or from
+                        an external URL.
+    """
+    # NOTE(review): probable_dist_name is currently unused -- it is not
+    # forwarded to split_archive_name; confirm whether it should be
+    # if the url contains a md5 hash, get it.
+    md5_hash = None
+    match = MD5_HASH.match(url)
+    if match is not None:
+        md5_hash = match.group(1)
+        # remove the hash
+        url = url.replace("#md5=%s" % md5_hash, "")
+
+    # parse the archive name to find dist name and version
+    archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]
+    extension_matched = False
+    # remove the extension from the name
+    for ext in EXTENSIONS:
+        if archive_name.endswith(ext):
+            archive_name = archive_name[:-len(ext)]
+            extension_matched = True
+
+    name, version = split_archive_name(archive_name)
+    # unknown extensions yield an implicit None return
+    if extension_matched is True:
+        return {'name': name,
+                'version': version,
+                'url': url,
+                'hashname': "md5",
+                'hashval': md5_hash,
+                'is_external': is_external,
+                'dist_type': 'sdist'}
+
+
+def split_archive_name(archive_name, probable_name=None):
+    """Split an archive name into two parts: name and version.
+
+    Return the tuple (name, version)
+    """
+    # Try to determine wich part is the name and wich is the version using the
+    # "-" separator. Take the larger part to be the version number then reduce
+    # if this not works.
+    def eager_split(str, maxsplit=2):
+        # split using the "-" separator
+        splits = str.rsplit("-", maxsplit)
+        name = splits[0]
+        version = "-".join(splits[1:])
+        if version.startswith("-"):
+            version = version[1:]
+        if suggest_normalized_version(version) is None and maxsplit >= 0:
+            # we dont get a good version number: recurse !
+            return eager_split(str, maxsplit - 1)
+        else:
+            return name, version
+    if probable_name is not None:
+        probable_name = probable_name.lower()
+    name = None
+    if probable_name is not None and probable_name in archive_name:
+        # we get the name from probable_name, if given.
+        name = probable_name
+        version = archive_name.lstrip(name)
+    else:
+        name, version = eager_split(archive_name)
+
+    version = suggest_normalized_version(version)
+    if version is not None and name != "":
+        return name.lower(), version
+    else:
+        raise CantParseArchiveName(archive_name)
diff --git a/Lib/packaging/pypi/errors.py b/Lib/packaging/pypi/errors.py
new file mode 100644
index 0000000..2191ac1
--- /dev/null
+++ b/Lib/packaging/pypi/errors.py
@@ -0,0 +1,39 @@
+"""Exceptions raised by packaging.pypi code."""
+
+from packaging.errors import PackagingPyPIError
+
+
+class ProjectNotFound(PackagingPyPIError):
+    """The project has not been found on the index"""
+
+
+class DistributionNotFound(PackagingPyPIError):
+    """The distribution has not been found"""
+
+
+class ReleaseNotFound(PackagingPyPIError):
+    """The release (project + version) has not been found"""
+
+
+class CantParseArchiveName(PackagingPyPIError):
+    """The archive name can't be parsed to find the distribution name and
+    version"""
+
+
+class DownloadError(PackagingPyPIError):
+    """An error has occurred while downloading"""
+
+
+class HashDoesNotMatch(DownloadError):
+    """Compared hashes do not match"""
+
+
+class UnsupportedHashName(PackagingPyPIError):
+    """An unsupported hash name has been used"""
+
+
+class UnableToDownload(PackagingPyPIError):
+    """All mirrors have been tried, without success; download failed"""
+
+
+class InvalidSearchField(PackagingPyPIError):
+    """An invalid search field name has been used"""
diff --git a/Lib/packaging/pypi/mirrors.py b/Lib/packaging/pypi/mirrors.py
new file mode 100644
index 0000000..a646acf
--- /dev/null
+++ b/Lib/packaging/pypi/mirrors.py
@@ -0,0 +1,52 @@
+"""Utilities related to the mirror infrastructure defined in PEP 381."""
+
+from string import ascii_lowercase
+import socket
+
# DNS name whose record enumerates PyPI mirrors (PEP 381); presumably
# "last" aliases the most recently registered mirror -- TODO confirm.
DEFAULT_MIRROR_URL = "last.pypi.python.org"
+
+
def get_mirrors(hostname=None):
    """Return the list of PyPI mirror hostnames.

    The names are computed from the canonical DNS name found for
    *hostname* (``DEFAULT_MIRROR_URL`` when not given), as described in
    PEP 381::

    >>> from packaging.pypi.mirrors import get_mirrors
    >>> get_mirrors()
    ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org',
    'd.pypi.python.org']

    Return an empty list when the DNS lookup fails.
    """
    if hostname is None:
        hostname = DEFAULT_MIRROR_URL

    # the canonical name of this alias identifies the last mirror
    # registered on PyPI.
    try:
        canonical = socket.gethostbyname_ex(hostname)[0]
    except socket.gaierror:
        return []
    parts = canonical.split(".", 1)

    # enumerate every mirror name from "a" up to the last one.
    return ["%s.%s" % (prefix, parts[1]) for prefix in string_range(parts[0])]
+
+
def string_range(last):
    """Compute the range of strings between "a" and *last*, inclusive.

    This works for simple "a to z" lists, but also for "a to zz" lists:
    all one-letter names are generated before two-letter names, and so on.

    Yield each name in order and stop once *last* has been yielded.
    """
    # use the stdlib cartesian product instead of a hand-rolled one
    from itertools import product
    for width in range(1, len(last) + 1):
        for letters in product(ascii_lowercase, repeat=width):
            name = ''.join(letters)
            yield name
            if name == last:
                return
+
+
def product(*args, **kwds):
    """Return an iterator over the cartesian product of *args*.

    Thin wrapper around :func:`itertools.product`, kept for backward
    compatibility.  Only the ``repeat`` keyword is honoured; any other
    keyword argument is silently ignored, as before.
    """
    from itertools import product as _product
    return _product(*args, repeat=kwds.get('repeat', 1))
diff --git a/Lib/packaging/pypi/simple.py b/Lib/packaging/pypi/simple.py
new file mode 100644
index 0000000..8585193
--- /dev/null
+++ b/Lib/packaging/pypi/simple.py
@@ -0,0 +1,452 @@
+"""Spider using the screen-scraping "simple" PyPI API.
+
+This module contains the class SimpleIndexCrawler, a simple spider that
+can be used to find and retrieve distributions from a project index
+(like the Python Package Index), using its so-called simple API (see
+reference implementation available at http://pypi.python.org/simple/).
+"""
+
import base64
import functools
import http.client
import os
import re
import socket
import sys
import urllib.error
import urllib.parse
import urllib.request
+
+
+from fnmatch import translate
+from packaging import logger
+from packaging.metadata import Metadata
+from packaging.version import get_version_predicate
+from packaging import __version__ as packaging_version
+from packaging.pypi.base import BaseClient
+from packaging.pypi.dist import (ReleasesList, EXTENSIONS,
+                                  get_infos_from_url, MD5_HASH)
+from packaging.pypi.errors import (PackagingPyPIError, DownloadError,
+                                    UnableToDownload, CantParseArchiveName,
+                                    ReleaseNotFound, ProjectNotFound)
+from packaging.pypi.mirrors import get_mirrors
+from packaging.metadata import Metadata
+
__all__ = ['Crawler', 'DEFAULT_SIMPLE_INDEX_URL']

# -- Constants -----------------------------------------------
DEFAULT_SIMPLE_INDEX_URL = "http://a.pypi.python.org/simple/"
DEFAULT_HOSTS = ("*",)  # fnmatch pattern: allow all hosts by default
SOCKET_TIMEOUT = 15  # seconds; see the socket_timeout decorator below
# identifies this client to the index servers
USER_AGENT = "Python-urllib/%s packaging/%s" % (
    sys.version[:3], packaging_version)

# -- Regexps -------------------------------------------------
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
# captures the target of any href attribute
HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match

# This pattern matches a character entity reference (a decimal numeric
# references, a hexadecimal numeric reference, or a named reference).
ENTITY_SUB = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
# matches whole tags that carry a rel attribute (rel="download|homepage")
REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
+
+
def socket_timeout(timeout=SOCKET_TIMEOUT):
    """Decorator to add a socket timeout when requesting pages on PyPI.

    The effective timeout is, in order of preference: the ``_timeout``
    attribute of the decorated method's instance, then the *timeout*
    argument given to this decorator (default SOCKET_TIMEOUT).
    """
    def _socket_timeout(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            old_timeout = socket.getdefaulttimeout()
            # Fall back on the decorator argument when the instance does
            # not define its own timeout.  (The previous code rebound the
            # closure variable, making it function-local and raising
            # UnboundLocalError whenever self had no _timeout attribute.)
            socket.setdefaulttimeout(getattr(self, "_timeout", timeout))
            try:
                return func(self, *args, **kwargs)
            finally:
                # always restore the process-wide default timeout
                socket.setdefaulttimeout(old_timeout)
        return wrapper
    return _socket_timeout
+
+
def with_mirror_support():
    """Decorator adding transparent mirror fallback to index requests."""
    def decorate(func):
        def call_with_fallback(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            except DownloadError:
                # The current index failed: retry it while the budget
                # allows, otherwise point at the next known mirror.
                if self._mirrors_tries >= self._mirrors_max_tries:
                    try:
                        self._switch_to_next_mirror()
                    except KeyError:
                        raise UnableToDownload("Tried all mirrors")
                else:
                    self._mirrors_tries += 1
                # drop cached project data before retrying
                self._projects.clear()
                return call_with_fallback(self, *args, **kwargs)
        return call_with_fallback
    return decorate
+
+
class Crawler(BaseClient):
    """Provides useful tools to request the Python Package Index simple API.

    You can specify both mirrors and mirrors_url, but mirrors_url will only be
    used if mirrors is set to None.

    :param index_url: the url of the simple index to search on.
    :param prefer_final: if the version is not mentioned, and the last
                         version is not a "final" one (alpha, beta, etc.),
                         pick up the last final version.
    :param prefer_source: if the distribution type is not mentioned, pick up
                          the source one if available.
    :param follow_externals: tell if following external links is needed or
                             not. Default is False.
    :param hosts: a list of hosts allowed to be processed while using
                  follow_externals=True. Default behavior is to follow all
                  hosts.
    :param mirrors_url: the url to look on for DNS records giving mirror
                        addresses.
    :param mirrors: a list of mirrors (see PEP 381).
    :param timeout: time in seconds to consider a url has timed out.
    :param mirrors_max_tries: number of times to try requesting informations
                              on mirrors before switching.
    """

    def __init__(self, index_url=DEFAULT_SIMPLE_INDEX_URL, prefer_final=False,
                 prefer_source=True, hosts=DEFAULT_HOSTS,
                 follow_externals=False, mirrors_url=None, mirrors=None,
                 timeout=SOCKET_TIMEOUT, mirrors_max_tries=0):
        super(Crawler, self).__init__(prefer_final, prefer_source)
        self.follow_externals = follow_externals

        # mirroring attributes.
        if not index_url.endswith("/"):
            index_url += "/"
        # if no mirrors are defined, use the method described in PEP 381.
        if mirrors is None:
            mirrors = get_mirrors(mirrors_url)
        self._mirrors = set(mirrors)
        self._mirrors_used = set()
        self.index_url = index_url
        self._mirrors_max_tries = mirrors_max_tries
        self._mirrors_tries = 0
        self._timeout = timeout

        # create a regexp to match all given hosts
        self._allowed_hosts = re.compile('|'.join(map(translate, hosts))).match

        # we keep an index of pages we have processed, in order to avoid
        # scanning them multiple times (eg. if there is multiple pages
        # pointing on one)
        self._processed_urls = []
        self._projects = {}

    @with_mirror_support()
    def search_projects(self, name=None, **kwargs):
        """Search the index for projects containing the given name.

        Return a list of names.
        """
        with self._open_url(self.index_url) as index:
            # Build a regex from the requested name.  A plain name (no
            # wildcard) is matched as a substring of the anchor text.
            # (A dead no-op ``name.replace('*', '.*')`` whose result was
            # discarded has been removed from the wildcard branch.)
            if '*' not in name:
                name = "%s%s%s" % ('*.?', name, '*.?')
            name = name.replace('*', '[^<]*')  # avoid matching end tag
            projectname = re.compile('<a[^>]*>(%s)</a>' % name, re.I)
            matching_projects = []

            index_content = index.read()

        # FIXME should use bytes I/O and regexes instead of decoding
        index_content = index_content.decode()

        for match in projectname.finditer(index_content):
            project_name = match.group(1)
            matching_projects.append(self._get_project(project_name))
        return matching_projects

    def get_releases(self, requirements, prefer_final=None,
                     force_update=False):
        """Search for releases and return a ReleasesList object containing
        the results.

        Results are cached per (lowercased) project name; pass
        force_update=True to re-read the index page.
        """
        predicate = get_version_predicate(requirements)
        if predicate.name.lower() in self._projects and not force_update:
            return self._projects.get(predicate.name.lower())
        prefer_final = self._get_prefer_final(prefer_final)
        logger.info('reading info on PyPI about %s', predicate.name)
        self._process_index_page(predicate.name)

        if predicate.name.lower() not in self._projects:
            raise ProjectNotFound()

        releases = self._projects.get(predicate.name.lower())
        releases.sort_releases(prefer_final=prefer_final)
        return releases

    def get_release(self, requirements, prefer_final=None):
        """Return only one release that fulfill the given requirements"""
        predicate = get_version_predicate(requirements)
        release = self.get_releases(predicate, prefer_final)\
                      .get_last(predicate)
        if not release:
            raise ReleaseNotFound("No release matches the given criterias")
        return release

    def get_distributions(self, project_name, version):
        """Return the distributions found on the index for the specific given
        release"""
        # as the default behavior of get_release is to return a release
        # containing the distributions, just alias it.
        return self.get_release("%s (%s)" % (project_name, version))

    def get_metadata(self, project_name, version):
        """Return the metadatas from the simple index.

        Currently, download one archive, extract it and use the PKG-INFO file.
        """
        release = self.get_distributions(project_name, version)
        if not release.metadata:
            location = release.get_distribution().unpack()
            pkg_info = os.path.join(location, 'PKG-INFO')
            release.metadata = Metadata(pkg_info)
        return release

    def _switch_to_next_mirror(self):
        """Switch to the next mirror (eg. point self.index_url to the next
        mirror url).

        Raise a KeyError if all mirrors have been tried.
        """
        self._mirrors_used.add(self.index_url)
        index_url = self._mirrors.pop()
        # Fixed: the old test ``("http://" or ...) in index_url`` evaluated
        # to ``"http://" in index_url`` only; check each scheme prefix.
        if not index_url.startswith(("http://", "https://", "file://")):
            index_url = "http://%s" % index_url

        if not index_url.endswith("/simple"):
            index_url = "%s/simple/" % index_url

        self.index_url = index_url

    def _is_browsable(self, url):
        """Tell if the given URL can be browsed or not.

        It uses the follow_externals and the hosts list to tell if the given
        url is browsable or not.
        """
        # if _index_url is contained in the given URL, we are browsing the
        # index, and it's always "browsable".
        # local files are always considered browable resources
        if self.index_url in url or urllib.parse.urlparse(url)[0] == "file":
            return True
        elif self.follow_externals:
            if self._allowed_hosts(urllib.parse.urlparse(url)[1]):  # 1 is netloc
                return True
            else:
                return False
        return False

    def _is_distribution(self, link):
        """Tell if the given URL matches to a distribution name or not.
        """
        #XXX find a better way to check that links are distributions
        # Using a regexp ?
        for ext in EXTENSIONS:
            if ext in link:
                return True
        return False

    def _register_release(self, release=None, release_info=None):
        """Register a new release.

        Both a release or a dict of release_info can be provided, the prefered
        way (eg. the quicker) is the dict one.

        Return the list of existing releases for the given project.
        """
        # use a fresh dict per call: the previous mutable default argument
        # ({}) was shared between calls and popped from below
        if release_info is None:
            release_info = {}
        # Check if the project already has a list of releases (refering to
        # the project name). If not, create a new release list.
        # Then, add the release to the list.
        if release:
            name = release.name
        else:
            name = release_info['name']
        if not name.lower() in self._projects:
            self._projects[name.lower()] = ReleasesList(name, index=self._index)

        if release:
            self._projects[name.lower()].add_release(release=release)
        else:
            name = release_info.pop('name')
            version = release_info.pop('version')
            dist_type = release_info.pop('dist_type')
            self._projects[name.lower()].add_release(version, dist_type,
                                                     **release_info)
        return self._projects[name.lower()]

    def _process_url(self, url, project_name=None, follow_links=True):
        """Process an url and search for distributions packages.

        For each URL found, if it's a download, creates a PyPIdistribution
        object. If it's a homepage and we can follow links, process it too.

        :param url: the url to process
        :param project_name: the project name we are searching for.
        :param follow_links: Do not want to follow links more than from one
                             level. This parameter tells if we want to follow
                             the links we find (eg. run recursively this
                             method on it)
        """
        with self._open_url(url) as f:
            base_url = f.url
            if url not in self._processed_urls:
                self._processed_urls.append(url)
                link_matcher = self._get_link_matcher(url)
                for link, is_download in link_matcher(f.read().decode(), base_url):
                    if link not in self._processed_urls:
                        if self._is_distribution(link) or is_download:
                            self._processed_urls.append(link)
                            # it's a distribution, so create a dist object
                            try:
                                infos = get_infos_from_url(link, project_name,
                                            is_external=not self.index_url in url)
                            except CantParseArchiveName as e:
                                logger.warning(
                                    "version has not been parsed: %s", e)
                            else:
                                self._register_release(release_info=infos)
                        else:
                            # recurse at most one level deep
                            if self._is_browsable(link) and follow_links:
                                self._process_url(link, project_name,
                                    follow_links=False)

    def _get_link_matcher(self, url):
        """Returns the right link matcher function of the given url
        """
        if self.index_url in url:
            return self._simple_link_matcher
        else:
            return self._default_link_matcher

    def _get_full_url(self, url, base_url):
        """Resolve *url* against *base_url* after decoding HTML entities."""
        return urllib.parse.urljoin(base_url, self._htmldecode(url))

    def _simple_link_matcher(self, content, base_url):
        """Yield all links with a rel="download" or rel="homepage".

        This matches the simple index requirements for matching links.
        If follow_externals is set to False, dont yeld the external
        urls.

        :param content: the content of the page we want to parse
        :param base_url: the url of this page.
        """
        for match in HREF.finditer(content):
            url = self._get_full_url(match.group(1), base_url)
            if MD5_HASH.match(url):
                yield (url, True)

        for match in REL.finditer(content):
            # search for rel links.
            tag, rel = match.groups()
            rels = [s.strip() for s in rel.lower().split(',')]
            if 'homepage' in rels or 'download' in rels:
                for match in HREF.finditer(tag):
                    url = self._get_full_url(match.group(1), base_url)
                    if 'download' in rels or self._is_browsable(url):
                        # yield a list of (url, is_download)
                        yield (url, 'download' in rels)

    def _default_link_matcher(self, content, base_url):
        """Yield all links found on the page.
        """
        for match in HREF.finditer(content):
            url = self._get_full_url(match.group(1), base_url)
            if self._is_browsable(url):
                yield (url, False)

    @with_mirror_support()
    def _process_index_page(self, name):
        """Find and process a PyPI page for the given project name.

        :param name: the name of the project to find the page
        """
        # Browse and index the content of the given PyPI page.
        url = self.index_url + name + "/"
        self._process_url(url, name)

    @socket_timeout()
    def _open_url(self, url):
        """Open a urllib2 request, handling HTTP authentication, and local
        files support.

        """
        scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)

        # authentication stuff
        if scheme in ('http', 'https'):
            auth, host = urllib.parse.splituser(netloc)
        else:
            auth = None

        # add index.html automatically for filesystem paths
        if scheme == 'file':
            if url.endswith('/'):
                url += "index.html"

        # add authorization headers if auth is provided
        if auth:
            # str.encode('base64') does not exist in Python 3; build the
            # Basic credentials with the base64 module instead
            auth = "Basic " + base64.standard_b64encode(
                urllib.parse.unquote(auth).encode()).decode()
            # strip the credentials from the URL we actually request
            new_url = urllib.parse.urlunparse((
                scheme, host, path, params, query, frag))
            request = urllib.request.Request(new_url)
            request.add_header("Authorization", auth)
        else:
            request = urllib.request.Request(url)
        request.add_header('User-Agent', USER_AGENT)
        try:
            fp = urllib.request.urlopen(request)
        except (ValueError, http.client.InvalidURL) as v:
            msg = ' '.join([str(arg) for arg in v.args])
            raise PackagingPyPIError('%s %s' % (url, msg))
        except urllib.error.HTTPError as v:
            # an HTTP error page is still a readable response
            return v
        except urllib.error.URLError as v:
            raise DownloadError("Download error for %s: %s" % (url, v.reason))
        except http.client.BadStatusLine as v:
            raise DownloadError('%s returned a bad status line. '
                'The server might be down, %s' % (url, v.line))
        except http.client.HTTPException as v:
            raise DownloadError("Download error for %s: %s" % (url, v))
        except socket.timeout:
            raise DownloadError("The server timeouted")

        if auth:
            # Put authentication info back into request URL if same host,
            # so that links found on the page will work
            s2, h2, path2, param2, query2, frag2 = \
                urllib.parse.urlparse(fp.url)
            if s2 == scheme and h2 == host:
                fp.url = urllib.parse.urlunparse(
                    (s2, netloc, path2, param2, query2, frag2))
        return fp

    def _decode_entity(self, match):
        """Return the character for one matched HTML entity reference."""
        what = match.group(1)
        if what.startswith('#x'):
            what = int(what[2:], 16)
        elif what.startswith('#'):
            what = int(what[1:])
        else:
            from html.entities import name2codepoint
            # unknown named entities are left as-is
            what = name2codepoint.get(what, match.group(0))
        return chr(what)

    def _htmldecode(self, text):
        """Decode HTML entities in the given text."""
        return ENTITY_SUB(self._decode_entity, text)
diff --git a/Lib/packaging/pypi/wrapper.py b/Lib/packaging/pypi/wrapper.py
new file mode 100644
index 0000000..945d08a
--- /dev/null
+++ b/Lib/packaging/pypi/wrapper.py
@@ -0,0 +1,99 @@
+"""Convenient client for all PyPI APIs.
+
+This module provides a ClientWrapper class which will use the "simple"
+or XML-RPC API to request information or files from an index.
+"""
+
+from packaging.pypi import simple, xmlrpc
+
# Maps public client methods to the index type that implements them best.
_WRAPPER_MAPPINGS = {'get_release': 'simple',
                     'get_releases': 'simple',
                     'search_projects': 'simple',
                     'get_metadata': 'xmlrpc',
                     'get_distributions': 'simple'}

# Maps index type names to the client class to instantiate for them.
_WRAPPER_INDEXES = {'xmlrpc': xmlrpc.Client,
                    'simple': simple.Crawler}
+
+
def switch_index_if_fails(func, wrapper):
    """Decorator that switch of index (for instance from xmlrpc to simple)
    if the first mirror return an empty list or raises an exception.
    """
    def decorator(*args, **kwargs):
        # try the bound method first, then the same method on every other
        # index known to the wrapper
        candidates = [func]
        for client in wrapper._indexes.values():
            if client != func.__self__ and hasattr(client, func.__name__):
                candidates.append(getattr(client, func.__name__))

        last_exception = None
        for candidate in candidates:
            try:
                return candidate(*args, **kwargs)
            except Exception as err:
                last_exception = err
        # every index failed: propagate the last error seen
        raise last_exception
    return decorator
+
+
class ClientWrapper:
    """Wrapper around simple and xmlrpc clients,

    Choose the best implementation to use depending the needs, using the given
    mappings.
    If one of the indexes returns an error, tries to use others indexes.

    :param default_index: tell which index to rely on by default.
    :param index_classes: a dict of name:class to use as indexes.
    :param indexes: a dict of name:index already instantiated
    :param mappings: the mappings to use for this wrapper
    """

    def __init__(self, default_index='simple', index_classes=_WRAPPER_INDEXES,
                 indexes=None, mappings=_WRAPPER_MAPPINGS):
        self._projects = {}
        self._mappings = mappings
        # use a fresh dict per instance: the previous mutable default
        # argument ({}) was shared -- and mutated via setdefault below --
        # across every ClientWrapper instance
        self._indexes = {} if indexes is None else indexes
        self._default_index = default_index

        # instantiate the classes and set their _projects attribute to the
        # one of the wrapper, so all indexes share the same cache.
        for name, cls in index_classes.items():
            obj = self._indexes.setdefault(name, cls())
            obj._projects = self._projects
            obj._index = self

    def __getattr__(self, method_name):
        """When asking for methods of the wrapper, return the implementation of
        the wrapped classes, depending the mapping.

        Decorate the methods to switch of implementation if an error occurs.
        """
        real_method = None
        # honor the per-instance mappings (the previous code consulted the
        # module-level _WRAPPER_MAPPINGS, silently ignoring a custom
        # ``mappings`` argument given to __init__)
        if method_name in self._mappings:
            obj = self._indexes[self._mappings[method_name]]
            real_method = getattr(obj, method_name)
        else:
            # the method is not defined in the mappings, so we try first to get
            # it via the default index, and rely on others if needed.
            try:
                real_method = getattr(self._indexes[self._default_index],
                                      method_name)
            except AttributeError:
                other_indexes = [i for i in self._indexes
                                 if i != self._default_index]
                for index in other_indexes:
                    real_method = getattr(self._indexes[index], method_name,
                                          None)
                    if real_method:
                        break
        if real_method:
            return switch_index_if_fails(real_method, self)
        else:
            raise AttributeError("No index have attribute '%s'" % method_name)
diff --git a/Lib/packaging/pypi/xmlrpc.py b/Lib/packaging/pypi/xmlrpc.py
new file mode 100644
index 0000000..7a9f6cc
--- /dev/null
+++ b/Lib/packaging/pypi/xmlrpc.py
@@ -0,0 +1,200 @@
+"""Spider using the XML-RPC PyPI API.
+
+This module contains the class Client, a spider that can be used to find
+and retrieve distributions from a project index (like the Python Package
+Index), using its XML-RPC API (see documentation of the reference
+implementation at http://wiki.python.org/moin/PyPiXmlRpc).
+"""
+
+import xmlrpc.client
+
+from packaging import logger
+from packaging.errors import IrrationalVersionError
+from packaging.version import get_version_predicate
+from packaging.pypi.base import BaseClient
+from packaging.pypi.errors import (ProjectNotFound, InvalidSearchField,
+                                   ReleaseNotFound)
+from packaging.pypi.dist import ReleaseInfo
+
__all__ = ['Client', 'DEFAULT_XMLRPC_INDEX_URL']

DEFAULT_XMLRPC_INDEX_URL = 'http://python.org/pypi'

# fields accepted by the XML-RPC "search" method (see the PyPiXmlRpc
# reference implementation documentation)
_SEARCH_FIELDS = ['name', 'version', 'author', 'author_email', 'maintainer',
                  'maintainer_email', 'home_page', 'license', 'summary',
                  'description', 'keywords', 'platform', 'download_url']
+
+
class Client(BaseClient):
    """Client to query indexes using XML-RPC method calls.

    If no server_url is specified, use the default PyPI XML-RPC URL,
    defined in the DEFAULT_XMLRPC_INDEX_URL constant::

        >>> client = Client()
        >>> client.server_url == DEFAULT_XMLRPC_INDEX_URL
        True

        >>> client = Client("http://someurl/")
        >>> client.server_url
        'http://someurl/'
    """

    def __init__(self, server_url=DEFAULT_XMLRPC_INDEX_URL, prefer_final=False,
                 prefer_source=True):
        super(Client, self).__init__(prefer_final, prefer_source)
        self.server_url = server_url
        # cache of project objects, keyed by lowercased project name
        self._projects = {}

    def get_release(self, requirements, prefer_final=False):
        """Return a release with all complete metadata and distribution
        related informations.
        """
        prefer_final = self._get_prefer_final(prefer_final)
        predicate = get_version_predicate(requirements)
        releases = self.get_releases(predicate.name)
        release = releases.get_last(predicate, prefer_final)
        # fill in metadata and distribution info for the chosen release
        self.get_metadata(release.name, str(release.version))
        self.get_distributions(release.name, str(release.version))
        return release

    def get_releases(self, requirements, prefer_final=None, show_hidden=True,
                     force_update=False):
        """Return the list of existing releases for a specific project.

        Cache the results from one call to another.

        If show_hidden is True, return the hidden releases too.
        If force_update is True, reprocess the index to update the
        informations (eg. make a new XML-RPC call).
        ::

            >>> client = Client()
            >>> client.get_releases('Foo')
            ['1.1', '1.2', '1.3']

        If no such project exists, raise a ProjectNotFound exception::

            >>> client.get_releases('UnexistingProject')
            ProjectNotFound: UnexistingProject

        """
        def get_versions(project_name, show_hidden):
            return self.proxy.package_releases(project_name, show_hidden)

        predicate = get_version_predicate(requirements)
        prefer_final = self._get_prefer_final(prefer_final)
        project_name = predicate.name
        if not force_update and (project_name.lower() in self._projects):
            project = self._projects[project_name.lower()]
            if not project.contains_hidden and show_hidden:
                # if hidden releases are requested, and have an existing
                # list of releases that does not contains hidden ones
                all_versions = get_versions(project_name, show_hidden)
                existing_versions = project.get_versions()
                hidden_versions = set(all_versions) - set(existing_versions)
                for version in hidden_versions:
                    project.add_release(release=ReleaseInfo(project_name,
                                            version, index=self._index))
        else:
            versions = get_versions(project_name, show_hidden)
            if not versions:
                raise ProjectNotFound(project_name)
            project = self._get_project(project_name)
            project.add_releases([ReleaseInfo(project_name, version,
                                              index=self._index)
                                  for version in versions])
        project = project.filter(predicate)
        if len(project) == 0:
            raise ReleaseNotFound("%s" % predicate)
        project.sort_releases(prefer_final)
        return project

    def get_distributions(self, project_name, version):
        """Grab informations about distributions from XML-RPC.

        Return a ReleaseInfo object, with distribution-related informations
        filled in.
        """
        url_infos = self.proxy.release_urls(project_name, version)
        project = self._get_project(project_name)
        if version not in project.get_versions():
            project.add_release(release=ReleaseInfo(project_name, version,
                                                    index=self._index))
        release = project.get_release(version)
        for info in url_infos:
            packagetype = info['packagetype']
            dist_infos = {'url': info['url'],
                          'hashval': info['md5_digest'],
                          'hashname': 'md5',
                          'is_external': False,
                          'python_version': info['python_version']}
            release.add_distribution(packagetype, **dist_infos)
        return release

    def get_metadata(self, project_name, version):
        """Retrieve project metadata.

        Return a ReleaseInfo object, with metadata informations filled in.
        """
        # to be case-insensitive, get the informations from the XMLRPC API.
        # (fixed: the lowercased index name was previously compared against
        # the non-lowercased project_name, defeating the case-insensitivity)
        projects = [d['name'] for d in
                    self.proxy.search({'name': project_name})
                    if d['name'].lower() == project_name.lower()]
        if len(projects) > 0:
            project_name = projects[0]

        metadata = self.proxy.release_data(project_name, version)
        project = self._get_project(project_name)
        if version not in project.get_versions():
            project.add_release(release=ReleaseInfo(project_name, version,
                                                    index=self._index))
        release = project.get_release(version)
        release.set_metadata(metadata)
        return release

    def search_projects(self, name=None, operator="or", **kwargs):
        """Find using the keys provided in kwargs.

        You can set operator to "and" or "or".
        """
        for key in kwargs:
            if key not in _SEARCH_FIELDS:
                raise InvalidSearchField(key)
        if name:
            kwargs["name"] = name
        projects = self.proxy.search(kwargs, operator)
        for p in projects:
            project = self._get_project(p['name'])
            try:
                project.add_release(release=ReleaseInfo(p['name'],
                    p['version'], metadata={'summary': p['summary']},
                    index=self._index))
            except IrrationalVersionError as e:
                # skip releases whose version string cannot be parsed
                logger.warning("Irrational version error found: %s", e)
        return [self._projects[p['name'].lower()] for p in projects]

    def get_all_projects(self):
        """Return the list of all projects registered in the package index"""
        projects = self.proxy.list_packages()
        for name in projects:
            self.get_releases(name, show_hidden=True)

        return [self._projects[name.lower()] for name in set(projects)]

    @property
    def proxy(self):
        """Property used to return the XMLRPC server proxy.

        If no server proxy is defined yet, creates a new one::

            >>> client = Client()
            >>> client.proxy
            <ServerProxy for python.org/pypi>

        """
        # lazily created and reused for the lifetime of the client
        if not hasattr(self, '_server_proxy'):
            self._server_proxy = xmlrpc.client.ServerProxy(self.server_url)

        return self._server_proxy
diff --git a/Lib/packaging/resources.py b/Lib/packaging/resources.py
new file mode 100644
index 0000000..e5904f3
--- /dev/null
+++ b/Lib/packaging/resources.py
@@ -0,0 +1,25 @@
+"""Data file path abstraction.
+
+Functions in this module use sysconfig to find the paths to the resource
+files registered in project's setup.cfg file.  See the documentation for
+more information.
+"""
+# TODO write that documentation
+
+from packaging.database import get_distribution
+
+__all__ = ['get_file_path', 'get_file']
+
+
def get_file_path(distribution_name, relative_path):
    """Return the path to a resource file.

    *relative_path* is resolved against the installed distribution named
    *distribution_name*.  Raises LookupError if no such distribution is
    installed.
    """
    dist = get_distribution(distribution_name)
    # identity comparison with None, not equality (original used '!=')
    if dist is not None:
        return dist.get_resource_path(relative_path)
    raise LookupError('no distribution named %r found' % distribution_name)
+
+
def get_file(distribution_name, relative_path, *args, **kwargs):
    """Open and return a resource file.

    Extra positional and keyword arguments are forwarded to open().
    """
    path = get_file_path(distribution_name, relative_path)
    return open(path, *args, **kwargs)
diff --git a/Lib/packaging/run.py b/Lib/packaging/run.py
new file mode 100644
index 0000000..1d4fadb
--- /dev/null
+++ b/Lib/packaging/run.py
@@ -0,0 +1,645 @@
+"""Main command line parser.  Implements the pysetup script."""
+
+import os
+import re
+import sys
+import getopt
+import logging
+
+from packaging import logger
+from packaging.dist import Distribution
+from packaging.util import _is_archive_file
+from packaging.command import get_command_class, STANDARD_COMMANDS
+from packaging.install import install, install_local_project, remove
+from packaging.database import get_distribution, get_distributions
+from packaging.depgraph import generate_graph
+from packaging.fancy_getopt import FancyGetopt
+from packaging.errors import (PackagingArgError, PackagingError,
+                              PackagingModuleError, PackagingClassError,
+                              CCompilerError)
+
+
# Valid command names: a letter followed by letters, digits or underscores.
command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')

common_usage = """\
Actions:
%(actions)s

To get more help on an action, use:

    pysetup action --help
"""

create_usage = """\
Usage: pysetup create
   or: pysetup create --help

Create a new Python package.
"""

graph_usage = """\
Usage: pysetup graph dist
   or: pysetup graph --help

Print dependency graph for the distribution.

positional arguments:
   dist  installed distribution name
"""

install_usage = """\
Usage: pysetup install [dist]
   or: pysetup install [archive]
   or: pysetup install [src_dir]
   or: pysetup install --help

Install a Python distribution from the indexes, source directory, or sdist.

positional arguments:
   archive  path to source distribution (zip, tar.gz)
   dist     distribution name to install from the indexes
   src_dir  path to source directory

"""

metadata_usage = """\
Usage: pysetup metadata [dist] [-f field ...]
   or: pysetup metadata [dist] [--all]
   or: pysetup metadata --help

Print metadata for the distribution.

positional arguments:
   dist  installed distribution name

optional arguments:
   -f     metadata field to print
   --all  print all metadata fields
"""

remove_usage = """\
Usage: pysetup remove dist [-y]
   or: pysetup remove --help

Uninstall a Python distribution.

positional arguments:
   dist  installed distribution name

optional arguments:
   -y  auto confirm package removal
"""

run_usage = """\
Usage: pysetup run [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
   or: pysetup run --help
   or: pysetup run --list-commands
   or: pysetup run cmd --help
"""

list_usage = """\
Usage: pysetup list dist [dist ...]
   or: pysetup list --help
   or: pysetup list --all

Print name, version and location for the matching installed distributions.

positional arguments:
   dist  installed distribution name

optional arguments:
   --all  list all installed distributions
"""

search_usage = """\
Usage: pysetup search [project] [--simple [url]] [--xmlrpc [url] [--fieldname value ...] --operator or|and]
   or: pysetup search --help

Search the indexes for the matching projects.

positional arguments:
    project     the project pattern to search for

optional arguments:
    --xmlrpc [url]      whether to use the xmlrpc index or not. If a URL is
                        specified, it will be used rather than the default one.

    --simple [url]      whether to use the simple index or not. If a URL is
                        specified, it will be used rather than the default one.

    --fieldname value   Make a search on this field. Can only be used if
                        --xmlrpc has been selected or is the default index.

    --operator or|and   Defines the operator to use when doing xmlrpc
                        searches with multiple fieldnames. Can only be used
                        if --xmlrpc has been selected or is the default
                        index.
"""
+
# Global options recognized before the action name.  The optional fourth
# element marks an option that may be repeated (-v for verbosity).
global_options = [
    ('verbose', 'v', "run verbosely (default)", True),
    ('quiet', 'q', "run quietly (turns verbosity off)"),
    ('dry-run', 'n', "don't actually do anything"),
    ('help', 'h', "show detailed help message"),
    ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
    ('version', None, 'display the version'),
]

# --quiet is the negation of --verbose
negative_opt = {'quiet': 'verbose'}

# Options that only display information and return immediately.
display_options = [
    ('help-commands', None, "list all available commands"),
]

# Attribute names (dashes turned to underscores) for the options above.
display_option_names = [x[0].replace('-', '_') for x in display_options]
+
+
+def _parse_args(args, options, long_options):
+    """Transform sys.argv input into a dict.
+
+    :param args: the args to parse (i.e sys.argv)
+    :param options: the list of options to pass to getopt
+    :param long_options: the list of string with the names of the long options
+                         to be passed to getopt.
+
+    The function returns a dict with options/long_options as keys and matching
+    values as values.
+    """
+    optlist, args = getopt.gnu_getopt(args, options, long_options)
+    optdict = {}
+    optdict['args'] = args
+    for k, v in optlist:
+        k = k.lstrip('-')
+        if k not in optdict:
+            optdict[k] = []
+            if v:
+                optdict[k].append(v)
+        else:
+            optdict[k].append(v)
+    return optdict
+
+
class action_help:
    """Decorator short-circuiting an action when a help flag is given.

    If the standard help flags -h or --help appear in the action's
    argument list, the stored help message is printed and the wrapped
    function is not called.
    """

    def __init__(self, help_msg):
        self.help_msg = help_msg

    def __call__(self, f):
        def wrapper(*args, **kwargs):
            # args[1] is the action's argument list (args[0] is the
            # dispatcher)
            action_args = args[1]
            if '-h' in action_args or '--help' in action_args:
                print(self.help_msg)
                return
            return f(*args, **kwargs)
        return wrapper
+
+
@action_help(create_usage)
def _create(dispatcher, args, **kw):
    """Run the interactive project creation helper ('pysetup create')."""
    # late import: create pulls in interactive helpers not needed elsewhere
    from packaging.create import main
    return main()
+
+
@action_help(graph_usage)
def _graph(dispatcher, args, **kw):
    """Print the dependency graph node of an installed distribution."""
    dist = get_distribution(args[1], use_egg_info=True)
    if dist is None:
        print('Distribution not found.')
        return
    all_dists = get_distributions(use_egg_info=True)
    graph = generate_graph(all_dists)
    print(graph.repr_node(dist))
+
+
@action_help(install_usage)
def _install(dispatcher, args, **kw):
    """Install from a source directory, an sdist archive or the indexes."""
    if len(args) < 2:
        # no target given: fall back on the current directory if it looks
        # like a project source tree
        here = os.getcwd()
        listing = os.listdir(here)
        if 'setup.py' not in listing and 'setup.cfg' not in listing:
            logger.warning('no project to install')
            return
        args.insert(1, here)

    target = args[1]
    if os.path.isdir(target) or _is_archive_file(target):
        # source dir or archive file
        install_local_project(target)
    else:
        # project name: download from PyPI
        install(target)
+
+
@action_help(metadata_usage)
def _metadata(dispatcher, args, **kw):
    """Print metadata fields of an installed or local distribution."""
    opts = _parse_args(args[1:], 'f:', ['all'])
    if opts['args']:
        name = opts['args'][0]
        dist = get_distribution(name, use_egg_info=True)
        if dist is None:
            logger.warning('%s not installed', name)
            return
    else:
        # no name given: read the metadata of the current directory
        logger.info('searching local dir for metadata')
        dist = Distribution()
        dist.parse_config_files()

    metadata = dist.metadata

    # --all wins over -f; with neither, nothing is printed
    if 'all' in opts:
        keys = metadata.keys()
    elif 'f' in opts:
        keys = (key for key in opts['f'] if key in metadata)
    else:
        keys = ()

    for key in keys:
        if key not in metadata:
            continue
        print(metadata._convert_name(key) + ':')
        value = metadata[key]
        if isinstance(value, list):
            for item in value:
                print('    ' + item)
        else:
            print('    ' + value.replace('\n', '\n    '))
+
+
@action_help(remove_usage)
def _remove(dispatcher, args, **kw):
    """Uninstall the given installed distributions.

    -y skips the interactive confirmation.  A warning is logged for
    distributions that are not installed.
    """
    opts = _parse_args(args[1:], 'y', [])
    auto_confirm = 'y' in opts

    for dist in set(opts['args']):
        try:
            remove(dist, auto_confirm=auto_confirm)
        except PackagingError:
            logger.warning('%s not installed', dist)
+
+
@action_help(run_usage)
def _run(dispatcher, args, **kw):
    """Run one or more packaging commands ('pysetup run cmd ...').

    With --list-commands as sole argument, print the available commands
    and return.  Otherwise parse each command and its options (which
    populates dispatcher.commands and dispatcher.command_options), then
    run them against a Distribution configured from setup.cfg.
    """
    parser = dispatcher.parser
    args = args[1:]

    commands = STANDARD_COMMANDS  # + extra commands

    if args == ['--list-commands']:
        print('List of available commands:')
        cmds = sorted(commands)

        for cmd in cmds:
            # user-registered command classes take precedence
            cls = dispatcher.cmdclass.get(cmd) or get_command_class(cmd)
            desc = getattr(cls, 'description',
                            '(no description available)')
            print('  %s: %s' % (cmd, desc))
        return

    # consume "cmd [opts] cmd [opts] ..." from the argument list; a None
    # result means a help option was handled and there is nothing to run
    while args:
        args = dispatcher._parse_command_opts(parser, args)
        if args is None:
            return

    # create the Distribution class
    # need to feed setup.cfg here !
    dist = Distribution()

    # Find and parse the config file(s): they will override options from
    # the setup script, but be overridden by the command line.

    # XXX still need to be extracted from Distribution
    dist.parse_config_files()

    try:
        for cmd in dispatcher.commands:
            dist.run_command(cmd, dispatcher.command_options[cmd])

    except KeyboardInterrupt:
        raise SystemExit("interrupted")
    except (IOError, os.error, PackagingError, CCompilerError) as msg:
        raise SystemExit("error: " + str(msg))

    # XXX this is crappy
    return dist
+
+
@action_help(list_usage)
def _list(dispatcher, args, **kw):
    """Print name, version and path of installed distributions.

    With --all, every installed distribution is listed; otherwise only
    those whose lowercased name appears in the positional arguments.
    """
    opts = _parse_args(args[1:], '', ['all'])
    dists = get_distributions(use_egg_info=True)
    if 'all' in opts:
        results = dists
    else:
        wanted = opts['args']
        results = [dist for dist in dists if dist.name.lower() in wanted]

    for dist in results:
        print('%s %s at %s' % (dist.name, dist.metadata['version'], dist.path))
+
+
@action_help(search_usage)
def _search(dispatcher, args, **kw):
    """The search action.

    It is able to search for a specific index (specified with --index), using
    the simple or xmlrpc index types (with --type xmlrpc / --type simple)
    """
    # XXX not implemented: the options are parsed but no query is actually
    # sent to any index yet
    opts = _parse_args(args[1:], '', ['simple', 'xmlrpc'])
    # 1. what kind of index is requested ? (xmlrpc / simple)
+
+
# Table of (name, description, implementation) for the pysetup actions.
# The description feeds the generic usage message; the callable receives
# (dispatcher, args) when its action is selected.
actions = [
    ('run', 'Run one or several commands', _run),
    ('metadata', 'Display the metadata of a project', _metadata),
    ('install', 'Install a project', _install),
    ('remove', 'Remove a project', _remove),
    ('search', 'Search for a project in the indexes', _search),
    ('list', 'Search for local projects', _list),
    ('graph', 'Display a graph', _graph),
    ('create', 'Create a Project', _create),
]
+
+
class Dispatcher:
    """Parse the command line and delegate to the requested action.

    Global options are parsed first; the first remaining positional
    argument names the action, which is looked up in the module-level
    ``actions`` table when the instance is called.
    """

    def __init__(self, args=None):
        self.verbose = 1
        self.dry_run = False
        self.help = False
        self.script_name = 'pysetup'
        self.cmdclass = {}
        self.commands = []
        self.command_options = {}

        for attr in display_option_names:
            setattr(self, attr, False)

        self.parser = FancyGetopt(global_options + display_options)
        self.parser.set_negative_aliases(negative_opt)
        # FIXME this parses everything, including command options (e.g. "run
        # build -i" errors with "option -i not recognized")
        args = self.parser.getopt(args=args, object=self)

        # the first remaining argument, if any, names the action
        if len(args) == 0:
            self.action = None
        else:
            self.action = args[0]

        allowed = [action[0] for action in actions] + [None]
        if self.action not in allowed:
            msg = 'Unrecognized action "%s"' % self.action
            raise PackagingArgError(msg)

        # setting up the logging level from the command-line options
        # -q gets warning, error and critical
        if self.verbose == 0:
            level = logging.WARNING
        # default level or -v gets info too
        # XXX there's a bug somewhere: the help text says that -v is default
        # (and verbose is set to 1 above), but when the user explicitly gives
        # -v on the command line, self.verbose is incremented to 2!  Here we
        # compensate for that (I tested manually).  On a related note, I think
        # it's a good thing to use -q/nothing/-v/-vv on the command line
        # instead of logging constants; it will be easy to add support for
        # logging configuration in setup.cfg for advanced users. --merwok
        elif self.verbose in (1, 2):
            level = logging.INFO
        else:  # -vv and more for debug
            level = logging.DEBUG
        # apply the computed level; previously it was computed and then
        # dropped, which made -q and -v silent no-ops
        logger.setLevel(level)

        self.args = args

        if self.help or self.action is None:
            self._show_help(self.parser, display_options_=False)

    def _parse_command_opts(self, parser, args):
        """Parse the options of one command; return the remaining args.

        Returns None when a help option was handled (nothing left to run).
        """
        # Pull the current command from the head of the command line
        command = args[0]
        if not command_re.match(command):
            raise SystemExit("invalid command name %r" % (command,))
        self.commands.append(command)

        # Dig up the command class that implements this command, so we
        # 1) know that it's a valid command, and 2) know which options
        # it takes.
        try:
            cmd_class = get_command_class(command)
        except PackagingModuleError as msg:
            raise PackagingArgError(msg)

        # XXX We want to push this in packaging.command
        #
        # Require that the command class be derived from Command -- want
        # to be sure that the basic "command" interface is implemented.
        for meth in ('initialize_options', 'finalize_options', 'run'):
            if hasattr(cmd_class, meth):
                continue
            raise PackagingClassError(
                'command %r must implement %r' % (cmd_class, meth))

        # Also make sure that the command object provides a list of its
        # known options.
        if not (hasattr(cmd_class, 'user_options') and
                isinstance(cmd_class.user_options, list)):
            raise PackagingClassError(
                "command class %s must provide "
                "'user_options' attribute (a list of tuples)" % cmd_class)

        # If the command class has a list of negative alias options,
        # merge it in with the global negative aliases.
        _negative_opt = negative_opt.copy()

        if hasattr(cmd_class, 'negative_opt'):
            _negative_opt.update(cmd_class.negative_opt)

        # Check for help_options in command class.  They have a different
        # format (tuple of four) so we need to preprocess them here.
        if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
            help_options = cmd_class.help_options[:]
        else:
            help_options = []

        # All commands support the global options too, just by adding
        # in 'global_options'.
        parser.set_option_table(global_options +
                                cmd_class.user_options +
                                help_options)
        parser.set_negative_aliases(_negative_opt)
        args, opts = parser.getopt(args[1:])

        if hasattr(opts, 'help') and opts.help:
            self._show_command_help(cmd_class)
            return

        if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
            help_option_found = False
            for help_option, short, desc, func in cmd_class.help_options:
                if hasattr(opts, help_option.replace('-', '_')):
                    help_option_found = True
                    if callable(func):
                        func()
                    else:
                        raise PackagingClassError(
                            "invalid help function %r for help option %r: "
                            "must be a callable object (function, etc.)"
                            % (func, help_option))

            if help_option_found:
                return

        # Put the options from the command line into their official
        # holding pen, the 'command_options' dictionary.
        opt_dict = self.get_option_dict(command)
        for name, value in vars(opts).items():
            opt_dict[name] = ("command line", value)

        return args

    def get_option_dict(self, command):
        """Get the option dictionary for a given command.  If that
        command's option dictionary hasn't been created yet, then create it
        and return the new dictionary; otherwise, return the existing
        option dictionary.
        """
        d = self.command_options.get(command)
        if d is None:
            d = self.command_options[command] = {}
        return d

    def show_help(self):
        """Print the full help (global and display options)."""
        self._show_help(self.parser)

    def print_usage(self, parser):
        """Print the action list and the global options."""
        parser.set_option_table(global_options)

        actions_ = ['    %s: %s' % (name, desc) for name, desc, __ in actions]
        usage = common_usage % {'actions': '\n'.join(actions_)}

        parser.print_help(usage + "\nGlobal options:")

    def _show_help(self, parser, global_options_=True, display_options_=True,
                   commands=()):
        """Print help for the global options, display options and the
        given commands (classes or names).
        """
        # late import because of mutual dependence between these modules
        from packaging.command.cmd import Command

        print('Usage: pysetup [options] action [action_options]')
        print()
        if global_options_:
            self.print_usage(self.parser)
            print()

        if display_options_:
            parser.set_option_table(display_options)
            parser.print_help(
                "Information display options (just display " +
                "information, ignore any commands)")
            print()

        for command in commands:
            if isinstance(command, type) and issubclass(command, Command):
                cls = command
            else:
                cls = get_command_class(command)
            if (hasattr(cls, 'help_options') and
                isinstance(cls.help_options, list)):
                parser.set_option_table(cls.user_options + cls.help_options)
            else:
                parser.set_option_table(cls.user_options)

            parser.print_help("Options for %r command:" % cls.__name__)
            print()

    def _show_command_help(self, command):
        """Print description and options of one command (class or name)."""
        if isinstance(command, str):
            command = get_command_class(command)

        name = command.get_command_name()

        desc = getattr(command, 'description', '(no description available)')
        print('Description: %s' % desc)
        print()

        if (hasattr(command, 'help_options') and
            isinstance(command.help_options, list)):
            self.parser.set_option_table(command.user_options +
                                         command.help_options)
        else:
            self.parser.set_option_table(command.user_options)

        self.parser.print_help("Options:")
        print()

    def _get_command_groups(self):
        """Helper function to retrieve all the command class names divided
        into standard commands (listed in
        packaging.command.STANDARD_COMMANDS) and extra commands (given in
        self.cmdclass and not standard commands).
        """
        extra_commands = [cmd for cmd in self.cmdclass
                          if cmd not in STANDARD_COMMANDS]
        return STANDARD_COMMANDS, extra_commands

    def print_commands(self):
        """Print out a help message listing all available commands with a
        description of each.  The list is divided into standard commands
        (listed in packaging.command.STANDARD_COMMANDS) and extra commands
        (given in self.cmdclass and not standard commands).  The
        descriptions come from the command class attribute
        'description'.
        """
        std_commands, extra_commands = self._get_command_groups()
        max_length = max(len(command)
                         for commands in (std_commands, extra_commands)
                         for command in commands)

        self.print_command_list(std_commands, "Standard commands", max_length)
        if extra_commands:
            print()
            self.print_command_list(extra_commands, "Extra commands",
                                    max_length)

    def print_command_list(self, commands, header, max_length):
        """Print a subset of the list of all commands -- used by
        'print_commands()'.
        """
        print(header + ":")

        for cmd in commands:
            cls = self.cmdclass.get(cmd) or get_command_class(cmd)
            description = getattr(cls, 'description',
                                  '(no description available)')

            print("  %-*s  %s" % (max_length, cmd, description))

    def __call__(self):
        """Run the selected action; -1 signals an unknown action."""
        if self.action is None:
            return
        for action, desc, func in actions:
            if action == self.action:
                return func(self, self.args)
        return -1
+
+
def main(args=None):
    """Command-line entry point: parse *args* and run the chosen action."""
    dispatcher = Dispatcher(args)
    return None if dispatcher.action is None else dispatcher()


if __name__ == '__main__':
    sys.exit(main())
diff --git a/Lib/packaging/tests/LONG_DESC.txt b/Lib/packaging/tests/LONG_DESC.txt
new file mode 100644
index 0000000..2b4358a
--- /dev/null
+++ b/Lib/packaging/tests/LONG_DESC.txt
@@ -0,0 +1,44 @@
+CLVault
+=======
+
+CLVault uses Keyring to provide a command-line utility to safely store
+and retrieve passwords.
+
+Install it using pip or the setup.py script::
+
+    $ python setup.py install
+
+    $ pip install clvault
+
+Once it's installed, you will have three scripts installed in your
+Python scripts folder, you can use to list, store and retrieve passwords::
+
+    $ clvault-set blog
+    Set your password:
+    Set the associated username (can be blank): tarek
+    Set a description (can be blank): My blog password
+    Password set.
+
+    $ clvault-get blog
+    The username is "tarek"
+    The password has been copied in your clipboard
+
+    $ clvault-list
+    Registered services:
+    blog    My blog password
+
+
+*clvault-set* takes a service name then prompt you for a password, and some
+optional information about your service. The password is safely stored in
+a keyring while the description is saved in a ``.clvault`` file in your
+home directory. This file is created automatically the first time the command
+is used.
+
+*clvault-get* copies the password for a given service in your clipboard, and
+displays the associated user if any.
+
+*clvault-list* lists all registered services, with their description when
+given.
+
+
+Project page: http://bitbucket.org/tarek/clvault
diff --git a/Lib/packaging/tests/PKG-INFO b/Lib/packaging/tests/PKG-INFO
new file mode 100644
index 0000000..f48546e
--- /dev/null
+++ b/Lib/packaging/tests/PKG-INFO
@@ -0,0 +1,57 @@
+Metadata-Version: 1.2
+Name: CLVault
+Version: 0.5
+Summary: Command-Line utility to store and retrieve passwords
+Home-page: http://bitbucket.org/tarek/clvault
+Author: Tarek Ziade
+Author-email: tarek@ziade.org
+License: PSF
+Keywords: keyring,password,crypt
+Requires-Dist: foo; sys.platform == 'okook'
+Requires-Dist: bar; sys.platform == '%s'
+Platform: UNKNOWN
+Description: CLVault
+       |=======
+       |
+       |CLVault uses Keyring to provide a command-line utility to safely store
+       |and retrieve passwords.
+       |
+       |Install it using pip or the setup.py script::
+       |
+       |    $ python setup.py install
+       |
+       |    $ pip install clvault
+       |
+       |Once it's installed, you will have three scripts installed in your
+       |Python scripts folder, you can use to list, store and retrieve passwords::
+       |
+       |    $ clvault-set blog
+       |    Set your password:
+       |    Set the associated username (can be blank): tarek
+       |    Set a description (can be blank): My blog password
+       |    Password set.
+       |
+       |    $ clvault-get blog
+       |    The username is "tarek"
+       |    The password has been copied in your clipboard
+       |
+       |    $ clvault-list
+       |    Registered services:
+       |    blog    My blog password
+       |
+       |
+       |*clvault-set* takes a service name then prompt you for a password, and some
+       |optional information about your service. The password is safely stored in
+       |a keyring while the description is saved in a ``.clvault`` file in your
+       |home directory. This file is created automatically the first time the command
+       |is used.
+       |
+       |*clvault-get* copies the password for a given service in your clipboard, and
+       |displays the associated user if any.
+       |
+       |*clvault-list* lists all registered services, with their description when
+       |given.
+       |
+       |
+       |Project page: http://bitbucket.org/tarek/clvault
+       |
diff --git a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO
new file mode 100644
index 0000000..dff8d00
--- /dev/null
+++ b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO
@@ -0,0 +1,182 @@
+Metadata-Version: 1.0
+Name: setuptools
+Version: 0.6c9
+Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
+Home-page: http://pypi.python.org/pypi/setuptools
+Author: Phillip J. Eby
+Author-email: distutils-sig@python.org
+License: PSF or ZPL
+Description: ===============================
+        Installing and Using Setuptools
+        ===============================
+
+        .. contents:: **Table of Contents**
+
+
+        -------------------------
+        Installation Instructions
+        -------------------------
+
+        Windows
+        =======
+
+        Install setuptools using the provided ``.exe`` installer.  If you've previously
+        installed older versions of setuptools, please delete all ``setuptools*.egg``
+        and ``setuptools.pth`` files from your system's ``site-packages`` directory
+        (and any other ``sys.path`` directories) FIRST.
+
+        If you are upgrading a previous version of setuptools that was installed using
+        an ``.exe`` installer, please be sure to also *uninstall that older version*
+        via your system's "Add/Remove Programs" feature, BEFORE installing the newer
+        version.
+
+        Once installation is complete, you will find an ``easy_install.exe`` program in
+        your Python ``Scripts`` subdirectory.  Be sure to add this directory to your
+        ``PATH`` environment variable, if you haven't already done so.
+
+
+        RPM-Based Systems
+        =================
+
+        Install setuptools using the provided source RPM.  The included ``.spec`` file
+        assumes you are installing using the default ``python`` executable, and is not
+        specific to a particular Python version.  The ``easy_install`` executable will
+        be installed to a system ``bin`` directory such as ``/usr/bin``.
+
+        If you wish to install to a location other than the default Python
+        installation's default ``site-packages`` directory (and ``$prefix/bin`` for
+        scripts), please use the ``.egg``-based installation approach described in the
+        following section.
+
+
+        Cygwin, Mac OS X, Linux, Other
+        ==============================
+
+        1. Download the appropriate egg for your version of Python (e.g.
+        ``setuptools-0.6c9-py2.4.egg``).  Do NOT rename it.
+
+        2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``.
+        Setuptools will install itself using the matching version of Python (e.g.
+        ``python2.4``), and will place the ``easy_install`` executable in the
+        default location for installing Python scripts (as determined by the
+        standard distutils configuration files, or by the Python installation).
+
+        If you want to install setuptools to somewhere other than ``site-packages`` or
+        your default distutils installation locations for libraries and scripts, you
+        may include EasyInstall command-line options such as ``--prefix``,
+        ``--install-dir``, and so on, following the ``.egg`` filename on the same
+        command line.  For example::
+
+        sh setuptools-0.6c9-py2.4.egg --prefix=~
+
+        You can use ``--help`` to get a full options list, but we recommend consulting
+        the `EasyInstall manual`_ for detailed instructions, especially `the section
+        on custom installation locations`_.
+
+        .. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+        .. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
+
+
+        Cygwin Note
+        -----------
+
+        If you are trying to install setuptools for the **Windows** version of Python
+        (as opposed to the Cygwin version that lives in ``/usr/bin``), you must make
+        sure that an appropriate executable (``python2.3``, ``python2.4``, or
+        ``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg.  For
+        example, doing the following at a Cygwin bash prompt will install setuptools
+        for the **Windows** Python found at ``C:\\Python24``::
+
+        ln -s /cygdrive/c/Python24/python.exe python2.4
+        PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg
+        rm python2.4
+
+
+        Downloads
+        =========
+
+        All setuptools downloads can be found at `the project's home page in the Python
+        Package Index`_.  Scroll to the very bottom of the page to find the links.
+
+        .. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools
+
+        In addition to the PyPI downloads, the development version of ``setuptools``
+        is available from the `Python SVN sandbox`_, and in-development versions of the
+        `0.6 branch`_ are available as well.
+
+        .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
+
+        .. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
+
+        --------------------------------
+        Using Setuptools and EasyInstall
+        --------------------------------
+
+        Here are some of the available manuals, tutorials, and other resources for
+        learning about Setuptools, Python Eggs, and EasyInstall:
+
+        * `The EasyInstall user's guide and reference manual`_
+        * `The setuptools Developer's Guide`_
+        * `The pkg_resources API reference`_
+        * `Package Compatibility Notes`_ (user-maintained)
+        * `The Internal Structure of Python Eggs`_
+
+        Questions, comments, and bug reports should be directed to the `distutils-sig
+        mailing list`_.  If you have written (or know of) any tutorials, documentation,
+        plug-ins, or other resources for setuptools users, please let us know about
+        them there, so this reference list can be updated.  If you have working,
+        *tested* patches to correct problems or add features, you may submit them to
+        the `setuptools bug tracker`_.
+
+        .. _setuptools bug tracker: http://bugs.python.org/setuptools/
+        .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes
+        .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats
+        .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools
+        .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources
+        .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+        .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
+
+
+        -------
+        Credits
+        -------
+
+        * The original design for the ``.egg`` format and the ``pkg_resources`` API was
+        co-created by Phillip Eby and Bob Ippolito.  Bob also implemented the first
+        version of ``pkg_resources``, and supplied the OS X operating system version
+        compatibility algorithm.
+
+        * Ian Bicking implemented many early "creature comfort" features of
+        easy_install, including support for downloading via Sourceforge and
+        Subversion repositories.  Ian's comments on the Web-SIG about WSGI
+        application deployment also inspired the concept of "entry points" in eggs,
+        and he has given talks at PyCon and elsewhere to inform and educate the
+        community about eggs and setuptools.
+
+        * Jim Fulton contributed time and effort to build automated tests of various
+        aspects of ``easy_install``, and supplied the doctests for the command-line
+        ``.exe`` wrappers on Windows.
+
+        * Phillip J. Eby is the principal author and maintainer of setuptools, and
+        first proposed the idea of an importable binary distribution format for
+        Python application plug-ins.
+
+        * Significant parts of the implementation of setuptools were funded by the Open
+        Source Applications Foundation, to provide a plug-in infrastructure for the
+        Chandler PIM application.  In addition, many OSAF staffers (such as Mike
+        "Code Bear" Taylor) contributed their time and stress as guinea pigs for the
+        use of eggs and setuptools, even before eggs were "cool".  (Thanks, guys!)
+
+
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
diff --git a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2 b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2
new file mode 100644
index 0000000..4b3906a
--- /dev/null
+++ b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2
@@ -0,0 +1,183 @@
+Metadata-Version: 1.1
+Name: setuptools
+Version: 0.6c9
+Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
+Home-page: http://pypi.python.org/pypi/setuptools
+Author: Phillip J. Eby
+Author-email: distutils-sig@python.org
+License: PSF or ZPL
+Description: ===============================
+        Installing and Using Setuptools
+        ===============================
+
+        .. contents:: **Table of Contents**
+
+
+        -------------------------
+        Installation Instructions
+        -------------------------
+
+        Windows
+        =======
+
+        Install setuptools using the provided ``.exe`` installer.  If you've previously
+        installed older versions of setuptools, please delete all ``setuptools*.egg``
+        and ``setuptools.pth`` files from your system's ``site-packages`` directory
+        (and any other ``sys.path`` directories) FIRST.
+
+        If you are upgrading a previous version of setuptools that was installed using
+        an ``.exe`` installer, please be sure to also *uninstall that older version*
+        via your system's "Add/Remove Programs" feature, BEFORE installing the newer
+        version.
+
+        Once installation is complete, you will find an ``easy_install.exe`` program in
+        your Python ``Scripts`` subdirectory.  Be sure to add this directory to your
+        ``PATH`` environment variable, if you haven't already done so.
+
+
+        RPM-Based Systems
+        =================
+
+        Install setuptools using the provided source RPM.  The included ``.spec`` file
+        assumes you are installing using the default ``python`` executable, and is not
+        specific to a particular Python version.  The ``easy_install`` executable will
+        be installed to a system ``bin`` directory such as ``/usr/bin``.
+
+        If you wish to install to a location other than the default Python
+        installation's default ``site-packages`` directory (and ``$prefix/bin`` for
+        scripts), please use the ``.egg``-based installation approach described in the
+        following section.
+
+
+        Cygwin, Mac OS X, Linux, Other
+        ==============================
+
+        1. Download the appropriate egg for your version of Python (e.g.
+        ``setuptools-0.6c9-py2.4.egg``).  Do NOT rename it.
+
+        2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``.
+        Setuptools will install itself using the matching version of Python (e.g.
+        ``python2.4``), and will place the ``easy_install`` executable in the
+        default location for installing Python scripts (as determined by the
+        standard distutils configuration files, or by the Python installation).
+
+        If you want to install setuptools to somewhere other than ``site-packages`` or
+        your default distutils installation locations for libraries and scripts, you
+        may include EasyInstall command-line options such as ``--prefix``,
+        ``--install-dir``, and so on, following the ``.egg`` filename on the same
+        command line.  For example::
+
+        sh setuptools-0.6c9-py2.4.egg --prefix=~
+
+        You can use ``--help`` to get a full options list, but we recommend consulting
+        the `EasyInstall manual`_ for detailed instructions, especially `the section
+        on custom installation locations`_.
+
+        .. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+        .. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
+
+
+        Cygwin Note
+        -----------
+
+        If you are trying to install setuptools for the **Windows** version of Python
+        (as opposed to the Cygwin version that lives in ``/usr/bin``), you must make
+        sure that an appropriate executable (``python2.3``, ``python2.4``, or
+        ``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg.  For
+        example, doing the following at a Cygwin bash prompt will install setuptools
+        for the **Windows** Python found at ``C:\\Python24``::
+
+        ln -s /cygdrive/c/Python24/python.exe python2.4
+        PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg
+        rm python2.4
+
+
+        Downloads
+        =========
+
+        All setuptools downloads can be found at `the project's home page in the Python
+        Package Index`_.  Scroll to the very bottom of the page to find the links.
+
+        .. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools
+
+        In addition to the PyPI downloads, the development version of ``setuptools``
+        is available from the `Python SVN sandbox`_, and in-development versions of the
+        `0.6 branch`_ are available as well.
+
+        .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
+
+        .. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
+
+        --------------------------------
+        Using Setuptools and EasyInstall
+        --------------------------------
+
+        Here are some of the available manuals, tutorials, and other resources for
+        learning about Setuptools, Python Eggs, and EasyInstall:
+
+        * `The EasyInstall user's guide and reference manual`_
+        * `The setuptools Developer's Guide`_
+        * `The pkg_resources API reference`_
+        * `Package Compatibility Notes`_ (user-maintained)
+        * `The Internal Structure of Python Eggs`_
+
+        Questions, comments, and bug reports should be directed to the `distutils-sig
+        mailing list`_.  If you have written (or know of) any tutorials, documentation,
+        plug-ins, or other resources for setuptools users, please let us know about
+        them there, so this reference list can be updated.  If you have working,
+        *tested* patches to correct problems or add features, you may submit them to
+        the `setuptools bug tracker`_.
+
+        .. _setuptools bug tracker: http://bugs.python.org/setuptools/
+        .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes
+        .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats
+        .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools
+        .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources
+        .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall
+        .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
+
+
+        -------
+        Credits
+        -------
+
+        * The original design for the ``.egg`` format and the ``pkg_resources`` API was
+        co-created by Phillip Eby and Bob Ippolito.  Bob also implemented the first
+        version of ``pkg_resources``, and supplied the OS X operating system version
+        compatibility algorithm.
+
+        * Ian Bicking implemented many early "creature comfort" features of
+        easy_install, including support for downloading via Sourceforge and
+        Subversion repositories.  Ian's comments on the Web-SIG about WSGI
+        application deployment also inspired the concept of "entry points" in eggs,
+        and he has given talks at PyCon and elsewhere to inform and educate the
+        community about eggs and setuptools.
+
+        * Jim Fulton contributed time and effort to build automated tests of various
+        aspects of ``easy_install``, and supplied the doctests for the command-line
+        ``.exe`` wrappers on Windows.
+
+        * Phillip J. Eby is the principal author and maintainer of setuptools, and
+        first proposed the idea of an importable binary distribution format for
+        Python application plug-ins.
+
+        * Significant parts of the implementation of setuptools were funded by the Open
+        Source Applications Foundation, to provide a plug-in infrastructure for the
+        Chandler PIM application.  In addition, many OSAF staffers (such as Mike
+        "Code Bear" Taylor) contributed their time and stress as guinea pigs for the
+        use of eggs and setuptools, even before eggs were "cool".  (Thanks, guys!)
+
+
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
+Requires: Foo
diff --git a/Lib/packaging/tests/__init__.py b/Lib/packaging/tests/__init__.py
new file mode 100644
index 0000000..0b0e3c5
--- /dev/null
+++ b/Lib/packaging/tests/__init__.py
@@ -0,0 +1,133 @@
+"""Test suite for packaging.
+
+This test suite consists of a collection of test modules in the
+packaging.tests package.  Each test module has a name starting with
+'test' and contains a function test_suite().  The function is expected
+to return an initialized unittest.TestSuite instance.
+
+Utility code is included in packaging.tests.support.
+"""
+
+# Put this text back for the backport
+# Always import unittest from this module; it will be the right version
+# (standard library unittest for 3.2 and higher, third-party unittest2
+# release for older versions).
+
+import os
+import sys
+import unittest
+from test.support import TESTFN
+
+# XXX move helpers to support, add tests for them, remove things that
+# duplicate test.support (or keep them for the backport; needs thinking)
+
+here = os.path.dirname(__file__) or os.curdir
+verbose = 1
+
+def test_suite():
+    suite = unittest.TestSuite()
+    for fn in os.listdir(here):
+        if fn.startswith("test") and fn.endswith(".py"):
+            modname = "packaging.tests." + fn[:-3]
+            __import__(modname)
+            module = sys.modules[modname]
+            suite.addTest(module.test_suite())
+    return suite
+
+
+class Error(Exception):
+    """Base class for regression test exceptions."""
+
+
+class TestFailed(Error):
+    """Test failed."""
+
+
+class BasicTestRunner:
+    def run(self, test):
+        result = unittest.TestResult()
+        test(result)
+        return result
+
+
+def _run_suite(suite, verbose_=1):
+    """Run tests from a unittest.TestSuite-derived class."""
+    global verbose
+    verbose = verbose_
+    if verbose_:
+        runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
+    else:
+        runner = BasicTestRunner()
+
+    result = runner.run(suite)
+    if not result.wasSuccessful():
+        if len(result.errors) == 1 and not result.failures:
+            err = result.errors[0][1]
+        elif len(result.failures) == 1 and not result.errors:
+            err = result.failures[0][1]
+        else:
+            err = "errors occurred; run in verbose mode for details"
+        raise TestFailed(err)
+
+
+def run_unittest(classes, verbose_=1):
+    """Run tests from unittest.TestCase-derived classes.
+
+    Originally extracted from stdlib test.test_support and modified to
+    support unittest2.
+    """
+    valid_types = (unittest.TestSuite, unittest.TestCase)
+    suite = unittest.TestSuite()
+    for cls in classes:
+        if isinstance(cls, str):
+            if cls in sys.modules:
+                suite.addTest(unittest.findTestCases(sys.modules[cls]))
+            else:
+                raise ValueError("str arguments must be keys in sys.modules")
+        elif isinstance(cls, valid_types):
+            suite.addTest(cls)
+        else:
+            suite.addTest(unittest.makeSuite(cls))
+    _run_suite(suite, verbose_)
+
+
+def reap_children():
+    """Use this function at the end of test_main() whenever sub-processes
+    are started.  This will help ensure that no extra children (zombies)
+    stick around to hog resources and create problems when looking
+    for refleaks.
+
+    Extracted from stdlib test.support.
+    """
+
+    # Reap all our dead child processes so we don't leave zombies around.
+    # These hog resources and might be causing some of the buildbots to die.
+    if hasattr(os, 'waitpid'):
+        any_process = -1
+        while True:
+            try:
+                # This will raise an exception on Windows.  That's ok.
+                pid, status = os.waitpid(any_process, os.WNOHANG)
+                if pid == 0:
+                    break
+            except:
+                break
+
+
+def captured_stdout(func, *args, **kw):
+    import io
+    orig_stdout = getattr(sys, 'stdout')
+    setattr(sys, 'stdout', io.StringIO())
+    try:
+        res = func(*args, **kw)
+        sys.stdout.seek(0)
+        return res, sys.stdout.read()
+    finally:
+        setattr(sys, 'stdout', orig_stdout)
+
+
+def unload(name):
+    try:
+        del sys.modules[name]
+    except KeyError:
+        pass
diff --git a/Lib/packaging/tests/__main__.py b/Lib/packaging/tests/__main__.py
new file mode 100644
index 0000000..68ee229
--- /dev/null
+++ b/Lib/packaging/tests/__main__.py
@@ -0,0 +1,20 @@
+"""Packaging test suite runner."""
+
+# Ripped from importlib tests, thanks Brett!
+
+import os
+import sys
+import unittest
+from test.support import run_unittest, reap_children
+
+
+def test_main():
+    start_dir = os.path.dirname(__file__)
+    top_dir = os.path.dirname(os.path.dirname(start_dir))
+    test_loader = unittest.TestLoader()
+    run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
+    reap_children()
+
+
+if __name__ == '__main__':
+    test_main()
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA
new file mode 100644
index 0000000..65e839a
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA
@@ -0,0 +1,4 @@
+Metadata-version: 1.2
+Name: babar
+Version: 0.1
+Author: FELD Boris
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES
new file mode 100644
index 0000000..5d0da49
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES
@@ -0,0 +1,2 @@
+babar.png,babar.png
+babar.cfg,babar.cfg
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/babar.cfg b/Lib/packaging/tests/fake_dists/babar.cfg
new file mode 100644
index 0000000..ecd6efe
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar.cfg
@@ -0,0 +1 @@
+Config
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/babar.png b/Lib/packaging/tests/fake_dists/babar.png
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/babar.png
diff --git a/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO b/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO
new file mode 100644
index 0000000..a176dfd
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO
@@ -0,0 +1,6 @@
+Metadata-Version: 1.2
+Name: bacon
+Version: 0.1
+Provides-Dist: truffles (2.0)
+Provides-Dist: bacon (0.1)
+Obsoletes-Dist: truffles (>=0.9,<=1.5)
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO
new file mode 100644
index 0000000..a7e118a
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO
@@ -0,0 +1,18 @@
+Metadata-Version: 1.0
+Name: banana
+Version: 0.4
+Summary: A yellow fruit
+Home-page: http://en.wikipedia.org/wiki/Banana
+Author: Josip Djolonga
+Author-email: foo@nbar.com
+License: BSD
+Description: A fruit
+Keywords: foo bar
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Scientific/Engineering :: GIS
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt
new file mode 100644
index 0000000..5d3e5f6
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt
@@ -0,0 +1,3 @@
+
+      # -*- Entry points: -*-
+      
\ No newline at end of file
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt
new file mode 100644
index 0000000..4354305
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt
@@ -0,0 +1,6 @@
+# this should be ignored
+
+strawberry >=0.5
+
+[section ignored]
+foo ==0.5
diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt
diff --git a/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info b/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info
new file mode 100644
index 0000000..27cbe30
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: cheese
+Version: 2.0.2
+Provides-Dist: truffles (1.0.2)
+Obsoletes-Dist: truffles (!=1.2,<=2.0)
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
new file mode 100644
index 0000000..418929e
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
@@ -0,0 +1,9 @@
+Metadata-Version: 1.2
+Name: choxie
+Version: 2.0.0.9
+Summary: Chocolate with a kick!
+Requires-Dist: towel-stuff (0.1)
+Requires-Dist: nut
+Provides-Dist: truffles (1.0)
+Obsoletes-Dist: truffles (<=0.8,>=0.5)
+Obsoletes-Dist: truffles (<=0.9,>=0.6)
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py
new file mode 100644
index 0000000..40a96af
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py
new file mode 100644
index 0000000..c4027f3
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+from towel_stuff import Towel
+
+class Chocolate(object):
+    """A piece of chocolate."""
+
+    def wrap_with_towel(self):
+        towel = Towel()
+        towel.wrap(self)
+        return towel
diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py
new file mode 100644
index 0000000..342b8ea
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py
@@ -0,0 +1,5 @@
+# -*- coding: utf-8 -*-
+from choxie.chocolate import Chocolate
+
+class Truffle(Chocolate):
+    """A truffle."""
diff --git a/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO b/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO
new file mode 100644
index 0000000..499a083
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: coconuts-aster
+Version: 10.3
+Provides-Dist: strawberry (0.6)
+Provides-Dist: banana (0.4)
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA
new file mode 100644
index 0000000..0b99f52
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: grammar
+Version: 1.0a4
+Requires-Dist: truffles (>=1.2)
+Author: Sherlock Holmes
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py
new file mode 100644
index 0000000..40a96af
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py
new file mode 100644
index 0000000..66ba796
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+from random import randint
+
+def is_valid_grammar(sentence):
+    if randint(0, 10) < 2:
+        return False
+    else:
+        return True
diff --git a/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info b/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info
new file mode 100644
index 0000000..0c58ec1
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info
@@ -0,0 +1,3 @@
+Metadata-Version: 1.2
+Name: nut
+Version: funkyversion
diff --git a/Lib/packaging/tests/fake_dists/strawberry-0.6.egg b/Lib/packaging/tests/fake_dists/strawberry-0.6.egg
new file mode 100644
index 0000000..6d160e8
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/strawberry-0.6.egg
Binary files differ
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA
new file mode 100644
index 0000000..ca46d0a
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA
@@ -0,0 +1,7 @@
+Metadata-Version: 1.2
+Name: towel-stuff
+Version: 0.1
+Provides-Dist: truffles (1.1.2)
+Provides-Dist: towel-stuff (0.1)
+Obsoletes-Dist: truffles (!=0.8,<1.0)
+Requires-Dist: bacon (<=0.2)
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED
diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py b/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py
new file mode 100644
index 0000000..191f895
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+class Towel(object):
+    """A towel, that one should never be without."""
+
+    def __init__(self, color='tie-dye'):
+        self.color = color
+        self.wrapped_obj = None
+
+    def wrap(self, obj):
+        """Wrap an object up in our towel."""
+        self.wrapped_obj = obj
+
+    def unwrap(self):
+        """Unwrap whatever is in our towel and return whatever it is."""
+        obj = self.wrapped_obj
+        self.wrapped_obj = None
+        return obj
diff --git a/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info b/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info
new file mode 100644
index 0000000..45f0cf8
--- /dev/null
+++ b/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info
@@ -0,0 +1,3 @@
+Metadata-Version: 1.2
+Name: truffles
+Version: 5.0
diff --git a/Lib/packaging/tests/fixer/__init__.py b/Lib/packaging/tests/fixer/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/fixer/__init__.py
diff --git a/Lib/packaging/tests/fixer/fix_idioms.py b/Lib/packaging/tests/fixer/fix_idioms.py
new file mode 100644
index 0000000..64f5ea0
--- /dev/null
+++ b/Lib/packaging/tests/fixer/fix_idioms.py
@@ -0,0 +1,134 @@
+"""Adjust some old Python 2 idioms to their modern counterparts.
+
+* Change some type comparisons to isinstance() calls:
+    type(x) == T -> isinstance(x, T)
+    type(x) is T -> isinstance(x, T)
+    type(x) != T -> not isinstance(x, T)
+    type(x) is not T -> not isinstance(x, T)
+
+* Change "while 1:" into "while True:".
+
+* Change both
+
+    v = list(EXPR)
+    v.sort()
+    foo(v)
+
+and the more general
+
+    v = EXPR
+    v.sort()
+    foo(v)
+
+into
+
+    v = sorted(EXPR)
+    foo(v)
+"""
+# Author: Jacques Frechet, Collin Winter
+
+# Local imports
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Call, Comma, Name, Node, syms
+
+CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
+TYPE = "power< 'type' trailer< '(' x=any ')' > >"
+
+class FixIdioms(fixer_base.BaseFix):
+    """Fixer rewriting the Python 2 idioms listed in the module docstring."""
+
+    explicit = False # The user must ask for this fixer
+
+    # PATTERN alternatives, in order: type(x) ==/is T comparisons (both
+    # operand orders, via the TYPE and CMP fragments), "while 1:", and two
+    # assignment-then-.sort() sequences (one where the value is built with
+    # list(...), one for an arbitrary expression).
+    PATTERN = r"""
+        isinstance=comparison< %s %s T=any >
+        |
+        isinstance=comparison< T=any %s %s >
+        |
+        while_stmt< 'while' while='1' ':' any+ >
+        |
+        sorted=any<
+            any*
+            simple_stmt<
+              expr_stmt< id1=any '='
+                         power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
+              >
+              '\n'
+            >
+            sort=
+            simple_stmt<
+              power< id2=any
+                     trailer< '.' 'sort' > trailer< '(' ')' >
+              >
+              '\n'
+            >
+            next=any*
+        >
+        |
+        sorted=any<
+            any*
+            simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
+            sort=
+            simple_stmt<
+              power< id2=any
+                     trailer< '.' 'sort' > trailer< '(' ')' >
+              >
+              '\n'
+            >
+            next=any*
+        >
+    """ % (TYPE, CMP, CMP, TYPE)
+
+    def match(self, node):
+        r = super(FixIdioms, self).match(node)
+        # If we've matched one of the sort/sorted subpatterns above, we
+        # want to reject matches where the initial assignment and the
+        # subsequent .sort() call involve different identifiers.
+        if r and "sorted" in r:
+            if r["id1"] == r["id2"]:
+                return r
+            return None
+        return r
+
+    def transform(self, node, results):
+        # dispatch on which named subpattern matched
+        if "isinstance" in results:
+            return self.transform_isinstance(node, results)
+        elif "while" in results:
+            return self.transform_while(node, results)
+        elif "sorted" in results:
+            return self.transform_sort(node, results)
+        else:
+            raise RuntimeError("Invalid match")
+
+    def transform_isinstance(self, node, results):
+        """Rewrite a type(x) comparison as isinstance(x, T), negated
+        when the matched operator was '!=' or 'is not'."""
+        x = results["x"].clone() # The thing inside of type()
+        T = results["T"].clone() # The type being compared against
+        x.prefix = ""
+        T.prefix = " "
+        test = Call(Name("isinstance"), [x, Comma(), T])
+        if "n" in results:
+            # 'n' is only bound by the negated operators in CMP
+            test.prefix = " "
+            test = Node(syms.not_test, [Name("not"), test])
+        test.prefix = node.prefix
+        return test
+
+    def transform_while(self, node, results):
+        """Replace the literal 1 in "while 1:" with True, in place."""
+        one = results["while"]
+        one.replace(Name("True", prefix=one.prefix))
+
+    def transform_sort(self, node, results):
+        """Merge an assignment followed by v.sort() into v = sorted(...),
+        then remove the now-redundant sort statement."""
+        sort_stmt = results["sort"]
+        next_stmt = results["next"]
+        list_call = results.get("list")
+        simple_expr = results.get("expr")
+
+        if list_call:
+            # v = list(EXPR): just rename the call to sorted()
+            list_call.replace(Name("sorted", prefix=list_call.prefix))
+        elif simple_expr:
+            # v = EXPR: wrap the whole expression in sorted(...)
+            new = simple_expr.clone()
+            new.prefix = ""
+            simple_expr.replace(Call(Name("sorted"), [new],
+                                     prefix=simple_expr.prefix))
+        else:
+            raise RuntimeError("should not have reached here")
+        sort_stmt.remove()
+        if next_stmt:
+            # NOTE(review): relies on the private _prefix attribute;
+            # presumably equivalent to the public .prefix — confirm.
+            next_stmt[0].prefix = sort_stmt._prefix
diff --git a/Lib/packaging/tests/pypi_server.py b/Lib/packaging/tests/pypi_server.py
new file mode 100644
index 0000000..cc5fcca
--- /dev/null
+++ b/Lib/packaging/tests/pypi_server.py
@@ -0,0 +1,444 @@
+"""Mock PyPI Server implementation, to use in tests.
+
+This module also provides a simple test case to extend if you need to use
+the PyPIServer all along your test case. Be sure to read the documentation
+before any use.
+
+XXX TODO:
+
+The mock server can handle simple HTTP requests (to simulate a simple index) or
+XMLRPC requests, over HTTP. The two do not share the same interface for
+dealing with them, and I think it's a pain.
+
+A good idea could be to re-think a bit the way distributions are handled in the
+mock server. As it should return malformed HTML pages, we need to keep the
+static behavior.
+
+I think of something like that:
+
+    >>> server = PyPIMockServer()
+    >>> server.startHTTP()
+    >>> server.startXMLRPC()
+
+Then, the server must have only one port to rely on, eg.
+
+    >>> server.full_address()
+    "http://ip:port/"
+
+It could be simple to have one HTTP server, relaying the requests to the two
+implementations (static HTTP and XMLRPC over HTTP).
+"""
+
+import os
+import queue
+import select
+import socket
+import threading
+import socketserver
+from functools import wraps
+from http.server import HTTPServer, SimpleHTTPRequestHandler
+from xmlrpc.server import SimpleXMLRPCServer
+
+from packaging.tests import unittest
+
+PYPI_DEFAULT_STATIC_PATH = os.path.join(
+    os.path.dirname(os.path.abspath(__file__)), 'pypiserver')
+
+
+def use_xmlrpc_server(*server_args, **server_kwargs):
+    """Decorator: use_pypi_server preset to serve the XML-RPC interface."""
+    server_kwargs['serve_xmlrpc'] = True
+    return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_http_server(*server_args, **server_kwargs):
+    """Decorator: use_pypi_server preset to serve the plain HTTP interface."""
+    server_kwargs['serve_xmlrpc'] = False
+    return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_pypi_server(*server_args, **server_kwargs):
+    """Decorator to make use of the PyPIServer for test methods,
+    just when needed, and not for the entire duration of the testcase.
+    """
+    def wrapper(func):
+        @wraps(func)
+        def wrapped(*args, **kwargs):
+            # each decorated call gets its own freshly started server,
+            # handed to the test through the `server` keyword argument
+            server = PyPIServer(*server_args, **server_kwargs)
+            server.start()
+            try:
+                func(server=server, *args, **kwargs)
+            finally:
+                # always release the server thread, even if the test fails
+                server.stop()
+        return wrapped
+    return wrapper
+
+
+class PyPIServerTestCase(unittest.TestCase):
+    """Test case keeping a mock PyPI server running for each test."""
+
+    def setUp(self):
+        super(PyPIServerTestCase, self).setUp()
+        # start a fresh server per test; addCleanup guarantees it is
+        # stopped even if the test (or a subclass setUp) fails
+        self.pypi = PyPIServer()
+        self.pypi.start()
+        self.addCleanup(self.pypi.stop)
+
+
+class PyPIServer(threading.Thread):
+    """PyPI Mocked server.
+    Provides a mocked version of the PyPI API's, to ease tests.
+
+    Support serving static content and serving previously given text.
+    """
+
+    def __init__(self, test_static_path=None,
+                 static_filesystem_paths=["default"],
+                 static_uri_paths=["simple", "packages"], serve_xmlrpc=False):
+        """Initialize the server.
+
+        Default behavior is to start the HTTP server. You can either start the
+        xmlrpc server by setting xmlrpc to True. Caution: Only one server will
+        be started.
+
+        static_uri_paths and static_base_path are parameters used to provides
+        respectively the http_paths to serve statically, and where to find the
+        matching files on the filesystem.
+        """
+        # we want to launch the server in a new dedicated thread, to not freeze
+        # tests.
+        threading.Thread.__init__(self)
+        self._run = True
+        self._serve_xmlrpc = serve_xmlrpc
+
+        #TODO allow to serve XMLRPC and HTTP static files at the same time.
+        if not self._serve_xmlrpc:
+            self.server = HTTPServer(('127.0.0.1', 0), PyPIRequestHandler)
+            self.server.RequestHandlerClass.pypi_server = self
+
+            self.request_queue = queue.Queue()
+            self._requests = []
+            self.default_response_status = 404
+            self.default_response_headers = [('Content-type', 'text/plain')]
+            self.default_response_data = "The page does not exists"
+
+            # initialize static paths / filesystems
+            self.static_uri_paths = static_uri_paths
+
+            # append the static paths defined locally
+            if test_static_path is not None:
+                static_filesystem_paths.append(test_static_path)
+            self.static_filesystem_paths = [
+                PYPI_DEFAULT_STATIC_PATH + "/" + path
+                for path in static_filesystem_paths]
+        else:
+            # XMLRPC server
+            self.server = PyPIXMLRPCServer(('127.0.0.1', 0))
+            self.xmlrpc = XMLRPCMockIndex()
+            # register the xmlrpc methods
+            self.server.register_introspection_functions()
+            self.server.register_instance(self.xmlrpc)
+
+        self.address = (self.server.server_name, self.server.server_port)
+        # to not have unwanted outputs.
+        self.server.RequestHandlerClass.log_request = lambda *_: None
+
+    def run(self):
+        # loop because we can't stop it otherwise, for python < 2.6
+        while self._run:
+            r, w, e = select.select([self.server], [], [], 0.5)
+            if r:
+                self.server.handle_request()
+
+    def stop(self):
+        """self shutdown is not supported for python < 2.6"""
+        self._run = False
+
+    def get_next_response(self):
+        return (self.default_response_status,
+                self.default_response_headers,
+                self.default_response_data)
+
+    @property
+    def requests(self):
+        """Use this property to get all requests that have been made
+        to the server
+        """
+        while True:
+            try:
+                self._requests.append(self.request_queue.get_nowait())
+            except queue.Empty:
+                break
+        return self._requests
+
+    @property
+    def full_address(self):
+        return "http://%s:%s" % self.address
+
+
+class PyPIRequestHandler(SimpleHTTPRequestHandler):
+    # we need to access the pypi server while serving the content
+    pypi_server = None
+
+    def serve_request(self):
+        """Serve the content.
+
+        Also record the requests to be accessed later. If trying to access an
+        url matching a static uri, serve static content, otherwise serve
+        what is provided by the `get_next_response` method.
+
+        If nothing is defined there, return a 404 header.
+        """
+        # record the request. Read the input only on PUT or POST requests
+        if self.command in ("PUT", "POST"):
+            if 'content-length' in self.headers:
+                request_data = self.rfile.read(
+                    int(self.headers['content-length']))
+            else:
+                request_data = self.rfile.read()
+
+        elif self.command in ("GET", "DELETE"):
+            request_data = ''
+
+        self.pypi_server.request_queue.put((self, request_data))
+
+        # serve the content from local disc if we request an URL beginning
+        # by a pattern defined in `static_paths`
+        url_parts = self.path.split("/")
+        if (len(url_parts) > 1 and
+                url_parts[1] in self.pypi_server.static_uri_paths):
+            data = None
+            # always take the last first.
+            fs_paths = []
+            fs_paths.extend(self.pypi_server.static_filesystem_paths)
+            fs_paths.reverse()
+            relative_path = self.path
+            for fs_path in fs_paths:
+                try:
+                    if self.path.endswith("/"):
+                        relative_path += "index.html"
+
+                    if relative_path.endswith('.tar.gz'):
+                        with open(fs_path + relative_path, 'br') as file:
+                            data = file.read()
+                        headers = [('Content-type', 'application/x-gtar')]
+                    else:
+                        with open(fs_path + relative_path) as file:
+                            data = file.read().encode()
+                        headers = [('Content-type', 'text/html')]
+
+                    self.make_response(data, headers=headers)
+
+                except IOError:
+                    pass
+
+            if data is None:
+                self.make_response("Not found", 404)
+
+        # otherwise serve the content from get_next_response
+        else:
+            # send back a response
+            status, headers, data = self.pypi_server.get_next_response()
+            self.make_response(data, status, headers)
+
+    do_POST = do_GET = do_DELETE = do_PUT = serve_request
+
+    def make_response(self, data, status=200,
+                      headers=[('Content-type', 'text/html')]):
+        """Send the response to the HTTP client"""
+        if not isinstance(status, int):
+            try:
+                status = int(status)
+            except ValueError:
+                # we probably got something like YYY Codename.
+                # Just get the first 3 digits
+                status = int(status[:3])
+
+        self.send_response(status)
+        for header, value in headers:
+            self.send_header(header, value)
+        self.end_headers()
+
+        if type(data) is str:
+            data = data.encode()
+
+        self.wfile.write(data)
+
+
+class PyPIXMLRPCServer(SimpleXMLRPCServer):
+    def server_bind(self):
+        """Override server_bind to store the server name."""
+        # bind via the TCPServer base (port 0 gives an ephemeral port),
+        # then record the resolved name and actual port for full_address
+        socketserver.TCPServer.server_bind(self)
+        host, port = self.socket.getsockname()[:2]
+        self.server_name = socket.getfqdn(host)
+        self.server_port = port
+
+
+class MockDist:
+    """Fake distribution, used in the Mock PyPI Server"""
+
+    # The constructor arguments mirror the fields exposed by the server's
+    # XML-RPC answers below (url_infos, metadata, search_result).
+    def __init__(self, name, version="1.0", hidden=False, url="http://url/",
+             type="sdist", filename="", size=10000,
+             digest="123456", downloads=7, has_sig=False,
+             python_version="source", comment="comment",
+             author="John Doe", author_email="john@doe.name",
+             maintainer="Main Tayner", maintainer_email="maintainer_mail",
+             project_url="http://project_url/", homepage="http://homepage/",
+             keywords="", platform="UNKNOWN", classifiers=[], licence="",
+             description="Description", summary="Summary", stable_version="",
+             ordering="", documentation_id="", code_kwalitee_id="",
+             installability_id="", obsoletes=[], obsoletes_dist=[],
+             provides=[], provides_dist=[], requires=[], requires_dist=[],
+             requires_external=[], requires_python=""):
+
+        # basic fields
+        self.name = name
+        self.version = version
+        self.hidden = hidden
+
+        # URL infos
+        self.url = url
+        self.digest = digest
+        self.downloads = downloads
+        self.has_sig = has_sig
+        self.python_version = python_version
+        self.comment = comment
+        self.type = type
+
+        # metadata
+        self.author = author
+        self.author_email = author_email
+        self.maintainer = maintainer
+        self.maintainer_email = maintainer_email
+        self.project_url = project_url
+        self.homepage = homepage
+        self.keywords = keywords
+        self.platform = platform
+        self.classifiers = classifiers
+        # NOTE(review): 'licence' spelling is kept deliberately — it matches
+        # the key used in metadata() below; confirm against the real API
+        self.licence = licence
+        self.description = description
+        self.summary = summary
+        self.stable_version = stable_version
+        self.ordering = ordering
+        self.cheesecake_documentation_id = documentation_id
+        self.cheesecake_code_kwalitee_id = code_kwalitee_id
+        self.cheesecake_installability_id = installability_id
+
+        self.obsoletes = obsoletes
+        self.obsoletes_dist = obsoletes_dist
+        self.provides = provides
+        self.provides_dist = provides_dist
+        self.requires = requires
+        self.requires_dist = requires_dist
+        self.requires_external = requires_external
+        self.requires_python = requires_python
+
+    def url_infos(self):
+        """Return the dict served by the release_urls XML-RPC method."""
+        return {
+            'url': self.url,
+            'packagetype': self.type,
+            'filename': 'filename.tar.gz',
+            'size': '6000',
+            'md5_digest': self.digest,
+            'downloads': self.downloads,
+            'has_sig': self.has_sig,
+            'python_version': self.python_version,
+            'comment_text': self.comment,
+        }
+
+    def metadata(self):
+        """Return the dict served by the release_data XML-RPC method."""
+        return {
+            'maintainer': self.maintainer,
+            'project_url': [self.project_url],
+            'maintainer_email': self.maintainer_email,
+            'cheesecake_code_kwalitee_id': self.cheesecake_code_kwalitee_id,
+            'keywords': self.keywords,
+            'obsoletes_dist': self.obsoletes_dist,
+            'requires_external': self.requires_external,
+            'author': self.author,
+            'author_email': self.author_email,
+            'download_url': self.url,
+            'platform': self.platform,
+            'version': self.version,
+            'obsoletes': self.obsoletes,
+            'provides': self.provides,
+            'cheesecake_documentation_id': self.cheesecake_documentation_id,
+            '_pypi_hidden': self.hidden,
+            'description': self.description,
+            '_pypi_ordering': 19,
+            'requires_dist': self.requires_dist,
+            'requires_python': self.requires_python,
+            'classifiers': [],
+            'name': self.name,
+            'licence': self.licence,
+            'summary': self.summary,
+            'home_page': self.homepage,
+            'stable_version': self.stable_version,
+            # fall back to "name (version)" when no provides_dist was given
+            'provides_dist': self.provides_dist or "%s (%s)" % (self.name,
+                                                              self.version),
+            'requires': self.requires,
+            'cheesecake_installability_id': self.cheesecake_installability_id,
+        }
+
+    def search_result(self):
+        """Return the dict served for one hit of the search XML-RPC method."""
+        return {
+            '_pypi_ordering': 0,
+            'version': self.version,
+            'name': self.name,
+            'summary': self.summary,
+        }
+
+
+class XMLRPCMockIndex:
+    """Mock XMLRPC server"""
+
+    def __init__(self, dists=[]):
+        self._dists = dists
+        self._search_result = []
+
+    def add_distributions(self, dists):
+        for dist in dists:
+            self._dists.append(MockDist(**dist))
+
+    def set_distributions(self, dists):
+        self._dists = []
+        self.add_distributions(dists)
+
+    def set_search_result(self, result):
+        """set a predefined search result"""
+        self._search_result = result
+
+    def _get_search_results(self):
+        results = []
+        for name in self._search_result:
+            found_dist = [d for d in self._dists if d.name == name]
+            if found_dist:
+                results.append(found_dist[0])
+            else:
+                dist = MockDist(name)
+                results.append(dist)
+                self._dists.append(dist)
+        return [r.search_result() for r in results]
+
+    def list_packages(self):
+        return [d.name for d in self._dists]
+
+    def package_releases(self, package_name, show_hidden=False):
+        if show_hidden:
+            # return all
+            return [d.version for d in self._dists if d.name == package_name]
+        else:
+            # return only un-hidden
+            return [d.version for d in self._dists if d.name == package_name
+                    and not d.hidden]
+
+    def release_urls(self, package_name, version):
+        return [d.url_infos() for d in self._dists
+                if d.name == package_name and d.version == version]
+
+    def release_data(self, package_name, version):
+        release = [d for d in self._dists
+                   if d.name == package_name and d.version == version]
+        if release:
+            return release[0].metadata()
+        else:
+            return {}
+
+    def search(self, spec, operator="and"):
+        return self._get_search_results()
diff --git a/Lib/packaging/tests/pypi_test_server.py b/Lib/packaging/tests/pypi_test_server.py
new file mode 100644
index 0000000..8c8c641
--- /dev/null
+++ b/Lib/packaging/tests/pypi_test_server.py
@@ -0,0 +1,59 @@
+"""Test PyPI Server implementation at testpypi.python.org, to use in tests.
+
+This is a drop-in replacement for the mock pypi server for testing against a
+real pypi server hosted by python.org especially for testing against.
+"""
+
+import unittest
+
+PYPI_DEFAULT_STATIC_PATH = None
+
+
+def use_xmlrpc_server(*server_args, **server_kwargs):
+    """Decorator: use_pypi_server preset to exercise the XML-RPC interface."""
+    server_kwargs['serve_xmlrpc'] = True
+    return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_http_server(*server_args, **server_kwargs):
+    """Decorator: use_pypi_server preset to exercise the HTTP interface."""
+    server_kwargs['serve_xmlrpc'] = False
+    return use_pypi_server(*server_args, **server_kwargs)
+
+
+def use_pypi_server(*server_args, **server_kwargs):
+    """Decorator to make use of the PyPIServer for test methods,
+    just when needed, and not for the entire duration of the testcase.
+    """
+    def wrapper(func):
+        def wrapped(*args, **kwargs):
+            server = PyPIServer(*server_args, **server_kwargs)
+            func(server=server, *args, **kwargs)
+        return wrapped
+    return wrapper
+
+
+class PyPIServerTestCase(unittest.TestCase):
+    """Test case using the real test server shim for each test."""
+
+    def setUp(self):
+        super(PyPIServerTestCase, self).setUp()
+        # start/stop are no-ops on the real-server shim, but are kept so
+        # this class stays interchangeable with the mock-server version
+        self.pypi = PyPIServer()
+        self.pypi.start()
+        self.addCleanup(self.pypi.stop)
+
+
+class PyPIServer:
+    """Shim to access testpypi.python.org, for testing a real server."""
+
+    def __init__(self, test_static_path=None,
+                 static_filesystem_paths=["default"],
+                 static_uri_paths=["simple"], serve_xmlrpc=False):
+        # the parameters are accepted only for signature compatibility with
+        # the mock packaging.tests.pypi_server.PyPIServer; they are ignored
+        # NOTE(review): the port is a string so the "%s:%s" formatting in
+        # full_address works — confirm no caller expects an int port
+        self.address = ('testpypi.python.org', '80')
+
+    def start(self):
+        # no-op: the real server is always running
+        pass
+
+    def stop(self):
+        # no-op: the real server is always running
+        pass
+
+    @property
+    def full_address(self):
+        return "http://%s:%s" % self.address
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz b/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz
new file mode 100644
index 0000000..333961e
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz
Binary files differ
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html
new file mode 100644
index 0000000..b89f1bd
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="badmd5-0.1.tar.gz#md5=3e3d86693d6564c807272b11b3069dfe" rel="download">badmd5-0.1.tar.gz</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html
new file mode 100644
index 0000000..9e42b16
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="foobar-0.1.tar.gz#md5=fe18804c5b722ff024cabdf514924fc4" rel="download">foobar-0.1.tar.gz</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html
new file mode 100644
index 0000000..9baee04
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html
@@ -0,0 +1,2 @@
+<a href="foobar/">foobar/</a> 
+<a href="badmd5/">badmd5/</a> 
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html
new file mode 100644
index 0000000..c3d42c5
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for bar</title></head><body><h1>Links for bar</h1>
+<a rel="download" href="../../packages/source/F/bar/bar-1.0.tar.gz">bar-1.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/bar/bar-1.0.1.tar.gz">bar-1.0.1.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/bar/bar-2.0.tar.gz">bar-2.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/bar/bar-2.0.1.tar.gz">bar-2.0.1.tar.gz</a><br/> 
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html
new file mode 100644
index 0000000..4f34312
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for baz</title></head><body><h1>Links for baz</h1>
+<a rel="download" href="../../packages/source/F/baz/baz-1.0.tar.gz">baz-1.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/baz/baz-1.0.1.tar.gz">baz-1.0.1.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/baz/baz-2.0.tar.gz">baz-2.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/baz/baz-2.0.1.tar.gz">baz-2.0.1.tar.gz</a><br/> 
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html
new file mode 100644
index 0000000..0565e11
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for foo</title></head><body><h1>Links for foo</h1>
+<a rel="download" href="../../packages/source/F/foo/foo-1.0.tar.gz">foo-1.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/foo/foo-1.0.1.tar.gz">foo-1.0.1.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/foo/foo-2.0.tar.gz">foo-2.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/foo/foo-2.0.1.tar.gz">foo-2.0.1.tar.gz</a><br/> 
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html
new file mode 100644
index 0000000..a70cfd3
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html
@@ -0,0 +1,3 @@
+<a href="foo/">foo/</a> 
+<a href="bar/">bar/</a> 
+<a href="baz/">baz/</a> 
diff --git a/Lib/packaging/tests/pypiserver/project_list/simple/index.html b/Lib/packaging/tests/pypiserver/project_list/simple/index.html
new file mode 100644
index 0000000..b36d728
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/project_list/simple/index.html
@@ -0,0 +1,5 @@
+<a class="test" href="yeah">FooBar-bar</a>
+<a class="test" href="yeah">Foobar-baz</a>
+<a class="test" href="yeah">Baz-FooBar</a>
+<a class="test" href="yeah">Baz</a>
+<a class="test" href="yeah">Foo</a>
diff --git a/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html
new file mode 100644
index 0000000..a282a4e
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html
@@ -0,0 +1,6 @@
+<html><head><title>Links for Foobar</title></head><body><h1>Links for Foobar</h1>
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-1.0.tar.gz#md5=98fa833fdabcdd78d00245aead66c174">Foobar-1.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-1.0.1.tar.gz#md5=2351efb20f6b7b5d9ce80fa4cb1bd9ca">Foobar-1.0.1.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-2.0.tar.gz#md5=98fa833fdabcdd78d00245aead66c274">Foobar-2.0.tar.gz</a><br/> 
+<a rel="download" href="../../packages/source/F/Foobar/Foobar-2.0.1.tar.gz#md5=2352efb20f6b7b5d9ce80fa4cb2bd9ca">Foobar-2.0.1.tar.gz</a><br/> 
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html b/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a> 
diff --git a/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html b/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html
new file mode 100644
index 0000000..265ee0a
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html
@@ -0,0 +1 @@
+index.html from external server
diff --git a/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html b/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html
new file mode 100644
index 0000000..6f97667
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html
@@ -0,0 +1 @@
+Yeah
diff --git a/Lib/packaging/tests/pypiserver/with_externals/external/external.html b/Lib/packaging/tests/pypiserver/with_externals/external/external.html
new file mode 100644
index 0000000..92e4702
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_externals/external/external.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html
new file mode 100644
index 0000000..b100a26
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html
@@ -0,0 +1,4 @@
+<html><body>
+<a rel ="download" href="/foobar-0.1.tar.gz#md5=12345678901234567">foobar-0.1.tar.gz</a><br/>
+<a href="../../external/external.html" rel="homepage">external homepage</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_externals/simple/index.html b/Lib/packaging/tests/pypiserver/with_externals/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_externals/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a> 
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html b/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html
new file mode 100644
index 0000000..1cc0c32
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html
@@ -0,0 +1,7 @@
+<html>
+<body>
+<p>a rel=homepage HTML page</p>
+<a href="/foobar-2.0.tar.gz">foobar 2.0</a>
+</body>
+</html>
+
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html b/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html
new file mode 100644
index 0000000..f6ace22
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html
@@ -0,0 +1 @@
+A page linked without rel="download" or rel="homepage" link.
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html
new file mode 100644
index 0000000..171df93
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html
@@ -0,0 +1,6 @@
+<html><body>
+<a rel="download" href="/foobar-0.1.tar.gz">foobar-0.1.tar.gz</a><br/>
+<a href="../../external/homepage.html" rel="homepage">external homepage</a><br/>
+<a href="../../external/nonrel.html">unrelated link</a><br/>
+<a href="/unrelated-0.2.tar.gz">unrelated download</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html b/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a> 
diff --git a/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html
new file mode 100644
index 0000000..b2885ae
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html
@@ -0,0 +1,4 @@
+<html><body>
+<a rel="download" href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
+<a href="http://a-really-external-website/external/external.html" rel="homepage">external homepage</a><br/>
+</body></html>
diff --git a/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html b/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html
new file mode 100644
index 0000000..a1a7bb7
--- /dev/null
+++ b/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html
@@ -0,0 +1 @@
+<a href="foobar/">foobar/</a> 
diff --git a/Lib/packaging/tests/support.py b/Lib/packaging/tests/support.py
new file mode 100644
index 0000000..cf5d788
--- /dev/null
+++ b/Lib/packaging/tests/support.py
@@ -0,0 +1,259 @@
+"""Support code for packaging test cases.
+
+A few helper classes are provided: LoggingCatcher, TempdirManager and
+EnvironRestorer. They are written to be used as mixins::
+
+    from packaging.tests import unittest
+    from packaging.tests.support import LoggingCatcher
+
+    class SomeTestCase(LoggingCatcher, unittest.TestCase):
+
+If you need to define a setUp method on your test class, you have to
+call the mixin class' setUp method or it won't work (same thing for
+tearDown):
+
+        def setUp(self):
+            super(SomeTestCase, self).setUp()
+            ... # other setup code
+
+Also provided is a DummyCommand class, useful to mock commands in the
+tests of another command that needs them, a create_distribution function
+and a skip_unless_symlink decorator.
+
+Each class or function has a docstring to explain its purpose and usage.
+"""
+
import os
import errno
import shutil
import logging
import logging.handlers
import weakref
import tempfile

from packaging import logger
from packaging.dist import Distribution
from packaging.tests import unittest
+
+__all__ = ['LoggingCatcher', 'TempdirManager', 'EnvironRestorer',
+           'DummyCommand', 'unittest', 'create_distribution',
+           'skip_unless_symlink']
+
+
+class _TestHandler(logging.handlers.BufferingHandler):
+    # stolen and adapted from test.support
+
+    def __init__(self):
+        logging.handlers.BufferingHandler.__init__(self, 0)
+        self.setLevel(logging.DEBUG)
+
+    def shouldFlush(self):
+        return False
+
+    def emit(self, record):
+        self.buffer.append(record)
+
+
class LoggingCatcher:
    """TestCase-compatible mixin to receive logging calls.

    Upon setUp, instances of this class get a BufferingHandler that's
    configured to record all messages logged to the 'packaging' logger.

    Use get_logs to retrieve messages and self.loghandler.flush to discard
    them.
    """

    def setUp(self):
        super(LoggingCatcher, self).setUp()
        self.loghandler = handler = _TestHandler()
        logger.addHandler(handler)
        # restore the previous level when the test finishes
        self.addCleanup(logger.setLevel, logger.level)
        logger.setLevel(logging.DEBUG)  # we want all messages

    def tearDown(self):
        handler = self.loghandler
        # All this is necessary to properly shut down the logging system and
        # avoid a regrtest complaint.  Thanks to Vinay Sajip for the help.
        handler.close()
        logger.removeHandler(handler)
        # drop the weakref the logging module keeps for atexit shutdown
        for ref in weakref.getweakrefs(handler):
            logging._removeHandlerRef(ref)
        del self.loghandler
        super(LoggingCatcher, self).tearDown()

    def get_logs(self, *levels):
        """Return a list of caught messages with level in *levels*.

        Without explicit levels given, all messages are returned.  For log
        calls with arguments (i.e. logger.info('bla bla %s', arg)), the
        messages are formatted (record.getMessage()) before being returned.

        Example: self.get_logs(logging.WARN, logging.DEBUG).
        """
        if not levels:
            return [log.getMessage() for log in self.loghandler.buffer]
        return [log.getMessage() for log in self.loghandler.buffer
                if log.levelno in levels]
+
+
class TempdirManager:
    """TestCase-compatible mixin to create temporary directories and files.

    Directories and files created in a test_* method will be removed after it
    has run.
    """

    def setUp(self):
        super(TempdirManager, self).setUp()
        # every mkdtemp/mktempfile call works under this root, so one
        # rmtree in tearDown cleans everything up
        self._basetempdir = tempfile.mkdtemp()

    def tearDown(self):
        # ignore_errors on Windows, where files still opened by another
        # process cannot be removed
        shutil.rmtree(self._basetempdir, os.name in ('nt', 'cygwin'))
        super(TempdirManager, self).tearDown()

    def mktempfile(self):
        """Create a read-write temporary file and return it."""

        def _delete_file(filename):
            # the file may already have been removed by the test itself
            try:
                os.remove(filename)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise

        fd, fn = tempfile.mkstemp(dir=self._basetempdir)
        os.close(fd)
        fp = open(fn, 'w+')
        self.addCleanup(fp.close)
        self.addCleanup(_delete_file, fn)
        return fp

    def mkdtemp(self):
        """Create a temporary directory and return its path."""
        return tempfile.mkdtemp(dir=self._basetempdir)

    def write_file(self, path, content='xxx'):
        """Write a file at the given path.

        path can be a string, a tuple or a list; if it's a tuple or list,
        os.path.join will be used to produce a path.
        """
        if isinstance(path, (list, tuple)):
            path = os.path.join(*path)
        with open(path, 'w') as f:
            f.write(content)

    def create_dist(self, **kw):
        """Create a stub distribution object and files.

        This function creates a Distribution instance (use keyword arguments
        to customize it) and a temporary directory with a project structure
        (currently an empty directory).

        It returns the path to the directory and the Distribution instance.
        You can use self.write_file to write any file in that
        directory, e.g. setup scripts or Python modules.
        """
        kw.setdefault('name', 'foo')
        tmp_dir = self.mkdtemp()
        project_dir = os.path.join(tmp_dir, kw['name'])
        os.mkdir(project_dir)
        dist = Distribution(attrs=kw)
        return project_dir, dist

    def assertIsFile(self, *args):
        """Assert that os.path.join(*args) names an existing regular file."""
        path = os.path.join(*args)
        dirname = os.path.dirname(path)
        file = os.path.basename(path)
        if os.path.isdir(dirname):
            # list the directory in the failure message to ease debugging
            files = os.listdir(dirname)
            msg = "%s not found in %s: %s" % (file, dirname, files)
            assert os.path.isfile(path), msg
        else:
            raise AssertionError(
                    '%s not found. %s does not exist' % (file, dirname))

    def assertIsNotFile(self, *args):
        """Assert that os.path.join(*args) does not name a regular file."""
        path = os.path.join(*args)
        self.assertFalse(os.path.isfile(path), "%r exists" % path)
+
+
class EnvironRestorer:
    """TestCase-compatible mixin to restore or delete environment variables.

    The variables to restore (or delete if they were not originally present)
    must be explicitly listed in self.restore_environ.  It's better to be
    aware of what we're modifying instead of saving and restoring the whole
    environment.
    """

    def setUp(self):
        super(EnvironRestorer, self).setUp()
        # variables present before the test, with their original values
        self._saved = []
        # variables absent before the test, to delete afterwards
        self._added = []
        for var in self.restore_environ:
            try:
                self._saved.append((var, os.environ[var]))
            except KeyError:
                self._added.append(var)

    def tearDown(self):
        # _saved and _added are disjoint, so the order of the two loops
        # does not matter
        for var in self._added:
            os.environ.pop(var, None)
        for var, original in self._saved:
            os.environ[var] = original
        super(EnvironRestorer, self).tearDown()
+
+
class DummyCommand:
    """Class to store options for retrieval via set_undefined_options().

    Useful for mocking one dependency command in the tests for another
    command, see e.g. the dummy build command in test_build_scripts.
    """

    def __init__(self, **kwargs):
        # expose every keyword argument as an instance attribute
        self.__dict__.update(kwargs)

    def ensure_finalized(self):
        # nothing to finalize on a dummy command
        pass
+
+
class TestDistribution(Distribution):
    """Distribution subclass that skips the default config-file search.

    The ._config_files attribute must be set before .parse_config_files()
    is called.
    """

    def find_config_files(self):
        return self._config_files
+
+
def create_distribution(configfiles=()):
    """Return a TestDistribution with the given config files parsed."""
    dist = TestDistribution()
    dist._config_files = configfiles
    # make the config object use our restricted file list too
    dist.config.find_config_files = dist.find_config_files
    dist.parse_config_files()
    dist.parse_command_line()
    return dist
+
+
# test.support.skip_unless_symlink is not always available; fall back to
# skipping the decorated tests unconditionally instead of skipping them
# only when symlinks are unsupported
try:
    from test.support import skip_unless_symlink
except ImportError:
    skip_unless_symlink = unittest.skip(
        'requires test.support.skip_unless_symlink')
diff --git a/Lib/packaging/tests/test_ccompiler.py b/Lib/packaging/tests/test_ccompiler.py
new file mode 100644
index 0000000..dd4bdd9
--- /dev/null
+++ b/Lib/packaging/tests/test_ccompiler.py
@@ -0,0 +1,15 @@
+"""Tests for distutils.compiler.ccompiler."""
+
+from packaging.compiler import ccompiler
+from packaging.tests import unittest, support
+
+
# Placeholder test case: importing packaging.compiler.ccompiler above at
# least exercises the module import.
class CCompilerTestCase(unittest.TestCase):
    pass  # XXX need some tests on CCompiler
+
+
def test_suite():
    """Return a suite gathering all tests defined in this module."""
    suite = unittest.makeSuite(CCompilerTestCase)
    return suite


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_bdist.py b/Lib/packaging/tests/test_command_bdist.py
new file mode 100644
index 0000000..1522b7e
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist.py
@@ -0,0 +1,77 @@
+"""Tests for distutils.command.bdist."""
+
+from packaging import util
+from packaging.command.bdist import bdist, show_formats
+
+from packaging.tests import unittest, support, captured_stdout
+
+
class BuildTestCase(support.TempdirManager,
                    support.LoggingCatcher,
                    unittest.TestCase):
    """Tests for the bdist command."""

    def _mock_get_platform(self):
        # record the call, then defer to the real function so results
        # stay correct
        self._get_platform_called = True
        return self._get_platform()

    def setUp(self):
        super(BuildTestCase, self).setUp()

        # mock util.get_platform to track whether finalization uses it
        self._get_platform_called = False
        self._get_platform = util.get_platform
        util.get_platform = self._mock_get_platform

    def tearDown(self):
        super(BuildTestCase, self).tearDown()
        # undo the monkey-patching done in setUp
        util.get_platform = self._get_platform

    def test_formats(self):

        # let's create a command and make sure
        # we can fix the format
        pkg_pth, dist = self.create_dist()
        cmd = bdist(dist)
        cmd.formats = ['msi']
        cmd.ensure_finalized()
        self.assertEqual(cmd.formats, ['msi'])

        # what formats does bdist offer?
        # XXX hard-coding the list in bdist is not the best way to keep
        # track of the bdist_* commands; we should add a registry
        formats = sorted(('zip', 'gztar', 'bztar', 'ztar',
                          'tar', 'wininst', 'msi'))
        found = sorted(cmd.format_command)
        self.assertEqual(found, formats)

    def test_skip_build(self):
        # skip_build=False: util.get_platform is not called (presumably
        # the plat_name comes from the build command instead)
        pkg_pth, dist = self.create_dist()
        cmd = bdist(dist)
        cmd.skip_build = False
        cmd.formats = ['ztar']
        cmd.ensure_finalized()
        self.assertFalse(self._get_platform_called)

        # skip_build=True: finalization falls back to util.get_platform
        pkg_pth, dist = self.create_dist()
        cmd = bdist(dist)
        cmd.skip_build = True
        cmd.formats = ['ztar']
        cmd.ensure_finalized()
        self.assertTrue(self._get_platform_called)

    def test_show_formats(self):
        __, stdout = captured_stdout(show_formats)

        # the output should be a header line + one line per format
        num_formats = len(bdist.format_commands)
        output = [line for line in stdout.split('\n')
                  if line.strip().startswith('--formats=')]
        self.assertEqual(len(output), num_formats)
+
+
def test_suite():
    """Return a suite gathering all tests defined in this module."""
    suite = unittest.makeSuite(BuildTestCase)
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_bdist_dumb.py b/Lib/packaging/tests/test_command_bdist_dumb.py
new file mode 100644
index 0000000..ce1563f
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist_dumb.py
@@ -0,0 +1,103 @@
+"""Tests for distutils.command.bdist_dumb."""
+
+import sys
+import os
+
+# zlib is not used here, but if it's not available
+# test_simple_built will fail
+try:
+    import zlib
+except ImportError:
+    zlib = None
+
+from packaging.dist import Distribution
+from packaging.command.bdist_dumb import bdist_dumb
+from packaging.tests import unittest, support
+
+
# Minimal setup script written into the stub project used by
# test_simple_built.
# NOTE(review): 'distutils.run' does not exist -- 'packaging.run' was
# probably meant.  The script is only written to disk by the test, never
# executed, so the typo is currently harmless; confirm before reusing it.
SETUP_PY = """\
from distutils.run import setup
import foo

setup(name='foo', version='0.1', py_modules=['foo'],
      url='xxx', author='xxx', author_email='xxx')
"""
+
+
class BuildDumbTestCase(support.TempdirManager,
                        support.LoggingCatcher,
                        unittest.TestCase):
    """Tests for the bdist_dumb command."""

    def setUp(self):
        super(BuildDumbTestCase, self).setUp()
        # save the working directory and sys.argv (both the object and its
        # contents), modified by test_simple_built
        self.old_location = os.getcwd()
        self.old_sys_argv = sys.argv, sys.argv[:]

    def tearDown(self):
        os.chdir(self.old_location)
        # restore the original list object and its original contents
        sys.argv = self.old_sys_argv[0]
        sys.argv[:] = self.old_sys_argv[1]
        super(BuildDumbTestCase, self).tearDown()

    @unittest.skipUnless(zlib, "requires zlib")
    def test_simple_built(self):

        # let's create a simple package
        tmp_dir = self.mkdtemp()
        pkg_dir = os.path.join(tmp_dir, 'foo')
        os.mkdir(pkg_dir)
        self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
        self.write_file((pkg_dir, 'foo.py'), '#')
        self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
        self.write_file((pkg_dir, 'README'), '')

        dist = Distribution({'name': 'foo', 'version': '0.1',
                             'py_modules': ['foo'],
                             'url': 'xxx', 'author': 'xxx',
                             'author_email': 'xxx'})
        dist.script_name = 'setup.py'
        os.chdir(pkg_dir)

        sys.argv[:] = ['setup.py']
        cmd = bdist_dumb(dist)

        # so the output is the same no matter
        # what is the platform
        cmd.format = 'zip'

        cmd.ensure_finalized()
        cmd.run()

        # see what we have
        dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
        base = "%s.%s" % (dist.get_fullname(), cmd.plat_name)
        if os.name == 'os2':
            # OS/2 paths cannot contain ':'
            base = base.replace(':', '-')

        wanted = ['%s.zip' % base]
        self.assertEqual(dist_created, wanted)

        # now let's check what we have in the zip file
        # XXX to be done

    def test_finalize_options(self):
        pkg_dir, dist = self.create_dist()
        os.chdir(pkg_dir)
        cmd = bdist_dumb(dist)
        self.assertEqual(cmd.bdist_dir, None)
        cmd.finalize_options()

        # bdist_dir is initialized to bdist_base/dumb if not set
        base = cmd.get_finalized_command('bdist').bdist_base
        self.assertEqual(cmd.bdist_dir, os.path.join(base, 'dumb'))

        # the format is set to a default value depending on the os.name
        default = cmd.default_format[os.name]
        self.assertEqual(cmd.format, default)
+
+
def test_suite():
    """Return a suite gathering all tests defined in this module."""
    suite = unittest.makeSuite(BuildDumbTestCase)
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_bdist_msi.py b/Lib/packaging/tests/test_command_bdist_msi.py
new file mode 100644
index 0000000..fded962
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist_msi.py
@@ -0,0 +1,25 @@
+"""Tests for distutils.command.bdist_msi."""
+import sys
+
+from packaging.tests import unittest, support
+
+
class BDistMSITestCase(support.TempdirManager,
                       support.LoggingCatcher,
                       unittest.TestCase):
    """Tests for the bdist_msi command."""

    @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
    def test_minimal(self):
        # minimal test XXX need more tests
        # the import is done here rather than at module level, presumably
        # because bdist_msi needs Windows-only modules -- TODO confirm
        from packaging.command.bdist_msi import bdist_msi
        pkg_pth, dist = self.create_dist()
        cmd = bdist_msi(dist)
        cmd.ensure_finalized()
+
+
def test_suite():
    """Return a suite gathering all tests defined in this module."""
    suite = unittest.makeSuite(BDistMSITestCase)
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_bdist_wininst.py b/Lib/packaging/tests/test_command_bdist_wininst.py
new file mode 100644
index 0000000..09bdaad
--- /dev/null
+++ b/Lib/packaging/tests/test_command_bdist_wininst.py
@@ -0,0 +1,32 @@
+"""Tests for distutils.command.bdist_wininst."""
+
+from packaging.command.bdist_wininst import bdist_wininst
+from packaging.tests import unittest, support
+
+
class BuildWinInstTestCase(support.TempdirManager,
                           support.LoggingCatcher,
                           unittest.TestCase):
    """Tests for the bdist_wininst command."""

    def test_get_exe_bytes(self):

        # issue5731: command was broken on non-windows platforms
        # this test makes sure it works now for every platform
        # let's create a command
        pkg_pth, dist = self.create_dist()
        cmd = bdist_wininst(dist)
        cmd.ensure_finalized()

        # let's run the code that finds the right wininst*.exe file
        # and make sure it finds it and returns its content
        # no matter what platform we have
        exe_file = cmd.get_exe_bytes()
        self.assertGreater(len(exe_file), 10)
+
+
def test_suite():
    """Return a suite gathering all tests defined in this module."""
    suite = unittest.makeSuite(BuildWinInstTestCase)
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_build.py b/Lib/packaging/tests/test_command_build.py
new file mode 100644
index 0000000..91fbe42
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build.py
@@ -0,0 +1,55 @@
+"""Tests for distutils.command.build."""
+import os
+import sys
+
+from packaging.command.build import build
+from sysconfig import get_platform
+from packaging.tests import unittest, support
+
+
class BuildTestCase(support.TempdirManager,
                    support.LoggingCatcher,
                    unittest.TestCase):
    """Tests for the build command."""

    def test_finalize_options(self):
        pkg_dir, dist = self.create_dist()
        cmd = build(dist)
        cmd.finalize_options()

        # if not specified, plat_name gets the current platform
        self.assertEqual(cmd.plat_name, get_platform())

        # build_purelib is build + lib
        wanted = os.path.join(cmd.build_base, 'lib')
        self.assertEqual(cmd.build_purelib, wanted)

        # build_platlib is 'build/lib.platform-x.x[-pydebug]'
        # examples:
        #   build/lib.macosx-10.3-i386-2.7
        # NOTE(review): sys.version[0:3] assumes single-digit version
        # components; presumably it mirrors what the build command itself
        # computes -- confirm if versions ever grow extra digits
        plat_spec = '.%s-%s' % (cmd.plat_name, sys.version[0:3])
        if hasattr(sys, 'gettotalrefcount'):
            # pydebug builds get an extra marker appended
            self.assertTrue(cmd.build_platlib.endswith('-pydebug'))
            plat_spec += '-pydebug'
        wanted = os.path.join(cmd.build_base, 'lib' + plat_spec)
        self.assertEqual(cmd.build_platlib, wanted)

        # by default, build_lib = build_purelib
        self.assertEqual(cmd.build_lib, cmd.build_purelib)

        # build_temp is build/temp.<plat>
        wanted = os.path.join(cmd.build_base, 'temp' + plat_spec)
        self.assertEqual(cmd.build_temp, wanted)

        # build_scripts is build/scripts-x.x
        wanted = os.path.join(cmd.build_base, 'scripts-' + sys.version[0:3])
        self.assertEqual(cmd.build_scripts, wanted)

        # executable is os.path.normpath(sys.executable)
        self.assertEqual(cmd.executable, os.path.normpath(sys.executable))
+
+
def test_suite():
    """Return a suite gathering all tests defined in this module."""
    suite = unittest.makeSuite(BuildTestCase)
    return suite


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_build_clib.py b/Lib/packaging/tests/test_command_build_clib.py
new file mode 100644
index 0000000..a2a8583
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_clib.py
@@ -0,0 +1,141 @@
+"""Tests for distutils.command.build_clib."""
+import os
+import sys
+
+from packaging.util import find_executable
+from packaging.command.build_clib import build_clib
+from packaging.errors import PackagingSetupError
+from packaging.tests import unittest, support
+
+
class BuildCLibTestCase(support.TempdirManager,
                        support.LoggingCatcher,
                        unittest.TestCase):
    """Tests for the build_clib command."""

    def test_check_library_dist(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        # 'libraries' option must be a list
        self.assertRaises(PackagingSetupError, cmd.check_library_list, 'foo')

        # each element of 'libraries' must be a 2-tuple
        self.assertRaises(PackagingSetupError, cmd.check_library_list,
                          ['foo1', 'foo2'])

        # first element of each tuple in 'libraries'
        # must be a string (the library name)
        self.assertRaises(PackagingSetupError, cmd.check_library_list,
                          [(1, 'foo1'), ('name', 'foo2')])

        # library name may not contain directory separators
        self.assertRaises(PackagingSetupError, cmd.check_library_list,
                          [('name', 'foo1'),
                           ('another/name', 'foo2')])

        # second element of each tuple must be a dictionary (build info)
        self.assertRaises(PackagingSetupError, cmd.check_library_list,
                          [('name', {}),
                           ('another', 'foo2')])

        # those work
        libs = [('name', {}), ('name', {'ok': 'good'})]
        cmd.check_library_list(libs)

    def test_get_source_files(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        # in the 'libraries' option, 'sources' must be present and must be
        # a list (or tuple) of source filenames
        cmd.libraries = [('name', {})]
        self.assertRaises(PackagingSetupError, cmd.get_source_files)

        cmd.libraries = [('name', {'sources': 1})]
        self.assertRaises(PackagingSetupError, cmd.get_source_files)

        cmd.libraries = [('name', {'sources': ['a', 'b']})]
        self.assertEqual(cmd.get_source_files(), ['a', 'b'])

        cmd.libraries = [('name', {'sources': ('a', 'b')})]
        self.assertEqual(cmd.get_source_files(), ['a', 'b'])

        cmd.libraries = [('name', {'sources': ('a', 'b')}),
                         ('name2', {'sources': ['c', 'd']})]
        self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd'])

    def test_build_libraries(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        # compiler stub: accepts any arguments and does nothing
        class FakeCompiler:
            def compile(*args, **kw):
                pass
            create_static_lib = compile

        cmd.compiler = FakeCompiler()

        # build_libraries is also doing a bit of type checking
        lib = [('name', {'sources': 'notvalid'})]
        self.assertRaises(PackagingSetupError, cmd.build_libraries, lib)

        lib = [('name', {'sources': []})]
        cmd.build_libraries(lib)

        lib = [('name', {'sources': ()})]
        cmd.build_libraries(lib)

    def test_finalize_options(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        # a string include_dirs is converted to a one-element list
        cmd.include_dirs = 'one-dir'
        cmd.finalize_options()
        self.assertEqual(cmd.include_dirs, ['one-dir'])

        cmd.include_dirs = None
        cmd.finalize_options()
        self.assertEqual(cmd.include_dirs, [])

        # non-list 'libraries' on the distribution is rejected
        cmd.distribution.libraries = 'WONTWORK'
        self.assertRaises(PackagingSetupError, cmd.finalize_options)

    @unittest.skipIf(sys.platform == 'win32', 'disabled on win32')
    def test_run(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        foo_c = os.path.join(pkg_dir, 'foo.c')
        self.write_file(foo_c, 'int main(void) { return 1;}\n')
        cmd.libraries = [('foo', {'sources': [foo_c]})]

        build_temp = os.path.join(pkg_dir, 'build')
        os.mkdir(build_temp)
        cmd.build_temp = build_temp
        cmd.build_clib = build_temp

        # before we run the command, we want to make sure
        # all commands are present on the system
        # by creating a compiler and checking its executables
        from packaging.compiler import new_compiler, customize_compiler

        compiler = new_compiler()
        customize_compiler(compiler)
        for ccmd in compiler.executables.values():
            if ccmd is None:
                continue
            if find_executable(ccmd[0]) is None:
                raise unittest.SkipTest("can't test")

        # this should work
        cmd.run()

        # let's check the result
        self.assertIn('libfoo.a', os.listdir(build_temp))
+
+
def test_suite():
    """Return a suite gathering all tests defined in this module."""
    suite = unittest.makeSuite(BuildCLibTestCase)
    return suite


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_build_ext.py b/Lib/packaging/tests/test_command_build_ext.py
new file mode 100644
index 0000000..2d79842
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_ext.py
@@ -0,0 +1,353 @@
+import os
+import sys
+import site
+import shutil
+import sysconfig
+from io import StringIO
+from packaging.dist import Distribution
+from packaging.errors import UnknownFileError, CompileError
+from packaging.command.build_ext import build_ext
+from packaging.compiler.extension import Extension
+
+from packaging.tests import support, unittest, verbose, unload
+
+# http://bugs.python.org/issue4373
+# Don't load the xx module more than once.
+ALREADY_TESTED = False
+
+
+def _get_source_filename():
+    srcdir = sysconfig.get_config_var('srcdir')
+    return os.path.join(srcdir, 'Modules', 'xxmodule.c')
+
+
class BuildExtTestCase(support.TempdirManager,
                       support.LoggingCatcher,
                       unittest.TestCase):
    """Tests for the build_ext command.

    These tests compile a real extension module (xxmodule.c copied from
    the CPython source tree), so they need a working C compiler and the
    Python source files; test_suite() below skips them otherwise.
    """

    def setUp(self):
        # Create a simple test environment
        # Note that we're making changes to sys.path
        super(BuildExtTestCase, self).setUp()
        self.tmp_dir = self.mkdtemp()
        # Keep the original list object *and* a copy of its contents so
        # tearDown can restore both the identity and the value of sys.path.
        self.sys_path = sys.path, sys.path[:]
        sys.path.append(self.tmp_dir)
        shutil.copy(_get_source_filename(), self.tmp_dir)
        # Point the user base at a scratch dir so the user-site test cannot
        # touch the real per-user directories.
        self.old_user_base = site.USER_BASE
        site.USER_BASE = self.mkdtemp()
        build_ext.USER_BASE = site.USER_BASE

    def test_build_ext(self):
        """Compile xxmodule and exercise the resulting extension."""
        global ALREADY_TESTED
        xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
        xx_ext = Extension('xx', [xx_c])
        dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
        dist.package_dir = self.tmp_dir
        cmd = build_ext(dist)
        if os.name == "nt":
            # On Windows, we must build a debug version iff running
            # a debug build of Python
            cmd.debug = sys.executable.endswith("_d.exe")
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir

        old_stdout = sys.stdout
        if not verbose:
            # silence compiler output
            sys.stdout = StringIO()
        try:
            cmd.ensure_finalized()
            cmd.run()
        finally:
            sys.stdout = old_stdout

        # The xx module can only be imported (and thus checked) once per
        # process; see http://bugs.python.org/issue4373.
        if ALREADY_TESTED:
            return
        else:
            ALREADY_TESTED = True

        import xx

        for attr in ('error', 'foo', 'new', 'roj'):
            self.assertTrue(hasattr(xx, attr))

        self.assertEqual(xx.foo(2, 5), 7)
        self.assertEqual(xx.foo(13, 15), 28)
        self.assertEqual(xx.new().demo(), None)
        doc = 'This is a template module just for instruction.'
        self.assertEqual(xx.__doc__, doc)
        self.assertTrue(isinstance(xx.Null(), xx.Null))
        self.assertTrue(isinstance(xx.Str(), xx.Str))

    def tearDown(self):
        # Get everything back to normal
        unload('xx')
        sys.path = self.sys_path[0]
        sys.path[:] = self.sys_path[1]
        # NOTE(review): the version guard is a leftover from the distutils
        # 2.x version of this test; it is always true under Python 3.
        if sys.version > "2.6":
            site.USER_BASE = self.old_user_base
            build_ext.USER_BASE = self.old_user_base

        super(BuildExtTestCase, self).tearDown()

    def test_solaris_enable_shared(self):
        """With a faked Solaris + Py_ENABLE_SHARED, library_dirs gets set."""
        dist = Distribution({'name': 'xx'})
        cmd = build_ext(dist)
        old = sys.platform

        sys.platform = 'sunos'  # fooling finalize_options
        from sysconfig import _CONFIG_VARS

        old_var = _CONFIG_VARS.get('Py_ENABLE_SHARED')
        _CONFIG_VARS['Py_ENABLE_SHARED'] = 1
        try:
            cmd.ensure_finalized()
        finally:
            # Undo the platform and config-var fakery.
            sys.platform = old
            if old_var is None:
                del _CONFIG_VARS['Py_ENABLE_SHARED']
            else:
                _CONFIG_VARS['Py_ENABLE_SHARED'] = old_var

        # make sure we get some library dirs under solaris
        self.assertGreater(len(cmd.library_dirs), 0)

    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
    def test_user_site(self):
        """--user must add the user-site lib/include dirs to the search paths."""
        dist = Distribution({'name': 'xx'})
        cmd = build_ext(dist)

        # making sure the user option is there
        options = [name for name, short, label in
                   cmd.user_options]
        self.assertIn('user', options)

        # setting a value
        cmd.user = True

        # setting user based lib and include
        lib = os.path.join(site.USER_BASE, 'lib')
        incl = os.path.join(site.USER_BASE, 'include')
        os.mkdir(lib)
        os.mkdir(incl)

        # let's run finalize
        cmd.ensure_finalized()

        # see if include_dirs and library_dirs
        # were set
        self.assertIn(lib, cmd.library_dirs)
        self.assertIn(lib, cmd.rpath)
        self.assertIn(incl, cmd.include_dirs)

    def test_optional_extension(self):
        """A failing build aborts unless the extension is marked optional."""

        # this extension will fail, but let's ignore this failure
        # with the optional argument.
        modules = [Extension('foo', ['xxx'], optional=False)]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = build_ext(dist)
        cmd.ensure_finalized()
        self.assertRaises((UnknownFileError, CompileError),
                          cmd.run)  # should raise an error

        modules = [Extension('foo', ['xxx'], optional=True)]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = build_ext(dist)
        cmd.ensure_finalized()
        cmd.run()  # should pass

    def test_finalize_options(self):
        """Check all the list/tuple conversions done by finalize_options."""
        # Make sure Python's include directories (for Python.h, pyconfig.h,
        # etc.) are in the include search path.
        modules = [Extension('foo', ['xxx'], optional=False)]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = build_ext(dist)
        cmd.finalize_options()

        py_include = sysconfig.get_path('include')
        self.assertIn(py_include, cmd.include_dirs)

        plat_py_include = sysconfig.get_path('platinclude')
        self.assertIn(plat_py_include, cmd.include_dirs)

        # make sure cmd.libraries is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.libraries = 'my_lib'
        cmd.finalize_options()
        self.assertEqual(cmd.libraries, ['my_lib'])

        # make sure cmd.library_dirs is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.library_dirs = 'my_lib_dir'
        cmd.finalize_options()
        self.assertIn('my_lib_dir', cmd.library_dirs)

        # make sure rpath is turned into a list
        # if it's a list of os.pathsep's paths
        cmd = build_ext(dist)
        cmd.rpath = os.pathsep.join(['one', 'two'])
        cmd.finalize_options()
        self.assertEqual(cmd.rpath, ['one', 'two'])

        # XXX more tests to perform for win32

        # make sure define is turned into 2-tuples
        # strings if they are ','-separated strings
        cmd = build_ext(dist)
        cmd.define = 'one,two'
        cmd.finalize_options()
        self.assertEqual(cmd.define, [('one', '1'), ('two', '1')])

        # make sure undef is turned into a list of
        # strings if they are ','-separated strings
        cmd = build_ext(dist)
        cmd.undef = 'one,two'
        cmd.finalize_options()
        self.assertEqual(cmd.undef, ['one', 'two'])

        # make sure swig_opts is turned into a list
        cmd = build_ext(dist)
        cmd.swig_opts = None
        cmd.finalize_options()
        self.assertEqual(cmd.swig_opts, [])

        cmd = build_ext(dist)
        cmd.swig_opts = '1 2'
        cmd.finalize_options()
        self.assertEqual(cmd.swig_opts, ['1', '2'])

    def test_get_source_files(self):
        """get_source_files must list the extension sources."""
        modules = [Extension('foo', ['xxx'], optional=False)]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = build_ext(dist)
        cmd.ensure_finalized()
        self.assertEqual(cmd.get_source_files(), ['xxx'])

    def test_compiler_option(self):
        # cmd.compiler is an option and
        # should not be overridden by a compiler instance
        # when the command is run
        dist = Distribution()
        cmd = build_ext(dist)
        cmd.compiler = 'unix'
        cmd.ensure_finalized()
        cmd.run()
        self.assertEqual(cmd.compiler, 'unix')

    def test_get_outputs(self):
        """Check get_outputs in both inplace and regular builds."""
        tmp_dir = self.mkdtemp()
        c_file = os.path.join(tmp_dir, 'foo.c')
        self.write_file(c_file, 'void initfoo(void) {};\n')
        ext = Extension('foo', [c_file], optional=False)
        dist = Distribution({'name': 'xx',
                             'ext_modules': [ext]})
        cmd = build_ext(dist)
        cmd.ensure_finalized()
        self.assertEqual(len(cmd.get_outputs()), 1)

        if os.name == "nt":
            # debug build of the extension iff Python itself is a debug build
            cmd.debug = sys.executable.endswith("_d.exe")

        cmd.build_lib = os.path.join(self.tmp_dir, 'build')
        cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')

        # issue #5977 : distutils build_ext.get_outputs
        # returns wrong result with --inplace
        other_tmp_dir = os.path.realpath(self.mkdtemp())
        old_wd = os.getcwd()
        os.chdir(other_tmp_dir)
        try:
            cmd.inplace = True
            cmd.run()
            so_file = cmd.get_outputs()[0]
        finally:
            os.chdir(old_wd)
        self.assertTrue(os.path.exists(so_file))
        so_ext = sysconfig.get_config_var('SO')
        self.assertTrue(so_file.endswith(so_ext))
        so_dir = os.path.dirname(so_file)
        # inplace build: the module lands in the current directory
        self.assertEqual(so_dir, other_tmp_dir)

        cmd.inplace = False
        cmd.run()
        so_file = cmd.get_outputs()[0]
        self.assertTrue(os.path.exists(so_file))
        self.assertTrue(so_file.endswith(so_ext))
        so_dir = os.path.dirname(so_file)
        # regular build: the module lands in build_lib
        self.assertEqual(so_dir, cmd.build_lib)

        # inplace = False, cmd.package = 'bar'
        build_py = cmd.get_finalized_command('build_py')
        build_py.package_dir = 'bar'
        path = cmd.get_ext_fullpath('foo')
        # checking that the last directory is the build_dir
        path = os.path.split(path)[0]
        self.assertEqual(path, cmd.build_lib)

        # inplace = True, cmd.package = 'bar'
        cmd.inplace = True
        other_tmp_dir = os.path.realpath(self.mkdtemp())
        old_wd = os.getcwd()
        os.chdir(other_tmp_dir)
        try:
            path = cmd.get_ext_fullpath('foo')
        finally:
            os.chdir(old_wd)
        # checking that the last directory is bar
        path = os.path.split(path)[0]
        lastdir = os.path.split(path)[-1]
        self.assertEqual(lastdir, 'bar')

    def test_ext_fullpath(self):
        """Check get_ext_fullpath for dotted names, inplace and not."""
        ext = sysconfig.get_config_vars()['SO']
        # building lxml.etree inplace
        #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c')
        #etree_ext = Extension('lxml.etree', [etree_c])
        #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
        dist = Distribution()
        cmd = build_ext(dist)
        cmd.inplace = True
        cmd.distribution.package_dir = 'src'
        cmd.distribution.packages = ['lxml', 'lxml.html']
        curdir = os.getcwd()
        wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
        path = cmd.get_ext_fullpath('lxml.etree')
        self.assertEqual(wanted, path)

        # building lxml.etree not inplace
        cmd.inplace = False
        cmd.build_lib = os.path.join(curdir, 'tmpdir')
        wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
        path = cmd.get_ext_fullpath('lxml.etree')
        self.assertEqual(wanted, path)

        # building twisted.runner.portmap not inplace
        build_py = cmd.get_finalized_command('build_py')
        build_py.package_dir = None
        cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
        path = cmd.get_ext_fullpath('twisted.runner.portmap')
        wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
                              'portmap' + ext)
        self.assertEqual(wanted, path)

        # building twisted.runner.portmap inplace
        cmd.inplace = True
        path = cmd.get_ext_fullpath('twisted.runner.portmap')
        wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
        self.assertEqual(wanted, path)
+
+
def test_suite():
    """Return the build_ext tests, or an empty suite outside a build dir."""
    if not os.path.exists(_get_source_filename()):
        # xxmodule.c is only available in a Python source checkout.
        if verbose:
            print('test_build_ext: Cannot find source code (test'
                  ' must run in python build dir)')
        return unittest.TestSuite()
    return unittest.TestLoader().loadTestsFromTestCase(BuildExtTestCase)


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_build_py.py b/Lib/packaging/tests/test_command_build_py.py
new file mode 100644
index 0000000..9b40e6d
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_py.py
@@ -0,0 +1,124 @@
+"""Tests for distutils.command.build_py."""
+
+import os
+import sys
+
+from packaging.command.build_py import build_py
+from packaging.dist import Distribution
+from packaging.errors import PackagingFileError
+
+from packaging.tests import unittest, support
+
+
class BuildPyTestCase(support.TempdirManager,
                      support.LoggingCatcher,
                      unittest.TestCase):
    """Tests for the build_py command."""

    def test_package_data(self):
        """Package data files must be copied but never byte-compiled."""
        sources = self.mkdtemp()
        pkg_dir = os.path.join(sources, 'pkg')
        os.mkdir(pkg_dir)
        # Use context managers so the files are closed even if a write fails.
        with open(os.path.join(pkg_dir, "__init__.py"), "w") as f:
            f.write("# Pretend this is a package.")
        with open(os.path.join(pkg_dir, "README.txt"), "w") as f:
            f.write("Info about this package")

        destination = self.mkdtemp()

        dist = Distribution({"packages": ["pkg"],
                             "package_dir": sources})
        # script_name need not exist, it just needs to be initialized
        dist.script_name = os.path.join(sources, "setup.py")
        dist.command_obj["build"] = support.DummyCommand(
            force=False,
            build_lib=destination,
            use_2to3_fixers=None,
            convert_2to3_doctests=None,
            use_2to3=False)
        dist.packages = ["pkg"]
        dist.package_data = {"pkg": ["README.txt"]}
        dist.package_dir = sources

        cmd = build_py(dist)
        cmd.compile = True
        cmd.ensure_finalized()
        self.assertEqual(cmd.package_data, dist.package_data)

        cmd.run()

        # This makes sure the list of outputs includes byte-compiled
        # files for Python modules but not for package data files
        # (there shouldn't *be* byte-code files for those!).
        self.assertEqual(len(cmd.get_outputs()), 3)
        pkgdest = os.path.join(destination, "pkg")
        files = os.listdir(pkgdest)
        self.assertIn("__init__.py", files)
        self.assertIn("__init__.pyc", files)
        self.assertIn("README.txt", files)

    def test_empty_package_dir(self):
        """Globbed package_data must work when package_dir is the dist root.

        Regression test for SF bugs 1668596/1720897.
        """
        cwd = os.getcwd()

        # create the distribution files.
        sources = self.mkdtemp()
        pkg = os.path.join(sources, 'pkg')
        os.mkdir(pkg)
        open(os.path.join(pkg, "__init__.py"), "w").close()
        testdir = os.path.join(pkg, "doc")
        os.mkdir(testdir)
        open(os.path.join(testdir, "testfile"), "w").close()

        os.chdir(sources)
        try:
            dist = Distribution({"packages": ["pkg"],
                                 "package_dir": sources,
                                 "package_data": {"pkg": ["doc/*"]}})
            # script_name need not exist, it just needs to be initialized
            dist.script_name = os.path.join(sources, "setup.py")
            dist.script_args = ["build"]
            dist.parse_command_line()

            try:
                dist.run_commands()
            except PackagingFileError:
                self.fail("failed package_data test when package_dir is ''")
        finally:
            # Restore the working directory.
            os.chdir(cwd)

    @unittest.skipUnless(hasattr(sys, 'dont_write_bytecode'),
                         'sys.dont_write_bytecode not supported')
    def test_dont_write_bytecode(self):
        """byte_compile must be a no-op when sys.dont_write_bytecode is set."""
        pkg_dir, dist = self.create_dist()
        cmd = build_py(dist)
        cmd.compile = True
        cmd.optimize = 1

        old_dont_write_bytecode = sys.dont_write_bytecode
        sys.dont_write_bytecode = True
        try:
            cmd.byte_compile([])
        finally:
            sys.dont_write_bytecode = old_dont_write_bytecode

        self.assertIn('byte-compiling is disabled', self.get_logs()[0])
+
def test_suite():
    """Return a suite containing all build_py tests."""
    return unittest.TestLoader().loadTestsFromTestCase(BuildPyTestCase)


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_build_scripts.py b/Lib/packaging/tests/test_command_build_scripts.py
new file mode 100644
index 0000000..60d8b68
--- /dev/null
+++ b/Lib/packaging/tests/test_command_build_scripts.py
@@ -0,0 +1,112 @@
+"""Tests for distutils.command.build_scripts."""
+
+import os
+import sys
+import sysconfig
+from packaging.dist import Distribution
+from packaging.command.build_scripts import build_scripts
+
+from packaging.tests import unittest, support
+
+
class BuildScriptsTestCase(support.TempdirManager,
                           support.LoggingCatcher,
                           unittest.TestCase):
    """Tests for the build_scripts command."""

    def test_default_settings(self):
        """finalize_options must pick up force/build_dir from 'build'."""
        cmd = self.get_build_scripts_cmd("/foo/bar", [])
        self.assertFalse(cmd.force)
        self.assertIs(cmd.build_dir, None)

        cmd.finalize_options()

        self.assertTrue(cmd.force)
        self.assertEqual(cmd.build_dir, "/foo/bar")

    def test_build(self):
        """Every listed script must end up in the target directory."""
        source = self.mkdtemp()
        target = self.mkdtemp()
        expected = self.write_sample_scripts(source)

        cmd = self.get_build_scripts_cmd(target,
                                         [os.path.join(source, fn)
                                          for fn in expected])
        cmd.finalize_options()
        cmd.run()

        built = os.listdir(target)
        for name in expected:
            self.assertIn(name, built)

    def get_build_scripts_cmd(self, target, scripts):
        """Return a build_scripts command wired to a dummy build command."""
        dist = Distribution()
        dist.scripts = scripts
        dist.command_obj["build"] = support.DummyCommand(
            build_scripts=target,
            force=True,
            executable=sys.executable,
            use_2to3=False,
            use_2to3_fixers=None,
            convert_2to3_doctests=None
            )
        return build_scripts(dist)

    def write_sample_scripts(self, dir):
        """Create two Python scripts and one shell script in *dir*.

        Return the list of created file names.
        """
        expected = []
        expected.append("script1.py")
        self.write_script(dir, "script1.py",
                          ("#! /usr/bin/env python2.3\n"
                           "# bogus script w/ Python sh-bang\n"
                           "pass\n"))
        expected.append("script2.py")
        self.write_script(dir, "script2.py",
                          ("#!/usr/bin/python\n"
                           "# bogus script w/ Python sh-bang\n"
                           "pass\n"))
        expected.append("shell.sh")
        self.write_script(dir, "shell.sh",
                          ("#!/bin/sh\n"
                           "# bogus shell script w/ sh-bang\n"
                           "exit 0\n"))
        return expected

    def write_script(self, dir, name, text):
        """Write *text* to a new file *name* in directory *dir*."""
        # Use a context manager so the file is closed even if the write fails.
        with open(os.path.join(dir, name), "w") as f:
            f.write(text)

    def test_version_int(self):
        """Building scripts must work when sysconfig VERSION is an int.

        See http://bugs.python.org/issue4524: on linux-g++-32 with command
        line `./configure --enable-ipv6 --with-suffix=3`, python compiled
        okay but build_scripts failed when writing the executable name.
        """
        source = self.mkdtemp()
        target = self.mkdtemp()
        expected = self.write_sample_scripts(source)

        cmd = self.get_build_scripts_cmd(target,
                                         [os.path.join(source, fn)
                                          for fn in expected])
        cmd.finalize_options()

        old = sysconfig.get_config_vars().get('VERSION')
        sysconfig._CONFIG_VARS['VERSION'] = 4
        try:
            cmd.run()
        finally:
            # Restore the previous value; if there was none, remove the key
            # instead of leaking the bogus int into the global config vars.
            if old is not None:
                sysconfig._CONFIG_VARS['VERSION'] = old
            else:
                del sysconfig._CONFIG_VARS['VERSION']

        built = os.listdir(target)
        for name in expected:
            self.assertIn(name, built)
+
def test_suite():
    """Return a suite containing all build_scripts tests."""
    return unittest.TestLoader().loadTestsFromTestCase(BuildScriptsTestCase)


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_check.py b/Lib/packaging/tests/test_command_check.py
new file mode 100644
index 0000000..8b32673
--- /dev/null
+++ b/Lib/packaging/tests/test_command_check.py
@@ -0,0 +1,131 @@
+"""Tests for distutils.command.check."""
+
+import logging
+from packaging.command.check import check
+from packaging.metadata import _HAS_DOCUTILS
+from packaging.errors import PackagingSetupError, MetadataMissingError
+from packaging.tests import unittest, support
+
+
class CheckTestCase(support.LoggingCatcher,
                    support.TempdirManager,
                    unittest.TestCase):
    """Tests for the check command."""

    def _run(self, metadata=None, **options):
        """Create a dist with *metadata* and run check with *options* set."""
        if metadata is None:
            metadata = {'name': 'xxx', 'version': '1.2'}
        pkg_info, dist = self.create_dist(**metadata)
        command = check(dist)
        command.initialize_options()
        for name, value in options.items():
            setattr(command, name, value)
        command.ensure_finalized()
        command.run()
        return command

    def test_check_metadata(self):
        # By default check verifies the metadata; with none at all
        # there should be some warnings.
        self._run()
        # trick: using assertNotEqual with an empty list will give us a more
        # useful error message than assertGreater(.., 0) when the code change
        # and the test fails
        self.assertNotEqual([], self.get_logs(logging.WARNING))

        # With all required fields filled in, no warnings must remain.
        self.loghandler.flush()
        metadata = {'home_page': 'xxx', 'author': 'xxx',
                    'author_email': 'xxx',
                    'name': 'xxx', 'version': '4.2',
                    }
        self._run(metadata)
        self.assertEqual([], self.get_logs(logging.WARNING))

        # In strict mode, missing or invalid metadata raises instead.
        self.assertRaises(MetadataMissingError, self._run, {}, strict=1)
        self.assertRaises(PackagingSetupError, self._run,
                          {'name': 'xxx', 'version': 'xxx'}, strict=1)

        # and of course, no error when all metadata fields are present
        self.loghandler.flush()
        self._run(metadata, strict=True)
        self.assertEqual([], self.get_logs(logging.WARNING))

    def test_check_metadata_1_2(self):
        # With no metadata at all the default check must warn.
        self._run()
        self.assertNotEqual([], self.get_logs(logging.WARNING))

        # now let's add the required fields and run it again, to make sure we
        # don't get any warning anymore let's use requires_python as a marker
        # to enforce Metadata-Version 1.2
        metadata = {'home_page': 'xxx', 'author': 'xxx',
                    'author_email': 'xxx',
                    'name': 'xxx', 'version': '4.2',
                    'requires_python': '2.4',
                    }
        self.loghandler.flush()
        self._run(metadata)
        self.assertEqual([], self.get_logs(logging.WARNING))

        # In strict mode, missing or invalid metadata raises instead.
        self.assertRaises(MetadataMissingError, self._run, {}, strict=1)
        self.assertRaises(PackagingSetupError, self._run,
                          {'name': 'xxx', 'version': 'xxx'}, strict=1)

        # complain about version format
        metadata['version'] = 'xxx'
        self.assertRaises(PackagingSetupError, self._run, metadata, strict=1)

        # now with correct version format again
        metadata['version'] = '4.2'
        self.loghandler.flush()
        self._run(metadata, strict=True)
        self.assertEqual([], self.get_logs(logging.WARNING))

    @unittest.skipUnless(_HAS_DOCUTILS, "requires docutils")
    def test_check_restructuredtext(self):
        # A broken reST long_description must be reported...
        broken_rest = 'title\n===\n\ntest'
        pkg_info, dist = self.create_dist(description=broken_rest)
        command = check(dist)
        command.check_restructuredtext()
        self.assertEqual(len(self.get_logs(logging.WARNING)), 1)

        # ...while a valid one must pass silently.
        self.loghandler.flush()
        pkg_info, dist = self.create_dist(description='title\n=====\n\ntest')
        command = check(dist)
        command.check_restructuredtext()
        self.assertEqual([], self.get_logs(logging.WARNING))

    def test_check_all(self):
        self.assertRaises(PackagingSetupError, self._run,
                          {'name': 'xxx', 'version': 'xxx'},
                          strict=1, all=1)
        self.assertRaises(MetadataMissingError, self._run, {},
                          strict=1, all=1)

    def test_check_hooks(self):
        # An unresolvable hook name must be reported as a warning.
        pkg_info, dist = self.create_dist()
        dist.command_options['install_dist'] = {
            'pre_hook': ('file', {"a": 'some.nonextistant.hook.ghrrraarrhll'}),
        }
        command = check(dist)
        command.check_hooks_resolvable()
        self.assertEqual(len(self.get_logs(logging.WARNING)), 1)
+
+
def test_suite():
    """Return a suite containing all check tests."""
    return unittest.TestLoader().loadTestsFromTestCase(CheckTestCase)


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_clean.py b/Lib/packaging/tests/test_command_clean.py
new file mode 100644
index 0000000..8d29e4d
--- /dev/null
+++ b/Lib/packaging/tests/test_command_clean.py
@@ -0,0 +1,48 @@
+"""Tests for distutils.command.clean."""
+import os
+
+from packaging.command.clean import clean
+from packaging.tests import unittest, support
+
+
class cleanTestCase(support.TempdirManager, support.LoggingCatcher,
                    unittest.TestCase):
    """Tests for the clean command."""

    def test_simple_run(self):
        """clean --all must remove every build directory it knows about."""
        pkg_dir, dist = self.create_dist()
        command = clean(dist)

        # Populate a few directories that clean is expected to remove.
        dirs = []
        for attr in ('build_temp', 'build_lib', 'bdist_base',
                     'build_scripts', 'build_base'):
            path = os.path.join(pkg_dir, attr)
            dirs.append((attr, path))
            os.mkdir(path)
            setattr(command, attr, path)
            # build_base itself stays empty; the others get a few files.
            if attr != 'build_base':
                for fname in ('one', 'two', 'three'):
                    self.write_file(os.path.join(path, fname))

        # Run the command.
        command.all = True
        command.ensure_finalized()
        command.run()

        # Every directory must be gone afterwards.
        for attr, path in dirs:
            self.assertFalse(os.path.exists(path),
                             '%r was not removed' % path)

        # Running again on already-removed dirs should warn but succeed.
        command.all = True
        command.ensure_finalized()
        command.run()
+
+
def test_suite():
    """Return a suite containing all clean tests."""
    return unittest.TestLoader().loadTestsFromTestCase(cleanTestCase)


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_cmd.py b/Lib/packaging/tests/test_command_cmd.py
new file mode 100644
index 0000000..8ac9dce
--- /dev/null
+++ b/Lib/packaging/tests/test_command_cmd.py
@@ -0,0 +1,101 @@
+"""Tests for distutils.cmd."""
+import os
+
+from packaging.command.cmd import Command
+from packaging.dist import Distribution
+from packaging.errors import PackagingOptionError
+from packaging.tests import support, unittest
+
+
class MyCmd(Command):
    """Minimal concrete Command used as a fixture by the tests below."""
    def initialize_options(self):
        # Nothing to initialize for this test command; the override only
        # makes the class instantiable.
        pass
+
+
class CommandTestCase(support.LoggingCatcher,
                      unittest.TestCase):
    """Tests for the helper methods of the base Command class."""

    def setUp(self):
        super(CommandTestCase, self).setUp()
        self.cmd = MyCmd(Distribution())

    def test_make_file(self):
        command = self.cmd

        # infiles must be a string or a list/tuple; anything else raises
        self.assertRaises(TypeError, command.make_file,
                          infiles=1, outfile='', func='func', args=())

        # make_file must delegate to execute with a proper message
        def _execute(func, args, exec_msg, level):
            self.assertEqual(exec_msg, 'generating out from in')

        command.force = True
        command.execute = _execute
        command.make_file(infiles='in', outfile='out', func='func', args=())

    def test_dump_options(self):
        command = self.cmd
        command.option1 = 1
        command.option2 = 1
        command.user_options = [('option1', '', ''), ('option2', '', '')]
        command.dump_options()

        expected = ["command options for 'MyCmd':", '  option1 = 1',
                    '  option2 = 1']
        self.assertEqual(self.get_logs(), expected)

    def test_ensure_string(self):
        command = self.cmd
        # a plain string passes through unchanged
        command.option1 = 'ok'
        command.ensure_string('option1')

        # None is replaced by the provided default
        command.option2 = None
        command.ensure_string('option2', 'xxx')
        self.assertTrue(hasattr(command, 'option2'))

        # any other type is an error
        command.option3 = 1
        self.assertRaises(PackagingOptionError, command.ensure_string,
                          'option3')

    def test_ensure_string_list(self):
        command = self.cmd
        # comma-separated strings are split into lists
        command.option1 = 'ok,dok'
        command.ensure_string_list('option1')
        self.assertEqual(command.option1, ['ok', 'dok'])

        # lists of strings are left alone; a single word becomes a list
        command.yes_string_list = ['one', 'two', 'three']
        command.yes_string_list2 = 'ok'
        command.ensure_string_list('yes_string_list')
        command.ensure_string_list('yes_string_list2')
        self.assertEqual(command.yes_string_list, ['one', 'two', 'three'])
        self.assertEqual(command.yes_string_list2, ['ok'])

        # anything containing non-strings is rejected
        command.not_string_list = ['one', 2, 'three']
        command.not_string_list2 = object()
        self.assertRaises(PackagingOptionError,
                          command.ensure_string_list, 'not_string_list')
        self.assertRaises(PackagingOptionError,
                          command.ensure_string_list, 'not_string_list2')

    def test_ensure_filename(self):
        command = self.cmd
        # an existing file is accepted, a bogus path is not
        command.option1 = __file__
        command.ensure_filename('option1')
        command.option2 = 'xxx'
        self.assertRaises(PackagingOptionError, command.ensure_filename,
                          'option2')

    def test_ensure_dirname(self):
        command = self.cmd
        # an existing directory is accepted, a bogus path is not
        command.option1 = os.path.dirname(__file__) or os.curdir
        command.ensure_dirname('option1')
        command.option2 = 'xxx'
        self.assertRaises(PackagingOptionError, command.ensure_dirname,
                          'option2')
+
+
def test_suite():
    """Return a suite containing all Command tests."""
    return unittest.TestLoader().loadTestsFromTestCase(CommandTestCase)


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_command_config.py b/Lib/packaging/tests/test_command_config.py
new file mode 100644
index 0000000..6d780c5
--- /dev/null
+++ b/Lib/packaging/tests/test_command_config.py
@@ -0,0 +1,76 @@
+"""Tests for packaging.command.config."""
+import os
+import sys
+import logging
+
+from packaging.command.config import dump_file, config
+from packaging.tests import unittest, support
+
+
+class ConfigTestCase(support.LoggingCatcher,
+                     support.TempdirManager,
+                     unittest.TestCase):
+    """Tests for the config command and its dump_file helper."""
+
+    def test_dump_file(self):
+        # Map a possible .pyc/.pyo __file__ back to the .py source;
+        # rstrip('co') strips trailing 'c'/'o' characters, which is safe
+        # here because the source filename ends in 'y'.
+        this_file = __file__.rstrip('co')
+        with open(this_file) as f:
+            numlines = len(f.readlines())
+
+        dump_file(this_file, 'I am the header')
+
+        # dump_file logs the header plus the file contents; two extra
+        # lines beyond the source line count are expected here — TODO
+        # confirm what the second extra line is (trailing split artifact?)
+        logs = []
+        for log in self.get_logs(logging.INFO):
+            logs.extend(line for line in log.split('\n'))
+        self.assertEqual(len(logs), numlines + 2)
+
+    @unittest.skipIf(sys.platform == 'win32', 'disabled on win32')
+    def test_search_cpp(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = config(dist)
+
+        # simple pattern searches
+        # 'xxx' only appears inside a comment, which the preprocessor
+        # strips, so no match is found (0)
+        match = cmd.search_cpp(pattern='xxx', body='// xxx')
+        self.assertEqual(match, 0)
+
+        # '_configtest' appears in the preprocessed output (the temporary
+        # test file's name), so a match is found (1) — presumably via the
+        # preprocessor line markers; confirm against search_cpp
+        match = cmd.search_cpp(pattern='_configtest', body='// xxx')
+        self.assertEqual(match, 1)
+
+    def test_finalize_options(self):
+        # finalize_options does a bit of transformation
+        # on options: path-separator-joined strings become lists, and a
+        # bare library name becomes a one-element list
+        pkg_dir, dist = self.create_dist()
+        cmd = config(dist)
+        cmd.include_dirs = 'one%stwo' % os.pathsep
+        cmd.libraries = 'one'
+        cmd.library_dirs = 'three%sfour' % os.pathsep
+        cmd.ensure_finalized()
+
+        self.assertEqual(cmd.include_dirs, ['one', 'two'])
+        self.assertEqual(cmd.libraries, ['one'])
+        self.assertEqual(cmd.library_dirs, ['three', 'four'])
+
+    def test_clean(self):
+        # _clean removes files
+        tmp_dir = self.mkdtemp()
+        f1 = os.path.join(tmp_dir, 'one')
+        f2 = os.path.join(tmp_dir, 'two')
+
+        self.write_file(f1, 'xxx')
+        self.write_file(f2, 'xxx')
+
+        for f in (f1, f2):
+            self.assertTrue(os.path.exists(f))
+
+        pkg_dir, dist = self.create_dist()
+        cmd = config(dist)
+        cmd._clean(f1, f2)
+
+        for f in (f1, f2):
+            self.assertFalse(os.path.exists(f))
+
+
+def test_suite():
+    # Aggregate this module's tests for regrtest / the defaultTest hook.
+    return unittest.makeSuite(ConfigTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_data.py b/Lib/packaging/tests/test_command_install_data.py
new file mode 100644
index 0000000..8b8bbac
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_data.py
@@ -0,0 +1,80 @@
+"""Tests for packaging.command.install_data."""
+import os
+import sysconfig
+from sysconfig import _get_default_scheme
+from packaging.tests import unittest, support
+from packaging.command.install_data import install_data
+
+
+class InstallDataTestCase(support.TempdirManager,
+                          support.LoggingCatcher,
+                          unittest.TestCase):
+    """Tests for install_data's {substitution}-based data_files mapping."""
+
+    def test_simple_run(self):
+        # NOTE(review): this cleanup saves a *reference* to the mutated
+        # _SCHEMES object, not a copy, so the 'inst'/'inst2'/'inst3' keys
+        # added below leak into other tests — confirm and deep-copy if so.
+        self.addCleanup(setattr, sysconfig, '_SCHEMES', sysconfig._SCHEMES)
+
+        pkg_dir, dist = self.create_dist()
+        cmd = install_data(dist)
+        cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')
+
+        # register substitution variables so '{inst}' / '{inst2}' in
+        # data_files targets expand to our temp directories
+        sysconfig._SCHEMES.set(_get_default_scheme(), 'inst',
+                               os.path.join(pkg_dir, 'inst'))
+        sysconfig._SCHEMES.set(_get_default_scheme(), 'inst2',
+                               os.path.join(pkg_dir, 'inst2'))
+
+        one = os.path.join(pkg_dir, 'one')
+        self.write_file(one, 'xxx')
+        inst2 = os.path.join(pkg_dir, 'inst2')
+        two = os.path.join(pkg_dir, 'two')
+        self.write_file(two, 'xxx')
+
+        cmd.data_files = {one: '{inst}/one', two: '{inst2}/two'}
+        self.assertCountEqual(cmd.get_inputs(), [one, two])
+
+        # let's run the command
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the result
+        self.assertEqual(len(cmd.get_outputs()), 2)
+        rtwo = os.path.split(two)[-1]
+        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+        rone = os.path.split(one)[-1]
+        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+        cmd.outfiles = []
+
+        # let's try with warn_dir one
+        cmd.warn_dir = True
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the result
+        self.assertEqual(len(cmd.get_outputs()), 2)
+        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+        cmd.outfiles = []
+
+        # now using root and empty dir
+        cmd.root = os.path.join(pkg_dir, 'root')
+        three = os.path.join(cmd.install_dir, 'three')
+        self.write_file(three, 'xx')
+
+        sysconfig._SCHEMES.set(_get_default_scheme(), 'inst3',
+                               cmd.install_dir)
+
+        cmd.data_files = {one: '{inst}/one', two: '{inst2}/two',
+                          three: '{inst3}/three'}
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the result
+        self.assertEqual(len(cmd.get_outputs()), 3)
+        self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+        self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+
+
+def test_suite():
+    # Aggregate this module's tests for regrtest / the defaultTest hook.
+    return unittest.makeSuite(InstallDataTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_dist.py b/Lib/packaging/tests/test_command_install_dist.py
new file mode 100644
index 0000000..a06d1f6
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_dist.py
@@ -0,0 +1,210 @@
+"""Tests for packaging.command.install."""
+
+import os
+import sys
+
+from sysconfig import (get_scheme_names, get_config_vars,
+                       _SCHEMES, get_config_var, get_path)
+
+_CONFIG_VARS = get_config_vars()
+
+from packaging.tests import captured_stdout
+
+from packaging.command.install_dist import install_dist
+from packaging.command import install_dist as install_module
+from packaging.dist import Distribution
+from packaging.errors import PackagingOptionError
+
+from packaging.tests import unittest, support
+
+
+class InstallTestCase(support.TempdirManager,
+                      support.LoggingCatcher,
+                      unittest.TestCase):
+    """Tests for the install_dist command (schemes, --user, --record)."""
+
+    def test_home_installation_scheme(self):
+        # This ensure two things:
+        # - that --home generates the desired set of directory names
+        # - test --home is supported on all platforms
+        builddir = self.mkdtemp()
+        destination = os.path.join(builddir, "installation")
+
+        dist = Distribution({"name": "foopkg"})
+        # script_name need not exist, it just needs to be initialized
+        dist.script_name = os.path.join(builddir, "setup.py")
+        dist.command_obj["build"] = support.DummyCommand(
+            build_base=builddir,
+            build_lib=os.path.join(builddir, "lib"),
+        )
+
+        # patch the platinclude path of both posix schemes, restoring the
+        # originals once finalization has run
+        old_posix_prefix = _SCHEMES.get('posix_prefix', 'platinclude')
+        old_posix_home = _SCHEMES.get('posix_home', 'platinclude')
+
+        new_path = '{platbase}/include/python{py_version_short}'
+        _SCHEMES.set('posix_prefix', 'platinclude', new_path)
+        _SCHEMES.set('posix_home', 'platinclude', '{platbase}/include/python')
+
+        try:
+            cmd = install_dist(dist)
+            cmd.home = destination
+            cmd.ensure_finalized()
+        finally:
+            _SCHEMES.set('posix_prefix', 'platinclude', old_posix_prefix)
+            _SCHEMES.set('posix_home', 'platinclude', old_posix_home)
+
+        self.assertEqual(cmd.install_base, destination)
+        self.assertEqual(cmd.install_platbase, destination)
+
+        def check_path(got, expected):
+            # compare paths after normalization (separators, '..', etc.)
+            got = os.path.normpath(got)
+            expected = os.path.normpath(expected)
+            self.assertEqual(got, expected)
+
+        libdir = os.path.join(destination, "lib", "python")
+        check_path(cmd.install_lib, libdir)
+        check_path(cmd.install_platlib, libdir)
+        check_path(cmd.install_purelib, libdir)
+        check_path(cmd.install_headers,
+                   os.path.join(destination, "include", "python", "foopkg"))
+        check_path(cmd.install_scripts, os.path.join(destination, "bin"))
+        check_path(cmd.install_data, destination)
+
+    # NOTE(review): sys.version is a string, so on Python 3 this
+    # comparison is always False and the test always runs — presumably
+    # a leftover from the distutils 2.x code base.
+    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
+    def test_user_site(self):
+        # test install with --user
+        # preparing the environment for the test: point userbase and the
+        # per-user purelib scheme at temp dirs, and fake expanduser
+        self.old_user_base = get_config_var('userbase')
+        self.old_user_site = get_path('purelib', '%s_user' % os.name)
+        self.tmpdir = self.mkdtemp()
+        self.user_base = os.path.join(self.tmpdir, 'B')
+        self.user_site = os.path.join(self.tmpdir, 'S')
+        _CONFIG_VARS['userbase'] = self.user_base
+        scheme = '%s_user' % os.name
+        _SCHEMES.set(scheme, 'purelib', self.user_site)
+
+        def _expanduser(path):
+            # redirect '~' to our temp dir so nothing touches $HOME
+            if path[0] == '~':
+                path = os.path.normpath(self.tmpdir) + path[1:]
+            return path
+
+        self.old_expand = os.path.expanduser
+        os.path.expanduser = _expanduser
+
+        try:
+            # this is the actual test
+            self._test_user_site()
+        finally:
+            # undo all global patching whatever happened above
+            _CONFIG_VARS['userbase'] = self.old_user_base
+            _SCHEMES.set(scheme, 'purelib', self.old_user_site)
+            os.path.expanduser = self.old_expand
+
+    def _test_user_site(self):
+        # Body of test_user_site, run with the patched environment.
+        schemes = get_scheme_names()
+        for key in ('nt_user', 'posix_user', 'os2_home'):
+            self.assertIn(key, schemes)
+
+        dist = Distribution({'name': 'xx'})
+        cmd = install_dist(dist)
+        # making sure the user option is there
+        options = [name for name, short, lable in
+                   cmd.user_options]
+        self.assertIn('user', options)
+
+        # setting a value
+        cmd.user = True
+
+        # user base and site shouldn't be created yet
+        self.assertFalse(os.path.exists(self.user_base))
+        self.assertFalse(os.path.exists(self.user_site))
+
+        # let's run finalize
+        cmd.ensure_finalized()
+
+        # now they should
+        self.assertTrue(os.path.exists(self.user_base))
+        self.assertTrue(os.path.exists(self.user_site))
+
+        self.assertIn('userbase', cmd.config_vars)
+        self.assertIn('usersite', cmd.config_vars)
+
+    def test_handle_extra_path(self):
+        # handle_extra_path parses the 'path,dirs' extra_path metadata
+        dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'})
+        cmd = install_dist(dist)
+
+        # two elements
+        cmd.handle_extra_path()
+        self.assertEqual(cmd.extra_path, ['path', 'dirs'])
+        self.assertEqual(cmd.extra_dirs, 'dirs')
+        self.assertEqual(cmd.path_file, 'path')
+
+        # one element: the single value serves as both file and dir
+        cmd.extra_path = ['path']
+        cmd.handle_extra_path()
+        self.assertEqual(cmd.extra_path, ['path'])
+        self.assertEqual(cmd.extra_dirs, 'path')
+        self.assertEqual(cmd.path_file, 'path')
+
+        # none
+        dist.extra_path = cmd.extra_path = None
+        cmd.handle_extra_path()
+        self.assertEqual(cmd.extra_path, None)
+        self.assertEqual(cmd.extra_dirs, '')
+        self.assertEqual(cmd.path_file, None)
+
+        # three elements (no way!)
+        cmd.extra_path = 'path,dirs,again'
+        self.assertRaises(PackagingOptionError, cmd.handle_extra_path)
+
+    def test_finalize_options(self):
+        dist = Distribution({'name': 'xx'})
+        cmd = install_dist(dist)
+
+        # must supply either prefix/exec-prefix/home or
+        # install-base/install-platbase -- not both
+        cmd.prefix = 'prefix'
+        cmd.install_base = 'base'
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+        # must supply either home or prefix/exec-prefix -- not both
+        cmd.install_base = None
+        cmd.home = 'home'
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+        if sys.version >= '2.6':
+            # can't combine user with prefix/exec_prefix/home or
+            # install_(plat)base
+            cmd.prefix = None
+            cmd.user = 'user'
+            self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+    def test_old_record(self):
+        # test pre-PEP 376 --record option (outside dist-info dir)
+        install_dir = self.mkdtemp()
+        pkgdir, dist = self.create_dist()
+
+        dist = Distribution()
+        cmd = install_dist(dist)
+        dist.command_obj['install_dist'] = cmd
+        cmd.root = install_dir
+        cmd.record = os.path.join(pkgdir, 'filelist')
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the record file was created with four
+        # lines, one for each .dist-info entry: METADATA,
+        # INSTALLER, REQUESTED, RECORD
+        f = open(cmd.record)
+        try:
+            self.assertEqual(len(f.readlines()), 4)
+        finally:
+            f.close()
+
+        # XXX test that fancy_getopt is okay with options named
+        # record and no-record but unrelated
+
+
+def test_suite():
+    # Aggregate this module's tests for regrtest / the defaultTest hook.
+    return unittest.makeSuite(InstallTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_distinfo.py b/Lib/packaging/tests/test_command_install_distinfo.py
new file mode 100644
index 0000000..3d33691
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_distinfo.py
@@ -0,0 +1,192 @@
+"""Tests for ``packaging.command.install_distinfo``. """
+
+import os
+import csv
+import hashlib
+import sys
+
+from packaging.command.install_distinfo import install_distinfo
+from packaging.command.cmd import Command
+from packaging.metadata import Metadata
+from packaging.tests import unittest, support
+
+
+class DummyInstallCmd(Command):
+
+    def __init__(self, dist=None):
+        self.outputs = []
+        self.distribution = dist
+
+    def __getattr__(self, name):
+        return None
+
+    def ensure_finalized(self):
+        pass
+
+    def get_outputs(self):
+        return (self.outputs +
+                self.get_finalized_command('install_distinfo').get_outputs())
+
+
+class InstallDistinfoTestCase(support.TempdirManager,
+                              support.LoggingCatcher,
+                              unittest.TestCase):
+    """Tests for the PEP 376 .dist-info directory creation."""
+
+    # order-insensitive list comparison helper
+    checkLists = lambda self, x, y: self.assertListEqual(sorted(x), sorted(y))
+
+    def test_empty_install(self):
+        # with no outputs, the .dist-info dir still gets the four
+        # standard files
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.ensure_finalized()
+        cmd.run()
+
+        self.checkLists(os.listdir(install_dir), ['foo-1.0.dist-info'])
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+        self.checkLists(os.listdir(dist_info),
+                        ['METADATA', 'RECORD', 'REQUESTED', 'INSTALLER'])
+        with open(os.path.join(dist_info, 'INSTALLER')) as fp:
+            self.assertEqual(fp.read(), 'distutils')
+        with open(os.path.join(dist_info, 'REQUESTED')) as fp:
+            self.assertEqual(fp.read(), '')
+        meta_path = os.path.join(dist_info, 'METADATA')
+        self.assertTrue(Metadata(path=meta_path).check())
+
+    def test_installer(self):
+        # the --installer option ends up in the INSTALLER file
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.installer = 'bacon-python'
+        cmd.ensure_finalized()
+        cmd.run()
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+        with open(os.path.join(dist_info, 'INSTALLER')) as fp:
+            self.assertEqual(fp.read(), 'bacon-python')
+
+    def test_requested(self):
+        # requested=False suppresses the REQUESTED marker file
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.requested = False
+        cmd.ensure_finalized()
+        cmd.run()
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+        self.checkLists(os.listdir(dist_info),
+                        ['METADATA', 'RECORD', 'INSTALLER'])
+
+    def test_no_record(self):
+        # no_record=True suppresses the RECORD file
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.no_record = True
+        cmd.ensure_finalized()
+        cmd.run()
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+        self.checkLists(os.listdir(dist_info),
+                        ['METADATA', 'REQUESTED', 'INSTALLER'])
+
+    def test_record(self):
+        # RECORD must list every installed file with its md5 hash and
+        # size, and empty hash/size fields for .pyc files and RECORD itself
+        pkg_dir, dist = self.create_dist(name='foo',
+                                         version='1.0')
+        install_dir = self.mkdtemp()
+
+        install = DummyInstallCmd(dist)
+        dist.command_obj['install_dist'] = install
+
+        fake_dists = os.path.join(os.path.dirname(__file__), 'fake_dists')
+        fake_dists = os.path.realpath(fake_dists)
+
+        # for testing, we simply add all files from _backport's fake_dists
+        dirs = []
+        for dir in os.listdir(fake_dists):
+            full_path = os.path.join(fake_dists, dir)
+            if (not dir.endswith('.egg') or dir.endswith('.egg-info') or
+                dir.endswith('.dist-info')) and os.path.isdir(full_path):
+                dirs.append(full_path)
+
+        for dir in dirs:
+            for path, subdirs, files in os.walk(dir):
+                install.outputs += [os.path.join(path, f) for f in files]
+                # NOTE(review): 'path' here is a string literal, not the
+                # loop variable — looks like a bug, but the fake .pyc
+                # entries get empty hash/size below either way, so the
+                # assertion is unaffected; confirm intent upstream.
+                install.outputs += [os.path.join('path', f + 'c')
+                                    for f in files if f.endswith('.py')]
+
+        cmd = install_distinfo(dist)
+        dist.command_obj['install_distinfo'] = cmd
+
+        cmd.initialize_options()
+        cmd.distinfo_dir = install_dir
+        cmd.ensure_finalized()
+        cmd.run()
+
+        dist_info = os.path.join(install_dir, 'foo-1.0.dist-info')
+
+        # build the expected RECORD rows: [path, md5, size], with empty
+        # hash/size for bytecode files and the RECORD file itself
+        expected = []
+        for f in install.get_outputs():
+            if (f.endswith('.pyc') or f == os.path.join(
+                install_dir, 'foo-1.0.dist-info', 'RECORD')):
+                expected.append([f, '', ''])
+            else:
+                size = os.path.getsize(f)
+                md5 = hashlib.md5()
+                with open(f) as fp:
+                    md5.update(fp.read().encode())
+                hash = md5.hexdigest()
+                expected.append([f, hash, str(size)])
+
+        parsed = []
+        with open(os.path.join(dist_info, 'RECORD'), 'r') as f:
+            reader = csv.reader(f, delimiter=',',
+                                   lineterminator=os.linesep,
+                                   quotechar='"')
+            parsed = list(reader)
+
+        self.maxDiff = None
+        self.checkLists(parsed, expected)
+
+
+def test_suite():
+    # Aggregate this module's tests for regrtest / the defaultTest hook.
+    return unittest.makeSuite(InstallDistinfoTestCase)
+
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_headers.py b/Lib/packaging/tests/test_command_install_headers.py
new file mode 100644
index 0000000..f2906a7
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_headers.py
@@ -0,0 +1,38 @@
+"""Tests for packaging.command.install_headers."""
+import os
+
+from packaging.command.install_headers import install_headers
+from packaging.tests import unittest, support
+
+
+class InstallHeadersTestCase(support.TempdirManager,
+                             support.LoggingCatcher,
+                             unittest.TestCase):
+
+    def test_simple_run(self):
+        # we have two headers
+        header_list = self.mkdtemp()
+        header1 = os.path.join(header_list, 'header1')
+        header2 = os.path.join(header_list, 'header2')
+        self.write_file(header1)
+        self.write_file(header2)
+        headers = [header1, header2]
+
+        pkg_dir, dist = self.create_dist(headers=headers)
+        cmd = install_headers(dist)
+        self.assertEqual(cmd.get_inputs(), headers)
+
+        # let's run the command
+        cmd.install_dir = os.path.join(pkg_dir, 'inst')
+        cmd.ensure_finalized()
+        cmd.run()
+
+        # let's check the results
+        self.assertEqual(len(cmd.get_outputs()), 2)
+
+
+def test_suite():
+    # Aggregate this module's tests for regrtest / the defaultTest hook.
+    return unittest.makeSuite(InstallHeadersTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_lib.py b/Lib/packaging/tests/test_command_install_lib.py
new file mode 100644
index 0000000..99d47dd
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_lib.py
@@ -0,0 +1,111 @@
+"""Tests for packaging.command.install_lib."""
+import sys
+import os
+
+from packaging.tests import unittest, support
+from packaging.command.install_lib import install_lib
+from packaging.compiler.extension import Extension
+from packaging.errors import PackagingOptionError
+
+try:
+    no_bytecode = sys.dont_write_bytecode
+    bytecode_support = True
+except AttributeError:
+    no_bytecode = False
+    bytecode_support = False
+
+
+class InstallLibTestCase(support.TempdirManager,
+                         support.LoggingCatcher,
+                         support.EnvironRestorer,
+                         unittest.TestCase):
+    """Tests for the install_lib command (compile/optimize handling)."""
+
+    restore_environ = ['PYTHONPATH']
+
+    def test_finalize_options(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+
+        # defaults: compile on, no optimization
+        cmd.finalize_options()
+        self.assertTrue(cmd.compile)
+        self.assertEqual(cmd.optimize, 0)
+
+        # optimize must be 0, 1, or 2
+        cmd.optimize = 'foo'
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+        cmd.optimize = '4'
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+        # a valid string value is converted to int
+        cmd.optimize = '2'
+        cmd.finalize_options()
+        self.assertEqual(cmd.optimize, 2)
+
+    @unittest.skipIf(no_bytecode, 'byte-compile not supported')
+    def test_byte_compile(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+        cmd.compile = True
+        cmd.optimize = 1
+
+        f = os.path.join(pkg_dir, 'foo.py')
+        self.write_file(f, '# python file')
+        cmd.byte_compile([f])
+        # expects pre-PEP 3147 sibling .pyc/.pyo files — presumably this
+        # predates __pycache__; confirm against the byte_compile helper
+        self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'foo.pyc')))
+        self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'foo.pyo')))
+
+    def test_get_outputs(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+
+        # setting up a dist environment
+        cmd.compile = True
+        cmd.optimize = 1
+        cmd.install_dir = pkg_dir
+        f = os.path.join(pkg_dir, '__init__.py')
+        self.write_file(f, '# python package')
+        cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
+        cmd.distribution.packages = [pkg_dir]
+        cmd.distribution.script_name = 'setup.py'
+
+        # get_output should return 4 elements
+        self.assertEqual(len(cmd.get_outputs()), 4)
+
+    def test_get_inputs(self):
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+
+        # setting up a dist environment
+        cmd.compile = True
+        cmd.optimize = 1
+        cmd.install_dir = pkg_dir
+        f = os.path.join(pkg_dir, '__init__.py')
+        self.write_file(f, '# python package')
+        cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
+        cmd.distribution.packages = [pkg_dir]
+        cmd.distribution.script_name = 'setup.py'
+
+        # get_input should return 2 elements
+        self.assertEqual(len(cmd.get_inputs()), 2)
+
+    @unittest.skipUnless(bytecode_support,
+                         'sys.dont_write_bytecode not supported')
+    def test_dont_write_bytecode(self):
+        # makes sure byte_compile is not used
+        pkg_dir, dist = self.create_dist()
+        cmd = install_lib(dist)
+        cmd.compile = True
+        cmd.optimize = 1
+
+        self.addCleanup(setattr, sys, 'dont_write_bytecode',
+                        sys.dont_write_bytecode)
+        sys.dont_write_bytecode = True
+        cmd.byte_compile([])
+
+        self.assertIn('byte-compiling is disabled', self.get_logs()[0])
+
+
+def test_suite():
+    # Aggregate this module's tests for regrtest / the defaultTest hook.
+    return unittest.makeSuite(InstallLibTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_install_scripts.py b/Lib/packaging/tests/test_command_install_scripts.py
new file mode 100644
index 0000000..08c7338
--- /dev/null
+++ b/Lib/packaging/tests/test_command_install_scripts.py
@@ -0,0 +1,78 @@
+"""Tests for packaging.command.install_scripts."""
+import os
+
+from packaging.tests import unittest, support
+from packaging.command.install_scripts import install_scripts
+from packaging.dist import Distribution
+
+
+class InstallScriptsTestCase(support.TempdirManager,
+                             support.LoggingCatcher,
+                             unittest.TestCase):
+    """Tests for the install_scripts command."""
+
+    def test_default_settings(self):
+        # finalize_options pulls force/skip_build from install_dist and
+        # build_dir/install_dir from the build and install_dist commands
+        dist = Distribution()
+        dist.command_obj["build"] = support.DummyCommand(
+            build_scripts="/foo/bar")
+        dist.command_obj["install_dist"] = support.DummyCommand(
+            install_scripts="/splat/funk",
+            force=True,
+            skip_build=True,
+            )
+        cmd = install_scripts(dist)
+        self.assertFalse(cmd.force)
+        self.assertFalse(cmd.skip_build)
+        self.assertIs(cmd.build_dir, None)
+        self.assertIs(cmd.install_dir, None)
+
+        cmd.finalize_options()
+
+        self.assertTrue(cmd.force)
+        self.assertTrue(cmd.skip_build)
+        self.assertEqual(cmd.build_dir, "/foo/bar")
+        self.assertEqual(cmd.install_dir, "/splat/funk")
+
+    def test_installation(self):
+        # every script in the build dir, Python or not, ends up installed
+        source = self.mkdtemp()
+        expected = []
+
+        def write_script(name, text):
+            # create a script in the source dir and remember its name
+            expected.append(name)
+            f = open(os.path.join(source, name), "w")
+            try:
+                f.write(text)
+            finally:
+                f.close()
+
+        write_script("script1.py", ("#! /usr/bin/env python2.3\n"
+                                    "# bogus script w/ Python sh-bang\n"
+                                    "pass\n"))
+        write_script("script2.py", ("#!/usr/bin/python\n"
+                                    "# bogus script w/ Python sh-bang\n"
+                                    "pass\n"))
+        write_script("shell.sh", ("#!/bin/sh\n"
+                                  "# bogus shell script w/ sh-bang\n"
+                                  "exit 0\n"))
+
+        target = self.mkdtemp()
+        dist = Distribution()
+        dist.command_obj["build"] = support.DummyCommand(build_scripts=source)
+        dist.command_obj["install_dist"] = support.DummyCommand(
+            install_scripts=target,
+            force=True,
+            skip_build=True,
+            )
+        cmd = install_scripts(dist)
+        cmd.finalize_options()
+        cmd.run()
+
+        installed = os.listdir(target)
+        for name in expected:
+            self.assertIn(name, installed)
+
+
+def test_suite():
+    # Aggregate this module's tests for regrtest / the defaultTest hook.
+    return unittest.makeSuite(InstallScriptsTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_register.py b/Lib/packaging/tests/test_command_register.py
new file mode 100644
index 0000000..7aa487a
--- /dev/null
+++ b/Lib/packaging/tests/test_command_register.py
@@ -0,0 +1,259 @@
+"""Tests for packaging.command.register."""
+import os
+import getpass
+import urllib.request
+import urllib.error
+import urllib.parse
+
+try:
+    import docutils
+    DOCUTILS_SUPPORT = True
+except ImportError:
+    DOCUTILS_SUPPORT = False
+
+from packaging.tests import unittest, support
+from packaging.command import register as register_module
+from packaging.command.register import register
+from packaging.errors import PackagingSetupError
+
+
# A .pypirc holding a username but no password; used by
# test_password_not_in_file to check that the prompted password is stored
# on the distribution object.
PYPIRC_NOPASSWORD = """\
[distutils]

index-servers =
    server1

[server1]
username:me
"""

# The exact file content test_create_pypirc expects the register command
# to write when the user chooses to save the login.
WANTED_PYPIRC = """\
[distutils]
index-servers =
    pypi

[pypi]
username:tarek
password:password
"""
+
+
class Inputs:
    """Callable faking user input: each call returns the next canned answer.

    The cursor advances on every call, even when the answers are exhausted
    (matching the original try/finally semantics), in which case IndexError
    propagates to the caller.
    """

    def __init__(self, *answers):
        self.answers = answers
        self.index = 0

    def __call__(self, prompt=''):
        # advance the cursor first so it moves even if indexing fails
        current = self.index
        self.index += 1
        return self.answers[current]
+
+
class FakeOpener:
    """Stand-in for urllib.request.build_opener / a PyPI server.

    Calling the instance returns itself, every request handed to open()
    is recorded in ``reqs``, and read() always answers 'xxx'.
    """

    def __init__(self):
        self.reqs = []

    def __call__(self, *ignored):
        return self

    def open(self, request):
        self.reqs.append(request)
        return self

    def read(self):
        return 'xxx'
+
+
class RegisterTestCase(support.TempdirManager,
                       support.EnvironRestorer,
                       support.LoggingCatcher,
                       unittest.TestCase):
    """Tests for the register command.

    setUp points HOME at a fresh temp dir (so .pypirc reads/writes are
    isolated in self.rc), stubs getpass.getpass to return 'password', and
    swaps urllib.request.build_opener for a FakeOpener so no real network
    traffic happens; outgoing requests are recorded in self.conn.reqs.
    """

    restore_environ = ['HOME']

    def setUp(self):
        super(RegisterTestCase, self).setUp()
        self.tmp_dir = self.mkdtemp()
        self.rc = os.path.join(self.tmp_dir, '.pypirc')
        os.environ['HOME'] = self.tmp_dir

        # patching the password prompt
        self._old_getpass = getpass.getpass

        def _getpass(prompt):
            return 'password'

        getpass.getpass = _getpass
        # capture outgoing requests instead of hitting the network
        self.old_opener = urllib.request.build_opener
        self.conn = urllib.request.build_opener = FakeOpener()

    def tearDown(self):
        # undo the monkey-patches from setUp, plus any fake input() that
        # an individual test installed on the register module
        getpass.getpass = self._old_getpass
        urllib.request.build_opener = self.old_opener
        if hasattr(register_module, 'input'):
            del register_module.input
        super(RegisterTestCase, self).tearDown()

    def _get_cmd(self, metadata=None):
        # Return a register command built on a dist with the given
        # metadata (default: a minimal complete set).
        if metadata is None:
            metadata = {'url': 'xxx', 'author': 'xxx',
                        'author_email': 'xxx',
                        'name': 'xxx', 'version': 'xxx'}
        pkg_info, dist = self.create_dist(**metadata)
        return register(dist)

    def test_create_pypirc(self):
        # this test makes sure a .pypirc file
        # is created when requested.

        # let's create a register instance
        cmd = self._get_cmd()

        # we shouldn't have a .pypirc file yet
        self.assertFalse(os.path.exists(self.rc))

        # patching input and getpass.getpass
        # so register gets happy
        # Here's what we are faking :
        # use your existing login (choice 1.)
        # Username : 'tarek'
        # Password : 'password'
        # Save your login (y/N)? : 'y'
        inputs = Inputs('1', 'tarek', 'y')
        register_module.input = inputs
        cmd.ensure_finalized()
        cmd.run()

        # we should have a brand new .pypirc file
        self.assertTrue(os.path.exists(self.rc))

        # with the content similar to WANTED_PYPIRC
        with open(self.rc) as fp:
            content = fp.read()
        self.assertEqual(content, WANTED_PYPIRC)

        # now let's make sure the .pypirc file generated
        # really works : we shouldn't be asked anything
        # if we run the command again
        def _no_way(prompt=''):
            raise AssertionError(prompt)

        register_module.input = _no_way
        cmd.show_response = True
        cmd.ensure_finalized()
        cmd.run()

        # let's see what the server received : we should
        # have 2 similar requests
        self.assertEqual(len(self.conn.reqs), 2)
        req1 = dict(self.conn.reqs[0].headers)
        req2 = dict(self.conn.reqs[1].headers)
        self.assertEqual(req2['Content-length'], req1['Content-length'])
        self.assertIn('xxx', self.conn.reqs[1].data)

    def test_password_not_in_file(self):

        self.write_file(self.rc, PYPIRC_NOPASSWORD)
        cmd = self._get_cmd()
        cmd.finalize_options()
        cmd._set_config()
        cmd.send_metadata()

        # dist.password should be set
        # therefore used afterwards by other commands
        self.assertEqual(cmd.distribution.password, 'password')

    def test_registration(self):
        # this test runs choice 2
        cmd = self._get_cmd()
        inputs = Inputs('2', 'tarek', 'tarek@ziade.org')
        register_module.input = inputs
        # let's run the command
        # FIXME does this send a real request? use a mock server
        cmd.ensure_finalized()
        cmd.run()

        # we should have send a request
        self.assertEqual(len(self.conn.reqs), 1)
        req = self.conn.reqs[0]
        headers = dict(req.headers)
        # NOTE(review): 608 is the exact byte size of the request body built
        # from the metadata above; it breaks if the form encoding changes.
        self.assertEqual(headers['Content-length'], '608')
        self.assertIn('tarek', req.data)

    def test_password_reset(self):
        # this test runs choice 3
        cmd = self._get_cmd()
        inputs = Inputs('3', 'tarek@ziade.org')
        register_module.input = inputs
        cmd.ensure_finalized()
        cmd.run()

        # we should have send a request
        self.assertEqual(len(self.conn.reqs), 1)
        req = self.conn.reqs[0]
        headers = dict(req.headers)
        self.assertEqual(headers['Content-length'], '290')
        self.assertIn('tarek', req.data)

    @unittest.skipUnless(DOCUTILS_SUPPORT, 'needs docutils')
    def test_strict(self):
        # testing the script option
        # when on, the register command stops if
        # the metadata is incomplete or if
        # long_description is not reSt compliant

        # empty metadata
        cmd = self._get_cmd({'name': 'xxx', 'version': 'xxx'})
        cmd.ensure_finalized()
        cmd.strict = True
        inputs = Inputs('1', 'tarek', 'y')
        register_module.input = inputs
        self.assertRaises(PackagingSetupError, cmd.run)

        # metadata is OK but long_description is broken
        # ('==' underline is too short for the 'title' heading)
        metadata = {'home_page': 'xxx', 'author': 'xxx',
                    'author_email': 'éxéxé',
                    'name': 'xxx', 'version': 'xxx',
                    'description': 'title\n==\n\ntext'}

        cmd = self._get_cmd(metadata)
        cmd.ensure_finalized()
        cmd.strict = True

        self.assertRaises(PackagingSetupError, cmd.run)

        # now something that works
        metadata['description'] = 'title\n=====\n\ntext'
        cmd = self._get_cmd(metadata)
        cmd.ensure_finalized()
        cmd.strict = True
        inputs = Inputs('1', 'tarek', 'y')
        register_module.input = inputs
        cmd.ensure_finalized()
        cmd.run()

        # strict is not by default
        cmd = self._get_cmd()
        cmd.ensure_finalized()
        inputs = Inputs('1', 'tarek', 'y')
        register_module.input = inputs
        cmd.ensure_finalized()
        cmd.run()

    def test_register_pep345(self):
        # a PEP 345 field set on the metadata must appear in the POST data
        cmd = self._get_cmd({})
        cmd.ensure_finalized()
        cmd.distribution.metadata['Requires-Dist'] = ['lxml']
        data = cmd.build_post_data('submit')
        self.assertEqual(data['metadata_version'], '1.2')
        self.assertEqual(data['requires_dist'], ['lxml'])
+
+
def test_suite():
    """Build a suite holding every test in RegisterTestCase."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(RegisterTestCase)

if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_sdist.py b/Lib/packaging/tests/test_command_sdist.py
new file mode 100644
index 0000000..956e258
--- /dev/null
+++ b/Lib/packaging/tests/test_command_sdist.py
@@ -0,0 +1,407 @@
+"""Tests for packaging.command.sdist."""
+import os
+import zipfile
+import tarfile
+import logging
+
+# zlib is not used here, but if it's not available
+# the tests that use zipfile may fail
+try:
+    import zlib
+except ImportError:
+    zlib = None
+
+try:
+    import grp
+    import pwd
+    UID_GID_SUPPORT = True
+except ImportError:
+    UID_GID_SUPPORT = False
+
+from os.path import join
+from packaging.tests import captured_stdout
+from packaging.command.sdist import sdist
+from packaging.command.sdist import show_formats
+from packaging.dist import Distribution
+from packaging.tests import unittest
+from packaging.errors import PackagingOptionError
+from packaging.util import find_executable
+from packaging.tests import support
+from shutil import get_archive_formats
+
# Minimal setup script written into the temp project by setUp.
SETUP_PY = """
from packaging.core import setup
import somecode

setup(name='fake')
"""

# MANIFEST content expected by test_add_defaults; %(sep)s is substituted
# with os.sep so the comparison works on any platform.
MANIFEST = """\
# file GENERATED by packaging, do NOT edit
README
inroot.txt
data%(sep)sdata.dt
scripts%(sep)sscript.py
some%(sep)sfile.txt
some%(sep)sother_file.txt
somecode%(sep)s__init__.py
somecode%(sep)sdoc.dat
somecode%(sep)sdoc.txt
"""
+
+
def builder(dist, filelist):
    """Manifest-builder hook used by test_manifest_builder.

    Ignores *dist* and appends a single marker entry to *filelist*.
    """
    marker = 'bah'
    filelist.append(marker)
+
+
class SDistTestCase(support.TempdirManager,
                    support.LoggingCatcher,
                    support.EnvironRestorer,
                    unittest.TestCase):
    """Tests for the sdist command.

    setUp builds a throwaway project (README, somecode package, setup.py,
    empty dist dir) in a temp dir and chdirs into it; tearDown restores
    the previous working directory.
    """

    restore_environ = ['HOME']

    def setUp(self):
        # PyPIRCCommandTestCase creates a temp dir already
        # and put it in self.tmp_dir
        super(SDistTestCase, self).setUp()
        self.tmp_dir = self.mkdtemp()
        os.environ['HOME'] = self.tmp_dir
        # setting up an environment
        self.old_path = os.getcwd()
        os.mkdir(join(self.tmp_dir, 'somecode'))
        os.mkdir(join(self.tmp_dir, 'dist'))
        # a package, and a README
        self.write_file((self.tmp_dir, 'README'), 'xxx')
        self.write_file((self.tmp_dir, 'somecode', '__init__.py'), '#')
        self.write_file((self.tmp_dir, 'setup.py'), SETUP_PY)
        os.chdir(self.tmp_dir)

    def tearDown(self):
        # back to normal
        os.chdir(self.old_path)
        super(SDistTestCase, self).tearDown()

    def get_cmd(self, metadata=None):
        """Return a (dist, cmd) pair ready for an sdist run.

        *metadata* defaults to a complete minimal set for project 'fake'.
        """
        if metadata is None:
            metadata = {'name': 'fake', 'version': '1.0',
                        'url': 'xxx', 'author': 'xxx',
                        'author_email': 'xxx'}
        dist = Distribution(metadata)
        dist.script_name = 'setup.py'
        dist.packages = ['somecode']
        dist.include_package_data = True
        cmd = sdist(dist)
        cmd.dist_dir = 'dist'
        return dist, cmd

    @unittest.skipUnless(zlib, "requires zlib")
    def test_prune_file_list(self):
        # this test creates a package with some vcs dirs in it
        # and launch sdist to make sure they get pruned
        # on all systems

        # creating VCS directories with some files in them
        os.mkdir(join(self.tmp_dir, 'somecode', '.svn'))

        self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx')

        os.mkdir(join(self.tmp_dir, 'somecode', '.hg'))
        self.write_file((self.tmp_dir, 'somecode', '.hg',
                         'ok'), 'xxx')

        os.mkdir(join(self.tmp_dir, 'somecode', '.git'))
        self.write_file((self.tmp_dir, 'somecode', '.git',
                         'ok'), 'xxx')

        # now building a sdist
        dist, cmd = self.get_cmd()

        # zip is available universally
        # (tar might not be installed under win32)
        cmd.formats = ['zip']

        cmd.ensure_finalized()
        cmd.run()

        # now let's check what we have
        dist_folder = join(self.tmp_dir, 'dist')
        files = os.listdir(dist_folder)
        self.assertEqual(files, ['fake-1.0.zip'])

        with zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) as zip_file:
            content = zip_file.namelist()

        # making sure everything has been pruned correctly
        # (none of the .svn/.hg/.git files may remain)
        self.assertEqual(len(content), 3)

    @unittest.skipUnless(zlib, "requires zlib")
    @unittest.skipIf(find_executable('tar') is None or
                     find_executable('gzip') is None,
                     'requires tar and gzip programs')
    def test_make_distribution(self):
        # building a sdist
        dist, cmd = self.get_cmd()

        # creating a gztar then a tar
        cmd.formats = ['gztar', 'tar']
        cmd.ensure_finalized()
        cmd.run()

        # making sure we have two files
        dist_folder = join(self.tmp_dir, 'dist')
        result = sorted(os.listdir(dist_folder))
        self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz'])

        os.remove(join(dist_folder, 'fake-1.0.tar'))
        os.remove(join(dist_folder, 'fake-1.0.tar.gz'))

        # now trying a tar then a gztar
        # (the order of the formats must not matter)
        cmd.formats = ['tar', 'gztar']

        cmd.ensure_finalized()
        cmd.run()

        result = sorted(os.listdir(dist_folder))
        self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz'])

    @unittest.skipUnless(zlib, "requires zlib")
    def test_add_defaults(self):

        # http://bugs.python.org/issue2279

        # add_default should also include
        # data_files and package_data
        dist, cmd = self.get_cmd()

        # filling data_files by pointing files
        # in package_data
        dist.package_data = {'': ['*.cfg', '*.dat'],
                             'somecode': ['*.txt']}
        self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
        self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#')

        # adding some data in data_files
        data_dir = join(self.tmp_dir, 'data')
        os.mkdir(data_dir)
        self.write_file((data_dir, 'data.dt'), '#')
        some_dir = join(self.tmp_dir, 'some')
        os.mkdir(some_dir)
        self.write_file((self.tmp_dir, 'inroot.txt'), '#')
        self.write_file((some_dir, 'file.txt'), '#')
        self.write_file((some_dir, 'other_file.txt'), '#')

        dist.data_files = {'data/data.dt': '{appdata}/data.dt',
                           'inroot.txt': '{appdata}/inroot.txt',
                           'some/file.txt': '{appdata}/file.txt',
                           'some/other_file.txt': '{appdata}/other_file.txt'}

        # adding a script
        script_dir = join(self.tmp_dir, 'scripts')
        os.mkdir(script_dir)
        self.write_file((script_dir, 'script.py'), '#')
        dist.scripts = [join('scripts', 'script.py')]

        cmd.formats = ['zip']
        cmd.use_defaults = True

        cmd.ensure_finalized()
        cmd.run()

        # now let's check what we have
        dist_folder = join(self.tmp_dir, 'dist')
        files = os.listdir(dist_folder)
        self.assertEqual(files, ['fake-1.0.zip'])

        with zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) as zip_file:
            content = zip_file.namelist()

        # Making sure everything was added. This includes 9 code and data
        # files in addition to PKG-INFO.
        self.assertEqual(len(content), 10)

        # Checking the MANIFEST
        with open(join(self.tmp_dir, 'MANIFEST')) as fp:
            manifest = fp.read()
        self.assertEqual(manifest, MANIFEST % {'sep': os.sep})

    @unittest.skipUnless(zlib, "requires zlib")
    def test_metadata_check_option(self):
        # testing the `check-metadata` option
        dist, cmd = self.get_cmd(metadata={'name': 'xxx', 'version': 'xxx'})

        # this should raise some warnings
        # with the check subcommand
        cmd.ensure_finalized()
        cmd.run()
        warnings = self.get_logs(logging.WARN)
        self.assertEqual(len(warnings), 3)

        # trying with a complete set of metadata
        self.loghandler.flush()
        dist, cmd = self.get_cmd()
        cmd.ensure_finalized()
        cmd.metadata_check = False
        cmd.run()
        warnings = self.get_logs(logging.WARN)
        # removing manifest generated warnings
        warnings = [warn for warn in warnings if
                    not warn.endswith('-- skipping')]
        # the remaining warning is about the use of the default file list
        self.assertEqual(len(warnings), 1)

    def test_show_formats(self):
        __, stdout = captured_stdout(show_formats)

        # the output should be a header line + one line per format
        num_formats = len(get_archive_formats())
        output = [line for line in stdout.split('\n')
                  if line.strip().startswith('--formats=')]
        self.assertEqual(len(output), num_formats)

    def test_finalize_options(self):

        dist, cmd = self.get_cmd()
        cmd.finalize_options()

        # default options set by finalize
        self.assertEqual(cmd.manifest, 'MANIFEST')
        self.assertEqual(cmd.dist_dir, 'dist')

        # formats has to be a string splitable on (' ', ',') or
        # a stringlist
        cmd.formats = 1
        self.assertRaises(PackagingOptionError, cmd.finalize_options)
        cmd.formats = ['zip']
        cmd.finalize_options()

        # formats has to be known
        cmd.formats = 'supazipa'
        self.assertRaises(PackagingOptionError, cmd.finalize_options)

    @unittest.skipUnless(zlib, "requires zlib")
    @unittest.skipUnless(UID_GID_SUPPORT, "requires grp and pwd support")
    @unittest.skipIf(find_executable('tar') is None or
                     find_executable('gzip') is None,
                     'requires tar and gzip programs')
    def test_make_distribution_owner_group(self):
        # building a sdist
        dist, cmd = self.get_cmd()

        # creating a gztar and specifying the owner+group
        # (root's names, resolved from uid/gid 0)
        cmd.formats = ['gztar']
        cmd.owner = pwd.getpwuid(0)[0]
        cmd.group = grp.getgrgid(0)[0]
        cmd.ensure_finalized()
        cmd.run()

        # making sure we have the good rights
        archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
        with tarfile.open(archive_name) as archive:
            for member in archive.getmembers():
                self.assertEqual(member.uid, 0)
                self.assertEqual(member.gid, 0)

        # building a sdist again
        dist, cmd = self.get_cmd()

        # creating a gztar
        cmd.formats = ['gztar']
        cmd.ensure_finalized()
        cmd.run()

        # making sure we have the good rights
        archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
        with tarfile.open(archive_name) as archive:

            # note that we are not testing the group ownership here
            # because, depending on the platforms and the container
            # rights (see #7408)
            for member in archive.getmembers():
                self.assertEqual(member.uid, os.getuid())

    def test_get_file_list(self):
        # make sure MANIFEST is recalculated
        dist, cmd = self.get_cmd()
        # filling data_files by pointing files in package_data
        dist.package_data = {'somecode': ['*.txt']}
        self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
        cmd.ensure_finalized()
        cmd.run()

        # Should produce four lines. Those lines are one comment, one default
        # (README) and two package files.
        with open(cmd.manifest) as f:
            manifest = [line.strip() for line in f.read().split('\n')
                        if line.strip() != '']
        self.assertEqual(len(manifest), 4)

        # Adding a file
        self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')

        # make sure build_py is reinitialized, like a fresh run
        build_py = dist.get_command_obj('build_py')
        build_py.finalized = False
        build_py.ensure_finalized()

        cmd.run()

        with open(cmd.manifest) as f:
            manifest2 = [line.strip() for line in f.read().split('\n')
                         if line.strip() != '']

        # Do we have the new file in MANIFEST?
        self.assertEqual(len(manifest2), 5)
        self.assertIn('doc2.txt', manifest2[-1])

    def test_manifest_marker(self):
        # check that autogenerated MANIFESTs have a marker
        dist, cmd = self.get_cmd()
        cmd.ensure_finalized()
        cmd.run()

        with open(cmd.manifest) as f:
            manifest = [line.strip() for line in f.read().split('\n')
                        if line.strip() != '']

        self.assertEqual(manifest[0],
                         '# file GENERATED by packaging, do NOT edit')

    def test_manual_manifest(self):
        # check that a MANIFEST without a marker is left alone
        dist, cmd = self.get_cmd()
        cmd.ensure_finalized()
        self.write_file((self.tmp_dir, cmd.manifest), 'README.manual')
        cmd.run()

        with open(cmd.manifest) as f:
            manifest = [line.strip() for line in f.read().split('\n')
                        if line.strip() != '']

        self.assertEqual(manifest, ['README.manual'])

    def test_template(self):
        # a template line in extra_files must be honored by the file list
        dist, cmd = self.get_cmd()
        dist.extra_files = ['include yeah']
        cmd.ensure_finalized()
        self.write_file((self.tmp_dir, 'yeah'), 'xxx')
        cmd.run()
        with open(cmd.manifest) as f:
            content = f.read()

        self.assertIn('yeah', content)

    def test_manifest_builder(self):
        # a dotted-path manifest builder must be called on the file list
        dist, cmd = self.get_cmd()
        cmd.manifest_builders = 'packaging.tests.test_command_sdist.builder'
        cmd.ensure_finalized()
        cmd.run()
        self.assertIn('bah', cmd.filelist.files)
+
+
def test_suite():
    """Build a suite holding every test in SDistTestCase."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(SDistTestCase)

if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_test.py b/Lib/packaging/tests/test_command_test.py
new file mode 100644
index 0000000..4fd8452
--- /dev/null
+++ b/Lib/packaging/tests/test_command_test.py
@@ -0,0 +1,225 @@
+import os
+import re
+import sys
+import shutil
+import logging
+import unittest as ut1
+import packaging.database
+
+from os.path import join
+from operator import getitem, setitem, delitem
+from packaging.command.build import build
+from packaging.tests import unittest
+from packaging.tests.support import (TempdirManager, EnvironRestorer,
+                                     LoggingCatcher)
+from packaging.command.test import test
+from packaging.command import set_command
+from packaging.dist import Distribution
+
+
# Regex for the unittest failure report of myowntestmodule.SomeTest.test_blah;
# the file path and line number are wildcarded so the match is
# location-independent.  NOTE(review): not referenced by the tests below.
EXPECTED_OUTPUT_RE = r'''FAIL: test_blah \(myowntestmodule.SomeTest\)
----------------------------------------------------------------------
Traceback \(most recent call last\):
  File ".+/myowntestmodule.py", line \d+, in test_blah
    self.fail\("horribly"\)
AssertionError: horribly
'''

# Absolute directory of this test module; used in setUp to build PYTHONPATH.
here = os.path.dirname(os.path.abspath(__file__))
+
+
class MockBuildCmd(build):
    """Build-command stub: no-op option handling, records run() calls.

    The test attaches a list as ``_record`` on the command object; every
    run() appends "build has run" to it.
    """
    build_lib = "mock build lib"
    command_name = 'build'
    plat_name = 'whatever'

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        self._record.append("build has run")
+
+
class TestTest(TempdirManager,
               EnvironRestorer,
               LoggingCatcher,
               unittest.TestCase):
    """Tests for the test command.

    setUp prepends the packaging package's parent directory to PYTHONPATH
    so subprocesses spawned by the command can import it.
    """

    restore_environ = ['PYTHONPATH']

    def setUp(self):
        super(TestTest, self).setUp()
        self.addCleanup(packaging.database.clear_cache)
        new_pythonpath = os.path.dirname(os.path.dirname(here))
        pythonpath = os.environ.get('PYTHONPATH')
        if pythonpath is not None:
            new_pythonpath = os.pathsep.join((new_pythonpath, pythonpath))
        os.environ['PYTHONPATH'] = new_pythonpath

    def assert_re_match(self, pattern, string):
        # Fail with a readable side-by-side dump when *pattern* does not
        # match anywhere in *string*.
        def quote(s):
            lines = ['## ' + line for line in s.split('\n')]
            sep = ["#" * 60]
            return [''] + sep + lines + sep
        msg = quote(pattern) + ["didn't match"] + quote(string)
        msg = "\n".join(msg)
        if not re.search(pattern, string):
            self.fail(msg)

    def prepare_dist(self, dist_name):
        # Copy the named fixture project from tests/dists into a temp dir.
        pkg_dir = join(os.path.dirname(__file__), "dists", dist_name)
        temp_pkg_dir = join(self.mkdtemp(), dist_name)
        shutil.copytree(pkg_dir, temp_pkg_dir)
        return temp_pkg_dir

    def safely_replace(self, obj, attr,
                       new_val=None, delete=False, dictionary=False):
        """Replace an object's attribute, restoring its original state at
        the end of the test run. Creates the attribute if not present
        before (deleting it afterwards). When delete=True, makes sure the
        value is del'd for the test run. If dictionary is True, operates
        on items rather than attributes."""
        if dictionary:
            _setattr, _getattr, _delattr = setitem, getitem, delitem

            def _hasattr(_dict, value):
                return value in _dict
        else:
            _setattr, _getattr, _delattr, _hasattr = (setattr, getattr,
                                                      delattr, hasattr)

        orig_has_attr = _hasattr(obj, attr)
        if orig_has_attr:
            orig_val = _getattr(obj, attr)

        if delete is False:
            _setattr(obj, attr, new_val)
        elif orig_has_attr:
            _delattr(obj, attr)

        def do_cleanup():
            if orig_has_attr:
                _setattr(obj, attr, orig_val)
            elif _hasattr(obj, attr):
                _delattr(obj, attr)

        self.addCleanup(do_cleanup)

    def test_runs_unittest(self):
        module_name, a_module = self.prepare_a_module()
        record = []
        a_module.recorder = lambda *args: record.append("suite")

        class MockTextTestRunner:
            def __init__(*_, **__):
                pass

            def run(_self, suite):
                record.append("run")

        self.safely_replace(ut1, "TextTestRunner", MockTextTestRunner)

        dist = Distribution()
        cmd = test(dist)
        cmd.suite = "%s.recorder" % module_name
        cmd.run()
        # the suite callable runs first, then the runner's run()
        self.assertEqual(record, ["suite", "run"])

    def test_builds_before_running_tests(self):
        self.addCleanup(set_command, 'packaging.command.build.build')
        set_command('packaging.tests.test_command_test.MockBuildCmd')

        dist = Distribution()
        dist.get_command_obj('build')._record = record = []
        cmd = test(dist)
        cmd.runner = self.prepare_named_function(lambda: None)
        cmd.ensure_finalized()
        cmd.run()
        self.assertEqual(['build has run'], record)

    def _test_works_with_2to3(self):
        pass

    def test_checks_requires(self):
        # an unsatisfiable tests_require entry must produce one warning
        dist = Distribution()
        cmd = test(dist)
        phony_project = 'ohno_ohno-impossible_1234-name_stop-that!'
        cmd.tests_require = [phony_project]
        cmd.ensure_finalized()
        logs = self.get_logs(logging.WARNING)
        self.assertEqual(1, len(logs))
        self.assertIn(phony_project, logs[0])

    def prepare_a_module(self):
        # Create an importable empty module on a temp sys.path entry.
        tmp_dir = self.mkdtemp()
        sys.path.append(tmp_dir)
        self.addCleanup(sys.path.remove, tmp_dir)

        self.write_file((tmp_dir, 'packaging_tests_a.py'), '')
        import packaging_tests_a as a_module
        return "packaging_tests_a", a_module

    def prepare_named_function(self, func):
        # Expose *func* as a dotted name ("module.recorder") usable as a
        # test runner.
        module_name, a_module = self.prepare_a_module()
        a_module.recorder = func
        return "%s.recorder" % module_name

    def test_custom_runner(self):
        dist = Distribution()
        cmd = test(dist)
        record = []
        cmd.runner = self.prepare_named_function(
            lambda: record.append("runner called"))
        cmd.ensure_finalized()
        cmd.run()
        self.assertEqual(["runner called"], record)

    def prepare_mock_ut2(self):
        # Install a fake "unittest2" module in sys.modules for the test
        # run; its loader/runner classes accept any arguments.
        class MockUTClass:
            def __init__(*_, **__):
                pass

            def discover(self):
                pass

            def run(self, _):
                pass

        class MockUTModule:
            TestLoader = MockUTClass
            TextTestRunner = MockUTClass

        mock_ut2 = MockUTModule()
        self.safely_replace(sys.modules, "unittest2",
                            mock_ut2, dictionary=True)
        return mock_ut2

    def test_gets_unittest_discovery(self):
        # unittest is preferred when its loader has discover();
        # otherwise unittest2 is used
        mock_ut2 = self.prepare_mock_ut2()
        dist = Distribution()
        cmd = test(dist)
        self.safely_replace(ut1.TestLoader, "discover", lambda: None)
        self.assertEqual(cmd.get_ut_with_discovery(), ut1)

        del ut1.TestLoader.discover
        self.assertEqual(cmd.get_ut_with_discovery(), mock_ut2)

    def test_calls_discover(self):
        # without a suite or runner, run() falls back to discovery on the
        # current directory
        self.safely_replace(ut1.TestLoader, "discover", delete=True)
        mock_ut2 = self.prepare_mock_ut2()
        record = []
        mock_ut2.TestLoader.discover = lambda self, path: record.append(path)
        dist = Distribution()
        cmd = test(dist)
        cmd.run()
        self.assertEqual([os.curdir], record)
+
+
def test_suite():
    """Build a suite holding every test in TestTest."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(TestTest)

if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_upload.py b/Lib/packaging/tests/test_command_upload.py
new file mode 100644
index 0000000..f2e338b
--- /dev/null
+++ b/Lib/packaging/tests/test_command_upload.py
@@ -0,0 +1,157 @@
+"""Tests for packaging.command.upload."""
+import os
+import sys
+
+from packaging.command.upload import upload
+from packaging.dist import Distribution
+from packaging.errors import PackagingOptionError
+
+from packaging.tests import unittest, support
+from packaging.tests.pypi_server import PyPIServer, PyPIServerTestCase
+
+
+# .pypirc contents with a server entry that stores no password.
+PYPIRC_NOPASSWORD = """\
+[distutils]
+
+index-servers =
+    server1
+
+[server1]
+username:me
+"""
+
+# .pypirc contents with two fully-configured index servers.
+PYPIRC = """\
+[distutils]
+
+index-servers =
+    server1
+    server2
+
+[server1]
+username:me
+password:secret
+
+[server2]
+username:meagain
+password: secret
+realm:acme
+repository:http://another.pypi/
+"""
+
+
+class UploadTestCase(support.TempdirManager, support.EnvironRestorer,
+                     support.LoggingCatcher, PyPIServerTestCase):
+    """Tests for the upload command, run against a local fake PyPI."""
+
+    restore_environ = ['HOME']
+
+    def setUp(self):
+        super(UploadTestCase, self).setUp()
+        self.tmp_dir = self.mkdtemp()
+        self.rc = os.path.join(self.tmp_dir, '.pypirc')
+        # point HOME at the temp dir so the command reads our .pypirc
+        os.environ['HOME'] = self.tmp_dir
+
+    def test_finalize_options(self):
+        # new format
+        self.write_file(self.rc, PYPIRC)
+        dist = Distribution()
+        cmd = upload(dist)
+        cmd.finalize_options()
+        for attr, expected in (('username', 'me'), ('password', 'secret'),
+                               ('realm', 'pypi'),
+                               ('repository', 'http://pypi.python.org/pypi')):
+            self.assertEqual(getattr(cmd, attr), expected)
+
+    def test_finalize_options_unsigned_identity_raises_exception(self):
+        # --identity is only valid together with --sign
+        self.write_file(self.rc, PYPIRC)
+        dist = Distribution()
+        cmd = upload(dist)
+        cmd.identity = True
+        cmd.sign = False
+        self.assertRaises(PackagingOptionError, cmd.finalize_options)
+
+    def test_saved_password(self):
+        # file with no password
+        self.write_file(self.rc, PYPIRC_NOPASSWORD)
+
+        # make sure it passes
+        dist = Distribution()
+        cmd = upload(dist)
+        cmd.ensure_finalized()
+        self.assertEqual(cmd.password, None)
+
+        # make sure we get it as well, if another command
+        # initialized it at the dist level
+        dist.password = 'xxx'
+        cmd = upload(dist)
+        cmd.finalize_options()
+        self.assertEqual(cmd.password, 'xxx')
+
+    def test_upload_without_files_raises_exception(self):
+        dist = Distribution()
+        cmd = upload(dist)
+        self.assertRaises(PackagingOptionError, cmd.run)
+
+    def test_upload(self):
+        path = os.path.join(self.tmp_dir, 'xxx')
+        self.write_file(path)
+        command, pyversion, filename = 'xxx', '3.3', path
+        dist_files = [(command, pyversion, filename)]
+
+        # lets run it
+        pkg_dir, dist = self.create_dist(dist_files=dist_files, author='dédé')
+        cmd = upload(dist)
+        cmd.ensure_finalized()
+        cmd.repository = self.pypi.full_address
+        cmd.run()
+
+        # what did we send ?
+        handler, request_data = self.pypi.requests[-1]
+        headers = handler.headers
+        #self.assertIn('dédé', str(request_data))
+        self.assertIn(b'xxx', request_data)
+
+        self.assertEqual(int(headers['content-length']), len(request_data))
+        self.assertLess(int(headers['content-length']), 2500)
+        self.assertTrue(headers['content-type'].startswith('multipart/form-data'))
+        self.assertEqual(handler.command, 'POST')
+        # the auth header must not contain newlines (header injection)
+        self.assertNotIn('\n', headers['authorization'])
+
+    def test_upload_docs(self):
+        path = os.path.join(self.tmp_dir, 'xxx')
+        self.write_file(path)
+        command, pyversion, filename = 'xxx', '3.3', path
+        dist_files = [(command, pyversion, filename)]
+        docs_path = os.path.join(self.tmp_dir, "build", "docs")
+        os.makedirs(docs_path)
+        self.write_file(os.path.join(docs_path, "index.html"), "yellow")
+        self.write_file(self.rc, PYPIRC)
+
+        # lets run it
+        pkg_dir, dist = self.create_dist(dist_files=dist_files, author='dédé')
+
+        cmd = upload(dist)
+        cmd.get_finalized_command("build").run()
+        cmd.upload_docs = True
+        cmd.ensure_finalized()
+        cmd.repository = self.pypi.full_address
+        try:
+            prev_dir = os.getcwd()
+            os.chdir(self.tmp_dir)
+            cmd.run()
+        finally:
+            os.chdir(prev_dir)
+
+        # split the multipart body on the boundary used by the command
+        handler, request_data = self.pypi.requests[-1]
+        action, name, content = request_data.split(
+            "----------------GHSKFJDLGDS7543FJKLFHRE75642756743254"
+            .encode())[1:4]
+
+        self.assertIn(b'name=":action"', action)
+        self.assertIn(b'doc_upload', action)
+
+
+def test_suite():
+    # Entry point used by the packaging test runner and defaultTest below.
+    return unittest.makeSuite(UploadTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_command_upload_docs.py b/Lib/packaging/tests/test_command_upload_docs.py
new file mode 100644
index 0000000..b103894
--- /dev/null
+++ b/Lib/packaging/tests/test_command_upload_docs.py
@@ -0,0 +1,205 @@
+"""Tests for packaging.command.upload_docs."""
+import os
+import sys
+import shutil
+import zipfile
+
+from packaging.command import upload_docs as upload_docs_mod
+from packaging.command.upload_docs import (upload_docs, zip_dir,
+                                           encode_multipart)
+from packaging.dist import Distribution
+from packaging.errors import PackagingFileError, PackagingOptionError
+
+from packaging.tests import unittest, support
+from packaging.tests.pypi_server import PyPIServerTestCase
+
+
+# Expected CRLF-separated lines of the body built by encode_multipart
+# for the fields/files used in test_encode_multipart (boundary b'-x').
+EXPECTED_MULTIPART_OUTPUT = [
+    b'---x',
+    b'Content-Disposition: form-data; name="username"',
+    b'',
+    b'wok',
+    b'---x',
+    b'Content-Disposition: form-data; name="password"',
+    b'',
+    b'secret',
+    b'---x',
+    b'Content-Disposition: form-data; name="picture"; filename="wok.png"',
+    b'',
+    b'PNG89',
+    b'---x--',
+    b'',
+]
+
+# .pypirc template; %s is filled with the fake server's address.
+PYPIRC = """\
+[distutils]
+index-servers = server1
+
+[server1]
+repository = %s
+username = real_slim_shady
+password = long_island
+"""
+
+class UploadDocsTestCase(support.TempdirManager,
+                         support.EnvironRestorer,
+                         support.LoggingCatcher,
+                         PyPIServerTestCase):
+
+    restore_environ = ['HOME']
+
+    def setUp(self):
+        super(UploadDocsTestCase, self).setUp()
+        self.tmp_dir = self.mkdtemp()
+        self.rc = os.path.join(self.tmp_dir, '.pypirc')
+        os.environ['HOME'] = self.tmp_dir
+        self.dist = Distribution()
+        self.dist.metadata['Name'] = "distr-name"
+        self.cmd = upload_docs(self.dist)
+
+    def test_default_uploaddir(self):
+        sandbox = self.mkdtemp()
+        previous = os.getcwd()
+        os.chdir(sandbox)
+        try:
+            os.mkdir("build")
+            self.prepare_sample_dir("build")
+            self.cmd.ensure_finalized()
+            self.assertEqual(self.cmd.upload_dir, os.path.join("build", "docs"))
+        finally:
+            os.chdir(previous)
+
+    def test_default_uploaddir_looks_for_doc_also(self):
+        sandbox = self.mkdtemp()
+        previous = os.getcwd()
+        os.chdir(sandbox)
+        try:
+            os.mkdir("build")
+            self.prepare_sample_dir("build")
+            os.rename(os.path.join("build", "docs"), os.path.join("build", "doc"))
+            self.cmd.ensure_finalized()
+            self.assertEqual(self.cmd.upload_dir, os.path.join("build", "doc"))
+        finally:
+            os.chdir(previous)
+
+    def prepare_sample_dir(self, sample_dir=None):
+        if sample_dir is None:
+            sample_dir = self.mkdtemp()
+        os.mkdir(os.path.join(sample_dir, "docs"))
+        self.write_file(os.path.join(sample_dir, "docs", "index.html"), "Ce mortel ennui")
+        self.write_file(os.path.join(sample_dir, "index.html"), "Oh la la")
+        return sample_dir
+
+    def test_zip_dir(self):
+        source_dir = self.prepare_sample_dir()
+        compressed = zip_dir(source_dir)
+
+        zip_f = zipfile.ZipFile(compressed)
+        self.assertEqual(zip_f.namelist(), ['index.html', 'docs/index.html'])
+
+    def test_encode_multipart(self):
+        fields = [('username', 'wok'), ('password', 'secret')]
+        files = [('picture', 'wok.png', b'PNG89')]
+        content_type, body = encode_multipart(fields, files, b'-x')
+        self.assertEqual(b'multipart/form-data; boundary=-x', content_type)
+        self.assertEqual(EXPECTED_MULTIPART_OUTPUT, body.split(b'\r\n'))
+
+    def prepare_command(self):
+        self.cmd.upload_dir = self.prepare_sample_dir()
+        self.cmd.ensure_finalized()
+        self.cmd.repository = self.pypi.full_address
+        self.cmd.username = "username"
+        self.cmd.password = "password"
+
+    def test_upload(self):
+        self.prepare_command()
+        self.cmd.run()
+
+        self.assertEqual(len(self.pypi.requests), 1)
+        handler, request_data = self.pypi.requests[-1]
+        self.assertIn(b"content", request_data)
+        self.assertIn("Basic", handler.headers['authorization'])
+        self.assertTrue(handler.headers['content-type']
+            .startswith('multipart/form-data;'))
+
+        action, name, version, content =\
+            request_data.split("----------------GHSKFJDLGDS7543FJKLFHRE75642756743254".encode())[1:5]
+
+
+        # check that we picked the right chunks
+        self.assertIn(b'name=":action"', action)
+        self.assertIn(b'name="name"', name)
+        self.assertIn(b'name="version"', version)
+        self.assertIn(b'name="content"', content)
+
+        # check their contents
+        self.assertIn(b'doc_upload', action)
+        self.assertIn(b'distr-name', name)
+        self.assertIn(b'docs/index.html', content)
+        self.assertIn(b'Ce mortel ennui', content)
+
+    def test_https_connection(self):
+        https_called = False
+
+        orig_https = upload_docs_mod.http.client.HTTPConnection
+
+        def https_conn_wrapper(*args):
+            nonlocal https_called
+            https_called = True
+            # the testing server is http
+            return upload_docs_mod.http.client.HTTPConnection(*args)
+
+        upload_docs_mod.http.client.HTTPSConnection = https_conn_wrapper
+        try:
+            self.prepare_command()
+            self.cmd.run()
+            self.assertFalse(https_called)
+
+            self.cmd.repository = self.cmd.repository.replace("http", "https")
+            self.cmd.run()
+            self.assertTrue(https_called)
+        finally:
+            upload_docs_mod.http.client.HTTPConnection = orig_https
+
+    def test_handling_response(self):
+        self.pypi.default_response_status = '403 Forbidden'
+        self.prepare_command()
+        self.cmd.run()
+        self.assertIn('Upload failed (403): Forbidden', self.get_logs()[-1])
+
+        self.pypi.default_response_status = '301 Moved Permanently'
+        self.pypi.default_response_headers.append(("Location", "brand_new_location"))
+        self.cmd.run()
+        self.assertIn('brand_new_location', self.get_logs()[-1])
+
+    def test_reads_pypirc_data(self):
+        self.write_file(self.rc, PYPIRC % self.pypi.full_address)
+        self.cmd.repository = self.pypi.full_address
+        self.cmd.upload_dir = self.prepare_sample_dir()
+        self.cmd.ensure_finalized()
+        self.assertEqual(self.cmd.username, "real_slim_shady")
+        self.assertEqual(self.cmd.password, "long_island")
+
+    def test_checks_index_html_presence(self):
+        self.cmd.upload_dir = self.prepare_sample_dir()
+        os.remove(os.path.join(self.cmd.upload_dir, "index.html"))
+        self.assertRaises(PackagingFileError, self.cmd.ensure_finalized)
+
+    def test_checks_upload_dir(self):
+        self.cmd.upload_dir = self.prepare_sample_dir()
+        shutil.rmtree(os.path.join(self.cmd.upload_dir))
+        self.assertRaises(PackagingOptionError, self.cmd.ensure_finalized)
+
+    def test_show_response(self):
+        self.prepare_command()
+        self.cmd.show_response = True
+        self.cmd.run()
+        record = self.get_logs()[-1]
+        self.assertTrue(record, "should report the response")
+        self.assertIn(self.pypi.default_response_data, record)
+
+def test_suite():
+    # Entry point used by the packaging test runner and defaultTest below.
+    return unittest.makeSuite(UploadDocsTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_compiler.py b/Lib/packaging/tests/test_compiler.py
new file mode 100644
index 0000000..2c620cb
--- /dev/null
+++ b/Lib/packaging/tests/test_compiler.py
@@ -0,0 +1,66 @@
+"""Tests for distutils.compiler."""
+import os
+
+from packaging.compiler import (get_default_compiler, customize_compiler,
+                                gen_lib_options)
+from packaging.tests import unittest, support
+
+
+class FakeCompiler:
+    """Minimal stand-in implementing the option hooks gen_lib_options
+    calls on a compiler object."""
+
+    name = 'fake'
+    description = 'Fake'
+
+    def library_dir_option(self, dir):
+        return "-L" + dir
+
+    def runtime_library_dir_option(self, dir):
+        # may return a list of options rather than a single string
+        return ["-cool", "-R" + dir]
+
+    def find_library_file(self, dirs, lib, debug=False):
+        return 'found'
+
+    def library_option(self, lib):
+        return "-l" + lib
+
+
+class CompilerTestCase(support.EnvironRestorer, unittest.TestCase):
+    """Tests for compiler customization and link-option generation."""
+
+    restore_environ = ['AR', 'ARFLAGS']
+
+    @unittest.skipUnless(get_default_compiler() == 'unix',
+                        'irrelevant if default compiler is not unix')
+    def test_customize_compiler(self):
+
+        os.environ['AR'] = 'my_ar'
+        os.environ['ARFLAGS'] = '-arflags'
+
+        # make sure AR gets caught
+        class compiler:
+            name = 'unix'
+
+            def set_executables(self, **kw):
+                self.exes = kw
+
+        comp = compiler()
+        customize_compiler(comp)
+        self.assertEqual(comp.exes['archiver'], 'my_ar -arflags')
+
+    def test_gen_lib_options(self):
+        # note: os.path.join('dir', 'name') exercises the
+        # find_library_file path, which FakeCompiler answers with 'found'
+        compiler = FakeCompiler()
+        libdirs = ['lib1', 'lib2']
+        runlibdirs = ['runlib1']
+        libs = [os.path.join('dir', 'name'), 'name2']
+
+        opts = gen_lib_options(compiler, libdirs, runlibdirs, libs)
+        wanted = ['-Llib1', '-Llib2', '-cool', '-Rrunlib1', 'found',
+                  '-lname2']
+        self.assertEqual(opts, wanted)
+
+
+def test_suite():
+    # Entry point used by the packaging test runner and defaultTest below.
+    return unittest.makeSuite(CompilerTestCase)
+
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_config.py b/Lib/packaging/tests/test_config.py
new file mode 100644
index 0000000..8908c4f
--- /dev/null
+++ b/Lib/packaging/tests/test_config.py
@@ -0,0 +1,424 @@
+"""Tests for packaging.config."""
+import os
+import sys
+import logging
+from io import StringIO
+
+from packaging import command
+from packaging.dist import Distribution
+from packaging.errors import PackagingFileError
+from packaging.compiler import new_compiler, _COMPILERS
+from packaging.command.sdist import sdist
+
+from packaging.tests import unittest, support
+
+
+# Template setup.cfg exercising the metadata, files, resources and
+# global sections; %()s placeholders are filled in by write_setup.
+SETUP_CFG = """
+[metadata]
+name = RestingParrot
+version = 0.6.4
+author = Carl Meyer
+author_email = carl@oddbird.net
+maintainer = Éric Araujo
+maintainer_email = merwok@netwok.org
+summary = A sample project demonstrating packaging
+description-file = %(description-file)s
+keywords = packaging, sample project
+
+classifier =
+  Development Status :: 4 - Beta
+  Environment :: Console (Text Based)
+  Environment :: X11 Applications :: GTK; python_version < '3'
+  License :: OSI Approved :: MIT License
+  Programming Language :: Python
+  Programming Language :: Python :: 2
+  Programming Language :: Python :: 3
+
+requires_python = >=2.4, <3.2
+
+requires_dist =
+  PetShoppe
+  MichaelPalin (> 1.1)
+  pywin32; sys.platform == 'win32'
+  pysqlite2; python_version < '2.5'
+  inotify (0.0.1); sys.platform == 'linux2'
+
+requires_external = libxml2
+
+provides_dist = packaging-sample-project (0.2)
+                unittest2-sample-project
+
+project_url =
+  Main repository, http://bitbucket.org/carljm/sample-distutils2-project
+  Fork in progress, http://bitbucket.org/Merwok/sample-distutils2-project
+
+[files]
+packages_root = src
+
+packages = one
+           two
+           three
+
+modules = haven
+
+scripts =
+  script1.py
+  scripts/find-coconuts
+  bin/taunt
+
+package_data =
+  cheese = data/templates/*
+
+extra_files = %(extra-files)s
+
+# Replaces MANIFEST.in
+sdist_extra =
+  include THANKS HACKING
+  recursive-include examples *.txt *.py
+  prune examples/sample?/build
+
+resources=
+  bm/ {b1,b2}.gif = {icon}
+  Cf*/ *.CFG = {config}/baBar/
+  init_script = {script}/JunGle/
+
+[global]
+commands =
+    packaging.tests.test_config.FooBarBazTest
+
+compilers =
+    packaging.tests.test_config.DCompiler
+
+setup_hook = %(setup-hook)s
+
+
+
+[install_dist]
+sub_commands = foo
+"""
+
+# Can not be merged with SETUP_CFG else install_dist
+# command will fail when trying to compile C sources
+EXT_SETUP_CFG = """
+[files]
+packages = one
+           two
+
+[extension=speed_coconuts]
+name = one.speed_coconuts
+sources = c_src/speed_coconuts.c
+extra_link_args = "`gcc -print-file-name=libgcc.a`" -shared
+define_macros = HAVE_CAIRO HAVE_GTK2
+libraries = gecodeint gecodekernel -- sys.platform != 'win32'
+    GecodeInt GecodeKernel -- sys.platform == 'win32'
+
+[extension=fast_taunt]
+name = three.fast_taunt
+sources = cxx_src/utils_taunt.cxx
+          cxx_src/python_module.cxx
+include_dirs = /usr/include/gecode
+    /usr/include/blitz
+extra_compile_args = -fPIC -O2
+    -DGECODE_VERSION=$(./gecode_version) -- sys.platform != 'win32'
+    /DGECODE_VERSION='win32' -- sys.platform == 'win32'
+language = cxx
+
+"""
+
+
+class DCompiler:
+    """Fake compiler registered through the [global] compilers option
+    in SETUP_CFG."""
+    name = 'd'
+    description = 'D Compiler'
+
+    def __init__(self, *args):
+        pass
+
+
+def hook(content):
+    """setup_hook referenced by SETUP_CFG; mutates the parsed config."""
+    content['metadata']['version'] += '.dev1'
+
+
+class FooBarBazTest:
+    """Minimal command class registered through the [global] commands
+    option in SETUP_CFG; records that it ran on the distribution."""
+
+    def __init__(self, dist):
+        self.distribution = dist
+
+    @classmethod
+    def get_command_name(cls):
+        return 'foo'
+
+    def run(self):
+        self.distribution.foo_was_here = True
+
+    def nothing(self):
+        pass
+
+    def get_source_files(self):
+        return []
+
+    # command API methods this test never exercises are all no-ops
+    ensure_finalized = finalize_options = initialize_options = nothing
+
+
+class ConfigTestCase(support.TempdirManager,
+                     support.EnvironRestorer,
+                     support.LoggingCatcher,
+                     unittest.TestCase):
+    """Tests for parsing setup.cfg into a Distribution object."""
+
+    restore_environ = ['PLAT']
+
+    def setUp(self):
+        super(ConfigTestCase, self).setUp()
+        # silence command output; cleanups capture the current streams
+        # before they are replaced below
+        self.addCleanup(setattr, sys, 'stdout', sys.stdout)
+        self.addCleanup(setattr, sys, 'stderr', sys.stderr)
+        sys.stdout = StringIO()
+        sys.stderr = StringIO()
+
+        # each test runs in its own fresh temporary directory
+        self.addCleanup(os.chdir, os.getcwd())
+        tempdir = self.mkdtemp()
+        os.chdir(tempdir)
+        self.tempdir = tempdir
+
+    def write_setup(self, kwargs=None):
+        # write setup.cfg from the SETUP_CFG template; kwargs override
+        # the %()s placeholders
+        opts = {'description-file': 'README', 'extra-files': '',
+                'setup-hook': 'packaging.tests.test_config.hook'}
+        if kwargs:
+            opts.update(kwargs)
+        self.write_file('setup.cfg', SETUP_CFG % opts)
+
+    def get_dist(self):
+        # parse setup.cfg from the current directory
+        dist = Distribution()
+        dist.parse_config_files()
+        return dist
+
+    def test_config(self):
+        self.write_setup()
+        self.write_file('README', 'yeah')
+        os.mkdir('bm')
+        self.write_file(('bm', 'b1.gif'), '')
+        self.write_file(('bm', 'b2.gif'), '')
+        os.mkdir('Cfg')
+        self.write_file(('Cfg', 'data.CFG'), '')
+        self.write_file('init_script', '')
+
+        # try to load the metadata now
+        dist = self.get_dist()
+
+        # check what was done
+        self.assertEqual(dist.metadata['Author'], 'Carl Meyer')
+        self.assertEqual(dist.metadata['Author-Email'], 'carl@oddbird.net')
+
+        # the hook adds .dev1
+        self.assertEqual(dist.metadata['Version'], '0.6.4.dev1')
+
+        wanted = [
+            'Development Status :: 4 - Beta',
+            'Environment :: Console (Text Based)',
+            "Environment :: X11 Applications :: GTK; python_version < '3'",
+            'License :: OSI Approved :: MIT License',
+            'Programming Language :: Python',
+            'Programming Language :: Python :: 2',
+            'Programming Language :: Python :: 3']
+        self.assertEqual(dist.metadata['Classifier'], wanted)
+
+        wanted = ['packaging', 'sample project']
+        self.assertEqual(dist.metadata['Keywords'], wanted)
+
+        self.assertEqual(dist.metadata['Requires-Python'], '>=2.4, <3.2')
+
+        wanted = ['PetShoppe',
+                  'MichaelPalin (> 1.1)',
+                  "pywin32; sys.platform == 'win32'",
+                  "pysqlite2; python_version < '2.5'",
+                  "inotify (0.0.1); sys.platform == 'linux2'"]
+
+        self.assertEqual(dist.metadata['Requires-Dist'], wanted)
+        urls = [('Main repository',
+                 'http://bitbucket.org/carljm/sample-distutils2-project'),
+                ('Fork in progress',
+                 'http://bitbucket.org/Merwok/sample-distutils2-project')]
+        self.assertEqual(dist.metadata['Project-Url'], urls)
+
+        self.assertEqual(dist.packages, ['one', 'two', 'three'])
+        self.assertEqual(dist.py_modules, ['haven'])
+        self.assertEqual(dist.package_data, {'cheese': 'data/templates/*'})
+        # resources globs are expanded into file -> destination mappings
+        self.assertEqual(
+            {'bm/b1.gif': '{icon}/b1.gif',
+             'bm/b2.gif': '{icon}/b2.gif',
+             'Cfg/data.CFG': '{config}/baBar/data.CFG',
+             'init_script': '{script}/JunGle/init_script'},
+             dist.data_files)
+
+        self.assertEqual(dist.package_dir, 'src')
+
+        # Make sure we get the foo command loaded.  We use a string comparison
+        # instead of assertIsInstance because the class is not the same when
+        # this test is run directly: foo is packaging.tests.test_config.Foo
+        # because get_command_class uses the full name, but a bare "Foo" in
+        # this file would be __main__.Foo when run as "python test_config.py".
+        # The name FooBarBazTest should be unique enough to prevent
+        # collisions.
+        self.assertEqual('FooBarBazTest',
+                         dist.get_command_obj('foo').__class__.__name__)
+
+        # was the README loaded?
+        self.assertEqual(dist.metadata['description'], 'yeah')
+
+        # do we have the D Compiler enabled ?
+        self.assertIn('d', _COMPILERS)
+        d = new_compiler(compiler='d')
+        self.assertEqual(d.description, 'D Compiler')
+
+    def test_multiple_description_file(self):
+        self.write_setup({'description-file': 'README  CHANGES'})
+        self.write_file('README', 'yeah')
+        self.write_file('CHANGES', 'changelog2')
+        dist = self.get_dist()
+        self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+    def test_multiline_description_file(self):
+        self.write_setup({'description-file': 'README\n  CHANGES'})
+        self.write_file('README', 'yeah')
+        self.write_file('CHANGES', 'changelog')
+        dist = self.get_dist()
+        self.assertEqual(dist.metadata['description'], 'yeah\nchangelog')
+        self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+    def test_parse_extensions_in_config(self):
+        self.write_file('setup.cfg', EXT_SETUP_CFG)
+        dist = self.get_dist()
+
+        ext_modules = dict((mod.name, mod) for mod in dist.ext_modules)
+        self.assertEqual(len(ext_modules), 2)
+        ext = ext_modules.get('one.speed_coconuts')
+        self.assertEqual(ext.sources, ['c_src/speed_coconuts.c'])
+        self.assertEqual(ext.define_macros, ['HAVE_CAIRO', 'HAVE_GTK2'])
+        # values after '--' are platform-conditional
+        libs = ['gecodeint', 'gecodekernel']
+        if sys.platform == 'win32':
+            libs = ['GecodeInt', 'GecodeKernel']
+        self.assertEqual(ext.libraries, libs)
+        self.assertEqual(ext.extra_link_args,
+            ['`gcc -print-file-name=libgcc.a`', '-shared'])
+
+        ext = ext_modules.get('three.fast_taunt')
+        self.assertEqual(ext.sources,
+            ['cxx_src/utils_taunt.cxx', 'cxx_src/python_module.cxx'])
+        self.assertEqual(ext.include_dirs,
+            ['/usr/include/gecode', '/usr/include/blitz'])
+        cargs = ['-fPIC', '-O2']
+        if sys.platform == 'win32':
+            cargs.append("/DGECODE_VERSION='win32'")
+        else:
+            cargs.append('-DGECODE_VERSION=$(./gecode_version)')
+        self.assertEqual(ext.extra_compile_args, cargs)
+        self.assertEqual(ext.language, 'cxx')
+
+    def test_missing_setuphook_warns(self):
+        self.write_setup({'setup-hook': 'this.does._not.exist'})
+        self.write_file('README', 'yeah')
+        dist = self.get_dist()
+        logs = self.get_logs(logging.WARNING)
+        self.assertEqual(1, len(logs))
+        self.assertIn('could not import setup_hook', logs[0])
+
+    def test_metadata_requires_description_files_missing(self):
+        # sdist must fail when a description file is not in the manifest
+        self.write_setup({'description-file': 'README\n  README2'})
+        self.write_file('README', 'yeah')
+        self.write_file('README2', 'yeah')
+        os.mkdir('src')
+        self.write_file(('src', 'haven.py'), '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(('bin', 'taunt'), '#')
+
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file((pkg, '__init__.py'), '#')
+
+        dist = self.get_dist()
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        self.assertRaises(PackagingFileError, cmd.make_distribution)
+
+    def test_metadata_requires_description_files(self):
+        # Create the following file structure:
+        #   README
+        #   README2
+        #   script1.py
+        #   scripts/
+        #       find-coconuts
+        #   bin/
+        #       taunt
+        #   src/
+        #       haven.py
+        #       one/__init__.py
+        #       two/__init__.py
+        #       three/__init__.py
+
+        self.write_setup({'description-file': 'README\n  README2',
+                          'extra-files': '\n  README3'})
+        self.write_file('README', 'yeah 1')
+        self.write_file('README2', 'yeah 2')
+        self.write_file('README3', 'yeah 3')
+        os.mkdir('src')
+        self.write_file(('src', 'haven.py'), '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(('bin', 'taunt'), '#')
+
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file((pkg, '__init__.py'), '#')
+
+        dist = self.get_dist()
+        self.assertIn('yeah 1\nyeah 2', dist.metadata['description'])
+
+        # README2 is a description file but not listed in extra-files,
+        # so building the sdist must fail
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        self.assertRaises(PackagingFileError, cmd.make_distribution)
+
+        # with both description files in extra-files, the sdist succeeds
+        self.write_setup({'description-file': 'README\n  README2',
+                          'extra-files': '\n  README2\n    README'})
+        dist = self.get_dist()
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        cmd.make_distribution()
+        with open('MANIFEST') as fp:
+            self.assertIn('README\nREADME2\n', fp.read())
+
+    def test_sub_commands(self):
+        self.write_setup()
+        self.write_file('README', 'yeah')
+        os.mkdir('src')
+        self.write_file(('src', 'haven.py'), '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(('bin', 'taunt'), '#')
+
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file((pkg, '__init__.py'), '#')
+
+        # try to run the install command to see if foo is called
+        dist = self.get_dist()
+        self.assertIn('foo', command.get_command_names())
+        self.assertEqual('FooBarBazTest',
+                         dist.get_command_obj('foo').__class__.__name__)
+
+
+def test_suite():
+    # Entry point used by the packaging test runner and defaultTest below.
+    return unittest.makeSuite(ConfigTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_create.py b/Lib/packaging/tests/test_create.py
new file mode 100644
index 0000000..99ab063
--- /dev/null
+++ b/Lib/packaging/tests/test_create.py
@@ -0,0 +1,235 @@
+"""Tests for packaging.create."""
+import io
+import os
+import sys
+import sysconfig
+from textwrap import dedent
+from packaging.create import MainProgram, ask_yn, ask, main
+
+from packaging.tests import support, unittest
+
+
+class CreateTestCase(support.TempdirManager,
+                     support.EnvironRestorer,
+                     unittest.TestCase):
+    """Tests for the interactive helpers in packaging.create."""
+
+    restore_environ = ['PLAT']
+
+    def setUp(self):
+        super(CreateTestCase, self).setUp()
+        # Redirect the real streams so ask()/ask_yn() read scripted answers
+        # from an in-memory buffer and prompts do not pollute test output.
+        self._stdin = sys.stdin  # TODO use Inputs
+        self._stdout = sys.stdout
+        sys.stdin = io.StringIO()
+        sys.stdout = io.StringIO()
+        self._cwd = os.getcwd()
+        self.wdir = self.mkdtemp()
+        os.chdir(self.wdir)
+        # patch sysconfig
+        # (so that resource categories resolve to predictable {man}/{doc}
+        # paths regardless of the host installation layout)
+        self._old_get_paths = sysconfig.get_paths
+        sysconfig.get_paths = lambda *args, **kwargs: {
+            'man': sys.prefix + '/share/man',
+            'doc': sys.prefix + '/share/doc/pyxfoil', }
+
+    def tearDown(self):
+        # undo everything patched globally in setUp
+        super(CreateTestCase, self).tearDown()
+        sys.stdin = self._stdin
+        sys.stdout = self._stdout
+        os.chdir(self._cwd)
+        sysconfig.get_paths = self._old_get_paths
+
+    def test_ask_yn(self):
+        # a scripted 'y' answer is returned verbatim
+        sys.stdin.write('y\n')
+        sys.stdin.seek(0)
+        self.assertEqual('y', ask_yn('is this a test'))
+
+    def test_ask(self):
+        sys.stdin.write('a\n')
+        sys.stdin.write('b\n')
+        sys.stdin.seek(0)
+        self.assertEqual('a', ask('is this a test'))
+        # a very long prompt exercises the lengthy=True path; the scripted
+        # 'b' answer still takes precedence over the 'c' default
+        self.assertEqual('b', ask(str(list(range(0, 70))), default='c',
+                                  lengthy=True))
+
+    def test_set_multi(self):
+        # _set_multi appends one scripted answer to the named data list
+        mainprogram = MainProgram()
+        sys.stdin.write('aaaaa\n')
+        sys.stdin.seek(0)
+        mainprogram.data['author'] = []
+        mainprogram._set_multi('_set_multi test', 'author')
+        self.assertEqual(['aaaaa'], mainprogram.data['author'])
+
+    def test_find_files(self):
+        # making sure we scan a project dir correctly
+        mainprogram = MainProgram()
+
+        # building the structure
+        tempdir = self.wdir
+        dirs = ['pkg1', 'data', 'pkg2', 'pkg2/sub']
+        files = ['README', 'setup.cfg', 'foo.py',
+                 'pkg1/__init__.py', 'pkg1/bar.py',
+                 'data/data1', 'pkg2/__init__.py',
+                 'pkg2/sub/__init__.py']
+
+        for dir_ in dirs:
+            os.mkdir(os.path.join(tempdir, dir_))
+
+        for file_ in files:
+            path = os.path.join(tempdir, file_)
+            self.write_file(path, 'xxx')
+
+        mainprogram._find_files()
+        mainprogram.data['packages'].sort()
+
+        # do we have what we want?
+        # dirs with __init__.py become packages, top-level .py files become
+        # modules, everything else is collected as extra files
+        self.assertEqual(mainprogram.data['packages'],
+                         ['pkg1', 'pkg2', 'pkg2.sub'])
+        self.assertEqual(mainprogram.data['modules'], ['foo'])
+        data_fn = os.path.join('data', 'data1')
+        self.assertEqual(set(mainprogram.data['extra_files']),
+                         set(['setup.cfg', 'README', data_fn]))
+
+    def test_convert_setup_py_to_cfg(self):
+        # Write a distutils-style setup.py, answer 'y' to the conversion
+        # prompt and check the generated setup.cfg content line by line.
+        self.write_file((self.wdir, 'setup.py'),
+                        dedent("""
+        # -*- coding: utf-8 -*-
+        from distutils.core import setup
+
+        long_description = '''My super Death-scription
+        barbar is now on the public domain,
+        ho, baby !'''
+
+        setup(name='pyxfoil',
+              version='0.2',
+              description='Python bindings for the Xfoil engine',
+              long_description=long_description,
+              maintainer='André Espaze',
+              maintainer_email='andre.espaze@logilab.fr',
+              url='http://www.python-science.org/project/pyxfoil',
+              license='GPLv2',
+              packages=['pyxfoil', 'babar', 'me'],
+              data_files=[
+                  ('share/doc/pyxfoil', ['README.rst']),
+                  ('share/man', ['pyxfoil.1']),
+                         ],
+              py_modules=['my_lib', 'mymodule'],
+              package_dir={
+                  'babar': '',
+                  'me': 'Martinique/Lamentin',
+                          },
+              package_data={
+                  'babar': ['Pom', 'Flora', 'Alexander'],
+                  'me': ['dady', 'mumy', 'sys', 'bro'],
+                  '':  ['setup.py', 'README'],
+                  'pyxfoil': ['fengine.so'],
+                           },
+              scripts=['my_script', 'bin/run'],
+              )
+        """))
+        sys.stdin.write('y\n')
+        sys.stdin.seek(0)
+        main()
+
+        with open(os.path.join(self.wdir, 'setup.cfg')) as fp:
+            lines = set(line.rstrip() for line in fp)
+
+        # FIXME don't use sets
+        self.assertEqual(lines, set(['',
+            '[metadata]',
+            'version = 0.2',
+            'name = pyxfoil',
+            'maintainer = André Espaze',
+            'description = My super Death-scription',
+            '       |barbar is now on the public domain,',
+            '       |ho, baby !',
+            'maintainer_email = andre.espaze@logilab.fr',
+            'home_page = http://www.python-science.org/project/pyxfoil',
+            'download_url = UNKNOWN',
+            'summary = Python bindings for the Xfoil engine',
+            '[files]',
+            'modules = my_lib',
+            '    mymodule',
+            'packages = pyxfoil',
+            '    babar',
+            '    me',
+            'extra_files = Martinique/Lamentin/dady',
+            '    Martinique/Lamentin/mumy',
+            '    Martinique/Lamentin/sys',
+            '    Martinique/Lamentin/bro',
+            '    Pom',
+            '    Flora',
+            '    Alexander',
+            '    setup.py',
+            '    README',
+            '    pyxfoil/fengine.so',
+            'scripts = my_script',
+            '    bin/run',
+            'resources =',
+            '    README.rst = {doc}',
+            '    pyxfoil.1 = {man}',
+        ]))
+
+    def test_convert_setup_py_to_cfg_with_description_in_readme(self):
+        # Same conversion, but the long description comes from a README
+        # file: the converter should emit a description-file reference
+        # instead of inlining the text.
+        self.write_file((self.wdir, 'setup.py'),
+                        dedent("""
+        # -*- coding: utf-8 -*-
+        from distutils.core import setup
+        fp = open('README.txt')
+        try:
+            long_description = fp.read()
+        finally:
+            fp.close()
+
+        setup(name='pyxfoil',
+              version='0.2',
+              description='Python bindings for the Xfoil engine',
+              long_description=long_description,
+              maintainer='André Espaze',
+              maintainer_email='andre.espaze@logilab.fr',
+              url='http://www.python-science.org/project/pyxfoil',
+              license='GPLv2',
+              packages=['pyxfoil'],
+              package_data={'pyxfoil': ['fengine.so', 'babar.so']},
+              data_files=[
+                ('share/doc/pyxfoil', ['README.rst']),
+                ('share/man', ['pyxfoil.1']),
+              ],
+        )
+        """))
+        self.write_file((self.wdir, 'README.txt'),
+                        dedent('''
+My super Death-scription
+barbar is now in the public domain,
+ho, baby!
+                        '''))
+        sys.stdin.write('y\n')
+        sys.stdin.seek(0)
+        # FIXME Out of memory error.
+        main()
+        with open(os.path.join(self.wdir, 'setup.cfg')) as fp:
+            lines = set(line.rstrip() for line in fp)
+
+        self.assertEqual(lines, set(['',
+            '[metadata]',
+            'version = 0.2',
+            'name = pyxfoil',
+            'maintainer = André Espaze',
+            'maintainer_email = andre.espaze@logilab.fr',
+            'home_page = http://www.python-science.org/project/pyxfoil',
+            'download_url = UNKNOWN',
+            'summary = Python bindings for the Xfoil engine',
+            'description-file = README.txt',
+            '[files]',
+            'packages = pyxfoil',
+            'extra_files = pyxfoil/fengine.so',
+            '    pyxfoil/babar.so',
+            'resources =',
+            '    README.rst = {doc}',
+            '    pyxfoil.1 = {man}',
+        ]))
+
+
+def test_suite():
+    """Return a suite with all the tests defined in this module."""
+    return unittest.makeSuite(CreateTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_cygwinccompiler.py b/Lib/packaging/tests/test_cygwinccompiler.py
new file mode 100644
index 0000000..17c43cd
--- /dev/null
+++ b/Lib/packaging/tests/test_cygwinccompiler.py
@@ -0,0 +1,88 @@
+"""Tests for packaging.cygwinccompiler."""
+import os
+import sys
+import sysconfig
+from packaging.compiler.cygwinccompiler import (
+    check_config_h, get_msvcr,
+    CONFIG_H_OK, CONFIG_H_NOTOK, CONFIG_H_UNCERTAIN)
+
+from packaging.tests import unittest, support
+
+
+class CygwinCCompilerTestCase(support.TempdirManager,
+                              unittest.TestCase):
+    """Tests for check_config_h and get_msvcr.
+
+    Both functions inspect sys.version, so setUp saves it and tearDown
+    restores it; sysconfig.get_config_h_filename is patched to point at a
+    scratch file this test controls.
+    """
+
+    def setUp(self):
+        super(CygwinCCompilerTestCase, self).setUp()
+        self.version = sys.version
+        self.python_h = os.path.join(self.mkdtemp(), 'python.h')
+        self.old_get_config_h_filename = sysconfig.get_config_h_filename
+        sysconfig.get_config_h_filename = self._get_config_h_filename
+
+    def tearDown(self):
+        sys.version = self.version
+        sysconfig.get_config_h_filename = self.old_get_config_h_filename
+        super(CygwinCCompilerTestCase, self).tearDown()
+
+    def _get_config_h_filename(self):
+        # replacement for sysconfig.get_config_h_filename during the tests
+        return self.python_h
+
+    def test_check_config_h(self):
+        # check_config_h looks for "GCC" in sys.version first
+        # returns CONFIG_H_OK if found
+        sys.version = ('2.6.1 (r261:67515, Dec  6 2008, 16:42:21) \n[GCC '
+                       '4.0.1 (Apple Computer, Inc. build 5370)]')
+
+        self.assertEqual(check_config_h()[0], CONFIG_H_OK)
+
+        # then it tries to see if it can find "__GNUC__" in pyconfig.h
+        sys.version = 'something without the *CC word'
+
+        # if the file doesn't exist it returns CONFIG_H_UNCERTAIN
+        self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN)
+
+        # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
+        self.write_file(self.python_h, 'xxx')
+        self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK)
+
+        # and CONFIG_H_OK if __GNUC__ is found
+        self.write_file(self.python_h, 'xxx __GNUC__ xxx')
+        self.assertEqual(check_config_h()[0], CONFIG_H_OK)
+
+    def test_get_msvcr(self):
+        # get_msvcr maps the "MSC v.XXXX" marker in sys.version to the
+        # matching msvcr runtime library name
+
+        # none
+        sys.version = ('2.6.1 (r261:67515, Dec  6 2008, 16:42:21) '
+                       '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]')
+        self.assertEqual(get_msvcr(), None)
+
+        # MSVC 7.0
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1300 32 bits (Intel)]')
+        self.assertEqual(get_msvcr(), ['msvcr70'])
+
+        # MSVC 7.1
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1310 32 bits (Intel)]')
+        self.assertEqual(get_msvcr(), ['msvcr71'])
+
+        # VS2005 / MSVC 8.0
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1400 32 bits (Intel)]')
+        self.assertEqual(get_msvcr(), ['msvcr80'])
+
+        # VS2008 / MSVC 9.0
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1500 32 bits (Intel)]')
+        self.assertEqual(get_msvcr(), ['msvcr90'])
+
+        # unknown
+        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+                       '[MSC v.1999 32 bits (Intel)]')
+        self.assertRaises(ValueError, get_msvcr)
+
+
+def test_suite():
+    """Return a suite with all the tests defined in this module."""
+    return unittest.makeSuite(CygwinCCompilerTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_database.py b/Lib/packaging/tests/test_database.py
new file mode 100644
index 0000000..c8d9415
--- /dev/null
+++ b/Lib/packaging/tests/test_database.py
@@ -0,0 +1,506 @@
+import os
+import io
+import csv
+import imp
+import sys
+import shutil
+import zipfile
+import tempfile
+from os.path import relpath  # separate import for backport concerns
+from hashlib import md5
+
+from packaging.errors import PackagingError
+from packaging.metadata import Metadata
+from packaging.tests import unittest, run_unittest, support, TESTFN
+
+from packaging.database import (
+    Distribution, EggInfoDistribution, get_distribution, get_distributions,
+    provides_distribution, obsoletes_distribution, get_file_users,
+    enable_cache, disable_cache, distinfo_dirname, _yield_distributions)
+
+# TODO Add a test for getting a distribution provided by another distribution
+# TODO Add a test for absolute pathed RECORD items (e.g. /etc/myapp/config.ini)
+# TODO Add tests from the former pep376 project (zipped site-packages, etc.)
+
+
+def get_hexdigest(filename):
+    """Return the hex MD5 digest of *filename*'s contents."""
+    with open(filename, 'rb') as file:
+        checksum = md5(file.read())
+    return checksum.hexdigest()
+
+
+def record_pieces(file):
+    """Return [path, md5, size] for *file*, i.e. one RECORD row.
+
+    The path is stored relative to sys.prefix, matching how the fake
+    dists' RECORD files are checked in the tests below.
+    """
+    path = relpath(file, sys.prefix)
+    digest = get_hexdigest(file)
+    size = os.path.getsize(file)
+    return [path, digest, size]
+
+
+class CommonDistributionTests:
+    """Mixin used to test the interface common to both Distribution classes.
+
+    Derived classes define cls, sample_dist, dirs and records.  These
+    attributes are used in test methods.  See source code for details.
+    """
+
+    def setUp(self):
+        super(CommonDistributionTests, self).setUp()
+        # disable the module-level caching so tests neither read stale
+        # entries nor leave their fixtures in the cache afterwards
+        self.addCleanup(enable_cache)
+        disable_cache()
+        self.fake_dists_path = os.path.abspath(
+            os.path.join(os.path.dirname(__file__), 'fake_dists'))
+
+    def test_instantiation(self):
+        # check that useful attributes are here
+        name, version, distdir = self.sample_dist
+        here = os.path.abspath(os.path.dirname(__file__))
+        dist_path = os.path.join(here, 'fake_dists', distdir)
+
+        dist = self.dist = self.cls(dist_path)
+        self.assertEqual(dist.path, dist_path)
+        self.assertEqual(dist.name, name)
+        self.assertEqual(dist.metadata['Name'], name)
+        self.assertIsInstance(dist.metadata, Metadata)
+        self.assertEqual(dist.version, version)
+        self.assertEqual(dist.metadata['Version'], version)
+
+    def test_repr(self):
+        dist = self.cls(self.dirs[0])
+        # just check that the class name is in the repr
+        self.assertIn(self.cls.__name__, repr(dist))
+
+    def test_comparison(self):
+        # tests for __eq__ and __hash__
+        dist = self.cls(self.dirs[0])
+        dist2 = self.cls(self.dirs[0])
+        dist3 = self.cls(self.dirs[1])
+        # equal objects must be usable as dict keys (hash consistency)
+        self.assertIn(dist, {dist: True})
+        self.assertEqual(dist, dist)
+
+        # distinct instances built from the same path compare equal;
+        # different paths and foreign types do not
+        self.assertIsNot(dist, dist2)
+        self.assertEqual(dist, dist2)
+        self.assertNotEqual(dist, dist3)
+        self.assertNotEqual(dist, ())
+
+    def test_list_installed_files(self):
+        # every entry reported must match the RECORD data prepared by the
+        # concrete subclass in self.records
+        for dir_ in self.dirs:
+            dist = self.cls(dir_)
+            for path, md5_, size in dist.list_installed_files():
+                record_data = self.records[dist.path]
+                self.assertIn(path, record_data)
+                self.assertEqual(md5_, record_data[path][0])
+                self.assertEqual(size, record_data[path][1])
+
+
+class TestDistribution(CommonDistributionTests, unittest.TestCase):
+    """Tests for packaging.database.Distribution (.dist-info dirs)."""
+
+    cls = Distribution
+    sample_dist = 'choxie', '2.0.0.9', 'choxie-2.0.0.9.dist-info'
+
+    def setUp(self):
+        super(TestDistribution, self).setUp()
+        self.dirs = [os.path.join(self.fake_dists_path, f)
+                     for f in os.listdir(self.fake_dists_path)
+                     if f.endswith('.dist-info')]
+
+        # generate a RECORD file for each fake dist-info directory and keep
+        # its parsed content around for the base-class checks
+        self.records = {}
+        for distinfo_dir in self.dirs:
+            record_file = os.path.join(distinfo_dir, 'RECORD')
+            with open(record_file, 'w') as file:
+                record_writer = csv.writer(
+                    file, delimiter=',', quoting=csv.QUOTE_NONE)
+
+                dist_location = distinfo_dir.replace('.dist-info', '')
+
+                for path, dirs, files in os.walk(dist_location):
+                    for f in files:
+                        record_writer.writerow(record_pieces(
+                                               os.path.join(path, f)))
+                for file in ('INSTALLER', 'METADATA', 'REQUESTED'):
+                    record_writer.writerow(record_pieces(
+                                           os.path.join(distinfo_dir, file)))
+                # the RECORD file itself is listed without hash and size
+                record_writer.writerow([relpath(record_file, sys.prefix)])
+
+            with open(record_file) as file:
+                record_reader = csv.reader(file)
+                record_data = {}
+                for row in record_reader:
+                    # pad short rows (e.g. the RECORD entry) with None
+                    path, md5_, size = (row[:] +
+                                        [None for i in range(len(row), 3)])
+                    record_data[path] = md5_, size
+            self.records[distinfo_dir] = record_data
+
+    def tearDown(self):
+        # truncate the RECORD files so the checked-in fixture tree is left
+        # unchanged for the next test run
+        for distinfo_dir in self.dirs:
+            record_file = os.path.join(distinfo_dir, 'RECORD')
+            open(record_file, 'w').close()
+        super(TestDistribution, self).tearDown()
+
+    def test_instantiation(self):
+        # extends the common check with the dist-info-only attribute
+        super(TestDistribution, self).test_instantiation()
+        self.assertIsInstance(self.dist.requested, bool)
+
+    def test_uses(self):
+        # Test to determine if a distribution uses a specified file.
+        # Criteria to test against
+        distinfo_name = 'grammar-1.0a4'
+        distinfo_dir = os.path.join(self.fake_dists_path,
+                                    distinfo_name + '.dist-info')
+        true_path = [self.fake_dists_path, distinfo_name,
+                     'grammar', 'utils.py']
+        true_path = relpath(os.path.join(*true_path), sys.prefix)
+        false_path = [self.fake_dists_path, 'towel_stuff-0.1', 'towel_stuff',
+                      '__init__.py']
+        false_path = relpath(os.path.join(*false_path), sys.prefix)
+
+        # Test if the distribution uses the file in question
+        dist = Distribution(distinfo_dir)
+        self.assertTrue(dist.uses(true_path))
+        self.assertFalse(dist.uses(false_path))
+
+    def test_get_distinfo_file(self):
+        # Test the retrieval of dist-info file objects.
+        distinfo_name = 'choxie-2.0.0.9'
+        other_distinfo_name = 'grammar-1.0a4'
+        distinfo_dir = os.path.join(self.fake_dists_path,
+                                    distinfo_name + '.dist-info')
+        dist = Distribution(distinfo_dir)
+        # Test for known good file matches
+        distinfo_files = [
+            # Relative paths
+            'INSTALLER', 'METADATA',
+            # Absolute paths
+            os.path.join(distinfo_dir, 'RECORD'),
+            os.path.join(distinfo_dir, 'REQUESTED'),
+        ]
+
+        for distfile in distinfo_files:
+            with dist.get_distinfo_file(distfile) as value:
+                self.assertIsInstance(value, io.TextIOWrapper)
+                # Is it the correct file?
+                self.assertEqual(value.name,
+                                 os.path.join(distinfo_dir, distfile))
+
+        # Test an absolute path that is part of another distribution's
+        # dist-info
+        other_distinfo_file = os.path.join(
+            self.fake_dists_path, other_distinfo_name + '.dist-info',
+            'REQUESTED')
+        self.assertRaises(PackagingError, dist.get_distinfo_file,
+                          other_distinfo_file)
+        # Test for a file that should not exist
+        self.assertRaises(PackagingError, dist.get_distinfo_file,
+                          'MAGICFILE')
+
+    def test_list_distinfo_files(self):
+        # Test for the iteration of RECORD path entries.
+        distinfo_name = 'towel_stuff-0.1'
+        distinfo_dir = os.path.join(self.fake_dists_path,
+                                    distinfo_name + '.dist-info')
+        dist = Distribution(distinfo_dir)
+        # Test for the iteration of the raw path
+        distinfo_record_paths = self.records[distinfo_dir].keys()
+        found = dist.list_distinfo_files()
+        self.assertEqual(sorted(found), sorted(distinfo_record_paths))
+        # Test for the iteration of local absolute paths
+        distinfo_record_paths = [os.path.join(sys.prefix, path)
+            for path in self.records[distinfo_dir]]
+        found = dist.list_distinfo_files(local=True)
+        self.assertEqual(sorted(found), sorted(distinfo_record_paths))
+
+    def test_get_resources_path(self):
+        # a declared resource resolves to its path; unknown keys raise
+        distinfo_name = 'babar-0.1'
+        distinfo_dir = os.path.join(self.fake_dists_path,
+                                    distinfo_name + '.dist-info')
+        dist = Distribution(distinfo_dir)
+        resource_path = dist.get_resource_path('babar.png')
+        self.assertEqual(resource_path, 'babar.png')
+        self.assertRaises(KeyError, dist.get_resource_path, 'notexist')
+
+
+class TestEggInfoDistribution(CommonDistributionTests,
+                              support.LoggingCatcher,
+                              unittest.TestCase):
+    """Tests for packaging.database.EggInfoDistribution (.egg/.egg-info)."""
+
+    cls = EggInfoDistribution
+    sample_dist = 'bacon', '0.1', 'bacon-0.1.egg-info'
+
+    def setUp(self):
+        super(TestEggInfoDistribution, self).setUp()
+
+        self.dirs = [os.path.join(self.fake_dists_path, f)
+                     for f in os.listdir(self.fake_dists_path)
+                     if f.endswith('.egg') or f.endswith('.egg-info')]
+
+        # no installed-files data for eggs yet; see the skipped test below
+        self.records = {}
+
+    @unittest.skip('not implemented yet')
+    def test_list_installed_files(self):
+        # EggInfoDistribution defines list_installed_files but there is no
+        # test for it yet; someone with setuptools expertise needs to add a
+        # file with the list of installed files for one of the egg fake dists
+        # and write the support code to populate self.records (and then delete
+        # this method)
+        pass
+
+
+class TestDatabase(support.LoggingCatcher,
+                   unittest.TestCase):
+    """Tests for the module-level query functions of packaging.database.
+
+    The fake_dists directory is prepended to sys.path so the lookup
+    functions see a known set of installed distributions.
+    """
+
+    def setUp(self):
+        super(TestDatabase, self).setUp()
+        disable_cache()
+        # Setup the path environment with our fake distributions
+        current_path = os.path.abspath(os.path.dirname(__file__))
+        self.sys_path = sys.path[:]
+        self.fake_dists_path = os.path.join(current_path, 'fake_dists')
+        sys.path.insert(0, self.fake_dists_path)
+
+    def tearDown(self):
+        sys.path[:] = self.sys_path
+        enable_cache()
+        super(TestDatabase, self).tearDown()
+
+    def test_distinfo_dirname(self):
+        # Given a name and a version, we expect the distinfo_dirname function
+        # to return a standard distribution information directory name.
+
+        items = [
+            # (name, version, standard_dirname)
+            # Test for a very simple single word name and decimal version
+            # number
+            ('docutils', '0.5', 'docutils-0.5.dist-info'),
+            # Test for another except this time with a '-' in the name, which
+            # needs to be transformed during the name lookup
+            ('python-ldap', '2.5', 'python_ldap-2.5.dist-info'),
+            # Test for both '-' in the name and a funky version number
+            ('python-ldap', '2.5 a---5', 'python_ldap-2.5 a---5.dist-info'),
+            ]
+
+        # Loop through the items to validate the results
+        for name, version, standard_dirname in items:
+            dirname = distinfo_dirname(name, version)
+            self.assertEqual(dirname, standard_dirname)
+
+    def test_get_distributions(self):
+        # Lookup all distributions found in the ``sys.path``.
+        # This test could potentially pick up other installed distributions
+        fake_dists = [('grammar', '1.0a4'), ('choxie', '2.0.0.9'),
+                      ('towel-stuff', '0.1'), ('babar', '0.1')]
+        found_dists = []
+
+        # Verify the fake dists have been found.
+        dists = [dist for dist in get_distributions()]
+        for dist in dists:
+            self.assertIsInstance(dist, Distribution)
+            if (dist.name in dict(fake_dists) and
+                dist.path.startswith(self.fake_dists_path)):
+                found_dists.append((dist.name, dist.metadata['version'], ))
+            else:
+                # check that it doesn't find anything more than this
+                self.assertFalse(dist.path.startswith(self.fake_dists_path))
+            # otherwise we don't care what other distributions are found
+
+        # Finally, test that we found all that we were looking for
+        self.assertEqual(sorted(found_dists), sorted(fake_dists))
+
+        # Now, test if the egg-info distributions are found correctly as well
+        fake_dists += [('bacon', '0.1'), ('cheese', '2.0.2'),
+                       ('coconuts-aster', '10.3'),
+                       ('banana', '0.4'), ('strawberry', '0.6'),
+                       ('truffles', '5.0'), ('nut', 'funkyversion')]
+        found_dists = []
+
+        dists = [dist for dist in get_distributions(use_egg_info=True)]
+        for dist in dists:
+            self.assertIsInstance(dist, (Distribution, EggInfoDistribution))
+            if (dist.name in dict(fake_dists) and
+                dist.path.startswith(self.fake_dists_path)):
+                found_dists.append((dist.name, dist.metadata['version']))
+            else:
+                self.assertFalse(dist.path.startswith(self.fake_dists_path))
+
+        self.assertEqual(sorted(fake_dists), sorted(found_dists))
+
+    def test_get_distribution(self):
+        # Test for looking up a distribution by name.
+        # Test the lookup of the towel-stuff distribution
+        name = 'towel-stuff'  # Note: This is different from the directory name
+
+        # Lookup the distribution
+        dist = get_distribution(name)
+        self.assertIsInstance(dist, Distribution)
+        self.assertEqual(dist.name, name)
+
+        # Verify that an unknown distribution returns None
+        self.assertIsNone(get_distribution('bogus'))
+
+        # Verify partial name matching doesn't work
+        self.assertIsNone(get_distribution('towel'))
+
+        # Verify that it does not find egg-info distributions, when not
+        # instructed to
+        self.assertIsNone(get_distribution('bacon'))
+        self.assertIsNone(get_distribution('cheese'))
+        self.assertIsNone(get_distribution('strawberry'))
+        self.assertIsNone(get_distribution('banana'))
+
+        # Now check that it works well in both situations, when egg-info
+        # is a file and directory respectively.
+        dist = get_distribution('cheese', use_egg_info=True)
+        self.assertIsInstance(dist, EggInfoDistribution)
+        self.assertEqual(dist.name, 'cheese')
+
+        dist = get_distribution('bacon', use_egg_info=True)
+        self.assertIsInstance(dist, EggInfoDistribution)
+        self.assertEqual(dist.name, 'bacon')
+
+        dist = get_distribution('banana', use_egg_info=True)
+        self.assertIsInstance(dist, EggInfoDistribution)
+        self.assertEqual(dist.name, 'banana')
+
+        dist = get_distribution('strawberry', use_egg_info=True)
+        self.assertIsInstance(dist, EggInfoDistribution)
+        self.assertEqual(dist.name, 'strawberry')
+
+    def test_get_file_users(self):
+        # Test the iteration of distributions that use a file.
+        name = 'towel_stuff-0.1'
+        path = os.path.join(self.fake_dists_path, name,
+                            'towel_stuff', '__init__.py')
+        for dist in get_file_users(path):
+            self.assertIsInstance(dist, Distribution)
+            self.assertEqual(dist.name, name)
+
+    def test_provides(self):
+        # Test for looking up distributions by what they provide
+        checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y))
+
+        l = [dist.name for dist in provides_distribution('truffles')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.0')]
+        checkLists(l, ['choxie'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.0',
+                                                         use_egg_info=True)]
+        checkLists(l, ['choxie', 'cheese'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.1.2')]
+        checkLists(l, ['towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '1.1')]
+        checkLists(l, ['towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles',
+                                                         '!=1.1,<=2.0')]
+        checkLists(l, ['choxie'])
+
+        l = [dist.name for dist in provides_distribution('truffles',
+                                                         '!=1.1,<=2.0',
+                                                          use_egg_info=True)]
+        checkLists(l, ['choxie', 'bacon', 'cheese'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>1.0')]
+        checkLists(l, ['towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>1.5')]
+        checkLists(l, [])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>1.5',
+                                                         use_egg_info=True)]
+        checkLists(l, ['bacon'])
+
+        l = [dist.name for dist in provides_distribution('truffles', '>=1.0')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in provides_distribution('strawberry', '0.6',
+                                                         use_egg_info=True)]
+        checkLists(l, ['coconuts-aster'])
+
+        l = [dist.name for dist in provides_distribution('strawberry', '>=0.5',
+                                                         use_egg_info=True)]
+        checkLists(l, ['coconuts-aster'])
+
+        l = [dist.name for dist in provides_distribution('strawberry', '>0.6',
+                                                         use_egg_info=True)]
+        checkLists(l, [])
+
+        l = [dist.name for dist in provides_distribution('banana', '0.4',
+                                                         use_egg_info=True)]
+        checkLists(l, ['coconuts-aster'])
+
+        l = [dist.name for dist in provides_distribution('banana', '>=0.3',
+                                                         use_egg_info=True)]
+        checkLists(l, ['coconuts-aster'])
+
+        l = [dist.name for dist in provides_distribution('banana', '!=0.4',
+                                                         use_egg_info=True)]
+        checkLists(l, [])
+
+    def test_obsoletes(self):
+        # Test looking for distributions based on what they obsolete
+        checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y))
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '1.0')]
+        checkLists(l, [])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '1.0',
+                                                          use_egg_info=True)]
+        checkLists(l, ['cheese', 'bacon'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.8')]
+        checkLists(l, ['choxie'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.8',
+                                                          use_egg_info=True)]
+        checkLists(l, ['choxie', 'cheese'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.9.6')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles',
+                                                          '0.5.2.3')]
+        checkLists(l, ['choxie', 'towel-stuff'])
+
+        l = [dist.name for dist in obsoletes_distribution('truffles', '0.2')]
+        checkLists(l, ['towel-stuff'])
+
+    def test_yield_distribution(self):
+        # tests the internal function _yield_distributions
+        checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y))
+
+        eggs = [('bacon', '0.1'), ('banana', '0.4'), ('strawberry', '0.6'),
+                ('truffles', '5.0'), ('cheese', '2.0.2'),
+                ('coconuts-aster', '10.3'), ('nut', 'funkyversion')]
+        dists = [('choxie', '2.0.0.9'), ('grammar', '1.0a4'),
+                 ('towel-stuff', '0.1'), ('babar', '0.1')]
+
+        # the two boolean arguments select dist-info and egg-info
+        # distributions respectively
+        checkLists([], _yield_distributions(False, False))
+
+        found = [(dist.name, dist.metadata['Version'])
+                 for dist in _yield_distributions(False, True)
+                 if dist.path.startswith(self.fake_dists_path)]
+        checkLists(eggs, found)
+
+        found = [(dist.name, dist.metadata['Version'])
+                 for dist in _yield_distributions(True, False)
+                 if dist.path.startswith(self.fake_dists_path)]
+        checkLists(dists, found)
+
+        found = [(dist.name, dist.metadata['Version'])
+                 for dist in _yield_distributions(True, True)
+                 if dist.path.startswith(self.fake_dists_path)]
+        checkLists(dists + eggs, found)
+
+
+def test_suite():
+    """Return a suite with all the test cases defined in this module."""
+    suite = unittest.TestSuite()
+    load = unittest.defaultTestLoader.loadTestsFromTestCase
+    suite.addTest(load(TestDistribution))
+    suite.addTest(load(TestEggInfoDistribution))
+    suite.addTest(load(TestDatabase))
+    return suite
+
+
+if __name__ == "__main__":
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_depgraph.py b/Lib/packaging/tests/test_depgraph.py
new file mode 100644
index 0000000..9271a7b
--- /dev/null
+++ b/Lib/packaging/tests/test_depgraph.py
@@ -0,0 +1,301 @@
+"""Tests for packaging.depgraph """
+import io
+import os
+import re
+import sys
+import packaging.database
+from packaging import depgraph
+
+from packaging.tests import unittest, support
+
+
class DepGraphTestCase(support.LoggingCatcher,
                       unittest.TestCase):
    """Tests for packaging.depgraph, run against the fake_dists fixture."""

    DISTROS_DIST = ('choxie', 'grammar', 'towel-stuff')
    DISTROS_EGG = ('bacon', 'banana', 'strawberry', 'cheese')
    # 'nut' has a bogus version string in the fixture
    BAD_EGGS = ('nut',)

    # matches one edge line of the dot output: "a" -> "b" [label="c"]
    EDGE = re.compile(
           r'"(?P<from>.*)" -> "(?P<to>.*)" \[label="(?P<label>.*)"\]')

    def checkLists(self, l1, l2):
        """Compare two lists without taking the order into consideration."""
        self.assertListEqual(sorted(l1), sorted(l2))

    def setUp(self):
        super(DepGraphTestCase, self).setUp()
        # make the fake_dists fixture visible to packaging.database
        path = os.path.join(os.path.dirname(__file__), 'fake_dists')
        path = os.path.abspath(path)
        sys.path.insert(0, path)
        self.addCleanup(sys.path.remove, path)
        self.addCleanup(packaging.database.enable_cache)
        packaging.database.disable_cache()

    def _get_dists(self, names, use_egg_info=False):
        """Return the distributions for *names*, failing if any is missing."""
        dists = []
        for name in names:
            dist = packaging.database.get_distribution(
                name, use_egg_info=use_egg_info)
            self.assertIsNotNone(dist)
            dists.append(dist)
        return dists

    def _parse_edges(self, lines):
        """Parse dot edge lines into (from, to, label) tuples.

        Fails the test if a line does not look like an edge.
        """
        matches = []
        for line in lines:
            match = self.EDGE.match(line.strip())
            self.assertIsNotNone(match)
            matches.append(match.groups())
        return matches

    def test_generate_graph(self):
        choxie, grammar, towel = self._get_dists(self.DISTROS_DIST)

        graph = depgraph.generate_graph([choxie, grammar, towel])

        deps = [(x.name, y) for x, y in graph.adjacency_list[choxie]]
        self.checkLists([('towel-stuff', 'towel-stuff (0.1)')], deps)
        self.assertIn(choxie, graph.reverse_list[towel])
        # 'nut' is not importable as a dist here, so it's reported missing
        self.checkLists(graph.missing[choxie], ['nut'])

        deps = [(x.name, y) for x, y in graph.adjacency_list[grammar]]
        self.checkLists([], deps)
        self.checkLists(graph.missing[grammar], ['truffles (>=1.2)'])

        deps = [(x.name, y) for x, y in graph.adjacency_list[towel]]
        self.checkLists([], deps)
        self.checkLists(graph.missing[towel], ['bacon (<=0.2)'])

    def test_generate_graph_egg(self):
        # with egg-info dists included, previously missing requirements
        # are resolved against the egg distributions
        dists = self._get_dists(self.DISTROS_DIST + self.DISTROS_EGG,
                                use_egg_info=True)
        choxie, grammar, towel, bacon, banana, strawberry, cheese = dists

        graph = depgraph.generate_graph(dists)

        deps = [(x.name, y) for x, y in graph.adjacency_list[choxie]]
        self.checkLists([('towel-stuff', 'towel-stuff (0.1)')], deps)
        self.assertIn(choxie, graph.reverse_list[towel])
        self.checkLists(graph.missing[choxie], ['nut'])

        deps = [(x.name, y) for x, y in graph.adjacency_list[grammar]]
        self.checkLists([('bacon', 'truffles (>=1.2)')], deps)
        self.checkLists(graph.missing[grammar], [])
        self.assertIn(grammar, graph.reverse_list[bacon])

        deps = [(x.name, y) for x, y in graph.adjacency_list[towel]]
        self.checkLists([('bacon', 'bacon (<=0.2)')], deps)
        self.checkLists(graph.missing[towel], [])
        self.assertIn(towel, graph.reverse_list[bacon])

        deps = [(x.name, y) for x, y in graph.adjacency_list[bacon]]
        self.checkLists([], deps)
        self.checkLists(graph.missing[bacon], [])

        deps = [(x.name, y) for x, y in graph.adjacency_list[banana]]
        self.checkLists([('strawberry', 'strawberry (>=0.5)')], deps)
        self.checkLists(graph.missing[banana], [])
        self.assertIn(banana, graph.reverse_list[strawberry])

        deps = [(x.name, y) for x, y in graph.adjacency_list[strawberry]]
        self.checkLists([], deps)
        self.checkLists(graph.missing[strawberry], [])

        deps = [(x.name, y) for x, y in graph.adjacency_list[cheese]]
        self.checkLists([], deps)
        self.checkLists(graph.missing[cheese], [])

    def test_dependent_dists(self):
        dists = self._get_dists(self.DISTROS_DIST)
        choxie, grammar, towel = dists

        deps = [d.name for d in depgraph.dependent_dists(dists, choxie)]
        self.checkLists([], deps)

        deps = [d.name for d in depgraph.dependent_dists(dists, grammar)]
        self.checkLists([], deps)

        deps = [d.name for d in depgraph.dependent_dists(dists, towel)]
        self.checkLists(['choxie'], deps)

    def test_dependent_dists_egg(self):
        dists = self._get_dists(self.DISTROS_DIST + self.DISTROS_EGG,
                                use_egg_info=True)
        choxie, grammar, towel, bacon, banana, strawberry, cheese = dists

        deps = [d.name for d in depgraph.dependent_dists(dists, choxie)]
        self.checkLists([], deps)

        deps = [d.name for d in depgraph.dependent_dists(dists, grammar)]
        self.checkLists([], deps)

        deps = [d.name for d in depgraph.dependent_dists(dists, towel)]
        self.checkLists(['choxie'], deps)

        # transitive dependents are included: choxie needs towel-stuff,
        # towel-stuff needs bacon
        deps = [d.name for d in depgraph.dependent_dists(dists, bacon)]
        self.checkLists(['choxie', 'towel-stuff', 'grammar'], deps)

        deps = [d.name for d in depgraph.dependent_dists(dists, strawberry)]
        self.checkLists(['banana'], deps)

        deps = [d.name for d in depgraph.dependent_dists(dists, cheese)]
        self.checkLists([], deps)

    def test_graph_to_dot(self):
        expected = (
            ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
            ('grammar', 'bacon', 'truffles (>=1.2)'),
            ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
            ('banana', 'strawberry', 'strawberry (>=0.5)'),
        )

        dists = self._get_dists(self.DISTROS_DIST + self.DISTROS_EGG,
                                use_egg_info=True)

        graph = depgraph.generate_graph(dists)
        buf = io.StringIO()
        depgraph.graph_to_dot(graph, buf)
        buf.seek(0)
        lines = buf.readlines()
        # skip the opening "digraph ... {" and closing "}" lines
        matches = self._parse_edges(lines[1:-1])

        self.checkLists(matches, expected)

    def test_graph_disconnected_to_dot(self):
        dependencies_expected = (
            ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
            ('grammar', 'bacon', 'truffles (>=1.2)'),
            ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
            ('banana', 'strawberry', 'strawberry (>=0.5)'),
        )
        disconnected_expected = ('cheese', 'bacon', 'strawberry')

        dists = self._get_dists(self.DISTROS_DIST + self.DISTROS_EGG,
                                use_egg_info=True)

        graph = depgraph.generate_graph(dists)
        buf = io.StringIO()
        depgraph.graph_to_dot(graph, buf, skip_disconnected=False)
        buf.seek(0)
        lines = buf.readlines()

        dependencies_lines = []
        disconnected_lines = []

        # First sort output lines into dependencies and disconnected lines.
        # We also skip the attribute lines, and don't include the "{" and "}"
        # lines.
        disconnected_active = False
        for line in lines[1:-1]:  # skip first and last line
            if line.startswith('subgraph disconnected'):
                disconnected_active = True
                continue
            if line.startswith('}') and disconnected_active:
                disconnected_active = False
                continue

            if disconnected_active:
                # skip the 'label = "Disconnected"', etc. attribute lines
                if ' = ' not in line:
                    disconnected_lines.append(line)
            else:
                dependencies_lines.append(line)

        dependencies_matches = self._parse_edges(dependencies_lines)

        # disconnected nodes are emitted as bare quoted names
        disconnected_matches = [line.rstrip('\n').strip('"')
                                for line in disconnected_lines]

        self.checkLists(dependencies_matches, dependencies_expected)
        self.checkLists(disconnected_matches, disconnected_expected)

    def test_graph_bad_version_to_dot(self):
        # a dist with an unparseable version ('nut') must not break the
        # dot output, and must not add edges
        expected = (
            ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
            ('grammar', 'bacon', 'truffles (>=1.2)'),
            ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
            ('banana', 'strawberry', 'strawberry (>=0.5)'),
        )

        dists = self._get_dists(
            self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS,
            use_egg_info=True)

        graph = depgraph.generate_graph(dists)
        buf = io.StringIO()
        depgraph.graph_to_dot(graph, buf)
        buf.seek(0)
        lines = buf.readlines()
        matches = self._parse_edges(lines[1:-1])

        self.checkLists(matches, expected)

    def test_repr(self):
        # smoke test: repr of a populated graph is non-empty
        dists = self._get_dists(
            self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS,
            use_egg_info=True)

        graph = depgraph.generate_graph(dists)
        self.assertTrue(repr(graph))

    def test_main(self):
        # depgraph.main prints the graph of the current installation;
        # capture stdout and neutralize sys.argv while it runs
        tempout = io.StringIO()
        old = sys.stdout
        sys.stdout = tempout
        oldargv = sys.argv[:]
        sys.argv[:] = ['script.py']
        try:
            try:
                depgraph.main()
            except SystemExit:
                pass
        finally:
            sys.stdout = old
            sys.argv[:] = oldargv

        # checks what main did XXX could do more here
        tempout.seek(0)
        res = tempout.read()
        self.assertIn('towel', res)
+
+
def test_suite():
    """Return a suite holding every test from DepGraphTestCase."""
    return unittest.defaultTestLoader.loadTestsFromTestCase(DepGraphTestCase)


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_dist.py b/Lib/packaging/tests/test_dist.py
new file mode 100644
index 0000000..77dab71
--- /dev/null
+++ b/Lib/packaging/tests/test_dist.py
@@ -0,0 +1,445 @@
+"""Tests for packaging.dist."""
+import os
+import io
+import sys
+import logging
+import textwrap
+import packaging.dist
+
+from packaging.dist import Distribution
+from packaging.command import set_command
+from packaging.command.cmd import Command
+from packaging.errors import PackagingModuleError, PackagingOptionError
+from packaging.tests import TESTFN, captured_stdout
+from packaging.tests import support, unittest
+from packaging.tests.support import create_distribution
+
+
class test_dist(Command):
    """Sample packaging extension command."""

    # option table entries: (long name, short name, help text); the
    # trailing '=' means the option takes an argument
    user_options = [
        ("sample-option=", "S", "help text"),
        ]

    def initialize_options(self):
        # default value before any config file / command line is parsed
        self.sample_option = None

    def finalize_options(self):
        # nothing to post-process for this sample command
        pass
+
+
class DistributionTestCase(support.TempdirManager,
                           support.LoggingCatcher,
                           support.EnvironRestorer,
                           unittest.TestCase):
    """Tests for packaging.dist.Distribution: attribute validation,
    config file discovery and command hook parsing/execution."""

    # EnvironRestorer saves/restores these variables around each test
    restore_environ = ['HOME']

    def setUp(self):
        super(DistributionTestCase, self).setUp()
        # keep both the sys.argv object and a copy of its contents so
        # tearDown can restore identity as well as value
        self.argv = sys.argv, sys.argv[:]
        del sys.argv[1:]

    def tearDown(self):
        sys.argv = self.argv[0]
        sys.argv[:] = self.argv[1]
        super(DistributionTestCase, self).tearDown()

    def test_debug_mode(self):
        # parsing config files should print nothing, whether or not the
        # module-level DEBUG flag is set
        self.addCleanup(os.unlink, TESTFN)
        with open(TESTFN, "w") as f:
            f.write("[global]\n")
            f.write("command_packages = foo.bar, splat")

        files = [TESTFN]
        sys.argv.append("build")
        __, stdout = captured_stdout(create_distribution, files)
        self.assertEqual(stdout, '')
        packaging.dist.DEBUG = True
        try:
            __, stdout = captured_stdout(create_distribution, files)
            self.assertEqual(stdout, '')
        finally:
            # always reset the module-level flag
            packaging.dist.DEBUG = False

    def test_write_pkg_file(self):
        # Check Metadata handling of Unicode fields
        tmp_dir = self.mkdtemp()
        my_file = os.path.join(tmp_dir, 'f')
        cls = Distribution

        dist = cls(attrs={'author': 'Mister Café',
                          'name': 'my.package',
                          'maintainer': 'Café Junior',
                          'summary': 'Café torréfié',
                          'description': 'Héhéhé'})

        # let's make sure the file can be written
        # with Unicode fields. they are encoded with
        # PKG_INFO_ENCODING
        with open(my_file, 'w') as fp:
            dist.metadata.write_file(fp)

        # regular ascii is of course always usable
        dist = cls(attrs={'author': 'Mister Cafe',
                          'name': 'my.package',
                          'maintainer': 'Cafe Junior',
                          'summary': 'Cafe torrefie',
                          'description': 'Hehehe'})

        with open(my_file, 'w') as fp:
            dist.metadata.write_file(fp)

    def test_bad_attr(self):
        # an unknown keyword argument only logs a warning, it is not fatal
        Distribution(attrs={'author': 'xxx',
                            'name': 'xxx',
                            'version': '1.2',
                            'url': 'xxxx',
                            'badoptname': 'xxx'})
        logs = self.get_logs(logging.WARNING)
        self.assertEqual(1, len(logs))
        self.assertIn('unknown argument', logs[0])

    def test_bad_version(self):
        # an invalid version string only logs a warning, it is not fatal
        Distribution(attrs={'author': 'xxx',
                            'name': 'xxx',
                            'version': 'xxx',
                            'url': 'xxxx'})
        logs = self.get_logs(logging.WARNING)
        self.assertEqual(1, len(logs))
        self.assertIn('not a valid version', logs[0])

    def test_empty_options(self):
        # an empty options dictionary should not stay in the
        # list of attributes
        Distribution(attrs={'author': 'xxx',
                            'name': 'xxx',
                            'version': '1.2',
                            'url': 'xxxx',
                            'options': {}})

        self.assertEqual([], self.get_logs(logging.WARNING))

    def test_non_empty_options(self):
        # TODO: how to actually use options is not documented except
        # for a few cryptic comments in dist.py.  If this is to stay
        # in the public API, it deserves some better documentation.

        # Here is an example of how it's used out there:
        # http://svn.pythonmac.org/py2app/py2app/trunk/doc/
        # index.html#specifying-customizations
        dist = Distribution(attrs={'author': 'xxx',
                                   'name': 'xxx',
                                   'version': 'xxx',
                                   'url': 'xxxx',
                                   'options': {'sdist': {'owner': 'root'}}})

        self.assertIn('owner', dist.get_option_dict('sdist'))

    def test_finalize_options(self):
        """Check that finalize_options splits comma-separated values."""
        attrs = {'keywords': 'one,two',
                 'platform': 'one,two'}

        dist = Distribution(attrs=attrs)
        dist.finalize_options()

        # finalize_option splits platforms and keywords
        self.assertEqual(dist.metadata['platform'], ['one', 'two'])
        self.assertEqual(dist.metadata['keywords'], ['one', 'two'])

    def test_find_config_files_disable(self):
        # Bug #1180: Allow users to disable their own config file.
        temp_home = self.mkdtemp()
        if os.name == 'posix':
            user_filename = os.path.join(temp_home, ".pydistutils.cfg")
        else:
            user_filename = os.path.join(temp_home, "pydistutils.cfg")

        with open(user_filename, 'w') as f:
            f.write('[distutils2]\n')

        def _expander(path):
            # fake expanduser so the user config lives in our temp dir
            return temp_home

        old_expander = os.path.expanduser
        os.path.expanduser = _expander
        try:
            d = packaging.dist.Distribution()
            all_files = d.find_config_files()

            d = packaging.dist.Distribution(attrs={'script_args':
                                                   ['--no-user-cfg']})
            files = d.find_config_files()
        finally:
            os.path.expanduser = old_expander

        # make sure --no-user-cfg disables the user cfg file
        self.assertEqual((len(all_files) - 1), len(files))

    def test_special_hooks_parsing(self):
        # 'pre-hook.<alias>' entries from several config files must be
        # merged into one dict keyed by alias
        temp_home = self.mkdtemp()
        config_files = [os.path.join(temp_home, "config1.cfg"),
                        os.path.join(temp_home, "config2.cfg")]

        # Store two aliased hooks in config files
        self.write_file((temp_home, "config1.cfg"),
                        '[test_dist]\npre-hook.a = type')
        self.write_file((temp_home, "config2.cfg"),
                        '[test_dist]\npre-hook.b = type')

        set_command('packaging.tests.test_dist.test_dist')
        dist = create_distribution(config_files)
        cmd = dist.get_command_obj("test_dist")
        self.assertEqual(cmd.pre_hook, {"a": 'type', "b": 'type'})

    def test_hooks_get_run(self):
        # pre/post hooks configured in a config file must run around the
        # command, in the order: finalize, pre, run, post
        temp_home = self.mkdtemp()
        config_file = os.path.join(temp_home, "config1.cfg")
        hooks_module = os.path.join(temp_home, "testhooks.py")

        self.write_file(config_file, textwrap.dedent('''
            [test_dist]
            pre-hook.test = testhooks.log_pre_call
            post-hook.test = testhooks.log_post_call'''))

        self.write_file(hooks_module, textwrap.dedent('''
        record = []

        def log_pre_call(cmd):
            record.append('pre-%s' % cmd.get_command_name())

        def log_post_call(cmd):
            record.append('post-%s' % cmd.get_command_name())
        '''))

        set_command('packaging.tests.test_dist.test_dist')
        d = create_distribution([config_file])
        cmd = d.get_command_obj("test_dist")

        # prepare the call recorders
        sys.path.append(temp_home)
        self.addCleanup(sys.path.remove, temp_home)
        from testhooks import record

        # replace the command's methods so they log into 'record' too
        cmd.run = lambda: record.append('run')
        cmd.finalize_options = lambda: record.append('finalize')

        d.run_command('test_dist')

        self.assertEqual(record, ['finalize',
                                  'pre-test_dist',
                                  'run',
                                  'post-test_dist'])

    def test_hooks_importable(self):
        # a hook pointing to a nonexistent dotted name is a module error
        temp_home = self.mkdtemp()
        config_file = os.path.join(temp_home, "config1.cfg")

        self.write_file(config_file, textwrap.dedent('''
            [test_dist]
            pre-hook.test = nonexistent.dotted.name'''))

        set_command('packaging.tests.test_dist.test_dist')
        d = create_distribution([config_file])
        cmd = d.get_command_obj("test_dist")
        cmd.ensure_finalized()

        self.assertRaises(PackagingModuleError, d.run_command, 'test_dist')

    def test_hooks_callable(self):
        # a hook resolving to a non-callable object is an option error
        temp_home = self.mkdtemp()
        config_file = os.path.join(temp_home, "config1.cfg")

        self.write_file(config_file, textwrap.dedent('''
            [test_dist]
            pre-hook.test = packaging.tests.test_dist.__doc__'''))

        set_command('packaging.tests.test_dist.test_dist')
        d = create_distribution([config_file])
        cmd = d.get_command_obj("test_dist")
        cmd.ensure_finalized()

        self.assertRaises(PackagingOptionError, d.run_command, 'test_dist')
+
+
class MetadataTestCase(support.TempdirManager,
                       support.LoggingCatcher,
                       unittest.TestCase):
    """Tests for the metadata-related behavior of Distribution."""

    def setUp(self):
        super(MetadataTestCase, self).setUp()
        # keep both the sys.argv object and a copy of its contents so
        # tearDown can restore identity as well as value
        self.argv = sys.argv, sys.argv[:]

    def tearDown(self):
        sys.argv = self.argv[0]
        sys.argv[:] = self.argv[1]
        super(MetadataTestCase, self).tearDown()

    def format_metadata(self, dist):
        """Return the metadata of *dist* serialized as a PKG-INFO string."""
        sio = io.StringIO()
        dist.metadata.write_file(sio)
        return sio.getvalue()

    def test_simple_metadata(self):
        # minimal attrs: metadata version stays 1.0 and none of the
        # optional dependency fields appear
        attrs = {"name": "package",
                 "version": "1.0"}
        dist = Distribution(attrs)
        meta = self.format_metadata(dist)
        self.assertIn("Metadata-Version: 1.0", meta)
        self.assertNotIn("provides:", meta.lower())
        self.assertNotIn("requires:", meta.lower())
        self.assertNotIn("obsoletes:", meta.lower())

    def test_provides_dist(self):
        attrs = {"name": "package",
                 "version": "1.0",
                 "provides_dist": ["package", "package.sub"]}
        dist = Distribution(attrs)
        self.assertEqual(dist.metadata['Provides-Dist'],
                         ["package", "package.sub"])
        meta = self.format_metadata(dist)
        # using a PEP 345 field bumps the metadata version to 1.2
        self.assertIn("Metadata-Version: 1.2", meta)
        self.assertNotIn("requires:", meta.lower())
        self.assertNotIn("obsoletes:", meta.lower())

    def _test_provides_illegal(self):
        # XXX to do: check the versions
        self.assertRaises(ValueError, Distribution,
                          {"name": "package",
                           "version": "1.0",
                           "provides_dist": ["my.pkg (splat)"]})

    def test_requires_dist(self):
        attrs = {"name": "package",
                 "version": "1.0",
                 "requires_dist": ["other", "another (==1.0)"]}
        dist = Distribution(attrs)
        self.assertEqual(dist.metadata['Requires-Dist'],
                         ["other", "another (==1.0)"])
        meta = self.format_metadata(dist)
        self.assertIn("Metadata-Version: 1.2", meta)
        self.assertNotIn("provides:", meta.lower())
        self.assertIn("Requires-Dist: other", meta)
        self.assertIn("Requires-Dist: another (==1.0)", meta)
        self.assertNotIn("obsoletes:", meta.lower())

    def _test_requires_illegal(self):
        # XXX
        self.assertRaises(ValueError, Distribution,
                          {"name": "package",
                           "version": "1.0",
                           "requires": ["my.pkg (splat)"]})

    def test_obsoletes_dist(self):
        attrs = {"name": "package",
                 "version": "1.0",
                 "obsoletes_dist": ["other", "another (<1.0)"]}
        dist = Distribution(attrs)
        self.assertEqual(dist.metadata['Obsoletes-Dist'],
                         ["other", "another (<1.0)"])
        meta = self.format_metadata(dist)
        self.assertIn("Metadata-Version: 1.2", meta)
        self.assertNotIn("provides:", meta.lower())
        self.assertNotIn("requires:", meta.lower())
        self.assertIn("Obsoletes-Dist: other", meta)
        self.assertIn("Obsoletes-Dist: another (<1.0)", meta)

    def _test_obsoletes_illegal(self):
        # XXX
        self.assertRaises(ValueError, Distribution,
                          {"name": "package",
                           "version": "1.0",
                           "obsoletes": ["my.pkg (splat)"]})

    def test_custom_pydistutils(self):
        # fixes #2166
        # make sure pydistutils.cfg is found
        if os.name == 'posix':
            user_filename = ".pydistutils.cfg"
        else:
            user_filename = "pydistutils.cfg"

        temp_dir = self.mkdtemp()
        user_filename = os.path.join(temp_dir, user_filename)
        with open(user_filename, 'w') as f:
            f.write('.')

        dist = Distribution()

        # this test pokes at os.environ['HOME']; restore it afterwards so
        # the change does not leak into other tests (unlike
        # DistributionTestCase, this class does not use EnvironRestorer)
        old_home = os.environ.get('HOME')

        def restore_home():
            if old_home is None:
                os.environ.pop('HOME', None)
            else:
                os.environ['HOME'] = old_home

        self.addCleanup(restore_home)

        # linux-style
        if sys.platform in ('linux', 'darwin'):
            os.environ['HOME'] = temp_dir
            files = dist.find_config_files()
            self.assertIn(user_filename, files)

        # win32-style
        if sys.platform == 'win32':
            # home drive should be found
            os.environ['HOME'] = temp_dir
            files = dist.find_config_files()
            self.assertIn(user_filename, files)

    def test_show_help(self):
        # smoke test, just makes sure some help is displayed
        dist = Distribution()
        sys.argv = []
        dist.help = True
        dist.script_name = 'setup.py'
        __, stdout = captured_stdout(dist.parse_command_line)
        output = [line for line in stdout.split('\n')
                  if line.strip() != '']
        self.assertGreater(len(output), 0)

    def test_description(self):
        # a multi-line description survives the PKG-INFO round trip once
        # the continuation-line markers ('       |') are removed
        desc = textwrap.dedent("""\
        example::
              We start here
            and continue here
          and end here.""")
        attrs = {"name": "package",
                 "version": "1.0",
                 "description": desc}

        dist = packaging.dist.Distribution(attrs)
        meta = self.format_metadata(dist)
        meta = meta.replace('\n' + 7 * ' ' + '|', '\n')
        self.assertIn(desc, meta)

    def test_read_metadata(self):
        # metadata written to a file can be read back unchanged
        attrs = {"name": "package",
                 "version": "1.0",
                 "description": "desc",
                 "summary": "xxx",
                 "download_url": "http://example.com",
                 "keywords": ['one', 'two'],
                 "requires_dist": ['foo']}

        dist = Distribution(attrs)
        metadata = dist.metadata

        # write it then reloads it
        PKG_INFO = io.StringIO()
        metadata.write_file(PKG_INFO)
        PKG_INFO.seek(0)

        metadata.read_file(PKG_INFO)
        self.assertEqual(metadata['name'], "package")
        self.assertEqual(metadata['version'], "1.0")
        self.assertEqual(metadata['summary'], "xxx")
        self.assertEqual(metadata['download_url'], 'http://example.com')
        self.assertEqual(metadata['keywords'], ['one', 'two'])
        self.assertEqual(metadata['platform'], [])
        self.assertEqual(metadata['obsoletes'], [])
        self.assertEqual(metadata['requires-dist'], ['foo'])
+
+
def test_suite():
    """Assemble the suite for this module's two test case classes."""
    load = unittest.defaultTestLoader.loadTestsFromTestCase
    suite = unittest.TestSuite()
    for case in (DistributionTestCase, MetadataTestCase):
        suite.addTest(load(case))
    return suite


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_extension.py b/Lib/packaging/tests/test_extension.py
new file mode 100644
index 0000000..41182e5
--- /dev/null
+++ b/Lib/packaging/tests/test_extension.py
@@ -0,0 +1,15 @@
+"""Tests for packaging.extension."""
+import os
+
+from packaging.compiler.extension import Extension
+from packaging.tests import unittest
+
class ExtensionTestCase(unittest.TestCase):

    # placeholder: no behavior of Extension is exercised yet
    pass
+
def test_suite():
    """Return the (currently empty) extension test suite."""
    return unittest.defaultTestLoader.loadTestsFromTestCase(ExtensionTestCase)


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_install.py b/Lib/packaging/tests/test_install.py
new file mode 100644
index 0000000..2c51d19
--- /dev/null
+++ b/Lib/packaging/tests/test_install.py
@@ -0,0 +1,353 @@
+"""Tests for the packaging.install module."""
+
+import os
+from tempfile import mkstemp
+from packaging import install
+from packaging.pypi.xmlrpc import Client
+from packaging.metadata import Metadata
+
+from packaging.tests.support import LoggingCatcher, TempdirManager, unittest
+from packaging.tests.pypi_server import use_xmlrpc_server
+
+
class InstalledDist:
    """Fake distribution representing a project already installed on the
    system (name, version and Requires-Dist metadata only)."""

    def __init__(self, name, version, deps):
        self.name = name
        self.version = version
        # mirror the identifying fields into a real Metadata object,
        # as the installer reads them from there
        metadata = Metadata()
        metadata['Name'] = name
        metadata['Version'] = version
        metadata['Requires-Dist'] = deps
        self.metadata = metadata

    def __repr__(self):
        return '<InstalledDist %s>' % self.metadata['Name']
+
+
class ToInstallDist:
    """Fake distribution that will be installed.

    If *files* is true, three real temporary files are created so that
    installation/removal code has something to work with; call
    :meth:`_unlink_installed_files` to clean them up afterwards.
    """

    def __init__(self, files=False):
        self._files = files
        self.install_called = False
        self.install_called_with = {}
        self.uninstall_called = False
        self._real_files = []
        self.name = "fake"
        self.version = "fake"
        if files:
            for _ in range(3):
                fd, path = mkstemp()
                # close the descriptor right away: only the path is used,
                # and keeping it open leaked one fd per temporary file
                os.close(fd)
                # keep the (fd, path) tuple shape so f[1] indexing below
                # stays compatible
                self._real_files.append((fd, path))

    def _unlink_installed_files(self):
        """Remove the temporary files created in __init__."""
        if self._files:
            for f in self._real_files:
                os.unlink(f[1])

    def list_installed_files(self, **args):
        """Return the list of file paths, or None if created without files."""
        if self._files:
            return [f[1] for f in self._real_files]

    def get_install(self, **args):
        return self.list_installed_files()
+
+
class MagicMock:
    """Minimal hand-rolled mock: records calls and can return a canned
    value or raise.

    *raise_exception* may be a boolean (raise on every call) or a
    sequence of booleans (raise on the calls whose position is true).
    """

    def __init__(self, return_value=None, raise_exception=False):
        self.called = False
        self._times_called = 0
        self._called_with = []
        self._return_value = return_value
        self._raise = raise_exception

    def __call__(self, *args, **kwargs):
        self.called = True
        self._times_called += 1
        self._called_with.append((args, kwargs))
        if self._raise:
            if not hasattr(self._raise, '__iter__'):
                # plain truthy flag: always raise
                raise Exception
            if self._raise[self._times_called - 1]:
                # per-call schedule: raise on this call only
                raise Exception
        return self._return_value

    def called_with(self, *args, **kwargs):
        """Return True if the mock was ever called with these arguments."""
        return (args, kwargs) in self._called_with
+
+
def get_installed_dists(dists):
    """Return a list of fake installed dists built from
    (name, version, deps) tuples."""
    return [InstalledDist(name, version, deps)
            for name, version, deps in dists]
+
+
class TestInstall(LoggingCatcher, TempdirManager, unittest.TestCase):
    """Tests for dependency resolution and the install machinery."""

    def _get_client(self, server, *args, **kwargs):
        """Return an XML-RPC client bound to the test server."""
        return Client(server.full_address, *args, **kwargs)

    def _get_results(self, output):
        """Return ([installed], [removed], [conflicting]) (name, version)
        pairs extracted from a get_infos result."""
        installed = [(o.name, str(o.version)) for o in output['install']]
        remove = [(o.name, str(o.version)) for o in output['remove']]
        conflict = [(o.name, str(o.version)) for o in output['conflict']]
        return installed, remove, conflict

    @use_xmlrpc_server()
    def test_existing_deps(self, server):
        # Test that the installer gets the dependencies from the metadata
        # and asks the index for those dependencies.
        # In this test case, we have choxie that is dependent on towel-stuff
        # 0.1, which is in turn dependent on bacon <= 0.2:
        # choxie -> towel-stuff -> bacon.
        # Each release metadata is not provided in metadata 1.2.
        client = self._get_client(server)
        archive_path = '%s/distribution.tar.gz' % server.full_address
        server.xmlrpc.set_distributions([
            {'name': 'choxie',
             'version': '2.0.0.9',
             'requires_dist': ['towel-stuff (0.1)'],
             'url': archive_path},
            {'name': 'towel-stuff',
             'version': '0.1',
             'requires_dist': ['bacon (<= 0.2)'],
             'url': archive_path},
            {'name': 'bacon',
             'version': '0.1',
             'requires_dist': [],
             'url': archive_path},
            ])
        installed = get_installed_dists([('bacon', '0.1', [])])
        output = install.get_infos("choxie", index=client,
                                   installed=installed)

        # we don't have to install bacon as it's already installed
        # system-wide
        self.assertEqual(0, len(output['remove']))
        self.assertEqual(2, len(output['install']))
        readable_output = [(o.name, str(o.version))
                           for o in output['install']]
        self.assertIn(('towel-stuff', '0.1'), readable_output)
        self.assertIn(('choxie', '2.0.0.9'), readable_output)

    @use_xmlrpc_server()
    def test_upgrade_existing_deps(self, server):
        client = self._get_client(server)
        archive_path = '%s/distribution.tar.gz' % server.full_address
        server.xmlrpc.set_distributions([
            {'name': 'choxie',
             'version': '2.0.0.9',
             'requires_dist': ['towel-stuff (0.1)'],
             'url': archive_path},
            {'name': 'towel-stuff',
             'version': '0.1',
             'requires_dist': ['bacon (>= 0.2)'],
             'url': archive_path},
            {'name': 'bacon',
             'version': '0.2',
             'requires_dist': [],
             'url': archive_path},
            ])

        output = install.get_infos("choxie", index=client,
                     installed=get_installed_dists([('bacon', '0.1', [])]))
        installed = [(o.name, str(o.version)) for o in output['install']]

        # we need bacon 0.2, but 0.1 is installed.
        # So we expect to remove 0.1 and to install 0.2 instead.
        remove = [(o.name, str(o.version)) for o in output['remove']]
        self.assertIn(('choxie', '2.0.0.9'), installed)
        self.assertIn(('towel-stuff', '0.1'), installed)
        self.assertIn(('bacon', '0.2'), installed)
        self.assertIn(('bacon', '0.1'), remove)
        self.assertEqual(0, len(output['conflict']))

    @use_xmlrpc_server()
    def test_conflicts(self, server):
        # Tests that conflicts are detected
        client = self._get_client(server)
        archive_path = '%s/distribution.tar.gz' % server.full_address

        # choxie depends on towel-stuff, which depends on bacon.
        server.xmlrpc.set_distributions([
            {'name': 'choxie',
             'version': '2.0.0.9',
             'requires_dist': ['towel-stuff (0.1)'],
             'url': archive_path},
            {'name': 'towel-stuff',
             'version': '0.1',
             'requires_dist': ['bacon (>= 0.2)'],
             'url': archive_path},
            {'name': 'bacon',
             'version': '0.2',
             'requires_dist': [],
             'url': archive_path},
            ])

        # name, version, deps.
        already_installed = [('bacon', '0.1', []),
                             ('chicken', '1.1', ['bacon (0.1)'])]
        output = install.get_infos(
            'choxie', index=client,
            installed=get_installed_dists(already_installed))

        # we need bacon 0.2, but 0.1 is installed.
        # So we expect to remove 0.1 and to install 0.2 instead;
        # chicken still pins bacon 0.1, hence the conflict.
        installed, remove, conflict = self._get_results(output)
        self.assertIn(('choxie', '2.0.0.9'), installed)
        self.assertIn(('towel-stuff', '0.1'), installed)
        self.assertIn(('bacon', '0.2'), installed)
        self.assertIn(('bacon', '0.1'), remove)
        self.assertIn(('chicken', '1.1'), conflict)

    @use_xmlrpc_server()
    def test_installation_unexisting_project(self, server):
        # Test that the installer raises an exception if the project does
        # not exist.
        client = self._get_client(server)
        self.assertRaises(install.InstallationException,
                          install.get_infos,
                          'unexisting project', index=client)

    def test_move_files(self):
        # test that the files are really moved, and that the new path is
        # returned.
        path = self.mkdtemp()
        newpath = self.mkdtemp()
        files = [os.path.join(path, str(x)) for x in range(1, 20)]
        for f in files:
            open(f, 'a+').close()
        output = list(install._move_files(files, newpath))

        # check that output returns the list of old/new places
        # (_move_files appends the full old path to the destination prefix
        # -- presumably to keep moved trees unique; confirm against its
        # implementation)
        for f in files:
            self.assertIn((f, '%s%s' % (newpath, f)), output)

        # remove the files
        for f in [o[1] for o in output]:  # o[1] is the new place
            os.remove(f)

    def test_update_infos(self):
        # _update_infos must extend dict1's lists with dict2's, in place
        tests = [[
            {'foo': ['foobar', 'foo', 'baz'], 'baz': ['foo', 'foo']},
            {'foo': ['additional_content', 'yeah'], 'baz': ['test', 'foo']},
            {'foo': ['foobar', 'foo', 'baz', 'additional_content', 'yeah'],
             'baz': ['foo', 'foo', 'test', 'foo']},
        ]]

        for dict1, dict2, expect in tests:
            install._update_infos(dict1, dict2)
            for key in expect:
                self.assertEqual(expect[key], dict1[key])

    def test_install_dists_rollback(self):
        # if one of the distribution installations fails, call uninstall on
        # all installed distributions.

        old_install_dist = install._install_dist
        # save the attribute we actually mock below ('remove', not
        # 'uninstall', which the original version saved by mistake)
        old_remove = getattr(install, 'remove', None)

        install._install_dist = MagicMock(return_value=[],
                                          raise_exception=(False, True))
        install.remove = MagicMock()
        try:
            d1 = ToInstallDist()
            d2 = ToInstallDist()
            path = self.mkdtemp()
            self.assertRaises(Exception, install.install_dists, [d1, d2], path)
            self.assertTrue(install._install_dist.called_with(d1, path))
            self.assertTrue(install.remove.called)
        finally:
            install._install_dist = old_install_dist
            install.remove = old_remove

    def test_install_dists_success(self):
        old_install_dist = install._install_dist
        install._install_dist = MagicMock(return_value=[])
        try:
            # test that the install method is called on each distribution
            d1 = ToInstallDist()
            d2 = ToInstallDist()

            # should call install
            path = self.mkdtemp()
            install.install_dists([d1, d2], path)
            for dist in (d1, d2):
                self.assertTrue(install._install_dist.called_with(dist, path))
        finally:
            install._install_dist = old_install_dist

    def test_install_from_infos_conflict(self):
        # assert conflicts raise an exception
        self.assertRaises(install.InstallationConflict,
            install.install_from_infos,
            conflicts=[ToInstallDist()])

    def test_install_from_infos_remove_success(self):
        old_install_dists = install.install_dists
        install.install_dists = lambda x, y=None: None
        try:
            dists = []
            for i in range(2):
                dists.append(ToInstallDist(files=True))
            install.install_from_infos(remove=dists)

            # assert that the files have been removed
            for dist in dists:
                for f in dist.list_installed_files():
                    self.assertFalse(os.path.exists(f))
        finally:
            install.install_dists = old_install_dists

    def test_install_from_infos_remove_rollback(self):
        old_install_dist = install._install_dist
        old_uninstall = getattr(install, 'uninstall', None)

        install._install_dist = MagicMock(return_value=[],
                raise_exception=(False, True))
        install.uninstall = MagicMock()
        try:
            # assert that if an error occurs, the removed files are restored.
            remove = []
            for i in range(2):
                remove.append(ToInstallDist(files=True))
            to_install = [ToInstallDist(), ToInstallDist()]
            temp_dir = self.mkdtemp()

            self.assertRaises(Exception, install.install_from_infos,
                              install_path=temp_dir, install=to_install,
                              remove=remove)
            # assert that the files are back in the same place
            for dist in remove:
                for f in dist.list_installed_files():
                    self.assertTrue(os.path.exists(f))
                dist._unlink_installed_files()
        finally:
            # restore the patched attribute (_install_dist: the original
            # version restored 'install_dist' and left the mock in place)
            install._install_dist = old_install_dist
            install.uninstall = old_uninstall

    def test_install_from_infos_install_succes(self):
        old_install_dist = install._install_dist
        install._install_dist = MagicMock([])
        try:
            # assert that the distributions can be installed
            install_path = "my_install_path"
            to_install = [ToInstallDist(), ToInstallDist()]

            install.install_from_infos(install_path, install=to_install)
            for dist in to_install:
                # XXX the result of called_with is not asserted here
                install._install_dist.called_with(install_path)
        finally:
            install._install_dist = old_install_dist
+
+
def test_suite():
    """Return a suite containing the install tests."""
    suite = unittest.TestSuite()
    for klass in (TestInstall,):
        suite.addTest(unittest.makeSuite(klass))
    return suite

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_manifest.py b/Lib/packaging/tests/test_manifest.py
new file mode 100644
index 0000000..21a42c3
--- /dev/null
+++ b/Lib/packaging/tests/test_manifest.py
@@ -0,0 +1,72 @@
+"""Tests for packaging.manifest."""
+import os
+import logging
+from io import StringIO
+from packaging.manifest import Manifest
+
+from packaging.tests import unittest, support
+
# Template exercising comments, blank lines and a backslash line
# continuation; the referenced files are deliberately absent so that
# reading it produces "no files found matching" warnings.
_MANIFEST = """\
recursive-include foo *.py   # ok
# nothing here

#

recursive-include bar \\
  *.dat   *.txt
"""

# Template with plain file names only (the default "include" action).
_MANIFEST2 = """\
README
file1
"""
+
+
class ManifestTestCase(support.TempdirManager,
                       support.LoggingCatcher,
                       unittest.TestCase):
    """Tests for packaging.manifest.Manifest template reading."""

    def test_manifest_reader(self):
        # a template can be read from a path or from a file object,
        # and unmatched patterns are logged as warnings
        tmpdir = self.mkdtemp()
        MANIFEST = os.path.join(tmpdir, 'MANIFEST.in')
        with open(MANIFEST, 'w') as f:
            f.write(_MANIFEST)

        manifest = Manifest()
        manifest.read_template(MANIFEST)

        warnings = self.get_logs(logging.WARNING)
        # the manifest should have been read and 3 warnings issued
        # (we didn't provide the files)
        self.assertEqual(3, len(warnings))
        for warning in warnings:
            self.assertIn('no files found matching', warning)

        # reset logs for the next assert
        self.loghandler.flush()

        # manifest also accepts file-like objects
        with open(MANIFEST) as f:
            manifest.read_template(f)

        # the manifest should have been read again and 3 new warnings
        # issued; re-fetch the logs -- the original version re-checked the
        # stale list captured before the flush, so this path was never
        # actually verified
        warnings = self.get_logs(logging.WARNING)
        self.assertEqual(3, len(warnings))

    def test_default_actions(self):
        # plain file names in a template are treated as include actions
        tmpdir = self.mkdtemp()
        self.addCleanup(os.chdir, os.getcwd())
        os.chdir(tmpdir)
        self.write_file('README', 'xxx')
        self.write_file('file1', 'xxx')
        content = StringIO(_MANIFEST2)
        manifest = Manifest()
        manifest.read_template(content)
        self.assertEqual(['README', 'file1'], manifest.files)
+
+
def test_suite():
    """Return a suite with the manifest tests."""
    suite = unittest.makeSuite(ManifestTestCase)
    return suite

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_markers.py b/Lib/packaging/tests/test_markers.py
new file mode 100644
index 0000000..dec0429
--- /dev/null
+++ b/Lib/packaging/tests/test_markers.py
@@ -0,0 +1,71 @@
+"""Tests for packaging.markers."""
+import os
+import sys
+import platform
+from packaging.markers import interpret
+
+from packaging.tests import unittest
+from packaging.tests.support import LoggingCatcher
+
+
class MarkersTestCase(LoggingCatcher,
                      unittest.TestCase):
    """Tests for packaging.markers.interpret (PEP 345 environment
    markers)."""

    def test_interpret(self):
        # collect the real values of the current interpreter so the marker
        # strings below evaluate to True on any platform
        sys_platform = sys.platform
        version = sys.version.split()[0]
        os_name = os.name
        platform_version = platform.version()
        platform_machine = platform.machine()
        platform_python_implementation = platform.python_implementation()

        self.assertTrue(interpret("sys.platform == '%s'" % sys_platform))
        self.assertTrue(interpret(
            "sys.platform == '%s' or python_version == '2.4'" % sys_platform))
        self.assertTrue(interpret(
            "sys.platform == '%s' and python_full_version == '%s'" %
            (sys_platform, version)))
        self.assertTrue(interpret("'%s' == sys.platform" % sys_platform))
        self.assertTrue(interpret('os.name == "%s"' % os_name))
        self.assertTrue(interpret(
            'platform.version == "%s" and platform.machine == "%s"' %
            (platform_version, platform_machine)))
        self.assertTrue(interpret('platform.python_implementation == "%s"' %
            platform_python_implementation))

        # strings that must raise a syntax error: comparing two names, a
        # name with a bare number, two literals, garbage, the empty string,
        # a dangling operator, an unquoted version
        ops = ('os.name == os.name', 'os.name == 2', "'2' == '2'",
               'okpjonon', '', 'os.name ==', 'python_version == 2.4')
        for op in ops:
            self.assertRaises(SyntaxError, interpret, op)

        # combined operations ('and'/'or' chains of arbitrary length)
        OP = 'os.name == "%s"' % os_name
        AND = ' and '
        OR = ' or '
        self.assertTrue(interpret(OP + AND + OP))
        self.assertTrue(interpret(OP + AND + OP + AND + OP))
        self.assertTrue(interpret(OP + OR + OP))
        self.assertTrue(interpret(OP + OR + OP + OR + OP))

        # other comparison operators: !=, <, >, <=, >=, in, not in
        self.assertTrue(interpret("os.name != 'buuuu'"))
        self.assertTrue(interpret("python_version > '1.0'"))
        self.assertTrue(interpret("python_version < '5.0'"))
        self.assertTrue(interpret("python_version <= '5.0'"))
        self.assertTrue(interpret("python_version >= '1.0'"))
        self.assertTrue(interpret("'%s' in os.name" % os_name))
        self.assertTrue(interpret("'buuuu' not in os.name"))
        self.assertTrue(interpret(
            "'buuuu' not in os.name and '%s' in os.name" % os_name))

        # an explicit execution context overrides the real environment
        self.assertTrue(interpret('python_version == "0.1"',
                                  {'python_version': '0.1'}))
+
+
def test_suite():
    """Return a suite with the marker tests."""
    suite = unittest.makeSuite(MarkersTestCase)
    return suite

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_metadata.py b/Lib/packaging/tests/test_metadata.py
new file mode 100644
index 0000000..b8dc5d8
--- /dev/null
+++ b/Lib/packaging/tests/test_metadata.py
@@ -0,0 +1,279 @@
+"""Tests for packaging.metadata."""
+import os
+import sys
+import logging
+from io import StringIO
+
+from packaging.errors import (MetadataConflictError, MetadataMissingError,
+                              MetadataUnrecognizedVersionError)
+from packaging.metadata import Metadata, PKG_INFO_PREFERRED_VERSION
+
+from packaging.tests import unittest
+from packaging.tests.support import LoggingCatcher
+
+
class MetadataTestCase(LoggingCatcher,
                       unittest.TestCase):
    """Tests for packaging.metadata.Metadata: construction, file
    round-trips, environment markers, the mapping API and field/version
    checking."""

    def test_instantiation(self):
        # Metadata can be built from a path, a file object or a mapping,
        # but only from one source at a time
        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
        with open(PKG_INFO, 'r') as f:
            contents = f.read()
        fp = StringIO(contents)

        m = Metadata()
        self.assertRaises(MetadataUnrecognizedVersionError, m.items)

        m = Metadata(PKG_INFO)
        self.assertEqual(len(m.items()), 22)

        m = Metadata(fileobj=fp)
        self.assertEqual(len(m.items()), 22)

        m = Metadata(mapping=dict(name='Test', version='1.0'))
        self.assertEqual(len(m.items()), 11)

        d = dict(m.items())
        # passing more than one source must raise TypeError
        self.assertRaises(TypeError, Metadata,
                          PKG_INFO, fileobj=fp)
        self.assertRaises(TypeError, Metadata,
                          PKG_INFO, mapping=d)
        self.assertRaises(TypeError, Metadata,
                          fileobj=fp, mapping=d)
        self.assertRaises(TypeError, Metadata,
                          PKG_INFO, mapping=m, fileobj=fp)

    def test_metadata_read_write(self):
        # writing a metadata file and reading it back yields equal fields
        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
        metadata = Metadata(PKG_INFO)
        out = StringIO()
        metadata.write_file(out)
        out.seek(0)
        res = Metadata()
        res.read_file(out)
        for k in metadata:
            self.assertEqual(metadata[k], res[k])

    def test_metadata_markers(self):
        # see if we can be platform-aware
        # (the PKG-INFO fixture contains a %s placeholder for the platform)
        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
        with open(PKG_INFO, 'r') as f:
            content = f.read() % sys.platform
        metadata = Metadata(platform_dependent=True)

        metadata.read_file(StringIO(content))
        self.assertEqual(metadata['Requires-Dist'], ['bar'])
        metadata['Name'] = "baz; sys.platform == 'blah'"
        # FIXME is None or 'UNKNOWN' correct here?
        # where is that documented?
        self.assertEqual(metadata['Name'], None)

        # test with context
        context = {'sys.platform': 'okook'}
        metadata = Metadata(platform_dependent=True,
                                        execution_context=context)
        metadata.read_file(StringIO(content))
        self.assertEqual(metadata['Requires-Dist'], ['foo'])

    def test_description(self):
        # the multi-line Description field survives a write/read round-trip
        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
        with open(PKG_INFO, 'r') as f:
            content = f.read() % sys.platform
        metadata = Metadata()
        metadata.read_file(StringIO(content))

        # see if we can read the description now
        DESC = os.path.join(os.path.dirname(__file__), 'LONG_DESC.txt')
        with open(DESC) as f:
            wanted = f.read()
        self.assertEqual(wanted, metadata['Description'])

        # save the file somewhere and make sure we can read it back
        out = StringIO()
        metadata.write_file(out)
        out.seek(0)
        metadata.read_file(out)
        self.assertEqual(wanted, metadata['Description'])

    def test_mapping_api(self):
        # Metadata supports keys/values/items/update and iteration
        PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
        with open(PKG_INFO, 'r') as f:
            content = f.read() % sys.platform
        metadata = Metadata(fileobj=StringIO(content))
        self.assertIn('Version', metadata.keys())
        self.assertIn('0.5', metadata.values())
        self.assertIn(('Version', '0.5'), metadata.items())

        # update accepts both mappings and iterables of pairs
        metadata.update({'version': '0.6'})
        self.assertEqual(metadata['Version'], '0.6')
        metadata.update([('version', '0.7')])
        self.assertEqual(metadata['Version'], '0.7')

        self.assertEqual(list(metadata), list(metadata.keys()))

    def test_versions(self):
        # Metadata-Version is inferred from which fields are set:
        # Obsoletes -> 1.1, Obsoletes-Dist -> 1.2, plain fields -> 1.0
        metadata = Metadata()
        metadata['Obsoletes'] = 'ok'
        self.assertEqual(metadata['Metadata-Version'], '1.1')

        del metadata['Obsoletes']
        metadata['Obsoletes-Dist'] = 'ok'
        self.assertEqual(metadata['Metadata-Version'], '1.2')

        # mixing 1.1-only and 1.2-only fields is a conflict
        self.assertRaises(MetadataConflictError, metadata.set,
                          'Obsoletes', 'ok')

        del metadata['Obsoletes']
        del metadata['Obsoletes-Dist']
        metadata['Version'] = '1'
        self.assertEqual(metadata['Metadata-Version'], '1.0')

        PKG_INFO = os.path.join(os.path.dirname(__file__),
                                'SETUPTOOLS-PKG-INFO')
        with open(PKG_INFO, 'r') as f:
            content = f.read()
        metadata.read_file(StringIO(content))
        self.assertEqual(metadata['Metadata-Version'], '1.0')

        PKG_INFO = os.path.join(os.path.dirname(__file__),
                                'SETUPTOOLS-PKG-INFO2')
        with open(PKG_INFO, 'r') as f:
            content = f.read()
        metadata.read_file(StringIO(content))
        self.assertEqual(metadata['Metadata-Version'], '1.1')

        # Update the _fields dict directly to prevent 'Metadata-Version'
        # from being updated by the _set_best_version() method.
        metadata._fields['Metadata-Version'] = '1.618'
        self.assertRaises(MetadataUnrecognizedVersionError, metadata.keys)

    def test_warnings(self):
        metadata = Metadata()

        # these should raise a warning
        values = (('Requires-Dist', 'Funky (Groovie)'),
                  ('Requires-Python', '1-4'))

        for name, value in values:
            metadata.set(name, value)

        # we should have a certain amount of warnings
        self.assertEqual(len(self.get_logs()), 2)

    def test_multiple_predicates(self):
        metadata = Metadata()

        # XXX check whether "3" can be used instead of "3.0"
        # (it seems the minor version can be omitted)
        metadata['Requires-Python'] = '>=2.6, <3.0'
        metadata['Requires-Dist'] = ['Foo (>=2.6, <3.0)']

        # valid comma-separated predicates must not warn
        self.assertEqual([], self.get_logs(logging.WARNING))

    def test_project_url(self):
        metadata = Metadata()
        metadata['Project-URL'] = [('one', 'http://ok')]
        self.assertEqual(metadata['Project-URL'],
                          [('one', 'http://ok')])
        # Project-URL is a 1.2-only field
        self.assertEqual(metadata['Metadata-Version'], '1.2')

    def test_check_version(self):
        # check() reports a missing Version
        metadata = Metadata()
        metadata['Name'] = 'vimpdb'
        metadata['Home-page'] = 'http://pypi.python.org'
        metadata['Author'] = 'Monty Python'
        metadata.docutils_support = False
        missing, warnings = metadata.check()
        self.assertEqual(missing, ['Version'])

    def test_check_version_strict(self):
        # in strict mode a missing field raises instead
        metadata = Metadata()
        metadata['Name'] = 'vimpdb'
        metadata['Home-page'] = 'http://pypi.python.org'
        metadata['Author'] = 'Monty Python'
        metadata.docutils_support = False
        self.assertRaises(MetadataMissingError, metadata.check, strict=True)

    def test_check_name(self):
        # check() reports a missing Name
        metadata = Metadata()
        metadata['Version'] = '1.0'
        metadata['Home-page'] = 'http://pypi.python.org'
        metadata['Author'] = 'Monty Python'
        metadata.docutils_support = False
        missing, warnings = metadata.check()
        self.assertEqual(missing, ['Name'])

    def test_check_name_strict(self):
        metadata = Metadata()
        metadata['Version'] = '1.0'
        metadata['Home-page'] = 'http://pypi.python.org'
        metadata['Author'] = 'Monty Python'
        metadata.docutils_support = False
        self.assertRaises(MetadataMissingError, metadata.check, strict=True)

    def test_check_author(self):
        # check() reports a missing Author
        metadata = Metadata()
        metadata['Version'] = '1.0'
        metadata['Name'] = 'vimpdb'
        metadata['Home-page'] = 'http://pypi.python.org'
        metadata.docutils_support = False
        missing, warnings = metadata.check()
        self.assertEqual(missing, ['Author'])

    def test_check_homepage(self):
        # check() reports a missing Home-page
        metadata = Metadata()
        metadata['Version'] = '1.0'
        metadata['Name'] = 'vimpdb'
        metadata['Author'] = 'Monty Python'
        metadata.docutils_support = False
        missing, warnings = metadata.check()
        self.assertEqual(missing, ['Home-page'])

    def test_check_predicates(self):
        # invalid version predicates ('rr', 'a') produce warnings
        metadata = Metadata()
        metadata['Version'] = 'rr'
        metadata['Name'] = 'vimpdb'
        metadata['Home-page'] = 'http://pypi.python.org'
        metadata['Author'] = 'Monty Python'
        metadata['Requires-dist'] = ['Foo (a)']
        metadata['Obsoletes-dist'] = ['Foo (a)']
        metadata['Provides-dist'] = ['Foo (a)']
        if metadata.docutils_support:
            missing, warnings = metadata.check()
            self.assertEqual(len(warnings), 4)
            metadata.docutils_support = False
        missing, warnings = metadata.check()
        self.assertEqual(len(warnings), 4)

    def test_best_choice(self):
        # with no version-specific fields set, the preferred version is
        # chosen; Classifier bumps it to 1.2
        metadata = Metadata()
        metadata['Version'] = '1.0'
        self.assertEqual(metadata['Metadata-Version'],
                         PKG_INFO_PREFERRED_VERSION)
        metadata['Classifier'] = ['ok']
        self.assertEqual(metadata['Metadata-Version'], '1.2')

    def test_project_urls(self):
        # project-url is a bit specific, make sure we write it
        # properly in PKG-INFO
        metadata = Metadata()
        metadata['Version'] = '1.0'
        metadata['Project-Url'] = [('one', 'http://ok')]
        self.assertEqual(metadata['Project-Url'], [('one', 'http://ok')])
        file_ = StringIO()
        metadata.write_file(file_)
        file_.seek(0)
        res = file_.read().split('\n')
        self.assertIn('Project-URL: one,http://ok', res)

        file_.seek(0)
        metadata = Metadata()
        metadata.read_file(file_)
        self.assertEqual(metadata['Project-Url'], [('one', 'http://ok')])
+
+
def test_suite():
    """Return a suite with the metadata tests."""
    suite = unittest.makeSuite(MetadataTestCase)
    return suite

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_mixin2to3.py b/Lib/packaging/tests/test_mixin2to3.py
new file mode 100644
index 0000000..d7c83c2
--- /dev/null
+++ b/Lib/packaging/tests/test_mixin2to3.py
@@ -0,0 +1,75 @@
+"""Tests for packaging.command.build_py."""
+import sys
+
+from packaging.tests import unittest, support
+from packaging.compat import Mixin2to3
+
+
class Mixin2to3TestCase(support.TempdirManager,
                        support.LoggingCatcher,
                        unittest.TestCase):
    """Tests for the Mixin2to3 helper that runs 2to3 over code files,
    doctests, and with additional fixers."""

    # NOTE(review): sys.version < '2.6' is a string comparison that is
    # always false under Python 3, so these skips never fire; they look
    # vestigial from the distutils2 backport -- confirm and remove.
    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
    def test_convert_code_only(self):
        # used to check if code gets converted properly.
        code_content = "print 'test'\n"
        code_handle = self.mktempfile()
        code_name = code_handle.name

        code_handle.write(code_content)
        code_handle.flush()

        # _run_2to3 rewrites the file in place
        mixin2to3 = Mixin2to3()
        mixin2to3._run_2to3([code_name])
        converted_code_content = "print('test')\n"
        with open(code_name) as fp:
            new_code_content = "".join(fp.readlines())

        self.assertEqual(new_code_content, converted_code_content)

    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
    def test_doctests_only(self):
        # used to check if doctests get converted properly.
        doctest_content = '"""\n>>> print test\ntest\n"""\nprint test\n\n'
        doctest_handle = self.mktempfile()
        doctest_name = doctest_handle.name

        doctest_handle.write(doctest_content)
        doctest_handle.flush()

        mixin2to3 = Mixin2to3()
        mixin2to3._run_2to3([doctest_name])

        converted_doctest_content = ['"""', '>>> print(test)', 'test', '"""',
                                     'print(test)', '', '', '']
        converted_doctest_content = '\n'.join(converted_doctest_content)
        with open(doctest_name) as fp:
            new_doctest_content = "".join(fp.readlines())

        self.assertEqual(new_doctest_content, converted_doctest_content)

    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
    def test_additional_fixers(self):
        # used to check if use_2to3_fixers works
        # (packaging.tests.fixer provides a fixer turning type(x) is T
        # into isinstance(x, T))
        code_content = "type(x) is T"
        code_handle = self.mktempfile()
        code_name = code_handle.name

        code_handle.write(code_content)
        code_handle.flush()

        mixin2to3 = Mixin2to3()

        mixin2to3._run_2to3(files=[code_name], doctests=[code_name],
                            fixers=['packaging.tests.fixer'])
        converted_code_content = "isinstance(x, T)"
        with open(code_name) as fp:
            new_code_content = "".join(fp.readlines())
        self.assertEqual(new_code_content, converted_code_content)
+
+
def test_suite():
    """Return a suite with the Mixin2to3 tests."""
    suite = unittest.makeSuite(Mixin2to3TestCase)
    return suite

if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_msvc9compiler.py b/Lib/packaging/tests/test_msvc9compiler.py
new file mode 100644
index 0000000..dc3ae65
--- /dev/null
+++ b/Lib/packaging/tests/test_msvc9compiler.py
@@ -0,0 +1,140 @@
+"""Tests for packaging.compiler.msvc9compiler."""
+import os
+import sys
+
+from packaging.errors import PackagingPlatformError
+
+from packaging.tests import unittest, support
+
+_MANIFEST = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
+          manifestVersion="1.0">
+  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+    <security>
+      <requestedPrivileges>
+        <requestedExecutionLevel level="asInvoker" uiAccess="false">
+        </requestedExecutionLevel>
+      </requestedPrivileges>
+    </security>
+  </trustInfo>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
+         version="9.0.21022.8" processorArchitecture="x86"
+         publicKeyToken="XXXX">
+      </assemblyIdentity>
+    </dependentAssembly>
+  </dependency>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
+        version="9.0.21022.8" processorArchitecture="x86"
+        publicKeyToken="XXXX"></assemblyIdentity>
+    </dependentAssembly>
+  </dependency>
+</assembly>
+"""
+
+_CLEANED_MANIFEST = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
+          manifestVersion="1.0">
+  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+    <security>
+      <requestedPrivileges>
+        <requestedExecutionLevel level="asInvoker" uiAccess="false">
+        </requestedExecutionLevel>
+      </requestedPrivileges>
+    </security>
+  </trustInfo>
+  <dependency>
+
+  </dependency>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
+        version="9.0.21022.8" processorArchitecture="x86"
+        publicKeyToken="XXXX"></assemblyIdentity>
+    </dependentAssembly>
+  </dependency>
+</assembly>"""
+
+
+class msvc9compilerTestCase(support.TempdirManager,
+                            unittest.TestCase):
+
+    @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+    def test_no_compiler(self):
+        # make sure query_vcvarsall raises a PackagingPlatformError if
+        # the compiler is not found
+        from packaging.compiler.msvccompiler import get_build_version
+        if get_build_version() < 8.0:
+            raise unittest.SkipTest('only for MSVC8.0 or above')
+
+        from packaging.compiler import msvc9compiler
+        from packaging.compiler.msvc9compiler import query_vcvarsall
+
+        def _find_vcvarsall(version):
+            return None
+
+        old_find_vcvarsall = msvc9compiler.find_vcvarsall
+        msvc9compiler.find_vcvarsall = _find_vcvarsall
+        try:
+            self.assertRaises(PackagingPlatformError, query_vcvarsall,
+                             'wont find this version')
+        finally:
+            msvc9compiler.find_vcvarsall = old_find_vcvarsall
+
+    @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+    def test_reg_class(self):
+        from packaging.compiler.msvccompiler import get_build_version
+        if get_build_version() < 8.0:
+            raise unittest.SkipTest("requires MSVC 8.0 or later")
+
+        from packaging.compiler.msvc9compiler import Reg
+        self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx')
+
+        # looking for values that should exist on all
+        # windows registries versions.
+        path = r'Control Panel\Desktop'
+        v = Reg.get_value(path, 'dragfullwindows')
+        self.assertIn(v, ('0', '1', '2'))
+
+        import winreg
+        HKCU = winreg.HKEY_CURRENT_USER
+        keys = Reg.read_keys(HKCU, 'xxxx')
+        self.assertEqual(keys, None)
+
+        keys = Reg.read_keys(HKCU, r'Control Panel')
+        self.assertIn('Desktop', keys)
+
+    @unittest.skipUnless(sys.platform == "win32", "runs only on win32")
+    def test_remove_visual_c_ref(self):
+        from packaging.compiler.msvccompiler import get_build_version
+        if get_build_version() < 8.0:
+            raise unittest.SkipTest("requires MSVC 8.0 or later")
+
+        from packaging.compiler.msvc9compiler import MSVCCompiler
+        tempdir = self.mkdtemp()
+        manifest = os.path.join(tempdir, 'manifest')
+        with open(manifest, 'w') as f:
+            f.write(_MANIFEST)
+
+        compiler = MSVCCompiler()
+        compiler._remove_visual_c_ref(manifest)
+
+        # see what we got
+        with open(manifest) as f:
+            # removing trailing spaces
+            content = '\n'.join(line.rstrip() for line in f.readlines())
+
+        # makes sure the manifest was properly cleaned
+        self.assertEqual(content, _CLEANED_MANIFEST)
+
+
+def test_suite():
+    return unittest.makeSuite(msvc9compilerTestCase)
+
+if __name__ == "__main__":
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_pypi_dist.py b/Lib/packaging/tests/test_pypi_dist.py
new file mode 100644
index 0000000..b438cb8
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_dist.py
@@ -0,0 +1,277 @@
+"""Tests for the packaging.pypi.dist module."""
+
+import os
+from packaging.version import VersionPredicate
+from packaging.pypi.dist import (ReleaseInfo, ReleasesList, DistInfo,
+                                 split_archive_name, get_infos_from_url)
+from packaging.pypi.errors import HashDoesNotMatch, UnsupportedHashName
+
+from packaging.tests import unittest
+from packaging.tests.support import TempdirManager
+from packaging.tests.pypi_server import use_pypi_server
+
+
+def Dist(*args, **kwargs):
+    # DistInfo takes a release as a first parameter, avoid this in tests.
+    return DistInfo(None, *args, **kwargs)
+
+
+class TestReleaseInfo(unittest.TestCase):
+
+    def test_instantiation(self):
+        # Test the DistInfo class provides us the good attributes when
+        # given on construction
+        release = ReleaseInfo("FooBar", "1.1")
+        self.assertEqual("FooBar", release.name)
+        self.assertEqual("1.1", "%s" % release.version)
+
+    def test_add_dist(self):
+        # empty distribution type should assume "sdist"
+        release = ReleaseInfo("FooBar", "1.1")
+        release.add_distribution(url="http://example.org/")
+        # should not fail
+        release['sdist']
+
+    def test_get_unknown_distribution(self):
+        # should raise a KeyError
+        pass
+
+    def test_get_infos_from_url(self):
+        # Test that the URLs are parsed the right way
+        url_list = {
+            'FooBar-1.1.0.tar.gz': {
+                'name': 'foobar',  # lowercase the name
+                'version': '1.1.0',
+            },
+            'Foo-Bar-1.1.0.zip': {
+                'name': 'foo-bar',  # keep the dash
+                'version': '1.1.0',
+            },
+            'foobar-1.1b2.tar.gz#md5=123123123123123': {
+                'name': 'foobar',
+                'version': '1.1b2',
+                'url': 'http://example.org/foobar-1.1b2.tar.gz',  # no hash
+                'hashval': '123123123123123',
+                'hashname': 'md5',
+            },
+            'foobar-1.1-rc2.tar.gz': {  # use suggested name
+                'name': 'foobar',
+                'version': '1.1c2',
+                'url': 'http://example.org/foobar-1.1-rc2.tar.gz',
+            }
+        }
+
+        for url, attributes in url_list.items():
+            # for each url
+            infos = get_infos_from_url("http://example.org/" + url)
+            for attribute, expected in attributes.items():
+                got = infos.get(attribute)
+                if attribute == "version":
+                    self.assertEqual("%s" % got, expected)
+                else:
+                    self.assertEqual(got, expected)
+
+    def test_split_archive_name(self):
+        # Test we can split the archive names
+        names = {
+            'foo-bar-baz-1.0-rc2': ('foo-bar-baz', '1.0c2'),
+            'foo-bar-baz-1.0': ('foo-bar-baz', '1.0'),
+            'foobarbaz-1.0': ('foobarbaz', '1.0'),
+        }
+        for name, results in names.items():
+            self.assertEqual(results, split_archive_name(name))
+
+
+class TestDistInfo(TempdirManager, unittest.TestCase):
+    srcpath = "/packages/source/f/foobar/foobar-0.1.tar.gz"
+
+    def test_get_url(self):
+        # Test that the url property works well
+
+        d = Dist(url="test_url")
+        self.assertDictEqual(d.url, {
+            "url": "test_url",
+            "is_external": True,
+            "hashname": None,
+            "hashval": None,
+        })
+
+        # add a new url
+        d.add_url(url="internal_url", is_external=False)
+        self.assertEqual(d._url, None)
+        self.assertDictEqual(d.url, {
+            "url": "internal_url",
+            "is_external": False,
+            "hashname": None,
+            "hashval": None,
+        })
+        self.assertEqual(2, len(d.urls))
+
+    def test_comparison(self):
+        # Test that we can compare ReleaseInfo objects
+        foo1 = ReleaseInfo("foo", "1.0")
+        foo2 = ReleaseInfo("foo", "2.0")
+        bar = ReleaseInfo("bar", "2.0")
+        # assert we use the version to compare
+        self.assertTrue(foo1 < foo2)
+        self.assertFalse(foo1 > foo2)
+        self.assertFalse(foo1 == foo2)
+
+        # assert we can't compare dists with different names
+        self.assertRaises(TypeError, foo1.__eq__, bar)
+
+    @use_pypi_server("downloads_with_md5")
+    def test_download(self, server):
+        # Download is possible, and the md5 is checked if given
+
+        url = server.full_address + self.srcpath
+
+        # check that the md5 is verified if given
+        dist = Dist(url=url, hashname="md5",
+                    hashval="fe18804c5b722ff024cabdf514924fc4")
+        dist.download(self.mkdtemp())
+
+        # a wrong md5 fails
+        dist2 = Dist(url=url, hashname="md5", hashval="wrongmd5")
+
+        self.assertRaises(HashDoesNotMatch, dist2.download, self.mkdtemp())
+
+        # we can omit the md5 hash
+        dist3 = Dist(url=url)
+        dist3.download(self.mkdtemp())
+
+        # and specify a temporary location
+        # for an already downloaded dist
+        path1 = self.mkdtemp()
+        dist3.download(path=path1)
+        # and for a new one
+        path2_base = self.mkdtemp()
+        dist4 = Dist(url=url)
+        path2 = dist4.download(path=path2_base)
+        self.assertIn(path2_base, path2)
+
+    def test_hashname(self):
+        # Invalid hash names raise an exception on assignment
+        Dist(hashname="md5", hashval="value")
+
+        self.assertRaises(UnsupportedHashName, Dist,
+                          hashname="invalid_hashname",
+                          hashval="value")
+
+    @use_pypi_server('downloads_with_md5')
+    def test_unpack(self, server):
+        url = server.full_address + self.srcpath
+        dist1 = Dist(url=url)
+
+        # unpack the distribution in a specified folder
+        dist1_here = self.mkdtemp()
+        dist1_there = dist1.unpack(path=dist1_here)
+
+        # assert we unpack to the path provided
+        self.assertEqual(dist1_here, dist1_there)
+        dist1_result = os.listdir(dist1_there)
+        self.assertIn('paf', dist1_result)
+        os.remove(os.path.join(dist1_there, 'paf'))
+
+        # Test unpack works without a path argument
+        dist2 = Dist(url=url)
+        # doing an unpack
+        dist2_there = dist2.unpack()
+        dist2_result = os.listdir(dist2_there)
+        self.assertIn('paf', dist2_result)
+        os.remove(os.path.join(dist2_there, 'paf'))
+
+
+class TestReleasesList(unittest.TestCase):
+
+    def test_filter(self):
+        # Test we filter the distributions the right way, using version
+        # predicate match method
+        releases = ReleasesList('FooBar', (
+            ReleaseInfo("FooBar", "1.1"),
+            ReleaseInfo("FooBar", "1.1.1"),
+            ReleaseInfo("FooBar", "1.2"),
+            ReleaseInfo("FooBar", "1.2.1"),
+        ))
+        filtered = releases.filter(VersionPredicate("FooBar (<1.2)"))
+        self.assertNotIn(releases[2], filtered)
+        self.assertNotIn(releases[3], filtered)
+        self.assertIn(releases[0], filtered)
+        self.assertIn(releases[1], filtered)
+
+    def test_append(self):
+        # When adding a new item to the list, the behavior is to test if
+        # a release with the same name and version number already exists,
+        # and if so, to add a new distribution for it. If the distribution type
+        # is already defined too, add url information to the existing DistInfo
+        # object.
+
+        releases = ReleasesList("FooBar", [
+            ReleaseInfo("FooBar", "1.1", url="external_url",
+                        dist_type="sdist"),
+        ])
+        self.assertEqual(1, len(releases))
+        releases.add_release(release=ReleaseInfo("FooBar", "1.1",
+                                                 url="internal_url",
+                                                 is_external=False,
+                                                 dist_type="sdist"))
+        self.assertEqual(1, len(releases))
+        self.assertEqual(2, len(releases[0]['sdist'].urls))
+
+        releases.add_release(release=ReleaseInfo("FooBar", "1.1.1",
+                                                 dist_type="sdist"))
+        self.assertEqual(2, len(releases))
+
+        # when adding a distribution with a different type, a new distribution
+        # has to be added.
+        releases.add_release(release=ReleaseInfo("FooBar", "1.1.1",
+                                                 dist_type="bdist"))
+        self.assertEqual(2, len(releases))
+        self.assertEqual(2, len(releases[1].dists))
+
+    def test_prefer_final(self):
+        # Can order the distributions using prefer_final
+
+        fb10 = ReleaseInfo("FooBar", "1.0")  # final distribution
+        fb11a = ReleaseInfo("FooBar", "1.1a1")  # alpha
+        fb12a = ReleaseInfo("FooBar", "1.2a1")  # alpha
+        fb12b = ReleaseInfo("FooBar", "1.2b1")  # beta
+        dists = ReleasesList("FooBar", [fb10, fb11a, fb12a, fb12b])
+
+        dists.sort_releases(prefer_final=True)
+        self.assertEqual(fb10, dists[0])
+
+        dists.sort_releases(prefer_final=False)
+        self.assertEqual(fb12b, dists[0])
+
+#    def test_prefer_source(self):
+#        # Ordering support prefer_source
+#        fb_source = Dist("FooBar", "1.0", type="source")
+#        fb_binary = Dist("FooBar", "1.0", type="binary")
+#        fb2_binary = Dist("FooBar", "2.0", type="binary")
+#        dists = ReleasesList([fb_binary, fb_source])
+#
+#        dists.sort_distributions(prefer_source=True)
+#        self.assertEqual(fb_source, dists[0])
+#
+#        dists.sort_distributions(prefer_source=False)
+#        self.assertEqual(fb_binary, dists[0])
+#
+#        dists.append(fb2_binary)
+#        dists.sort_distributions(prefer_source=True)
+#        self.assertEqual(fb2_binary, dists[0])
+
+    def test_get_last(self):
+        dists = ReleasesList('Foo')
+        self.assertEqual(dists.get_last('Foo 1.0'), None)
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestDistInfo))
+    suite.addTest(unittest.makeSuite(TestReleaseInfo))
+    suite.addTest(unittest.makeSuite(TestReleasesList))
+    return suite
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_pypi_server.py b/Lib/packaging/tests/test_pypi_server.py
new file mode 100644
index 0000000..15c2e6c
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_server.py
@@ -0,0 +1,81 @@
+"""Tests for packaging.command.bdist."""
+import sys
+
+import urllib.request
+import urllib.parse
+import urllib.error
+
+from packaging.tests.pypi_server import PyPIServer, PYPI_DEFAULT_STATIC_PATH
+from packaging.tests import unittest
+
+
+class PyPIServerTest(unittest.TestCase):
+
+    def test_records_requests(self):
+        # We expect that PyPIServer can log our requests
+        server = PyPIServer()
+        server.default_response_status = 200
+
+        try:
+            server.start()
+            self.assertEqual(len(server.requests), 0)
+
+            data = b'Rock Around The Bunker'
+
+            headers = {"X-test-header": "Mister Iceberg"}
+
+            request = urllib.request.Request(server.full_address, data, headers)
+            urllib.request.urlopen(request)
+            self.assertEqual(len(server.requests), 1)
+            handler, request_data = server.requests[-1]
+            self.assertIn(data, request_data)
+            self.assertIn("x-test-header", handler.headers)
+            self.assertEqual(handler.headers["x-test-header"], "Mister Iceberg")
+
+        finally:
+            server.stop()
+
+
+    def test_serve_static_content(self):
+        # PYPI Mocked server can serve static content from disk.
+
+        def uses_local_files_for(server, url_path):
+            """Test that files are served statically (e.g. the output from
+            the server is the same as the one made by a simple file read).
+            """
+            url = server.full_address + url_path
+            request = urllib.request.Request(url)
+            response = urllib.request.urlopen(request)
+            file = open(PYPI_DEFAULT_STATIC_PATH + "/test_pypi_server" +
+               url_path)
+            answer = response.read().decode() == file.read()
+            file.close()
+            return answer
+
+        server = PyPIServer(static_uri_paths=["simple", "external"],
+            static_filesystem_paths=["test_pypi_server"])
+        server.start()
+        try:
+            # the file does not exist on disk, so it should not be served
+            url = server.full_address + "/simple/unexisting_page"
+            request = urllib.request.Request(url)
+            try:
+                urllib.request.urlopen(request)
+            except urllib.error.HTTPError as e:
+                self.assertEqual(e.code, 404)
+
+            # now try serving content that does exist
+            self.assertTrue(uses_local_files_for(server, "/simple/index.html"))
+
+            # and another one in another root path
+            self.assertTrue(uses_local_files_for(server, "/external/index.html"))
+
+        finally:
+            server.stop()
+
+
+def test_suite():
+    return unittest.makeSuite(PyPIServerTest)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_pypi_simple.py b/Lib/packaging/tests/test_pypi_simple.py
new file mode 100644
index 0000000..c43a839
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_simple.py
@@ -0,0 +1,326 @@
+"""Tests for the packaging.pypi.simple module."""
+
+import os
+import sys
+import http.client
+import urllib.error
+import urllib.parse
+import urllib.request
+
+from packaging.pypi.simple import Crawler
+
+from packaging.tests import unittest
+from packaging.tests.support import TempdirManager, LoggingCatcher
+from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
+                                         PYPI_DEFAULT_STATIC_PATH)
+
+
+class SimpleCrawlerTestCase(TempdirManager,
+                            LoggingCatcher,
+                            unittest.TestCase):
+
+    def _get_simple_crawler(self, server, base_url="/simple/", hosts=None,
+                            *args, **kwargs):
+        """Build and return a SimpleIndex with the test server urls"""
+        if hosts is None:
+            hosts = (server.full_address.replace("http://", ""),)
+        kwargs['hosts'] = hosts
+        return Crawler(server.full_address + base_url, *args,
+                       **kwargs)
+
+    @use_pypi_server()
+    def test_bad_urls(self, server):
+        crawler = Crawler()
+        url = 'http://127.0.0.1:0/nonesuch/test_simple'
+        try:
+            v = crawler._open_url(url)
+        except Exception as v:
+            self.assertIn(url, str(v))
+        else:
+            v.close()
+            self.assertIsInstance(v, urllib.error.HTTPError)
+
+        # issue 16
+        # easy_install inquant.contentmirror.plone breaks because of a typo
+        # in its home URL
+        crawler = Crawler(hosts=('example.org',))
+        url = ('url:%20https://svn.plone.org/svn/collective/'
+               'inquant.contentmirror.plone/trunk')
+        try:
+            v = crawler._open_url(url)
+        except Exception as v:
+            self.assertIn(url, str(v))
+        else:
+            v.close()
+            self.assertIsInstance(v, urllib.error.HTTPError)
+
+        def _urlopen(*args):
+            raise http.client.BadStatusLine('line')
+
+        old_urlopen = urllib.request.urlopen
+        urllib.request.urlopen = _urlopen
+        url = 'http://example.org'
+        try:
+            v = crawler._open_url(url)
+        except Exception as v:
+            self.assertIn('line', str(v))
+        else:
+            v.close()
+            # TODO use self.assertRaises
+            raise AssertionError('Should have raise here!')
+        finally:
+            urllib.request.urlopen = old_urlopen
+
+        # issue 20
+        url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
+        try:
+            crawler._open_url(url)
+        except Exception as v:
+            self.assertIn('nonnumeric port', str(v))
+
+        # issue #160
+        url = server.full_address
+        page = ('<a href="http://www.famfamfam.com]('
+                'http://www.famfamfam.com/">')
+        crawler._process_url(url, page)
+
+    @use_pypi_server("test_found_links")
+    def test_found_links(self, server):
+        # Browse the index, asking for a specified release version
+        # The PyPI index contains links for version 1.0, 1.1, 2.0 and 2.0.1
+        crawler = self._get_simple_crawler(server)
+        last_release = crawler.get_release("foobar")
+
+        # we have scanned the index page
+        self.assertIn(server.full_address + "/simple/foobar/",
+            crawler._processed_urls)
+
+        # we have found 4 releases in this page
+        self.assertEqual(len(crawler._projects["foobar"]), 4)
+
+        # and returned the most recent one
+        self.assertEqual("%s" % last_release.version, '2.0.1')
+
+    def test_is_browsable(self):
+        crawler = Crawler(follow_externals=False)
+        self.assertTrue(crawler._is_browsable(crawler.index_url + "test"))
+
+        # Now, when following externals, we can have a list of hosts to trust.
+        # and don't follow other external links than the one described here.
+        crawler = Crawler(hosts=["pypi.python.org", "example.org"],
+                          follow_externals=True)
+        good_urls = (
+            "http://pypi.python.org/foo/bar",
+            "http://pypi.python.org/simple/foobar",
+            "http://example.org",
+            "http://example.org/",
+            "http://example.org/simple/",
+        )
+        bad_urls = (
+            "http://python.org",
+            "http://example.tld",
+        )
+
+        for url in good_urls:
+            self.assertTrue(crawler._is_browsable(url))
+
+        for url in bad_urls:
+            self.assertFalse(crawler._is_browsable(url))
+
+        # allow all hosts
+        crawler = Crawler(follow_externals=True, hosts=("*",))
+        self.assertTrue(crawler._is_browsable("http://an-external.link/path"))
+        self.assertTrue(crawler._is_browsable("pypi.example.org/a/path"))
+
+        # specify a list of hosts we want to allow
+        crawler = Crawler(follow_externals=True,
+                          hosts=("*.example.org",))
+        self.assertFalse(crawler._is_browsable("http://an-external.link/path"))
+        self.assertTrue(
+            crawler._is_browsable("http://pypi.example.org/a/path"))
+
+    @use_pypi_server("with_externals")
+    def test_follow_externals(self, server):
+        # Include external pages
+        # Try to request the package index, which contains links to "external"
+        # resources. They have to be scanned too.
+        crawler = self._get_simple_crawler(server, follow_externals=True)
+        crawler.get_release("foobar")
+        self.assertIn(server.full_address + "/external/external.html",
+            crawler._processed_urls)
+
+    @use_pypi_server("with_real_externals")
+    def test_restrict_hosts(self, server):
+        # Only using a list of allowed hosts is possible
+        # Test that telling the simple pyPI client to not retrieve external
+        # works
+        crawler = self._get_simple_crawler(server, follow_externals=False)
+        crawler.get_release("foobar")
+        self.assertNotIn(server.full_address + "/external/external.html",
+            crawler._processed_urls)
+
+    @use_pypi_server(static_filesystem_paths=["with_externals"],
+        static_uri_paths=["simple", "external"])
+    def test_links_priority(self, server):
+        # Download links from the pypi simple index should be used before
+        # external download links.
+        # http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error
+        #
+        # Usecase :
+        # - someone uploads a package on pypi, a md5 is generated
+        # - someone manually copies this link (with the md5 in the url) onto
+        #   an external page accessible from the package page.
+        # - someone reuploads the package (with a different md5)
+        # - while easy_installing, an MD5 error occurs because the external
+        # link is used
+        # -> The index should use the link from pypi, not the external one.
+
+        # start an index server
+        index_url = server.full_address + '/simple/'
+
+        # scan a test index
+        crawler = Crawler(index_url, follow_externals=True)
+        releases = crawler.get_releases("foobar")
+        server.stop()
+
+        # we have only one link, because links are compared without md5
+        self.assertEqual(1, len(releases))
+        self.assertEqual(1, len(releases[0].dists))
+        # the link should be from the index
+        self.assertEqual(2, len(releases[0].dists['sdist'].urls))
+        self.assertEqual('12345678901234567',
+                         releases[0].dists['sdist'].url['hashval'])
+        self.assertEqual('md5', releases[0].dists['sdist'].url['hashname'])
+
+    @use_pypi_server(static_filesystem_paths=["with_norel_links"],
+        static_uri_paths=["simple", "external"])
+    def test_not_scan_all_links(self, server):
+        # Do not follow all index page links.
+        # The links not tagged with rel="download" and rel="homepage" have
+        # to not be processed by the package index, while processing "pages".
+
+        # process the pages
+        crawler = self._get_simple_crawler(server, follow_externals=True)
+        crawler.get_releases("foobar")
+        # now it should have processed only pages with links rel="download"
+        # and rel="homepage"
+        self.assertIn("%s/simple/foobar/" % server.full_address,
+            crawler._processed_urls)  # it's the simple index page
+        self.assertIn("%s/external/homepage.html" % server.full_address,
+            crawler._processed_urls)  # the external homepage is rel="homepage"
+        self.assertNotIn("%s/external/nonrel.html" % server.full_address,
+            crawler._processed_urls)  # this link contains no rel=*
+        self.assertNotIn("%s/unrelated-0.2.tar.gz" % server.full_address,
+            crawler._processed_urls)  # linked from simple index (no rel)
+        self.assertIn("%s/foobar-0.1.tar.gz" % server.full_address,
+            crawler._processed_urls)  # linked from simple index (rel)
+        self.assertIn("%s/foobar-2.0.tar.gz" % server.full_address,
+            crawler._processed_urls)  # linked from external homepage (rel)
+
+    def test_uses_mirrors(self):
+        # When the main repository seems down, try using the given mirrors
+        server = PyPIServer("foo_bar_baz")
+        mirror = PyPIServer("foo_bar_baz")
+        mirror.start()  # the main server is not started, only the mirror
+
+        try:
+            # create the index using both servers
+            crawler = Crawler(server.full_address + "/simple/", hosts=('*',),
+                              # set the timeout to 1s for the tests
+                              timeout=1, mirrors=[mirror.full_address])
+
+            # this should not raise a timeout
+            self.assertEqual(4, len(crawler.get_releases("foo")))
+        finally:
+            mirror.stop()
+
+    def test_simple_link_matcher(self):
+        # Test that the simple link matcher finds the right links
+        crawler = Crawler(follow_externals=False)
+
+        # Here, we define:
+        #   1. one link that must be followed, cause it's a download one
+        #   2. one link that must *not* be followed, cause the is_browsable
+        #      returns false for it.
+        #   3. one link that must be followed cause it's a homepage that is
+        #      browsable
+        #   4. one link that must be followed, because it contains an md5 hash
+        self.assertTrue(crawler._is_browsable("%stest" % crawler.index_url))
+        self.assertFalse(crawler._is_browsable("http://dl-link2"))
+        content = """
+        <a href="http://dl-link1" rel="download">download_link1</a>
+        <a href="http://dl-link2" rel="homepage">homepage_link1</a>
+        <a href="%(index_url)stest" rel="homepage">homepage_link2</a>
+        <a href="%(index_url)stest/foobar-1.tar.gz#md5=abcdef>download_link2</a>
+        """ % {'index_url': crawler.index_url}
+
+        # Test that the simple link matcher yield the good links.
+        generator = crawler._simple_link_matcher(content, crawler.index_url)
+        self.assertEqual(('%stest/foobar-1.tar.gz#md5=abcdef' %
+                          crawler.index_url, True), next(generator))
+        self.assertEqual(('http://dl-link1', True), next(generator))
+        self.assertEqual(('%stest' % crawler.index_url, False),
+                         next(generator))
+        self.assertRaises(StopIteration, generator.__next__)
+
+        # Follow the external links is possible (eg. homepages)
+        crawler.follow_externals = True
+        generator = crawler._simple_link_matcher(content, crawler.index_url)
+        self.assertEqual(('%stest/foobar-1.tar.gz#md5=abcdef' %
+                          crawler.index_url, True), next(generator))
+        self.assertEqual(('http://dl-link1', True), next(generator))
+        self.assertEqual(('http://dl-link2', False), next(generator))
+        self.assertEqual(('%stest' % crawler.index_url, False),
+                         next(generator))
+        self.assertRaises(StopIteration, generator.__next__)
+
+    def test_browse_local_files(self):
+        # Test that we can browse local files
+        index_path = os.sep.join(["file://" + PYPI_DEFAULT_STATIC_PATH,
+                                  "test_found_links", "simple"])
+        crawler = Crawler(index_path)
+        dists = crawler.get_releases("foobar")
+        self.assertEqual(4, len(dists))
+
+    def test_get_link_matcher(self):
+        crawler = Crawler("http://example.org")
+        self.assertEqual('_simple_link_matcher', crawler._get_link_matcher(
+                         "http://example.org/some/file").__name__)
+        self.assertEqual('_default_link_matcher', crawler._get_link_matcher(
+                         "http://other-url").__name__)
+
+    def test_default_link_matcher(self):
+        crawler = Crawler("http://example.org", mirrors=[])
+        crawler.follow_externals = True
+        crawler._is_browsable = lambda *args: True
+        base_url = "http://example.org/some/file/"
+        content = """
+<a href="../homepage" rel="homepage">link</a>
+<a href="../download" rel="download">link2</a>
+<a href="../simpleurl">link2</a>
+        """
+        found_links = set(uri for uri, _ in
+                          crawler._default_link_matcher(content, base_url))
+        self.assertIn('http://example.org/some/homepage', found_links)
+        self.assertIn('http://example.org/some/simpleurl', found_links)
+        self.assertIn('http://example.org/some/download', found_links)
+
+    @use_pypi_server("project_list")
+    def test_search_projects(self, server):
+        # we can search the index for some projects, on their names
+        # the case used does not matter here
+        crawler = self._get_simple_crawler(server)
+        tests = (('Foobar', ['FooBar-bar', 'Foobar-baz', 'Baz-FooBar']),
+                 ('foobar*', ['FooBar-bar', 'Foobar-baz']),
+                 ('*foobar', ['Baz-FooBar']))
+
+        for search, expected in tests:
+            projects = [p.name for p in crawler.search_projects(search)]
+            self.assertListEqual(expected, projects)
+
+
+def test_suite():
+    return unittest.makeSuite(SimpleCrawlerTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_pypi_xmlrpc.py b/Lib/packaging/tests/test_pypi_xmlrpc.py
new file mode 100644
index 0000000..e27c7b3
--- /dev/null
+++ b/Lib/packaging/tests/test_pypi_xmlrpc.py
@@ -0,0 +1,93 @@
+"""Tests for the packaging.pypi.xmlrpc module."""
+
+from packaging.pypi.xmlrpc import Client, InvalidSearchField, ProjectNotFound
+
+from packaging.tests import unittest
+from packaging.tests.pypi_server import use_xmlrpc_server
+
+
class TestXMLRPCClient(unittest.TestCase):
    """Tests for packaging.pypi.xmlrpc.Client against a fake server."""

    def _get_client(self, server, *args, **kwargs):
        # Build a client pointed at the fake server's address.
        return Client(server.full_address, *args, **kwargs)

    @use_xmlrpc_server()
    def test_search_projects(self, server):
        client = self._get_client(server)
        server.xmlrpc.set_search_result(['FooBar', 'Foo', 'FooFoo'])
        results = [r.name for r in client.search_projects(name='Foo')]
        self.assertEqual(3, len(results))
        self.assertIn('FooBar', results)
        self.assertIn('Foo', results)
        self.assertIn('FooFoo', results)

    def test_search_projects_bad_fields(self):
        # searching on an unknown field must be rejected
        client = Client()
        self.assertRaises(InvalidSearchField, client.search_projects,
                          invalid="test")

    @use_xmlrpc_server()
    def test_get_releases(self, server):
        client = self._get_client(server)
        server.xmlrpc.set_distributions([
            {'name': 'FooBar', 'version': '1.1'},
            {'name': 'FooBar', 'version': '1.2', 'url': 'http://some/url/'},
            {'name': 'FooBar', 'version': '1.3', 'url': 'http://other/url/'},
        ])

        # use a lambda here to avoid an useless mock call
        server.xmlrpc.list_releases = lambda *a, **k: ['1.1', '1.2', '1.3']

        releases = client.get_releases('FooBar (<=1.2)')
        # dont call release_data and release_url; just return name and version.
        self.assertEqual(2, len(releases))
        versions = releases.get_versions()
        self.assertIn('1.1', versions)
        self.assertIn('1.2', versions)
        self.assertNotIn('1.3', versions)

        # unknown projects raise instead of returning an empty list
        self.assertRaises(ProjectNotFound, client.get_releases, 'Foo')

    @use_xmlrpc_server()
    def test_get_distributions(self, server):
        client = self._get_client(server)
        server.xmlrpc.set_distributions([
            {'name': 'FooBar', 'version': '1.1',
             'url': 'http://example.org/foobar-1.1-sdist.tar.gz',
             'digest': '1234567',
             'type': 'sdist', 'python_version': 'source'},
            {'name': 'FooBar', 'version': '1.1',
             'url': 'http://example.org/foobar-1.1-bdist.tar.gz',
             'digest': '8912345', 'type': 'bdist'},
        ])

        releases = client.get_releases('FooBar', '1.1')
        client.get_distributions('FooBar', '1.1')
        release = releases.get_release('1.1')
        # bug fix: these used to be assertTrue(a, b), whose second
        # argument is the failure *message* -- the assertions always
        # passed; compare the URLs for real with assertEqual
        self.assertEqual('http://example.org/foobar-1.1-sdist.tar.gz',
                         release['sdist'].url['url'])
        self.assertEqual('http://example.org/foobar-1.1-bdist.tar.gz',
                         release['bdist'].url['url'])
        self.assertEqual(release['sdist'].python_version, 'source')

    @use_xmlrpc_server()
    def test_get_metadata(self, server):
        client = self._get_client(server)
        server.xmlrpc.set_distributions([
            {'name': 'FooBar',
             'version': '1.1',
             'keywords': '',
             'obsoletes_dist': ['FooFoo'],
             'requires_external': ['Foo'],
            }])
        release = client.get_metadata('FooBar', '1.1')
        self.assertEqual(['Foo'], release.metadata['requires_external'])
        self.assertEqual(['FooFoo'], release.metadata['obsoletes_dist'])
+
+
def test_suite():
    """Build the suite of all tests defined in this module."""
    return unittest.TestSuite([unittest.makeSuite(TestXMLRPCClient)])
+
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_resources.py b/Lib/packaging/tests/test_resources.py
new file mode 100644
index 0000000..158af36
--- /dev/null
+++ b/Lib/packaging/tests/test_resources.py
@@ -0,0 +1,168 @@
+"""Tests for packaging.resources."""
+
+import os
+import sys
+import shutil
+import tempfile
+from textwrap import dedent
+from packaging.config import get_resources_dests
+from packaging.database import disable_cache, enable_cache
+from packaging.resources import get_file, get_file_path
+
+from packaging.tests import unittest
+from packaging.tests.test_util import GlobTestCaseBase
+
+
class DataFilesTestCase(GlobTestCaseBase):
    """Tests for resource-to-destination glob rules and resource lookup.

    Most tests build a file tree from a *spec* dict mapping relative
    paths to their expected destination (or None when the rules must
    NOT match the file) and check get_resources_dests against it.
    test_get_file checks runtime lookup of an installed resource.
    """

    def assertRulesMatch(self, rules, spec):
        # Materialize the tree described by *spec*, apply *rules*, and
        # compare the result with the non-None destinations of *spec*.
        tempdir = self.build_files_tree(spec)
        expected = self.clean_tree(spec)
        result = get_resources_dests(tempdir, rules)
        self.assertEqual(expected, result)

    def clean_tree(self, spec):
        # Keep only entries with a destination; a None value marks a
        # file that exists in the tree but must not be matched.
        files = {}
        for path, value in spec.items():
            if value is not None:
                path = self.os_dependent_path(path)
                files[path] = value
        return files

    def test_simple_glob(self):
        # a plain '*.ext' glob at the tree root
        rules = [('', '*.tpl', '{data}')]
        spec = {'coucou.tpl': '{data}/coucou.tpl',
                'Donotwant': None}
        self.assertRulesMatch(rules, spec)

    def test_multiple_match(self):
        # when several rules match the same file, the last one wins
        rules = [('scripts', '*.bin', '{appdata}'),
                 ('scripts', '*', '{appscript}')]
        spec = {'scripts/script.bin': '{appscript}/script.bin',
                'Babarlikestrawberry': None}
        self.assertRulesMatch(rules, spec)

    def test_set_match(self):
        # brace sets: '{bin,sh}' matches either suffix
        rules = [('scripts', '*.{bin,sh}', '{appscript}')]
        spec = {'scripts/script.bin': '{appscript}/script.bin',
                'scripts/babar.sh':  '{appscript}/babar.sh',
                'Babarlikestrawberry': None}
        self.assertRulesMatch(rules, spec)

    def test_set_match_multiple(self):
        # multiple brace sets in one glob, including an empty branch
        rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')]
        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
                'scripts/script.sh':  '{appscript}/script.sh',
                'Babarlikestrawberry': None}
        self.assertRulesMatch(rules, spec)

    def test_set_match_exclude(self):
        # a rule with a None destination excludes matching files
        rules = [('scripts', '*', '{appscript}'),
                 ('', '**/*.sh', None)]
        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
                'scripts/script.sh':  None,
                'Babarlikestrawberry': None}
        self.assertRulesMatch(rules, spec)

    def test_glob_in_base(self):
        # the base-directory part of a rule may itself be a glob
        rules = [('scrip*', '*.bin', '{appscript}')]
        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
                'scripouille/babar.bin': '{appscript}/babar.bin',
                'scriptortu/lotus.bin': '{appscript}/lotus.bin',
                'Babarlikestrawberry': None}
        self.assertRulesMatch(rules, spec)

    def test_recursive_glob(self):
        # '**' matches any number of intermediate directories
        rules = [('', '**/*.bin', '{binary}')]
        spec = {'binary0.bin': '{binary}/binary0.bin',
                'scripts/binary1.bin': '{binary}/scripts/binary1.bin',
                'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin',
                'you/kill/pandabear.guy': None}
        self.assertRulesMatch(rules, spec)

    def test_final_exemple_glob(self):
        # a realistic combination of all the features above
        rules = [
            ('mailman/database/schemas/', '*', '{appdata}/schemas'),
            ('', '**/*.tpl', '{appdata}/templates'),
            ('', 'developer-docs/**/*.txt', '{doc}'),
            ('', 'README', '{doc}'),
            ('mailman/etc/', '*', '{config}'),
            ('mailman/foo/', '**/bar/*.cfg', '{config}/baz'),
            ('mailman/foo/', '**/*.cfg', '{config}/hmm'),
            ('', 'some-new-semantic.sns', '{funky-crazy-category}'),
        ]
        spec = {
            'README': '{doc}/README',
            'some.tpl': '{appdata}/templates/some.tpl',
            'some-new-semantic.sns':
                '{funky-crazy-category}/some-new-semantic.sns',
            'mailman/database/mailman.db': None,
            'mailman/database/schemas/blah.schema':
                '{appdata}/schemas/blah.schema',
            'mailman/etc/my.cnf': '{config}/my.cnf',
            'mailman/foo/some/path/bar/my.cfg':
                '{config}/hmm/some/path/bar/my.cfg',
            'mailman/foo/some/path/other.cfg':
                '{config}/hmm/some/path/other.cfg',
            'developer-docs/index.txt': '{doc}/developer-docs/index.txt',
            'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt',
        }
        self.maxDiff = None
        self.assertRulesMatch(rules, spec)

    def test_get_file(self):
        # Create a fake dist
        temp_site_packages = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, temp_site_packages)

        dist_name = 'test'
        dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info')
        os.mkdir(dist_info)

        metadata_path = os.path.join(dist_info, 'METADATA')
        resources_path = os.path.join(dist_info, 'RESOURCES')

        with open(metadata_path, 'w') as fp:
            fp.write(dedent("""\
                Metadata-Version: 1.2
                Name: test
                Version: 0.1
                Summary: test
                Author: me
                """))

        test_path = 'test.cfg'

        fd, test_resource_path = tempfile.mkstemp()
        os.close(fd)
        self.addCleanup(os.remove, test_resource_path)

        with open(test_resource_path, 'w') as fp:
            fp.write('Config')

        # RESOURCES maps the logical resource name to its real path
        with open(resources_path, 'w') as fp:
            fp.write('%s,%s' % (test_path, test_resource_path))

        # Add fake site-packages to sys.path to retrieve fake dist
        self.addCleanup(sys.path.remove, temp_site_packages)
        sys.path.insert(0, temp_site_packages)

        # Force packaging.database to rescan the sys.path
        self.addCleanup(enable_cache)
        disable_cache()

        # Try to retrieve resources paths and files
        self.assertEqual(get_file_path(dist_name, test_path),
                         test_resource_path)
        self.assertRaises(KeyError, get_file_path, dist_name, 'i-dont-exist')

        with get_file(dist_name, test_path) as fp:
            self.assertEqual(fp.read(), 'Config')
        self.assertRaises(KeyError, get_file, dist_name, 'i-dont-exist')
+
+
def test_suite():
    """Return a suite with all tests from DataFilesTestCase."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(DataFilesTestCase)
+
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_run.py b/Lib/packaging/tests/test_run.py
new file mode 100644
index 0000000..01fa5aa
--- /dev/null
+++ b/Lib/packaging/tests/test_run.py
@@ -0,0 +1,62 @@
+"""Tests for packaging.run."""
+
+import os
+import sys
+import shutil
+
+from packaging.tests import unittest, support, TESTFN
+
# setup script that uses __file__
setup_using___file__ = """\

__file__

from packaging.run import setup
setup()
"""

# setup script that prints the current working directory.  The print
# must use function-call syntax: the script is executed by Python 3,
# where the Python 2 print statement is a SyntaxError.
setup_prints_cwd = """\

import os
print(os.getcwd())

from packaging.run import setup
setup()
"""
+
+
class CoreTestCase(unittest.TestCase):
    """Fixture for tests that run setup scripts through packaging.run."""

    def setUp(self):
        super(CoreTestCase, self).setUp()
        # remember the streams/argv the tests are allowed to clobber
        self._saved_stdout = sys.stdout
        self.cleanup_testfn()
        self._saved_argv = sys.argv, sys.argv[:]

    def tearDown(self):
        sys.stdout = self._saved_stdout
        self.cleanup_testfn()
        # restore both the binding and the contents of sys.argv
        sys.argv = self._saved_argv[0]
        sys.argv[:] = self._saved_argv[1]
        super(CoreTestCase, self).tearDown()

    def cleanup_testfn(self):
        # TESTFN may have been left behind as a file or as a directory
        path = TESTFN
        if os.path.isdir(path):
            shutil.rmtree(path)
        elif os.path.isfile(path):
            os.remove(path)

    def write_setup(self, text, path=TESTFN):
        """Write *text* to *path* and return the path."""
        with open(path, "w") as fp:
            fp.write(text)
        return path

    # TODO restore the tests removed six months ago and port them to pysetup
+
+
def test_suite():
    """Return a suite with all tests from CoreTestCase."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(CoreTestCase)
+
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_uninstall.py b/Lib/packaging/tests/test_uninstall.py
new file mode 100644
index 0000000..ce345b6
--- /dev/null
+++ b/Lib/packaging/tests/test_uninstall.py
@@ -0,0 +1,99 @@
+"""Tests for the uninstall command."""
+import os
+import sys
+
+from packaging.database import disable_cache, enable_cache
+from packaging.run import main
+from packaging.errors import PackagingError
+from packaging.install import remove
+from packaging.command.install_dist import install_dist
+
+from packaging.tests import unittest, support
+
# setup.cfg template; %(name)s, %(version)s and %(pkg)s are filled in
# by UninstallTestCase.make_dist below.
SETUP_CFG = """
[metadata]
name = %(name)s
version = %(version)s

[files]
packages =
    %(pkg)s
    %(pkg)s.sub
"""
+
+
class UninstallTestCase(support.TempdirManager,
                        support.LoggingCatcher,
                        support.EnvironRestorer,
                        unittest.TestCase):
    """Check that installed projects can be removed again."""

    restore_environ = ['PLAT']

    def setUp(self):
        super(UninstallTestCase, self).setUp()
        # undo the global state the install/uninstall runs mutate
        self.addCleanup(setattr, sys, 'stdout', sys.stdout)
        self.addCleanup(setattr, sys, 'stderr', sys.stderr)
        self.addCleanup(os.chdir, os.getcwd())
        self.addCleanup(enable_cache)
        self.root_dir = self.mkdtemp()
        disable_cache()

    def run_setup(self, *args):
        # invoke pysetup 'run' with the given arguments
        return main(['run'] + list(args))

    def get_path(self, dist, name):
        # resolve an installation directory ('purelib', ...) for *dist*
        cmd = install_dist(dist)
        cmd.prefix = self.root_dir
        cmd.finalize_options()
        return getattr(cmd, 'install_' + name)

    def make_dist(self, name='Foo', **kw):
        """Create a small project (a package with a subpackage) on disk."""
        kw['name'] = name
        kw.setdefault('version', '0.1')
        pkg = name.lower()
        project_dir, dist = self.create_dist(**kw)
        kw['pkg'] = pkg

        pkg_dir = os.path.join(project_dir, pkg)
        os.mkdir(pkg_dir)
        os.mkdir(os.path.join(pkg_dir, 'sub'))

        self.write_file((project_dir, 'setup.cfg'), SETUP_CFG % kw)
        self.write_file((pkg_dir, '__init__.py'), '#')
        self.write_file((pkg_dir, pkg + '_utils.py'), '#')
        self.write_file((pkg_dir, 'sub', '__init__.py'), '#')
        self.write_file((pkg_dir, 'sub', pkg + '_utils.py'), '#')

        return project_dir

    def install_dist(self, name='Foo', dirname=None, **kw):
        # build the project if no prebuilt directory was supplied
        if not dirname:
            dirname = self.make_dist(name, **kw)
        os.chdir(dirname)
        dist = self.run_setup('install_dist', '--prefix=' + self.root_dir)
        install_lib = self.get_path(dist, 'purelib')
        return dist, install_lib

    def test_uninstall_unknow_distribution(self):
        # removing a project that was never installed must raise
        self.assertRaises(PackagingError, remove, 'Foo',
                          paths=[self.root_dir])

    def test_uninstall(self):
        dist, install_lib = self.install_dist()
        for parts in (('foo', '__init__.py'),
                      ('foo', 'sub', '__init__.py'),
                      ('Foo-0.1.dist-info', 'RECORD')):
            self.assertIsFile(install_lib, *parts)
        remove('Foo', paths=[install_lib])
        self.assertIsNotFile(install_lib, 'foo', 'sub', '__init__.py')
        self.assertIsNotFile(install_lib, 'Foo-0.1.dist-info', 'RECORD')
+
+
def test_suite():
    """Return a suite with all tests from UninstallTestCase."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(UninstallTestCase)
+
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_unixccompiler.py b/Lib/packaging/tests/test_unixccompiler.py
new file mode 100644
index 0000000..16a1af3
--- /dev/null
+++ b/Lib/packaging/tests/test_unixccompiler.py
@@ -0,0 +1,132 @@
+"""Tests for packaging.unixccompiler."""
+import sys
+
+import sysconfig
+from packaging.compiler.unixccompiler import UnixCCompiler
+from packaging.tests import unittest
+
+
class UnixCCompilerTestCase(unittest.TestCase):
    """Check the linker flags emitted for runtime library paths.

    Each scenario fakes sys.platform and sysconfig.get_config_var;
    setUp/tearDown save and restore the real values.
    """

    def setUp(self):
        # save the globals the test monkeypatches
        self._backup_platform = sys.platform
        self._backup_get_config_var = sysconfig.get_config_var

        class CompilerWrapper(UnixCCompiler):
            # shortcut to the method under test, with a fixed path
            def rpath_foo(self):
                return self.runtime_library_dir_option('/foo')
        self.cc = CompilerWrapper()

    def tearDown(self):
        # restore the patched globals
        sys.platform = self._backup_platform
        sysconfig.get_config_var = self._backup_get_config_var

    @unittest.skipIf(sys.platform == 'win32', 'irrelevant on win32')
    def test_runtime_libdir_option(self):

        # Issue #5900: Ensure RUNPATH is added to extension
        # modules with RPATH if GNU ld is used

        # darwin
        sys.platform = 'darwin'
        self.assertEqual(self.cc.rpath_foo(), '-L/foo')

        # hp-ux
        sys.platform = 'hp-ux'
        old_gcv = sysconfig.get_config_var

        # non-gcc compiler on hp-ux
        def gcv(v):
            return 'xxx'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo'])

        def gcv(v):
            return 'gcc'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])

        def gcv(v):
            return 'g++'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])

        sysconfig.get_config_var = old_gcv

        # irix646
        sys.platform = 'irix646'
        self.assertEqual(self.cc.rpath_foo(), ['-rpath', '/foo'])

        # osf1V5
        sys.platform = 'osf1V5'
        self.assertEqual(self.cc.rpath_foo(), ['-rpath', '/foo'])

        # GCC GNULD
        sys.platform = 'bar'

        def gcv(v):
            if v == 'CC':
                return 'gcc'
            elif v == 'GNULD':
                return 'yes'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')

        # GCC non-GNULD
        sys.platform = 'bar'

        def gcv(v):
            if v == 'CC':
                return 'gcc'
            elif v == 'GNULD':
                return 'no'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo')

        # GCC GNULD with fully qualified configuration prefix
        # see #7617
        sys.platform = 'bar'

        def gcv(v):
            if v == 'CC':
                return 'x86_64-pc-linux-gnu-gcc-4.4.2'
            elif v == 'GNULD':
                return 'yes'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')

        # non-GCC GNULD
        sys.platform = 'bar'

        def gcv(v):
            if v == 'CC':
                return 'cc'
            elif v == 'GNULD':
                return 'yes'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-R/foo')

        # non-GCC non-GNULD
        sys.platform = 'bar'

        def gcv(v):
            if v == 'CC':
                return 'cc'
            elif v == 'GNULD':
                return 'no'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-R/foo')

        # AIX C/C++ linker
        sys.platform = 'aix'

        def gcv(v):
            return 'xxx'
        sysconfig.get_config_var = gcv
        self.assertEqual(self.cc.rpath_foo(), '-blibpath:/foo')
+
+
def test_suite():
    """Return a suite with all tests from UnixCCompilerTestCase."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(UnixCCompilerTestCase)
+
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_util.py b/Lib/packaging/tests/test_util.py
new file mode 100644
index 0000000..336086d
--- /dev/null
+++ b/Lib/packaging/tests/test_util.py
@@ -0,0 +1,928 @@
+"""Tests for packaging.util."""
+import os
+import sys
+import time
+import logging
+import tempfile
+import subprocess
+from io import StringIO
+
+from packaging.tests import support, unittest
+from packaging.errors import (
+    PackagingPlatformError, PackagingByteCompileError, PackagingFileError,
+    PackagingExecError, InstallationException)
+from packaging import util
+from packaging.util import (
+    convert_path, change_root, split_quoted, strtobool, rfc822_escape,
+    get_compiler_versions, _MAC_OS_X_LD_VERSION, byte_compile, find_packages,
+    spawn, get_pypirc_path, generate_pypirc, read_pypirc, resolve_name, iglob,
+    RICH_GLOB, egginfo_to_distinfo, is_setuptools, is_distutils, is_packaging,
+    get_install_method)
+
+
# sample .pypirc in the current multi-server format
PYPIRC = """\
[distutils]
index-servers =
    pypi
    server1

[pypi]
username:me
password:xxxx

[server1]
repository:http://example.com
username:tarek
password:secret
"""

# sample .pypirc in the legacy single-server format
PYPIRC_OLD = """\
[server-login]
username:tarek
password:secret
"""

# expected .pypirc content after regeneration for user 'tarek'
# (presumably written by generate_pypirc -- confirm against the tests)
WANTED = """\
[distutils]
index-servers =
    pypi

[pypi]
username:tarek
password:xxx
"""
+
+
class FakePopen:
    """Minimal stand-in for subprocess.Popen used by UtilTestCase.

    Only the executable name (first word of the command line) is
    looked at; its canned output comes from the owning test case's
    _exes mapping, so the real system is never invoked.
    """

    # set by UtilTestCase.setUp to the running test case instance
    test_class = None

    def __init__(self, cmd, shell, stdout, stderr):
        self.cmd = cmd.split()[0]
        known = self.test_class._exes
        # unknown executables get empty output, so version-detection
        # regexes never match instead of shelling out for real
        self.stdout = StringIO(known.get(self.cmd, ''))
        self.stderr = StringIO()
+
+
+class UtilTestCase(support.EnvironRestorer,
+                   support.TempdirManager,
+                   support.LoggingCatcher,
+                   unittest.TestCase):
+
+    restore_environ = ['HOME']
+
    def setUp(self):
        """Save global state and install test doubles.

        Patches HOME (so .pypirc tests write under a temp dir),
        os.uname (returns self._uname), util.find_executable and
        subprocess.Popen (so compiler-version probes never touch the
        real system).  tearDown restores everything saved here.
        """
        super(UtilTestCase, self).setUp()
        self.tmp_dir = self.mkdtemp()
        self.rc = os.path.join(self.tmp_dir, '.pypirc')
        os.environ['HOME'] = self.tmp_dir
        # saving the environment
        self.name = os.name
        self.platform = sys.platform
        self.version = sys.version
        self.sep = os.sep
        self.join = os.path.join
        self.isabs = os.path.isabs
        self.splitdrive = os.path.splitdrive
        #self._config_vars = copy(sysconfig._config_vars)

        # patching os.uname
        if hasattr(os, 'uname'):
            self.uname = os.uname
            self._uname = os.uname()
        else:
            # platform without os.uname (e.g. Windows)
            self.uname = None
            self._uname = None
        os.uname = self._get_uname

        # patching POpen
        self.old_find_executable = util.find_executable
        util.find_executable = self._find_executable
        self._exes = {}  # exe name -> canned output, see FakePopen
        self.old_popen = subprocess.Popen
        self.old_stdout = sys.stdout
        self.old_stderr = sys.stderr
        FakePopen.test_class = self
        subprocess.Popen = FakePopen
+
+    def tearDown(self):
+        # getting back the environment
+        os.name = self.name
+        sys.platform = self.platform
+        sys.version = self.version
+        os.sep = self.sep
+        os.path.join = self.join
+        os.path.isabs = self.isabs
+        os.path.splitdrive = self.splitdrive
+        if self.uname is not None:
+            os.uname = self.uname
+        else:
+            del os.uname
+        #sysconfig._config_vars = copy(self._config_vars)
+        util.find_executable = self.old_find_executable
+        subprocess.Popen = self.old_popen
+        sys.old_stdout = self.old_stdout
+        sys.old_stderr = self.old_stderr
+        super(UtilTestCase, self).tearDown()
+
    def _set_uname(self, uname):
        # let a test choose what the patched os.uname() returns
        self._uname = uname
+
    def _get_uname(self):
        # replacement for os.uname(), installed by setUp
        return self._uname
+
    def test_convert_path(self):
        # convert_path turns a '/'-separated pathname into one using the
        # native separator; os.sep and os.path.join are faked so both
        # branches run on any host.

        # linux/mac
        os.sep = '/'

        def _join(path):
            return '/'.join(path)
        os.path.join = _join

        self.assertEqual(convert_path('/home/to/my/stuff'),
                         '/home/to/my/stuff')

        # win
        os.sep = '\\'

        def _join(*path):
            return '\\'.join(path)
        os.path.join = _join

        # absolute paths and trailing slashes are rejected
        self.assertRaises(ValueError, convert_path, '/home/to/my/stuff')
        self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/')

        self.assertEqual(convert_path('home/to/my/stuff'),
                         'home\\to\\my\\stuff')
        self.assertEqual(convert_path('.'),
                         os.curdir)
+
    def test_change_root(self):
        # change_root prepends *root* to a pathname; os.name and the
        # os.path helpers are faked per platform so the test runs the
        # same everywhere.

        # linux/mac
        os.name = 'posix'

        def _isabs(path):
            return path[0] == '/'
        os.path.isabs = _isabs

        def _join(*path):
            return '/'.join(path)
        os.path.join = _join

        self.assertEqual(change_root('/root', '/old/its/here'),
                         '/root/old/its/here')
        self.assertEqual(change_root('/root', 'its/here'),
                         '/root/its/here')

        # windows
        os.name = 'nt'

        def _isabs(path):
            return path.startswith('c:\\')
        os.path.isabs = _isabs

        def _splitdrive(path):
            if path.startswith('c:'):
                return '', path.replace('c:', '')
            return '', path
        os.path.splitdrive = _splitdrive

        def _join(*path):
            return '\\'.join(path)
        os.path.join = _join

        self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'),
                         'c:\\root\\old\\its\\here')
        self.assertEqual(change_root('c:\\root', 'its\\here'),
                         'c:\\root\\its\\here')

        # BugsBunny os (it's a great os) -- unknown os.name must raise
        os.name = 'BugsBunny'
        self.assertRaises(PackagingPlatformError,
                          change_root, 'c:\\root', 'its\\here')

        # XXX platforms to be covered: os2, mac
+
    def test_split_quoted(self):
        # quoted and backslash-escaped words come back unquoted, split
        # on unescaped whitespace
        self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'),
                         ['one', 'two', 'three', 'four'])
+
+    def test_strtobool(self):
+        yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1')
+        no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N')
+
+        for y in yes:
+            self.assertTrue(strtobool(y))
+
+        for n in no:
+            self.assertFalse(strtobool(n))
+
+    def test_rfc822_escape(self):
+        header = 'I am a\npoor\nlonesome\nheader\n'
+        res = rfc822_escape(header)
+        wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s'
+                  'header%(8s)s') % {'8s': '\n' + 8 * ' '}
+        self.assertEqual(res, wanted)
+
+    def test_find_exe_version(self):
+        # the ld version scheme under MAC OS is:
+        #   ^@(#)PROGRAM:ld  PROJECT:ld64-VERSION
+        #
+        # where VERSION is a 2-digit number for major
+        # revisions. For instance under Leopard, it's
+        # currently 77
+        #
+        # Dots are used when branching is done.
+        #
+        # The SnowLeopard ld64 is currently 95.2.12
+
+        for output, version in (('@(#)PROGRAM:ld  PROJECT:ld64-77', '77'),
+                                ('@(#)PROGRAM:ld  PROJECT:ld64-95.2.12',
+                                 '95.2.12')):
+            result = _MAC_OS_X_LD_VERSION.search(output)
+            self.assertEqual(result.group(1), version)
+
+    def _find_executable(self, name):
+        if name in self._exes:
+            return name
+        return None
+
    def test_get_compiler_versions(self):
        # get_versions calls distutils.spawn.find_executable on
        # 'gcc', 'ld' and 'dllwrap'
        # (find_executable and Popen are faked by setUp, so an entry in
        # self._exes controls what each tool appears to print)
        self.assertEqual(get_compiler_versions(), (None, None, None))

        # Let's fake we have 'gcc' and it returns '3.4.5'
        self._exes['gcc'] = 'gcc (GCC) 3.4.5 (mingw special)\nFSF'
        res = get_compiler_versions()
        self.assertEqual(str(res[0]), '3.4.5')

        # and let's see what happens when the version
        # doesn't match the regular expression
        # (\d+\.\d+(\.\d+)*)
        self._exes['gcc'] = 'very strange output'
        res = get_compiler_versions()
        self.assertEqual(res[0], None)

        # same thing for ld
        # (the expected ld version format depends on the host platform:
        # GNU style off darwin, Apple ld64 style on darwin)
        if sys.platform != 'darwin':
            self._exes['ld'] = 'GNU ld version 2.17.50 20060824'
            res = get_compiler_versions()
            self.assertEqual(str(res[1]), '2.17.50')
            self._exes['ld'] = '@(#)PROGRAM:ld  PROJECT:ld64-77'
            res = get_compiler_versions()
            self.assertEqual(res[1], None)
        else:
            self._exes['ld'] = 'GNU ld version 2.17.50 20060824'
            res = get_compiler_versions()
            self.assertEqual(res[1], None)
            self._exes['ld'] = '@(#)PROGRAM:ld  PROJECT:ld64-77'
            res = get_compiler_versions()
            self.assertEqual(str(res[1]), '77')

        # and dllwrap
        self._exes['dllwrap'] = 'GNU dllwrap 2.17.50 20060824\nFSF'
        res = get_compiler_versions()
        self.assertEqual(str(res[2]), '2.17.50')
        self._exes['dllwrap'] = 'Cheese Wrap'
        res = get_compiler_versions()
        self.assertEqual(res[2], None)
+
+    @unittest.skipUnless(hasattr(sys, 'dont_write_bytecode'),
+                         'sys.dont_write_bytecode not supported')
+    def test_dont_write_bytecode(self):
+        # makes sure byte_compile raise a PackagingError
+        # if sys.dont_write_bytecode is True
+        old_dont_write_bytecode = sys.dont_write_bytecode
+        sys.dont_write_bytecode = True
+        try:
+            self.assertRaises(PackagingByteCompileError, byte_compile, [])
+        finally:
+            sys.dont_write_bytecode = old_dont_write_bytecode
+
    def test_newer(self):
        # a nonexistent source file raises instead of returning a guess
        self.assertRaises(PackagingFileError, util.newer, 'xxx', 'xxx')
        # sleep so the second file gets a strictly later mtime; the
        # files are kept on self so they stay alive until cleanup
        self.newer_f1 = self.mktempfile()
        time.sleep(1)
        self.newer_f2 = self.mktempfile()
        self.assertTrue(util.newer(self.newer_f2.name, self.newer_f1.name))
+
    def test_find_packages(self):
        # let's create a structure we want to scan:
        #
        #   pkg1
        #     __init__
        #     pkg2
        #       __init__
        #     pkg3
        #       __init__
        #       pkg6
        #           __init__
        #     pkg4    <--- not a pkg
        #       pkg8
        #          __init__
        #   pkg5
        #     __init__
        #
        root = self.mkdtemp()
        pkg1 = os.path.join(root, 'pkg1')
        os.mkdir(pkg1)
        self.write_file(os.path.join(pkg1, '__init__.py'))
        os.mkdir(os.path.join(pkg1, 'pkg2'))
        self.write_file(os.path.join(pkg1, 'pkg2', '__init__.py'))
        os.mkdir(os.path.join(pkg1, 'pkg3'))
        self.write_file(os.path.join(pkg1, 'pkg3', '__init__.py'))
        os.mkdir(os.path.join(pkg1, 'pkg3', 'pkg6'))
        self.write_file(os.path.join(pkg1, 'pkg3', 'pkg6', '__init__.py'))
        os.mkdir(os.path.join(pkg1, 'pkg4'))
        os.mkdir(os.path.join(pkg1, 'pkg4', 'pkg8'))
        self.write_file(os.path.join(pkg1, 'pkg4', 'pkg8', '__init__.py'))
        pkg5 = os.path.join(root, 'pkg5')
        os.mkdir(pkg5)
        self.write_file(os.path.join(pkg5, '__init__.py'))

        # 'pkg1.pkg2' is explicitly excluded; pkg4 has no __init__, so
        # neither it nor pkg8 below it is collected
        res = find_packages([root], ['pkg1.pkg2'])
        self.assertEqual(set(res), set(['pkg1', 'pkg5', 'pkg1.pkg3',
                                         'pkg1.pkg3.pkg6']))
+
    def test_resolve_name(self):
        # a builtin resolves to the object itself
        self.assertIs(str, resolve_name('builtins.str'))
        # a class, and an attribute nested inside a class
        self.assertEqual(
            UtilTestCase.__name__,
            resolve_name("packaging.tests.test_util.UtilTestCase").__name__)
        self.assertEqual(
            UtilTestCase.test_resolve_name.__name__,
            resolve_name("packaging.tests.test_util.UtilTestCase."
                         "test_resolve_name").__name__)

        # both a missing class and a missing attribute surface as
        # ImportError
        self.assertRaises(ImportError, resolve_name,
                          "packaging.tests.test_util.UtilTestCaseNot")
        self.assertRaises(ImportError, resolve_name,
                          "packaging.tests.test_util.UtilTestCase."
                          "nonexistent_attribute")
+
+    def test_import_nested_first_time(self):
+        tmp_dir = self.mkdtemp()
+        os.makedirs(os.path.join(tmp_dir, 'a', 'b'))
+        self.write_file(os.path.join(tmp_dir, 'a', '__init__.py'), '')
+        self.write_file(os.path.join(tmp_dir, 'a', 'b', '__init__.py'), '')
+        self.write_file(os.path.join(tmp_dir, 'a', 'b', 'c.py'),
+                                    'class Foo: pass')
+
+        try:
+            sys.path.append(tmp_dir)
+            resolve_name("a.b.c.Foo")
+            # assert nothing raised
+        finally:
+            sys.path.remove(tmp_dir)
+
+    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
+    def test_run_2to3_on_code(self):
+        content = "print 'test'"
+        converted_content = "print('test')"
+        file_handle = self.mktempfile()
+        file_name = file_handle.name
+        file_handle.write(content)
+        file_handle.flush()
+        file_handle.seek(0)
+        from packaging.util import run_2to3
+        run_2to3([file_name])
+        new_content = "".join(file_handle.read())
+        file_handle.close()
+        self.assertEqual(new_content, converted_content)
+
+    @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
+    def test_run_2to3_on_doctests(self):
+        # to check if text files containing doctests only get converted.
+        content = ">>> print 'test'\ntest\n"
+        converted_content = ">>> print('test')\ntest\n\n"
+        file_handle = self.mktempfile()
+        file_name = file_handle.name
+        file_handle.write(content)
+        file_handle.flush()
+        file_handle.seek(0)
+        from packaging.util import run_2to3
+        run_2to3([file_name], doctests_only=True)
+        new_content = "".join(file_handle.readlines())
+        file_handle.close()
+        self.assertEqual(new_content, converted_content)
+
+    @unittest.skipUnless(os.name in ('nt', 'posix'),
+                         'runs only under posix or nt')
+    def test_spawn(self):
+        # Do not patch subprocess on unix because
+        # packaging.util._spawn_posix uses it
+        if os.name in 'posix':
+            subprocess.Popen = self.old_popen
+        tmpdir = self.mkdtemp()
+
+        # creating something executable
+        # through the shell that returns 1
+        if os.name == 'posix':
+            exe = os.path.join(tmpdir, 'foo.sh')
+            self.write_file(exe, '#!/bin/sh\nexit 1')
+            os.chmod(exe, 0o777)
+        else:
+            exe = os.path.join(tmpdir, 'foo.bat')
+            self.write_file(exe, 'exit 1')
+
+        os.chmod(exe, 0o777)
+        self.assertRaises(PackagingExecError, spawn, [exe])
+
+        # now something that works
+        if os.name == 'posix':
+            exe = os.path.join(tmpdir, 'foo.sh')
+            self.write_file(exe, '#!/bin/sh\nexit 0')
+            os.chmod(exe, 0o777)
+        else:
+            exe = os.path.join(tmpdir, 'foo.bat')
+            self.write_file(exe, 'exit 0')
+
+        os.chmod(exe, 0o777)
+        spawn([exe])  # should work without any error
+
+    def test_server_registration(self):
+        # This test makes sure we know how to:
+        # 1. handle several sections in .pypirc
+        # 2. handle the old format
+
+        # new format
+        self.write_file(self.rc, PYPIRC)
+        config = read_pypirc()
+
+        config = sorted(config.items())
+        expected = [('password', 'xxxx'), ('realm', 'pypi'),
+                    ('repository', 'http://pypi.python.org/pypi'),
+                    ('server', 'pypi'), ('username', 'me')]
+        self.assertEqual(config, expected)
+
+        # old format
+        self.write_file(self.rc, PYPIRC_OLD)
+        config = read_pypirc()
+        config = sorted(config.items())
+        expected = [('password', 'secret'), ('realm', 'pypi'),
+                    ('repository', 'http://pypi.python.org/pypi'),
+                    ('server', 'server-login'), ('username', 'tarek')]
+        self.assertEqual(config, expected)
+
+    def test_server_empty_registration(self):
+        rc = get_pypirc_path()
+        self.assertFalse(os.path.exists(rc))
+        generate_pypirc('tarek', 'xxx')
+        self.assertTrue(os.path.exists(rc))
+        with open(rc) as f:
+            content = f.read()
+        self.assertEqual(content, WANTED)
+
+
class GlobTestCaseBase(support.TempdirManager,
                       support.LoggingCatcher,
                       unittest.TestCase):
    """Shared helpers for the glob tests: build a file tree from a spec
    mapping '/'-separated paths to expected-match flags."""

    def build_files_tree(self, files):
        """Create every path in *files* under a fresh temp dir.

        A trailing '/' denotes a directory; any other entry is created
        as a file with placeholder content.  Returns the temp dir.
        """
        tempdir = self.mkdtemp()
        for relpath in files:
            is_dir = relpath.endswith('/')
            abspath = os.path.join(tempdir, *relpath.split('/'))
            dirname = abspath if is_dir else os.path.dirname(abspath)
            if dirname and not os.path.exists(dirname):
                os.makedirs(dirname)
            if not is_dir:
                self.write_file(abspath, 'babar')
        return tempdir

    @staticmethod
    def os_dependent_path(path):
        """Convert a '/'-separated path to the native separator."""
        return os.path.join(*path.rstrip('/').split('/'))

    def clean_tree(self, spec):
        """Return native paths for the spec entries flagged as expected."""
        return [self.os_dependent_path(path)
                for path, included in spec.items() if included]
+
+
class GlobTestCase(GlobTestCaseBase):
    """Tests for iglob: simple shell patterns plus the '**' and '{a,b}'
    extensions."""

    def assertGlobMatch(self, glob, spec):
        """Assert that iglob(*glob*) returns exactly the paths of *spec*
        mapped to True (*spec* maps '/'-separated paths to include flags)."""
        tempdir = self.build_files_tree(spec)
        expected = self.clean_tree(spec)
        # restore the previous working directory when the test ends
        self.addCleanup(os.chdir, os.getcwd())
        os.chdir(tempdir)
        result = list(iglob(glob))
        self.assertCountEqual(expected, result)

    def test_regex_rich_glob(self):
        # RICH_GLOB captures the contents of each {...} group
        matches = RICH_GLOB.findall(
                                r"babar aime les {fraises} est les {huitres}")
        self.assertEqual(["fraises", "huitres"], matches)

    def test_simple_glob(self):
        glob = '*.tp?'
        spec = {'coucou.tpl': True,
                 'coucou.tpj': True,
                 'Donotwant': False}
        self.assertGlobMatch(glob, spec)

    def test_simple_glob_in_dir(self):
        glob = 'babar/*.tp?'
        spec = {'babar/coucou.tpl': True,
                 'babar/coucou.tpj': True,
                 'babar/toto.bin': False,
                 'Donotwant': False}
        self.assertGlobMatch(glob, spec)

    def test_recursive_glob_head(self):
        # leading '**' matches any number of leading directories
        glob = '**/tip/*.t?l'
        spec = {'babar/zaza/zuzu/tip/coucou.tpl': True,
                 'babar/z/tip/coucou.tpl': True,
                 'babar/tip/coucou.tpl': True,
                 'babar/zeop/tip/babar/babar.tpl': False,
                 'babar/z/tip/coucou.bin': False,
                 'babar/toto.bin': False,
                 'zozo/zuzu/tip/babar.tpl': True,
                 'zozo/tip/babar.tpl': True,
                 'Donotwant': False}
        self.assertGlobMatch(glob, spec)

    def test_recursive_glob_tail(self):
        # trailing '**' matches everything under the directory
        glob = 'babar/**'
        spec = {'babar/zaza/': True,
                'babar/zaza/zuzu/': True,
                'babar/zaza/zuzu/babar.xml': True,
                'babar/zaza/zuzu/toto.xml': True,
                'babar/zaza/zuzu/toto.csv': True,
                'babar/zaza/coucou.tpl': True,
                'babar/bubu.tpl': True,
                'zozo/zuzu/tip/babar.tpl': False,
                'zozo/tip/babar.tpl': False,
                'Donotwant': False}
        self.assertGlobMatch(glob, spec)

    def test_recursive_glob_middle(self):
        # '**' in the middle of a pattern
        glob = 'babar/**/tip/*.t?l'
        spec = {'babar/zaza/zuzu/tip/coucou.tpl': True,
                 'babar/z/tip/coucou.tpl': True,
                 'babar/tip/coucou.tpl': True,
                 'babar/zeop/tip/babar/babar.tpl': False,
                 'babar/z/tip/coucou.bin': False,
                 'babar/toto.bin': False,
                 'zozo/zuzu/tip/babar.tpl': False,
                 'zozo/tip/babar.tpl': False,
                 'Donotwant': False}
        self.assertGlobMatch(glob, spec)

    def test_glob_set_tail(self):
        # '{a,b,c}' alternation in the last component
        glob = 'bin/*.{bin,sh,exe}'
        spec = {'bin/babar.bin': True,
                 'bin/zephir.sh': True,
                 'bin/celestine.exe': True,
                 'bin/cornelius.bat': False,
                 'bin/cornelius.xml': False,
                 'toto/yurg': False,
                 'Donotwant': False}
        self.assertGlobMatch(glob, spec)

    def test_glob_set_middle(self):
        glob = 'xml/{babar,toto}.xml'
        spec = {'xml/babar.xml': True,
                 'xml/toto.xml': True,
                 'xml/babar.xslt': False,
                 'xml/cornelius.sgml': False,
                 'xml/zephir.xml': False,
                 'toto/yurg.xml': False,
                 'Donotwant': False}
        self.assertGlobMatch(glob, spec)

    def test_glob_set_head(self):
        glob = '{xml,xslt}/babar.*'
        spec = {'xml/babar.xml': True,
                 'xml/toto.xml': False,
                 'xslt/babar.xslt': True,
                 'xslt/toto.xslt': False,
                 'toto/yurg.xml': False,
                 'Donotwant': False}
        self.assertGlobMatch(glob, spec)

    def test_glob_all(self):
        # combination of alternation, '*' and '**'
        glob = '{xml/*,xslt/**}/babar.xml'
        spec = {'xml/a/babar.xml': True,
                 'xml/b/babar.xml': True,
                 'xml/a/c/babar.xml': False,
                 'xslt/a/babar.xml': True,
                 'xslt/b/babar.xml': True,
                 'xslt/a/c/babar.xml': True,
                 'toto/yurg.xml': False,
                 'Donotwant': False}
        self.assertGlobMatch(glob, spec)

    def test_invalid_glob_pattern(self):
        # iglob must reject malformed '**' and '{...}' patterns
        invalids = [
            'ppooa**',
            'azzaeaz4**/',
            '/**ddsfs',
            '**##1e"&e',
            'DSFb**c009',
            '{',
            '{aaQSDFa',
            '}',
            'aQSDFSaa}',
            '{**a,',
            ',**a}',
            '{a**,',
            ',b**}',
            '{a**a,babar}',
            '{bob,b**z}',
        ]
        msg = "%r is not supposed to be a valid pattern"
        for pattern in invalids:
            try:
                iglob(pattern)
            except ValueError:
                continue
            else:
                self.fail(msg % pattern)
+
+
class EggInfoToDistInfoTestCase(support.TempdirManager,
                                support.LoggingCatcher,
                                unittest.TestCase):
    """Tests for the egginfo_to_distinfo conversion helper."""

    def get_metadata_file_paths(self, distinfo_path):
        """Return the metadata file paths a .dist-info dir must contain."""
        req_metadata_files = ['METADATA', 'RECORD', 'INSTALLER']
        return [os.path.join(distinfo_path, metadata_file)
                for metadata_file in req_metadata_files]

    def test_egginfo_to_distinfo_setuptools(self):
        """Convert a setuptools-style egg-info *directory* to dist-info."""
        distinfo = 'hello-0.1.1-py3.3.dist-info'
        egginfo = 'hello-0.1.1-py3.3.egg-info'
        dirs = [egginfo]
        files = ['hello.py', 'hello.pyc']
        extra_metadata = ['dependency_links.txt', 'entry_points.txt',
                          'not-zip-safe', 'PKG-INFO', 'top_level.txt',
                          'SOURCES.txt']
        for f in extra_metadata:
            files.append(os.path.join(egginfo, f))

        tempdir, record_file = self.build_dist_tree(files, dirs)
        distinfo_path = os.path.join(tempdir, distinfo)
        egginfo_path = os.path.join(tempdir, egginfo)
        metadata_file_paths = self.get_metadata_file_paths(distinfo_path)

        egginfo_to_distinfo(record_file)
        # test that directories and files get created
        self.assertTrue(os.path.isdir(distinfo_path))
        self.assertTrue(os.path.isdir(egginfo_path))

        for mfile in metadata_file_paths:
            self.assertTrue(os.path.isfile(mfile))

    def test_egginfo_to_distinfo_distutils(self):
        """Convert a distutils-style egg-info *file* to dist-info."""
        distinfo = 'hello-0.1.1-py3.3.dist-info'
        egginfo = 'hello-0.1.1-py3.3.egg-info'
        # egginfo is a file in distutils which contains the metadata
        files = ['hello.py', 'hello.pyc', egginfo]

        tempdir, record_file = self.build_dist_tree(files, dirs=[])
        distinfo_path = os.path.join(tempdir, distinfo)
        egginfo_path = os.path.join(tempdir, egginfo)
        metadata_file_paths = self.get_metadata_file_paths(distinfo_path)

        egginfo_to_distinfo(record_file)
        # test that directories and files get created
        self.assertTrue(os.path.isdir(distinfo_path))
        self.assertTrue(os.path.isfile(egginfo_path))

        for mfile in metadata_file_paths:
            self.assertTrue(os.path.isfile(mfile))

    def build_dist_tree(self, files, dirs):
        """Create *dirs* and *files* under a temp dir and write a RECORD
        file listing all of them.

        Each file is written containing its own relative name.  Returns a
        (tempdir, record_file_path) tuple.
        """
        tempdir = self.mkdtemp()
        record_file_path = os.path.join(tempdir, 'RECORD')
        file_paths, dir_paths = [], []
        for d in dirs:
            path = os.path.join(tempdir, d)
            os.makedirs(path)
            dir_paths.append(path)
        for f in files:
            path = os.path.join(tempdir, f)
            # context managers ensure the handles are closed even if a
            # write fails (the original leaked handles on error)
            with open(path, 'w') as fp:
                fp.write(f)
            file_paths.append(path)

        with open(record_file_path, 'w') as record_file:
            for path in file_paths + dir_paths:
                record_file.write(path + '\n')

        return tempdir, record_file_path
+
+
class PackagingLibChecks(support.TempdirManager,
                         support.LoggingCatcher,
                         unittest.TestCase):
    """Tests for detecting how a source tree is packaged.

    Exercises is_setuptools, is_distutils, is_packaging and
    get_install_method, including the log messages they emit.
    """

    def setUp(self):
        super(PackagingLibChecks, self).setUp()
        # fixture: a directory with no packaging-related files at all
        self._empty_dir = self.mkdtemp()

    def test_empty_package_is_not_based_on_anything(self):
        self.assertFalse(is_setuptools(self._empty_dir))
        self.assertFalse(is_distutils(self._empty_dir))
        self.assertFalse(is_packaging(self._empty_dir))

    def test_setup_py_importing_setuptools_is_setuptools_based(self):
        self.assertTrue(is_setuptools(self._setuptools_setup_py_pkg()))

    def test_egg_info_dir_and_setup_py_is_setuptools_based(self):
        self.assertTrue(is_setuptools(self._setuptools_egg_info_pkg()))

    def test_egg_info_and_non_setuptools_setup_py_is_setuptools_based(self):
        # an egg-info dir alone is enough, even without setuptools imports
        self.assertTrue(is_setuptools(self._egg_info_with_no_setuptools()))

    def test_setup_py_not_importing_setuptools_is_not_setuptools_based(self):
        self.assertFalse(is_setuptools(self._random_setup_py_pkg()))

    def test_setup_py_importing_distutils_is_distutils_based(self):
        self.assertTrue(is_distutils(self._distutils_setup_py_pkg()))

    def test_pkg_info_file_and_setup_py_is_distutils_based(self):
        self.assertTrue(is_distutils(self._distutils_pkg_info()))

    def test_pkg_info_and_non_distutils_setup_py_is_distutils_based(self):
        # a PKG-INFO file alone is enough, even without distutils imports
        self.assertTrue(is_distutils(self._pkg_info_with_no_distutils()))

    def test_setup_py_not_importing_distutils_is_not_distutils_based(self):
        self.assertFalse(is_distutils(self._random_setup_py_pkg()))

    def test_setup_cfg_with_no_metadata_section_is_not_packaging_based(self):
        self.assertFalse(is_packaging(self._setup_cfg_with_no_metadata_pkg()))

    def test_setup_cfg_with_valid_metadata_section_is_packaging_based(self):
        self.assertTrue(is_packaging(self._valid_setup_cfg_pkg()))

    def test_setup_cfg_and_invalid_setup_cfg_is_not_packaging_based(self):
        self.assertFalse(is_packaging(self._invalid_setup_cfg_pkg()))

    def test_get_install_method_with_setuptools_pkg(self):
        path = self._setuptools_setup_py_pkg()
        self.assertEqual("setuptools", get_install_method(path))

    def test_get_install_method_with_distutils_pkg(self):
        path = self._distutils_pkg_info()
        self.assertEqual("distutils", get_install_method(path))

    def test_get_install_method_with_packaging_pkg(self):
        path = self._valid_setup_cfg_pkg()
        self.assertEqual("packaging", get_install_method(path))

    def test_get_install_method_with_unknown_pkg(self):
        # a tree matching none of the known layouts must raise
        path = self._invalid_setup_cfg_pkg()
        self.assertRaises(InstallationException, get_install_method, path)

    def test_is_setuptools_logs_setup_py_text_found(self):
        is_setuptools(self._setuptools_setup_py_pkg())
        expected = ['setup.py file found', 'found setuptools text in setup.py']
        self.assertEqual(expected, self.get_logs(logging.INFO))

    def test_is_setuptools_logs_setup_py_text_not_found(self):
        directory = self._random_setup_py_pkg()
        is_setuptools(directory)
        info_expected = ['setup.py file found']
        warn_expected = ['no egg-info directory found',
                         'no setuptools text found in setup.py']
        self.assertEqual(info_expected, self.get_logs(logging.INFO))
        self.assertEqual(warn_expected, self.get_logs(logging.WARN))

    def test_is_setuptools_logs_egg_info_dir_found(self):
        is_setuptools(self._setuptools_egg_info_pkg())
        expected = ['setup.py file found', 'found egg-info directory']
        self.assertEqual(expected, self.get_logs(logging.INFO))

    def test_is_distutils_logs_setup_py_text_found(self):
        is_distutils(self._distutils_setup_py_pkg())
        expected = ['setup.py file found', 'found distutils text in setup.py']
        self.assertEqual(expected, self.get_logs(logging.INFO))

    def test_is_distutils_logs_setup_py_text_not_found(self):
        directory = self._random_setup_py_pkg()
        is_distutils(directory)
        info_expected = ['setup.py file found']
        warn_expected = ['no PKG-INFO file found',
                         'no distutils text found in setup.py']
        self.assertEqual(info_expected, self.get_logs(logging.INFO))
        self.assertEqual(warn_expected, self.get_logs(logging.WARN))

    def test_is_distutils_logs_pkg_info_file_found(self):
        is_distutils(self._distutils_pkg_info())
        expected = ['setup.py file found', 'PKG-INFO file found']
        self.assertEqual(expected, self.get_logs(logging.INFO))

    def test_is_packaging_logs_setup_cfg_found(self):
        is_packaging(self._valid_setup_cfg_pkg())
        expected = ['setup.cfg file found']
        self.assertEqual(expected, self.get_logs(logging.INFO))

    def test_is_packaging_logs_setup_cfg_not_found(self):
        is_packaging(self._empty_dir)
        expected = ['no setup.cfg file found']
        self.assertEqual(expected, self.get_logs(logging.WARN))

    # -- helpers below build fixture project trees --------------------
    # NOTE(review): write_file is called with both tuples and lists of
    # path parts; it appears to accept either — confirm in
    # support.TempdirManager and unify the style.

    def _write_setuptools_setup_py(self, directory):
        self.write_file((directory, 'setup.py'),
                "from setuptools import setup")

    def _write_distutils_setup_py(self, directory):
        self.write_file([directory, 'setup.py'],
                "from distutils.core import setup")

    def _write_packaging_setup_cfg(self, directory):
        self.write_file([directory, 'setup.cfg'],
                        ("[metadata]\n"
                         "name = mypackage\n"
                         "version = 0.1.0\n"))

    def _setuptools_setup_py_pkg(self):
        tmp = self.mkdtemp()
        self._write_setuptools_setup_py(tmp)
        return tmp

    def _distutils_setup_py_pkg(self):
        tmp = self.mkdtemp()
        self._write_distutils_setup_py(tmp)
        return tmp

    def _valid_setup_cfg_pkg(self):
        tmp = self.mkdtemp()
        self._write_packaging_setup_cfg(tmp)
        return tmp

    def _setuptools_egg_info_pkg(self):
        tmp = self.mkdtemp()
        self._write_setuptools_setup_py(tmp)
        tempfile.mkdtemp(suffix='.egg-info', dir=tmp)
        return tmp

    def _distutils_pkg_info(self):
        tmp = self._distutils_setup_py_pkg()
        self.write_file([tmp, 'PKG-INFO'], '')
        return tmp

    def _setup_cfg_with_no_metadata_pkg(self):
        tmp = self.mkdtemp()
        self.write_file([tmp, 'setup.cfg'],
                        ("[othersection]\n"
                         "foo = bar\n"))
        return tmp

    def _invalid_setup_cfg_pkg(self):
        # has a [metadata] section but with bogus fields
        tmp = self.mkdtemp()
        self.write_file([tmp, 'setup.cfg'],
                        ("[metadata]\n"
                         "name = john\n"
                         "last_name = doe\n"))
        return tmp

    def _egg_info_with_no_setuptools(self):
        tmp = self._random_setup_py_pkg()
        tempfile.mkdtemp(suffix='.egg-info', dir=tmp)
        return tmp

    def _pkg_info_with_no_distutils(self):
        tmp = self._random_setup_py_pkg()
        self.write_file([tmp, 'PKG-INFO'], '')
        return tmp

    def _random_setup_py_pkg(self):
        tmp = self.mkdtemp()
        self.write_file((tmp, 'setup.py'), "from mypackage import setup")
        return tmp
+
+
def test_suite():
    """Aggregate all test cases from this module into one suite."""
    suite = unittest.TestSuite()
    for case in (UtilTestCase, GlobTestCase,
                 EggInfoToDistInfoTestCase, PackagingLibChecks):
        suite.addTest(unittest.makeSuite(case))
    return suite


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/tests/test_version.py b/Lib/packaging/tests/test_version.py
new file mode 100644
index 0000000..f94c800
--- /dev/null
+++ b/Lib/packaging/tests/test_version.py
@@ -0,0 +1,252 @@
+"""Tests for packaging.version."""
+import doctest
+import os
+
+from packaging.version import NormalizedVersion as V
+from packaging.version import HugeMajorVersionNumError, IrrationalVersionError
+from packaging.version import suggest_normalized_version as suggest
+from packaging.version import VersionPredicate
+from packaging.tests import unittest
+
+
class VersionTestCase(unittest.TestCase):
    """Tests for NormalizedVersion, suggest_normalized_version and
    VersionPredicate."""

    # (version object, expected canonical string) pairs
    versions = ((V('1.0'), '1.0'),
                (V('1.1'), '1.1'),
                (V('1.2.3'), '1.2.3'),
                (V('1.2'), '1.2'),
                (V('1.2.3a4'), '1.2.3a4'),
                (V('1.2c4'), '1.2c4'),
                (V('1.2.3.4'), '1.2.3.4'),
                (V('1.2.3.4.0b3'), '1.2.3.4b3'),
                (V('1.2.0.0.0'), '1.2'),
                (V('1.0.dev345'), '1.0.dev345'),
                (V('1.0.post456.dev623'), '1.0.post456.dev623'))

    def test_repr(self):

        self.assertEqual(repr(V('1.0')), "NormalizedVersion('1.0')")

    def test_basic_versions(self):

        # str() must give the canonical form
        for v, s in self.versions:
            self.assertEqual(str(v), s)

    def test_hash(self):

        # equal versions must hash equal
        for v, s in self.versions:
            self.assertEqual(hash(v), hash(V(s)))

        versions = set([v for v, s in self.versions])
        for v, s in self.versions:
            self.assertIn(v, versions)

        self.assertEqual(set([V('1.0')]), set([V('1.0'), V('1.0')]))

    def test_from_parts(self):

        for v, s in self.versions:
            # NOTE(review): 'parts' is unused; from_parts re-derives it below
            parts = v.parts
            v2 = V.from_parts(*v.parts)
            self.assertEqual(v, v2)
            self.assertEqual(str(v), str(v2))

    def test_irrational_versions(self):

        # strings that must be rejected by the strict parser
        irrational = ('1', '1.2a', '1.2.3b', '1.02', '1.2a03',
                      '1.2a3.04', '1.2.dev.2', '1.2dev', '1.2.dev',
                      '1.2.dev2.post2', '1.2.post2.dev3.post4')

        for s in irrational:
            self.assertRaises(IrrationalVersionError, V, s)

    def test_huge_version(self):

        # major numbers above 1980 are rejected unless explicitly allowed
        self.assertEqual(str(V('1980.0')), '1980.0')
        self.assertRaises(HugeMajorVersionNumError, V, '1981.0')
        self.assertEqual(str(V('1981.0', error_on_huge_major_num=False)),
                         '1981.0')

    def test_comparison(self):
        # NOTE(review): script_from_examples only *converts* the examples
        # to a script; it does not appear to execute them, so these
        # comparisons may never actually run — confirm and consider
        # switching to doctest.run_docstring_examples.
        comparison_doctest_string = r"""
        >>> V('1.2.0') == '1.2'
        Traceback (most recent call last):
        ...
        TypeError: cannot compare NormalizedVersion and str

        >>> V('1.2') < '1.3'
        Traceback (most recent call last):
        ...
        TypeError: cannot compare NormalizedVersion and str

        >>> V('1.2.0') == V('1.2')
        True
        >>> V('1.2.0') == V('1.2.3')
        False
        >>> V('1.2.0') != V('1.2.3')
        True
        >>> V('1.2.0') < V('1.2.3')
        True
        >>> V('1.2.0') < V('1.2.0')
        False
        >>> V('1.2.0') <= V('1.2.0')
        True
        >>> V('1.2.0') <= V('1.2.3')
        True
        >>> V('1.2.3') <= V('1.2.0')
        False
        >>> V('1.2.0') >= V('1.2.0')
        True
        >>> V('1.2.3') >= V('1.2.0')
        True
        >>> V('1.2.0') >= V('1.2.3')
        False
        >>> (V('1.0') > V('1.0b2'))
        True
        >>> (V('1.0') > V('1.0c2') > V('1.0c1') > V('1.0b2') > V('1.0b1')
        ...  > V('1.0a2') > V('1.0a1'))
        True
        >>> (V('1.0.0') > V('1.0.0c2') > V('1.0.0c1') > V('1.0.0b2') > V('1.0.0b1')
        ...  > V('1.0.0a2') > V('1.0.0a1'))
        True

        >>> V('1.0') < V('1.0.post456.dev623')
        True

        >>> V('1.0.post456.dev623') < V('1.0.post456')  < V('1.0.post1234')
        True

        >>> (V('1.0a1')
        ...  < V('1.0a2.dev456')
        ...  < V('1.0a2')
        ...  < V('1.0a2.1.dev456')  # e.g. need to do a quick post release on 1.0a2
        ...  < V('1.0a2.1')
        ...  < V('1.0b1.dev456')
        ...  < V('1.0b2')
        ...  < V('1.0c1.dev456')
        ...  < V('1.0c1')
        ...  < V('1.0.dev7')
        ...  < V('1.0.dev18')
        ...  < V('1.0.dev456')
        ...  < V('1.0.dev1234')
        ...  < V('1.0')
        ...  < V('1.0.post456.dev623')  # development version of a post release
        ...  < V('1.0.post456'))
        True
        """
        doctest.script_from_examples(comparison_doctest_string)

    def test_suggest_normalized_version(self):

        self.assertEqual(suggest('1.0'), '1.0')
        self.assertEqual(suggest('1.0-alpha1'), '1.0a1')
        self.assertEqual(suggest('1.0c2'), '1.0c2')
        self.assertEqual(suggest('walla walla washington'), None)
        self.assertEqual(suggest('2.4c1'), '2.4c1')
        self.assertEqual(suggest('v1.0'), '1.0')

        # from setuptools
        self.assertEqual(suggest('0.4a1.r10'), '0.4a1.post10')
        self.assertEqual(suggest('0.7a1dev-r66608'), '0.7a1.dev66608')
        self.assertEqual(suggest('0.6a9.dev-r41475'), '0.6a9.dev41475')
        self.assertEqual(suggest('2.4preview1'), '2.4c1')
        self.assertEqual(suggest('2.4pre1'), '2.4c1')
        self.assertEqual(suggest('2.1-rc2'), '2.1c2')

        # from pypi
        self.assertEqual(suggest('0.1dev'), '0.1.dev0')
        self.assertEqual(suggest('0.1.dev'), '0.1.dev0')

        # we want to be able to parse Twisted
        # development versions are like post releases in Twisted
        self.assertEqual(suggest('9.0.0+r2363'), '9.0.0.post2363')

        # pre-releases are using markers like "pre1"
        self.assertEqual(suggest('9.0.0pre1'), '9.0.0c1')

        # we want to be able to parse Tcl-TK
        # they use "p1" "p2" for post releases
        self.assertEqual(suggest('1.4p1'), '1.4.post1')

    def test_predicate(self):
        # VersionPredicate knows how to parse stuff like:
        #
        #   Project (>=version, ver2)

        predicates = ('zope.interface (>3.5.0)',
                      'AnotherProject (3.4)',
                      'OtherProject (<3.0)',
                      'NoVersion',
                      'Hey (>=2.5,<2.7)')

        # all of the above must parse without raising
        for predicate in predicates:
            v = VersionPredicate(predicate)

        self.assertTrue(VersionPredicate('Hey (>=2.5,<2.7)').match('2.6'))
        self.assertTrue(VersionPredicate('Ho').match('2.6'))
        self.assertFalse(VersionPredicate('Hey (>=2.5,!=2.6,<2.7)').match('2.6'))
        self.assertTrue(VersionPredicate('Ho (<3.0)').match('2.6'))
        self.assertTrue(VersionPredicate('Ho (<3.0,!=2.5)').match('2.6.0'))
        self.assertFalse(VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.0'))
        self.assertTrue(VersionPredicate('Ho (2.5)').match('2.5.4'))
        self.assertFalse(VersionPredicate('Ho (!=2.5)').match('2.5.2'))
        self.assertTrue(VersionPredicate('Hey (<=2.5)').match('2.5.9'))
        self.assertFalse(VersionPredicate('Hey (<=2.5)').match('2.6.0'))
        self.assertTrue(VersionPredicate('Hey (>=2.5)').match('2.5.1'))

        self.assertRaises(ValueError, VersionPredicate, '')

        self.assertTrue(VersionPredicate('Hey 2.5').match('2.5.1'))

        # XXX need to silence the micro version in this case
        self.assertFalse(VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.3'))

        # Make sure a predicate that ends with a number works
        self.assertTrue(VersionPredicate('virtualenv5 (1.0)').match('1.0'))
        self.assertTrue(VersionPredicate('virtualenv5').match('1.0'))
        self.assertTrue(VersionPredicate('vi5two').match('1.0'))
        self.assertTrue(VersionPredicate('5two').match('1.0'))
        self.assertTrue(VersionPredicate('vi5two 1.0').match('1.0'))
        self.assertTrue(VersionPredicate('5two 1.0').match('1.0'))

        # test repr
        for predicate in predicates:
            self.assertEqual(str(VersionPredicate(predicate)), predicate)

    def test_predicate_name(self):
        # Test that names are parsed the right way

        self.assertEqual('Hey', VersionPredicate('Hey (<1.1)').name)
        self.assertEqual('Foo-Bar', VersionPredicate('Foo-Bar (1.1)').name)
        self.assertEqual('Foo Bar', VersionPredicate('Foo Bar (1.1)').name)

    def test_is_final(self):
        # NormalizedVersion knows whether a version is a final one or not.
        final_versions = ('1.0', '1.0.post456')
        other_versions = ('1.0.dev1', '1.0a2', '1.0c3')

        for version in final_versions:
            self.assertTrue(V(version).is_final)
        for version in other_versions:
            self.assertFalse(V(version).is_final)
+
+
class VersionWhiteBoxTestCase(unittest.TestCase):
    """White-box checks for NormalizedVersion internals."""

    def test_parse_numdots(self):
        # For code coverage completeness, as pad_zeros_length can't be set or
        # influenced from the public interface
        result = V('1.0')._parse_numdots('1.0', '1.0', pad_zeros_length=3)
        self.assertEqual(result, [1, 0, 0])
+
+
def test_suite():
    """Build the suite for this module.

    A doctest suite for README.txt could be added here once that file
    exists (doctest.DocFileSuite).
    """
    cases = [unittest.makeSuite(VersionTestCase),
             unittest.makeSuite(VersionWhiteBoxTestCase)]
    return unittest.TestSuite(cases)

if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
diff --git a/Lib/packaging/util.py b/Lib/packaging/util.py
new file mode 100644
index 0000000..486e2da
--- /dev/null
+++ b/Lib/packaging/util.py
@@ -0,0 +1,1451 @@
+"""packaging.util
+Miscellaneous utility functions.
+"""
+import errno
+import csv
+import hashlib
+import os
+import sys
+import re
+import shutil
+import string
+import tarfile
+import zipfile
+import posixpath
+import sysconfig
+import subprocess
+from copy import copy
+from glob import iglob as std_iglob
+from fnmatch import fnmatchcase
+from inspect import getsource
+from configparser import RawConfigParser
+
+from packaging import logger
+from packaging.errors import (PackagingPlatformError, PackagingFileError,
+                              PackagingByteCompileError, PackagingExecError,
+                              InstallationException, PackagingInternalError)
+
# Cached platform identifier; managed via get_platform()/set_platform().
_PLATFORM = None
# Default installer name; presumably recorded in install metadata —
# TODO confirm against the install commands.
_DEFAULT_INSTALLER = 'packaging'
+
+
def newer(source, target):
    """Return True if *source* is more recently modified than *target*.

    Also return True when *source* exists but *target* does not.
    Raise PackagingFileError when *source* itself is missing.

    Note that mtime granularity means files created within the same
    second compare as equally old.
    """
    if not os.path.exists(source):
        raise PackagingFileError("file '%s' does not exist" %
                                 os.path.abspath(source))
    if not os.path.exists(target):
        return True

    source_mtime = os.stat(source).st_mtime
    target_mtime = os.stat(target).st_mtime
    return source_mtime > target_mtime
+
+
def get_platform():
    """Return a string identifying the current platform.

    Defaults to sysconfig.get_platform(), computed lazily and cached;
    the value can be overridden with set_platform().
    """
    global _PLATFORM
    if _PLATFORM is not None:
        return _PLATFORM
    _PLATFORM = sysconfig.get_platform()
    return _PLATFORM
+
+
def set_platform(identifier):
    """Set the platform string identifier returned by get_platform().

    Note that this change doesn't impact the value returned by
    sysconfig.get_platform(); it is local to packaging.
    """
    global _PLATFORM
    # overwrite the module-level cache read by get_platform()
    _PLATFORM = identifier
+
+
def convert_path(pathname):
    """Return 'pathname' as a path valid on the native filesystem.

    Filenames in setup scripts are always written in Unix style with '/'
    separators; split on '/' and rejoin with the local separator, dropping
    any '.' components.  On non-Unix-ish systems, raise ValueError when
    'pathname' starts or ends with a slash.
    """
    # On Unix-like systems (or for an empty path) nothing needs converting.
    if os.sep == '/' or not pathname:
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    parts = [part for part in pathname.split('/') if part != os.curdir]
    if not parts:
        return os.curdir
    return os.path.join(*parts)
+
+
def change_root(new_root, pathname):
    """Return 'pathname' with 'new_root' prepended.

    A relative 'pathname' is simply joined onto 'new_root'; an absolute one
    is first made relative, which is platform-dependent (drive letters are
    stripped on DOS/Windows and OS/2).  Raise PackagingPlatformError on
    platforms we know nothing about.
    """
    if os.name == 'posix':
        if os.path.isabs(pathname):
            pathname = pathname[1:]
        return os.path.join(new_root, pathname)

    if os.name == 'nt':
        path = os.path.splitdrive(pathname)[1]
        if path[0] == '\\':
            path = path[1:]
        return os.path.join(new_root, path)

    if os.name == 'os2':
        path = os.path.splitdrive(pathname)[1]
        if path[0] == os.sep:
            path = path[1:]
        return os.path.join(new_root, path)

    raise PackagingPlatformError("nothing known about "
                                 "platform '%s'" % os.name)
+
# One-shot guard: set to True after check_environ() has run once.
_environ_checked = False
+
+
def check_environ():
    """Ensure that 'os.environ' has all the environment variables needed.

    We guarantee that users can use these in config files, command-line
    options, etc.  Currently this includes:
      HOME - user's home directory (Unix only)
      PLAT - description of the current platform, including hardware
             and OS (see 'get_platform()')

    The work is done at most once per process.
    """
    global _environ_checked
    if _environ_checked:
        return

    if os.name == 'posix' and 'HOME' not in os.environ:
        # fall back to the password database for the home directory
        import pwd
        os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]

    if 'PLAT' not in os.environ:
        os.environ['PLAT'] = sysconfig.get_platform()

    _environ_checked = True
+
+
def subst_vars(s, local_vars):
    """Perform shell/Perl-style variable substitution on *s*.

    Every '$name' occurrence is replaced by local_vars['name'] when
    present, falling back to os.environ (which check_environ() first
    augments with guaranteed values).  Raise ValueError for variables
    found in neither place.
    """
    check_environ()

    def _lookup(match):
        name = match.group(1)
        try:
            return str(local_vars[name])
        except KeyError:
            return os.environ[name]

    try:
        return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _lookup, s)
    except KeyError as var:
        raise ValueError("invalid variable '$%s'" % var)
+
+
# Regexes needed by 'split_quoted()'; compiled lazily by _init_regex().
_wordchars_re = _squote_re = _dquote_re = None
+
+
def _init_regex():
    """Lazily compile the regexes used by split_quoted()."""
    global _wordchars_re, _squote_re, _dquote_re
    # a run of characters that are not quotes, backslashes or whitespace
    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
    # a singly-quoted string, allowing backslash escapes inside
    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
    # a doubly-quoted string, allowing backslash escapes inside
    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
+
+
def split_quoted(s):
    """Split a string up according to Unix shell-like rules for quotes and
    backslashes.

    In short: words are delimited by spaces, as long as those
    spaces are not escaped by a backslash, or inside a quoted string.
    Single and double quotes are equivalent, and the quote characters can
    be backslash-escaped.  The backslash is stripped from any two-character
    escape sequence, leaving only the escaped character.  The quote
    characters are stripped from any quoted string.  Returns a list of
    words.
    """
    # This is a nice algorithm for splitting up a single string, since it
    # doesn't require character-by-character examination.  It was a little
    # bit of a brain-bender to get it working right, though...
    if _wordchars_re is None:
        _init_regex()

    s = s.strip()
    words = []
    # 'pos' is the index in 's' up to which we have already scanned;
    # everything before it is known to be plain word characters
    pos = 0

    while s:
        m = _wordchars_re.match(s, pos)
        end = m.end()
        if end == len(s):
            # the rest of the string is one unbroken word
            words.append(s[:end])
            break

        if s[end] in string.whitespace:  # unescaped, unquoted whitespace: now
            words.append(s[:end])        # we definitely have a word delimiter
            s = s[end:].lstrip()
            pos = 0

        elif s[end] == '\\':             # preserve whatever is being escaped;
                                         # will become part of the current word
            s = s[:end] + s[end + 1:]
            pos = end + 1

        else:
            if s[end] == "'":            # slurp singly-quoted string
                m = _squote_re.match(s, end)
            elif s[end] == '"':          # slurp doubly-quoted string
                m = _dquote_re.match(s, end)
            else:
                raise RuntimeError("this can't happen "
                                   "(bad char '%c')" % s[end])

            if m is None:
                raise ValueError("bad string (mismatched %s quotes?)" % s[end])

            # drop the two surrounding quote characters, keep the contents
            beg, end = m.span()
            s = s[:beg] + s[beg + 1:end - 1] + s[end:]
            pos = m.end() - 2

        if pos >= len(s):
            words.append(s)
            break

    return words
+
+
def execute(func, args, msg=None, verbose=0, dry_run=False):
    """Call func(*args), logging *msg*, unless *dry_run* is true.

    Actions that affect the outside world (e.g. writing to the
    filesystem) should be routed through this helper so they are skipped
    under dry-run.  When *msg* is None it defaults to a repr of the call.
    """
    if msg is None:
        msg = "%s%r" % (func.__name__, args)
        # correct for singleton tuple: "f(1,)" -> "f(1)"
        if msg.endswith(',)'):
            msg = msg[:-2] + ')'

    logger.info(msg)
    if not dry_run:
        func(*args)
+
+
def strtobool(val):
    """Convert a string representation of truth to True or False.

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0' (case-insensitive).
    Raise ValueError for anything else.
    """
    lowered = val.lower()
    if lowered in {'y', 'yes', 't', 'true', 'on', '1'}:
        return True
    if lowered in {'n', 'no', 'f', 'false', 'off', '0'}:
        return False
    raise ValueError("invalid truth value %r" % (val,))
+
+
def byte_compile(py_files, optimize=0, force=False, prefix=None,
                 base_dir=None, verbose=0, dry_run=False, direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory.

    'py_files' is a list of files to compile; any files that don't end in
    ".py" are silently skipped. 'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    # nothing is done if sys.dont_write_bytecode is True
    # FIXME this should not raise an error
    if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode:
        raise PackagingByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        from tempfile import mkstemp
        # XXX script_fd may leak, use something better than mkstemp
        script_fd, script_name = mkstemp(".py")
        logger.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            try:
                # the generated script calls this very function back in
                # direct mode from the child interpreter
                script.write("""\
from packaging.util import byte_compile
files = [
""")

                # XXX would be nice to write absolute filenames, just for
                # safety's sake (script should be more robust in the face of
                # chdir'ing before running it).  But this requires abspath'ing
                # 'prefix' as well, and that breaks the hack in build_lib's
                # 'byte_compile()' method that carefully tacks on a trailing
                # slash (os.sep really) to make sure the prefix here is "just
                # right".  This whole prefix business is rather delicate -- the
                # problem is that it's really a directory, but I'm treating it
                # as a dumb string, so trailing slashes and so forth matter.

                #py_files = map(os.path.abspath, py_files)
                #if prefix:
                #    prefix = os.path.abspath(prefix)

                script.write(",\n".join(map(repr, py_files)) + "]\n")
                script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=False,
             direct=True)
""" % (optimize, force, prefix, base_dir, verbose))

            finally:
                script.close()

        # run the script in a fresh interpreter with the requested -O level
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")

        # make sure the child interpreter can import the same modules
        env = copy(os.environ)
        env['PYTHONPATH'] = os.path.pathsep.join(sys.path)
        try:
            spawn(cmd, env=env)
        finally:
            execute(os.remove, (script_name,), "removing %s" % script_name,
                    dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError("invalid prefix: filename %r doesn't "
                                     "start with %r" % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            # NOTE(review): 'direct' is always true on this branch, so
            # this inner check is redundant
            if direct:
                if force or newer(file, cfile):
                    logger.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    logger.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
+
+
def rfc822_escape(header):
    """Return a form of *header* suitable for inclusion in an RFC 822 header.

    Every newline is followed by eight spaces of continuation indent.
    """
    continuation = '\n' + 8 * ' '
    return continuation.join(header.split('\n'))
+
+_RE_VERSION = re.compile('(\d+\.\d+(\.\d+)*)')
+_MAC_OS_X_LD_VERSION = re.compile('^@\(#\)PROGRAM:ld  '
+                                  'PROJECT:ld64-((\d+)(\.\d+)*)')
+
+
def _find_ld_version():
    """Return the version of ld; Mac OS X uses a different version scheme."""
    if sys.platform != 'darwin':
        return _find_exe_version('ld -v')
    return _find_exe_version('ld -v', _MAC_OS_X_LD_VERSION)
+
+
def _find_exe_version(cmd, pattern=_RE_VERSION):
    """Find the version of an executable by running `cmd` in the shell.

    `pattern` is a compiled regular expression.  If not provided, defaults
    to _RE_VERSION. If the command is not found, or the output does not
    match the pattern, returns None.
    """
    from subprocess import Popen, PIPE
    executable = cmd.split()[0]
    if find_executable(executable) is None:
        return None
    pipe = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    try:
        stdout, stderr = pipe.stdout.read(), pipe.stderr.read()
    finally:
        pipe.stdout.close()
        pipe.stderr.close()
        # reap the child now that both streams hit EOF, to avoid a zombie
        pipe.wait()
    # some commands like ld under MacOS X, will give the
    # output in the stderr, rather than stdout.
    # The pipes yield bytes: the old code compared bytes to '' (always
    # unequal, so stderr was never used) and then crashed in
    # pattern.search() with a str pattern.  Decode before matching.
    out_string = stdout if stdout else stderr
    out_string = out_string.decode('utf-8', 'replace')

    result = pattern.search(out_string)
    if result is None:
        return None
    return result.group(1)
+
+
def get_compiler_versions():
    """Return a tuple with the versions of gcc, ld and dllwrap.

    Each entry is a version string, or None when the corresponding
    command is not found.
    """
    return (_find_exe_version('gcc -dumpversion'),
            _find_ld_version(),
            _find_exe_version('dllwrap --version'))
+
+
def newer_group(sources, target, missing='error'):
    """Return True if *target* is out-of-date w.r.t. any file in *sources*.

    In other words, return False only when *target* exists and is newer
    than every file in *sources*.  *missing* controls the treatment of
    absent source files: 'error' (the default) lets os.stat() raise
    OSError; 'ignore' silently drops them; 'newer' treats them as making
    the target out-of-date (handy in dry-run mode, where you pretend to
    run commands whose inputs were never created).
    """
    # A missing target is out-of-date by definition.
    if not os.path.exists(target):
        return True

    target_mtime = os.stat(target).st_mtime
    for source in sources:
        if not os.path.exists(source):
            if missing == 'ignore':
                # drop the missing file from the dependency list
                continue
            if missing == 'newer':
                # a missing source forces a rebuild
                return True
            # missing == 'error': fall through and let os.stat() blow up
        if os.stat(source).st_mtime > target_mtime:
            return True

    return False
+
+
def write_file(filename, contents):
    """Create *filename* and write *contents* to it.

    *contents* is a sequence of strings without line terminators; a
    newline is appended to each.
    """
    with open(filename, "w") as f:
        f.writelines(line + "\n" for line in contents)
+
+
+def _is_package(path):
+    if not os.path.isdir(path):
+        return False
+    return os.path.isfile(os.path.join(path, '__init__.py'))
+
+
# Code taken from the pip project
def _is_archive_file(name):
    """Return True if *name* carries a known archive extension."""
    # extensions handled by unzip_file()/untar_file()
    archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar')
    return splitext(name)[1].lower() in archives
+
+
+def _under(path, root):
+    path = path.split(os.sep)
+    root = root.split(os.sep)
+    if len(root) > len(path):
+        return False
+    for pos, part in enumerate(root):
+        if path[pos] != part:
+            return False
+    return True
+
+
def _package_name(root_path, path):
    """Return a dotted package name for *path*, relative to *root_path*.

    Raise ValueError when *path* is not located under *root_path*.
    """
    if not _under(path, root_path):
        raise ValueError('"%s" is not a subpath of "%s"' % (path, root_path))
    relative = path[len(root_path) + 1:]
    return relative.replace(os.sep, '.')
+
+
def find_packages(paths=(os.curdir,), exclude=()):
    """Return a list all Python packages found recursively within
    directories 'paths'

    'paths' should be supplied as a sequence of "cross-platform"
    (i.e. URL-style) path; it will be converted to the appropriate local
    path syntax.

    'exclude' is a sequence of package names to exclude; '*' can be used as
    a wildcard in the names, such that 'foo.*' will exclude all subpackages
    of 'foo' (but not 'foo' itself).
    """
    packages = []
    # directories ruled out so far; anything under one of them is skipped
    discarded = []

    def _discarded(path):
        # True if *path* lies beneath an already-discarded directory
        for discard in discarded:
            if _under(path, discard):
                return True
        return False

    for path in paths:
        path = convert_path(path)
        for root, dirs, files in os.walk(path):
            for dir_ in dirs:
                fullpath = os.path.join(root, dir_)
                if _discarded(fullpath):
                    continue
                # we work only with Python packages
                if not _is_package(fullpath):
                    discarded.append(fullpath)
                    continue
                # see if it's excluded
                excluded = False
                package_name = _package_name(path, fullpath)
                for pattern in exclude:
                    if fnmatchcase(package_name, pattern):
                        excluded = True
                        break
                if excluded:
                    continue

                # adding it to the list
                packages.append(package_name)
    return packages
+
+
def resolve_name(name):
    """Resolve a dotted name like ``module.object`` to an object and return it.

    The longest importable prefix of *name* is imported, then the remaining
    components are looked up with getattr.  Raise ImportError if the module
    or the name is not found.
    """
    parts = name.split('.')
    cursor = len(parts)

    while cursor > 0:
        try:
            ret = __import__('.'.join(parts[:cursor]))
            break
        except ImportError:
            cursor -= 1
            # The old code tested "cursor == 0" *before* decrementing, which
            # could never be true inside this loop: a name with no importable
            # prefix fell out of the loop and returned '' instead of raising.
            if cursor == 0:
                raise

    for part in parts[1:]:
        try:
            ret = getattr(ret, part)
        except AttributeError as exc:
            raise ImportError(exc)

    return ret
+
+
def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        # fold the '.tar' component back into the extension
        base, ext = base[:-4], base[-4:] + ext
    return base, ext
+
+
def unzip_file(filename, location, flatten=True):
    """Unzip the file *filename* into the *location* directory.

    When *flatten* is true (the default) and every archive member lives
    inside a single leading directory, that directory is stripped from
    the extracted paths.
    """
    if not os.path.exists(location):
        os.makedirs(location)
    with open(filename, 'rb') as zipfp:
        # NOTE(review): 'zip' and 'dir' below shadow builtins; kept as-is
        zip = zipfile.ZipFile(zipfp)
        leading = has_leading_dir(zip.namelist()) and flatten
        for name in zip.namelist():
            data = zip.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            if not os.path.exists(dir):
                os.makedirs(dir)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                if not os.path.exists(fn):
                    os.makedirs(fn)
            else:
                with open(fn, 'wb') as fp:
                    fp.write(data)
+
+
def untar_file(filename, location):
    """Untar the file *filename* into the *location* directory.

    A single leading directory shared by all members is stripped from the
    extracted paths (mirroring unzip_file with flatten=True).
    """
    if not os.path.exists(location):
        os.makedirs(location)
    # pick the decompression mode from the file extension
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif (filename.lower().endswith('.bz2')
          or filename.lower().endswith('.tbz')):
        mode = 'r:bz2'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        mode = 'r:*'
    with tarfile.open(filename, mode) as tar:
        leading = has_leading_dir(member.name for member in tar.getmembers())
        for member in tar.getmembers():
            fn = member.name
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                if not os.path.exists(path):
                    os.makedirs(path)
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError):
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    continue
                if fp is None:
                    # extractfile() returns None for non-regular members
                    # (the old code crashed on fp.close() in that case)
                    continue
                try:
                    dirname = os.path.dirname(path)
                    if not os.path.exists(dirname):
                        os.makedirs(dirname)
                    # The old code had this write indented under the
                    # makedirs branch, so a member was only extracted when
                    # its parent directory did not already exist.
                    with open(path, 'wb') as destfp:
                        shutil.copyfileobj(fp, destfp)
                finally:
                    fp.close()
+
+
def has_leading_dir(paths):
    """Return True if all the paths share the same leading path name.

    In other words, check that everything in the archive lives inside one
    top-level subdirectory.
    """
    common = None
    for path in paths:
        prefix, _ = split_leading_dir(path)
        if not prefix:
            return False
        if common is None:
            common = prefix
        elif prefix != common:
            return False
    return True
+
+
def split_leading_dir(path):
    """Split *path* at its first separator, handling both '/' and '\\'.

    Leading separators are stripped first; a path with no separator yields
    (path, '').
    """
    path = str(path).lstrip('/').lstrip('\\')
    slash = path.find('/')
    backslash = path.find('\\')
    if slash != -1 and (backslash == -1 or slash < backslash):
        return path.split('/', 1)
    if backslash != -1:
        return path.split('\\', 1)
    return path, ''
+
+
def spawn(cmd, search_path=True, verbose=0, dry_run=False, env=None):
    """Run another program specified as a command list 'cmd' in a new process.

    'cmd' is just the argument list for the new process: cmd[0] is the
    program to run and cmd[1:] are its arguments.  If 'search_path' is
    true (the default), the system's executable search path is used to
    find the program; otherwise cmd[0] must be the exact path to the
    executable.  If 'env' is given, it is the environment dictionary for
    the child.  Under 'dry_run' the command is logged but not run.

    Raise PackagingExecError if running the program fails in any way;
    just return on success.
    """
    # NOTE(review): 'search_path' and 'verbose' are accepted for API
    # compatibility but are not used in this implementation.
    logger.info(' '.join(cmd))
    if dry_run:
        return
    status = subprocess.call(cmd, env=env)
    if status != 0:
        raise PackagingExecError(
            "command '%s' failed with exit status %d" % (cmd, status))
+
+
def find_executable(executable, path=None):
    """Try to find 'executable' in the directories listed in 'path'.

    *path* is a string of directories separated by 'os.pathsep' and
    defaults to os.environ['PATH'].  Return the complete filename, or
    None if not found.
    """
    if path is None:
        path = os.environ['PATH']
    ext = os.path.splitext(executable)[1]
    # Windows and OS/2 need an explicit .exe suffix
    if (sys.platform == 'win32' or os.name == 'os2') and ext != '.exe':
        executable = executable + '.exe'

    if os.path.isfile(executable):
        return executable
    for dirname in path.split(os.pathsep):
        candidate = os.path.join(dirname, executable)
        if os.path.isfile(candidate):
            # the file exists, we have a shot at spawn working
            return candidate
    return None
+
+
# Index server used when no repository is configured in .pypirc.
DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
# Default authentication realm name for the index.
DEFAULT_REALM = 'pypi'
# Template written by generate_pypirc(); filled with (username, password).
DEFAULT_PYPIRC = """\
[distutils]
index-servers =
    pypi

[pypi]
username:%s
password:%s
"""
+
+
def get_pypirc_path():
    """Return the path of the per-user .pypirc configuration file."""
    home = os.path.expanduser('~')
    return os.path.join(home, '.pypirc')
+
+
def generate_pypirc(username, password):
    """Create a default .pypirc file holding the given credentials."""
    rc = get_pypirc_path()
    with open(rc, 'w') as f:
        f.write(DEFAULT_PYPIRC % (username, password))
    try:
        # restrict the credentials file to the owner
        os.chmod(rc, 0o600)
    except OSError:
        # should do something better here
        pass
+
+
def read_pypirc(repository=DEFAULT_REPOSITORY, realm=DEFAULT_REALM):
    """Read the .pypirc file.

    Return a dict describing the server section whose name or configured
    repository URL matches *repository* (keys: server, username, password,
    repository, realm).  The legacy single-section 'server-login' format
    is also supported.  Return an empty dict when no usable configuration
    is found.
    """
    rc = get_pypirc_path()
    if os.path.exists(rc):
        config = RawConfigParser()
        config.read(rc)
        sections = config.sections()
        if 'distutils' in sections:
            # let's get the list of servers
            index_servers = config.get('distutils', 'index-servers')
            _servers = [server.strip() for server in
                        index_servers.split('\n')
                        if server.strip() != '']
            if _servers == []:
                # nothing set, let's try to get the default pypi
                if 'pypi' in sections:
                    _servers = ['pypi']
                else:
                    # the file is not properly defined, returning
                    # an empty dict
                    return {}
            for server in _servers:
                current = {'server': server}
                current['username'] = config.get(server, 'username')

                # optional params
                for key, default in (('repository', DEFAULT_REPOSITORY),
                                     ('realm', DEFAULT_REALM),
                                     ('password', None)):
                    if config.has_option(server, key):
                        current[key] = config.get(server, key)
                    else:
                        current[key] = default
                # match either by section name or by configured URL
                if (current['server'] == repository or
                    current['repository'] == repository):
                    return current
        elif 'server-login' in sections:
            # old format
            server = 'server-login'
            if config.has_option(server, 'repository'):
                repository = config.get(server, 'repository')
            else:
                repository = DEFAULT_REPOSITORY

            return {'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': DEFAULT_REALM}

    return {}
+
+
+# utility functions for 2to3 support
+
def run_2to3(files, doctests_only=False, fixer_names=None,
             options=None, explicit=None):
    """Run lib2to3 on a list of Python files, converting them in place.

    The files should all come from the build area, as the modification is
    done in-place.  *fixer_names* is an optional list of packages to load
    additional fixers from; *options* is passed to RefactoringTool.
    *explicit* is accepted for API compatibility but currently unused.
    """
    # Import locally, to delay loading 2to3 until it is actually needed.
    from lib2to3.refactor import get_fixers_from_package, RefactoringTool

    # (the old code assigned fixers = [] and immediately overwrote it)
    fixers = get_fixers_from_package('lib2to3.fixes')
    if fixer_names:
        for fixer_package in fixer_names:
            fixers.extend(get_fixers_from_package(fixer_package))
    r = RefactoringTool(fixers, options=options)
    r.refactor(files, write=True, doctests_only=doctests_only)
+
+
class Mixin2to3:
    """Mix-in for commands that need to run 2to3 on built files.

    Setup scripts may configure the conversion either by assigning the
    class attributes below or by subclassing and overriding run_2to3.
    """

    # names of the fixers to apply; None means every fixer
    # found in lib2to3.fixes
    fixer_names = None

    # options dictionary passed through to RefactoringTool
    options = None

    # fixers to run even though they are marked as explicit
    explicit = None

    def run_2to3(self, files, doctests_only=False):
        """Convert *files* in place by delegating to util.run_2to3."""
        return run_2to3(files, doctests_only, self.fixer_names,
                        self.options, self.explicit)
+
# one brace-delimited option set, e.g. '{opt1,opt2}'
RICH_GLOB = re.compile(r'\{([^}]*)\}')
# '**' adjacent to anything other than '/', ',' or a brace is invalid
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/,{]\*\*|\*\*[^/,}]')
# a '}' with no opening '{' before it, or a '{' never closed
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')


def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    if _CHECK_RECURSIVE_GLOB.search(path_glob):
        msg = """invalid glob %r: recursive glob "**" must be used alone"""
        raise ValueError(msg % path_glob)
    if _CHECK_MISMATCH_SET.search(path_glob):
        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
        raise ValueError(msg % path_glob)
    return _iglob(path_glob)


def _iglob(path_glob):
    """Recursive worker for iglob; yields the matching paths.

    The original bindings shadowed the builtins 'set', 'dir' and 'file';
    they are renamed here.
    """
    rich_path_glob = RICH_GLOB.split(path_glob, 1)
    if len(rich_path_glob) > 1:
        # the glob contains a {..,..} set: expand each choice and recurse
        assert len(rich_path_glob) == 3, rich_path_glob
        prefix, choices, suffix = rich_path_glob
        for choice in choices.split(','):
            for path in _iglob(''.join((prefix, choice, suffix))):
                yield path
    else:
        if '**' not in path_glob:
            # plain pattern: defer to the standard glob module
            for item in std_iglob(path_glob):
                yield item
        else:
            # recursive pattern: walk below the prefix and re-glob the
            # remainder under every directory found
            prefix, radical = path_glob.split('**', 1)
            if prefix == '':
                prefix = '.'
            if radical == '':
                radical = '*'
            else:
                radical = radical.lstrip('/')
            for path, dirnames, filenames in os.walk(prefix):
                path = os.path.normpath(path)
                for match in _iglob(os.path.join(path, radical)):
                    yield match
+
+
def cfg_to_args(path='setup.cfg'):
    """Compatibility helper to use setup.cfg in setup.py.

    This function uses an existing setup.cfg to generate a dictionary of
    keywords that can be used by distutils.core.setup(**kwargs).  It is used
    by generate_setup_py.

    *path* is the path to the setup.cfg file.  If it doesn't exist,
    PackagingFileError is raised.
    """
    # We need to declare the following constants here so that it's easier to
    # generate the setup.py afterwards, using inspect.getsource.

    # XXX ** == needs testing
    D1_D2_SETUP_ARGS = {"name": ("metadata",),
                        "version": ("metadata",),
                        "author": ("metadata",),
                        "author_email": ("metadata",),
                        "maintainer": ("metadata",),
                        "maintainer_email": ("metadata",),
                        "url": ("metadata", "home_page"),
                        "description": ("metadata", "summary"),
                        "long_description": ("metadata", "description"),
                        "download-url": ("metadata",),
                        "classifiers": ("metadata", "classifier"),
                        "platforms": ("metadata", "platform"),  # **
                        "license": ("metadata",),
                        "requires": ("metadata", "requires_dist"),
                        "provides": ("metadata", "provides_dist"),  # **
                        "obsoletes": ("metadata", "obsoletes_dist"),  # **
                        "packages": ("files",),
                        "scripts": ("files",),
                        "py_modules": ("files", "modules"),  # **
                        }

    MULTI_FIELDS = ("classifiers",
                    "requires",
                    "platforms",
                    "packages",
                    "scripts")

    def has_get_option(config, section, option):
        # accept both 'foo_bar' and 'foo-bar' spellings in setup.cfg
        if config.has_option(section, option):
            return config.get(section, option)
        elif config.has_option(section, option.replace('_', '-')):
            return config.get(section, option.replace('_', '-'))
        else:
            return False

    # The real code starts here
    config = RawConfigParser()
    # (the original tested the undefined name 'file' here, which raised
    # NameError on every call instead of the intended error)
    if not os.path.exists(path):
        raise PackagingFileError("file '%s' does not exist" %
                                 os.path.abspath(path))
    config.read(path)

    kwargs = {}
    for arg in D1_D2_SETUP_ARGS:
        if len(D1_D2_SETUP_ARGS[arg]) == 2:
            # The distutils field name is different than packaging's
            section, option = D1_D2_SETUP_ARGS[arg]
        else:
            # The distutils field name is the same as packaging's
            section = D1_D2_SETUP_ARGS[arg][0]
            option = arg

        in_cfg_value = has_get_option(config, section, option)
        if not in_cfg_value:
            # There is no such option in the setup.cfg
            if arg == "long_description":
                # fall back on the file named by 'description-file'
                filename = has_get_option(config, section, "description_file")
                if filename:
                    with open(filename) as fp:
                        in_cfg_value = fp.read()
            if not in_cfg_value:
                # still missing: leave the keyword out entirely instead of
                # storing a bogus False value
                continue

        if arg in MULTI_FIELDS:
            # support multiline options
            in_cfg_value = in_cfg_value.strip().split('\n')

        kwargs[arg] = in_cfg_value

    return kwargs
+
+
+_SETUP_TMPL = """\
+# This script was automatically generated by packaging
+import os
+from distutils.core import setup
+from ConfigParser import RawConfigParser
+
+%(func)s
+
+setup(**cfg_to_args())
+"""
+
+
def generate_setup_py():
    """Generate a distutils compatible setup.py using an existing setup.cfg.

    The generated script embeds the source of cfg_to_args (via
    inspect.getsource) so it can run without packaging installed.

    Raises a PackagingFileError when a setup.py already exists.
    """
    if os.path.exists("setup.py"):
        # message previously read "alreadyexists" (missing space)
        raise PackagingFileError("a setup.py file already exists")

    with open("setup.py", "w") as fp:
        fp.write(_SETUP_TMPL % {'func': getsource(cfg_to_args)})
+
+
# Taken from the pip project
# https://github.com/pypa/pip/blob/master/pip/util.py
def ask(message, options):
    """Prompt the user with *message* until one of *options* is entered.

    The reply is stripped and lowercased before being checked and returned.
    """
    while True:
        answer = input(message).strip().lower()
        if answer in options:
            return answer
        print('invalid response: %r' % answer)
        print('choose one of', ', '.join(repr(o) for o in options))
+
+
+def _parse_record_file(record_file):
+    distinfo, extra_metadata, installed = ({}, [], [])
+    with open(record_file, 'r') as rfile:
+        for path in rfile:
+            path = path.strip()
+            if path.endswith('egg-info') and os.path.isfile(path):
+                distinfo_dir = path.replace('egg-info', 'dist-info')
+                metadata = path
+                egginfo = path
+            elif path.endswith('egg-info') and os.path.isdir(path):
+                distinfo_dir = path.replace('egg-info', 'dist-info')
+                egginfo = path
+                for metadata_file in os.listdir(path):
+                    metadata_fpath = os.path.join(path, metadata_file)
+                    if metadata_file == 'PKG-INFO':
+                        metadata = metadata_fpath
+                    else:
+                        extra_metadata.append(metadata_fpath)
+            elif 'egg-info' in path and os.path.isfile(path):
+                # skip extra metadata files
+                continue
+            else:
+                installed.append(path)
+
+    distinfo['egginfo'] = egginfo
+    distinfo['metadata'] = metadata
+    distinfo['distinfo_dir'] = distinfo_dir
+    distinfo['installer_path'] = os.path.join(distinfo_dir, 'INSTALLER')
+    distinfo['metadata_path'] = os.path.join(distinfo_dir, 'METADATA')
+    distinfo['record_path'] = os.path.join(distinfo_dir, 'RECORD')
+    distinfo['requested_path'] = os.path.join(distinfo_dir, 'REQUESTED')
+    installed.extend([distinfo['installer_path'], distinfo['metadata_path']])
+    distinfo['installed'] = installed
+    distinfo['extra_metadata'] = extra_metadata
+    return distinfo
+
+
+def _write_record_file(record_path, installed_files):
+    with open(record_path, 'w', encoding='utf-8') as f:
+        writer = csv.writer(f, delimiter=',', lineterminator=os.linesep,
+                            quotechar='"')
+
+        for fpath in installed_files:
+            if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
+                # do not put size and md5 hash, as in PEP-376
+                writer.writerow((fpath, '', ''))
+            else:
+                hash = hashlib.md5()
+                with open(fpath, 'rb') as fp:
+                    hash.update(fp.read())
+                md5sum = hash.hexdigest()
+                size = os.path.getsize(fpath)
+                writer.writerow((fpath, md5sum, size))
+
+        # add the RECORD file itself
+        writer.writerow((record_path, '', ''))
+    return record_path
+
+
def egginfo_to_distinfo(record_file, installer=_DEFAULT_INSTALLER,
                        requested=False, remove_egginfo=False):
    """Create files and directories required for PEP 376

    :param record_file: path to RECORD file as produced by setup.py --record
    :param installer: installer name
    :param requested: True if not installed as a dependency
    :param remove_egginfo: delete egginfo dir?
    """
    distinfo = _parse_record_file(record_file)
    distinfo_dir = distinfo['distinfo_dir']
    # remove any stale dist-info first: a real directory is deleted
    # recursively, a symlink or regular file is simply unlinked
    if os.path.isdir(distinfo_dir) and not os.path.islink(distinfo_dir):
        shutil.rmtree(distinfo_dir)
    elif os.path.exists(distinfo_dir):
        os.unlink(distinfo_dir)

    os.makedirs(distinfo_dir)

    # copy setuptools extra metadata files
    if distinfo['extra_metadata']:
        for path in distinfo['extra_metadata']:
            shutil.copy2(path, distinfo_dir)
            # record the copy under its new dist-info location
            new_path = path.replace('egg-info', 'dist-info')
            distinfo['installed'].append(new_path)

    metadata_path = distinfo['metadata_path']
    logger.info('creating %s', metadata_path)
    shutil.copy2(distinfo['metadata'], metadata_path)

    installer_path = distinfo['installer_path']
    logger.info('creating %s', installer_path)
    with open(installer_path, 'w') as f:
        f.write(installer)

    if requested:
        # an empty REQUESTED file marks a directly-requested install
        requested_path = distinfo['requested_path']
        logger.info('creating %s', requested_path)
        open(requested_path, 'w').close()
        distinfo['installed'].append(requested_path)

    record_path = distinfo['record_path']
    logger.info('creating %s', record_path)
    _write_record_file(record_path, distinfo['installed'])

    if remove_egginfo:
        egginfo = distinfo['egginfo']
        logger.info('removing %s', egginfo)
        # egg-info may be either a single file or a directory
        if os.path.isfile(egginfo):
            os.remove(egginfo)
        else:
            shutil.rmtree(egginfo)
+
+
def _has_egg_info(srcdir):
    """Return True if *srcdir* contains a .egg-info directory."""
    if os.path.isdir(srcdir):
        for entry in os.listdir(srcdir):
            if (entry.endswith('.egg-info')
                    and os.path.isdir(os.path.join(srcdir, entry))):
                logger.info("found egg-info directory")
                return True
    logger.warning("no egg-info directory found")
    return False
+
+
def _has_setuptools_text(setup_py):
    """Return True if *setup_py* imports or imports from setuptools."""
    return _has_text(setup_py, 'setuptools')
+
+
def _has_distutils_text(setup_py):
    """Return True if *setup_py* imports or imports from distutils."""
    return _has_text(setup_py, 'distutils')
+
+
def _has_text(setup_py, installer):
    """Return True if a line of *setup_py* imports the *installer* package.

    *installer* is expected to be a plain module name without regex
    metacharacters ('setuptools' or 'distutils').
    """
    pattern = re.compile('import {0}|from {0}'.format(installer))
    with open(setup_py, 'r', encoding='utf-8') as setup:
        for line in setup:
            if pattern.search(line):
                logger.info("found %s text in setup.py", installer)
                return True
    logger.warning("no %s text found in setup.py", installer)
    return False
+
+
+def _has_required_metadata(setup_cfg):
+    config = RawConfigParser()
+    config.read([setup_cfg], encoding='utf8')
+    return (config.has_section('metadata') and
+            'name' in config.options('metadata') and
+            'version' in config.options('metadata'))
+
+
def _has_pkg_info(srcdir):
    """Return True if *srcdir* contains a PKG-INFO file."""
    pkg_info = os.path.join(srcdir, 'PKG-INFO')
    has_pkg_info = os.path.isfile(pkg_info)
    if has_pkg_info:
        logger.info("PKG-INFO file found")
    else:
        # the warning used to be emitted unconditionally, right after the
        # "found" message; it only makes sense when the file is missing
        logger.warning("no PKG-INFO file found")
    return has_pkg_info
+
+
def _has_setup_py(srcdir):
    """Return True if *srcdir* contains a setup.py script."""
    setup_py = os.path.join(srcdir, 'setup.py')
    if os.path.isfile(setup_py):
        logger.info('setup.py file found')
        return True
    # warn on the missing-file path, consistent with _has_setup_cfg
    logger.warning("no setup.py file found")
    return False
+
+
def _has_setup_cfg(srcdir):
    """Return True if *srcdir* contains a setup.cfg file."""
    if os.path.isfile(os.path.join(srcdir, 'setup.cfg')):
        logger.info('setup.cfg file found')
        return True
    logger.warning("no setup.cfg file found")
    return False
+
+
def is_setuptools(path):
    """Check if the project is based on setuptools.

    :param path: path to source directory containing a setup.py script.

    Return True if the project requires setuptools to install, else False.
    """
    srcdir = os.path.abspath(path)
    if not _has_setup_py(srcdir):
        return False
    setup_py = os.path.join(srcdir, 'setup.py')
    # either an egg-info directory or a setuptools import marks the project
    return _has_egg_info(srcdir) or _has_setuptools_text(setup_py)
+
+
def is_distutils(path):
    """Check if the project is based on distutils.

    :param path: path to source directory containing a setup.py script.

    Return True if the project requires distutils to install, else False.
    """
    srcdir = os.path.abspath(path)
    if not _has_setup_py(srcdir):
        return False
    setup_py = os.path.join(srcdir, 'setup.py')
    # either a PKG-INFO file or a distutils import marks the project
    return _has_pkg_info(srcdir) or _has_distutils_text(setup_py)
+
+
def is_packaging(path):
    """Check if the project is based on packaging

    :param path: path to source directory containing a setup.cfg file.

    Return True if the project has a valid setup.cfg, else False.
    """
    srcdir = os.path.abspath(path)
    if not _has_setup_cfg(srcdir):
        return False
    return _has_required_metadata(os.path.join(srcdir, 'setup.cfg'))
+
+
def get_install_method(path):
    """Check if the project is based on packaging, setuptools, or distutils

    :param path: path to source directory containing a setup.cfg file,
                 or setup.py.

    Returns a string representing the best install method to use.
    """
    # probe in order of preference: packaging first, distutils last
    for predicate, method in ((is_packaging, "packaging"),
                              (is_setuptools, "setuptools"),
                              (is_distutils, "distutils")):
        if predicate(path):
            return method
    raise InstallationException('Cannot detect install method')
+
+
# XXX to be replaced by shutil.copytree
def copy_tree(src, dst, preserve_mode=True, preserve_times=True,
              preserve_symlinks=False, update=False, verbose=True,
              dry_run=False):
    """Recursively copy *src* to *dst* and return the list of files created.

    Symlinks are re-created as links only when *preserve_symlinks* is true;
    otherwise their targets are copied.  When *dry_run* is true, nothing is
    written.  Raises PackagingFileError if *src* is not a directory or
    cannot be listed.
    """
    from distutils.file_util import copy_file

    if not dry_run and not os.path.isdir(src):
        raise PackagingFileError(
              "cannot copy tree '%s': not a directory" % src)
    try:
        names = os.listdir(src)
    except OSError as e:
        # 'e[1]' only worked on Python 2; exceptions are not indexable in
        # Python 3, so use the strerror attribute instead
        errstr = e.strerror
        if dry_run:
            names = []
        else:
            raise PackagingFileError(
                  "error listing files in '%s': %s" % (src, errstr))

    if not dry_run:
        _mkpath(dst, verbose=verbose)

    outputs = []

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)

        if preserve_symlinks and os.path.islink(src_name):
            # re-create the symlink instead of copying its target
            link_dest = os.readlink(src_name)
            if verbose >= 1:
                logger.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                os.symlink(link_dest, dst_name)
            outputs.append(dst_name)

        elif os.path.isdir(src_name):
            # recurse into subdirectories
            outputs.extend(
                copy_tree(src_name, dst_name, preserve_mode,
                          preserve_times, preserve_symlinks, update,
                          verbose=verbose, dry_run=dry_run))
        else:
            copy_file(src_name, dst_name, preserve_mode,
                      preserve_times, update, verbose=verbose,
                      dry_run=dry_run)
            outputs.append(dst_name)

    return outputs
+
# cache used by _mkpath() -- in addition to cheapening redundant calls,
# it eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
_path_created = set()
+
+
# os.makedirs is not used here because it blows up if the directory
# already exists (this helper wants to silently succeed in that case),
# and because each created directory should be logged and cached.
def _mkpath(name, mode=0o777, verbose=True, dry_run=False):
    """Create directory *name* and any missing ancestor directories.

    Returns the list of directories actually created (empty in dry-run
    mode or when everything already exists).  Already-existing directories
    and paths recorded in the module-level _path_created cache are skipped.
    Raises PackagingFileError when a mkdir call fails, and
    PackagingInternalError when *name* is not a string.
    """
    # Detect a common bug -- name is None
    if not isinstance(name, str):
        raise PackagingInternalError(
              "mkpath: 'name' must be a string (got %r)" % (name,))

    # XXX what's the better way to handle verbosity? print as we create
    # each directory in the path (the current behaviour), or only announce
    # the creation of the whole path? (quite easy to do the latter since
    # we're not using a recursive algorithm)

    name = os.path.normpath(name)
    created_dirs = []
    if os.path.isdir(name) or name == '':
        return created_dirs
    if os.path.abspath(name) in _path_created:
        return created_dirs

    head, tail = os.path.split(name)
    tails = [tail]                      # stack of lone dirs to create

    while head and tail and not os.path.isdir(head):
        head, tail = os.path.split(head)
        tails.insert(0, tail)          # push next higher dir onto stack

    # now 'head' contains the deepest directory that already exists
    # (that is, the child of 'head' in 'name' is the highest directory
    # that does *not* exist)
    for d in tails:
        head = os.path.join(head, d)
        abs_head = os.path.abspath(head)

        if abs_head in _path_created:
            continue

        if verbose >= 1:
            logger.info("creating %s", head)

        if not dry_run:
            try:
                os.mkdir(head, mode)
            except OSError as exc:
                # tolerate a directory created concurrently by someone else
                if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
                    raise PackagingFileError(
                          "could not create '%s': %s" % (head, exc.args[-1]))
            created_dirs.append(head)

        # cached even in dry-run mode, to suppress repeated messages
        _path_created.add(abs_head)
    return created_dirs
diff --git a/Lib/packaging/version.py b/Lib/packaging/version.py
new file mode 100644
index 0000000..0eaf80b
--- /dev/null
+++ b/Lib/packaging/version.py
@@ -0,0 +1,449 @@
+"""Implementation of the versioning scheme defined in PEP 386."""
+
+import re
+
+from packaging.errors import IrrationalVersionError, HugeMajorVersionNumError
+
+__all__ = ['NormalizedVersion', 'suggest_normalized_version',
+           'VersionPredicate', 'is_valid_version', 'is_valid_versions',
+           'is_valid_predicate']
+
# A marker used in the second and third parts of the `parts` tuple, for
# versions that don't have those segments, to sort properly. An example
# of versions in sort order ('highest' last):
#   1.0b1                 ((1,0), ('b',1), ('f',))
#   1.0.dev345            ((1,0), ('f',),  ('dev', 345))
#   1.0                   ((1,0), ('f',),  ('f',))
#   1.0.post256.dev345    ((1,0), ('f',),  ('f', 'post', 256, 'dev', 345))
#   1.0.post345           ((1,0), ('f',),  ('f', 'post', 345, 'f'))
#                                   ^        ^                 ^
#   'b' < 'f' ---------------------/         |                 |
#                                            |                 |
#   'dev' < 'f' < 'post' -------------------/                  |
#                                                              |
#   'dev' < 'f' ----------------------------------------------/
# Other letters would do, but 'f' for 'final' is kind of nice.
_FINAL_MARKER = ('f',)

# PEP 386 version strings: 'N.N[.N]*[{a|b|c|rc}N[.N]*][.postN][.devN]'
_VERSION_RE = re.compile(r'''
    ^
    (?P<version>\d+\.\d+)          # minimum 'N.N'
    (?P<extraversion>(?:\.\d+)*)   # any number of extra '.N' segments
    (?:
        (?P<prerel>[abc]|rc)       # 'a'=alpha, 'b'=beta, 'c'=release candidate
                                   # 'rc'= alias for release candidate
        (?P<prerelversion>\d+(?:\.\d+)*)
    )?
    (?P<postdev>(\.post(?P<post>\d+))?(\.dev(?P<dev>\d+))?)?
    $''', re.VERBOSE)
+
+
class NormalizedVersion:
    """A rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    """
    def __init__(self, s, error_on_huge_major_num=True):
        """Create a NormalizedVersion instance from a version string.

        @param s {str} The version string.
        @param error_on_huge_major_num {bool} Whether to consider an
            apparent use of a year or full date as the major version number
            an error. Default True. One of the observed patterns on PyPI before
            the introduction of `NormalizedVersion` was version numbers like
            this:
                2009.01.03
                20040603
                2005.01
            This guard is here to strongly encourage the package author to
            use an alternate version, because a release deployed into PyPI
            and, e.g. downstream Linux package managers, will forever remove
            the possibility of using a version number like "1.0" (i.e.
            where the major number is less than that huge major number).
        """
        self.is_final = True  # by default, consider a version as final.
        self._parse(s, error_on_huge_major_num)

    @classmethod
    def from_parts(cls, version, prerelease=_FINAL_MARKER,
                   devpost=_FINAL_MARKER):
        """Alternate constructor building an instance from a parts tuple
        (main version, prerelease, postdev), as stored in self.parts."""
        return cls(cls.parts_to_str((version, prerelease, devpost)))

    def _parse(self, s, error_on_huge_major_num=True):
        """Parses a string version into parts."""
        match = _VERSION_RE.search(s)
        if not match:
            raise IrrationalVersionError(s)

        groups = match.groupdict()
        # parts is built as (main numbers, prerelease, postdev); the
        # _FINAL_MARKER placeholders make tuple comparison sort finals
        # after prereleases and before posts (see comment at the marker)
        parts = []

        # main version
        block = self._parse_numdots(groups['version'], s, False, 2)
        extraversion = groups.get('extraversion')
        if extraversion not in ('', None):
            block += self._parse_numdots(extraversion[1:], s)
        parts.append(tuple(block))

        # prerelease
        prerel = groups.get('prerel')
        if prerel is not None:
            block = [prerel]
            block += self._parse_numdots(groups.get('prerelversion'), s,
                                         pad_zeros_length=1)
            parts.append(tuple(block))
            self.is_final = False
        else:
            parts.append(_FINAL_MARKER)

        # postdev
        if groups.get('postdev'):
            post = groups.get('post')
            dev = groups.get('dev')
            postdev = []
            if post is not None:
                postdev.extend((_FINAL_MARKER[0], 'post', int(post)))
                if dev is None:
                    postdev.append(_FINAL_MARKER[0])
            if dev is not None:
                postdev.extend(('dev', int(dev)))
                self.is_final = False
            parts.append(tuple(postdev))
        else:
            parts.append(_FINAL_MARKER)
        self.parts = tuple(parts)
        # a major version over 1980 is almost certainly a year or date
        if error_on_huge_major_num and self.parts[0][0] > 1980:
            raise HugeMajorVersionNumError("huge major version number, %r, "
               "which might cause future problems: %r" % (self.parts[0][0], s))

    def _parse_numdots(self, s, full_ver_str, drop_trailing_zeros=True,
                       pad_zeros_length=0):
        """Parse 'N.N.N' sequences, return a list of ints.

        @param s {str} 'N.N.N...' sequence to be parsed
        @param full_ver_str {str} The full version string from which this
            comes. Used for error strings.
        @param drop_trailing_zeros {bool} Whether to drop trailing zeros
            from the returned list. Default True.
        @param pad_zeros_length {int} The length to which to pad the
            returned list with zeros, if necessary. Default 0.
        """
        nums = []
        for n in s.split("."):
            if len(n) > 1 and n[0] == '0':
                raise IrrationalVersionError("cannot have leading zero in "
                    "version number segment: '%s' in %r" % (n, full_ver_str))
            nums.append(int(n))
        if drop_trailing_zeros:
            while nums and nums[-1] == 0:
                nums.pop()
        while len(nums) < pad_zeros_length:
            nums.append(0)
        return nums

    def __str__(self):
        return self.parts_to_str(self.parts)

    @classmethod
    def parts_to_str(cls, parts):
        """Transforms a version expressed in tuple into its string
        representation."""
        # XXX This doesn't check for invalid tuples
        main, prerel, postdev = parts
        s = '.'.join(str(v) for v in main)
        if prerel is not _FINAL_MARKER:
            s += prerel[0]
            s += '.'.join(str(v) for v in prerel[1:])
        # the leading 'f' marker is a sort aid only, not part of the string
        if postdev and postdev is not _FINAL_MARKER:
            if postdev[0] == 'f':
                postdev = postdev[1:]
            i = 0
            while i < len(postdev):
                if i % 2 == 0:
                    s += '.'
                s += str(postdev[i])
                i += 1
        return s

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self)

    def _cannot_compare(self, other):
        # comparisons deliberately raise TypeError for foreign operands
        # instead of returning NotImplemented
        raise TypeError("cannot compare %s and %s"
                % (type(self).__name__, type(other).__name__))

    def __eq__(self, other):
        if not isinstance(other, NormalizedVersion):
            self._cannot_compare(other)
        return self.parts == other.parts

    def __lt__(self, other):
        if not isinstance(other, NormalizedVersion):
            self._cannot_compare(other)
        return self.parts < other.parts

    # the remaining comparisons are all derived from __eq__ and __lt__
    def __ne__(self, other):
        return not self.__eq__(other)

    def __gt__(self, other):
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self.__eq__(other) or self.__lt__(other)

    def __ge__(self, other):
        return self.__eq__(other) or self.__gt__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self.parts)
+
+
def suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    """
    try:
        NormalizedVersion(s)
        return s   # already rational
    except IrrationalVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'),
                       ('-release', ''), ('.release', ''), ('-stable', ''),
                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
                       ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    # (the pattern used to be the character class [abc|rc], which matched
    # the literal characters '|' and 'r'; '|' is not an alternation
    # operator inside [])
    rs = re.sub(r"(a|b|c|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    #TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    # NOTE(review): the lookahead (?!...) always succeeds here because the
    # next character is 'd'; a lookbehind (?<![\.\-]) was presumably meant.
    # Left unchanged pending confirmation -- earlier rules already handle
    # most trailing-dev inputs.
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    #   0.4a1.r10       ->  0.4a1.post10
    #   0.9.33-17222    ->  0.9.33.post17222
    #   0.9.33-r17222   ->  0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    #   0.9.33+r17222   ->  0.9.33.dev17222
    #   1.0dev123       ->  1.0.dev123
    #   1.0.git123      ->  1.0.dev123
    #   1.0.bzr123      ->  1.0.dev123
    #   0.1a0dev.123    ->  0.1a0.dev123
    # PyPI stats:  ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    #   0.2.pre1        ->  0.2c1
    #   0.2-c1         ->  0.2c1
    #   1.0preview123   ->  1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    try:
        NormalizedVersion(rs)
        return rs   # already rational
    except IrrationalVersionError:
        pass
    return None
+
+
+# A predicate is: "ProjectName (VERSION1, VERSION2, ..)
+# Group 1 captures the project name (word chars, spaces, dashes, dotted
+# parts); group 2 captures the remainder (the version list, if any).
+_PREDICATE = re.compile(r"(?i)^\s*(\w[\s\w-]*(?:\.\w*)*)(.*)")
+# Either a parenthesized comma-separated version list, or a bare one.
+# NOTE(review): the second pattern string is not a raw string, so "\s"
+# only works because Python leaves unknown escapes alone -- should be
+# written r"..." like the first part; confirm before changing.
+_VERSIONS = re.compile(r"^\s*\((?P<versions>.*)\)\s*$|^\s*"
+                        "(?P<versions2>.*)\s*$")
+# A plain version list, with surrounding whitespace tolerated.
+_PLAIN_VERSIONS = re.compile(r"^\s*(.*)\s*$")
+# Splits one predicate into a comparison operator and a version string.
+_SPLIT_CMP = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
+
+
+def _split_predicate(predicate):
+    """Split an '<op>version' string into (op, NormalizedVersion).
+
+    If no comparison operator is present, '==' is assumed and the whole
+    string is treated as the version.  NormalizedVersion raises
+    IrrationalVersionError if the version part cannot be parsed.
+    """
+    match = _SPLIT_CMP.match(predicate)
+    if match is None:
+        # probably no op, we'll use "=="
+        comp, version = '==', predicate
+    else:
+        comp, version = match.groups()
+    return comp, NormalizedVersion(version)
+
+
+class VersionPredicate:
+    """Defines a predicate: ProjectName (>ver1,ver2, ..)"""
+
+    # Comparison callables keyed by operator string.  Equality is a
+    # *string prefix* match (so "2.5.1" satisfies "== 2.5"), and "<="/">="
+    # combine that prefix match with a strict comparison.
+    _operators = {"<": lambda x, y: x < y,
+                  ">": lambda x, y: x > y,
+                  "<=": lambda x, y: str(x).startswith(str(y)) or x < y,
+                  ">=": lambda x, y: str(x).startswith(str(y)) or x > y,
+                  "==": lambda x, y: str(x).startswith(str(y)),
+                  "!=": lambda x, y: not str(x).startswith(str(y)),
+                  }
+
+    def __init__(self, predicate):
+        """Parse `predicate` into a project name and a list of
+        (operator, NormalizedVersion) pairs.
+
+        Raises ValueError if the project-name part cannot be parsed; an
+        unparseable version propagates IrrationalVersionError from
+        _split_predicate.
+        """
+        # Keep the raw string for __repr__.
+        self._string = predicate
+        predicate = predicate.strip()
+        match = _PREDICATE.match(predicate)
+        if match is None:
+            raise ValueError('Bad predicate "%s"' % predicate)
+
+        name, predicates = match.groups()
+        self.name = name.strip()
+        # An empty predicate list means any version matches (see match()).
+        self.predicates = []
+        if predicates is None:
+            return
+
+        predicates = _VERSIONS.match(predicates.strip())
+        if predicates is None:
+            return
+
+        # Prefer the parenthesized form; fall back to a bare version list.
+        predicates = predicates.groupdict()
+        if predicates['versions'] is not None:
+            versions = predicates['versions']
+        else:
+            versions = predicates.get('versions2')
+
+        if versions is not None:
+            for version in versions.split(','):
+                if version.strip() == '':
+                    continue
+                self.predicates.append(_split_predicate(version))
+
+    def match(self, version):
+        """Check if the provided version matches the predicates."""
+        if isinstance(version, str):
+            version = NormalizedVersion(version)
+        # All predicates must hold; with no predicates this returns True.
+        for operator, predicate in self.predicates:
+            if not self._operators[operator](version, predicate):
+                return False
+        return True
+
+    def __repr__(self):
+        # The original, unstripped predicate string.
+        return self._string
+
+
+class _Versions(VersionPredicate):
+    """Bare comma-separated list of version predicates, with no project
+    name (helper for is_valid_versions)."""
+
+    def __init__(self, predicate):
+        # Deliberately does not call VersionPredicate.__init__: there is
+        # no "name (...)" wrapper to parse here.
+        predicate = predicate.strip()
+        match = _PLAIN_VERSIONS.match(predicate)
+        self.name = None
+        predicates = match.groups()[0]
+        self.predicates = [_split_predicate(pred.strip())
+                           for pred in predicates.split(',')]
+
+
+class _Version(VersionPredicate):
+    """A single version predicate, with no project name (helper for
+    is_valid_version)."""
+
+    def __init__(self, predicate):
+        # Deliberately does not call VersionPredicate.__init__.
+        predicate = predicate.strip()
+        match = _PLAIN_VERSIONS.match(predicate)
+        self.name = None
+        # NOTE: unlike the parent class, 'predicates' holds a single
+        # (op, version) tuple here, not a list of them.
+        self.predicates = _split_predicate(match.groups()[0])
+
+
+def is_valid_predicate(predicate):
+    """Return True if `predicate` parses as 'name (op ver, ...)'."""
+    try:
+        VersionPredicate(predicate)
+    except (ValueError, IrrationalVersionError):
+        return False
+    else:
+        return True
+
+
+def is_valid_versions(predicate):
+    """Return True if `predicate` parses as a bare comma-separated list
+    of version predicates (no project name)."""
+    try:
+        _Versions(predicate)
+    except (ValueError, IrrationalVersionError):
+        return False
+    else:
+        return True
+
+
+def is_valid_version(predicate):
+    """Return True if `predicate` parses as a single version predicate
+    (no project name)."""
+    try:
+        _Version(predicate)
+    except (ValueError, IrrationalVersionError):
+        return False
+    else:
+        return True
+
+
+def get_version_predicate(requirements):
+    """Return a VersionPredicate object, from a string or an already
+    existing object.
+
+    A string argument is parsed; parse failures propagate ValueError or
+    IrrationalVersionError.  Non-string arguments are returned as-is.
+    """
+    if isinstance(requirements, str):
+        requirements = VersionPredicate(requirements)
+    return requirements
diff --git a/Lib/sysconfig.cfg b/Lib/sysconfig.cfg
new file mode 100644
index 0000000..1f6b8bc
--- /dev/null
+++ b/Lib/sysconfig.cfg
@@ -0,0 +1,111 @@
+[globals]
+# These are the useful categories that are sometimes referenced at runtime,
+# using packaging.resources.get_file:
+# Configuration files
+config = {confdir}/{distribution.name}
+# Non-writable data that is independent of architecture (images, many xml/text files)
+appdata = {datadir}/{distribution.name}
+# Non-writable data that is architecture-dependent (some binary data formats)
+appdata.arch = {libdir}/{distribution.name}
+# Data, written by the package, that must be preserved (databases)
+appdata.persistent = {statedir}/lib/{distribution.name}
+# Data, written by the package, that can be safely discarded (cache)
+appdata.disposable = {statedir}/cache/{distribution.name}
+# Help or documentation files referenced at runtime
+help = {datadir}/{distribution.name}
+icon = {datadir}/pixmaps
+scripts = {base}/bin
+
+# Non-runtime files.  These are valid categories for marking files for
+# install, but they should not be referenced by the app at runtime:
+# Help or documentation files not referenced by the package at runtime
+doc = {datadir}/doc/{distribution.name}
+# GNU info documentation files
+info = {datadir}/info
+# man pages
+man = {datadir}/man
+
+[posix_prefix]
+# Configuration directories.  Some of these come straight out of the
+# configure script.  They are for implementing the other variables, not to
+# be used directly in [resource_locations].
+confdir = /etc
+datadir = /usr/share
+libdir = /usr/lib  ; or /usr/lib64 on a multilib system
+statedir = /var
+# User resource directory
+local = ~/.local/{distribution.name}
+
+stdlib = {base}/lib/python{py_version_short}
+platstdlib = {platbase}/lib/python{py_version_short}
+purelib = {base}/lib/python{py_version_short}/site-packages
+platlib = {platbase}/lib/python{py_version_short}/site-packages
+include = {base}/include/python{py_version_short}{abiflags}
+platinclude = {platbase}/include/python{py_version_short}{abiflags}
+data = {base}
+
+[posix_home]
+stdlib = {base}/lib/python
+platstdlib = {base}/lib/python
+purelib = {base}/lib/python
+platlib = {base}/lib/python
+include = {base}/include/python
+platinclude = {base}/include/python
+scripts = {base}/bin
+data = {base}
+
+[nt]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2_home]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[nt_user]
+stdlib = {userbase}/Python{py_version_nodot}
+platstdlib = {userbase}/Python{py_version_nodot}
+purelib = {userbase}/Python{py_version_nodot}/site-packages
+platlib = {userbase}/Python{py_version_nodot}/site-packages
+include = {userbase}/Python{py_version_nodot}/Include
+scripts = {userbase}/Scripts
+data = {userbase}
+
+[posix_user]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[osx_framework_user]
+stdlib = {userbase}/lib/python
+platstdlib = {userbase}/lib/python
+purelib = {userbase}/lib/python/site-packages
+platlib = {userbase}/lib/python/site-packages
+include = {userbase}/include
+scripts = {userbase}/bin
+data = {userbase}
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index 41bccf3..4c1fd1b 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -1,9 +1,10 @@
-"""Provide access to Python's configuration information.
+"""Access to Python's configuration information."""
 
-"""
-import sys
 import os
+import re
+import sys
 from os.path import pardir, realpath
+from configparser import RawConfigParser
 
 __all__ = [
     'get_config_h_filename',
@@ -17,91 +18,51 @@
     'get_python_version',
     'get_scheme_names',
     'parse_config_h',
-    ]
+]
 
-_INSTALL_SCHEMES = {
-    'posix_prefix': {
-        'stdlib': '{base}/lib/python{py_version_short}',
-        'platstdlib': '{platbase}/lib/python{py_version_short}',
-        'purelib': '{base}/lib/python{py_version_short}/site-packages',
-        'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
-        'include':
-            '{base}/include/python{py_version_short}{abiflags}',
-        'platinclude':
-            '{platbase}/include/python{py_version_short}{abiflags}',
-        'scripts': '{base}/bin',
-        'data': '{base}',
-        },
-    'posix_home': {
-        'stdlib': '{base}/lib/python',
-        'platstdlib': '{base}/lib/python',
-        'purelib': '{base}/lib/python',
-        'platlib': '{base}/lib/python',
-        'include': '{base}/include/python',
-        'platinclude': '{base}/include/python',
-        'scripts': '{base}/bin',
-        'data'   : '{base}',
-        },
-    'nt': {
-        'stdlib': '{base}/Lib',
-        'platstdlib': '{base}/Lib',
-        'purelib': '{base}/Lib/site-packages',
-        'platlib': '{base}/Lib/site-packages',
-        'include': '{base}/Include',
-        'platinclude': '{base}/Include',
-        'scripts': '{base}/Scripts',
-        'data'   : '{base}',
-        },
-    'os2': {
-        'stdlib': '{base}/Lib',
-        'platstdlib': '{base}/Lib',
-        'purelib': '{base}/Lib/site-packages',
-        'platlib': '{base}/Lib/site-packages',
-        'include': '{base}/Include',
-        'platinclude': '{base}/Include',
-        'scripts': '{base}/Scripts',
-        'data'   : '{base}',
-        },
-    'os2_home': {
-        'stdlib': '{userbase}/lib/python{py_version_short}',
-        'platstdlib': '{userbase}/lib/python{py_version_short}',
-        'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
-        'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
-        'include': '{userbase}/include/python{py_version_short}',
-        'scripts': '{userbase}/bin',
-        'data'   : '{userbase}',
-        },
-    'nt_user': {
-        'stdlib': '{userbase}/Python{py_version_nodot}',
-        'platstdlib': '{userbase}/Python{py_version_nodot}',
-        'purelib': '{userbase}/Python{py_version_nodot}/site-packages',
-        'platlib': '{userbase}/Python{py_version_nodot}/site-packages',
-        'include': '{userbase}/Python{py_version_nodot}/Include',
-        'scripts': '{userbase}/Scripts',
-        'data'   : '{userbase}',
-        },
-    'posix_user': {
-        'stdlib': '{userbase}/lib/python{py_version_short}',
-        'platstdlib': '{userbase}/lib/python{py_version_short}',
-        'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
-        'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
-        'include': '{userbase}/include/python{py_version_short}',
-        'scripts': '{userbase}/bin',
-        'data'   : '{userbase}',
-        },
-    'osx_framework_user': {
-        'stdlib': '{userbase}/lib/python',
-        'platstdlib': '{userbase}/lib/python',
-        'purelib': '{userbase}/lib/python/site-packages',
-        'platlib': '{userbase}/lib/python/site-packages',
-        'include': '{userbase}/include',
-        'scripts': '{userbase}/bin',
-        'data'   : '{userbase}',
-        },
-    }
+# let's read the configuration file
+# XXX _CONFIG_DIR will be set by the Makefile later
+_CONFIG_DIR = os.path.normpath(os.path.dirname(__file__))
+_CONFIG_FILE = os.path.join(_CONFIG_DIR, 'sysconfig.cfg')
+# Install schemes now live in sysconfig.cfg next to this module and are
+# read eagerly at import time.  RawConfigParser is used so '%' in values
+# is not treated as interpolation syntax.
+_SCHEMES = RawConfigParser()
+_SCHEMES.read(_CONFIG_FILE)
+# Matches {name} substitution tokens inside scheme values.
+_VAR_REPL = re.compile(r'\{([^{]*?)\}')
 
-_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
-                'scripts', 'data')
+
+def _expand_globals(config):
+    """Copy [globals] options into every other section of `config`, then
+    expand {name} references between options of the same section.
+
+    Options already present in a section are not overridden by [globals],
+    and the [globals] section is removed afterwards.  Unknown {name}
+    tokens are left untouched.
+    """
+    if config.has_section('globals'):
+        # NOTE(review): 'globals' shadows the builtin of the same name
+        # (only within this function).
+        globals = config.items('globals')
+    else:
+        globals = tuple()
+
+    sections = config.sections()
+    for section in sections:
+        if section == 'globals':
+            continue
+        for option, value in globals:
+            if config.has_option(section, option):
+                continue
+            config.set(section, option, value)
+    config.remove_section('globals')
+
+    # now expanding local variables defined in the cfg file
+    #
+    for section in config.sections():
+        variables = dict(config.items(section))
+
+        def _replacer(matchobj):
+            name = matchobj.group(1)
+            if name in variables:
+                return variables[name]
+            return matchobj.group(0)
+
+        # Single pass: values produced by this substitution are not
+        # themselves re-expanded.
+        for option, value in config.items(section):
+            config.set(section, option, _VAR_REPL.sub(_replacer, value))
+
+_expand_globals(_SCHEMES)
+
+ # FIXME don't rely on sys.version here, its format is an implementation
+ # detail of CPython, use sys.version_info or sys.hexversion
 _PY_VERSION = sys.version.split()[0]
 _PY_VERSION_SHORT = sys.version[:3]
 _PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
@@ -110,6 +71,7 @@
 _CONFIG_VARS = None
 _USER_BASE = None
 
+
 def _safe_realpath(path):
     try:
         return realpath(path)
@@ -132,6 +94,7 @@
 if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
     _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
 
+
 def is_python_build():
     for fn in ("Setup.dist", "Setup.local"):
         if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
@@ -142,17 +105,25 @@
 
 if _PYTHON_BUILD:
     for scheme in ('posix_prefix', 'posix_home'):
-        _INSTALL_SCHEMES[scheme]['include'] = '{srcdir}/Include'
-        _INSTALL_SCHEMES[scheme]['platinclude'] = '{projectbase}/.'
+        _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
+        _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')
 
-def _subst_vars(s, local_vars):
-    try:
-        return s.format(**local_vars)
-    except KeyError:
-        try:
-            return s.format(**os.environ)
-        except KeyError as var:
-            raise AttributeError('{%s}' % var)
+
+def _subst_vars(path, local_vars):
+    """In the string `path`, replace tokens like {some.thing} with the
+    corresponding value from the map `local_vars`.
+
+    If there is no corresponding value, leave the token unchanged.
+    """
+    def _replacer(matchobj):
+        name = matchobj.group(1)
+        if name in local_vars:
+            return local_vars[name]
+        # fall back to the process environment before giving up
+        elif name in os.environ:
+            return os.environ[name]
+        return matchobj.group(0)
+    return _VAR_REPL.sub(_replacer, path)
+
 
 def _extend_dict(target_dict, other_dict):
     target_keys = target_dict.keys()
@@ -161,41 +132,63 @@
             continue
         target_dict[key] = value
 
+
 def _expand_vars(scheme, vars):
     res = {}
     if vars is None:
         vars = {}
     _extend_dict(vars, get_config_vars())
 
-    for key, value in _INSTALL_SCHEMES[scheme].items():
+    for key, value in _SCHEMES.items(scheme):
         if os.name in ('posix', 'nt'):
             value = os.path.expanduser(value)
         res[key] = os.path.normpath(_subst_vars(value, vars))
     return res
 
+
+def format_value(value, vars):
+    """Expand {name} tokens in `value` using the `vars` mapping.
+
+    Tokens with no entry in `vars` are left unchanged.  Unlike
+    _subst_vars, this does not fall back to os.environ.
+    """
+    def _replacer(matchobj):
+        name = matchobj.group(1)
+        if name in vars:
+            return vars[name]
+        return matchobj.group(0)
+    return _VAR_REPL.sub(_replacer, value)
+
+
 def _get_default_scheme():
     if os.name == 'posix':
         # the default scheme for posix is posix_prefix
         return 'posix_prefix'
     return os.name
 
+
 def _getuserbase():
     env_base = os.environ.get("PYTHONUSERBASE", None)
+
     def joinuser(*args):
         return os.path.expanduser(os.path.join(*args))
 
     # what about 'os2emx', 'riscos' ?
     if os.name == "nt":
         base = os.environ.get("APPDATA") or "~"
-        return env_base if env_base else joinuser(base, "Python")
+        if env_base:
+            return env_base
+        else:
+            return joinuser(base, "Python")
 
     if sys.platform == "darwin":
         framework = get_config_var("PYTHONFRAMEWORK")
         if framework:
-            return env_base if env_base else joinuser("~", "Library", framework, "%d.%d"%(
-                sys.version_info[:2]))
+            if env_base:
+                return env_base
+            else:
+                return joinuser("~", "Library", framework, "%d.%d" %
+                                sys.version_info[:2])
 
-    return env_base if env_base else joinuser("~", ".local")
+    if env_base:
+        return env_base
+    else:
+        return joinuser("~", ".local")
 
 
 def _parse_makefile(filename, vars=None):
@@ -205,7 +198,6 @@
     optional dictionary is passed in as the second argument, it is
     used instead of a new dictionary.
     """
-    import re
     # Regexes needed for parsing Makefile (and similar syntaxes,
     # like old-style Setup files).
     _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
@@ -267,7 +259,8 @@
                     item = os.environ[n]
 
                 elif n in renamed_variables:
-                    if name.startswith('PY_') and name[3:] in renamed_variables:
+                    if (name.startswith('PY_') and
+                        name[3:] in renamed_variables):
                         item = ""
 
                     elif 'PY_' + n in notdone:
@@ -300,7 +293,6 @@
                             if name not in done:
                                 done[name] = value
 
-
             else:
                 # bogus variable reference; just drop it since we can't deal
                 variables.remove(name)
@@ -319,9 +311,11 @@
     """Return the path of the Makefile."""
     if _PYTHON_BUILD:
         return os.path.join(_PROJECT_BASE, "Makefile")
-    return os.path.join(get_path('stdlib'),
-                        'config-{}{}'.format(_PY_VERSION_SHORT, sys.abiflags),
-                        'Makefile')
+    if hasattr(sys, 'abiflags'):
+        config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
+    else:
+        config_dir_name = 'config'
+    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
 
 
 def _init_posix(vars):
@@ -351,6 +345,7 @@
     if _PYTHON_BUILD:
         vars['LDSHARED'] = vars['BLDSHARED']
 
+
 def _init_non_posix(vars):
     """Initialize the module as appropriate for NT"""
     # set basic install directories
@@ -374,7 +369,6 @@
     optional dictionary is passed in as the second argument, it is
     used instead of a new dictionary.
     """
-    import re
     if vars is None:
         vars = {}
     define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
@@ -387,8 +381,10 @@
         m = define_rx.match(line)
         if m:
             n, v = m.group(1, 2)
-            try: v = int(v)
-            except ValueError: pass
+            try:
+                v = int(v)
+            except ValueError:
+                pass
             vars[n] = v
         else:
             m = undef_rx.match(line)
@@ -396,6 +392,7 @@
                 vars[m.group(1)] = 0
     return vars
 
+
 def get_config_h_filename():
     """Return the path of pyconfig.h."""
     if _PYTHON_BUILD:
@@ -407,15 +404,17 @@
         inc_dir = get_path('platinclude')
     return os.path.join(inc_dir, 'pyconfig.h')
 
+
 def get_scheme_names():
     """Return a tuple containing the schemes names."""
-    schemes = list(_INSTALL_SCHEMES.keys())
-    schemes.sort()
-    return tuple(schemes)
+    return tuple(sorted(_SCHEMES.sections()))
+
 
 def get_path_names():
     """Return a tuple containing the paths names."""
-    return _SCHEME_KEYS
+    # xxx see if we want a static list
+    return _SCHEMES.options('posix_prefix')
+
 
 def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
     """Return a mapping containing an install scheme.
@@ -426,7 +425,8 @@
     if expand:
         return _expand_vars(scheme, vars)
     else:
-        return _INSTALL_SCHEMES[scheme]
+        return dict(_SCHEMES.items(scheme))
+
 
 def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
     """Return a path corresponding to the scheme.
@@ -435,6 +435,7 @@
     """
     return get_paths(scheme, vars, expand)[name]
 
+
 def get_config_vars(*args):
     """With no arguments, return a dictionary of all configuration
     variables relevant for the current platform.
@@ -445,13 +446,12 @@
     With arguments, return a list of values that result from looking up
     each argument in the configuration variable dictionary.
     """
-    import re
     global _CONFIG_VARS
     if _CONFIG_VARS is None:
         _CONFIG_VARS = {}
         # Normalized versions of prefix and exec_prefix are handy to have;
         # in fact, these are the standard versions used most places in the
-        # Distutils.
+        # packaging module.
         _CONFIG_VARS['prefix'] = _PREFIX
         _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
         _CONFIG_VARS['py_version'] = _PY_VERSION
@@ -473,14 +473,14 @@
         # Setting 'userbase' is done below the call to the
         # init function to enable using 'get_config_var' in
         # the init-function.
-        _CONFIG_VARS['userbase'] = _getuserbase()
+        if sys.version >= '2.6':
+            _CONFIG_VARS['userbase'] = _getuserbase()
 
         if 'srcdir' not in _CONFIG_VARS:
             _CONFIG_VARS['srcdir'] = _PROJECT_BASE
         else:
             _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])
 
-
         # Convert srcdir into an absolute path if it appears necessary.
         # Normally it is relative to the build directory.  However, during
         # testing, for example, we might be running a non-installed python
@@ -500,7 +500,7 @@
                 _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
 
         if sys.platform == 'darwin':
-            kernel_version = os.uname()[2] # Kernel version (8.4.3)
+            kernel_version = os.uname()[2]  # Kernel version (8.4.3)
             major_version = int(kernel_version.split('.')[0])
 
             if major_version < 8:
@@ -566,6 +566,7 @@
     else:
         return _CONFIG_VARS
 
+
 def get_config_var(name):
     """Return the value of a single variable using the dictionary returned by
     'get_config_vars()'.
@@ -574,6 +575,7 @@
     """
     return get_config_vars().get(name)
 
+
 def get_platform():
     """Return a string that identifies the current platform.
 
@@ -599,7 +601,6 @@
 
     For other non-POSIX platforms, currently just returns 'sys.platform'.
     """
-    import re
     if os.name == 'nt':
         # sniff sys.version for architecture.
         prefix = " bit ("
@@ -644,7 +645,7 @@
         return "%s-%s.%s" % (osname, version, release)
     elif osname[:6] == "cygwin":
         osname = "cygwin"
-        rel_re = re.compile (r'[\d.]+')
+        rel_re = re.compile(r'[\d.]+')
         m = rel_re.match(release)
         if m:
             release = m.group()
@@ -675,14 +676,13 @@
                 pass
             else:
                 try:
-                    m = re.search(
-                            r'<key>ProductUserVisibleVersion</key>\s*' +
-                            r'<string>(.*?)</string>', f.read())
-                    if m is not None:
-                        macrelease = '.'.join(m.group(1).split('.')[:2])
-                    # else: fall back to the default behaviour
+                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
+                                  r'<string>(.*?)</string>', f.read())
                 finally:
                     f.close()
+                if m is not None:
+                    macrelease = '.'.join(m.group(1).split('.')[:2])
+                # else: fall back to the default behaviour
 
         if not macver:
             macver = macrelease
@@ -691,8 +691,8 @@
             release = macver
             osname = "macosx"
 
-            if (macrelease + '.') >= '10.4.' and \
-                    '-arch' in get_config_vars().get('CFLAGS', '').strip():
+            if ((macrelease + '.') >= '10.4.' and
+                '-arch' in get_config_vars().get('CFLAGS', '').strip()):
                 # The universal build will build fat binaries, but not on
                 # systems before 10.4
                 #
@@ -719,7 +719,7 @@
                     machine = 'universal'
                 else:
                     raise ValueError(
-                       "Don't know machine value for archs=%r"%(archs,))
+                       "Don't know machine value for archs=%r" % (archs,))
 
             elif machine == 'i386':
                 # On OSX the machine type returned by uname is always the
@@ -742,21 +742,24 @@
 def get_python_version():
     return _PY_VERSION_SHORT
 
+
 def _print_dict(title, data):
     for index, (key, value) in enumerate(sorted(data.items())):
         if index == 0:
-            print('{0}: '.format(title))
-        print('\t{0} = "{1}"'.format(key, value))
+            print('%s: ' % (title))
+        print('\t%s = "%s"' % (key, value))
+
 
 def _main():
     """Display all information sysconfig detains."""
-    print('Platform: "{0}"'.format(get_platform()))
-    print('Python version: "{0}"'.format(get_python_version()))
-    print('Current installation scheme: "{0}"'.format(_get_default_scheme()))
+    print('Platform: "%s"' % get_platform())
+    print('Python version: "%s"' % get_python_version())
+    print('Current installation scheme: "%s"' % _get_default_scheme())
     print('')
     _print_dict('Paths', get_paths())
-    print('')
+    print()
     _print_dict('Variables', get_config_vars())
 
+
 if __name__ == '__main__':
     _main()
diff --git a/Lib/test/test_packaging.py b/Lib/test/test_packaging.py
new file mode 100644
index 0000000..250d661
--- /dev/null
+++ b/Lib/test/test_packaging.py
@@ -0,0 +1,5 @@
+# Stub that makes the packaging test suite runnable via Lib/test.
+# NOTE(review): 'sys' is imported but never used in this file -- confirm
+# before removing.
+import sys
+from packaging.tests.__main__ import test_main
+
+if __name__ == '__main__':
+    test_main()
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
index a97b388..214fc70 100644
--- a/Lib/test/test_sysconfig.py
+++ b/Lib/test/test_sysconfig.py
@@ -16,7 +16,7 @@
 
 import sysconfig
 from sysconfig import (get_paths, get_platform, get_config_vars,
-                       get_path, get_path_names, _INSTALL_SCHEMES,
+                       get_path, get_path_names, _SCHEMES,
                        _get_default_scheme, _expand_vars,
                        get_scheme_names, get_config_var, _main)
 
@@ -44,7 +44,13 @@
         self.isabs = os.path.isabs
         self.splitdrive = os.path.splitdrive
         self._config_vars = copy(sysconfig._CONFIG_VARS)
-        self.old_environ = deepcopy(os.environ)
+        self._added_envvars = []
+        self._changed_envvars = []
+        for var in ('MACOSX_DEPLOYMENT_TARGET', 'Path'):
+            if var in os.environ:
+                self._changed_envvars.append((var, os.environ[var]))
+            else:
+                self._added_envvars.append(var)
 
     def tearDown(self):
         """Restore sys.path"""
@@ -64,13 +70,10 @@
         os.path.isabs = self.isabs
         os.path.splitdrive = self.splitdrive
         sysconfig._CONFIG_VARS = copy(self._config_vars)
-        for key, value in self.old_environ.items():
-            if os.environ.get(key) != value:
-                os.environ[key] = value
-
-        for key in list(os.environ.keys()):
-            if key not in self.old_environ:
-                del os.environ[key]
+        for var, value in self._changed_envvars:
+            os.environ[var] = value
+        for var in self._added_envvars:
+            os.environ.pop(var, None)
 
         super(TestSysConfig, self).tearDown()
 
@@ -88,7 +91,7 @@
             shutil.rmtree(path)
 
     def test_get_path_names(self):
-        self.assertEqual(get_path_names(), sysconfig._SCHEME_KEYS)
+        self.assertEqual(get_path_names(), _SCHEMES.options('posix_prefix'))
 
     def test_get_paths(self):
         scheme = get_paths()
@@ -102,8 +105,8 @@
 
     def test_get_path(self):
         # xxx make real tests here
-        for scheme in _INSTALL_SCHEMES:
-            for name in _INSTALL_SCHEMES[scheme]:
+        for scheme in _SCHEMES:
+            for name in _SCHEMES[scheme]:
                 res = get_path(name, scheme)
 
     def test_get_config_vars(self):