Merge.
diff --git a/.hgtags b/.hgtags
index ea9ef8f..b393a33 100644
--- a/.hgtags
+++ b/.hgtags
@@ -142,4 +142,7 @@
 c0e311e010fcb5bae8d87ca22051cd0845ea0ca0 v3.4.1
 8711a09513848cfc48c689d983495ee64f4668ca v3.4.2rc1
 ab2c023a9432f16652e89c404bbc84aa91bf55af v3.4.2
+69dd528ca6255a66c37cc5cf680e8357d108b036 v3.4.3rc1
+b4cbecbc0781e89a309d03b60a1f75f8499250e6 v3.4.3
 5d4b6a57d5fd7564bf73f3db0e46fe5eeb00bcd8 v3.5.0a1
+0337bd7ebcb6559d69679bc7025059ad1ce4f432 v3.5.0a2
diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst
index 45b5721..d520702 100644
--- a/Doc/extending/newtypes.rst
+++ b/Doc/extending/newtypes.rst
@@ -1205,7 +1205,7 @@
    {
        if (strcmp(name, "data") == 0)
        {
-           return PyInt_FromLong(obj->data);
+           return PyLong_FromLong(obj->data);
        }
 
        PyErr_Format(PyExc_AttributeError,
diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst
index 57e23f9..e31b6c2 100644
--- a/Doc/howto/logging-cookbook.rst
+++ b/Doc/howto/logging-cookbook.rst
@@ -325,6 +325,15 @@
 
     MainThread: Look out!
 
+.. versionchanged:: 3.5
+   Prior to Python 3.5, the :class:`QueueListener` always passed every message
+   received from the queue to every handler it was initialized with. (This was
+   because it was assumed that level filtering was all done on the other side,
+   where the queue is filled.) From 3.5 onwards, this behaviour can be changed
+   by passing a keyword argument ``respect_handler_level=True`` to the
+   listener's constructor. When this is done, the listener compares the level
+   of each message with the handler's level, and only passes a message to a
+   handler if it's appropriate to do so.
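
A minimal sketch of the new option, assuming Python 3.5 or later; with
``respect_handler_level=True`` the listener checks each queued record against
the handler's own level before passing it on::

   import logging
   import logging.handlers
   import queue

   que = queue.Queue()
   root = logging.getLogger()
   root.setLevel(logging.DEBUG)
   root.addHandler(logging.handlers.QueueHandler(que))

   console = logging.StreamHandler()
   console.setLevel(logging.WARNING)      # only WARNING and above should be emitted

   listener = logging.handlers.QueueListener(
       que, console, respect_handler_level=True)
   listener.start()

   root.debug('dropped by the listener')  # below WARNING: filtered out
   root.error('passed to the handler')    # WARNING or above: emitted as usual

   listener.stop()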
 
 .. _network-logging:
 
diff --git a/Doc/howto/pyporting.rst b/Doc/howto/pyporting.rst
index bd80dfd..d2cd9de 100644
--- a/Doc/howto/pyporting.rst
+++ b/Doc/howto/pyporting.rst
@@ -169,8 +169,9 @@
    division or continue using ``/`` and expect a float
 
 The reason that ``/`` isn't simply translated to ``//`` automatically is that if
-an object defines its own ``__div__`` method but not ``__floordiv__`` then your
-code would begin to fail.
+an object defines a ``__truediv__`` method but not ``__floordiv__`` then your
+code would begin to fail (e.g. a user-defined class that uses ``/`` to
+signify some operation but not ``//`` for the same thing, or at all); a
+hypothetical example is sketched below.
 
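To make that failure mode concrete, here is a hypothetical class that gives
``/`` a meaning of its own but defines no ``__floordiv__``; blindly rewriting
``/`` as ``//`` would therefore break it::

   class Path:
       """Hypothetical class that uses / to join path components."""

       def __init__(self, value):
           self.value = value

       def __truediv__(self, other):
           return Path(self.value + '/' + other)

   p = Path('/usr') / 'local'    # fine: calls __truediv__
   print(p.value)                # /usr/local
   Path('/usr') // 'local'       # TypeError: no __floordiv__ defined
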
 Text versus binary data
 +++++++++++++++++++++++
diff --git a/Doc/includes/email-headers.py b/Doc/includes/email-headers.py
index a53317d..89c8f3a 100644
--- a/Doc/includes/email-headers.py
+++ b/Doc/includes/email-headers.py
@@ -1,8 +1,9 @@
 # Import the email modules we'll need
 from email.parser import Parser
 
-#  If the e-mail headers are in a file, uncomment this line:
-#headers = Parser().parse(open(messagefile, 'r'))
+# If the e-mail headers are in a file, uncomment these two lines:
+# with open(messagefile) as fp:
+#     headers = Parser().parse(fp)
 
 #  Or for parsing headers in a string, use:
 headers = Parser().parsestr('From: <user@example.com>\n'
diff --git a/Doc/includes/email-mime.py b/Doc/includes/email-mime.py
index a90edc1..61d0830 100644
--- a/Doc/includes/email-mime.py
+++ b/Doc/includes/email-mime.py
@@ -20,9 +20,8 @@
 for file in pngfiles:
     # Open the files in binary mode.  Let the MIMEImage class automatically
     # guess the specific image type.
-    fp = open(file, 'rb')
-    img = MIMEImage(fp.read())
-    fp.close()
+    with open(file, 'rb') as fp:
+        img = MIMEImage(fp.read())
     msg.attach(img)
 
 # Send the email via our own SMTP server.
diff --git a/Doc/includes/email-read-alternative-new-api.py b/Doc/includes/email-read-alternative-new-api.py
index 8ab4e9f..3f5ab24 100644
--- a/Doc/includes/email-read-alternative-new-api.py
+++ b/Doc/includes/email-read-alternative-new-api.py
@@ -12,7 +12,8 @@
 from imaginary import magic_html_parser
 
 # In a real program you'd get the filename from the arguments.
-msg = BytesParser(policy=policy.default).parse(open('outgoing.msg', 'rb'))
+with open('outgoing.msg', 'rb') as fp:
+    msg = BytesParser(policy=policy.default).parse(fp)
 
 # Now the header items can be accessed as a dictionary, and any non-ASCII will
 # be converted to unicode:
diff --git a/Doc/includes/email-simple.py b/Doc/includes/email-simple.py
index 077568d..b9b8b41 100644
--- a/Doc/includes/email-simple.py
+++ b/Doc/includes/email-simple.py
@@ -6,10 +6,9 @@
 
 # Open a plain text file for reading.  For this example, assume that
 # the text file contains only ASCII characters.
-fp = open(textfile, 'rb')
-# Create a text/plain message
-msg = MIMEText(fp.read())
-fp.close()
+with open(textfile) as fp:
+    # Create a text/plain message
+    msg = MIMEText(fp.read())
 
 # me == the sender's email address
 # you == the recipient's email address
diff --git a/Doc/install/index.rst b/Doc/install/index.rst
index 8f3ad72..876f350 100644
--- a/Doc/install/index.rst
+++ b/Doc/install/index.rst
@@ -361,7 +361,7 @@
 Type of file    Installation directory
 =============== ===========================================================
 modules         :file:`{userbase}\\Python{XY}\\site-packages`
-scripts         :file:`{userbase}\\Scripts`
+scripts         :file:`{userbase}\\Python{XY}\\Scripts`
 data            :file:`{userbase}`
 C headers       :file:`{userbase}\\Python{XY}\\Include\\{distname}`
 =============== ===========================================================
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index 8b3d9fc..1f75cd9 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -135,7 +135,7 @@
                           formatter_class=argparse.HelpFormatter, \
                           prefix_chars='-', fromfile_prefix_chars=None, \
                           argument_default=None, conflict_handler='error', \
-                          add_help=True)
+                          add_help=True, allow_abbrev=True)
 
    Create a new :class:`ArgumentParser` object. All parameters should be passed
    as keyword arguments. Each parameter has its own more detailed description
@@ -169,6 +169,12 @@
 
    * add_help_ - Add a -h/--help option to the parser (default: ``True``)
 
+   * allow_abbrev_ - Allows long options to be abbreviated if the
+     abbreviation is unambiguous. (default: ``True``)
+
+   .. versionchanged:: 3.5
+      The *allow_abbrev* parameter was added.
+
 The following sections describe how each of these are used.
 
 
@@ -518,6 +524,26 @@
    >>> parser.parse_args([])
    Namespace()
 
+.. _allow_abbrev:
+
+allow_abbrev
+^^^^^^^^^^^^
+
+Normally, when you pass an argument list to the
+:meth:`~ArgumentParser.parse_args` method of a :class:`ArgumentParser`,
+it :ref:`recognizes abbreviations <prefix-matching>` of long options.
+
+This feature can be disabled by setting ``allow_abbrev`` to ``False``::
+
+   >>> parser = argparse.ArgumentParser(prog='PROG', allow_abbrev=False)
+   >>> parser.add_argument('--foobar', action='store_true')
+   >>> parser.add_argument('--foonley', action='store_false')
+   >>> parser.parse_args(['--foon'])
+   usage: PROG [-h] [--foobar] [--foonley]
+   PROG: error: unrecognized arguments: --foon
+
+.. versionadded:: 3.5
+
 
 conflict_handler
 ^^^^^^^^^^^^^^^^
@@ -1410,9 +1436,9 @@
 Argument abbreviations (prefix matching)
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-The :meth:`~ArgumentParser.parse_args` method allows long options to be
-abbreviated to a prefix, if the abbreviation is unambiguous (the prefix matches
-a unique option)::
+The :meth:`~ArgumentParser.parse_args` method :ref:`by default <allow_abbrev>`
+allows long options to be abbreviated to a prefix, if the abbreviation is
+unambiguous (the prefix matches a unique option)::
 
    >>> parser = argparse.ArgumentParser(prog='PROG')
    >>> parser.add_argument('-bacon')
@@ -1426,6 +1452,7 @@
    PROG: error: ambiguous option: -ba could match -badger, -bacon
 
 An error is produced for arguments that could produce more than one option.
+This feature can be disabled by setting :ref:`allow_abbrev` to ``False``.
 
 
 Beyond ``sys.argv``
diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst
index bf77a8f..d7f474e 100644
--- a/Doc/library/asyncio-dev.rst
+++ b/Doc/library/asyncio-dev.rst
@@ -212,6 +212,7 @@
     loop = asyncio.get_event_loop()
     asyncio.async(bug())
     loop.run_forever()
+    loop.close()
 
 Output::
 
@@ -258,6 +259,7 @@
     loop = asyncio.get_event_loop()
     asyncio.async(handle_exception())
     loop.run_forever()
+    loop.close()
 
 Another option is to use the :meth:`BaseEventLoop.run_until_complete`
 function::
diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst
index 4f7fdfe..d27eb4b 100644
--- a/Doc/library/asyncio-eventloop.rst
+++ b/Doc/library/asyncio-eventloop.rst
@@ -22,6 +22,8 @@
 
    Base class of event loops.
 
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
 Run an event loop
 -----------------
 
@@ -104,6 +106,9 @@
 
    Like :meth:`call_soon`, but thread safe.
 
+   See the :ref:`concurrency and multithreading <asyncio-multithreading>`
+   section of the documentation.
+
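A minimal sketch of why :meth:`BaseEventLoop.call_soon_threadsafe` matters: it
is the only supported way to schedule a callback on the loop from another
thread (plain :meth:`call_soon` must not be used there)::

   import asyncio
   import threading

   loop = asyncio.get_event_loop()

   def from_worker_thread():
       # call_soon_threadsafe() is safe to call from this worker thread.
       loop.call_soon_threadsafe(print, 'scheduled from a worker thread')
       loop.call_soon_threadsafe(loop.stop)

   threading.Thread(target=from_worker_thread).start()
   loop.run_forever()   # runs until loop.stop() is scheduled above
   loop.close()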
 
 .. _asyncio-delayed-calls:
 
@@ -180,7 +185,7 @@
 Creating connections
 --------------------
 
-.. method:: BaseEventLoop.create_connection(protocol_factory, host=None, port=None, \*, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None)
+.. coroutinemethod:: BaseEventLoop.create_connection(protocol_factory, host=None, port=None, \*, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None)
 
    Create a streaming transport connection to a given Internet *host* and
    *port*: socket family :py:data:`~socket.AF_INET` or
@@ -253,7 +258,7 @@
       (:class:`StreamReader`, :class:`StreamWriter`) instead of a protocol.
 
 
-.. method:: BaseEventLoop.create_datagram_endpoint(protocol_factory, local_addr=None, remote_addr=None, \*, family=0, proto=0, flags=0)
+.. coroutinemethod:: BaseEventLoop.create_datagram_endpoint(protocol_factory, local_addr=None, remote_addr=None, \*, family=0, proto=0, flags=0)
 
    Create datagram connection: socket family :py:data:`~socket.AF_INET` or
    :py:data:`~socket.AF_INET6` depending on *host* (or *family* if specified),
@@ -271,7 +276,7 @@
    :ref:`UDP echo server protocol <asyncio-udp-echo-server-protocol>` examples.
 
 
-.. method:: BaseEventLoop.create_unix_connection(protocol_factory, path, \*, ssl=None, sock=None, server_hostname=None)
+.. coroutinemethod:: BaseEventLoop.create_unix_connection(protocol_factory, path, \*, ssl=None, sock=None, server_hostname=None)
 
    Create UNIX connection: socket family :py:data:`~socket.AF_UNIX`, socket
    type :py:data:`~socket.SOCK_STREAM`. The :py:data:`~socket.AF_UNIX` socket
@@ -290,7 +295,7 @@
 Creating listening connections
 ------------------------------
 
-.. method:: BaseEventLoop.create_server(protocol_factory, host=None, port=None, \*, family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, ssl=None, reuse_address=None)
+.. coroutinemethod:: BaseEventLoop.create_server(protocol_factory, host=None, port=None, \*, family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE, sock=None, backlog=100, ssl=None, reuse_address=None)
 
    Create a TCP server (socket type :data:`~socket.SOCK_STREAM`) bound to
    *host* and *port*.
@@ -336,11 +341,13 @@
       :class:`StreamWriter`) pair and calls back a function with this pair.
 
 
-.. method:: BaseEventLoop.create_unix_server(protocol_factory, path=None, \*, sock=None, backlog=100, ssl=None)
+.. coroutinemethod:: BaseEventLoop.create_unix_server(protocol_factory, path=None, \*, sock=None, backlog=100, ssl=None)
 
    Similar to :meth:`BaseEventLoop.create_server`, but specific to the
    socket family :py:data:`~socket.AF_UNIX`.
 
+   This method is a :ref:`coroutine <coroutine>`.
+
    Availability: UNIX.
 
 
@@ -384,7 +391,7 @@
 Low-level socket operations
 ---------------------------
 
-.. method:: BaseEventLoop.sock_recv(sock, nbytes)
+.. coroutinemethod:: BaseEventLoop.sock_recv(sock, nbytes)
 
    Receive data from the socket.  The return value is a bytes object
    representing the data received.  The maximum amount of data to be received
@@ -399,7 +406,7 @@
 
       The :meth:`socket.socket.recv` method.
 
-.. method:: BaseEventLoop.sock_sendall(sock, data)
+.. coroutinemethod:: BaseEventLoop.sock_sendall(sock, data)
 
    Send data to the socket.  The socket must be connected to a remote socket.
    This method continues to send data from *data* until either all data has
@@ -416,7 +423,7 @@
 
       The :meth:`socket.socket.sendall` method.
 
-.. method:: BaseEventLoop.sock_connect(sock, address)
+.. coroutinemethod:: BaseEventLoop.sock_connect(sock, address)
 
    Connect to a remote socket at *address*.
 
@@ -438,7 +445,7 @@
       method.
 
 
-.. method:: BaseEventLoop.sock_accept(sock)
+.. coroutinemethod:: BaseEventLoop.sock_accept(sock)
 
    Accept a connection. The socket must be bound to an address and listening
    for connections. The return value is a pair ``(conn, address)`` where *conn*
@@ -459,12 +466,12 @@
 Resolve host name
 -----------------
 
-.. method:: BaseEventLoop.getaddrinfo(host, port, \*, family=0, type=0, proto=0, flags=0)
+.. coroutinemethod:: BaseEventLoop.getaddrinfo(host, port, \*, family=0, type=0, proto=0, flags=0)
 
    This method is a :ref:`coroutine <coroutine>`, similar to
    :meth:`socket.getaddrinfo` function but non-blocking.
 
-.. method:: BaseEventLoop.getnameinfo(sockaddr, flags=0)
+.. coroutinemethod:: BaseEventLoop.getnameinfo(sockaddr, flags=0)
 
    This method is a :ref:`coroutine <coroutine>`, similar to
    :meth:`socket.getnameinfo` function but non-blocking.
@@ -476,7 +483,7 @@
 On Windows with :class:`SelectorEventLoop`, these methods are not supported.
 Use :class:`ProactorEventLoop` to support pipes on Windows.
 
-.. method:: BaseEventLoop.connect_read_pipe(protocol_factory, pipe)
+.. coroutinemethod:: BaseEventLoop.connect_read_pipe(protocol_factory, pipe)
 
    Register read pipe in eventloop.
 
@@ -490,7 +497,7 @@
 
    This method is a :ref:`coroutine <coroutine>`.
 
-.. method:: BaseEventLoop.connect_write_pipe(protocol_factory, pipe)
+.. coroutinemethod:: BaseEventLoop.connect_write_pipe(protocol_factory, pipe)
 
    Register write pipe in eventloop.
 
@@ -543,7 +550,7 @@
 pool of processes). By default, an event loop uses a thread pool executor
 (:class:`~concurrent.futures.ThreadPoolExecutor`).
 
-.. method:: BaseEventLoop.run_in_executor(executor, callback, \*args)
+.. coroutinemethod:: BaseEventLoop.run_in_executor(executor, callback, \*args)
 
    Arrange for a callback to be called in the specified executor.
 
@@ -654,7 +661,7 @@
       The server is closed asynchronously, use the :meth:`wait_closed` coroutine
       to wait until the server is closed.
 
-   .. method:: wait_closed()
+   .. coroutinemethod:: wait_closed()
 
       Wait until the :meth:`close` method completes.
 
diff --git a/Doc/library/asyncio-protocol.rst b/Doc/library/asyncio-protocol.rst
index b6fcc48..2e671e8 100644
--- a/Doc/library/asyncio-protocol.rst
+++ b/Doc/library/asyncio-protocol.rst
@@ -23,6 +23,8 @@
 subprocess pipes.  The methods available on a transport depend on
 the transport's kind.
 
+The transport classes are :ref:`not thread safe <asyncio-multithreading>`.
+
 
 BaseTransport
 -------------
diff --git a/Doc/library/asyncio-queue.rst b/Doc/library/asyncio-queue.rst
new file mode 100644
index 0000000..c82e08b
--- /dev/null
+++ b/Doc/library/asyncio-queue.rst
@@ -0,0 +1,169 @@
+.. currentmodule:: asyncio
+
+Queues
+======
+
+Queues:
+
+* :class:`Queue`
+* :class:`PriorityQueue`
+* :class:`LifoQueue`
+* :class:`JoinableQueue`
+
+The asyncio queue API was designed to be close to classes of the :mod:`queue`
+module (:class:`~queue.Queue`, :class:`~queue.PriorityQueue`,
+:class:`~queue.LifoQueue`), but it has no *timeout* parameter. The
+:func:`asyncio.wait_for` function can be used to cancel a task after a timeout.
+
+Queue
+-----
+
+.. class:: Queue(maxsize=0, \*, loop=None)
+
+   A queue, useful for coordinating producer and consumer coroutines.
+
+   If *maxsize* is less than or equal to zero, the queue size is infinite. If
+   it is an integer greater than ``0``, then ``yield from put()`` will block
+   when the queue reaches *maxsize*, until an item is removed by :meth:`get`.
+
+   Unlike the standard library :mod:`queue`, you can reliably know this Queue's
+   size with :meth:`qsize`, since your single-threaded asyncio application won't
+   be interrupted between calling :meth:`qsize` and doing an operation on the
+   Queue.
+
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
+   .. versionchanged:: 3.4.3
+      New :meth:`join` and :meth:`task_done` methods.
+
+   .. method:: empty()
+
+      Return ``True`` if the queue is empty, ``False`` otherwise.
+
+   .. method:: full()
+
+      Return ``True`` if there are :attr:`maxsize` items in the queue.
+
+      .. note::
+
+         If the Queue was initialized with ``maxsize=0`` (the default), then
+         :meth:`full()` is never ``True``.
+
+   .. coroutinemethod:: get()
+
+      Remove and return an item from the queue. If queue is empty, wait until
+      an item is available.
+
+      This method is a :ref:`coroutine <coroutine>`.
+
+      .. seealso::
+
+         The :meth:`empty` method.
+
+   .. method:: get_nowait()
+
+      Remove and return an item from the queue.
+
+      Return an item if one is immediately available, else raise
+      :exc:`QueueEmpty`.
+
+   .. coroutinemethod:: join()
+
+      Block until all items in the queue have been gotten and processed.
+
+      The count of unfinished tasks goes up whenever an item is added to the
+      queue. The count goes down whenever a consumer thread calls
+      :meth:`task_done` to indicate that the item was retrieved and all work on
+      it is complete.  When the count of unfinished tasks drops to zero,
+      :meth:`join` unblocks.
+
+      This method is a :ref:`coroutine <coroutine>`.
+
+      .. versionadded:: 3.4.3
+
+   .. coroutinemethod:: put(item)
+
+      Put an item into the queue. If the queue is full, wait until a free slot
+      is available before adding item.
+
+      This method is a :ref:`coroutine <coroutine>`.
+
+      .. seealso::
+
+         The :meth:`full` method.
+
+   .. method:: put_nowait(item)
+
+      Put an item into the queue without blocking.
+
+      If no free slot is immediately available, raise :exc:`QueueFull`.
+
+   .. method:: qsize()
+
+      Number of items in the queue.
+
+   .. method:: task_done()
+
+      Indicate that a formerly enqueued task is complete.
+
+      Used by queue consumers. For each :meth:`~Queue.get` used to fetch a task, a
+      subsequent call to :meth:`task_done` tells the queue that the processing
+      on the task is complete.
+
+      If a :meth:`join` is currently blocking, it will resume when all items
+      have been processed (meaning that a :meth:`task_done` call was received
+      for every item that had been :meth:`~Queue.put` into the queue).
+
+      Raises :exc:`ValueError` if called more times than there were items
+      placed in the queue.
+
+      .. versionadded:: 3.4.3
+
+   .. attribute:: maxsize
+
+      Number of items allowed in the queue.
+
+
+PriorityQueue
+-------------
+
+.. class:: PriorityQueue
+
+   A subclass of :class:`Queue`; retrieves entries in priority order (lowest
+   first).
+
+   Entries are typically tuples of the form: (priority number, data).
+
+
+LifoQueue
+---------
+
+.. class:: LifoQueue
+
+    A subclass of :class:`Queue` that retrieves most recently added entries
+    first.
+
+
+JoinableQueue
+-------------
+
+.. class:: JoinableQueue
+
+   Deprecated alias for :class:`Queue`.
+
+   .. deprecated:: 3.4.3
+
+
+Exceptions
+----------
+
+.. exception:: QueueEmpty
+
+   Exception raised when the :meth:`~Queue.get_nowait` method is called on a
+   :class:`Queue` object which is empty.
+
+
+.. exception:: QueueFull
+
+   Exception raised when the :meth:`~Queue.put_nowait` method is called on a
+   :class:`Queue` object which is full.
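
A small producer/consumer sketch of :meth:`~Queue.put`, :meth:`~Queue.get`,
:meth:`~Queue.task_done` and :meth:`~Queue.join`, written in the ``yield from``
coroutine style used throughout these pages (``join`` and ``task_done``
require asyncio 3.4.3 or later)::

   import asyncio

   @asyncio.coroutine
   def producer(queue):
       for item in range(3):
           yield from queue.put(item)

   @asyncio.coroutine
   def consumer(queue):
       while True:
           item = yield from queue.get()
           print('consumed', item)
           queue.task_done()      # one task_done() per successful get()

   loop = asyncio.get_event_loop()
   queue = asyncio.Queue(maxsize=2, loop=loop)
   worker = loop.create_task(consumer(queue))

   loop.run_until_complete(producer(queue))
   loop.run_until_complete(queue.join())   # wait until every item is processed
   worker.cancel()
   loop.run_until_complete(asyncio.wait([worker]))
   loop.close()
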
diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst
index 3809d94..41b24ac 100644
--- a/Doc/library/asyncio-stream.rst
+++ b/Doc/library/asyncio-stream.rst
@@ -9,7 +9,7 @@
 Stream functions
 ================
 
-.. function:: open_connection(host=None, port=None, \*, loop=None, limit=None, **kwds)
+.. coroutinefunction:: open_connection(host=None, port=None, \*, loop=None, limit=None, \*\*kwds)
 
    A wrapper for :meth:`~BaseEventLoop.create_connection()` returning a (reader,
    writer) pair.
@@ -32,7 +32,7 @@
 
    This function is a :ref:`coroutine <coroutine>`.
 
-.. function:: start_server(client_connected_cb, host=None, port=None, \*, loop=None, limit=None, **kwds)
+.. coroutinefunction:: start_server(client_connected_cb, host=None, port=None, \*, loop=None, limit=None, \*\*kwds)
 
    Start a socket server, with a callback for each client connected. The return
    value is the same as :meth:`~BaseEventLoop.create_server()`.
@@ -56,7 +56,7 @@
 
    This function is a :ref:`coroutine <coroutine>`.
 
-.. function:: open_unix_connection(path=None, \*, loop=None, limit=None, **kwds)
+.. coroutinefunction:: open_unix_connection(path=None, \*, loop=None, limit=None, \*\*kwds)
 
    A wrapper for :meth:`~BaseEventLoop.create_unix_connection()` returning
    a (reader, writer) pair.
@@ -68,7 +68,7 @@
 
    Availability: UNIX.
 
-.. function:: start_unix_server(client_connected_cb, path=None, \*, loop=None, limit=None, **kwds)
+.. coroutinefunction:: start_unix_server(client_connected_cb, path=None, \*, loop=None, limit=None, \*\*kwds)
 
    Start a UNIX Domain Socket server, with a callback for each client connected.
 
@@ -85,6 +85,8 @@
 
 .. class:: StreamReader(limit=None, loop=None)
 
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
    .. method:: exception()
 
       Get the exception.
@@ -106,7 +108,7 @@
 
       Set the transport.
 
-   .. method:: read(n=-1)
+   .. coroutinemethod:: read(n=-1)
 
       Read up to *n* bytes.  If *n* is not provided, or set to ``-1``,
       read until EOF and return all read bytes.
@@ -116,7 +118,7 @@
 
       This method is a :ref:`coroutine <coroutine>`.
 
-   .. method:: readline()
+   .. coroutinemethod:: readline()
 
       Read one line, where "line" is a sequence of bytes ending with ``\n``.
 
@@ -128,7 +130,7 @@
 
       This method is a :ref:`coroutine <coroutine>`.
 
-   .. method:: readexactly(n)
+   .. coroutinemethod:: readexactly(n)
 
       Read exactly *n* bytes. Raise an :exc:`IncompleteReadError` if the end of
       the stream is reached before *n* can be read, the
@@ -155,6 +157,8 @@
    wait for flow control.  It also adds a transport attribute which references
    the :class:`Transport` directly.
 
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
    .. attribute:: transport
 
       Transport.
@@ -168,7 +172,7 @@
 
       Close the transport: see :meth:`BaseTransport.close`.
 
-   .. method:: drain()
+   .. coroutinemethod:: drain()
 
       Give the write buffer of the underlying transport a chance to be flushed.
 
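A minimal client sketch of :func:`open_connection` in the same generator-based
coroutine style (``example.com`` and port ``80`` are placeholder values)::

   import asyncio

   @asyncio.coroutine
   def fetch_status_line(host, port):
       reader, writer = yield from asyncio.open_connection(host, port)
       writer.write(b'HEAD / HTTP/1.0\r\nHost: ' + host.encode() + b'\r\n\r\n')
       yield from writer.drain()              # drain() is a coroutine
       status = yield from reader.readline()  # readline() is a coroutine
       writer.close()
       return status

   loop = asyncio.get_event_loop()
   # 'example.com' and 80 are placeholders for a real host and port.
   print(loop.run_until_complete(fetch_status_line('example.com', 80)))
   loop.close()
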
diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst
index 570107e..1b82030 100644
--- a/Doc/library/asyncio-subprocess.rst
+++ b/Doc/library/asyncio-subprocess.rst
@@ -27,7 +27,7 @@
 Create a subprocess: high-level API using Process
 -------------------------------------------------
 
-.. function:: create_subprocess_exec(\*args, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
+.. coroutinefunction:: create_subprocess_exec(\*args, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
 
    Create a subprocess.
 
@@ -39,7 +39,7 @@
 
    This function is a :ref:`coroutine <coroutine>`.
 
-.. function:: create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
+.. coroutinefunction:: create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, loop=None, limit=None, \*\*kwds)
 
    Run the shell command *cmd*.
 
@@ -67,7 +67,7 @@
 
 Run subprocesses asynchronously using the :mod:`subprocess` module.
 
-.. method:: BaseEventLoop.subprocess_exec(protocol_factory, \*args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
+.. coroutinemethod:: BaseEventLoop.subprocess_exec(protocol_factory, \*args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
 
    Create a subprocess from one or more string arguments (character strings or
    bytes strings encoded to the :ref:`filesystem encoding
@@ -116,7 +116,7 @@
 
    See the constructor of the :class:`subprocess.Popen` class for parameters.
 
-.. method:: BaseEventLoop.subprocess_shell(protocol_factory, cmd, \*, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
+.. coroutinemethod:: BaseEventLoop.subprocess_shell(protocol_factory, cmd, \*, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, \*\*kwargs)
 
    Create a subprocess from *cmd*, which is a character string or a bytes
    string encoded to the :ref:`filesystem encoding <filesystem-encoding>`,
@@ -193,7 +193,10 @@
      :meth:`~subprocess.Popen.wait` method of the :class:`~subprocess.Popen`
      class is implemented as a busy loop.
 
-   .. method:: wait()
+   This class is :ref:`not thread safe <asyncio-multithreading>`. See also the
+   :ref:`Subprocess and threads <asyncio-subprocess-threads>` section.
+
+   .. coroutinemethod:: wait()
 
       Wait for child process to terminate.  Set and return :attr:`returncode`
       attribute.
@@ -207,7 +210,7 @@
          blocks waiting for the OS pipe buffer to accept more data. Use the
          :meth:`communicate` method when using pipes to avoid that.
 
-   .. method:: communicate(input=None)
+   .. coroutinemethod:: communicate(input=None)
 
       Interact with process: Send data to stdin.  Read data from stdout and
       stderr, until end-of-file is reached.  Wait for process to terminate.
@@ -310,6 +313,8 @@
   subprocesses from other threads. Call the :func:`get_child_watcher`
   function in the main thread to instantiate the child watcher.
 
+The :class:`asyncio.subprocess.Process` class is not thread safe.
+
 .. seealso::
 
    The :ref:`Concurrency and multithreading in asyncio
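
For instance, a sketch of the high-level API that runs a child process and
waits for its output with the :meth:`communicate` coroutine (it assumes a Unix
``echo`` executable is available)::

   import asyncio
   from asyncio.subprocess import PIPE

   @asyncio.coroutine
   def run_echo():
       # Assumes an 'echo' executable on PATH (Unix).
       process = yield from asyncio.create_subprocess_exec(
           'echo', 'hello', stdout=PIPE)
       stdout, stderr = yield from process.communicate()  # also waits for exit
       return process.returncode, stdout

   loop = asyncio.get_event_loop()
   code, out = loop.run_until_complete(run_echo())
   print(code, out)   # 0 b'hello\n'
   loop.close()
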
diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst
index 80974d9..622ff5fd 100644
--- a/Doc/library/asyncio-sync.rst
+++ b/Doc/library/asyncio-sync.rst
@@ -9,22 +9,16 @@
 * :class:`Lock`
 * :class:`Event`
 * :class:`Condition`
+
+Semaphores:
+
 * :class:`Semaphore`
 * :class:`BoundedSemaphore`
 
-Queues:
-
-* :class:`Queue`
-* :class:`PriorityQueue`
-* :class:`LifoQueue`
-* :class:`JoinableQueue`
-
-asyncio locks and queues API were designed to be close to classes of the
-:mod:`threading` module (:class:`~threading.Lock`, :class:`~threading.Event`,
+The asyncio lock API was designed to be close to classes of the :mod:`threading`
+module (:class:`~threading.Lock`, :class:`~threading.Event`,
 :class:`~threading.Condition`, :class:`~threading.Semaphore`,
-:class:`~threading.BoundedSemaphore`) and the :mod:`queue` module
-(:class:`~queue.Queue`, :class:`~queue.PriorityQueue`,
-:class:`~queue.LifoQueue`), but they have no *timeout* parameter. The
+:class:`~threading.BoundedSemaphore`), but it has no *timeout* parameter. The
 :func:`asyncio.wait_for` function can be used to cancel a task after a timeout.
 
 Locks
@@ -60,6 +54,8 @@
    Locks also support the context management protocol.  ``(yield from lock)``
    should be used as context manager expression.
 
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
    Usage::
 
        lock = Lock()
@@ -89,7 +85,7 @@
 
       Return ``True`` if the lock is acquired.
 
-   .. method:: acquire()
+   .. coroutinemethod:: acquire()
 
       Acquire a lock.
 
@@ -123,6 +119,8 @@
    method.  The :meth:`wait` method blocks until the flag is true. The flag is
    initially false.
 
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
    .. method:: clear()
 
       Reset the internal flag to false. Subsequently, coroutines calling
@@ -139,7 +137,7 @@
       true are awakened. Coroutine that call :meth:`wait` once the flag is true
       will not block at all.
 
-   .. method:: wait()
+   .. coroutinemethod:: wait()
 
       Block until the internal flag is true.
 
@@ -166,7 +164,9 @@
    object, and it is used as the underlying lock.  Otherwise,
    a new :class:`Lock` object is created and used as the underlying lock.
 
-   .. method:: acquire()
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
+   .. coroutinemethod:: acquire()
 
       Acquire the underlying lock.
 
@@ -213,7 +213,7 @@
 
       There is no return value.
 
-   .. method:: wait()
+   .. coroutinemethod:: wait()
 
       Wait until notified.
 
@@ -227,7 +227,7 @@
 
       This method is a :ref:`coroutine <coroutine>`.
 
-   .. method:: wait_for(predicate)
+   .. coroutinemethod:: wait_for(predicate)
 
       Wait until a predicate becomes true.
 
@@ -258,7 +258,9 @@
    defaults to ``1``. If the value given is less than ``0``, :exc:`ValueError`
    is raised.
 
-   .. method:: acquire()
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
+   .. coroutinemethod:: acquire()
 
       Acquire a semaphore.
 
@@ -273,7 +275,7 @@
 
       Returns ``True`` if semaphore can not be acquired immediately.
 
-   .. method:: release()
+   .. coroutinemethod:: release()
 
       Release a semaphore, incrementing the internal counter by one. When it
       was zero on entry and another coroutine is waiting for it to become
@@ -285,154 +287,8 @@
 
 .. class:: BoundedSemaphore(value=1, \*, loop=None)
 
-    A bounded semaphore implementation. Inherit from :class:`Semaphore`.
+   A bounded semaphore implementation. Inherit from :class:`Semaphore`.
 
-    This raises :exc:`ValueError` in :meth:`~Semaphore.release` if it would
-    increase the value above the initial value.
+   This raises :exc:`ValueError` in :meth:`~Semaphore.release` if it would
+   increase the value above the initial value.
 
-
-Queues
-------
-
-Queue
-^^^^^
-
-.. class:: Queue(maxsize=0, \*, loop=None)
-
-   A queue, useful for coordinating producer and consumer coroutines.
-
-   If *maxsize* is less than or equal to zero, the queue size is infinite. If
-   it is an integer greater than ``0``, then ``yield from put()`` will block
-   when the queue reaches *maxsize*, until an item is removed by :meth:`get`.
-
-   Unlike the standard library :mod:`queue`, you can reliably know this Queue's
-   size with :meth:`qsize`, since your single-threaded asyncio application won't
-   be interrupted between calling :meth:`qsize` and doing an operation on the
-   Queue.
-
-   .. method:: empty()
-
-      Return ``True`` if the queue is empty, ``False`` otherwise.
-
-   .. method:: full()
-
-      Return ``True`` if there are :attr:`maxsize` items in the queue.
-
-      .. note::
-
-         If the Queue was initialized with ``maxsize=0`` (the default), then
-         :meth:`full()` is never ``True``.
-
-   .. method:: get()
-
-      Remove and return an item from the queue. If queue is empty, wait until
-      an item is available.
-
-      This method is a :ref:`coroutine <coroutine>`.
-
-      .. seealso::
-
-         The :meth:`empty` method.
-
-   .. method:: get_nowait()
-
-      Remove and return an item from the queue.
-
-      Return an item if one is immediately available, else raise
-      :exc:`QueueEmpty`.
-
-   .. method:: put(item)
-
-      Put an item into the queue. If the queue is full, wait until a free slot
-      is available before adding item.
-
-      This method is a :ref:`coroutine <coroutine>`.
-
-      .. seealso::
-
-         The :meth:`full` method.
-
-   .. method:: put_nowait(item)
-
-      Put an item into the queue without blocking.
-
-      If no free slot is immediately available, raise :exc:`QueueFull`.
-
-   .. method:: qsize()
-
-      Number of items in the queue.
-
-   .. attribute:: maxsize
-
-      Number of items allowed in the queue.
-
-
-PriorityQueue
-^^^^^^^^^^^^^
-
-.. class:: PriorityQueue
-
-   A subclass of :class:`Queue`; retrieves entries in priority order (lowest
-   first).
-
-   Entries are typically tuples of the form: (priority number, data).
-
-
-LifoQueue
-^^^^^^^^^
-
-.. class:: LifoQueue
-
-    A subclass of :class:`Queue` that retrieves most recently added entries
-    first.
-
-
-JoinableQueue
-^^^^^^^^^^^^^
-
-.. class:: JoinableQueue
-
-   A subclass of :class:`Queue` with :meth:`task_done` and :meth:`join`
-   methods.
-
-   .. method:: join()
-
-      Block until all items in the queue have been gotten and processed.
-
-      The count of unfinished tasks goes up whenever an item is added to the
-      queue. The count goes down whenever a consumer thread calls
-      :meth:`task_done` to indicate that the item was retrieved and all work on
-      it is complete.  When the count of unfinished tasks drops to zero,
-      :meth:`join` unblocks.
-
-      This method is a :ref:`coroutine <coroutine>`.
-
-   .. method:: task_done()
-
-      Indicate that a formerly enqueued task is complete.
-
-      Used by queue consumers. For each :meth:`~Queue.get` used to fetch a task, a
-      subsequent call to :meth:`task_done` tells the queue that the processing
-      on the task is complete.
-
-      If a :meth:`join` is currently blocking, it will resume when all items
-      have been processed (meaning that a :meth:`task_done` call was received
-      for every item that had been :meth:`~Queue.put` into the queue).
-
-      Raises :exc:`ValueError` if called more times than there were items
-      placed in the queue.
-
-
-Exceptions
-^^^^^^^^^^
-
-.. exception:: QueueEmpty
-
-   Exception raised when the :meth:`~Queue.get_nowait` method is called on a
-   :class:`Queue` object which is empty.
-
-
-.. exception:: QueueFull
-
-   Exception raised when the :meth:`~Queue.put_nowait` method is called on a
-   :class:`Queue` object which is full.
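
A short sketch of :class:`Event` in the same ``yield from`` style; one
coroutine blocks in the :meth:`~Event.wait` coroutine until another sets the
flag::

   import asyncio

   @asyncio.coroutine
   def waiter(event):
       print('waiting for the event')
       yield from event.wait()    # wait() is a coroutine
       print('event was set')

   @asyncio.coroutine
   def setter(event):
       yield from asyncio.sleep(0.1)
       event.set()                # wakes up every waiter

   loop = asyncio.get_event_loop()
   event = asyncio.Event(loop=loop)
   loop.run_until_complete(asyncio.wait([waiter(event), setter(event)]))
   loop.close()
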
diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst
index 3008c86..158a0d8 100644
--- a/Doc/library/asyncio-task.rst
+++ b/Doc/library/asyncio-task.rst
@@ -209,6 +209,8 @@
      :func:`~concurrent.futures.as_completed` functions in the
      :mod:`concurrent.futures` package.
 
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
    .. method:: cancel()
 
       Cancel the future and schedule callbacks.
@@ -375,6 +377,8 @@
    Don't directly create :class:`Task` instances: use the :func:`async`
    function or the :meth:`BaseEventLoop.create_task` method.
 
+   This class is :ref:`not thread safe <asyncio-multithreading>`.
+
    .. classmethod:: all_tasks(loop=None)
 
       Return a set of all tasks for an event loop.
@@ -545,7 +549,7 @@
    Return ``True`` if *func* is a decorated :ref:`coroutine function
    <coroutine>`.
 
-.. function:: sleep(delay, result=None, \*, loop=None)
+.. coroutinefunction:: sleep(delay, result=None, \*, loop=None)
 
    Create a :ref:`coroutine <coroutine>` that completes after a given
    time (in seconds).  If *result* is provided, it is produced to the caller
@@ -554,6 +558,8 @@
    The resolution of the sleep depends on the :ref:`granularity of the event
    loop <asyncio-delayed-calls>`.
 
+   This function is a :ref:`coroutine <coroutine>`.
+
 .. function:: shield(arg, \*, loop=None)
 
    Wait for a future, shielding it from cancellation.
@@ -581,7 +587,7 @@
        except CancelledError:
            res = None
 
-.. function:: wait(futures, \*, loop=None, timeout=None, return_when=ALL_COMPLETED)
+.. coroutinefunction:: wait(futures, \*, loop=None, timeout=None, return_when=ALL_COMPLETED)
 
    Wait for the Futures and coroutine objects given by the sequence *futures*
    to complete.  Coroutines will be wrapped in Tasks. Returns two sets of
@@ -626,7 +632,7 @@
       when the timeout occurs are returned in the second set.
 
 
-.. function:: wait_for(fut, timeout, \*, loop=None)
+.. coroutinefunction:: wait_for(fut, timeout, \*, loop=None)
 
    Wait for the single :class:`Future` or :ref:`coroutine object <coroutine>`
    to complete with timeout. If *timeout* is ``None``, block until the future
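
A sketch of :func:`wait_for` cancelling a slow coroutine after a timeout, again
in the generator-based style::

   import asyncio

   @asyncio.coroutine
   def slow_operation():
       yield from asyncio.sleep(10)
       return 'finished'

   loop = asyncio.get_event_loop()
   try:
       loop.run_until_complete(asyncio.wait_for(slow_operation(), timeout=0.5))
   except asyncio.TimeoutError:
       print('timed out, the task was cancelled')
   loop.close()
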
diff --git a/Doc/library/asyncio.rst b/Doc/library/asyncio.rst
index 6900198..9b4d65e 100644
--- a/Doc/library/asyncio.rst
+++ b/Doc/library/asyncio.rst
@@ -46,6 +46,11 @@
   you absolutely, positively have to use a library that makes blocking
   I/O calls.
 
+Asynchronous programming is more complex than classical "sequential"
+programming: see the :ref:`Develop with asyncio <asyncio-dev>` page which lists
+common traps and explains how to avoid them. :ref:`Enable the debug mode
+<asyncio-debug-mode>` during development to detect common issues.
+
 Table of contents:
 
 .. toctree::
@@ -58,6 +63,7 @@
    asyncio-stream.rst
    asyncio-subprocess.rst
    asyncio-sync.rst
+   asyncio-queue.rst
    asyncio-dev.rst
 
 .. seealso::
diff --git a/Doc/library/binascii.rst b/Doc/library/binascii.rst
index c92a8e1..3f7df74 100644
--- a/Doc/library/binascii.rst
+++ b/Doc/library/binascii.rst
@@ -65,9 +65,6 @@
    data. More than one line may be passed at a time. If the optional argument
    *header* is present and true, underscores will be decoded as spaces.
 
-   .. versionchanged:: 3.2
-      Accept only bytestring or bytearray objects as input.
-
 
 .. function:: b2a_qp(data, quotetabs=False, istext=True, header=False)
 
@@ -156,9 +153,6 @@
    of hexadecimal digits (which can be upper or lower case), otherwise a
    :exc:`TypeError` is raised.
 
-   .. versionchanged:: 3.2
-      Accept only bytestring or bytearray objects as input.
-
 
 .. exception:: Error
 
diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst
index 488cda5..ed28699 100644
--- a/Doc/library/bz2.rst
+++ b/Doc/library/bz2.rst
@@ -162,15 +162,32 @@
       you need to decompress a multi-stream input with :class:`BZ2Decompressor`,
       you must use a new decompressor for each stream.
 
-   .. method:: decompress(data)
+   .. method:: decompress(data, max_length=-1)
 
-      Provide data to the decompressor object. Returns a chunk of decompressed
-      data if possible, or an empty byte string otherwise.
+      Decompress *data* (a :term:`bytes-like object`), returning
+      uncompressed data as bytes. Some of *data* may be buffered
+      internally, for use in later calls to :meth:`decompress`. The
+      returned data should be concatenated with the output of any
+      previous calls to :meth:`decompress`.
 
-      Attempting to decompress data after the end of the current stream is
-      reached raises an :exc:`EOFError`. If any data is found after the end of
-      the stream, it is ignored and saved in the :attr:`unused_data` attribute.
+      If *max_length* is nonnegative, returns at most *max_length*
+      bytes of decompressed data. If this limit is reached and further
+      output can be produced, the :attr:`~.needs_input` attribute will
+      be set to ``False``. In this case, the next call to
+      :meth:`~.decompress` may provide *data* as ``b''`` to obtain
+      more of the output.
 
+      If all of the input data was decompressed and returned (either
+      because this was less than *max_length* bytes, or because
+      *max_length* was negative), the :attr:`~.needs_input` attribute
+      will be set to ``True``.
+
+      Attempting to decompress data after the end of the stream is reached
+      raises an :exc:`EOFError`.  Any data found after the end of the
+      stream is ignored and saved in the :attr:`~.unused_data` attribute.
+
+      .. versionchanged:: 3.5
+         Added the *max_length* parameter.
 
    .. attribute:: eof
 
@@ -186,6 +203,13 @@
       If this attribute is accessed before the end of the stream has been
       reached, its value will be ``b''``.
 
+   .. attribute:: needs_input
+
+      ``False`` if the :meth:`.decompress` method can provide more
+      decompressed data before requiring new uncompressed input.
+
+      .. versionadded:: 3.5
+
 
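A sketch of incremental decompression with the new *max_length* parameter and
the :attr:`needs_input` attribute (Python 3.5 or later)::

   import bz2
   import io

   compressed = bz2.compress(b'x' * 100000)
   stream = io.BytesIO(compressed)     # stand-in for a file or socket

   decomp = bz2.BZ2Decompressor()
   output = []
   while not decomp.eof:
       if decomp.needs_input:
           data = stream.read(4096)    # feed more compressed input
       else:
           data = b''                  # buffered input is not exhausted yet
       output.append(decomp.decompress(data, max_length=16 * 1024))

   print(len(b''.join(output)))        # 100000
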
 One-shot (de)compression
 ------------------------
diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst
index 048f0e9..0430cb9 100644
--- a/Doc/library/codecs.rst
+++ b/Doc/library/codecs.rst
@@ -403,7 +403,7 @@
    Implements the ``'replace'`` error handling (for :term:`text encodings
    <text encoding>` only): substitutes ``'?'`` for encoding errors
    (to be encoded by the codec), and ``'\ufffd'`` (the Unicode replacement
-   character, ``'�'``) for decoding errors.
+   character) for decoding errors.
 
 
 .. function:: ignore_errors(exception)
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index 7a9f93c..f82f425 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -759,13 +759,19 @@
    :attr:`tzinfo` ``None``. This may raise :exc:`OverflowError`, if the timestamp is
    out of the range of values supported by the platform C :c:func:`gmtime` function,
    and :exc:`OSError` on :c:func:`gmtime` failure.
-   It's common for this to be restricted to years in 1970 through 2038. See also
-   :meth:`fromtimestamp`.
+   It's common for this to be restricted to years in 1970 through 2038.
 
-   On the POSIX compliant platforms, ``utcfromtimestamp(timestamp)``
-   is equivalent to the following expression::
+   To get an aware :class:`.datetime` object, call :meth:`fromtimestamp`::
 
-     datetime(1970, 1, 1) + timedelta(seconds=timestamp)
+     datetime.fromtimestamp(timestamp, timezone.utc)
+
+   On the POSIX compliant platforms, it is equivalent to the following
+   expression::
+
+     datetime(1970, 1, 1, tzinfo=timezone.utc) + timedelta(seconds=timestamp)
+
+   except that the latter formula always supports the full range of years:
+   between :const:`MINYEAR` and :const:`MAXYEAR` inclusive.
 
    .. versionchanged:: 3.3
       Raise :exc:`OverflowError` instead of :exc:`ValueError` if the timestamp
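
For instance, a doctest-style sketch of the aware replacement for
:meth:`utcfromtimestamp`::

   >>> from datetime import datetime, timezone
   >>> datetime.utcfromtimestamp(0)             # naive, tzinfo is None
   datetime.datetime(1970, 1, 1, 0, 0)
   >>> datetime.fromtimestamp(0, timezone.utc)  # aware UTC datetime
   datetime.datetime(1970, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)
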
diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst
index 3b419e6..2b10795 100644
--- a/Doc/library/dis.rst
+++ b/Doc/library/dis.rst
@@ -9,9 +9,9 @@
 --------------
 
 The :mod:`dis` module supports the analysis of CPython :term:`bytecode` by
-disassembling it. The CPython bytecode which this module takes as an
-input is defined in the file :file:`Include/opcode.h` and used by the compiler
-and the interpreter.
+disassembling it. The CPython bytecode which this module takes as an input is
+defined in the file :file:`Include/opcode.h` and used by the compiler and the
+interpreter.
 
 .. impl-detail::
 
@@ -43,33 +43,32 @@
 .. versionadded:: 3.4
 
 The bytecode analysis API allows pieces of Python code to be wrapped in a
-:class:`Bytecode` object that provides easy access to details of the
-compiled code.
+:class:`Bytecode` object that provides easy access to details of the compiled
+code.
 
 .. class:: Bytecode(x, *, first_line=None, current_offset=None)
 
-   Analyse the bytecode corresponding to a function, generator, method,
-   string of source code, or a code object (as returned by :func:`compile`).
 
-   This is a convenience wrapper around many of the functions listed below,
-   most notably :func:`get_instructions`, as iterating over a
-   :class:`Bytecode` instance yields the bytecode operations as
-   :class:`Instruction` instances.
+   Analyse the bytecode corresponding to a function, generator, method, string
+   of source code, or a code object (as returned by :func:`compile`).
 
-   If *first_line* is not None, it indicates the line number that should
-   be reported for the first source line in the disassembled code.
-   Otherwise, the source line information (if any) is taken directly from
-   the disassembled code object.
+   This is a convenience wrapper around many of the functions listed below, most
+   notably :func:`get_instructions`, as iterating over a :class:`Bytecode`
+   instance yields the bytecode operations as :class:`Instruction` instances.
 
-   If *current_offset* is not None, it refers to an instruction offset
-   in the disassembled code. Setting this means :meth:`dis` will display
-   a "current instruction" marker against the specified opcode.
+   If *first_line* is not None, it indicates the line number that should be
+   reported for the first source line in the disassembled code.  Otherwise, the
+   source line information (if any) is taken directly from the disassembled code
+   object.
+
+   If *current_offset* is not None, it refers to an instruction offset in the
+   disassembled code. Setting this means :meth:`.dis` will display a "current
+   instruction" marker against the specified opcode.
 
    .. classmethod:: from_traceback(tb)
 
-      Construct a :class:`Bytecode` instance from the given traceback,
-      setting *current_offset* to the instruction responsible for the
-      exception.
+      Construct a :class:`Bytecode` instance from the given traceback, setting
+      *current_offset* to the instruction responsible for the exception.
 
    .. data:: codeobj
 
@@ -81,8 +80,8 @@
 
    .. method:: dis()
 
-      Return a formatted view of the bytecode operations (the same as
-      printed by :func:`dis`, but returned as a multi-line string).
+      Return a formatted view of the bytecode operations (the same as printed by
+      :func:`dis.dis`, but returned as a multi-line string).
 
    .. method:: info()
 
@@ -104,10 +103,9 @@
 Analysis functions
 ------------------
 
-The :mod:`dis` module also defines the following analysis functions that
-convert the input directly to the desired output. They can be useful if
-only a single operation is being performed, so the intermediate analysis
-object isn't useful:
+The :mod:`dis` module also defines the following analysis functions that convert
+the input directly to the desired output. They can be useful if only a single
+operation is being performed, so the intermediate analysis object isn't useful:
 
 .. function:: code_info(x)
 
@@ -196,13 +194,13 @@
    Return an iterator over the instructions in the supplied function, method,
    source code string or code object.
 
-   The iterator generates a series of :class:`Instruction` named tuples
-   giving the details of each operation in the supplied code.
+   The iterator generates a series of :class:`Instruction` named tuples giving
+   the details of each operation in the supplied code.
 
-   If *first_line* is not None, it indicates the line number that should
-   be reported for the first source line in the disassembled code.
-   Otherwise, the source line information (if any) is taken directly from
-   the disassembled code object.
+   If *first_line* is not None, it indicates the line number that should be
+   reported for the first source line in the disassembled code.  Otherwise, the
+   source line information (if any) is taken directly from the disassembled code
+   object.
 
    .. versionadded:: 3.4
 
@@ -511,8 +509,8 @@
 .. opcode:: PRINT_EXPR
 
    Implements the expression statement for the interactive mode.  TOS is removed
-   from the stack and printed.  In non-interactive mode, an expression statement is
-   terminated with :opcode:`POP_TOP`.
+   from the stack and printed.  In non-interactive mode, an expression statement
+   is terminated with :opcode:`POP_TOP`.
 
 
 .. opcode:: BREAK_LOOP
@@ -542,9 +540,9 @@
    comprehensions.
 
 For all of the :opcode:`SET_ADD`, :opcode:`LIST_APPEND` and :opcode:`MAP_ADD`
-instructions, while the
-added value or key/value pair is popped off, the container object remains on
-the stack so that it is available for further iterations of the loop.
+instructions, while the added value or key/value pair is popped off, the
+container object remains on the stack so that it is available for further
+iterations of the loop.
 
 
 .. opcode:: RETURN_VALUE
@@ -554,35 +552,35 @@
 
 .. opcode:: YIELD_VALUE
 
-   Pops ``TOS`` and yields it from a :term:`generator`.
+   Pops TOS and yields it from a :term:`generator`.
 
 
 .. opcode:: YIELD_FROM
 
-   Pops ``TOS`` and delegates to it as a subiterator from a :term:`generator`.
+   Pops TOS and delegates to it as a subiterator from a :term:`generator`.
 
    .. versionadded:: 3.3
 
 
 .. opcode:: IMPORT_STAR
 
-   Loads all symbols not starting with ``'_'`` directly from the module TOS to the
-   local namespace. The module is popped after loading all names. This opcode
-   implements ``from module import *``.
+   Loads all symbols not starting with ``'_'`` directly from the module TOS to
+   the local namespace. The module is popped after loading all names. This
+   opcode implements ``from module import *``.
 
 
 .. opcode:: POP_BLOCK
 
-   Removes one block from the block stack.  Per frame, there is a  stack of blocks,
-   denoting nested loops, try statements, and such.
+   Removes one block from the block stack.  Per frame, there is a stack of
+   blocks, denoting nested loops, try statements, and such.
 
 
 .. opcode:: POP_EXCEPT
 
    Removes one block from the block stack. The popped block must be an exception
-   handler block, as implicitly created when entering an except handler.
-   In addition to popping extraneous values from the frame stack, the
-   last three popped values are used to restore the exception state.
+   handler block, as implicitly created when entering an except handler.  In
+   addition to popping extraneous values from the frame stack, the last three
+   popped values are used to restore the exception state.
 
 
 .. opcode:: END_FINALLY
@@ -612,9 +610,9 @@
 
 .. opcode:: WITH_CLEANUP
 
-   Cleans up the stack when a :keyword:`with` statement block exits.  TOS is
-   the context manager's :meth:`__exit__` bound method. Below TOS are 1--3
-   values indicating how/why the finally clause was entered:
+   Cleans up the stack when a :keyword:`with` statement block exits.  TOS is the
+   context manager's :meth:`__exit__` bound method. Below TOS are 1--3 values
+   indicating how/why the finally clause was entered:
 
    * SECOND = ``None``
    * (SECOND, THIRD) = (``WHY_{RETURN,CONTINUE}``), retval
@@ -624,10 +622,10 @@
    In the last case, ``TOS(SECOND, THIRD, FOURTH)`` is called, otherwise
    ``TOS(None, None, None)``.  In addition, TOS is removed from the stack.
 
-   If the stack represents an exception, *and* the function call returns
-   a 'true' value, this information is "zapped" and replaced with a single
-   ``WHY_SILENCED`` to prevent :opcode:`END_FINALLY` from re-raising the exception.
-   (But non-local gotos will still be resumed.)
+   If the stack represents an exception, *and* the function call returns a
+   'true' value, this information is "zapped" and replaced with a single
+   ``WHY_SILENCED`` to prevent :opcode:`END_FINALLY` from re-raising the
+   exception.  (But non-local gotos will still be resumed.)
 
    .. XXX explain the WHY stuff!
 
@@ -638,8 +636,8 @@
 .. opcode:: STORE_NAME (namei)
 
    Implements ``name = TOS``. *namei* is the index of *name* in the attribute
-   :attr:`co_names` of the code object. The compiler tries to use :opcode:`STORE_FAST`
-   or :opcode:`STORE_GLOBAL` if possible.
+   :attr:`co_names` of the code object. The compiler tries to use
+   :opcode:`STORE_FAST` or :opcode:`STORE_GLOBAL` if possible.
 
 
 .. opcode:: DELETE_NAME (namei)
@@ -699,8 +697,8 @@
 
 .. opcode:: BUILD_TUPLE (count)
 
-   Creates a tuple consuming *count* items from the stack, and pushes the resulting
-   tuple onto the stack.
+   Creates a tuple consuming *count* items from the stack, and pushes the
+   resulting tuple onto the stack.
 
 
 .. opcode:: BUILD_LIST (count)
@@ -734,8 +732,8 @@
 
    Imports the module ``co_names[namei]``.  TOS and TOS1 are popped and provide
    the *fromlist* and *level* arguments of :func:`__import__`.  The module
-   object is pushed onto the stack.  The current namespace is not affected:
-   for a proper import statement, a subsequent :opcode:`STORE_FAST` instruction
+   object is pushed onto the stack.  The current namespace is not affected: for
+   a proper import statement, a subsequent :opcode:`STORE_FAST` instruction
    modifies the namespace.
 
 
@@ -763,14 +761,14 @@
 
 .. opcode:: JUMP_IF_TRUE_OR_POP (target)
 
-   If TOS is true, sets the bytecode counter to *target* and leaves TOS
-   on the stack.  Otherwise (TOS is false), TOS is popped.
+   If TOS is true, sets the bytecode counter to *target* and leaves TOS on the
+   stack.  Otherwise (TOS is false), TOS is popped.
 
 
 .. opcode:: JUMP_IF_FALSE_OR_POP (target)
 
-   If TOS is false, sets the bytecode counter to *target* and leaves
-   TOS on the stack.  Otherwise (TOS is true), TOS is popped.
+   If TOS is false, sets the bytecode counter to *target* and leaves TOS on the
+   stack.  Otherwise (TOS is true), TOS is popped.
 
 
 .. opcode:: JUMP_ABSOLUTE (target)
@@ -780,10 +778,10 @@
 
 .. opcode:: FOR_ITER (delta)
 
-   ``TOS`` is an :term:`iterator`.  Call its :meth:`~iterator.__next__` method.
-   If this yields a new value, push it on the stack (leaving the iterator below
-   it).  If the iterator indicates it is exhausted ``TOS`` is popped, and the
-   byte code counter is incremented by *delta*.
+   TOS is an :term:`iterator`.  Call its :meth:`~iterator.__next__` method.  If
+   this yields a new value, push it on the stack (leaving the iterator below
+   it).  If the iterator indicates it is exhausted TOS is popped, and the byte
+   code counter is incremented by *delta*.
 
 
 .. opcode:: LOAD_GLOBAL (namei)
@@ -799,19 +797,19 @@
 
 .. opcode:: SETUP_EXCEPT (delta)
 
-   Pushes a try block from a try-except clause onto the block stack. *delta* points
-   to the first except block.
+   Pushes a try block from a try-except clause onto the block stack. *delta*
+   points to the first except block.
 
 
 .. opcode:: SETUP_FINALLY (delta)
 
-   Pushes a try block from a try-except clause onto the block stack. *delta* points
-   to the finally block.
+   Pushes a try block from a try-except clause onto the block stack. *delta*
+   points to the finally block.
 
 .. opcode:: STORE_MAP
 
-   Store a key and value pair in a dictionary.  Pops the key and value while leaving
-   the dictionary on the stack.
+   Store a key and value pair in a dictionary.  Pops the key and value while
+   leaving the dictionary on the stack.
 
 .. opcode:: LOAD_FAST (var_num)
 
@@ -831,8 +829,8 @@
 .. opcode:: LOAD_CLOSURE (i)
 
    Pushes a reference to the cell contained in slot *i* of the cell and free
-   variable storage.  The name of the variable is  ``co_cellvars[i]`` if *i* is
-   less than the length of *co_cellvars*.  Otherwise it is  ``co_freevars[i -
+   variable storage.  The name of the variable is ``co_cellvars[i]`` if *i* is
+   less than the length of *co_cellvars*.  Otherwise it is ``co_freevars[i -
    len(co_cellvars)]``.
 
 
@@ -872,11 +870,12 @@
 
    Calls a function.  The low byte of *argc* indicates the number of positional
    parameters, the high byte the number of keyword parameters. On the stack, the
-   opcode finds the keyword parameters first.  For each keyword argument, the value
-   is on top of the key.  Below the keyword parameters, the positional parameters
-   are on the stack, with the right-most parameter on top.  Below the parameters,
-   the function object to call is on the stack.  Pops all function arguments, and
-   the function itself off the stack, and pushes the return value.
+   opcode finds the keyword parameters first.  For each keyword argument, the
+   value is on top of the key.  Below the keyword parameters, the positional
+   parameters are on the stack, with the right-most parameter on top.  Below the
+   parameters, the function object to call is on the stack.  Pops all function
+   arguments, and the function itself off the stack, and pushes the return
+   value.
 
 
 .. opcode:: MAKE_FUNCTION (argc)
@@ -899,8 +898,8 @@
    Creates a new function object, sets its *__closure__* slot, and pushes it on
    the stack.  TOS is the :term:`qualified name` of the function, TOS1 is the
    code associated with the function, and TOS2 is the tuple containing cells for
-   the closure's free variables.  The function also has *argc* default parameters,
-   which are found below the cells.
+   the closure's free variables.  The function also has *argc* default
+   parameters, which are found below the cells.
 
 
 .. opcode:: BUILD_SLICE (argc)
@@ -916,36 +915,37 @@
 
    Prefixes any opcode which has an argument too big to fit into the default two
    bytes.  *ext* holds two additional bytes which, taken together with the
-   subsequent opcode's argument, comprise a four-byte argument, *ext* being the two
-   most-significant bytes.
+   subsequent opcode's argument, comprise a four-byte argument, *ext* being the
+   two most-significant bytes.
 
 
 .. opcode:: CALL_FUNCTION_VAR (argc)
 
-   Calls a function. *argc* is interpreted as in :opcode:`CALL_FUNCTION`. The top element
-   on the stack contains the variable argument list, followed by keyword and
-   positional arguments.
+   Calls a function. *argc* is interpreted as in :opcode:`CALL_FUNCTION`. The
+   top element on the stack contains the variable argument list, followed by
+   keyword and positional arguments.
 
 
 .. opcode:: CALL_FUNCTION_KW (argc)
 
-   Calls a function. *argc* is interpreted as in :opcode:`CALL_FUNCTION`. The top element
-   on the stack contains the keyword arguments dictionary,  followed by explicit
-   keyword and positional arguments.
+   Calls a function. *argc* is interpreted as in :opcode:`CALL_FUNCTION`. The
+   top element on the stack contains the keyword arguments dictionary, followed
+   by explicit keyword and positional arguments.
 
 
 .. opcode:: CALL_FUNCTION_VAR_KW (argc)
 
-   Calls a function. *argc* is interpreted as in :opcode:`CALL_FUNCTION`.  The top
-   element on the stack contains the keyword arguments dictionary, followed by the
-   variable-arguments tuple, followed by explicit keyword and positional arguments.
+   Calls a function. *argc* is interpreted as in :opcode:`CALL_FUNCTION`.  The
+   top element on the stack contains the keyword arguments dictionary, followed
+   by the variable-arguments tuple, followed by explicit keyword and positional
+   arguments.
 
 
 .. opcode:: HAVE_ARGUMENT
 
-   This is not really an opcode.  It identifies the dividing line between opcodes
-   which don't take arguments ``< HAVE_ARGUMENT`` and those which do ``>=
-   HAVE_ARGUMENT``.
+   This is not really an opcode.  It identifies the dividing line between
+   opcodes which don't take arguments ``< HAVE_ARGUMENT`` and those which do
+   ``>= HAVE_ARGUMENT``.
 
 .. _opcode_collections:
 
@@ -977,10 +977,10 @@
 
 .. data:: hasfree
 
-   Sequence of bytecodes that access a free variable (note that 'free' in
-   this context refers to names in the current scope that are referenced by
-   inner scopes or names in outer scopes that are referenced from this scope.
-   It does *not* include references to global or builtin scopes).
+   Sequence of bytecodes that access a free variable (note that 'free' in this
+   context refers to names in the current scope that are referenced by inner
+   scopes or names in outer scopes that are referenced from this scope.  It does
+   *not* include references to global or builtin scopes).
 
 
 .. data:: hasname
diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst
index b495f41..5e27a36 100644
--- a/Doc/library/http.client.rst
+++ b/Doc/library/http.client.rst
@@ -169,6 +169,12 @@
    status code that we don't understand.
 
 
+.. exception:: LineTooLong
+
+   A subclass of :exc:`HTTPException`.  Raised if an excessively long line
+   is received in the HTTP protocol from the server.
+
+
 The constants defined in this module are:
 
 .. data:: HTTP_PORT
diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst
index ec54643..1c3e202 100644
--- a/Doc/library/http.server.rst
+++ b/Doc/library/http.server.rst
@@ -64,6 +64,18 @@
 
       Contains the server instance.
 
+   .. attribute:: close_connection
+
+      Boolean that should be set before :meth:`handle_one_request` returns,
+      indicating if another request may be expected, or if the connection should
+      be shut down.
+
+   .. attribute:: requestline
+
+      Contains the string representation of the HTTP request line. The
+      terminating CRLF is stripped. This attribute should be set by
+      :meth:`handle_one_request`. If no valid request line was processed, it
+      should be set to the empty string.
 
    .. attribute:: command
 
diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst
index 9c6b280..e61ac35 100644
--- a/Doc/library/importlib.rst
+++ b/Doc/library/importlib.rst
@@ -69,6 +69,10 @@
 
     An implementation of the built-in :func:`__import__` function.
 
+    .. note::
+       Programmatic importing of modules should use :func:`import_module`
+       instead of this function.
+
 .. function:: import_module(name, package=None)
 
     Import a module. The *name* argument specifies what module to
@@ -81,12 +85,15 @@
 
     The :func:`import_module` function acts as a simplifying wrapper around
     :func:`importlib.__import__`. This means all semantics of the function are
-    derived from :func:`importlib.__import__`, including requiring the package
-    from which an import is occurring to have been previously imported
-    (i.e., *package* must already be imported). The most important difference
-    is that :func:`import_module` returns the specified package or module
-    (e.g. ``pkg.mod``), while :func:`__import__` returns the
-    top-level package or module (e.g. ``pkg``).
+    derived from :func:`importlib.__import__`. The most important difference
+    between these two functions is that :func:`import_module` returns the
+    specified package or module (e.g. ``pkg.mod``), while :func:`__import__`
+    returns the top-level package or module (e.g. ``pkg``).
+
+    If you are dynamically importing a module that was created since the
+    interpreter began execution (e.g., a freshly created Python source file),
+    you may need to call :func:`invalidate_caches` in order for the new module
+    to be noticed by the import system.
 
     .. versionchanged:: 3.3
        Parent packages are automatically imported.
diff --git a/Doc/library/linecache.rst b/Doc/library/linecache.rst
index dacf8aa..12d0113 100644
--- a/Doc/library/linecache.rst
+++ b/Doc/library/linecache.rst
@@ -43,6 +43,14 @@
    changed on disk, and you require the updated version.  If *filename* is omitted,
    it will check all the entries in the cache.
 
+.. function:: lazycache(filename, module_globals)
+
+   Capture enough detail about a non-file based module to permit getting its
+   lines later via :func:`getline` even if *module_globals* is None in the later
+   call. This avoids doing I/O until a line is actually needed, without having
+   to carry the module globals around indefinitely.
+
+   .. versionadded:: 3.5
 
 Example::
 
diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst
index 48c57cf..67403a9 100644
--- a/Doc/library/logging.handlers.rst
+++ b/Doc/library/logging.handlers.rst
@@ -953,13 +953,20 @@
 possible, while any potentially slow operations (such as sending an email via
 :class:`SMTPHandler`) are done on a separate thread.
 
-.. class:: QueueListener(queue, *handlers)
+.. class:: QueueListener(queue, *handlers, respect_handler_level=False)
 
    Returns a new instance of the :class:`QueueListener` class. The instance is
    initialized with the queue to send messages to and a list of handlers which
    will handle entries placed on the queue. The queue can be any queue-
    like object; it's passed as-is to the :meth:`dequeue` method, which needs
-   to know how to get messages from it.
+   to know how to get messages from it. If ``respect_handler_level`` is ``True``,
+   a handler's level is respected (compared with the level for the message) when
+   deciding whether to pass messages to that handler; otherwise, the behaviour
+   is as in previous Python versions - to always pass each message to each
+   handler.
+
+   .. versionchanged:: 3.5
+      The ``respect_handler_level`` argument was added.
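+
+   A rough sketch of the new behaviour (the queue and handler set up here are
+   purely illustrative)::
+
+      import logging
+      import logging.handlers
+      import queue
+
+      q = queue.Queue()
+      console = logging.StreamHandler()
+      console.setLevel(logging.WARNING)
+      listener = logging.handlers.QueueListener(
+          q, console, respect_handler_level=True)
+      listener.start()
+      # with respect_handler_level=True, records below WARNING that arrive on
+      # the queue are no longer passed to this handler
+      listener.stop()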
 
    .. method:: dequeue(block)
 
diff --git a/Doc/library/mimetypes.rst b/Doc/library/mimetypes.rst
index f836243..8739ea3 100644
--- a/Doc/library/mimetypes.rst
+++ b/Doc/library/mimetypes.rst
@@ -106,8 +106,8 @@
    extension is already known, the new type will replace the old one. When the type
    is already known the extension will be added to the list of known extensions.
 
-   When *strict* is ``True`` (the default), the mapping will added to the official MIME
-   types, otherwise to the non-standard ones.
+   When *strict* is ``True`` (the default), the mapping will be added to the
+   official MIME types, otherwise to the non-standard ones.
 
 
 .. data:: inited
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index c4b8bac..0014b6c 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -1601,6 +1601,11 @@
 
    Availability: Unix, Windows.
 
+   .. seealso::
+
+      The :func:`scandir` function returns the directory entries with more
+      information than just the name.
+
    .. versionchanged:: 3.2
       The *path* parameter became optional.
 
@@ -1893,6 +1898,178 @@
       The *dir_fd* parameter.
 
 
+.. function:: scandir(path='.')
+
+   Return an iterator of :class:`DirEntry` objects corresponding to the entries
+   in the directory given by *path*. The entries are yielded in arbitrary
+   order, and the special entries ``'.'`` and ``'..'`` are not included.
+
+   On Windows, *path* must be of type :class:`str`. On POSIX, *path* can be of
+   type :class:`str` or :class:`bytes`.  If *path* is of type :class:`bytes`,
+   the :attr:`~DirEntry.name` and :attr:`~DirEntry.path` attributes of
+   :class:`DirEntry` are also of type ``bytes``. Use :func:`~os.fsencode` and
+   :func:`~os.fsdecode` to encode and decode paths.
+
+   Using :func:`scandir` instead of :func:`listdir` is recommended when the
+   file type of the entries is needed.  In most cases, :func:`scandir` gets
+   the file type of a :class:`DirEntry` directly, so no additional system call
+   is required.  If only the names of the entries are needed, :func:`listdir`
+   can be more efficient than :func:`scandir`.
+
+   The following example shows a simple use of :func:`scandir` to display all
+   the files excluding directories in the given *path* that don't start with
+   ``'.'``::
+
+      for entry in os.scandir(path):
+         if not entry.name.startswith('.') and entry.is_file():
+             print(entry.name)
+
+   .. note::
+
+      On Unix-based systems, :func:`scandir` uses the system's
+      `opendir() <http://pubs.opengroup.org/onlinepubs/009695399/functions/opendir.html>`_
+      and
+      `readdir() <http://pubs.opengroup.org/onlinepubs/009695399/functions/readdir_r.html>`_
+      functions. On Windows, it uses the Win32
+      `FindFirstFileW <http://msdn.microsoft.com/en-us/library/windows/desktop/aa364418(v=vs.85).aspx>`_
+      and
+      `FindNextFileW <http://msdn.microsoft.com/en-us/library/windows/desktop/aa364428(v=vs.85).aspx>`_
+      functions.
+
+   .. seealso::
+
+      The :func:`listdir` function returns the names of the directory entries.
+
+   .. versionadded:: 3.5
+
+
+.. class:: DirEntry
+
+   Object yielded by :func:`scandir` to expose the file path and other file
+   attributes of a directory entry.
+
+   :func:`scandir` will provide as much of this information as possible without
+   making additional system calls. When a ``stat()`` or ``lstat()`` system call
+   is made, the ``DirEntry`` object caches the result.
+
+   ``DirEntry`` instances are not intended to be stored in long-lived data
+   structures; if you know the file metadata has changed or if a long time has
+   elapsed since calling :func:`scandir`, call ``os.stat(entry.path)`` to fetch
+   up-to-date information.
+
+   Because the ``DirEntry`` methods can make operating system calls, they may
+   also raise :exc:`OSError`. For example, if a file is deleted between calling
+   :func:`scandir` and calling :func:`DirEntry.stat`, a
+   :exc:`FileNotFoundError` exception can be raised. Unfortunately, the
+   behaviour on errors depends on the platform. If you need very fine-grained
+   control over errors, you can catch :exc:`OSError` when calling one of the
+   ``DirEntry`` methods and handle as appropriate.
+
+   Attributes and methods on a ``DirEntry`` instance are as follows:
+
+   .. attribute:: name
+
+      The entry's base filename, relative to the :func:`scandir` *path*
+      argument.
+
+      The :attr:`name` type is :class:`str`. On POSIX, it can be of type
+      :class:`bytes` if the type of the :func:`scandir` *path* argument is also
+      :class:`bytes`. Use :func:`~os.fsdecode` to decode the name.
+
+   .. attribute:: path
+
+      The entry's full path name: equivalent to ``os.path.join(scandir_path,
+      entry.name)`` where *scandir_path* is the :func:`scandir` *path*
+      argument.  The path is only absolute if the :func:`scandir` *path*
+      argument is absolute.
+
+      The :attr:`path` type is :class:`str`. On POSIX, it can be of type
+      :class:`bytes` if the type of the :func:`scandir` *path* argument is also
+      :class:`bytes`. Use :func:`~os.fsdecode` to decode the path.
+
+   .. method:: inode()
+
+      Return the inode number of the entry.
+
+      The result is cached in the object.  Use ``os.stat(entry.path,
+      follow_symlinks=False).st_ino`` to fetch up-to-date information.
+
+      On POSIX, no system call is required.
+
+   .. method:: is_dir(\*, follow_symlinks=True)
+
+      If *follow_symlinks* is ``True`` (the default), return ``True`` if the
+      entry is a directory or a symbolic link pointing to a directory,
+      return ``False`` if it points to another kind of file, if it doesn't
+      exist anymore or if it is a broken symbolic link.
+
+      If *follow_symlinks* is ``False``, return ``True`` only if this entry
+      is a directory, return ``False`` if it points to a symbolic link or
+      another kind of file, if the entry doesn't exist anymore or if it is a
+      broken symbolic link.
+
+      The result is cached in the object. Call :func:`stat.S_ISDIR` with
+      :func:`os.stat` to fetch up-to-date information.
+
+      The method can raise :exc:`OSError`, such as :exc:`PermissionError`,
+      but :exc:`FileNotFoundError` is caught and not raised.
+
+      In most cases, no system call is required.
+
+   .. method:: is_file(\*, follow_symlinks=True)
+
+      If *follow_symlinks* is ``True`` (the default), return ``True`` if the
+      entry is a regular file or a symbolic link pointing to a regular file,
+      return ``False`` if it points to another kind of file, if it doesn't
+      exist anymore or if it is a broken symbolic link.
+
+      If *follow_symlinks* is ``False``, return ``True`` only if this entry
+      is a regular file, return ``False`` if it points to a symbolic link or
+      another kind of file, if it doesn't exist anymore or if it is a broken
+      symbolic link.
+
+      The result is cached in the object. Call :func:`stat.S_ISREG` with
+      :func:`os.stat` to fetch up-to-date information.
+
+      The method can raise :exc:`OSError`, such as :exc:`PermissionError`,
+      but :exc:`FileNotFoundError` is caught and not raised.
+
+      In most cases, no system call is required.
+
+   .. method:: is_symlink()
+
+      Return ``True`` if this entry is a symbolic link or a broken symbolic
+      link, return ``False`` if it points to another kind of file or if the
+      entry doesn't exist anymore.
+
+      The result is cached in the object. Call :func:`os.path.islink` to fetch
+      up-to-date information.
+
+      The method can raise :exc:`OSError`, such as :exc:`PermissionError`,
+      but :exc:`FileNotFoundError` is caught and not raised.
+
+      In most cases, no system call is required.
+
+   .. method:: stat(\*, follow_symlinks=True)
+
+      Return a :class:`stat_result` object for this entry. This function
+      normally follows symbolic links; to stat a symbolic link add the
+      argument ``follow_symlinks=False``.
+
+      On Windows, the ``st_ino``, ``st_dev`` and ``st_nlink`` attributes of the
+      :class:`stat_result` are always set to zero. Call :func:`os.stat` to
+      get these attributes.
+
+      The result is cached in the object. Call :func:`os.stat` to fetch
+      up-to-date information.
+
+      On Windows, ``DirEntry.stat(follow_symlinks=False)`` doesn't require a
+      system call. ``DirEntry.stat()`` requires a system call if the entry is a
+      symbolic link.
+
+   .. versionadded:: 3.5
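+
+   An illustrative sketch (the directory name ``some_dir`` is hypothetical)
+   that totals the size of the regular files in a directory using the methods
+   above::
+
+      import os
+
+      total = 0
+      for entry in os.scandir('some_dir'):
+          if entry.is_file(follow_symlinks=False):
+              total += entry.stat(follow_symlinks=False).st_size
+      print(total)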
+
+
 .. function:: stat(path, \*, dir_fd=None, follow_symlinks=True)
 
    Get the status of a file or a file descriptor. Perform the equivalent of a
@@ -2195,7 +2372,8 @@
    contain :func:`os.access`, otherwise it will be empty.
 
    To check whether you can use the *effective_ids* parameter for
-   :func:`os.access`, use the ``in`` operator on ``supports_dir_fd``, like so::
+   :func:`os.access`, use the ``in`` operator on ``supports_effective_ids``,
+   like so::
 
        os.access in os.supports_effective_ids
 
diff --git a/Doc/library/othergui.rst b/Doc/library/othergui.rst
index efb7cff..43721b2 100644
--- a/Doc/library/othergui.rst
+++ b/Doc/library/othergui.rst
@@ -50,7 +50,7 @@
       low-level device context drawing, drag and drop, system clipboard access,
       an XML-based resource format and more, including an ever growing library
       of user-contributed modules.  wxPython has a book, `wxPython in Action
-      <http://www.amazon.com/exec/obidos/ASIN/1932394621>`_, by Noel Rappin and
+      <http://www.manning.com/rappin/>`_, by Noel Rappin and
       Robin Dunn.
 
 PyGTK, PyQt, and wxPython, all have a modern look and feel and more
diff --git a/Doc/library/re.rst b/Doc/library/re.rst
index 8e20496..0d305d5 100644
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -297,6 +297,9 @@
       >>> m.group(0)
       'egg'
 
+   .. versionchanged:: 3.5
+      Added support for group references of fixed length.
+
 ``(?<!...)``
    Matches if the current position in the string is not preceded by a match for
    ``...``.  This is called a :dfn:`negative lookbehind assertion`.  Similar to
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
index e7cf425..7846cad 100644
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -344,10 +344,9 @@
    Verify that *cert* (in decoded format as returned by
    :meth:`SSLSocket.getpeercert`) matches the given *hostname*.  The rules
    applied are those for checking the identity of HTTPS servers as outlined
-   in :rfc:`2818` and :rfc:`6125`, except that IP addresses are not currently
-   supported. In addition to HTTPS, this function should be suitable for
-   checking the identity of servers in various SSL-based protocols such as
-   FTPS, IMAPS, POPS and others.
+   in :rfc:`2818` and :rfc:`6125`.  In addition to HTTPS, this function
+   should be suitable for checking the identity of servers in various
+   SSL-based protocols such as FTPS, IMAPS, POPS and others.
 
    :exc:`CertificateError` is raised on failure. On success, the function
    returns nothing::
@@ -369,6 +368,10 @@
       IDN A-labels such as ``www*.xn--pthon-kva.org`` are still supported,
       but ``x*.python.org`` no longer matches ``xn--tda.python.org``.
 
+   .. versionchanged:: 3.5
+      Matching of IP addresses, when present in the subjectAltName field
+      of the certificate, is now supported.
+
 .. function:: cert_time_to_seconds(cert_time)
 
    Return the time in seconds since the Epoch, given the ``cert_time``
@@ -517,9 +520,9 @@
 
 .. data:: VERIFY_DEFAULT
 
-   Possible value for :attr:`SSLContext.verify_flags`. In this mode,
-   certificate revocation lists (CRLs) are not checked. By default OpenSSL
-   does neither require nor verify CRLs.
+   Possible value for :attr:`SSLContext.verify_flags`. In this mode, certificate
+   revocation lists (CRLs) are not checked. By default OpenSSL does neither
+   require nor verify CRLs.
 
    .. versionadded:: 3.4
 
@@ -547,6 +550,14 @@
 
    .. versionadded:: 3.4
 
+.. data:: VERIFY_X509_TRUSTED_FIRST
+
+   Possible value for :attr:`SSLContext.verify_flags`. It instructs OpenSSL to
+   prefer trusted certificates when building the trust chain to validate a
+   certificate. This flag is enabled by default.
+
+   .. versionadded:: 3.4.5
+
 .. data:: PROTOCOL_SSLv23
 
    Selects the highest protocol version that both the client and server support.
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index 55f5351..f79c416 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -2880,8 +2880,8 @@
 .. method:: bytes.isupper()
             bytearray.isupper()
 
-   Return true if there is at least one lowercase alphabetic ASCII character
-   in the sequence and no uppercase ASCII characters, false otherwise.
+   Return true if there is at least one uppercase alphabetic ASCII character
+   in the sequence and no lowercase ASCII characters, false otherwise.
 
    For example::
 
diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst
index fe2ed99..4fd94fd 100644
--- a/Doc/library/tarfile.rst
+++ b/Doc/library/tarfile.rst
@@ -62,6 +62,23 @@
    +------------------+---------------------------------------------+
    | ``'r:xz'``       | Open for reading with lzma compression.     |
    +------------------+---------------------------------------------+
+   | ``'x'`` or       | Create a tarfile exclusively without        |
+   | ``'x:'``         | compression.                                |
+   |                  | Raise an :exc:`FileExistsError` exception   |
+   |                  | if it already exists.                       |
+   +------------------+---------------------------------------------+
+   | ``'x:gz'``       | Create a tarfile with gzip compression.     |
+   |                  | Raise an :exc:`FileExistsError` exception   |
+   |                  | if it already exists.                       |
+   +------------------+---------------------------------------------+
+   | ``'x:bz2'``      | Create a tarfile with bzip2 compression.    |
+   |                  | Raise an :exc:`FileExistsError` exception   |
+   |                  | if it already exists.                       |
+   +------------------+---------------------------------------------+
+   | ``'x:xz'``       | Create a tarfile with lzma compression.     |
+   |                  | Raise an :exc:`FileExistsError` exception   |
+   |                  | if it already exists.                       |
+   +------------------+---------------------------------------------+
    | ``'a' or 'a:'``  | Open for appending with no compression. The |
    |                  | file is created if it does not exist.       |
    +------------------+---------------------------------------------+
@@ -82,9 +99,9 @@
    If *fileobj* is specified, it is used as an alternative to a :term:`file object`
    opened in binary mode for *name*. It is supposed to be at position 0.
 
-   For modes ``'w:gz'``, ``'r:gz'``, ``'w:bz2'``, ``'r:bz2'``, :func:`tarfile.open`
-   accepts the keyword argument *compresslevel* to specify the compression level of
-   the file.
+   For modes ``'w:gz'``, ``'r:gz'``, ``'w:bz2'``, ``'r:bz2'``, ``'x:gz'``,
+   ``'x:bz2'``, :func:`tarfile.open` accepts the keyword argument
+   *compresslevel* to specify the compression level of the file.
 
    For special purposes, there is a second format for *mode*:
    ``'filemode|[compression]'``.  :func:`tarfile.open` will return a :class:`TarFile`
@@ -127,6 +144,8 @@
    |             | writing.                                   |
    +-------------+--------------------------------------------+
 
+   .. versionchanged:: 3.5
+      The ``'x'`` (exclusive creation) mode was added.
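+
+   A minimal illustrative sketch of exclusive creation (the file names are
+   hypothetical)::
+
+      import tarfile
+
+      with tarfile.open('backup.tar.gz', 'x:gz') as tar:
+          tar.add('data.txt')
+      # calling tarfile.open('backup.tar.gz', 'x:gz') again now raises
+      # FileExistsError because the archive already exists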
 
 .. class:: TarFile
 
@@ -252,8 +271,8 @@
    In this case, the file object's :attr:`name` attribute is used if it exists.
 
    *mode* is either ``'r'`` to read from an existing archive, ``'a'`` to append
-   data to an existing file or ``'w'`` to create a new file overwriting an existing
-   one.
+   data to an existing file, ``'w'`` to create a new file overwriting an existing
+   one or ``'x'`` to create a new file only if it does not already exist.
 
    If *fileobj* is given, it is used for reading or writing data. If it can be
    determined, *mode* is overridden by *fileobj*'s mode. *fileobj* will be used
@@ -292,12 +311,14 @@
    to be handled. The default settings will work for most users.
    See section :ref:`tar-unicode` for in-depth information.
 
-   .. versionchanged:: 3.2
-      Use ``'surrogateescape'`` as the default for the *errors* argument.
-
    The *pax_headers* argument is an optional dictionary of strings which
    will be added as a pax global header if *format* is :const:`PAX_FORMAT`.
 
+   .. versionchanged:: 3.2
+      Use ``'surrogateescape'`` as the default for the *errors* argument.
+
+   .. versionchanged:: 3.5
+      The ``'x'`` (exclusive creation) mode was added.
 
 .. classmethod:: TarFile.open(...)
 
diff --git a/Doc/library/tkinter.rst b/Doc/library/tkinter.rst
index 40e97bf..3b6c681 100644
--- a/Doc/library/tkinter.rst
+++ b/Doc/library/tkinter.rst
@@ -31,13 +31,13 @@
    `Tcl/Tk manual <http://www.tcl.tk/man/tcl8.5/>`_
       Official manual for the latest tcl/tk version.
 
-   `Programming Python <http://www.amazon.com/Programming-Python-Mark-Lutz/dp/0596158106/>`_
+   `Programming Python <http://www.rmi.net/~lutz/about-pp4e.html>`_
       Book by Mark Lutz, has excellent coverage of Tkinter.
 
    `Modern Tkinter for Busy Python Developers <http://www.amazon.com/Modern-Tkinter-Python-Developers-ebook/dp/B0071QDNLO/>`_
       Book by Mark Rozerman about building attractive and modern graphical user interfaces with Python and Tkinter.
 
-   `Python and Tkinter Programming <http://www.amazon.com/exec/obidos/ASIN/1884777813>`_
+   `Python and Tkinter Programming <http://www.manning.com/grayson/>`_
       The book by John Grayson (ISBN 1-884777-81-3).
 
 
@@ -182,7 +182,7 @@
    `Tcl and the Tk Toolkit <http://www.amazon.com/exec/obidos/ASIN/020163337X>`_
       The book by John Ousterhout, the inventor of Tcl.
 
-   `Practical Programming in Tcl and Tk <http://www.amazon.com/exec/obidos/ASIN/0130220280>`_
+   `Practical Programming in Tcl and Tk <http://www.beedub.com/book/>`_
       Brent Welch's encyclopedic book.
 
 
diff --git a/Doc/library/traceback.rst b/Doc/library/traceback.rst
index 15fbedc..c6b5d0c 100644
--- a/Doc/library/traceback.rst
+++ b/Doc/library/traceback.rst
@@ -136,6 +136,162 @@
 
    .. versionadded:: 3.4
 
+.. function:: walk_stack(f)
+
+   Walk a stack following ``f.f_back`` from the given frame, yielding the frame
+   and line number for each frame. If *f* is ``None``, the current stack is
+   used. This helper is used with :meth:`StackSummary.extract`.
+
+   .. versionadded:: 3.5
+
+.. function:: walk_tb(tb)
+
+   Walk a traceback following ``tb_next`` yielding the frame and line number
+   for each frame. This helper is used with :meth:`StackSummary.extract`.
+
+   .. versionadded:: 3.5
+
+The module also defines the following classes:
+
+:class:`TracebackException` Objects
+-----------------------------------
+
+.. versionadded:: 3.5
+
+:class:`TracebackException` objects are created from actual exceptions to
+capture data for later printing in a lightweight fashion.
+
+.. class:: TracebackException(exc_type, exc_value, exc_traceback, *, limit=None, lookup_lines=True, capture_locals=False)
+
+   Capture an exception for later rendering. *limit*, *lookup_lines* and
+   *capture_locals* are as for the :class:`StackSummary` class.
+
+   Note that when locals are captured, they are also shown in the traceback.
+
+   .. attribute:: __cause__
+
+      A :class:`TracebackException` of the original ``__cause__``.
+
+   .. attribute:: __context__
+
+      A :class:`TracebackException` of the original ``__context__``.
+
+   .. attribute:: __suppress_context__
+
+      The ``__suppress_context__`` value from the original exception.
+
+   .. attribute:: stack
+
+      A :class:`StackSummary` representing the traceback.
+
+   .. attribute:: exc_type
+
+      The class of the original exception.
+
+   .. attribute:: filename
+
+      For syntax errors - the file name where the error occurred.
+
+   .. attribute:: lineno
+
+      For syntax errors - the line number where the error occurred.
+
+   .. attribute:: text
+
+      For syntax errors - the text where the error occurred.
+
+   .. attribute:: offset
+
+      For syntax errors - the offset into the text where the error occurred.
+
+   .. attribute:: msg
+
+      For syntax errors - the compiler error message.
+
+   .. classmethod:: from_exception(exc, *, limit=None, lookup_lines=True, capture_locals=False)
+
+      Capture an exception for later rendering. *limit*, *lookup_lines* and
+      *capture_locals* are as for the :class:`StackSummary` class.
+
+      Note that when locals are captured, they are also shown in the traceback.
+
+   .. method:: format(*, chain=True)
+
+      Format the exception.
+
+      If *chain* is not ``True``, ``__cause__`` and ``__context__`` will not
+      be formatted.
+
+      The return value is a generator of strings, each ending in a newline and
+      some containing internal newlines. :func:`~traceback.print_exception`
+      is a wrapper around this method which just prints the lines to a file.
+
+      The message indicating which exception occurred is always the last
+      string in the output.
+
+   .. method::  format_exception_only()
+
+      Format the exception part of the traceback.
+
+      The return value is a generator of strings, each ending in a newline.
+
+      Normally, the generator emits a single string; however, for
+      :exc:`SyntaxError` exceptions, it emits several lines that (when
+      printed) display detailed information about where the syntax
+      error occurred.
+
+      The message indicating which exception occurred is always the last
+      string in the output.
+
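+A rough usage sketch (the zero division below is only there to produce an
+exception)::
+
+   import traceback
+
+   try:
+       1 / 0
+   except Exception as exc:
+       te = traceback.TracebackException.from_exception(exc)
+       print(''.join(te.format()))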
+
+:class:`StackSummary` Objects
+-----------------------------
+
+.. versionadded:: 3.5
+
+:class:`StackSummary` objects represent a call stack ready for formatting.
+
+.. class:: StackSummary
+
+   .. classmethod:: extract(frame_gen, *, limit=None, lookup_lines=True, capture_locals=False)
+
+      Construct a :class:`StackSummary` object from a frame generator (such as
+      is returned by :func:`~traceback.walk_stack` or
+      :func:`~traceback.walk_tb`).
+
+      If *limit* is supplied, only this many frames are taken from *frame_gen*.
+      If *lookup_lines* is ``False``, the returned :class:`FrameSummary`
+      objects will not have read their lines in yet, making the cost of
+      creating the :class:`StackSummary` cheaper (which may be valuable if it
+      may not actually get formatted). If *capture_locals* is ``True`` the
+      local variables in each :class:`FrameSummary` are captured as object
+      representations.
+
+   .. classmethod:: from_list(a_list)
+
+      Construct a :class:`StackSummary` object from a supplied old-style list
+      of tuples. Each tuple should be a 4-tuple with filename, lineno, name,
+      line as the elements.
+
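+A rough sketch combining :meth:`StackSummary.extract` with
+:func:`~traceback.walk_tb` (the error below is raised purely for
+illustration)::
+
+   import sys
+   import traceback
+
+   try:
+       int('not a number')
+   except ValueError:
+       tb = sys.exc_info()[2]
+       for frame in traceback.StackSummary.extract(traceback.walk_tb(tb)):
+           print(frame.filename, frame.lineno, frame.name)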
+
+:class:`FrameSummary` Objects
+-----------------------------
+
+.. versionadded:: 3.5
+
+:class:`FrameSummary` objects represent a single frame in a traceback.
+
+.. class:: FrameSummary(filename, lineno, name, lookup_line=True, locals=None, line=None)
+
+   Represent a single frame in the traceback or stack that is being formatted
+   or printed. It may optionally have a stringified version of the frame's
+   locals included in it. If *lookup_line* is ``False``, the source code is not
+   looked up until the :class:`FrameSummary` has the :attr:`~FrameSummary.line`
+   attribute accessed (which also happens when casting it to a tuple).
+   :attr:`~FrameSummary.line` may be directly provided, and will prevent line
+   lookups happening at all. *locals* is an optional local variable
+   dictionary, and if supplied the variable representations are stored in the
+   summary for later display.
 
 .. _traceback-example:
 
diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst
index 157fe93..30dd6ef 100644
--- a/Doc/library/turtle.rst
+++ b/Doc/library/turtle.rst
@@ -2351,6 +2351,9 @@
 |                | pairwise in opposite         | shapesize, tilt,      |
 |                | direction                    | get_shapepoly, update |
 +----------------+------------------------------+-----------------------+
+| sorting_animate| visual demonstration of      | simple alignment,     |
+|                | different sorting methods    | randomization         |
++----------------+------------------------------+-----------------------+
 | tree           | a (graphical) breadth        | :func:`clone`         |
 |                | first tree (using generators)|                       |
 +----------------+------------------------------+-----------------------+
diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst
index 92609ec..7ddf703 100644
--- a/Doc/library/unittest.rst
+++ b/Doc/library/unittest.rst
@@ -223,9 +223,16 @@
 
    Stop the test run on the first error or failure.
 
+.. cmdoption:: --locals
+
+   Show local variables in tracebacks.
+
 .. versionadded:: 3.2
    The command-line options ``-b``, ``-c`` and ``-f`` were added.
 
+.. versionadded:: 3.5
+   The command-line option ``--locals``.
+
 The command line can also be used for test discovery, for running all of the
 tests in a project or just a subset.
 
@@ -1782,12 +1789,10 @@
 
       Set to ``True`` when the execution of tests should stop by :meth:`stop`.
 
-
    .. attribute:: testsRun
 
       The total number of tests run so far.
 
-
    .. attribute:: buffer
 
       If set to true, ``sys.stdout`` and ``sys.stderr`` will be buffered in between
@@ -1797,7 +1802,6 @@
 
       .. versionadded:: 3.2
 
-
    .. attribute:: failfast
 
       If set to true :meth:`stop` will be called on the first failure or error,
@@ -1805,6 +1809,11 @@
 
       .. versionadded:: 3.2
 
+   .. attribute:: tb_locals
+
+      If set to true then local variables will be shown in tracebacks.
+
+      .. versionadded:: 3.5
 
    .. method:: wasSuccessful()
 
@@ -1815,7 +1824,6 @@
          Returns ``False`` if there were any :attr:`unexpectedSuccesses`
          from tests marked with the :func:`expectedFailure` decorator.
 
-
    .. method:: stop()
 
       This method can be called to signal that the set of tests being run should
@@ -1947,12 +1955,14 @@
 
 
 .. class:: TextTestRunner(stream=None, descriptions=True, verbosity=1, failfast=False, \
-                          buffer=False, resultclass=None, warnings=None)
+                          buffer=False, resultclass=None, warnings=None, *, tb_locals=False)
 
    A basic test runner implementation that outputs results to a stream. If *stream*
    is ``None``, the default, :data:`sys.stderr` is used as the output stream. This class
    has a few configurable parameters, but is essentially very simple.  Graphical
-   applications which run test suites should provide alternate implementations.
+   applications which run test suites should provide alternate implementations.
+   Such implementations should accept ``**kwargs`` so that they keep working as
+   the interface used to construct runners changes when features are added to
+   unittest.
 
    By default this runner shows :exc:`DeprecationWarning`,
    :exc:`PendingDeprecationWarning`, :exc:`ResourceWarning` and
@@ -1971,6 +1981,9 @@
       The default stream is set to :data:`sys.stderr` at instantiation time rather
       than import time.
 
+   .. versionchanged:: 3.5
+      Added the ``tb_locals`` parameter.
+
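+   An illustrative sketch (discovery from the current directory is assumed)::
+
+      import unittest
+
+      tests = unittest.defaultTestLoader.discover('.')
+      runner = unittest.TextTestRunner(verbosity=2, tb_locals=True)
+      runner.run(tests)
+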
    .. method:: _makeResult()
 
       This method returns the instance of ``TestResult`` used by :meth:`run`.
diff --git a/Doc/make.bat b/Doc/make.bat
index fd54cb8..6aae34a 100644
--- a/Doc/make.bat
+++ b/Doc/make.bat
@@ -8,13 +8,17 @@
 if "%SPHINXBUILD%" EQU "" set SPHINXBUILD=sphinx-build
 if "%PYTHON%" EQU "" set PYTHON=py
 
-if "%HTMLHELP%" EQU "" (
-    where hhc 2>nul >"%TEMP%\hhc.loc"
-    if errorlevel 1 dir "..\externals\hhc.exe" /s/b > "%TEMP%\hhc.loc"
-    if errorlevel 1 echo Cannot find HHC on PATH or in externals & exit /B 1
-    set /P HTMLHELP= < "%TEMP%\hhc.loc"
-    del "%TEMP%\hhc.loc"
-)
+if "%1" NEQ "htmlhelp" goto :skiphhcsearch
+if exist "%HTMLHELP%" goto :skiphhcsearch
+
+rem Search for HHC in likely places
+set HTMLHELP=
+where hhc /q && set HTMLHELP=hhc && goto :skiphhcsearch
+where /R ..\externals hhc > "%TEMP%\hhc.loc" 2> nul && set /P HTMLHELP= < "%TEMP%\hhc.loc" & del "%TEMP%\hhc.loc"
+if not exist "%HTMLHELP%" where /R "%ProgramFiles(x86)%" hhc > "%TEMP%\hhc.loc" 2> nul && set /P HTMLHELP= < "%TEMP%\hhc.loc" & del "%TEMP%\hhc.loc"
+if not exist "%HTMLHELP%" where /R "%ProgramFiles%" hhc > "%TEMP%\hhc.loc" 2> nul && set /P HTMLHELP= < "%TEMP%\hhc.loc" & del "%TEMP%\hhc.loc"
+if not exist "%HTMLHELP%" echo Cannot find HHC on PATH or in externals & exit /B 1
+:skiphhcsearch
 
 if "%DISTVERSION%" EQU "" for /f "usebackq" %%v in (`%PYTHON% tools/extensions/patchlevel.py`) do set DISTVERSION=%%v
 
diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst
index 58d38ed..12f9f2f 100644
--- a/Doc/reference/expressions.rst
+++ b/Doc/reference/expressions.rst
@@ -1075,7 +1075,7 @@
 * Numbers are compared arithmetically.
 
 * The values :const:`float('NaN')` and :const:`Decimal('NaN')` are special.
-  The are identical to themselves, ``x is x`` but are not equal to themselves,
+  They are identical to themselves, ``x is x`` but are not equal to themselves,
   ``x != x``.  Additionally, comparing any value to a not-a-number value
   will return ``False``.  For example, both ``3 < float('NaN')`` and
   ``float('NaN') < 3`` will return ``False``.
diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py
index 41c25bc..9b78184 100644
--- a/Doc/tools/extensions/pyspecific.py
+++ b/Doc/tools/extensions/pyspecific.py
@@ -145,6 +145,25 @@
         return PyClassmember.run(self)
 
 
+class PyCoroutineMixin(object):
+    def handle_signature(self, sig, signode):
+        ret = super(PyCoroutineMixin, self).handle_signature(sig, signode)
+        signode.insert(0, addnodes.desc_annotation('coroutine ', 'coroutine '))
+        return ret
+
+
+class PyCoroutineFunction(PyCoroutineMixin, PyModulelevel):
+    def run(self):
+        self.name = 'py:function'
+        return PyModulelevel.run(self)
+
+
+class PyCoroutineMethod(PyCoroutineMixin, PyClassmember):
+    def run(self):
+        self.name = 'py:method'
+        return PyClassmember.run(self)
+
+
 # Support for documenting version of removal in deprecations
 
 class DeprecatedRemoved(Directive):
@@ -347,5 +366,7 @@
     app.add_description_unit('2to3fixer', '2to3fixer', '%s (2to3 fixer)')
     app.add_directive_to_domain('py', 'decorator', PyDecoratorFunction)
     app.add_directive_to_domain('py', 'decoratormethod', PyDecoratorMethod)
+    app.add_directive_to_domain('py', 'coroutinefunction', PyCoroutineFunction)
+    app.add_directive_to_domain('py', 'coroutinemethod', PyCoroutineMethod)
     app.add_directive('miscnews', MiscNews)
     return {'version': '1.0', 'parallel_read_safe': True}
diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst
index af66196..f194802 100644
--- a/Doc/using/windows.rst
+++ b/Doc/using/windows.rst
@@ -404,6 +404,18 @@
 Per-user installations of Python do not add the launcher to :envvar:`PATH`
 unless the option was selected on installation.
 
+Virtual environments
+^^^^^^^^^^^^^^^^^^^^
+
+.. versionadded:: 3.5
+
+If the launcher is run with no explicit Python version specification, and a
+virtual environment (created with the standard library :mod:`venv` module or
+the external ``virtualenv`` tool) is active, the launcher will run the virtual
+environment's interpreter rather than the global one.  To run the global
+interpreter, either deactivate the virtual environment, or explicitly specify
+the global Python version.
+
 From a script
 ^^^^^^^^^^^^^
 
@@ -478,6 +490,16 @@
 on Windows which you hope will be useful on Unix, you should use one of the
 shebang lines starting with ``/usr``.
 
+Any of the above virtual commands can be suffixed with an explicit version
+(either just the major version, or the major and minor version) - for example
+``/usr/bin/python2.7`` - which will cause that specific version to be located
+and used.
+
+The ``/usr/bin/env`` form of shebang line has one further special property.
+Before looking for installed Python interpreters, this form will search the
+executable :envvar:`PATH` for a Python executable. This corresponds to the
+behaviour of the Unix ``env`` program, which performs a :envvar:`PATH` search.
+
 Arguments in shebang lines
 --------------------------
 
diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst
index 2c6c3a2..6932fcb 100644
--- a/Doc/whatsnew/3.5.rst
+++ b/Doc/whatsnew/3.5.rst
@@ -111,6 +111,19 @@
        PEP written by Carl Meyer
 
 
+
+PEP 471 - os.scandir() function -- a better and faster directory iterator
+-------------------------------------------------------------------------
+
+:pep:`471` includes a new directory iteration function, :func:`os.scandir`,
+in the standard library.
+
+.. seealso::
+
+   :pep:`471` -- os.scandir() function -- a better and faster directory
+   iterator.
+
+
 PEP 475: Retry system calls failing with EINTR
 ----------------------------------------------
 
@@ -118,11 +131,25 @@
 this means that user code doesn't have to deal with EINTR or InterruptedError
 manually, and should make it more robust against asynchronous signal reception.
 
+PEP and implementation written by Charles-François Natali and Victor Stinner.
+
 .. seealso::
 
    :pep:`475` -- Retry system calls failing with EINTR
 
 
+PEP 486: Make the Python Launcher aware of virtual environments
+---------------------------------------------------------------
+
+:pep:`486` makes the Windows launcher (see :pep:`397`) aware of an active
+virtual environment. When the default interpreter would be used and the
+``VIRTUAL_ENV`` environment variable is set, the interpreter in the virtual
+environment will be used.
+
+.. seealso::
+
+    :pep:`486` -- Make the Python Launcher aware of virtual environments
+
 Other Language Changes
 ======================
 
@@ -146,10 +173,18 @@
 Improved Modules
 ================
 
+argparse
+--------
+
+* :class:`~argparse.ArgumentParser` now allows disabling
+  :ref:`abbreviated usage <prefix-matching>` of long options by setting
+  ``allow_abbrev`` to ``False``, as illustrated below.
+  (Contributed by Jonathan Paugh, Steven Bethard, paul j3 and Daniel Eriksson.)
+
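+  A minimal illustrative sketch (the option name is made up)::
+
+     import argparse
+
+     parser = argparse.ArgumentParser(allow_abbrev=False)
+     parser.add_argument('--verbose', action='store_true')
+     parser.parse_args(['--verb'])   # now an error instead of matching --verbose
+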
 cgi
 ---
 
-* :class:`FieldStorage` now supports the context management protocol.
+* :class:`~cgi.FieldStorage` now supports the context management protocol.
   (Contributed by Berker Peksag in :issue:`20289`.)
 
 code
@@ -259,6 +294,10 @@
 os
 --
 
+* New :func:`os.scandir` function: Return an iterator of :class:`os.DirEntry`
+  objects corresponding to the entries in the directory given by *path*.
+  (Implementation written by Ben Hoyt with the help of Victor Stinner.)
+
 * :class:`os.stat_result` now has a :attr:`~os.stat_result.st_file_attributes`
   attribute on Windows.  (Contributed by Ben Hoyt in :issue:`21719`.)
 
@@ -334,14 +373,27 @@
   :meth:`socket.socket.send`.
   (Contributed by Giampaolo Rodola' in :issue:`17552`.)
 
+sysconfig
+---------
+
+* The user scripts directory on Windows is now versioned.
+  (Contributed by Paul Moore in :issue:`23437`.)
+
+
+tarfile
+-------
+
+* The :func:`tarfile.open` function now supports ``'x'`` (exclusive creation)
+  mode.  (Contributed by Berker Peksag in :issue:`21717`.)
+
 time
 ----
 
 * The :func:`time.monotonic` function is now always available.  (Contributed by
   Victor Stinner in :issue:`22043`.)
 
-time
-----
+urllib
+------
 
 * A new :class:`urllib.request.HTTPBasicPriorAuthHandler` allows HTTP Basic
   Authentication credentials to be sent unconditionally with the first HTTP
@@ -379,7 +431,12 @@
   (:issue:`21486`, :issue:`21487`, :issue:`20826`)
 
 * Many operations on :class:`io.BytesIO` are now 50% to 100% faster.
-  (Contributed by Serhiy Storchaka in :issue:`15381`.)
+  (Contributed by Serhiy Storchaka in :issue:`15381` and David Wilson in
+  :issue:`22003`.)
+
+* :func:`marshal.dumps` is now faster (65--85% with versions 3--4, 20--25% with
+  versions 0--2 on typical data, and up to 5x in best cases).
+  (Contributed by Serhiy Storchaka in :issue:`20416` and :issue:`23344`.)
 
 
 Build and C API Changes
@@ -523,3 +580,8 @@
 * Removed non-documented macro :c:macro:`PyObject_REPR` which leaked references.
   Use format character ``%R`` in :c:func:`PyUnicode_FromFormat`-like functions
   to format the :func:`repr` of the object.
+
+* Because the lack of the :attr:`__module__` attribute breaks pickling and
+  introspection, a deprecation warning is now raised for builtin types without
+  the :attr:`__module__` attribute.  This will become an :exc:`AttributeError`
+  in the future.
+  (:issue:`20204`)
diff --git a/Include/abstract.h b/Include/abstract.h
index db70f21..56fbf86 100644
--- a/Include/abstract.h
+++ b/Include/abstract.h
@@ -266,6 +266,11 @@
      PyAPI_FUNC(PyObject *) PyObject_Call(PyObject *callable_object,
                                           PyObject *args, PyObject *kw);
 
+#ifndef Py_LIMITED_API
+     PyAPI_FUNC(PyObject *) _Py_CheckFunctionResult(PyObject *obj,
+                                                    const char *func_name);
+#endif
+
        /*
      Call a callable Python object, callable_object, with
      arguments and keywords arguments.  The 'args' argument can not be
diff --git a/Include/fileobject.h b/Include/fileobject.h
index 0939744..03155d3 100644
--- a/Include/fileobject.h
+++ b/Include/fileobject.h
@@ -32,17 +32,6 @@
 #ifndef Py_LIMITED_API
 PyAPI_FUNC(PyObject *) PyFile_NewStdPrinter(int);
 PyAPI_DATA(PyTypeObject) PyStdPrinter_Type;
-
-#if defined _MSC_VER && _MSC_VER >= 1400
-/* A routine to check if a file descriptor is valid on Windows.  Returns 0
- * and sets errno to EBADF if it isn't.  This is to avoid Assertions
- * from various functions in the Windows CRT beginning with
- * Visual Studio 2005
- */
-int _PyVerify_fd(int fd);
-#else
-#define _PyVerify_fd(A) (1) /* dummy */
-#endif
 #endif /* Py_LIMITED_API */
 
 /* A routine to check if a file descriptor can be select()-ed. */
diff --git a/Include/fileutils.h b/Include/fileutils.h
index c5eebc5..95632ed 100644
--- a/Include/fileutils.h
+++ b/Include/fileutils.h
@@ -21,11 +21,42 @@
     struct stat *buf);
 #endif
 
+#ifndef Py_LIMITED_API
+#if defined(HAVE_FSTAT) || defined(MS_WINDOWS)
+
+#ifdef MS_WINDOWS
+struct _Py_stat_struct {
+    unsigned long st_dev;
+    __int64 st_ino;
+    unsigned short st_mode;
+    int st_nlink;
+    int st_uid;
+    int st_gid;
+    unsigned long st_rdev;
+    __int64 st_size;
+    time_t st_atime;
+    int st_atime_nsec;
+    time_t st_mtime;
+    int st_mtime_nsec;
+    time_t st_ctime;
+    int st_ctime_nsec;
+    unsigned long st_file_attributes;
+};
+#else
+#  define _Py_stat_struct stat
+#endif
+
+PyAPI_FUNC(int) _Py_fstat(
+    int fd,
+    struct _Py_stat_struct *stat);
+#endif   /* HAVE_FSTAT || MS_WINDOWS */
+#endif   /* Py_LIMITED_API */
+
 #ifdef HAVE_STAT
 PyAPI_FUNC(int) _Py_stat(
     PyObject *path,
     struct stat *statbuf);
-#endif
+#endif   /* HAVE_STAT */
 
 #ifndef Py_LIMITED_API
 PyAPI_FUNC(int) _Py_open(
@@ -77,6 +108,18 @@
 PyAPI_FUNC(int) _Py_set_blocking(int fd, int blocking);
 #endif   /* !MS_WINDOWS */
 
+#if defined _MSC_VER && _MSC_VER >= 1400
+/* A routine to check if a file descriptor is valid on Windows.  Returns 0
+ * and sets errno to EBADF if it isn't.  This is to avoid Assertions
+ * from various functions in the Windows CRT beginning with
+ * Visual Studio 2005
+ */
+int _PyVerify_fd(int fd);
+
+#else
+#define _PyVerify_fd(A) (1) /* dummy */
+#endif
+
 #endif   /* Py_LIMITED_API */
 
 #ifdef __cplusplus
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index 9606c15..7ca2775 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -20,10 +20,10 @@
 #define PY_MINOR_VERSION	5
 #define PY_MICRO_VERSION	0
 #define PY_RELEASE_LEVEL	PY_RELEASE_LEVEL_ALPHA
-#define PY_RELEASE_SERIAL	1
+#define PY_RELEASE_SERIAL	2
 
 /* Version as a string */
-#define PY_VERSION      	"3.5.0a1"
+#define PY_VERSION      	"3.5.0a2"
 /*--end constants--*/
 
 /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h
index d2ffabe..814d7c0 100644
--- a/Include/unicodeobject.h
+++ b/Include/unicodeobject.h
@@ -1052,7 +1052,7 @@
    always ends with a nul character. If size is not NULL, write the number of
    wide characters (excluding the null character) into *size.
 
-   Returns a buffer allocated by PyMem_Alloc() (use PyMem_Free() to free it)
+   Returns a buffer allocated by PyMem_Malloc() (use PyMem_Free() to free it)
    on success. On error, returns NULL, *size is undefined and raises a
    MemoryError. */
 
diff --git a/Lib/_pydecimal.py b/Lib/_pydecimal.py
index ca6c4bd..88222be 100644
--- a/Lib/_pydecimal.py
+++ b/Lib/_pydecimal.py
@@ -4108,7 +4108,7 @@
         >>> context.create_decimal_from_float(3.1415926535897932)
         Traceback (most recent call last):
             ...
-        decimal.Inexact: None
+        decimal.Inexact
 
         """
         d = Decimal.from_float(f)       # An exact conversion
diff --git a/Lib/argparse.py b/Lib/argparse.py
index ba9e3df..9a06719 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -1590,6 +1590,7 @@
         - argument_default -- The default value for all arguments
         - conflict_handler -- String indicating how to handle conflicts
         - add_help -- Add a -h/-help option
+        - allow_abbrev -- Allow long options to be abbreviated unambiguously
     """
 
     def __init__(self,
@@ -1603,7 +1604,8 @@
                  fromfile_prefix_chars=None,
                  argument_default=None,
                  conflict_handler='error',
-                 add_help=True):
+                 add_help=True,
+                 allow_abbrev=True):
 
         superinit = super(ArgumentParser, self).__init__
         superinit(description=description,
@@ -1621,6 +1623,7 @@
         self.formatter_class = formatter_class
         self.fromfile_prefix_chars = fromfile_prefix_chars
         self.add_help = add_help
+        self.allow_abbrev = allow_abbrev
 
         add_group = self.add_argument_group
         self._positionals = add_group(_('positional arguments'))
@@ -2098,23 +2101,24 @@
                 action = self._option_string_actions[option_string]
                 return action, option_string, explicit_arg
 
-        # search through all possible prefixes of the option string
-        # and all actions in the parser for possible interpretations
-        option_tuples = self._get_option_tuples(arg_string)
+        if self.allow_abbrev:
+            # search through all possible prefixes of the option string
+            # and all actions in the parser for possible interpretations
+            option_tuples = self._get_option_tuples(arg_string)
 
-        # if multiple actions match, the option string was ambiguous
-        if len(option_tuples) > 1:
-            options = ', '.join([option_string
-                for action, option_string, explicit_arg in option_tuples])
-            args = {'option': arg_string, 'matches': options}
-            msg = _('ambiguous option: %(option)s could match %(matches)s')
-            self.error(msg % args)
+            # if multiple actions match, the option string was ambiguous
+            if len(option_tuples) > 1:
+                options = ', '.join([option_string
+                    for action, option_string, explicit_arg in option_tuples])
+                args = {'option': arg_string, 'matches': options}
+                msg = _('ambiguous option: %(option)s could match %(matches)s')
+                self.error(msg % args)
 
-        # if exactly one action matched, this segmentation is good,
-        # so return the parsed action
-        elif len(option_tuples) == 1:
-            option_tuple, = option_tuples
-            return option_tuple
+            # if exactly one action matched, this segmentation is good,
+            # so return the parsed action
+            elif len(option_tuples) == 1:
+                option_tuple, = option_tuples
+                return option_tuple
 
         # if it was not found as an option, but it looks like a negative
         # number, it was meant to be positional
diff --git a/Lib/asyncio/base_subprocess.py b/Lib/asyncio/base_subprocess.py
index 02b9e89..c1cdfda 100644
--- a/Lib/asyncio/base_subprocess.py
+++ b/Lib/asyncio/base_subprocess.py
@@ -57,6 +57,8 @@
         info.append('pid=%s' % self._pid)
         if self._returncode is not None:
             info.append('returncode=%s' % self._returncode)
+        else:
+            info.append('running')
 
         stdin = self._pipes.get(0)
         if stdin is not None:
@@ -77,12 +79,6 @@
     def _start(self, args, shell, stdin, stdout, stderr, bufsize, **kwargs):
         raise NotImplementedError
 
-    def _make_write_subprocess_pipe_proto(self, fd):
-        raise NotImplementedError
-
-    def _make_read_subprocess_pipe_proto(self, fd):
-        raise NotImplementedError
-
     def close(self):
         if self._closed:
             return
@@ -93,7 +89,12 @@
                 continue
             proto.pipe.close()
 
-        if self._proc is not None and self._returncode is None:
+        if (self._proc is not None
+        # the child process finished?
+        and self._returncode is None
+        # the child process finished but the transport was not notified yet?
+        and self._proc.poll() is None
+        ):
             if self._loop.get_debug():
                 logger.warning('Close running child process: kill %r', self)
 
diff --git a/Lib/asyncio/queues.py b/Lib/asyncio/queues.py
index 4aeb6c4..84cdabc 100644
--- a/Lib/asyncio/queues.py
+++ b/Lib/asyncio/queues.py
@@ -1,7 +1,7 @@
 """Queues"""
 
-__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'JoinableQueue',
-           'QueueFull', 'QueueEmpty']
+__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty',
+           'JoinableQueue']
 
 import collections
 import heapq
@@ -49,6 +49,9 @@
         self._getters = collections.deque()
         # Pairs of (item, Future).
         self._putters = collections.deque()
+        self._unfinished_tasks = 0
+        self._finished = locks.Event(loop=self._loop)
+        self._finished.set()
         self._init(maxsize)
 
     def _init(self, maxsize):
@@ -59,6 +62,8 @@
 
     def _put(self, item):
         self._queue.append(item)
+        self._unfinished_tasks += 1
+        self._finished.clear()
 
     def __repr__(self):
         return '<{} at {:#x} {}>'.format(
@@ -75,6 +80,8 @@
             result += ' _getters[{}]'.format(len(self._getters))
         if self._putters:
             result += ' _putters[{}]'.format(len(self._putters))
+        if self._unfinished_tasks:
+            result += ' tasks={}'.format(self._unfinished_tasks)
         return result
 
     def _consume_done_getters(self):
@@ -126,9 +133,6 @@
                 'queue non-empty, why are getters waiting?')
 
             getter = self._getters.popleft()
-
-            # Use _put and _get instead of passing item straight to getter, in
-            # case a subclass has logic that must run (e.g. JoinableQueue).
             self._put(item)
 
             # getter cannot be cancelled, we just removed done getters
@@ -154,9 +158,6 @@
                 'queue non-empty, why are getters waiting?')
 
             getter = self._getters.popleft()
-
-            # Use _put and _get instead of passing item straight to getter, in
-            # case a subclass has logic that must run (e.g. JoinableQueue).
             self._put(item)
 
             # getter cannot be cancelled, we just removed done getters
@@ -219,6 +220,38 @@
         else:
             raise QueueEmpty
 
+    def task_done(self):
+        """Indicate that a formerly enqueued task is complete.
+
+        Used by queue consumers. For each get() used to fetch a task,
+        a subsequent call to task_done() tells the queue that the processing
+        on the task is complete.
+
+        If a join() is currently blocking, it will resume when all items have
+        been processed (meaning that a task_done() call was received for every
+        item that had been put() into the queue).
+
+        Raises ValueError if called more times than there were items placed in
+        the queue.
+        """
+        if self._unfinished_tasks <= 0:
+            raise ValueError('task_done() called too many times')
+        self._unfinished_tasks -= 1
+        if self._unfinished_tasks == 0:
+            self._finished.set()
+
+    @coroutine
+    def join(self):
+        """Block until all items in the queue have been gotten and processed.
+
+        The count of unfinished tasks goes up whenever an item is added to the
+        queue. The count goes down whenever a consumer calls task_done() to
+        indicate that the item was retrieved and all work on it is complete.
+        When the count of unfinished tasks drops to zero, join() unblocks.
+        """
+        if self._unfinished_tasks > 0:
+            yield from self._finished.wait()
+
 
 class PriorityQueue(Queue):
     """A subclass of Queue; retrieves entries in priority order (lowest first).
@@ -249,54 +282,5 @@
         return self._queue.pop()
 
 
-class JoinableQueue(Queue):
-    """A subclass of Queue with task_done() and join() methods."""
-
-    def __init__(self, maxsize=0, *, loop=None):
-        super().__init__(maxsize=maxsize, loop=loop)
-        self._unfinished_tasks = 0
-        self._finished = locks.Event(loop=self._loop)
-        self._finished.set()
-
-    def _format(self):
-        result = Queue._format(self)
-        if self._unfinished_tasks:
-            result += ' tasks={}'.format(self._unfinished_tasks)
-        return result
-
-    def _put(self, item):
-        super()._put(item)
-        self._unfinished_tasks += 1
-        self._finished.clear()
-
-    def task_done(self):
-        """Indicate that a formerly enqueued task is complete.
-
-        Used by queue consumers. For each get() used to fetch a task,
-        a subsequent call to task_done() tells the queue that the processing
-        on the task is complete.
-
-        If a join() is currently blocking, it will resume when all items have
-        been processed (meaning that a task_done() call was received for every
-        item that had been put() into the queue).
-
-        Raises ValueError if called more times than there were items placed in
-        the queue.
-        """
-        if self._unfinished_tasks <= 0:
-            raise ValueError('task_done() called too many times')
-        self._unfinished_tasks -= 1
-        if self._unfinished_tasks == 0:
-            self._finished.set()
-
-    @coroutine
-    def join(self):
-        """Block until all items in the queue have been gotten and processed.
-
-        The count of unfinished tasks goes up whenever an item is added to the
-        queue. The count goes down whenever a consumer thread calls task_done()
-        to indicate that the item was retrieved and all work on it is complete.
-        When the count of unfinished tasks drops to zero, join() unblocks.
-        """
-        if self._unfinished_tasks > 0:
-            yield from self._finished.wait()
+JoinableQueue = Queue
+"""Deprecated alias for Queue."""
diff --git a/Lib/code.py b/Lib/code.py
index 86e1f03..53244e3 100644
--- a/Lib/code.py
+++ b/Lib/code.py
@@ -140,32 +140,15 @@
         sys.last_type, sys.last_value, last_tb = ei = sys.exc_info()
         sys.last_traceback = last_tb
         try:
-            lines = []
-            for value, tb in traceback._iter_chain(*ei[1:]):
-                if isinstance(value, str):
-                    lines.append(value)
-                    lines.append('\n')
-                    continue
-                if tb:
-                    tblist = traceback.extract_tb(tb)
-                    if tb is last_tb:
-                        # The last traceback includes the frame we
-                        # exec'd in
-                        del tblist[:1]
-                    tblines = traceback.format_list(tblist)
-                    if tblines:
-                        lines.append("Traceback (most recent call last):\n")
-                        lines.extend(tblines)
-                lines.extend(traceback.format_exception_only(type(value),
-                                                             value))
+            lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next)
+            if sys.excepthook is sys.__excepthook__:
+                self.write(''.join(lines))
+            else:
+                # If someone has set sys.excepthook, we let that take precedence
+                # over self.write
+                sys.excepthook(ei[0], ei[1], last_tb)
         finally:
-            tblist = last_tb = ei = None
-        if sys.excepthook is sys.__excepthook__:
-            self.write(''.join(lines))
-        else:
-            # If someone has set sys.excepthook, we let that take precedence
-            # over self.write
-            sys.excepthook(type, value, last_tb)
+            last_tb = ei = None
 
     def write(self, data):
         """Write a string.
diff --git a/Lib/datetime.py b/Lib/datetime.py
index 4afe9a5..de57472 100644
--- a/Lib/datetime.py
+++ b/Lib/datetime.py
@@ -297,6 +297,25 @@
     raise TypeError("can't compare '%s' to '%s'" % (
                     type(x).__name__, type(y).__name__))
 
+def _divide_and_round(a, b):
+    """divide a by b and round result to the nearest integer
+
+    When the ratio is exactly half-way between two integers,
+    the even integer is returned.
+    """
+    # Based on the reference implementation for divmod_near
+    # in Objects/longobject.c.
+    q, r = divmod(a, b)
+    # round up if either r / b > 0.5, or r / b == 0.5 and q is odd.
+    # The expression r / b > 0.5 is equivalent to 2 * r > b if b is
+    # positive, 2 * r < b if b negative.
+    r *= 2
+    greater_than_half = r > b if b > 0 else r < b
+    if greater_than_half or r == b and q % 2 == 1:
+        q += 1
+
+    return q
+
 class timedelta:
     """Represent the difference between two datetime objects.
 
@@ -515,8 +534,9 @@
                              self._seconds * other,
                              self._microseconds * other)
         if isinstance(other, float):
+            usec = self._to_microseconds()
             a, b = other.as_integer_ratio()
-            return self * a / b
+            return timedelta(0, 0, _divide_and_round(usec * a, b))
         return NotImplemented
 
     __rmul__ = __mul__
@@ -541,10 +561,10 @@
         if isinstance(other, timedelta):
             return usec / other._to_microseconds()
         if isinstance(other, int):
-            return timedelta(0, 0, usec / other)
+            return timedelta(0, 0, _divide_and_round(usec, other))
         if isinstance(other, float):
             a, b = other.as_integer_ratio()
-            return timedelta(0, 0, b * usec / a)
+            return timedelta(0, 0, _divide_and_round(b * usec, a))
 
     def __mod__(self, other):
         if isinstance(other, timedelta):
@@ -1373,7 +1393,7 @@
 
     @classmethod
     def utcfromtimestamp(cls, t):
-        "Construct a UTC datetime from a POSIX timestamp (like time.time())."
+        """Construct a naive UTC datetime from a POSIX timestamp."""
         t, frac = divmod(t, 1.0)
         us = int(frac * 1e6)
 
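
timedelta arithmetic with ints and floats now rounds to the nearest whole
microsecond, with ties going to the even result, instead of going through float
true division. A quick illustration of the rounding the helper above is expected
to produce:

    from datetime import timedelta

    print(timedelta(microseconds=1) / 2)   # 0.5 us rounds to 0 -> 0:00:00
    print(timedelta(microseconds=3) / 2)   # 1.5 us rounds to 2 -> 0:00:00.000002
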
diff --git a/Lib/dbm/dumb.py b/Lib/dbm/dumb.py
index f95ab85..3424096 100644
--- a/Lib/dbm/dumb.py
+++ b/Lib/dbm/dumb.py
@@ -21,6 +21,7 @@
 
 """
 
+import ast as _ast
 import io as _io
 import os as _os
 import collections
@@ -95,7 +96,7 @@
             with f:
                 for line in f:
                     line = line.rstrip()
-                    key, pos_and_siz_pair = eval(line)
+                    key, pos_and_siz_pair = _ast.literal_eval(line)
                     key = key.encode('Latin-1')
                     self._index[key] = pos_and_siz_pair
 
diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py
index 67ec78b..451c5fb 100644
--- a/Lib/distutils/__init__.py
+++ b/Lib/distutils/__init__.py
@@ -13,5 +13,5 @@
 # Updated automatically by the Python release process.
 #
 #--start constants--
-__version__ = "3.5.0a1"
+__version__ = "3.5.0a2"
 #--end constants--
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index d768dc5..67db007 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -51,7 +51,7 @@
         'purelib': '$usersite',
         'platlib': '$usersite',
         'headers': '$userbase/Python$py_version_nodot/Include/$dist_name',
-        'scripts': '$userbase/Scripts',
+        'scripts': '$userbase/Python$py_version_nodot/Scripts',
         'data'   : '$userbase',
         }
 
diff --git a/Lib/functools.py b/Lib/functools.py
index 20a26f9..91e9685 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -241,6 +241,14 @@
     """New function with partial application of the given arguments
     and keywords.
     """
+    if hasattr(func, 'func'):
+        args = func.args + args
+        tmpkw = func.keywords.copy()
+        tmpkw.update(keywords)
+        keywords = tmpkw
+        del tmpkw
+        func = func.func
+
     def newfunc(*fargs, **fkeywords):
         newkeywords = keywords.copy()
         newkeywords.update(fkeywords)
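
With this change the pure-Python partial() flattens a nested partial the same
way the C implementation does: the inner callable's args and keywords are merged
rather than wrapped in another layer. Roughly:

    from functools import partial

    def add(a, b, c):
        return a + b + c

    p1 = partial(add, 1)
    p2 = partial(p1, 2)          # inner partial is unwrapped
    print(p2.func is add)        # True: args/keywords were merged
    print(p2.args)               # (1, 2)
    print(p2(3))                 # 6
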
diff --git a/Lib/http/__init__.py b/Lib/http/__init__.py
index 475f1c0..d4334cc 100644
--- a/Lib/http/__init__.py
+++ b/Lib/http/__init__.py
@@ -93,8 +93,8 @@
         'URI is too long')
     UNSUPPORTED_MEDIA_TYPE = (415, 'Unsupported Media Type',
         'Entity body in unsupported format')
-    REQUEST_RANGE_NOT_SATISFIABLE = (416,
-        'Request Range Not Satisfiable',
+    REQUESTED_RANGE_NOT_SATISFIABLE = (416,
+        'Requested Range Not Satisfiable',
         'Cannot satisfy request range')
     EXPECTATION_FAILED = (417, 'Expectation Failed',
         'Expect condition could not be satisfied')
@@ -107,8 +107,8 @@
     TOO_MANY_REQUESTS = (429, 'Too Many Requests',
         'The user has sent too many requests in '
         'a given amount of time ("rate limiting")')
-    REQUEST_HEADER_FIELD_TOO_LARGE = (431,
-        'Request Header Field Too Large',
+    REQUEST_HEADER_FIELDS_TOO_LARGE = (431,
+        'Request Header Fields Too Large',
         'The server is unwilling to process the request because its header '
         'fields are too large')
 
diff --git a/Lib/http/client.py b/Lib/http/client.py
index a77e501..5e12a85 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -75,12 +75,14 @@
 import collections
 from urllib.parse import urlsplit
 
+# HTTPMessage, parse_headers(), and the HTTP status code constants are
+# intentionally omitted for simplicity
 __all__ = ["HTTPResponse", "HTTPConnection",
            "HTTPException", "NotConnected", "UnknownProtocol",
            "UnknownTransferEncoding", "UnimplementedFileMode",
            "IncompleteRead", "InvalidURL", "ImproperConnectionState",
            "CannotSendRequest", "CannotSendHeader", "ResponseNotReady",
-           "BadStatusLine", "error", "responses"]
+           "BadStatusLine", "LineTooLong", "error", "responses"]
 
 HTTP_PORT = 80
 HTTPS_PORT = 443
diff --git a/Lib/http/server.py b/Lib/http/server.py
index ac53550..fd13be3 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -82,7 +82,10 @@
 
 __version__ = "0.6"
 
-__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
+__all__ = [
+    "HTTPServer", "BaseHTTPRequestHandler",
+    "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler",
+]
 
 import html
 import http.client
@@ -272,7 +275,7 @@
         """
         self.command = None  # set in case of error on the first line
         self.request_version = version = self.default_request_version
-        self.close_connection = 1
+        self.close_connection = True
         requestline = str(self.raw_requestline, 'iso-8859-1')
         requestline = requestline.rstrip('\r\n')
         self.requestline = requestline
@@ -302,7 +305,7 @@
                     "Bad request version (%r)" % version)
                 return False
             if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
-                self.close_connection = 0
+                self.close_connection = False
             if version_number >= (2, 0):
                 self.send_error(
                     HTTPStatus.HTTP_VERSION_NOT_SUPPORTED,
@@ -310,7 +313,7 @@
                 return False
         elif len(words) == 2:
             command, path = words
-            self.close_connection = 1
+            self.close_connection = True
             if command != 'GET':
                 self.send_error(
                     HTTPStatus.BAD_REQUEST,
@@ -337,10 +340,10 @@
 
         conntype = self.headers.get('Connection', "")
         if conntype.lower() == 'close':
-            self.close_connection = 1
+            self.close_connection = True
         elif (conntype.lower() == 'keep-alive' and
               self.protocol_version >= "HTTP/1.1"):
-            self.close_connection = 0
+            self.close_connection = False
         # Examine the headers and look for an Expect directive
         expect = self.headers.get('Expect', "")
         if (expect.lower() == "100-continue" and
@@ -385,7 +388,7 @@
                 self.send_error(HTTPStatus.REQUEST_URI_TOO_LONG)
                 return
             if not self.raw_requestline:
-                self.close_connection = 1
+                self.close_connection = True
                 return
             if not self.parse_request():
                 # An error code has been sent, just exit
@@ -402,12 +405,12 @@
         except socket.timeout as e:
             #a read or a write timed out.  Discard this connection
             self.log_error("Request timed out: %r", e)
-            self.close_connection = 1
+            self.close_connection = True
             return
 
     def handle(self):
         """Handle multiple requests if necessary."""
-        self.close_connection = 1
+        self.close_connection = True
 
         self.handle_one_request()
         while not self.close_connection:
@@ -493,9 +496,9 @@
 
         if keyword.lower() == 'connection':
             if value.lower() == 'close':
-                self.close_connection = 1
+                self.close_connection = True
             elif value.lower() == 'keep-alive':
-                self.close_connection = 0
+                self.close_connection = False
 
     def end_headers(self):
         """Send the blank line ending the MIME headers."""
@@ -514,7 +517,8 @@
         This is called by send_response().
 
         """
-
+        if isinstance(code, HTTPStatus):
+            code = code.value
         self.log_message('"%s" %s %s',
                          self.requestline, str(code), str(size))
 
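
close_connection is now a plain bool, and log_request() unwraps HTTPStatus
members so the access log shows "200" rather than "HTTPStatus.OK". A minimal
handler sketch using the enum (the route and port are illustrative):

    from http import HTTPStatus
    from http.server import BaseHTTPRequestHandler, HTTPServer

    class HelloHandler(BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_response(HTTPStatus.OK)    # enum member accepted
            self.send_header('Content-Type', 'text/plain; charset=utf-8')
            self.end_headers()
            self.wfile.write(b'hello\n')

    if __name__ == '__main__':
        HTTPServer(('127.0.0.1', 8000), HelloHandler).serve_forever()
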
diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py
index 1ffbc76..7d9ac67 100644
--- a/Lib/idlelib/idlever.py
+++ b/Lib/idlelib/idlever.py
@@ -1 +1 @@
-IDLE_VERSION = "3.5.0a1"
+IDLE_VERSION = "3.5.0a2"
diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py
index 6c225f3..be1ec52 100644
--- a/Lib/ipaddress.py
+++ b/Lib/ipaddress.py
@@ -386,6 +386,8 @@
 
     """The mother class."""
 
+    __slots__ = ()
+
     @property
     def exploded(self):
         """Return the longhand version of the IP address as a string."""
@@ -543,6 +545,8 @@
     used by single IP addresses.
     """
 
+    __slots__ = ()
+
     def __int__(self):
         return self._ip
 
@@ -1051,6 +1055,8 @@
 
     """
 
+    __slots__ = ()
+    _version = 4
     # Equivalent to 255.255.255.255 or 32 bits of 1's.
     _ALL_ONES = (2**IPV4LENGTH) - 1
     _DECIMAL_DIGITS = frozenset('0123456789')
@@ -1063,9 +1069,6 @@
     # when constructed (see _make_netmask()).
     _netmask_cache = {}
 
-    def __init__(self, address):
-        self._version = 4
-
     def _explode_shorthand_ip_string(self):
         return str(self)
 
@@ -1243,6 +1246,8 @@
 
     """Represent and manipulate single IPv4 Addresses."""
 
+    __slots__ = ('_ip', '__weakref__')
+
     def __init__(self, address):
 
         """
@@ -1259,8 +1264,6 @@
             AddressValueError: If ipaddress isn't a valid IPv4 address.
 
         """
-        _BaseV4.__init__(self, address)
-
         # Efficient constructor from integer.
         if isinstance(address, int):
             self._check_int_address(address)
@@ -1485,8 +1488,6 @@
               supplied.
 
         """
-
-        _BaseV4.__init__(self, address)
         _BaseNetwork.__init__(self, address)
 
         # Constructing from a packed address or integer
@@ -1590,6 +1591,8 @@
 
     """
 
+    __slots__ = ()
+    _version = 6
     _ALL_ONES = (2**IPV6LENGTH) - 1
     _HEXTET_COUNT = 8
     _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
@@ -1599,9 +1602,6 @@
     # when constructed (see _make_netmask()).
     _netmask_cache = {}
 
-    def __init__(self, address):
-        self._version = 6
-
     @classmethod
     def _make_netmask(cls, arg):
         """Make a (netmask, prefix_len) tuple from the given argument.
@@ -1870,6 +1870,8 @@
 
     """Represent and manipulate single IPv6 Addresses."""
 
+    __slots__ = ('_ip', '__weakref__')
+
     def __init__(self, address):
         """Instantiate a new IPv6 address object.
 
@@ -1887,8 +1889,6 @@
             AddressValueError: If address isn't a valid IPv6 address.
 
         """
-        _BaseV6.__init__(self, address)
-
         # Efficient constructor from integer.
         if isinstance(address, int):
             self._check_int_address(address)
@@ -2180,7 +2180,6 @@
               supplied.
 
         """
-        _BaseV6.__init__(self, address)
         _BaseNetwork.__init__(self, address)
 
         # Efficient constructor from integer or packed address
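
Defining __slots__ throughout the hierarchy removes the per-instance __dict__,
so address objects are smaller and no longer accept ad-hoc attributes:

    import ipaddress

    addr = ipaddress.IPv4Address('192.0.2.1')
    try:
        addr.label = 'gateway'                  # no __dict__ any more
    except AttributeError as exc:
        print('rejected:', exc)
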
diff --git a/Lib/linecache.py b/Lib/linecache.py
index 02a9eb5..33b0af7 100644
--- a/Lib/linecache.py
+++ b/Lib/linecache.py
@@ -5,6 +5,7 @@
 that name.
 """
 
+import functools
 import sys
 import os
 import tokenize
@@ -21,7 +22,9 @@
 
 # The cache
 
-cache = {} # The cache
+# The cache. Maps filenames to either a thunk which will provide source code,
+# or a tuple (size, mtime, lines, fullname) once loaded.
+cache = {}
 
 
 def clearcache():
@@ -36,6 +39,9 @@
     Update the cache if it doesn't contain an entry for this file already."""
 
     if filename in cache:
+        entry = cache[filename]
+        if len(entry) == 1:
+            return updatecache(filename, module_globals)
         return cache[filename][2]
     else:
         return updatecache(filename, module_globals)
@@ -54,7 +60,11 @@
             return
 
     for filename in filenames:
-        size, mtime, lines, fullname = cache[filename]
+        entry = cache[filename]
+        if len(entry) == 1:
+            # lazy cache entry, leave it lazy.
+            continue
+        size, mtime, lines, fullname = entry
         if mtime is None:
             continue   # no-op for files loaded via a __loader__
         try:
@@ -72,7 +82,8 @@
     and return an empty list."""
 
     if filename in cache:
-        del cache[filename]
+        if len(cache[filename]) != 1:
+            del cache[filename]
     if not filename or (filename.startswith('<') and filename.endswith('>')):
         return []
 
@@ -82,27 +93,23 @@
     except OSError:
         basename = filename
 
-        # Try for a __loader__, if available
-        if module_globals and '__loader__' in module_globals:
-            name = module_globals.get('__name__')
-            loader = module_globals['__loader__']
-            get_source = getattr(loader, 'get_source', None)
-
-            if name and get_source:
-                try:
-                    data = get_source(name)
-                except (ImportError, OSError):
-                    pass
-                else:
-                    if data is None:
-                        # No luck, the PEP302 loader cannot find the source
-                        # for this module.
-                        return []
-                    cache[filename] = (
-                        len(data), None,
-                        [line+'\n' for line in data.splitlines()], fullname
-                    )
-                    return cache[filename][2]
+        # Realise a lazy loader-based lookup if there is one,
+        # otherwise try to look it up right now.
+        if lazycache(filename, module_globals):
+            try:
+                data = cache[filename][0]()
+            except (ImportError, OSError):
+                pass
+            else:
+                if data is None:
+                    # No luck, the PEP302 loader cannot find the source
+                    # for this module.
+                    return []
+                cache[filename] = (
+                    len(data), None,
+                    [line+'\n' for line in data.splitlines()], fullname
+                )
+                return cache[filename][2]
 
         # Try looking through the module search path, which is only useful
         # when handling a relative filename.
@@ -132,3 +139,36 @@
     size, mtime = stat.st_size, stat.st_mtime
     cache[filename] = size, mtime, lines, fullname
     return lines
+
+
+def lazycache(filename, module_globals):
+    """Seed the cache for filename with module_globals.
+
+    The module loader will be asked for the source only when getlines is
+    called, not immediately.
+
+    If there is an entry in the cache already, it is not altered.
+
+    :return: True if a lazy load is registered in the cache,
+        otherwise False. To register such a load, a module loader with a
+        get_source method must be found, the filename must be a cacheable
+        filename, and the filename must not already be cached.
+    """
+    if filename in cache:
+        if len(cache[filename]) == 1:
+            return True
+        else:
+            return False
+    if not filename or (filename.startswith('<') and filename.endswith('>')):
+        return False
+    # Try for a __loader__, if available
+    if module_globals and '__loader__' in module_globals:
+        name = module_globals.get('__name__')
+        loader = module_globals['__loader__']
+        get_source = getattr(loader, 'get_source', None)
+
+        if name and get_source:
+            get_lines = functools.partial(get_source, name)
+            cache[filename] = (get_lines,)
+            return True
+    return False
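
lazycache() only records the loader's get_source callable; the source is fetched
the first time getlines() actually needs it. A small sketch, assuming a fresh
interpreter where the file is not yet cached:

    import collections
    import linecache

    registered = linecache.lazycache(collections.__file__, vars(collections))
    print(registered)            # True: a lazy entry was stored, nothing read yet

    # Materialised on first use.
    print(linecache.getline(collections.__file__, 1).rstrip())
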
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index c67ac99..d4f8aef 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2013 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2015 by Vinay Sajip. All Rights Reserved.
 #
 # Permission to use, copy, modify, and distribute this software and its
 # documentation for any purpose and without fee is hereby granted,
@@ -18,7 +18,7 @@
 Additional handlers for the logging package for Python. The core package is
 based on PEP 282 and comments thereto in comp.lang.python.
 
-Copyright (C) 2001-2013 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2015 Vinay Sajip. All Rights Reserved.
 
 To use, simply 'import logging.handlers' and log away!
 """
@@ -1350,7 +1350,7 @@
         """
         _sentinel = None
 
-        def __init__(self, queue, *handlers):
+        def __init__(self, queue, *handlers, respect_handler_level=False):
             """
             Initialise an instance with the specified queue and
             handlers.
@@ -1359,6 +1359,7 @@
             self.handlers = handlers
             self._stop = threading.Event()
             self._thread = None
+            self.respect_handler_level = respect_handler_level
 
         def dequeue(self, block):
             """
@@ -1399,7 +1400,12 @@
             """
             record = self.prepare(record)
             for handler in self.handlers:
-                handler.handle(record)
+                if not self.respect_handler_level:
+                    process = True
+                else:
+                    process = record.levelno >= handler.level
+                if process:
+                    handler.handle(record)
 
         def _monitor(self):
             """
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
index 9a8ae29..07d19de 100644
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -829,7 +829,7 @@
                     try:
                         ov, err = _winapi.ReadFile(fileno(), 0, True)
                     except OSError as e:
-                        err = e.winerror
+                        ov, err = None, e.winerror
                         if err not in _ready_errors:
                             raise
                     if err == _winapi.ERROR_IO_PENDING:
@@ -838,7 +838,16 @@
                     else:
                         # If o.fileno() is an overlapped pipe handle and
                         # err == 0 then there is a zero length message
-                        # in the pipe, but it HAS NOT been consumed.
+                        # in the pipe, but it HAS NOT been consumed...
+                        if ov and sys.getwindowsversion()[:2] >= (6, 2):
+                            # ... except on Windows 8 and later, where
+                            # the message HAS been consumed.
+                            try:
+                                _, err = ov.GetOverlappedResult(False)
+                            except OSError as e:
+                                err = e.winerror
+                            if not err and hasattr(o, '_got_empty_message'):
+                                o._got_empty_message = True
                         ready_objects.add(o)
                         timeout = 0
 
diff --git a/Lib/pathlib.py b/Lib/pathlib.py
index e1b07ca..01e66a0 100644
--- a/Lib/pathlib.py
+++ b/Lib/pathlib.py
@@ -73,6 +73,10 @@
                     # parts. This makes the result of parsing e.g.
                     # ("C:", "/", "a") reasonably intuitive.
                     for part in it:
+                        if not part:
+                            continue
+                        if altsep:
+                            part = part.replace(altsep, sep)
                         drv = self.splitroot(part)[0]
                         if drv:
                             break
diff --git a/Lib/pprint.py b/Lib/pprint.py
index 2cbffed..0091e69 100644
--- a/Lib/pprint.py
+++ b/Lib/pprint.py
@@ -161,7 +161,7 @@
             return
         rep = self._repr(object, context, level - 1)
         typ = type(object)
-        max_width = self._width - 1 - indent - allowance
+        max_width = self._width - indent - allowance
         sepLines = len(rep) > max_width
         write = stream.write
 
@@ -174,24 +174,14 @@
                 length = len(object)
                 if length:
                     context[objid] = 1
-                    indent = indent + self._indent_per_level
                     if issubclass(typ, _OrderedDict):
                         items = list(object.items())
                     else:
                         items = sorted(object.items(), key=_safe_tuple)
-                    key, ent = items[0]
-                    rep = self._repr(key, context, level)
-                    write(rep)
-                    write(': ')
-                    self._format(ent, stream, indent + len(rep) + 2,
-                                  allowance + 1, context, level)
-                    if length > 1:
-                        for key, ent in items[1:]:
-                            rep = self._repr(key, context, level)
-                            write(',\n%s%s: ' % (' '*indent, rep))
-                            self._format(ent, stream, indent + len(rep) + 2,
-                                          allowance + 1, context, level)
-                    indent = indent - self._indent_per_level
+                    self._format_dict_items(items, stream,
+                                            indent + self._indent_per_level,
+                                            allowance + 1,
+                                            context, level)
                     del context[objid]
                 write('}')
                 return
@@ -207,7 +197,10 @@
                     endchar = ']'
                 elif issubclass(typ, tuple):
                     write('(')
-                    endchar = ')'
+                    if length == 1:
+                        endchar = ',)'
+                    else:
+                        endchar = ')'
                 else:
                     if not length:
                         write(rep)
@@ -227,10 +220,9 @@
                     context[objid] = 1
                     self._format_items(object, stream,
                                        indent + self._indent_per_level,
-                                       allowance + 1, context, level)
+                                       allowance + len(endchar),
+                                       context, level)
                     del context[objid]
-                if issubclass(typ, tuple) and length == 1:
-                    write(',')
                 write(endchar)
                 return
 
@@ -239,19 +231,27 @@
                 lines = object.splitlines(True)
                 if level == 1:
                     indent += 1
-                    max_width -= 2
+                    allowance += 1
+                max_width1 = max_width = self._width - indent
                 for i, line in enumerate(lines):
                     rep = repr(line)
-                    if len(rep) <= max_width:
+                    if i == len(lines) - 1:
+                        max_width1 -= allowance
+                    if len(rep) <= max_width1:
                         chunks.append(rep)
                     else:
                         # A list of alternating (non-space, space) strings
-                        parts = re.split(r'(\s+)', line) + ['']
+                        parts = re.findall(r'\S*\s*', line)
+                        assert parts
+                        assert not parts[-1]
+                        parts.pop()  # drop empty last part
+                        max_width2 = max_width
                         current = ''
-                        for i in range(0, len(parts), 2):
-                            part = parts[i] + parts[i+1]
+                        for j, part in enumerate(parts):
                             candidate = current + part
-                            if len(repr(candidate)) > max_width:
+                            if j == len(parts) - 1 and i == len(lines) - 1:
+                                max_width2 -= allowance
+                            if len(repr(candidate)) > max_width2:
                                 if current:
                                     chunks.append(repr(current))
                                 current = part
@@ -273,12 +273,41 @@
                 return
         write(rep)
 
+    def _format_dict_items(self, items, stream, indent, allowance, context,
+                           level):
+        write = stream.write
+        delimnl = ',\n' + ' ' * indent
+        last_index = len(items) - 1
+        for i, (key, ent) in enumerate(items):
+            last = i == last_index
+            rep = self._repr(key, context, level)
+            write(rep)
+            write(': ')
+            self._format(ent, stream, indent + len(rep) + 2,
+                         allowance if last else 1,
+                         context, level)
+            if not last:
+                write(delimnl)
+
     def _format_items(self, items, stream, indent, allowance, context, level):
         write = stream.write
         delimnl = ',\n' + ' ' * indent
         delim = ''
-        width = max_width = self._width - indent - allowance + 2
-        for ent in items:
+        width = max_width = self._width - indent + 1
+        it = iter(items)
+        try:
+            next_ent = next(it)
+        except StopIteration:
+            return
+        last = False
+        while not last:
+            ent = next_ent
+            try:
+                next_ent = next(it)
+            except StopIteration:
+                last = True
+                max_width -= allowance
+                width -= allowance
             if self._compact:
                 rep = self._repr(ent, context, level)
                 w = len(rep) + 2
@@ -294,7 +323,9 @@
                     continue
             write(delim)
             delim = delimnl
-            self._format(ent, stream, indent, allowance, context, level)
+            self._format(ent, stream, indent,
+                         allowance if last else 1,
+                         context, level)
 
     def _repr(self, object, context, level):
         repr, readable, recursive = self.format(object, context.copy(),
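
The width bookkeeping now charges the closing bracket(s) against the last
element, and a one-element tuple keeps its trailing comma next to the closing
parenthesis. For example, a wrapped singleton tuple is expected to stay within
the requested width and end with "],)":

    import pprint

    pprint.pprint((['spam', 'eggs', 'lumberjack', 'knights', 'ni'],), width=25)
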
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index 60b0a9e5..c92b324 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -269,7 +269,7 @@
             except:
                 return None
             del sys.modules['__temp__']
-            result = (module.__doc__ or '').splitlines()[0]
+            result = module.__doc__.splitlines()[0] if module.__doc__ else None
         # Cache the result.
         cache[filename] = (mtime, result)
     return result
@@ -1405,9 +1405,6 @@
 def pager(text):
     """The first time this is called, determine what kind of pager to use."""
     global pager
-    # Escape non-encodable characters to avoid encoding errors later
-    encoding = sys.getfilesystemencoding()
-    text = text.encode(encoding, 'backslashreplace').decode(encoding)
     pager = getpager()
     pager(text)
 
@@ -1450,10 +1447,12 @@
 
 def pipepager(text, cmd):
     """Page through text by feeding it to another program."""
-    pipe = os.popen(cmd, 'w')
+    import subprocess
+    proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE)
     try:
-        pipe.write(text)
-        pipe.close()
+        with proc:
+            with io.TextIOWrapper(proc.stdin, errors='backslashreplace') as pipe:
+                pipe.write(text)
     except OSError:
         pass # Ignore broken pipes caused by quitting the pager program.
 
@@ -1461,16 +1460,21 @@
     """Page through text by invoking a program on a temporary file."""
     import tempfile
     filename = tempfile.mktemp()
-    with open(filename, 'w') as file:
+    with open(filename, 'w', errors='backslashreplace') as file:
         file.write(text)
     try:
         os.system(cmd + ' "' + filename + '"')
     finally:
         os.unlink(filename)
 
+def _escape_stdout(text):
+    # Escape non-encodable characters to avoid encoding errors later
+    encoding = getattr(sys.stdout, 'encoding', None) or 'utf-8'
+    return text.encode(encoding, 'backslashreplace').decode(encoding)
+
 def ttypager(text):
     """Page through text on a text terminal."""
-    lines = plain(text).split('\n')
+    lines = plain(_escape_stdout(text)).split('\n')
     try:
         import tty
         fd = sys.stdin.fileno()
@@ -1514,7 +1518,7 @@
 
 def plainpager(text):
     """Simply print unformatted text.  This is the ultimate fallback."""
-    sys.stdout.write(plain(text))
+    sys.stdout.write(plain(_escape_stdout(text)))
 
 def describe(thing):
     """Produce a short description of the given thing."""
@@ -1573,7 +1577,10 @@
     if isinstance(thing, str):
         object = locate(thing, forceload)
         if not object:
-            raise ImportError('no Python documentation found for %r' % thing)
+            raise ImportError('''\
+No Python documentation found for %r.
+Use help() to get the interactive help utility.
+Use help(str) for help on the str class.''' % thing)
         return object, thing
     else:
         name = getattr(thing, '__name__', None)
@@ -1844,7 +1851,10 @@
                 break
             request = replace(request, '"', '', "'", '').strip()
             if request.lower() in ('q', 'quit'): break
-            self.help(request)
+            if request == 'help':
+                self.intro()
+            else:
+                self.help(request)
 
     def getline(self, prompt):
         """Read one line, using input() when appropriate."""
@@ -1858,8 +1868,7 @@
     def help(self, request):
         if type(request) is type(''):
             request = request.strip()
-            if request == 'help': self.intro()
-            elif request == 'keywords': self.listkeywords()
+            if request == 'keywords': self.listkeywords()
             elif request == 'symbols': self.listsymbols()
             elif request == 'topics': self.listtopics()
             elif request == 'modules': self.listmodules()
@@ -1872,6 +1881,7 @@
             elif request in self.keywords: self.showtopic(request)
             elif request in self.topics: self.showtopic(request)
             elif request: doc(request, 'Help on %s:', output=self._output)
+            else: doc(str, 'Help on %s:', output=self._output)
         elif isinstance(request, Helper): self()
         else: doc(request, 'Help on %s:', output=self._output)
         self.output.write('\n')
@@ -2073,7 +2083,7 @@
                         if onerror:
                             onerror(modname)
                         continue
-                    desc = (module.__doc__ or '').splitlines()[0]
+                    desc = module.__doc__.splitlines()[0] if module.__doc__ else ''
                     path = getattr(module,'__file__',None)
                 name = modname + ' - ' + desc
                 if name.lower().find(key) >= 0:
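
The friendlier ImportError can be seen without the interactive prompt through
pydoc's resolve() helper; a quick check (the dotted name is deliberately bogus):

    import pydoc

    try:
        pydoc.resolve('no.such.module')
    except ImportError as exc:
        print(exc)       # multi-line hint pointing at help() and help(str)
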
diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py
index 489259e..631ad33 100644
--- a/Lib/pydoc_data/topics.py
+++ b/Lib/pydoc_data/topics.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Sat Feb  7 15:53:56 2015
+# Autogenerated by Sphinx on Sun Mar  8 00:21:33 2015
 topics = {'assert': u'\nThe "assert" statement\n**********************\n\nAssert statements are a convenient way to insert debugging assertions\ninto a program:\n\n   assert_stmt ::= "assert" expression ["," expression]\n\nThe simple form, "assert expression", is equivalent to\n\n   if __debug__:\n      if not expression: raise AssertionError\n\nThe extended form, "assert expression1, expression2", is equivalent to\n\n   if __debug__:\n      if not expression1: raise AssertionError(expression2)\n\nThese equivalences assume that "__debug__" and "AssertionError" refer\nto the built-in variables with those names.  In the current\nimplementation, the built-in variable "__debug__" is "True" under\nnormal circumstances, "False" when optimization is requested (command\nline option -O).  The current code generator emits no code for an\nassert statement when optimization is requested at compile time.  Note\nthat it is unnecessary to include the source code for the expression\nthat failed in the error message; it will be displayed as part of the\nstack trace.\n\nAssignments to "__debug__" are illegal.  The value for the built-in\nvariable is determined when the interpreter starts.\n',
  'assignment': u'\nAssignment statements\n*********************\n\nAssignment statements are used to (re)bind names to values and to\nmodify attributes or items of mutable objects:\n\n   assignment_stmt ::= (target_list "=")+ (expression_list | yield_expression)\n   target_list     ::= target ("," target)* [","]\n   target          ::= identifier\n              | "(" target_list ")"\n              | "[" target_list "]"\n              | attributeref\n              | subscription\n              | slicing\n              | "*" target\n\n(See section *Primaries* for the syntax definitions for\n*attributeref*, *subscription*, and *slicing*.)\n\nAn assignment statement evaluates the expression list (remember that\nthis can be a single expression or a comma-separated list, the latter\nyielding a tuple) and assigns the single resulting object to each of\nthe target lists, from left to right.\n\nAssignment is defined recursively depending on the form of the target\n(list). When a target is part of a mutable object (an attribute\nreference, subscription or slicing), the mutable object must\nultimately perform the assignment and decide about its validity, and\nmay raise an exception if the assignment is unacceptable.  The rules\nobserved by various types and the exceptions raised are given with the\ndefinition of the object types (see section *The standard type\nhierarchy*).\n\nAssignment of an object to a target list, optionally enclosed in\nparentheses or square brackets, is recursively defined as follows.\n\n* If the target list is a single target: The object is assigned to\n  that target.\n\n* If the target list is a comma-separated list of targets: The\n  object must be an iterable with the same number of items as there\n  are targets in the target list, and the items are assigned, from\n  left to right, to the corresponding targets.\n\n  * If the target list contains one target prefixed with an\n    asterisk, called a "starred" target: The object must be a sequence\n    with at least as many items as there are targets in the target\n    list, minus one.  The first items of the sequence are assigned,\n    from left to right, to the targets before the starred target.  The\n    final items of the sequence are assigned to the targets after the\n    starred target.  A list of the remaining items in the sequence is\n    then assigned to the starred target (the list can be empty).\n\n  * Else: The object must be a sequence with the same number of\n    items as there are targets in the target list, and the items are\n    assigned, from left to right, to the corresponding targets.\n\nAssignment of an object to a single target is recursively defined as\nfollows.\n\n* If the target is an identifier (name):\n\n  * If the name does not occur in a "global" or "nonlocal" statement\n    in the current code block: the name is bound to the object in the\n    current local namespace.\n\n  * Otherwise: the name is bound to the object in the global\n    namespace or the outer namespace determined by "nonlocal",\n    respectively.\n\n  The name is rebound if it was already bound.  
This may cause the\n  reference count for the object previously bound to the name to reach\n  zero, causing the object to be deallocated and its destructor (if it\n  has one) to be called.\n\n* If the target is a target list enclosed in parentheses or in\n  square brackets: The object must be an iterable with the same number\n  of items as there are targets in the target list, and its items are\n  assigned, from left to right, to the corresponding targets.\n\n* If the target is an attribute reference: The primary expression in\n  the reference is evaluated.  It should yield an object with\n  assignable attributes; if this is not the case, "TypeError" is\n  raised.  That object is then asked to assign the assigned object to\n  the given attribute; if it cannot perform the assignment, it raises\n  an exception (usually but not necessarily "AttributeError").\n\n  Note: If the object is a class instance and the attribute reference\n  occurs on both sides of the assignment operator, the RHS expression,\n  "a.x" can access either an instance attribute or (if no instance\n  attribute exists) a class attribute.  The LHS target "a.x" is always\n  set as an instance attribute, creating it if necessary.  Thus, the\n  two occurrences of "a.x" do not necessarily refer to the same\n  attribute: if the RHS expression refers to a class attribute, the\n  LHS creates a new instance attribute as the target of the\n  assignment:\n\n     class Cls:\n         x = 3             # class variable\n     inst = Cls()\n     inst.x = inst.x + 1   # writes inst.x as 4 leaving Cls.x as 3\n\n  This description does not necessarily apply to descriptor\n  attributes, such as properties created with "property()".\n\n* If the target is a subscription: The primary expression in the\n  reference is evaluated.  It should yield either a mutable sequence\n  object (such as a list) or a mapping object (such as a dictionary).\n  Next, the subscript expression is evaluated.\n\n  If the primary is a mutable sequence object (such as a list), the\n  subscript must yield an integer.  If it is negative, the sequence\'s\n  length is added to it.  The resulting value must be a nonnegative\n  integer less than the sequence\'s length, and the sequence is asked\n  to assign the assigned object to its item with that index.  If the\n  index is out of range, "IndexError" is raised (assignment to a\n  subscripted sequence cannot add new items to a list).\n\n  If the primary is a mapping object (such as a dictionary), the\n  subscript must have a type compatible with the mapping\'s key type,\n  and the mapping is then asked to create a key/datum pair which maps\n  the subscript to the assigned object.  This can either replace an\n  existing key/value pair with the same key value, or insert a new\n  key/value pair (if no key with the same value existed).\n\n  For user-defined objects, the "__setitem__()" method is called with\n  appropriate arguments.\n\n* If the target is a slicing: The primary expression in the\n  reference is evaluated.  It should yield a mutable sequence object\n  (such as a list).  The assigned object should be a sequence object\n  of the same type.  Next, the lower and upper bound expressions are\n  evaluated, insofar they are present; defaults are zero and the\n  sequence\'s length.  The bounds should evaluate to integers. If\n  either bound is negative, the sequence\'s length is added to it.  The\n  resulting bounds are clipped to lie between zero and the sequence\'s\n  length, inclusive.  
Finally, the sequence object is asked to replace\n  the slice with the items of the assigned sequence.  The length of\n  the slice may be different from the length of the assigned sequence,\n  thus changing the length of the target sequence, if the target\n  sequence allows it.\n\n**CPython implementation detail:** In the current implementation, the\nsyntax for targets is taken to be the same as for expressions, and\ninvalid syntax is rejected during the code generation phase, causing\nless detailed error messages.\n\nAlthough the definition of assignment implies that overlaps between\nthe left-hand side and the right-hand side are \'simultanenous\' (for\nexample "a, b = b, a" swaps two variables), overlaps *within* the\ncollection of assigned-to variables occur left-to-right, sometimes\nresulting in confusion.  For instance, the following program prints\n"[0, 2]":\n\n   x = [0, 1]\n   i = 0\n   i, x[i] = 1, 2         # i is updated, then x[i] is updated\n   print(x)\n\nSee also: **PEP 3132** - Extended Iterable Unpacking\n\n     The specification for the "*target" feature.\n\n\nAugmented assignment statements\n===============================\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n   augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n   augtarget                 ::= identifier | attributeref | subscription | slicing\n   augop                     ::= "+=" | "-=" | "*=" | "@=" | "/=" | "//=" | "%=" | "**="\n             | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions of the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like "x += 1" can be rewritten as\n"x = x + 1" to achieve a similar, but not exactly equal effect. In the\naugmented version, "x" is only evaluated once. Also, when possible,\nthe actual operation is performed *in-place*, meaning that rather than\ncreating a new object and assigning that to the target, the old object\nis modified instead.\n\nUnlike normal assignments, augmented assignments evaluate the left-\nhand side *before* evaluating the right-hand side.  For example, "a[i]\n+= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and performs\nthe addition, and lastly, it writes the result back to "a[i]".\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n',
  'atom-identifiers': u'\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name.  See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a "NameError" exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them.  The transformation inserts the\nclass name, with leading underscores removed and a single underscore\ninserted, in front of the name.  For example, the identifier "__spam"\noccurring in a class named "Ham" will be transformed to "_Ham__spam".\nThis transformation is independent of the syntactical context in which\nthe identifier is used.  If the transformed name is extremely long\n(longer than 255 characters), implementation defined truncation may\nhappen. If the class name consists only of underscores, no\ntransformation is done.\n',
@@ -18,7 +18,7 @@
  'callable-types': u'\nEmulating callable objects\n**************************\n\nobject.__call__(self[, args...])\n\n   Called when the instance is "called" as a function; if this method\n   is defined, "x(arg1, arg2, ...)" is a shorthand for\n   "x.__call__(arg1, arg2, ...)".\n',
  'calls': u'\nCalls\n*****\n\nA call calls a callable object (e.g., a *function*) with a possibly\nempty series of *arguments*:\n\n   call                 ::= primary "(" [argument_list [","] | comprehension] ")"\n   argument_list        ::= positional_arguments ["," keyword_arguments]\n                       ["," "*" expression] ["," keyword_arguments]\n                       ["," "**" expression]\n                     | keyword_arguments ["," "*" expression]\n                       ["," keyword_arguments] ["," "**" expression]\n                     | "*" expression ["," keyword_arguments] ["," "**" expression]\n                     | "**" expression\n   positional_arguments ::= expression ("," expression)*\n   keyword_arguments    ::= keyword_item ("," keyword_item)*\n   keyword_item         ::= identifier "=" expression\n\nAn optional trailing comma may be present after the positional and\nkeyword arguments but does not affect the semantics.\n\nThe primary must evaluate to a callable object (user-defined\nfunctions, built-in functions, methods of built-in objects, class\nobjects, methods of class instances, and all objects having a\n"__call__()" method are callable).  All argument expressions are\nevaluated before the call is attempted.  Please refer to section\n*Function definitions* for the syntax of formal *parameter* lists.\n\nIf keyword arguments are present, they are first converted to\npositional arguments, as follows.  First, a list of unfilled slots is\ncreated for the formal parameters.  If there are N positional\narguments, they are placed in the first N slots.  Next, for each\nkeyword argument, the identifier is used to determine the\ncorresponding slot (if the identifier is the same as the first formal\nparameter name, the first slot is used, and so on).  If the slot is\nalready filled, a "TypeError" exception is raised. Otherwise, the\nvalue of the argument is placed in the slot, filling it (even if the\nexpression is "None", it fills the slot).  When all arguments have\nbeen processed, the slots that are still unfilled are filled with the\ncorresponding default value from the function definition.  (Default\nvalues are calculated, once, when the function is defined; thus, a\nmutable object such as a list or dictionary used as default value will\nbe shared by all calls that don\'t specify an argument value for the\ncorresponding slot; this should usually be avoided.)  If there are any\nunfilled slots for which no default value is specified, a "TypeError"\nexception is raised.  Otherwise, the list of filled slots is used as\nthe argument list for the call.\n\n**CPython implementation detail:** An implementation may provide\nbuilt-in functions whose positional parameters do not have names, even\nif they are \'named\' for the purpose of documentation, and which\ntherefore cannot be supplied by keyword.  
In CPython, this is the case\nfor functions implemented in C that use "PyArg_ParseTuple()" to parse\ntheir arguments.\n\nIf there are more positional arguments than there are formal parameter\nslots, a "TypeError" exception is raised, unless a formal parameter\nusing the syntax "*identifier" is present; in this case, that formal\nparameter receives a tuple containing the excess positional arguments\n(or an empty tuple if there were no excess positional arguments).\n\nIf any keyword argument does not correspond to a formal parameter\nname, a "TypeError" exception is raised, unless a formal parameter\nusing the syntax "**identifier" is present; in this case, that formal\nparameter receives a dictionary containing the excess keyword\narguments (using the keywords as keys and the argument values as\ncorresponding values), or a (new) empty dictionary if there were no\nexcess keyword arguments.\n\nIf the syntax "*expression" appears in the function call, "expression"\nmust evaluate to an iterable.  Elements from this iterable are treated\nas if they were additional positional arguments; if there are\npositional arguments *x1*, ..., *xN*, and "expression" evaluates to a\nsequence *y1*, ..., *yM*, this is equivalent to a call with M+N\npositional arguments *x1*, ..., *xN*, *y1*, ..., *yM*.\n\nA consequence of this is that although the "*expression" syntax may\nappear *after* some keyword arguments, it is processed *before* the\nkeyword arguments (and the "**expression" argument, if any -- see\nbelow).  So:\n\n   >>> def f(a, b):\n   ...  print(a, b)\n   ...\n   >>> f(b=1, *(2,))\n   2 1\n   >>> f(a=1, *(2,))\n   Traceback (most recent call last):\n     File "<stdin>", line 1, in ?\n   TypeError: f() got multiple values for keyword argument \'a\'\n   >>> f(1, *(2,))\n   1 2\n\nIt is unusual for both keyword arguments and the "*expression" syntax\nto be used in the same call, so in practice this confusion does not\narise.\n\nIf the syntax "**expression" appears in the function call,\n"expression" must evaluate to a mapping, the contents of which are\ntreated as additional keyword arguments.  In the case of a keyword\nappearing in both "expression" and as an explicit keyword argument, a\n"TypeError" exception is raised.\n\nFormal parameters using the syntax "*identifier" or "**identifier"\ncannot be used as positional argument slots or as keyword argument\nnames.\n\nA call always returns some value, possibly "None", unless it raises an\nexception.  How this value is computed depends on the type of the\ncallable object.\n\nIf it is---\n\na user-defined function:\n   The code block for the function is executed, passing it the\n   argument list.  The first thing the code block will do is bind the\n   formal parameters to the arguments; this is described in section\n   *Function definitions*.  When the code block executes a "return"\n   statement, this specifies the return value of the function call.\n\na built-in function or method:\n   The result is up to the interpreter; see *Built-in Functions* for\n   the descriptions of built-in functions and methods.\n\na class object:\n   A new instance of that class is returned.\n\na class instance method:\n   The corresponding user-defined function is called, with an argument\n   list that is one longer than the argument list of the call: the\n   instance becomes the first argument.\n\na class instance:\n   The class must define a "__call__()" method; the effect is then the\n   same as if that method was called.\n',
  'class': u'\nClass definitions\n*****************\n\nA class definition defines a class object (see section *The standard\ntype hierarchy*):\n\n   classdef    ::= [decorators] "class" classname [inheritance] ":" suite\n   inheritance ::= "(" [parameter_list] ")"\n   classname   ::= identifier\n\nA class definition is an executable statement.  The inheritance list\nusually gives a list of base classes (see *Customizing class creation*\nfor more advanced uses), so each item in the list should evaluate to a\nclass object which allows subclassing.  Classes without an inheritance\nlist inherit, by default, from the base class "object"; hence,\n\n   class Foo:\n       pass\n\nis equivalent to\n\n   class Foo(object):\n       pass\n\nThe class\'s suite is then executed in a new execution frame (see\n*Naming and binding*), using a newly created local namespace and the\noriginal global namespace. (Usually, the suite contains mostly\nfunction definitions.)  When the class\'s suite finishes execution, its\nexecution frame is discarded but its local namespace is saved. [4] A\nclass object is then created using the inheritance list for the base\nclasses and the saved local namespace for the attribute dictionary.\nThe class name is bound to this class object in the original local\nnamespace.\n\nClass creation can be customized heavily using *metaclasses*.\n\nClasses can also be decorated: just like when decorating functions,\n\n   @f1(arg)\n   @f2\n   class Foo: pass\n\nis equivalent to\n\n   class Foo: pass\n   Foo = f1(arg)(f2(Foo))\n\nThe evaluation rules for the decorator expressions are the same as for\nfunction decorators.  The result must be a class object, which is then\nbound to the class name.\n\n**Programmer\'s note:** Variables defined in the class definition are\nclass attributes; they are shared by instances.  Instance attributes\ncan be set in a method with "self.name = value".  Both class and\ninstance attributes are accessible through the notation ""self.name"",\nand an instance attribute hides a class attribute with the same name\nwhen accessed in this way.  Class attributes can be used as defaults\nfor instance attributes, but using mutable values there can lead to\nunexpected results.  *Descriptors* can be used to create instance\nvariables with different implementation details.\n\nSee also: **PEP 3115** - Metaclasses in Python 3 **PEP 3129** -\n  Class Decorators\n\n-[ Footnotes ]-\n\n[1] The exception is propagated to the invocation stack unless\n    there is a "finally" clause which happens to raise another\n    exception. That new exception causes the old one to be lost.\n\n[2] Currently, control "flows off the end" except in the case of\n    an exception or the execution of a "return", "continue", or\n    "break" statement.\n\n[3] A string literal appearing as the first statement in the\n    function body is transformed into the function\'s "__doc__"\n    attribute and therefore the function\'s *docstring*.\n\n[4] A string literal appearing as the first statement in the class\n    body is transformed into the namespace\'s "__doc__" item and\n    therefore the class\'s *docstring*.\n',
- 'comparisons': u'\nComparisons\n***********\n\nUnlike C, all comparison operations in Python have the same priority,\nwhich is lower than that of any arithmetic, shifting or bitwise\noperation.  Also unlike C, expressions like "a < b < c" have the\ninterpretation that is conventional in mathematics:\n\n   comparison    ::= or_expr ( comp_operator or_expr )*\n   comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n                     | "is" ["not"] | ["not"] "in"\n\nComparisons yield boolean values: "True" or "False".\n\nComparisons can be chained arbitrarily, e.g., "x < y <= z" is\nequivalent to "x < y and y <= z", except that "y" is evaluated only\nonce (but in both cases "z" is not evaluated at all when "x < y" is\nfound to be false).\n\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\n*op2*, ..., *opN* are comparison operators, then "a op1 b op2 c ... y\nopN z" is equivalent to "a op1 b and b op2 c and ... y opN z", except\nthat each expression is evaluated at most once.\n\nNote that "a op1 b op2 c" doesn\'t imply any kind of comparison between\n*a* and *c*, so that, e.g., "x < y > z" is perfectly legal (though\nperhaps not pretty).\n\nThe operators "<", ">", "==", ">=", "<=", and "!=" compare the values\nof two objects.  The objects need not have the same type. If both are\nnumbers, they are converted to a common type.  Otherwise, the "==" and\n"!=" operators *always* consider objects of different types to be\nunequal, while the "<", ">", ">=" and "<=" operators raise a\n"TypeError" when comparing objects of different types that do not\nimplement these operators for the given pair of types.  You can\ncontrol comparison behavior of objects of non-built-in types by\ndefining rich comparison methods like "__gt__()", described in section\n*Basic customization*.\n\nComparison of objects of the same type depends on the type:\n\n* Numbers are compared arithmetically.\n\n* The values "float(\'NaN\')" and "Decimal(\'NaN\')" are special. The\n  are identical to themselves, "x is x" but are not equal to\n  themselves, "x != x".  Additionally, comparing any value to a\n  not-a-number value will return "False".  For example, both "3 <\n  float(\'NaN\')" and "float(\'NaN\') < 3" will return "False".\n\n* Bytes objects are compared lexicographically using the numeric\n  values of their elements.\n\n* Strings are compared lexicographically using the numeric\n  equivalents (the result of the built-in function "ord()") of their\n  characters. [3] String and bytes object can\'t be compared!\n\n* Tuples and lists are compared lexicographically using comparison\n  of corresponding elements.  This means that to compare equal, each\n  element must compare equal and the two sequences must be of the same\n  type and have the same length.\n\n  If not equal, the sequences are ordered the same as their first\n  differing elements.  For example, "[1,2,x] <= [1,2,y]" has the same\n  value as "x <= y".  If the corresponding element does not exist, the\n  shorter sequence is ordered first (for example, "[1,2] < [1,2,3]").\n\n* Mappings (dictionaries) compare equal if and only if they have the\n  same "(key, value)" pairs. Order comparisons "(\'<\', \'<=\', \'>=\',\n  \'>\')" raise "TypeError".\n\n* Sets and frozensets define comparison operators to mean subset and\n  superset tests.  Those relations do not define total orderings (the\n  two sets "{1,2}" and {2,3} are not equal, nor subsets of one\n  another, nor supersets of one another).  
Accordingly, sets are not\n  appropriate arguments for functions which depend on total ordering.\n  For example, "min()", "max()", and "sorted()" produce undefined\n  results given a list of sets as inputs.\n\n* Most other objects of built-in types compare unequal unless they\n  are the same object; the choice whether one object is considered\n  smaller or larger than another one is made arbitrarily but\n  consistently within one execution of a program.\n\nComparison of objects of differing types depends on whether either of\nthe types provide explicit support for the comparison.  Most numeric\ntypes can be compared with one another.  When cross-type comparison is\nnot supported, the comparison method returns "NotImplemented".\n\nThe operators "in" and "not in" test for membership.  "x in s"\nevaluates to true if *x* is a member of *s*, and false otherwise.  "x\nnot in s" returns the negation of "x in s".  All built-in sequences\nand set types support this as well as dictionary, for which "in" tests\nwhether the dictionary has a given key. For container types such as\nlist, tuple, set, frozenset, dict, or collections.deque, the\nexpression "x in y" is equivalent to "any(x is e or x == e for e in\ny)".\n\nFor the string and bytes types, "x in y" is true if and only if *x* is\na substring of *y*.  An equivalent test is "y.find(x) != -1".  Empty\nstrings are always considered to be a substring of any other string,\nso """ in "abc"" will return "True".\n\nFor user-defined classes which define the "__contains__()" method, "x\nin y" is true if and only if "y.__contains__(x)" is true.\n\nFor user-defined classes which do not define "__contains__()" but do\ndefine "__iter__()", "x in y" is true if some value "z" with "x == z"\nis produced while iterating over "y".  If an exception is raised\nduring the iteration, it is as if "in" raised that exception.\n\nLastly, the old-style iteration protocol is tried: if a class defines\n"__getitem__()", "x in y" is true if and only if there is a non-\nnegative integer index *i* such that "x == y[i]", and all lower\ninteger indices do not raise "IndexError" exception.  (If any other\nexception is raised, it is as if "in" raised that exception).\n\nThe operator "not in" is defined to have the inverse true value of\n"in".\n\nThe operators "is" and "is not" test for object identity: "x is y" is\ntrue if and only if *x* and *y* are the same object.  "x is not y"\nyields the inverse truth value. [4]\n',
+ 'comparisons': u'\nComparisons\n***********\n\nUnlike C, all comparison operations in Python have the same priority,\nwhich is lower than that of any arithmetic, shifting or bitwise\noperation.  Also unlike C, expressions like "a < b < c" have the\ninterpretation that is conventional in mathematics:\n\n   comparison    ::= or_expr ( comp_operator or_expr )*\n   comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n                     | "is" ["not"] | ["not"] "in"\n\nComparisons yield boolean values: "True" or "False".\n\nComparisons can be chained arbitrarily, e.g., "x < y <= z" is\nequivalent to "x < y and y <= z", except that "y" is evaluated only\nonce (but in both cases "z" is not evaluated at all when "x < y" is\nfound to be false).\n\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\n*op2*, ..., *opN* are comparison operators, then "a op1 b op2 c ... y\nopN z" is equivalent to "a op1 b and b op2 c and ... y opN z", except\nthat each expression is evaluated at most once.\n\nNote that "a op1 b op2 c" doesn\'t imply any kind of comparison between\n*a* and *c*, so that, e.g., "x < y > z" is perfectly legal (though\nperhaps not pretty).\n\nThe operators "<", ">", "==", ">=", "<=", and "!=" compare the values\nof two objects.  The objects need not have the same type. If both are\nnumbers, they are converted to a common type.  Otherwise, the "==" and\n"!=" operators *always* consider objects of different types to be\nunequal, while the "<", ">", ">=" and "<=" operators raise a\n"TypeError" when comparing objects of different types that do not\nimplement these operators for the given pair of types.  You can\ncontrol comparison behavior of objects of non-built-in types by\ndefining rich comparison methods like "__gt__()", described in section\n*Basic customization*.\n\nComparison of objects of the same type depends on the type:\n\n* Numbers are compared arithmetically.\n\n* The values "float(\'NaN\')" and "Decimal(\'NaN\')" are special. They\n  are identical to themselves, "x is x" but are not equal to\n  themselves, "x != x".  Additionally, comparing any value to a\n  not-a-number value will return "False".  For example, both "3 <\n  float(\'NaN\')" and "float(\'NaN\') < 3" will return "False".\n\n* Bytes objects are compared lexicographically using the numeric\n  values of their elements.\n\n* Strings are compared lexicographically using the numeric\n  equivalents (the result of the built-in function "ord()") of their\n  characters. [3] String and bytes object can\'t be compared!\n\n* Tuples and lists are compared lexicographically using comparison\n  of corresponding elements.  This means that to compare equal, each\n  element must compare equal and the two sequences must be of the same\n  type and have the same length.\n\n  If not equal, the sequences are ordered the same as their first\n  differing elements.  For example, "[1,2,x] <= [1,2,y]" has the same\n  value as "x <= y".  If the corresponding element does not exist, the\n  shorter sequence is ordered first (for example, "[1,2] < [1,2,3]").\n\n* Mappings (dictionaries) compare equal if and only if they have the\n  same "(key, value)" pairs. Order comparisons "(\'<\', \'<=\', \'>=\',\n  \'>\')" raise "TypeError".\n\n* Sets and frozensets define comparison operators to mean subset and\n  superset tests.  Those relations do not define total orderings (the\n  two sets "{1,2}" and {2,3} are not equal, nor subsets of one\n  another, nor supersets of one another).  
Accordingly, sets are not\n  appropriate arguments for functions which depend on total ordering.\n  For example, "min()", "max()", and "sorted()" produce undefined\n  results given a list of sets as inputs.\n\n* Most other objects of built-in types compare unequal unless they\n  are the same object; the choice whether one object is considered\n  smaller or larger than another one is made arbitrarily but\n  consistently within one execution of a program.\n\nComparison of objects of differing types depends on whether either of\nthe types provide explicit support for the comparison.  Most numeric\ntypes can be compared with one another.  When cross-type comparison is\nnot supported, the comparison method returns "NotImplemented".\n\nThe operators "in" and "not in" test for membership.  "x in s"\nevaluates to true if *x* is a member of *s*, and false otherwise.  "x\nnot in s" returns the negation of "x in s".  All built-in sequences\nand set types support this as well as dictionary, for which "in" tests\nwhether the dictionary has a given key. For container types such as\nlist, tuple, set, frozenset, dict, or collections.deque, the\nexpression "x in y" is equivalent to "any(x is e or x == e for e in\ny)".\n\nFor the string and bytes types, "x in y" is true if and only if *x* is\na substring of *y*.  An equivalent test is "y.find(x) != -1".  Empty\nstrings are always considered to be a substring of any other string,\nso """ in "abc"" will return "True".\n\nFor user-defined classes which define the "__contains__()" method, "x\nin y" is true if and only if "y.__contains__(x)" is true.\n\nFor user-defined classes which do not define "__contains__()" but do\ndefine "__iter__()", "x in y" is true if some value "z" with "x == z"\nis produced while iterating over "y".  If an exception is raised\nduring the iteration, it is as if "in" raised that exception.\n\nLastly, the old-style iteration protocol is tried: if a class defines\n"__getitem__()", "x in y" is true if and only if there is a non-\nnegative integer index *i* such that "x == y[i]", and all lower\ninteger indices do not raise "IndexError" exception.  (If any other\nexception is raised, it is as if "in" raised that exception).\n\nThe operator "not in" is defined to have the inverse true value of\n"in".\n\nThe operators "is" and "is not" test for object identity: "x is y" is\ntrue if and only if *x* and *y* are the same object.  "x is not y"\nyields the inverse truth value. [4]\n',
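A short sketch of the comparison rules above (chaining, NaN behaviour, and membership tests); all values are invented for illustration:

   x, y, z = 1, 2, 3
   print(x < y <= z)                # True: same as (x < y) and (y <= z)

   nan = float('NaN')
   print(nan == nan, nan != nan)    # False True
   print(3 < nan, nan < 3)          # False False

   print('b' in 'abc')              # True: substring test for strings
   print([] in ([], [1]))           # True: element test uses identity or ==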
  'compound': u'\nCompound statements\n*******************\n\nCompound statements contain (groups of) other statements; they affect\nor control the execution of those other statements in some way.  In\ngeneral, compound statements span multiple lines, although in simple\nincarnations a whole compound statement may be contained in one line.\n\nThe "if", "while" and "for" statements implement traditional control\nflow constructs.  "try" specifies exception handlers and/or cleanup\ncode for a group of statements, while the "with" statement allows the\nexecution of initialization and finalization code around a block of\ncode.  Function and class definitions are also syntactically compound\nstatements.\n\nA compound statement consists of one or more \'clauses.\'  A clause\nconsists of a header and a \'suite.\'  The clause headers of a\nparticular compound statement are all at the same indentation level.\nEach clause header begins with a uniquely identifying keyword and ends\nwith a colon.  A suite is a group of statements controlled by a\nclause.  A suite can be one or more semicolon-separated simple\nstatements on the same line as the header, following the header\'s\ncolon, or it can be one or more indented statements on subsequent\nlines.  Only the latter form of a suite can contain nested compound\nstatements; the following is illegal, mostly because it wouldn\'t be\nclear to which "if" clause a following "else" clause would belong:\n\n   if test1: if test2: print(x)\n\nAlso note that the semicolon binds tighter than the colon in this\ncontext, so that in the following example, either all or none of the\n"print()" calls are executed:\n\n   if x < y < z: print(x); print(y); print(z)\n\nSummarizing:\n\n   compound_stmt ::= if_stmt\n                     | while_stmt\n                     | for_stmt\n                     | try_stmt\n                     | with_stmt\n                     | funcdef\n                     | classdef\n   suite         ::= stmt_list NEWLINE | NEWLINE INDENT statement+ DEDENT\n   statement     ::= stmt_list NEWLINE | compound_stmt\n   stmt_list     ::= simple_stmt (";" simple_stmt)* [";"]\n\nNote that statements always end in a "NEWLINE" possibly followed by a\n"DEDENT".  
Also note that optional continuation clauses always begin\nwith a keyword that cannot start a statement, thus there are no\nambiguities (the \'dangling "else"\' problem is solved in Python by\nrequiring nested "if" statements to be indented).\n\nThe formatting of the grammar rules in the following sections places\neach clause on a separate line for clarity.\n\n\nThe "if" statement\n==================\n\nThe "if" statement is used for conditional execution:\n\n   if_stmt ::= "if" expression ":" suite\n               ( "elif" expression ":" suite )*\n               ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the "if" statement is executed or evaluated).\nIf all expressions are false, the suite of the "else" clause, if\npresent, is executed.\n\n\nThe "while" statement\n=====================\n\nThe "while" statement is used for repeated execution as long as an\nexpression is true:\n\n   while_stmt ::= "while" expression ":" suite\n                  ["else" ":" suite]\n\nThis repeatedly tests the expression and, if it is true, executes the\nfirst suite; if the expression is false (which may be the first time\nit is tested) the suite of the "else" clause, if present, is executed\nand the loop terminates.\n\nA "break" statement executed in the first suite terminates the loop\nwithout executing the "else" clause\'s suite.  A "continue" statement\nexecuted in the first suite skips the rest of the suite and goes back\nto testing the expression.\n\n\nThe "for" statement\n===================\n\nThe "for" statement is used to iterate over the elements of a sequence\n(such as a string, tuple or list) or other iterable object:\n\n   for_stmt ::= "for" target_list "in" expression_list ":" suite\n                ["else" ":" suite]\n\nThe expression list is evaluated once; it should yield an iterable\nobject.  An iterator is created for the result of the\n"expression_list".  The suite is then executed once for each item\nprovided by the iterator, in the order returned by the iterator.  Each\nitem in turn is assigned to the target list using the standard rules\nfor assignments (see *Assignment statements*), and then the suite is\nexecuted.  When the items are exhausted (which is immediately when the\nsequence is empty or an iterator raises a "StopIteration" exception),\nthe suite in the "else" clause, if present, is executed, and the loop\nterminates.\n\nA "break" statement executed in the first suite terminates the loop\nwithout executing the "else" clause\'s suite.  A "continue" statement\nexecuted in the first suite skips the rest of the suite and continues\nwith the next item, or with the "else" clause if there is no next\nitem.\n\nThe for-loop makes assignments to the variables(s) in the target list.\nThis overwrites all previous assignments to those variables including\nthose made in the suite of the for-loop:\n\n   for i in range(10):\n       print(i)\n       i = 5             # this will not affect the for-loop\n                         # because i will be overwritten with the next\n                         # index in the range\n\nNames in the target list are not deleted when the loop is finished,\nbut if the sequence is empty, they will not have been assigned to at\nall by the loop.  
Hint: the built-in function "range()" returns an\niterator of integers suitable to emulate the effect of Pascal\'s "for i\n:= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, 2]".\n\nNote: There is a subtlety when the sequence is being modified by the\n  loop (this can only occur for mutable sequences, i.e. lists).  An\n  internal counter is used to keep track of which item is used next,\n  and this is incremented on each iteration.  When this counter has\n  reached the length of the sequence the loop terminates.  This means\n  that if the suite deletes the current (or a previous) item from the\n  sequence, the next item will be skipped (since it gets the index of\n  the current item which has already been treated).  Likewise, if the\n  suite inserts an item in the sequence before the current item, the\n  current item will be treated again the next time through the loop.\n  This can lead to nasty bugs that can be avoided by making a\n  temporary copy using a slice of the whole sequence, e.g.,\n\n     for x in a[:]:\n         if x < 0: a.remove(x)\n\n\nThe "try" statement\n===================\n\nThe "try" statement specifies exception handlers and/or cleanup code\nfor a group of statements:\n\n   try_stmt  ::= try1_stmt | try2_stmt\n   try1_stmt ::= "try" ":" suite\n                 ("except" [expression ["as" identifier]] ":" suite)+\n                 ["else" ":" suite]\n                 ["finally" ":" suite]\n   try2_stmt ::= "try" ":" suite\n                 "finally" ":" suite\n\nThe "except" clause(s) specify one or more exception handlers. When no\nexception occurs in the "try" clause, no exception handler is\nexecuted. When an exception occurs in the "try" suite, a search for an\nexception handler is started.  This search inspects the except clauses\nin turn until one is found that matches the exception.  An expression-\nless except clause, if present, must be last; it matches any\nexception.  For an except clause with an expression, that expression\nis evaluated, and the clause matches the exception if the resulting\nobject is "compatible" with the exception.  An object is compatible\nwith an exception if it is the class or a base class of the exception\nobject or a tuple containing an item compatible with the exception.\n\nIf no except clause matches the exception, the search for an exception\nhandler continues in the surrounding code and on the invocation stack.\n[1]\n\nIf the evaluation of an expression in the header of an except clause\nraises an exception, the original search for a handler is canceled and\na search starts for the new exception in the surrounding code and on\nthe call stack (it is treated as if the entire "try" statement raised\nthe exception).\n\nWhen a matching except clause is found, the exception is assigned to\nthe target specified after the "as" keyword in that except clause, if\npresent, and the except clause\'s suite is executed.  All except\nclauses must have an executable block.  When the end of this block is\nreached, execution continues normally after the entire try statement.\n(This means that if two nested handlers exist for the same exception,\nand the exception occurs in the try clause of the inner handler, the\nouter handler will not handle the exception.)\n\nWhen an exception has been assigned using "as target", it is cleared\nat the end of the except clause.  
This is as if\n\n   except E as N:\n       foo\n\nwas translated to\n\n   except E as N:\n       try:\n           foo\n       finally:\n           del N\n\nThis means the exception must be assigned to a different name to be\nable to refer to it after the except clause.  Exceptions are cleared\nbecause with the traceback attached to them, they form a reference\ncycle with the stack frame, keeping all locals in that frame alive\nuntil the next garbage collection occurs.\n\nBefore an except clause\'s suite is executed, details about the\nexception are stored in the "sys" module and can be accessed via\n"sys.exc_info()". "sys.exc_info()" returns a 3-tuple consisting of the\nexception class, the exception instance and a traceback object (see\nsection *The standard type hierarchy*) identifying the point in the\nprogram where the exception occurred.  "sys.exc_info()" values are\nrestored to their previous values (before the call) when returning\nfrom a function that handled an exception.\n\nThe optional "else" clause is executed if and when control flows off\nthe end of the "try" clause. [2] Exceptions in the "else" clause are\nnot handled by the preceding "except" clauses.\n\nIf "finally" is present, it specifies a \'cleanup\' handler.  The "try"\nclause is executed, including any "except" and "else" clauses.  If an\nexception occurs in any of the clauses and is not handled, the\nexception is temporarily saved. The "finally" clause is executed.  If\nthere is a saved exception it is re-raised at the end of the "finally"\nclause.  If the "finally" clause raises another exception, the saved\nexception is set as the context of the new exception. If the "finally"\nclause executes a "return" or "break" statement, the saved exception\nis discarded:\n\n   >>> def f():\n   ...     try:\n   ...         1/0\n   ...     finally:\n   ...         return 42\n   ...\n   >>> f()\n   42\n\nThe exception information is not available to the program during\nexecution of the "finally" clause.\n\nWhen a "return", "break" or "continue" statement is executed in the\n"try" suite of a "try"..."finally" statement, the "finally" clause is\nalso executed \'on the way out.\' A "continue" statement is illegal in\nthe "finally" clause. (The reason is a problem with the current\nimplementation --- this restriction may be lifted in the future).\n\nThe return value of a function is determined by the last "return"\nstatement executed.  Since the "finally" clause always executes, a\n"return" statement executed in the "finally" clause will always be the\nlast one executed:\n\n   >>> def foo():\n   ...     try:\n   ...         return \'try\'\n   ...     finally:\n   ...         return \'finally\'\n   ...\n   >>> foo()\n   \'finally\'\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information on using the "raise" statement to\ngenerate exceptions may be found in section *The raise statement*.\n\n\nThe "with" statement\n====================\n\nThe "with" statement is used to wrap the execution of a block with\nmethods defined by a context manager (see section *With Statement\nContext Managers*). This allows common "try"..."except"..."finally"\nusage patterns to be encapsulated for convenient reuse.\n\n   with_stmt ::= "with" with_item ("," with_item)* ":" suite\n   with_item ::= expression ["as" target]\n\nThe execution of the "with" statement with one "item" proceeds as\nfollows:\n\n1. The context expression (the expression given in the "with_item")\n   is evaluated to obtain a context manager.\n\n2. 
The context manager\'s "__exit__()" is loaded for later use.\n\n3. The context manager\'s "__enter__()" method is invoked.\n\n4. If a target was included in the "with" statement, the return\n   value from "__enter__()" is assigned to it.\n\n   Note: The "with" statement guarantees that if the "__enter__()"\n     method returns without an error, then "__exit__()" will always be\n     called. Thus, if an error occurs during the assignment to the\n     target list, it will be treated the same as an error occurring\n     within the suite would be. See step 6 below.\n\n5. The suite is executed.\n\n6. The context manager\'s "__exit__()" method is invoked.  If an\n   exception caused the suite to be exited, its type, value, and\n   traceback are passed as arguments to "__exit__()". Otherwise, three\n   "None" arguments are supplied.\n\n   If the suite was exited due to an exception, and the return value\n   from the "__exit__()" method was false, the exception is reraised.\n   If the return value was true, the exception is suppressed, and\n   execution continues with the statement following the "with"\n   statement.\n\n   If the suite was exited for any reason other than an exception, the\n   return value from "__exit__()" is ignored, and execution proceeds\n   at the normal location for the kind of exit that was taken.\n\nWith more than one item, the context managers are processed as if\nmultiple "with" statements were nested:\n\n   with A() as a, B() as b:\n       suite\n\nis equivalent to\n\n   with A() as a:\n       with B() as b:\n           suite\n\nChanged in version 3.1: Support for multiple context expressions.\n\nSee also: **PEP 0343** - The "with" statement\n\n     The specification, background, and examples for the Python "with"\n     statement.\n\n\nFunction definitions\n====================\n\nA function definition defines a user-defined function object (see\nsection *The standard type hierarchy*):\n\n   funcdef        ::= [decorators] "def" funcname "(" [parameter_list] ")" ["->" expression] ":" suite\n   decorators     ::= decorator+\n   decorator      ::= "@" dotted_name ["(" [parameter_list [","]] ")"] NEWLINE\n   dotted_name    ::= identifier ("." identifier)*\n   parameter_list ::= (defparameter ",")*\n                      | "*" [parameter] ("," defparameter)* ["," "**" parameter]\n                      | "**" parameter\n                      | defparameter [","] )\n   parameter      ::= identifier [":" expression]\n   defparameter   ::= parameter ["=" expression]\n   funcname       ::= identifier\n\nA function definition is an executable statement.  Its execution binds\nthe function name in the current local namespace to a function object\n(a wrapper around the executable code for the function).  This\nfunction object contains a reference to the current global namespace\nas the global namespace to be used when the function is called.\n\nThe function definition does not execute the function body; this gets\nexecuted only when the function is called. [3]\n\nA function definition may be wrapped by one or more *decorator*\nexpressions. Decorator expressions are evaluated when the function is\ndefined, in the scope that contains the function definition.  The\nresult must be a callable, which is invoked with the function object\nas the only argument. The returned value is bound to the function name\ninstead of the function object.  Multiple decorators are applied in\nnested fashion. 
For example, the following code\n\n   @f1(arg)\n   @f2\n   def func(): pass\n\nis equivalent to\n\n   def func(): pass\n   func = f1(arg)(f2(func))\n\nWhen one or more *parameters* have the form *parameter* "="\n*expression*, the function is said to have "default parameter values."\nFor a parameter with a default value, the corresponding *argument* may\nbe omitted from a call, in which case the parameter\'s default value is\nsubstituted.  If a parameter has a default value, all following\nparameters up until the ""*"" must also have a default value --- this\nis a syntactic restriction that is not expressed by the grammar.\n\n**Default parameter values are evaluated from left to right when the\nfunction definition is executed.** This means that the expression is\nevaluated once, when the function is defined, and that the same "pre-\ncomputed" value is used for each call.  This is especially important\nto understand when a default parameter is a mutable object, such as a\nlist or a dictionary: if the function modifies the object (e.g. by\nappending an item to a list), the default value is in effect modified.\nThis is generally not what was intended.  A way around this is to use\n"None" as the default, and explicitly test for it in the body of the\nfunction, e.g.:\n\n   def whats_on_the_telly(penguin=None):\n       if penguin is None:\n           penguin = []\n       penguin.append("property of the zoo")\n       return penguin\n\nFunction call semantics are described in more detail in section\n*Calls*. A function call always assigns values to all parameters\nmentioned in the parameter list, either from position arguments, from\nkeyword arguments, or from default values.  If the form\n""*identifier"" is present, it is initialized to a tuple receiving any\nexcess positional parameters, defaulting to the empty tuple.  If the\nform ""**identifier"" is present, it is initialized to a new\ndictionary receiving any excess keyword arguments, defaulting to a new\nempty dictionary. Parameters after ""*"" or ""*identifier"" are\nkeyword-only parameters and may only be passed used keyword arguments.\n\nParameters may have annotations of the form "": expression"" following\nthe parameter name.  Any parameter may have an annotation even those\nof the form "*identifier" or "**identifier".  Functions may have\n"return" annotation of the form ""-> expression"" after the parameter\nlist.  These annotations can be any valid Python expression and are\nevaluated when the function definition is executed.  Annotations may\nbe evaluated in a different order than they appear in the source code.\nThe presence of annotations does not change the semantics of a\nfunction.  The annotation values are available as values of a\ndictionary keyed by the parameters\' names in the "__annotations__"\nattribute of the function object.\n\nIt is also possible to create anonymous functions (functions not bound\nto a name), for immediate use in expressions.  This uses lambda\nexpressions, described in section *Lambdas*.  Note that the lambda\nexpression is merely a shorthand for a simplified function definition;\na function defined in a ""def"" statement can be passed around or\nassigned to another name just like a function defined by a lambda\nexpression.  The ""def"" form is actually more powerful since it\nallows the execution of multiple statements and annotations.\n\n**Programmer\'s note:** Functions are first-class objects.  
A ""def""\nstatement executed inside a function definition defines a local\nfunction that can be returned or passed around.  Free variables used\nin the nested function can access the local variables of the function\ncontaining the def.  See section *Naming and binding* for details.\n\nSee also: **PEP 3107** - Function Annotations\n\n     The original specification for function annotations.\n\n\nClass definitions\n=================\n\nA class definition defines a class object (see section *The standard\ntype hierarchy*):\n\n   classdef    ::= [decorators] "class" classname [inheritance] ":" suite\n   inheritance ::= "(" [parameter_list] ")"\n   classname   ::= identifier\n\nA class definition is an executable statement.  The inheritance list\nusually gives a list of base classes (see *Customizing class creation*\nfor more advanced uses), so each item in the list should evaluate to a\nclass object which allows subclassing.  Classes without an inheritance\nlist inherit, by default, from the base class "object"; hence,\n\n   class Foo:\n       pass\n\nis equivalent to\n\n   class Foo(object):\n       pass\n\nThe class\'s suite is then executed in a new execution frame (see\n*Naming and binding*), using a newly created local namespace and the\noriginal global namespace. (Usually, the suite contains mostly\nfunction definitions.)  When the class\'s suite finishes execution, its\nexecution frame is discarded but its local namespace is saved. [4] A\nclass object is then created using the inheritance list for the base\nclasses and the saved local namespace for the attribute dictionary.\nThe class name is bound to this class object in the original local\nnamespace.\n\nClass creation can be customized heavily using *metaclasses*.\n\nClasses can also be decorated: just like when decorating functions,\n\n   @f1(arg)\n   @f2\n   class Foo: pass\n\nis equivalent to\n\n   class Foo: pass\n   Foo = f1(arg)(f2(Foo))\n\nThe evaluation rules for the decorator expressions are the same as for\nfunction decorators.  The result must be a class object, which is then\nbound to the class name.\n\n**Programmer\'s note:** Variables defined in the class definition are\nclass attributes; they are shared by instances.  Instance attributes\ncan be set in a method with "self.name = value".  Both class and\ninstance attributes are accessible through the notation ""self.name"",\nand an instance attribute hides a class attribute with the same name\nwhen accessed in this way.  Class attributes can be used as defaults\nfor instance attributes, but using mutable values there can lead to\nunexpected results.  *Descriptors* can be used to create instance\nvariables with different implementation details.\n\nSee also: **PEP 3115** - Metaclasses in Python 3 **PEP 3129** -\n  Class Decorators\n\n-[ Footnotes ]-\n\n[1] The exception is propagated to the invocation stack unless\n    there is a "finally" clause which happens to raise another\n    exception. That new exception causes the old one to be lost.\n\n[2] Currently, control "flows off the end" except in the case of\n    an exception or the execution of a "return", "continue", or\n    "break" statement.\n\n[3] A string literal appearing as the first statement in the\n    function body is transformed into the function\'s "__doc__"\n    attribute and therefore the function\'s *docstring*.\n\n[4] A string literal appearing as the first statement in the class\n    body is transformed into the namespace\'s "__doc__" item and\n    therefore the class\'s *docstring*.\n',
  'context-managers': u'\nWith Statement Context Managers\n*******************************\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a "with" statement. The context manager\nhandles the entry into, and the exit from, the desired runtime context\nfor the execution of the block of code.  Context managers are normally\ninvoked using the "with" statement (described in section *The with\nstatement*), but can also be used by directly invoking their methods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n   Enter the runtime context related to this object. The "with"\n   statement will bind this method\'s return value to the target(s)\n   specified in the "as" clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n   Exit the runtime context related to this object. The parameters\n   describe the exception that caused the context to be exited. If the\n   context was exited without an exception, all three arguments will\n   be "None".\n\n   If an exception is supplied, and the method wishes to suppress the\n   exception (i.e., prevent it from being propagated), it should\n   return a true value. Otherwise, the exception will be processed\n   normally upon exit from this method.\n\n   Note that "__exit__()" methods should not reraise the passed-in\n   exception; this is the caller\'s responsibility.\n\nSee also: **PEP 0343** - The "with" statement\n\n     The specification, background, and examples for the Python "with"\n     statement.\n',
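A minimal context-manager sketch matching the protocol described above; the class name "Resource" is invented:

   class Resource:
       def __enter__(self):
           print('acquire')
           return self              # bound to the "as" target, if any

       def __exit__(self, exc_type, exc_value, traceback):
           print('release')
           return False             # do not suppress exceptions

   with Resource() as r:
       print('using', r)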
  'continue': u'\nThe "continue" statement\n************************\n\n   continue_stmt ::= "continue"\n\n"continue" may only occur syntactically nested in a "for" or "while"\nloop, but not nested in a function or class definition or "finally"\nclause within that loop.  It continues with the next cycle of the\nnearest enclosing loop.\n\nWhen "continue" passes control out of a "try" statement with a\n"finally" clause, that "finally" clause is executed before really\nstarting the next loop cycle.\n',
@@ -42,7 +42,7 @@
  'if': u'\nThe "if" statement\n******************\n\nThe "if" statement is used for conditional execution:\n\n   if_stmt ::= "if" expression ":" suite\n               ( "elif" expression ":" suite )*\n               ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the "if" statement is executed or evaluated).\nIf all expressions are false, the suite of the "else" clause, if\npresent, is executed.\n',
  'imaginary': u'\nImaginary literals\n******************\n\nImaginary literals are described by the following lexical definitions:\n\n   imagnumber ::= (floatnumber | intpart) ("j" | "J")\n\nAn imaginary literal yields a complex number with a real part of 0.0.\nComplex numbers are represented as a pair of floating point numbers\nand have the same restrictions on their range.  To create a complex\nnumber with a nonzero real part, add a floating point number to it,\ne.g., "(3+4j)".  Some examples of imaginary literals:\n\n   3.14j   10.j    10j     .001j   1e100j  3.14e-10j\n',
  'import': u'\nThe "import" statement\n**********************\n\n   import_stmt     ::= "import" module ["as" name] ( "," module ["as" name] )*\n                   | "from" relative_module "import" identifier ["as" name]\n                   ( "," identifier ["as" name] )*\n                   | "from" relative_module "import" "(" identifier ["as" name]\n                   ( "," identifier ["as" name] )* [","] ")"\n                   | "from" module "import" "*"\n   module          ::= (identifier ".")* identifier\n   relative_module ::= "."* module | "."+\n   name            ::= identifier\n\nThe basic import statement (no "from" clause) is executed in two\nsteps:\n\n1. find a module, loading and initializing it if necessary\n\n2. define a name or names in the local namespace for the scope\n   where the "import" statement occurs.\n\nWhen the statement contains multiple clauses (separated by commas) the\ntwo steps are carried out separately for each clause, just as though\nthe clauses had been separated out into individiual import statements.\n\nThe details of the first step, finding and loading modules are\ndescribed in greater detail in the section on the *import system*,\nwhich also describes the various types of packages and modules that\ncan be imported, as well as all the hooks that can be used to\ncustomize the import system. Note that failures in this step may\nindicate either that the module could not be located, *or* that an\nerror occurred while initializing the module, which includes execution\nof the module\'s code.\n\nIf the requested module is retrieved successfully, it will be made\navailable in the local namespace in one of three ways:\n\n* If the module name is followed by "as", then the name following\n  "as" is bound directly to the imported module.\n\n* If no other name is specified, and the module being imported is a\n  top level module, the module\'s name is bound in the local namespace\n  as a reference to the imported module\n\n* If the module being imported is *not* a top level module, then the\n  name of the top level package that contains the module is bound in\n  the local namespace as a reference to the top level package. The\n  imported module must be accessed using its full qualified name\n  rather than directly\n\nThe "from" form uses a slightly more complex process:\n\n1. find the module specified in the "from" clause, loading and\n   initializing it if necessary;\n\n2. for each of the identifiers specified in the "import" clauses:\n\n   1. check if the imported module has an attribute by that name\n\n   2. if not, attempt to import a submodule with that name and then\n      check the imported module again for that attribute\n\n   3. if the attribute is not found, "ImportError" is raised.\n\n   4. 
otherwise, a reference to that value is stored in the local\n      namespace, using the name in the "as" clause if it is present,\n      otherwise using the attribute name\n\nExamples:\n\n   import foo                 # foo imported and bound locally\n   import foo.bar.baz         # foo.bar.baz imported, foo bound locally\n   import foo.bar.baz as fbb  # foo.bar.baz imported and bound as fbb\n   from foo.bar import baz    # foo.bar.baz imported and bound as baz\n   from foo import attr       # foo imported and foo.attr bound as attr\n\nIf the list of identifiers is replaced by a star ("\'*\'"), all public\nnames defined in the module are bound in the local namespace for the\nscope where the "import" statement occurs.\n\nThe *public names* defined by a module are determined by checking the\nmodule\'s namespace for a variable named "__all__"; if defined, it must\nbe a sequence of strings which are names defined or imported by that\nmodule.  The names given in "__all__" are all considered public and\nare required to exist.  If "__all__" is not defined, the set of public\nnames includes all names found in the module\'s namespace which do not\nbegin with an underscore character ("\'_\'").  "__all__" should contain\nthe entire public API. It is intended to avoid accidentally exporting\nitems that are not part of the API (such as library modules which were\nimported and used within the module).\n\nThe wild card form of import --- "from module import *" --- is only\nallowed at the module level.  Attempting to use it in class or\nfunction definitions will raise a "SyntaxError".\n\nWhen specifying what module to import you do not have to specify the\nabsolute name of the module. When a module or package is contained\nwithin another package it is possible to make a relative import within\nthe same top package without having to mention the package name. By\nusing leading dots in the specified module or package after "from" you\ncan specify how high to traverse up the current package hierarchy\nwithout specifying exact names. One leading dot means the current\npackage where the module making the import exists. Two dots means up\none package level. Three dots is up two levels, etc. So if you execute\n"from . import mod" from a module in the "pkg" package then you will\nend up importing "pkg.mod". If you execute "from ..subpkg2 import mod"\nfrom within "pkg.subpkg1" you will import "pkg.subpkg2.mod". The\nspecification for relative imports is contained within **PEP 328**.\n\n"importlib.import_module()" is provided to support applications that\ndetermine dynamically the modules to be loaded.\n\n\nFuture statements\n=================\n\nA *future statement* is a directive to the compiler that a particular\nmodule should be compiled using syntax or semantics that will be\navailable in a specified future release of Python where the feature\nbecomes standard.\n\nThe future statement is intended to ease migration to future versions\nof Python that introduce incompatible changes to the language.  It\nallows use of the new features on a per-module basis before the\nrelease in which the feature becomes standard.\n\n   future_statement ::= "from" "__future__" "import" feature ["as" name]\n                        ("," feature ["as" name])*\n                        | "from" "__future__" "import" "(" feature ["as" name]\n                        ("," feature ["as" name])* [","] ")"\n   feature          ::= identifier\n   name             ::= identifier\n\nA future statement must appear near the top of the module.  
The only\nlines that can appear before a future statement are:\n\n* the module docstring (if any),\n\n* comments,\n\n* blank lines, and\n\n* other future statements.\n\nThe features recognized by Python 3.0 are "absolute_import",\n"division", "generators", "unicode_literals", "print_function",\n"nested_scopes" and "with_statement".  They are all redundant because\nthey are always enabled, and only kept for backwards compatibility.\n\nA future statement is recognized and treated specially at compile\ntime: Changes to the semantics of core constructs are often\nimplemented by generating different code.  It may even be the case\nthat a new feature introduces new incompatible syntax (such as a new\nreserved word), in which case the compiler may need to parse the\nmodule differently.  Such decisions cannot be pushed off until\nruntime.\n\nFor any given release, the compiler knows which feature names have\nbeen defined, and raises a compile-time error if a future statement\ncontains a feature not known to it.\n\nThe direct runtime semantics are the same as for any import statement:\nthere is a standard module "__future__", described later, and it will\nbe imported in the usual way at the time the future statement is\nexecuted.\n\nThe interesting runtime semantics depend on the specific feature\nenabled by the future statement.\n\nNote that there is nothing special about the statement:\n\n   import __future__ [as name]\n\nThat is not a future statement; it\'s an ordinary import statement with\nno special semantics or syntax restrictions.\n\nCode compiled by calls to the built-in functions "exec()" and\n"compile()" that occur in a module "M" containing a future statement\nwill, by default, use the new syntax or semantics associated with the\nfuture statement.  This can be controlled by optional arguments to\n"compile()" --- see the documentation of that function for details.\n\nA future statement typed at an interactive interpreter prompt will\ntake effect for the rest of the interpreter session.  If an\ninterpreter is started with the *-i* option, is passed a script name\nto execute, and the script includes a future statement, it will be in\neffect in the interactive session started after the script is\nexecuted.\n\nSee also: **PEP 236** - Back to the __future__\n\n     The original proposal for the __future__ mechanism.\n',
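For illustration of the relative-import rules and dynamic imports described above, a hedged sketch; the package layout in the comments is hypothetical, and only the importlib call is runnable as-is:

   # Hypothetical layout: pkg/__init__.py, pkg/subpkg1/a.py, pkg/subpkg2/mod.py
   # Inside pkg/subpkg1/a.py one could write:
   #   from . import mod            # imports pkg.subpkg1.mod (if it exists)
   #   from ..subpkg2 import mod    # imports pkg.subpkg2.mod

   import importlib
   mod = importlib.import_module('json')   # dynamic form of "import json"
   print(mod.dumps({'ok': True}))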
- 'in': u'\nComparisons\n***********\n\nUnlike C, all comparison operations in Python have the same priority,\nwhich is lower than that of any arithmetic, shifting or bitwise\noperation.  Also unlike C, expressions like "a < b < c" have the\ninterpretation that is conventional in mathematics:\n\n   comparison    ::= or_expr ( comp_operator or_expr )*\n   comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n                     | "is" ["not"] | ["not"] "in"\n\nComparisons yield boolean values: "True" or "False".\n\nComparisons can be chained arbitrarily, e.g., "x < y <= z" is\nequivalent to "x < y and y <= z", except that "y" is evaluated only\nonce (but in both cases "z" is not evaluated at all when "x < y" is\nfound to be false).\n\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\n*op2*, ..., *opN* are comparison operators, then "a op1 b op2 c ... y\nopN z" is equivalent to "a op1 b and b op2 c and ... y opN z", except\nthat each expression is evaluated at most once.\n\nNote that "a op1 b op2 c" doesn\'t imply any kind of comparison between\n*a* and *c*, so that, e.g., "x < y > z" is perfectly legal (though\nperhaps not pretty).\n\nThe operators "<", ">", "==", ">=", "<=", and "!=" compare the values\nof two objects.  The objects need not have the same type. If both are\nnumbers, they are converted to a common type.  Otherwise, the "==" and\n"!=" operators *always* consider objects of different types to be\nunequal, while the "<", ">", ">=" and "<=" operators raise a\n"TypeError" when comparing objects of different types that do not\nimplement these operators for the given pair of types.  You can\ncontrol comparison behavior of objects of non-built-in types by\ndefining rich comparison methods like "__gt__()", described in section\n*Basic customization*.\n\nComparison of objects of the same type depends on the type:\n\n* Numbers are compared arithmetically.\n\n* The values "float(\'NaN\')" and "Decimal(\'NaN\')" are special. The\n  are identical to themselves, "x is x" but are not equal to\n  themselves, "x != x".  Additionally, comparing any value to a\n  not-a-number value will return "False".  For example, both "3 <\n  float(\'NaN\')" and "float(\'NaN\') < 3" will return "False".\n\n* Bytes objects are compared lexicographically using the numeric\n  values of their elements.\n\n* Strings are compared lexicographically using the numeric\n  equivalents (the result of the built-in function "ord()") of their\n  characters. [3] String and bytes object can\'t be compared!\n\n* Tuples and lists are compared lexicographically using comparison\n  of corresponding elements.  This means that to compare equal, each\n  element must compare equal and the two sequences must be of the same\n  type and have the same length.\n\n  If not equal, the sequences are ordered the same as their first\n  differing elements.  For example, "[1,2,x] <= [1,2,y]" has the same\n  value as "x <= y".  If the corresponding element does not exist, the\n  shorter sequence is ordered first (for example, "[1,2] < [1,2,3]").\n\n* Mappings (dictionaries) compare equal if and only if they have the\n  same "(key, value)" pairs. Order comparisons "(\'<\', \'<=\', \'>=\',\n  \'>\')" raise "TypeError".\n\n* Sets and frozensets define comparison operators to mean subset and\n  superset tests.  Those relations do not define total orderings (the\n  two sets "{1,2}" and {2,3} are not equal, nor subsets of one\n  another, nor supersets of one another).  
Accordingly, sets are not\n  appropriate arguments for functions which depend on total ordering.\n  For example, "min()", "max()", and "sorted()" produce undefined\n  results given a list of sets as inputs.\n\n* Most other objects of built-in types compare unequal unless they\n  are the same object; the choice whether one object is considered\n  smaller or larger than another one is made arbitrarily but\n  consistently within one execution of a program.\n\nComparison of objects of differing types depends on whether either of\nthe types provide explicit support for the comparison.  Most numeric\ntypes can be compared with one another.  When cross-type comparison is\nnot supported, the comparison method returns "NotImplemented".\n\nThe operators "in" and "not in" test for membership.  "x in s"\nevaluates to true if *x* is a member of *s*, and false otherwise.  "x\nnot in s" returns the negation of "x in s".  All built-in sequences\nand set types support this as well as dictionary, for which "in" tests\nwhether the dictionary has a given key. For container types such as\nlist, tuple, set, frozenset, dict, or collections.deque, the\nexpression "x in y" is equivalent to "any(x is e or x == e for e in\ny)".\n\nFor the string and bytes types, "x in y" is true if and only if *x* is\na substring of *y*.  An equivalent test is "y.find(x) != -1".  Empty\nstrings are always considered to be a substring of any other string,\nso """ in "abc"" will return "True".\n\nFor user-defined classes which define the "__contains__()" method, "x\nin y" is true if and only if "y.__contains__(x)" is true.\n\nFor user-defined classes which do not define "__contains__()" but do\ndefine "__iter__()", "x in y" is true if some value "z" with "x == z"\nis produced while iterating over "y".  If an exception is raised\nduring the iteration, it is as if "in" raised that exception.\n\nLastly, the old-style iteration protocol is tried: if a class defines\n"__getitem__()", "x in y" is true if and only if there is a non-\nnegative integer index *i* such that "x == y[i]", and all lower\ninteger indices do not raise "IndexError" exception.  (If any other\nexception is raised, it is as if "in" raised that exception).\n\nThe operator "not in" is defined to have the inverse true value of\n"in".\n\nThe operators "is" and "is not" test for object identity: "x is y" is\ntrue if and only if *x* and *y* are the same object.  "x is not y"\nyields the inverse truth value. [4]\n',
+ 'in': u'\nComparisons\n***********\n\nUnlike C, all comparison operations in Python have the same priority,\nwhich is lower than that of any arithmetic, shifting or bitwise\noperation.  Also unlike C, expressions like "a < b < c" have the\ninterpretation that is conventional in mathematics:\n\n   comparison    ::= or_expr ( comp_operator or_expr )*\n   comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n                     | "is" ["not"] | ["not"] "in"\n\nComparisons yield boolean values: "True" or "False".\n\nComparisons can be chained arbitrarily, e.g., "x < y <= z" is\nequivalent to "x < y and y <= z", except that "y" is evaluated only\nonce (but in both cases "z" is not evaluated at all when "x < y" is\nfound to be false).\n\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\n*op2*, ..., *opN* are comparison operators, then "a op1 b op2 c ... y\nopN z" is equivalent to "a op1 b and b op2 c and ... y opN z", except\nthat each expression is evaluated at most once.\n\nNote that "a op1 b op2 c" doesn\'t imply any kind of comparison between\n*a* and *c*, so that, e.g., "x < y > z" is perfectly legal (though\nperhaps not pretty).\n\nThe operators "<", ">", "==", ">=", "<=", and "!=" compare the values\nof two objects.  The objects need not have the same type. If both are\nnumbers, they are converted to a common type.  Otherwise, the "==" and\n"!=" operators *always* consider objects of different types to be\nunequal, while the "<", ">", ">=" and "<=" operators raise a\n"TypeError" when comparing objects of different types that do not\nimplement these operators for the given pair of types.  You can\ncontrol comparison behavior of objects of non-built-in types by\ndefining rich comparison methods like "__gt__()", described in section\n*Basic customization*.\n\nComparison of objects of the same type depends on the type:\n\n* Numbers are compared arithmetically.\n\n* The values "float(\'NaN\')" and "Decimal(\'NaN\')" are special. They\n  are identical to themselves, "x is x" but are not equal to\n  themselves, "x != x".  Additionally, comparing any value to a\n  not-a-number value will return "False".  For example, both "3 <\n  float(\'NaN\')" and "float(\'NaN\') < 3" will return "False".\n\n* Bytes objects are compared lexicographically using the numeric\n  values of their elements.\n\n* Strings are compared lexicographically using the numeric\n  equivalents (the result of the built-in function "ord()") of their\n  characters. [3] String and bytes object can\'t be compared!\n\n* Tuples and lists are compared lexicographically using comparison\n  of corresponding elements.  This means that to compare equal, each\n  element must compare equal and the two sequences must be of the same\n  type and have the same length.\n\n  If not equal, the sequences are ordered the same as their first\n  differing elements.  For example, "[1,2,x] <= [1,2,y]" has the same\n  value as "x <= y".  If the corresponding element does not exist, the\n  shorter sequence is ordered first (for example, "[1,2] < [1,2,3]").\n\n* Mappings (dictionaries) compare equal if and only if they have the\n  same "(key, value)" pairs. Order comparisons "(\'<\', \'<=\', \'>=\',\n  \'>\')" raise "TypeError".\n\n* Sets and frozensets define comparison operators to mean subset and\n  superset tests.  Those relations do not define total orderings (the\n  two sets "{1,2}" and {2,3} are not equal, nor subsets of one\n  another, nor supersets of one another).  
Accordingly, sets are not\n  appropriate arguments for functions which depend on total ordering.\n  For example, "min()", "max()", and "sorted()" produce undefined\n  results given a list of sets as inputs.\n\n* Most other objects of built-in types compare unequal unless they\n  are the same object; the choice whether one object is considered\n  smaller or larger than another one is made arbitrarily but\n  consistently within one execution of a program.\n\nComparison of objects of differing types depends on whether either of\nthe types provide explicit support for the comparison.  Most numeric\ntypes can be compared with one another.  When cross-type comparison is\nnot supported, the comparison method returns "NotImplemented".\n\nThe operators "in" and "not in" test for membership.  "x in s"\nevaluates to true if *x* is a member of *s*, and false otherwise.  "x\nnot in s" returns the negation of "x in s".  All built-in sequences\nand set types support this as well as dictionary, for which "in" tests\nwhether the dictionary has a given key. For container types such as\nlist, tuple, set, frozenset, dict, or collections.deque, the\nexpression "x in y" is equivalent to "any(x is e or x == e for e in\ny)".\n\nFor the string and bytes types, "x in y" is true if and only if *x* is\na substring of *y*.  An equivalent test is "y.find(x) != -1".  Empty\nstrings are always considered to be a substring of any other string,\nso """ in "abc"" will return "True".\n\nFor user-defined classes which define the "__contains__()" method, "x\nin y" is true if and only if "y.__contains__(x)" is true.\n\nFor user-defined classes which do not define "__contains__()" but do\ndefine "__iter__()", "x in y" is true if some value "z" with "x == z"\nis produced while iterating over "y".  If an exception is raised\nduring the iteration, it is as if "in" raised that exception.\n\nLastly, the old-style iteration protocol is tried: if a class defines\n"__getitem__()", "x in y" is true if and only if there is a non-\nnegative integer index *i* such that "x == y[i]", and all lower\ninteger indices do not raise "IndexError" exception.  (If any other\nexception is raised, it is as if "in" raised that exception).\n\nThe operator "not in" is defined to have the inverse true value of\n"in".\n\nThe operators "is" and "is not" test for object identity: "x is y" is\ntrue if and only if *x* and *y* are the same object.  "x is not y"\nyields the inverse truth value. [4]\n',
  'integers': u'\nInteger literals\n****************\n\nInteger literals are described by the following lexical definitions:\n\n   integer        ::= decimalinteger | octinteger | hexinteger | bininteger\n   decimalinteger ::= nonzerodigit digit* | "0"+\n   nonzerodigit   ::= "1"..."9"\n   digit          ::= "0"..."9"\n   octinteger     ::= "0" ("o" | "O") octdigit+\n   hexinteger     ::= "0" ("x" | "X") hexdigit+\n   bininteger     ::= "0" ("b" | "B") bindigit+\n   octdigit       ::= "0"..."7"\n   hexdigit       ::= digit | "a"..."f" | "A"..."F"\n   bindigit       ::= "0" | "1"\n\nThere is no limit for the length of integer literals apart from what\ncan be stored in available memory.\n\nNote that leading zeros in a non-zero decimal number are not allowed.\nThis is for disambiguation with C-style octal literals, which Python\nused before version 3.0.\n\nSome examples of integer literals:\n\n   7     2147483647                        0o177    0b100110111\n   3     79228162514264337593543950336     0o377    0x100000000\n         79228162514264337593543950336              0xdeadbeef\n',
  'lambda': u'\nLambdas\n*******\n\n   lambda_expr        ::= "lambda" [parameter_list]: expression\n   lambda_expr_nocond ::= "lambda" [parameter_list]: expression_nocond\n\nLambda expressions (sometimes called lambda forms) are used to create\nanonymous functions. The expression "lambda arguments: expression"\nyields a function object.  The unnamed object behaves like a function\nobject defined with\n\n   def <lambda>(arguments):\n       return expression\n\nSee section *Function definitions* for the syntax of parameter lists.\nNote that functions created with lambda expressions cannot contain\nstatements or annotations.\n',
  'lists': u'\nList displays\n*************\n\nA list display is a possibly empty series of expressions enclosed in\nsquare brackets:\n\n   list_display ::= "[" [expression_list | comprehension] "]"\n\nA list display yields a new list object, the contents being specified\nby either a list of expressions or a comprehension.  When a comma-\nseparated list of expressions is supplied, its elements are evaluated\nfrom left to right and placed into the list object in that order.\nWhen a comprehension is supplied, the list is constructed from the\nelements resulting from the comprehension.\n',
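The comparison, chaining, and membership rules described in the help topics above reduce to a short interactive sketch; the concrete values below are illustrative only and follow directly from the semantics quoted there:

    >>> 1 < 2 <= 3                      # chained: "1 < 2 and 2 <= 3", with 2 evaluated once
    True
    >>> float('NaN') == float('NaN')    # NaN is never equal to anything, itself included
    False
    >>> [1, 2, 0] <= [1, 2, 1]          # sequences order by their first differing element
    True
    >>> 'b' in 'abc'                    # for str/bytes, "in" is a substring test
    True
    >>> 2 in {1, 2, 3}                  # membership: any(2 is e or 2 == e for e in {1, 2, 3})
    True
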
diff --git a/Lib/re.py b/Lib/re.py
index bc3056f..788fa6b 100644
--- a/Lib/re.py
+++ b/Lib/re.py
@@ -351,10 +351,11 @@
         s = sre_parse.Pattern()
         s.flags = flags
         for phrase, action in lexicon:
+            gid = s.opengroup()
             p.append(sre_parse.SubPattern(s, [
-                (SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))),
+                (SUBPATTERN, (gid, sre_parse.parse(phrase, flags))),
                 ]))
-        s.groups = len(p)+1
+            s.closegroup(gid, p[-1])
         p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
         self.scanner = sre_compile.compile(p)
     def scan(self, string):
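For context, the (undocumented) re.Scanner class changed above is driven by a lexicon of (pattern, action) pairs; each phrase becomes its own capturing group, which the hunk now registers through opengroup()/closegroup() instead of assigning s.groups directly. A minimal sketch, with token patterns and actions invented for illustration:

    import re

    scanner = re.Scanner([
        (r"\d+",    lambda s, tok: ("NUM", int(tok))),
        (r"[a-z]+", lambda s, tok: ("NAME", tok)),
        (r"\s+",    None),                      # skip whitespace
    ])
    tokens, remainder = scanner.scan("foo 42 bar")
    # tokens == [("NAME", "foo"), ("NUM", 42), ("NAME", "bar")]; remainder == ""
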
diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py
index 1a7d316..98afd7c 100644
--- a/Lib/sre_parse.py
+++ b/Lib/sre_parse.py
@@ -68,12 +68,15 @@
     # master pattern object.  keeps track of global attributes
     def __init__(self):
         self.flags = 0
-        self.open = []
-        self.groups = 1
         self.groupdict = {}
+        self.subpatterns = [None]  # group 0
+        self.lookbehindgroups = None
+    @property
+    def groups(self):
+        return len(self.subpatterns)
     def opengroup(self, name=None):
         gid = self.groups
-        self.groups = gid + 1
+        self.subpatterns.append(None)
         if self.groups > MAXGROUPS:
             raise error("groups number is too large")
         if name is not None:
@@ -82,12 +85,19 @@
                 raise error("redefinition of group name %r as group %d; "
                             "was group %d" % (name, gid,  ogid))
             self.groupdict[name] = gid
-        self.open.append(gid)
         return gid
-    def closegroup(self, gid):
-        self.open.remove(gid)
+    def closegroup(self, gid, p):
+        self.subpatterns[gid] = p
     def checkgroup(self, gid):
-        return gid < self.groups and gid not in self.open
+        return gid < self.groups and self.subpatterns[gid] is not None
+
+    def checklookbehindgroup(self, gid, source):
+        if self.lookbehindgroups is not None:
+            if not self.checkgroup(gid):
+                raise source.error('cannot refer to an open group')
+            if gid >= self.lookbehindgroups:
+                raise source.error('cannot refer to group defined in the same '
+                                   'lookbehind subpattern')
 
 class SubPattern:
     # a subpattern, in intermediate form
@@ -183,7 +193,21 @@
             elif op in _UNITCODES:
                 lo = lo + 1
                 hi = hi + 1
-            elif op == SUCCESS:
+            elif op is GROUPREF:
+                i, j = self.pattern.subpatterns[av].getwidth()
+                lo = lo + i
+                hi = hi + j
+            elif op is GROUPREF_EXISTS:
+                i, j = av[1].getwidth()
+                if av[2] is not None:
+                    l, h = av[2].getwidth()
+                    i = min(i, l)
+                    j = max(j, h)
+                else:
+                    i = 0
+                lo = lo + i
+                hi = hi + j
+            elif op is SUCCESS:
                 break
         self.width = min(lo, MAXREPEAT - 1), min(hi, MAXREPEAT)
         return self.width
@@ -379,6 +403,7 @@
                 if not state.checkgroup(group):
                     raise source.error("cannot refer to open group",
                                        len(escape))
+                state.checklookbehindgroup(group, source)
                 return GROUPREF, group
             raise ValueError
         if len(escape) == 2:
@@ -641,6 +666,7 @@
                         if gid is None:
                             msg = "unknown group name: {0!r}".format(name)
                             raise source.error(msg, len(name) + 1)
+                        state.checklookbehindgroup(gid, source)
                         subpatternappend((GROUPREF, gid))
                         continue
                     else:
@@ -668,7 +694,13 @@
                         if char is None or char not in "=!":
                             raise source.error("syntax error")
                         dir = -1 # lookbehind
+                        lookbehindgroups = state.lookbehindgroups
+                        if lookbehindgroups is None:
+                            state.lookbehindgroups = state.groups
                     p = _parse_sub(source, state)
+                    if dir < 0:
+                        if lookbehindgroups is None:
+                            state.lookbehindgroups = None
                     if not sourcematch(")"):
                         raise source.error("unbalanced parenthesis")
                     if char == "=":
@@ -701,6 +733,7 @@
                         if condgroup >= MAXGROUPS:
                             raise source.error("the group number is too large",
                                                len(condname) + 1)
+                    state.checklookbehindgroup(condgroup, source)
                 elif char in FLAGS:
                     # flags
                     state.flags |= FLAGS[char]
@@ -726,7 +759,7 @@
                 if not sourcematch(")"):
                     raise source.error("unbalanced parenthesis")
                 if group is not None:
-                    state.closegroup(group)
+                    state.closegroup(group, p)
                 subpatternappend((SUBPATTERN, (group, p)))
             else:
                 while True:
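A rough sketch of the restriction the new checklookbehindgroup() calls enforce; the patterns are illustrative and the exact error wording may differ from what the comments suggest:

    import re

    re.compile(r"(abc)(?<=\1)")    # accepted: \1 refers to a group closed before the lookbehind,
                                   # and its width (3, 3) is fixed, so the lookbehind width is known
    re.compile(r"(?<=(abc)\1)x")   # rejected: \1 refers to a group defined inside the same
                                   # lookbehind subpattern, so sre_parse raises an error
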
diff --git a/Lib/ssl.py b/Lib/ssl.py
index 807e9f2..18730cb 100644
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -87,6 +87,7 @@
 ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY
 """
 
+import ipaddress
 import textwrap
 import re
 import sys
@@ -103,8 +104,6 @@
     SSLSyscallError, SSLEOFError,
     )
 from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
-from _ssl import (VERIFY_DEFAULT, VERIFY_CRL_CHECK_LEAF, VERIFY_CRL_CHECK_CHAIN,
-    VERIFY_X509_STRICT)
 from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
 from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
 try:
@@ -121,6 +120,7 @@
 _import_symbols('OP_')
 _import_symbols('ALERT_DESCRIPTION_')
 _import_symbols('SSL_ERROR_')
+_import_symbols('VERIFY_')
 
 from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN
 
@@ -163,14 +163,12 @@
 #   * Prefer any AES-GCM over any AES-CBC for better performance and security
 #   * Then Use HIGH cipher suites as a fallback
 #   * Then Use 3DES as fallback which is secure but slow
-#   * Finally use RC4 as a fallback which is problematic but needed for
-#     compatibility some times.
 #   * Disable NULL authentication, NULL encryption, and MD5 MACs for security
 #     reasons
 _DEFAULT_CIPHERS = (
     'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
-    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:'
-    'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5'
+    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
+    '!eNULL:!MD5'
 )
 
 # Restricted and more secure ciphers for the server side
@@ -242,6 +240,17 @@
     return pat.match(hostname)
 
 
+def _ipaddress_match(ipname, host_ip):
+    """Exact matching of IP addresses.
+
+    RFC 6125 explicitly doesn't define an algorithm for this
+    (section 1.7.2 - "Out of Scope").
+    """
+    # OpenSSL may add a trailing newline to a subjectAltName's IP address
+    ip = ipaddress.ip_address(ipname.rstrip())
+    return ip == host_ip
+
+
 def match_hostname(cert, hostname):
     """Verify that *cert* (in decoded format as returned by
     SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
@@ -254,11 +263,20 @@
         raise ValueError("empty or no certificate, match_hostname needs a "
                          "SSL socket or SSL context with either "
                          "CERT_OPTIONAL or CERT_REQUIRED")
+    try:
+        host_ip = ipaddress.ip_address(hostname)
+    except ValueError:
+        # Not an IP address (common case)
+        host_ip = None
     dnsnames = []
     san = cert.get('subjectAltName', ())
     for key, value in san:
         if key == 'DNS':
-            if _dnsname_match(value, hostname):
+            if host_ip is None and _dnsname_match(value, hostname):
+                return
+            dnsnames.append(value)
+        elif key == 'IP Address':
+            if host_ip is not None and _ipaddress_match(value, host_ip):
                 return
             dnsnames.append(value)
     if not dnsnames:
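To illustrate the new 'IP Address' subjectAltName handling in match_hostname(), a hedged sketch; the certificate dictionary is fabricated and uses a TEST-NET address:

    import ssl

    cert = {'subjectAltName': (('DNS', 'example.com'),
                               ('IP Address', '192.0.2.7'))}
    ssl.match_hostname(cert, 'example.com')   # matches the DNS entry, as before
    ssl.match_hostname(cert, '192.0.2.7')     # now matches the IP Address entry exactly
    ssl.match_hostname(cert, '192.0.2.8')     # still raises ssl.CertificateError
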
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index 25ffeff..6d2c4f5 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -892,10 +892,12 @@
             self.stdout.close()
         if self.stderr:
             self.stderr.close()
-        if self.stdin:
-            self.stdin.close()
-        # Wait for the process to terminate, to avoid zombies.
-        self.wait()
+        try:  # Flushing a BufferedWriter may raise an error
+            if self.stdin:
+                self.stdin.close()
+        finally:
+            # Wait for the process to terminate, to avoid zombies.
+            self.wait()
 
     def __del__(self, _maxsize=sys.maxsize):
         if not self._child_created:
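The try/finally added above mirrors the pattern user code needs when closing a child's stdin by hand; a minimal sketch (the command line is illustrative):

    import subprocess, sys

    proc = subprocess.Popen([sys.executable, '-c', 'import sys; sys.stdin.read()'],
                            stdin=subprocess.PIPE)
    proc.stdin.write(b'hello')
    try:
        proc.stdin.close()   # flushing the BufferedWriter may raise, e.g. BrokenPipeError
    finally:
        proc.wait()          # always reap the child so it cannot become a zombie
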
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index c5f541b..137932e 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -57,7 +57,7 @@
         'purelib': '{userbase}/Python{py_version_nodot}/site-packages',
         'platlib': '{userbase}/Python{py_version_nodot}/site-packages',
         'include': '{userbase}/Python{py_version_nodot}/Include',
-        'scripts': '{userbase}/Scripts',
+        'scripts': '{userbase}/Python{py_version_nodot}/Scripts',
         'data': '{userbase}',
         },
     'posix_user': {
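The effect of the nt_user change can be inspected with sysconfig.get_path(); the example paths below are illustrative of the layout before and after, not output captured from a real system:

    import sysconfig

    sysconfig.get_path('scripts', scheme='nt_user')
    # before: ...\AppData\Roaming\Python\Scripts
    # after:  ...\AppData\Roaming\Python\Python35\Scripts   (versioned, like purelib/platlib)
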
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 4b4e0d3..ea7a89a 100755
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -1409,9 +1409,9 @@
            can be determined, `mode' is overridden by `fileobj's mode.
            `fileobj' is not closed, when TarFile is closed.
         """
-        modes = {"r": "rb", "a": "r+b", "w": "wb"}
+        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
         if mode not in modes:
-            raise ValueError("mode must be 'r', 'a' or 'w'")
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
         self.mode = mode
         self._mode = modes[mode]
 
@@ -1524,6 +1524,15 @@
            'w:bz2'      open for writing with bzip2 compression
            'w:xz'       open for writing with lzma compression
 
+           'x' or 'x:'  create a tarfile exclusively without compression, raise
+                        an exception if the file is already created
+           'x:gz'       create a gzip compressed tarfile, raise an exception
+                        if the file is already created
+           'x:bz2'      create a bzip2 compressed tarfile, raise an exception
+                        if the file is already created
+           'x:xz'       create an lzma compressed tarfile, raise an exception
+                        if the file is already created
+
            'r|*'        open a stream of tar blocks with transparent compression
            'r|'         open an uncompressed stream of tar blocks for reading
            'r|gz'       open a gzip compressed stream of tar blocks
@@ -1582,7 +1591,7 @@
             t._extfileobj = False
             return t
 
-        elif mode in ("a", "w"):
+        elif mode in ("a", "w", "x"):
             return cls.taropen(name, mode, fileobj, **kwargs)
 
         raise ValueError("undiscernible mode")
@@ -1591,8 +1600,8 @@
     def taropen(cls, name, mode="r", fileobj=None, **kwargs):
         """Open uncompressed tar archive name for reading or writing.
         """
-        if mode not in ("r", "a", "w"):
-            raise ValueError("mode must be 'r', 'a' or 'w'")
+        if mode not in ("r", "a", "w", "x"):
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
         return cls(name, mode, fileobj, **kwargs)
 
     @classmethod
@@ -1600,8 +1609,8 @@
         """Open gzip compressed tar archive name for reading or writing.
            Appending is not allowed.
         """
-        if mode not in ("r", "w"):
-            raise ValueError("mode must be 'r' or 'w'")
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
 
         try:
             import gzip
@@ -1634,8 +1643,8 @@
         """Open bzip2 compressed tar archive name for reading or writing.
            Appending is not allowed.
         """
-        if mode not in ("r", "w"):
-            raise ValueError("mode must be 'r' or 'w'.")
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
 
         try:
             import bz2
@@ -1663,8 +1672,8 @@
         """Open lzma compressed tar archive name for reading or writing.
            Appending is not allowed.
         """
-        if mode not in ("r", "w"):
-            raise ValueError("mode must be 'r' or 'w'")
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
 
         try:
             import lzma
@@ -1751,7 +1760,7 @@
            addfile(). If given, `arcname' specifies an alternative name for the
            file in the archive.
         """
-        self._check("aw")
+        self._check("awx")
 
         # When fileobj is given, replace name by
         # fileobj's real name.
@@ -1885,7 +1894,7 @@
            TarInfo object, if it returns None the TarInfo object will be
            excluded from the archive.
         """
-        self._check("aw")
+        self._check("awx")
 
         if arcname is None:
             arcname = name
@@ -1942,7 +1951,7 @@
            On Windows platforms, `fileobj' should always be opened with mode
            'rb' to avoid irritation about the file size.
         """
-        self._check("aw")
+        self._check("awx")
 
         tarinfo = copy.copy(tarinfo)
 
@@ -2494,16 +2503,16 @@
         _, ext = os.path.splitext(tar_name)
         compressions = {
             # gz
-            'gz': 'gz',
-            'tgz': 'gz',
+            '.gz': 'gz',
+            '.tgz': 'gz',
             # xz
-            'xz': 'xz',
-            'txz': 'xz',
+            '.xz': 'xz',
+            '.txz': 'xz',
             # bz2
-            'bz2': 'bz2',
-            'tbz': 'bz2',
-            'tbz2': 'bz2',
-            'tb2': 'bz2',
+            '.bz2': 'bz2',
+            '.tbz': 'bz2',
+            '.tbz2': 'bz2',
+            '.tb2': 'bz2',
         }
         tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
         tar_files = args.create
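The new exclusive-creation modes behave like open()'s 'x' mode; a short sketch with illustrative file names:

    import tarfile

    with tarfile.open('backup.tar.gz', 'x:gz') as tar:   # creates the archive
        tar.add('data.txt')

    tarfile.open('backup.tar.gz', 'x:gz')   # a second attempt raises FileExistsError
                                            # instead of silently overwriting
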
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
index 52598b3..4520c6a 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
@@ -713,6 +713,27 @@
         for p in workers:
             p.join()
 
+    def test_no_import_lock_contention(self):
+        with test.support.temp_cwd():
+            module_name = 'imported_by_an_imported_module'
+            with open(module_name + '.py', 'w') as f:
+                f.write("""if 1:
+                    import multiprocessing
+
+                    q = multiprocessing.Queue()
+                    q.put('knock knock')
+                    q.get(timeout=3)
+                    q.close()
+                    del q
+                """)
+
+            with test.support.DirsOnSysPath(os.getcwd()):
+                try:
+                    __import__(module_name)
+                except pyqueue.Empty:
+                    self.fail("Probable regression on import lock contention;"
+                              " see Issue #22853")
+
     def test_timeout(self):
         q = multiprocessing.Queue()
         start = time.time()
@@ -2020,6 +2041,12 @@
 class _TestRemoteManager(BaseTestCase):
 
     ALLOWED_TYPES = ('manager',)
+    values = ['hello world', None, True, 2.25,
+              'hall\xe5 v\xe4rlden',
+              '\u043f\u0440\u0438\u0432\u0456\u0442 \u0441\u0432\u0456\u0442',
+              b'hall\xe5 v\xe4rlden',
+             ]
+    result = values[:]
 
     @classmethod
     def _putter(cls, address, authkey):
@@ -2028,7 +2055,8 @@
             )
         manager.connect()
         queue = manager.get_queue()
-        queue.put(('hello world', None, True, 2.25))
+        # Note that xmlrpclib will deserialize object as a list not a tuple
+        queue.put(tuple(cls.values))
 
     def test_remote(self):
         authkey = os.urandom(32)
@@ -2048,8 +2076,7 @@
         manager2.connect()
         queue = manager2.get_queue()
 
-        # Note that xmlrpclib will deserialize object as a list not a tuple
-        self.assertEqual(queue.get(), ['hello world', None, True, 2.25])
+        self.assertEqual(queue.get(), self.result)
 
         # Because we are using xmlrpclib for serialization instead of
         # pickle this will cause a serialization error.
@@ -3405,12 +3432,12 @@
         name = os.path.join(os.path.dirname(__file__), 'mp_fork_bomb.py')
         if sm != 'fork':
             rc, out, err = test.script_helper.assert_python_failure(name, sm)
-            self.assertEqual('', out.decode('ascii'))
-            self.assertIn('RuntimeError', err.decode('ascii'))
+            self.assertEqual(out, b'')
+            self.assertIn(b'RuntimeError', err)
         else:
             rc, out, err = test.script_helper.assert_python_ok(name, sm)
-            self.assertEqual('123', out.decode('ascii').rstrip())
-            self.assertEqual('', err.decode('ascii'))
+            self.assertEqual(out.rstrip(), b'123')
+            self.assertEqual(err, b'')
 
 #
 # Issue #17555: ForkAwareThreadLock
diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py
index 7935cf2..40ef1ba 100644
--- a/Lib/test/datetimetester.py
+++ b/Lib/test/datetimetester.py
@@ -62,6 +62,33 @@
                        'tzinfo'])
         self.assertEqual(names - allowed, set([]))
 
+    def test_divide_and_round(self):
+        if '_Fast' in str(self):
+            return
+        dar = datetime_module._divide_and_round
+
+        self.assertEqual(dar(-10, -3), 3)
+        self.assertEqual(dar(5, -2), -2)
+
+        # four cases: (2 signs of a) x (2 signs of b)
+        self.assertEqual(dar(7, 3), 2)
+        self.assertEqual(dar(-7, 3), -2)
+        self.assertEqual(dar(7, -3), -2)
+        self.assertEqual(dar(-7, -3), 2)
+
+        # ties to even - eight cases:
+        # (2 signs of a) x (2 signs of b) x (even / odd quotient)
+        self.assertEqual(dar(10, 4), 2)
+        self.assertEqual(dar(-10, 4), -2)
+        self.assertEqual(dar(10, -4), -2)
+        self.assertEqual(dar(-10, -4), 2)
+
+        self.assertEqual(dar(6, 4), 2)
+        self.assertEqual(dar(-6, 4), -2)
+        self.assertEqual(dar(6, -4), -2)
+        self.assertEqual(dar(-6, -4), 2)
+
+
 #############################################################################
 # tzinfo tests
 
@@ -394,6 +421,10 @@
         eq((-3*us) * 0.5, -2*us)
         eq((-5*us) * 0.5, -2*us)
 
+        # Issue #23521
+        eq(td(seconds=1) * 0.123456, td(microseconds=123456))
+        eq(td(seconds=1) * 0.6112295, td(microseconds=611229))
+
         # Division by int and float
         eq((3*us) / 2, 2*us)
         eq((5*us) / 2, 2*us)
@@ -408,6 +439,9 @@
         for i in range(-10, 10):
             eq((i*us/-3)//us, round(i/-3))
 
+        # Issue #23521
+        eq(td(seconds=1) / (1 / 0.6112295), td(microseconds=611229))
+
         # Issue #11576
         eq(td(999999999, 86399, 999999) - td(999999999, 86399, 999998),
            td(0, 0, 1))
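The _divide_and_round() helper exercised above rounds half-way cases to the nearest even result, which is what the timedelta-times-float assertions rely on; a sketch mirroring the issue #23521 test values:

    from datetime import timedelta

    timedelta(seconds=1) * 0.6112295   # -> timedelta(microseconds=611229)
    timedelta(seconds=1) * 0.123456    # -> timedelta(microseconds=123456)
    # Ties go to the even quotient, e.g. _divide_and_round(10, 4) == 2 and
    # _divide_and_round(-10, 4) == -2, as asserted in test_divide_and_round() above.
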
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index 5cd08d9..f8372e3 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -144,21 +144,22 @@
 # the object returned by create_data().
 
 DATA0 = (
-    b'(lp0\nL0L\naL1L\naF2.0\nac'
-    b'builtins\ncomplex\n'
-    b'p1\n(F3.0\nF0.0\ntp2\nRp'
-    b'3\naL1L\naL-1L\naL255L\naL-'
-    b'255L\naL-256L\naL65535L\na'
-    b'L-65535L\naL-65536L\naL2'
-    b'147483647L\naL-2147483'
-    b'647L\naL-2147483648L\na('
-    b'Vabc\np4\ng4\nccopyreg'
-    b'\n_reconstructor\np5\n('
-    b'c__main__\nC\np6\ncbu'
-    b'iltins\nobject\np7\nNt'
-    b'p8\nRp9\n(dp10\nVfoo\np1'
-    b'1\nL1L\nsVbar\np12\nL2L\nsb'
-    b'g9\ntp13\nag13\naL5L\na.'
+    b'(lp0\nL0L\naL1L\naF2.0\n'
+    b'ac__builtin__\ncomple'
+    b'x\np1\n(F3.0\nF0.0\ntp2\n'
+    b'Rp3\naL1L\naL-1L\naL255'
+    b'L\naL-255L\naL-256L\naL'
+    b'65535L\naL-65535L\naL-'
+    b'65536L\naL2147483647L'
+    b'\naL-2147483647L\naL-2'
+    b'147483648L\na(Vabc\np4'
+    b'\ng4\nccopy_reg\n_recon'
+    b'structor\np5\n(c__main'
+    b'__\nC\np6\nc__builtin__'
+    b'\nobject\np7\nNtp8\nRp9\n'
+    b'(dp10\nVfoo\np11\nL1L\ns'
+    b'Vbar\np12\nL2L\nsbg9\ntp'
+    b'13\nag13\naL5L\na.'
 )
 
 # Disassembly of DATA0
@@ -172,88 +173,88 @@
    14: a    APPEND
    15: F    FLOAT      2.0
    20: a    APPEND
-   21: c    GLOBAL     'builtins complex'
-   39: p    PUT        1
-   42: (    MARK
-   43: F        FLOAT      3.0
-   48: F        FLOAT      0.0
-   53: t        TUPLE      (MARK at 42)
-   54: p    PUT        2
-   57: R    REDUCE
-   58: p    PUT        3
-   61: a    APPEND
-   62: L    LONG       1
-   66: a    APPEND
-   67: L    LONG       -1
-   72: a    APPEND
-   73: L    LONG       255
-   79: a    APPEND
-   80: L    LONG       -255
-   87: a    APPEND
-   88: L    LONG       -256
-   95: a    APPEND
-   96: L    LONG       65535
-  104: a    APPEND
-  105: L    LONG       -65535
-  114: a    APPEND
-  115: L    LONG       -65536
-  124: a    APPEND
-  125: L    LONG       2147483647
-  138: a    APPEND
-  139: L    LONG       -2147483647
-  153: a    APPEND
-  154: L    LONG       -2147483648
-  168: a    APPEND
-  169: (    MARK
-  170: V        UNICODE    'abc'
-  175: p        PUT        4
-  178: g        GET        4
-  181: c        GLOBAL     'copyreg _reconstructor'
-  205: p        PUT        5
-  208: (        MARK
-  209: c            GLOBAL     '__main__ C'
-  221: p            PUT        6
-  224: c            GLOBAL     'builtins object'
-  241: p            PUT        7
-  244: N            NONE
-  245: t            TUPLE      (MARK at 208)
-  246: p        PUT        8
-  249: R        REDUCE
-  250: p        PUT        9
-  253: (        MARK
-  254: d            DICT       (MARK at 253)
-  255: p        PUT        10
-  259: V        UNICODE    'foo'
-  264: p        PUT        11
-  268: L        LONG       1
-  272: s        SETITEM
-  273: V        UNICODE    'bar'
-  278: p        PUT        12
-  282: L        LONG       2
-  286: s        SETITEM
-  287: b        BUILD
-  288: g        GET        9
-  291: t        TUPLE      (MARK at 169)
-  292: p    PUT        13
-  296: a    APPEND
-  297: g    GET        13
-  301: a    APPEND
-  302: L    LONG       5
-  306: a    APPEND
-  307: .    STOP
+   21: c    GLOBAL     '__builtin__ complex'
+   42: p    PUT        1
+   45: (    MARK
+   46: F        FLOAT      3.0
+   51: F        FLOAT      0.0
+   56: t        TUPLE      (MARK at 45)
+   57: p    PUT        2
+   60: R    REDUCE
+   61: p    PUT        3
+   64: a    APPEND
+   65: L    LONG       1
+   69: a    APPEND
+   70: L    LONG       -1
+   75: a    APPEND
+   76: L    LONG       255
+   82: a    APPEND
+   83: L    LONG       -255
+   90: a    APPEND
+   91: L    LONG       -256
+   98: a    APPEND
+   99: L    LONG       65535
+  107: a    APPEND
+  108: L    LONG       -65535
+  117: a    APPEND
+  118: L    LONG       -65536
+  127: a    APPEND
+  128: L    LONG       2147483647
+  141: a    APPEND
+  142: L    LONG       -2147483647
+  156: a    APPEND
+  157: L    LONG       -2147483648
+  171: a    APPEND
+  172: (    MARK
+  173: V        UNICODE    'abc'
+  178: p        PUT        4
+  181: g        GET        4
+  184: c        GLOBAL     'copy_reg _reconstructor'
+  209: p        PUT        5
+  212: (        MARK
+  213: c            GLOBAL     '__main__ C'
+  225: p            PUT        6
+  228: c            GLOBAL     '__builtin__ object'
+  248: p            PUT        7
+  251: N            NONE
+  252: t            TUPLE      (MARK at 212)
+  253: p        PUT        8
+  256: R        REDUCE
+  257: p        PUT        9
+  260: (        MARK
+  261: d            DICT       (MARK at 260)
+  262: p        PUT        10
+  266: V        UNICODE    'foo'
+  271: p        PUT        11
+  275: L        LONG       1
+  279: s        SETITEM
+  280: V        UNICODE    'bar'
+  285: p        PUT        12
+  289: L        LONG       2
+  293: s        SETITEM
+  294: b        BUILD
+  295: g        GET        9
+  298: t        TUPLE      (MARK at 172)
+  299: p    PUT        13
+  303: a    APPEND
+  304: g    GET        13
+  308: a    APPEND
+  309: L    LONG       5
+  313: a    APPEND
+  314: .    STOP
 highest protocol among opcodes = 0
 """
 
 DATA1 = (
-    b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
-    b'builtins\ncomplex\nq\x01'
+    b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c__'
+    b'builtin__\ncomplex\nq\x01'
     b'(G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00t'
     b'q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xffJ'
     b'\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff'
     b'\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00\x80(X\x03\x00\x00\x00ab'
-    b'cq\x04h\x04ccopyreg\n_reco'
+    b'cq\x04h\x04ccopy_reg\n_reco'
     b'nstructor\nq\x05(c__main'
-    b'__\nC\nq\x06cbuiltins\n'
+    b'__\nC\nq\x06c__builtin__\n'
     b'object\nq\x07Ntq\x08Rq\t}q\n('
     b'X\x03\x00\x00\x00fooq\x0bK\x01X\x03\x00\x00\x00bar'
     b'q\x0cK\x02ubh\ttq\rh\rK\x05e.'
@@ -267,66 +268,66 @@
     4: K        BININT1    0
     6: K        BININT1    1
     8: G        BINFLOAT   2.0
-   17: c        GLOBAL     'builtins complex'
-   35: q        BINPUT     1
-   37: (        MARK
-   38: G            BINFLOAT   3.0
-   47: G            BINFLOAT   0.0
-   56: t            TUPLE      (MARK at 37)
-   57: q        BINPUT     2
-   59: R        REDUCE
-   60: q        BINPUT     3
-   62: K        BININT1    1
-   64: J        BININT     -1
-   69: K        BININT1    255
-   71: J        BININT     -255
-   76: J        BININT     -256
-   81: M        BININT2    65535
-   84: J        BININT     -65535
-   89: J        BININT     -65536
-   94: J        BININT     2147483647
-   99: J        BININT     -2147483647
-  104: J        BININT     -2147483648
-  109: (        MARK
-  110: X            BINUNICODE 'abc'
-  118: q            BINPUT     4
-  120: h            BINGET     4
-  122: c            GLOBAL     'copyreg _reconstructor'
-  146: q            BINPUT     5
-  148: (            MARK
-  149: c                GLOBAL     '__main__ C'
-  161: q                BINPUT     6
-  163: c                GLOBAL     'builtins object'
-  180: q                BINPUT     7
-  182: N                NONE
-  183: t                TUPLE      (MARK at 148)
-  184: q            BINPUT     8
-  186: R            REDUCE
-  187: q            BINPUT     9
-  189: }            EMPTY_DICT
-  190: q            BINPUT     10
-  192: (            MARK
-  193: X                BINUNICODE 'foo'
-  201: q                BINPUT     11
-  203: K                BININT1    1
-  205: X                BINUNICODE 'bar'
-  213: q                BINPUT     12
-  215: K                BININT1    2
-  217: u                SETITEMS   (MARK at 192)
-  218: b            BUILD
-  219: h            BINGET     9
-  221: t            TUPLE      (MARK at 109)
-  222: q        BINPUT     13
-  224: h        BINGET     13
-  226: K        BININT1    5
-  228: e        APPENDS    (MARK at 3)
-  229: .    STOP
+   17: c        GLOBAL     '__builtin__ complex'
+   38: q        BINPUT     1
+   40: (        MARK
+   41: G            BINFLOAT   3.0
+   50: G            BINFLOAT   0.0
+   59: t            TUPLE      (MARK at 40)
+   60: q        BINPUT     2
+   62: R        REDUCE
+   63: q        BINPUT     3
+   65: K        BININT1    1
+   67: J        BININT     -1
+   72: K        BININT1    255
+   74: J        BININT     -255
+   79: J        BININT     -256
+   84: M        BININT2    65535
+   87: J        BININT     -65535
+   92: J        BININT     -65536
+   97: J        BININT     2147483647
+  102: J        BININT     -2147483647
+  107: J        BININT     -2147483648
+  112: (        MARK
+  113: X            BINUNICODE 'abc'
+  121: q            BINPUT     4
+  123: h            BINGET     4
+  125: c            GLOBAL     'copy_reg _reconstructor'
+  150: q            BINPUT     5
+  152: (            MARK
+  153: c                GLOBAL     '__main__ C'
+  165: q                BINPUT     6
+  167: c                GLOBAL     '__builtin__ object'
+  187: q                BINPUT     7
+  189: N                NONE
+  190: t                TUPLE      (MARK at 152)
+  191: q            BINPUT     8
+  193: R            REDUCE
+  194: q            BINPUT     9
+  196: }            EMPTY_DICT
+  197: q            BINPUT     10
+  199: (            MARK
+  200: X                BINUNICODE 'foo'
+  208: q                BINPUT     11
+  210: K                BININT1    1
+  212: X                BINUNICODE 'bar'
+  220: q                BINPUT     12
+  222: K                BININT1    2
+  224: u                SETITEMS   (MARK at 199)
+  225: b            BUILD
+  226: h            BINGET     9
+  228: t            TUPLE      (MARK at 112)
+  229: q        BINPUT     13
+  231: h        BINGET     13
+  233: K        BININT1    5
+  235: e        APPENDS    (MARK at 3)
+  236: .    STOP
 highest protocol among opcodes = 1
 """
 
 DATA2 = (
     b'\x80\x02]q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
-    b'builtins\ncomplex\n'
+    b'__builtin__\ncomplex\n'
     b'q\x01G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00'
     b'\x86q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xff'
     b'J\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff'
@@ -346,52 +347,52 @@
     6: K        BININT1    0
     8: K        BININT1    1
    10: G        BINFLOAT   2.0
-   19: c        GLOBAL     'builtins complex'
-   37: q        BINPUT     1
-   39: G        BINFLOAT   3.0
-   48: G        BINFLOAT   0.0
-   57: \x86     TUPLE2
-   58: q        BINPUT     2
-   60: R        REDUCE
-   61: q        BINPUT     3
-   63: K        BININT1    1
-   65: J        BININT     -1
-   70: K        BININT1    255
-   72: J        BININT     -255
-   77: J        BININT     -256
-   82: M        BININT2    65535
-   85: J        BININT     -65535
-   90: J        BININT     -65536
-   95: J        BININT     2147483647
-  100: J        BININT     -2147483647
-  105: J        BININT     -2147483648
-  110: (        MARK
-  111: X            BINUNICODE 'abc'
-  119: q            BINPUT     4
-  121: h            BINGET     4
-  123: c            GLOBAL     '__main__ C'
-  135: q            BINPUT     5
-  137: )            EMPTY_TUPLE
-  138: \x81         NEWOBJ
-  139: q            BINPUT     6
-  141: }            EMPTY_DICT
-  142: q            BINPUT     7
-  144: (            MARK
-  145: X                BINUNICODE 'foo'
-  153: q                BINPUT     8
-  155: K                BININT1    1
-  157: X                BINUNICODE 'bar'
-  165: q                BINPUT     9
-  167: K                BININT1    2
-  169: u                SETITEMS   (MARK at 144)
-  170: b            BUILD
-  171: h            BINGET     6
-  173: t            TUPLE      (MARK at 110)
-  174: q        BINPUT     10
-  176: h        BINGET     10
-  178: K        BININT1    5
-  180: e        APPENDS    (MARK at 5)
-  181: .    STOP
+   19: c        GLOBAL     '__builtin__ complex'
+   40: q        BINPUT     1
+   42: G        BINFLOAT   3.0
+   51: G        BINFLOAT   0.0
+   60: \x86     TUPLE2
+   61: q        BINPUT     2
+   63: R        REDUCE
+   64: q        BINPUT     3
+   66: K        BININT1    1
+   68: J        BININT     -1
+   73: K        BININT1    255
+   75: J        BININT     -255
+   80: J        BININT     -256
+   85: M        BININT2    65535
+   88: J        BININT     -65535
+   93: J        BININT     -65536
+   98: J        BININT     2147483647
+  103: J        BININT     -2147483647
+  108: J        BININT     -2147483648
+  113: (        MARK
+  114: X            BINUNICODE 'abc'
+  122: q            BINPUT     4
+  124: h            BINGET     4
+  126: c            GLOBAL     '__main__ C'
+  138: q            BINPUT     5
+  140: )            EMPTY_TUPLE
+  141: \x81         NEWOBJ
+  142: q            BINPUT     6
+  144: }            EMPTY_DICT
+  145: q            BINPUT     7
+  147: (            MARK
+  148: X                BINUNICODE 'foo'
+  156: q                BINPUT     8
+  158: K                BININT1    1
+  160: X                BINUNICODE 'bar'
+  168: q                BINPUT     9
+  170: K                BININT1    2
+  172: u                SETITEMS   (MARK at 147)
+  173: b            BUILD
+  174: h            BINGET     6
+  176: t            TUPLE      (MARK at 113)
+  177: q        BINPUT     10
+  179: h        BINGET     10
+  181: K        BININT1    5
+  183: e        APPENDS    (MARK at 5)
+  184: .    STOP
 highest protocol among opcodes = 2
 """
 
@@ -570,14 +571,14 @@
             xname = X.__name__.encode('ascii')
             # Protocol 0 (text mode pickle):
             """
-            0: (    MARK
-            1: i        INST       '__main__ X' (MARK at 0)
-            15: p    PUT        0
-            18: (    MARK
-            19: d        DICT       (MARK at 18)
-            20: p    PUT        1
-            23: b    BUILD
-            24: .    STOP
+             0: (    MARK
+             1: i        INST       '__main__ X' (MARK at 0)
+            13: p    PUT        0
+            16: (    MARK
+            17: d        DICT       (MARK at 16)
+            18: p    PUT        1
+            21: b    BUILD
+            22: .    STOP
             """
             pickle0 = (b"(i__main__\n"
                        b"X\n"
@@ -587,15 +588,15 @@
 
             # Protocol 1 (binary mode pickle)
             """
-            0: (    MARK
-            1: c        GLOBAL     '__main__ X'
-            15: q        BINPUT     0
-            17: o        OBJ        (MARK at 0)
-            18: q    BINPUT     1
-            20: }    EMPTY_DICT
-            21: q    BINPUT     2
-            23: b    BUILD
-            24: .    STOP
+             0: (    MARK
+             1: c        GLOBAL     '__main__ X'
+            13: q        BINPUT     0
+            15: o        OBJ        (MARK at 0)
+            16: q    BINPUT     1
+            18: }    EMPTY_DICT
+            19: q    BINPUT     2
+            21: b    BUILD
+            22: .    STOP
             """
             pickle1 = (b'(c__main__\n'
                        b'X\n'
@@ -604,16 +605,16 @@
 
             # Protocol 2 (pickle2 = b'\x80\x02' + pickle1)
             """
-            0: \x80 PROTO      2
-            2: (    MARK
-            3: c        GLOBAL     '__main__ X'
-            17: q        BINPUT     0
-            19: o        OBJ        (MARK at 2)
-            20: q    BINPUT     1
-            22: }    EMPTY_DICT
-            23: q    BINPUT     2
-            25: b    BUILD
-            26: .    STOP
+             0: \x80 PROTO      2
+             2: (    MARK
+             3: c        GLOBAL     '__main__ X'
+            15: q        BINPUT     0
+            17: o        OBJ        (MARK at 2)
+            18: q    BINPUT     1
+            20: }    EMPTY_DICT
+            21: q    BINPUT     2
+            23: b    BUILD
+            24: .    STOP
             """
             pickle2 = (b'\x80\x02(c__main__\n'
                        b'X\n'
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index 205c47c..7ef6980 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -1325,6 +1325,8 @@
         n = getattr(err, 'errno', None)
         if (isinstance(err, socket.timeout) or
             (isinstance(err, socket.gaierror) and n in gai_errnos) or
+            (isinstance(err, urllib.error.HTTPError) and
+             500 <= err.code <= 599) or
             (isinstance(err, urllib.error.URLError) and
              "ConnectionRefusedError" in err.reason) or
             n in captured_errnos):
@@ -2132,16 +2134,15 @@
 
 def fs_is_case_insensitive(directory):
     """Detects if the file system for the specified directory is case-insensitive."""
-    base_fp, base_path = tempfile.mkstemp(dir=directory)
-    case_path = base_path.upper()
-    if case_path == base_path:
-        case_path = base_path.lower()
-    try:
-        return os.path.samefile(base_path, case_path)
-    except FileNotFoundError:
-        return False
-    finally:
-        os.unlink(base_path)
+    with tempfile.NamedTemporaryFile(dir=directory) as base:
+        base_path = base.name
+        case_path = base_path.upper()
+        if case_path == base_path:
+            case_path = base_path.lower()
+        try:
+            return os.path.samefile(base_path, case_path)
+        except FileNotFoundError:
+            return False
 
 
 class SuppressCrashReport:
@@ -2151,6 +2152,7 @@
     disable the creation of coredump file.
     """
     old_value = None
+    old_modes = None
 
     def __enter__(self):
         """On Windows, disable Windows Error Reporting dialogs using
@@ -2168,6 +2170,26 @@
             SEM_NOGPFAULTERRORBOX = 0x02
             self.old_value = self._k32.SetErrorMode(SEM_NOGPFAULTERRORBOX)
             self._k32.SetErrorMode(self.old_value | SEM_NOGPFAULTERRORBOX)
+
+            # Suppress assert dialogs in debug builds
+            # (see http://bugs.python.org/issue23314)
+            try:
+                import msvcrt
+                msvcrt.CrtSetReportMode
+            except (AttributeError, ImportError):
+                # no msvcrt or a release build
+                pass
+            else:
+                self.old_modes = {}
+                for report_type in [msvcrt.CRT_WARN,
+                                    msvcrt.CRT_ERROR,
+                                    msvcrt.CRT_ASSERT]:
+                    old_mode = msvcrt.CrtSetReportMode(report_type,
+                            msvcrt.CRTDBG_MODE_FILE)
+                    old_file = msvcrt.CrtSetReportFile(report_type,
+                            msvcrt.CRTDBG_FILE_STDERR)
+                    self.old_modes[report_type] = old_mode, old_file
+
         else:
             if resource is not None:
                 try:
@@ -2199,6 +2221,12 @@
 
         if sys.platform.startswith('win'):
             self._k32.SetErrorMode(self.old_value)
+
+            if self.old_modes:
+                import msvcrt
+                for report_type, (old_mode, old_file) in self.old_modes.items():
+                    msvcrt.CrtSetReportMode(report_type, old_mode)
+                    msvcrt.CrtSetReportFile(report_type, old_file)
         else:
             if resource is not None:
                 try:
diff --git a/Lib/test/test__locale.py b/Lib/test/test__locale.py
index 4231f37..8d1c8db 100644
--- a/Lib/test/test__locale.py
+++ b/Lib/test/test__locale.py
@@ -9,7 +9,6 @@
 import sys
 import unittest
 from platform import uname
-from test.support import run_unittest
 
 if uname().system == "Darwin":
     maj, min, mic = [int(part) for part in uname().release.split(".")]
@@ -24,45 +23,52 @@
     'da_DK', 'nn_NO', 'cs_CZ', 'de_LU', 'es_BO', 'sq_AL', 'sk_SK', 'fr_CH',
     'de_DE', 'sr_YU', 'br_FR', 'nl_BE', 'sv_FI', 'pl_PL', 'fr_CA', 'fo_FO',
     'bs_BA', 'fr_LU', 'kl_GL', 'fa_IR', 'de_BE', 'sv_SE', 'it_CH', 'uk_UA',
-    'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'en_US',
+    'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'ps_AF', 'en_US',
     'es_ES.ISO8859-1', 'fr_FR.ISO8859-15', 'ru_RU.KOI8-R', 'ko_KR.eucKR']
 
-# Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to
-# workaround a mbstowcs() bug. For example, on Solaris, the hu_HU locale uses
-# the locale encoding ISO-8859-2, the thousauds separator is b'\xA0' and it is
-# decoded as U+30000020 (an invalid character) by mbstowcs().
-if sys.platform == 'sunos5':
-    old_locale = locale.setlocale(locale.LC_ALL)
-    try:
-        locales = []
-        for loc in candidate_locales:
-            try:
-                locale.setlocale(locale.LC_ALL, loc)
-            except Error:
-                continue
-            encoding = locale.getpreferredencoding(False)
-            try:
-                localeconv()
-            except Exception as err:
-                print("WARNING: Skip locale %s (encoding %s): [%s] %s"
-                      % (loc, encoding, type(err), err))
-            else:
-                locales.append(loc)
-        candidate_locales = locales
-    finally:
-        locale.setlocale(locale.LC_ALL, old_locale)
+def setUpModule():
+    global candidate_locales
+    # Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to
+    # workaround a mbstowcs() bug. For example, on Solaris, the hu_HU locale uses
+    # the locale encoding ISO-8859-2, the thousands separator is b'\xA0' and it is
+    # decoded as U+30000020 (an invalid character) by mbstowcs().
+    if sys.platform == 'sunos5':
+        old_locale = locale.setlocale(locale.LC_ALL)
+        try:
+            locales = []
+            for loc in candidate_locales:
+                try:
+                    locale.setlocale(locale.LC_ALL, loc)
+                except Error:
+                    continue
+                encoding = locale.getpreferredencoding(False)
+                try:
+                    localeconv()
+                except Exception as err:
+                    print("WARNING: Skip locale %s (encoding %s): [%s] %s"
+                        % (loc, encoding, type(err), err))
+                else:
+                    locales.append(loc)
+            candidate_locales = locales
+        finally:
+            locale.setlocale(locale.LC_ALL, old_locale)
 
-# Workaround for MSVC6(debug) crash bug
-if "MSC v.1200" in sys.version:
-    def accept(loc):
-        a = loc.split(".")
-        return not(len(a) == 2 and len(a[-1]) >= 9)
-    candidate_locales = [loc for loc in candidate_locales if accept(loc)]
+    # Workaround for MSVC6(debug) crash bug
+    if "MSC v.1200" in sys.version:
+        def accept(loc):
+            a = loc.split(".")
+            return not(len(a) == 2 and len(a[-1]) >= 9)
+        candidate_locales = [loc for loc in candidate_locales if accept(loc)]
 
 # List known locale values to test against when available.
 # Dict formatted as ``<locale> : (<decimal_point>, <thousands_sep>)``.  If a
 # value is not known, use '' .
-known_numerics = {'fr_FR' : (',', ''), 'en_US':('.', ',')}
+known_numerics = {
+    'en_US': ('.', ','),
+    'fr_FR' : (',', ' '),
+    'de_DE' : (',', '.'),
+    'ps_AF': ('\u066b', '\u066c'),
+}
 
 class _LocaleTests(unittest.TestCase):
 
@@ -91,10 +97,12 @@
                                     calc_value, known_value,
                                     calc_type, data_type, set_locale,
                                     used_locale))
+            return True
 
     @unittest.skipUnless(nl_langinfo, "nl_langinfo is not available")
     def test_lc_numeric_nl_langinfo(self):
         # Test nl_langinfo against known values
+        tested = False
         for loc in candidate_locales:
             try:
                 setlocale(LC_NUMERIC, loc)
@@ -103,10 +111,14 @@
                 continue
             for li, lc in ((RADIXCHAR, "decimal_point"),
                             (THOUSEP, "thousands_sep")):
-                self.numeric_tester('nl_langinfo', nl_langinfo(li), lc, loc)
+                if self.numeric_tester('nl_langinfo', nl_langinfo(li), lc, loc):
+                    tested = True
+        if not tested:
+            self.skipTest('no suitable locales')
 
     def test_lc_numeric_localeconv(self):
         # Test localeconv against known values
+        tested = False
         for loc in candidate_locales:
             try:
                 setlocale(LC_NUMERIC, loc)
@@ -116,11 +128,15 @@
             formatting = localeconv()
             for lc in ("decimal_point",
                         "thousands_sep"):
-                self.numeric_tester('localeconv', formatting[lc], lc, loc)
+                if self.numeric_tester('localeconv', formatting[lc], lc, loc):
+                    tested = True
+        if not tested:
+            self.skipTest('no suitable locales')
 
     @unittest.skipUnless(nl_langinfo, "nl_langinfo is not available")
     def test_lc_numeric_basic(self):
         # Test nl_langinfo against localeconv
+        tested = False
         for loc in candidate_locales:
             try:
                 setlocale(LC_NUMERIC, loc)
@@ -140,10 +156,14 @@
                                 "(set to %s, using %s)" % (
                                                 nl_radixchar, li_radixchar,
                                                 loc, set_locale))
+                tested = True
+        if not tested:
+            self.skipTest('no suitable locales')
 
     def test_float_parsing(self):
         # Bug #1391872: Test whether float parsing is okay on European
         # locales.
+        tested = False
         for loc in candidate_locales:
             try:
                 setlocale(LC_NUMERIC, loc)
@@ -162,9 +182,10 @@
             if localeconv()['decimal_point'] != '.':
                 self.assertRaises(ValueError, float,
                                   localeconv()['decimal_point'].join(['1', '23']))
+            tested = True
+        if not tested:
+            self.skipTest('no suitable locales')
 
-def test_main():
-    run_unittest(_LocaleTests)
 
 if __name__ == '__main__':
-    test_main()
+    unittest.main()
diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py
index a0b9162..d7f90cd 100644
--- a/Lib/test/test_argparse.py
+++ b/Lib/test/test_argparse.py
@@ -753,6 +753,39 @@
     ]
 
 
+class TestOptionalsAllowLongAbbreviation(ParserTestCase):
+    """Allow long options to be abbreviated unambiguously"""
+
+    argument_signatures = [
+        Sig('--foo'),
+        Sig('--foobaz'),
+        Sig('--fooble', action='store_true'),
+    ]
+    failures = ['--foob 5', '--foob']
+    successes = [
+        ('', NS(foo=None, foobaz=None, fooble=False)),
+        ('--foo 7', NS(foo='7', foobaz=None, fooble=False)),
+        ('--fooba a', NS(foo=None, foobaz='a', fooble=False)),
+        ('--foobl --foo g', NS(foo='g', foobaz=None, fooble=True)),
+    ]
+
+
+class TestOptionalsDisallowLongAbbreviation(ParserTestCase):
+    """Do not allow abbreviations of long options at all"""
+
+    parser_signature = Sig(allow_abbrev=False)
+    argument_signatures = [
+        Sig('--foo'),
+        Sig('--foodle', action='store_true'),
+        Sig('--foonly'),
+    ]
+    failures = ['-foon 3', '--foon 3', '--food', '--food --foo 2']
+    successes = [
+        ('', NS(foo=None, foodle=False, foonly=None)),
+        ('--foo 3', NS(foo='3', foodle=False, foonly=None)),
+        ('--foonly 7 --foodle --foo 2', NS(foo='2', foodle=True, foonly='7')),
+    ]
+
 # ================
 # Positional tests
 # ================
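The allow_abbrev parameter exercised by the tests above is set on the parser itself; a minimal sketch with illustrative option names:

    import argparse

    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('--foonly')
    parser.parse_args(['--foonly', '7'])   # the full option name is still accepted
    parser.parse_args(['--foon', '7'])     # now rejected as an unrecognized argument
                                           # (prints a usage error and exits)
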
diff --git a/Lib/test/test_asyncio/test_queues.py b/Lib/test/test_asyncio/test_queues.py
index 3d4ac51..a73539d 100644
--- a/Lib/test/test_asyncio/test_queues.py
+++ b/Lib/test/test_asyncio/test_queues.py
@@ -408,14 +408,14 @@
         self.assertEqual([1, 2, 3], items)
 
 
-class JoinableQueueTests(_QueueTestBase):
+class QueueJoinTests(_QueueTestBase):
 
     def test_task_done_underflow(self):
-        q = asyncio.JoinableQueue(loop=self.loop)
+        q = asyncio.Queue(loop=self.loop)
         self.assertRaises(ValueError, q.task_done)
 
     def test_task_done(self):
-        q = asyncio.JoinableQueue(loop=self.loop)
+        q = asyncio.Queue(loop=self.loop)
         for i in range(100):
             q.put_nowait(i)
 
@@ -452,7 +452,7 @@
         self.loop.run_until_complete(asyncio.wait(tasks, loop=self.loop))
 
     def test_join_empty_queue(self):
-        q = asyncio.JoinableQueue(loop=self.loop)
+        q = asyncio.Queue(loop=self.loop)
 
         # Test that a queue join()s successfully, and before anything else
         # (done twice for insurance).
@@ -465,7 +465,7 @@
         self.loop.run_until_complete(join())
 
     def test_format(self):
-        q = asyncio.JoinableQueue(loop=self.loop)
+        q = asyncio.Queue(loop=self.loop)
         self.assertEqual(q._format(), 'maxsize=0')
 
         q._unfinished_tasks = 2
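With JoinableQueue folded into asyncio.Queue, the producer/consumer idiom looks like the sketch below; worker/main are illustrative names, written in the yield-from style used by these tests:

    import asyncio

    @asyncio.coroutine
    def worker(q):
        while True:
            item = yield from q.get()
            # ... process item ...
            q.task_done()                 # task_done()/join() now live on Queue itself

    @asyncio.coroutine
    def main(loop):
        q = asyncio.Queue()
        for i in range(3):
            q.put_nowait(i)
        task = loop.create_task(worker(q))
        yield from q.join()               # resumes once every queued item is marked done
        task.cancel()

    loop = asyncio.get_event_loop()
    loop.run_until_complete(main(loop))
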
diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py
index b467b04..5ccdafb 100644
--- a/Lib/test/test_asyncio/test_subprocess.py
+++ b/Lib/test/test_asyncio/test_subprocess.py
@@ -349,6 +349,70 @@
             self.loop.run_until_complete(cancel_make_transport())
             test_utils.run_briefly(self.loop)
 
+    def test_close_kill_running(self):
+        @asyncio.coroutine
+        def kill_running():
+            create = self.loop.subprocess_exec(asyncio.SubprocessProtocol,
+                                               *PROGRAM_BLOCKED)
+            transport, protocol = yield from create
+
+            kill_called = False
+            def kill():
+                nonlocal kill_called
+                kill_called = True
+                orig_kill()
+
+            proc = transport.get_extra_info('subprocess')
+            orig_kill = proc.kill
+            proc.kill = kill
+            returncode = transport.get_returncode()
+            transport.close()
+            yield from transport._wait()
+            return (returncode, kill_called)
+
+        # Ignore "Close running child process: kill ..." log
+        with test_utils.disable_logger():
+            returncode, killed = self.loop.run_until_complete(kill_running())
+        self.assertIsNone(returncode)
+
+        # transport.close() must kill the process if it is still running
+        self.assertTrue(killed)
+        test_utils.run_briefly(self.loop)
+
+    def test_close_dont_kill_finished(self):
+        @asyncio.coroutine
+        def kill_running():
+            create = self.loop.subprocess_exec(asyncio.SubprocessProtocol,
+                                               *PROGRAM_BLOCKED)
+            transport, protocol = yield from create
+            proc = transport.get_extra_info('subprocess')
+
+            # kill the process (but asyncio is not notified immediately)
+            proc.kill()
+            proc.wait()
+
+            proc.kill = mock.Mock()
+            proc_returncode = proc.poll()
+            transport_returncode = transport.get_returncode()
+            transport.close()
+            return (proc_returncode, transport_returncode, proc.kill.called)
+
+        # Ignore "Unknown child process pid ..." log of SafeChildWatcher,
+        # emitted because the test already consumes the exit status:
+        # proc.wait()
+        with test_utils.disable_logger():
+            result = self.loop.run_until_complete(kill_running())
+            test_utils.run_briefly(self.loop)
+
+        proc_returncode, transport_return_code, killed = result
+
+        self.assertIsNotNone(proc_returncode)
+        self.assertIsNone(transport_return_code)
+
+        # transport.close() must not kill the process if it finished, even if
+        # the transport was not notified yet
+        self.assertFalse(killed)
+
 
 if sys.platform != 'win32':
     # Unix
diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py
index 41249ff..dc0835c 100644
--- a/Lib/test/test_asyncio/test_unix_events.py
+++ b/Lib/test/test_asyncio/test_unix_events.py
@@ -295,7 +295,7 @@
 
     def test_create_unix_connection_path_sock(self):
         coro = self.loop.create_unix_connection(
-            lambda: None, '/dev/null', sock=object())
+            lambda: None, os.devnull, sock=object())
         with self.assertRaisesRegex(ValueError, 'path and sock can not be'):
             self.loop.run_until_complete(coro)
 
@@ -308,14 +308,14 @@
 
     def test_create_unix_connection_nossl_serverhost(self):
         coro = self.loop.create_unix_connection(
-            lambda: None, '/dev/null', server_hostname='spam')
+            lambda: None, os.devnull, server_hostname='spam')
         with self.assertRaisesRegex(ValueError,
                                     'server_hostname is only meaningful'):
             self.loop.run_until_complete(coro)
 
     def test_create_unix_connection_ssl_noserverhost(self):
         coro = self.loop.create_unix_connection(
-            lambda: None, '/dev/null', ssl=True)
+            lambda: None, os.devnull, ssl=True)
 
         with self.assertRaisesRegex(
             ValueError, 'you have to pass server_hostname when using ssl'):
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index ce012d6..2fe3f2a 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -5,6 +5,7 @@
 from io import BytesIO
 import os
 import pickle
+import glob
 import random
 import subprocess
 import sys
@@ -51,6 +52,19 @@
     EMPTY_DATA = b'BZh9\x17rE8P\x90\x00\x00\x00\x00'
     BAD_DATA = b'this is not a valid bzip2 file'
 
+    # Some tests need more than one block of uncompressed data. Since one block
+    # is at least 100 kB, we gather some data dynamically and compress it.
+    # Note that this assumes that compression works correctly, so we cannot
+    # simply use the bigger test data for all tests.
+    test_size = 0
+    BIG_TEXT = bytearray(128*1024)
+    for fname in glob.glob(os.path.join(os.path.dirname(__file__), '*.py')):
+        with open(fname, 'rb') as fh:
+            test_size += fh.readinto(memoryview(BIG_TEXT)[test_size:])
+        if test_size > 128*1024:
+            break
+    BIG_DATA = bz2.compress(BIG_TEXT, compresslevel=1)
+
     def setUp(self):
         self.filename = support.TESTFN
 
@@ -87,11 +101,11 @@
 
     def testBadArgs(self):
         self.assertRaises(TypeError, BZ2File, 123.456)
-        self.assertRaises(ValueError, BZ2File, "/dev/null", "z")
-        self.assertRaises(ValueError, BZ2File, "/dev/null", "rx")
-        self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt")
-        self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0)
-        self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10)
+        self.assertRaises(ValueError, BZ2File, os.devnull, "z")
+        self.assertRaises(ValueError, BZ2File, os.devnull, "rx")
+        self.assertRaises(ValueError, BZ2File, os.devnull, "rbt")
+        self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=0)
+        self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=10)
 
     def testRead(self):
         self.createTempFile()
@@ -707,6 +721,95 @@
             with self.assertRaises(TypeError):
                 pickle.dumps(BZ2Decompressor(), proto)
 
+    def testDecompressorChunksMaxsize(self):
+        bzd = BZ2Decompressor()
+        max_length = 100
+        out = []
+
+        # Feed some input
+        len_ = len(self.BIG_DATA) - 64
+        out.append(bzd.decompress(self.BIG_DATA[:len_],
+                                  max_length=max_length))
+        self.assertFalse(bzd.needs_input)
+        self.assertEqual(len(out[-1]), max_length)
+
+        # Retrieve more data without providing more input
+        out.append(bzd.decompress(b'', max_length=max_length))
+        self.assertFalse(bzd.needs_input)
+        self.assertEqual(len(out[-1]), max_length)
+
+        # Retrieve more data while providing more input
+        out.append(bzd.decompress(self.BIG_DATA[len_:],
+                                  max_length=max_length))
+        self.assertLessEqual(len(out[-1]), max_length)
+
+        # Retrieve remaining uncompressed data
+        while not bzd.eof:
+            out.append(bzd.decompress(b'', max_length=max_length))
+            self.assertLessEqual(len(out[-1]), max_length)
+
+        out = b"".join(out)
+        self.assertEqual(out, self.BIG_TEXT)
+        self.assertEqual(bzd.unused_data, b"")
+
+    def test_decompressor_inputbuf_1(self):
+        # Test reusing input buffer after moving existing
+        # contents to beginning
+        bzd = BZ2Decompressor()
+        out = []
+
+        # Create input buffer and fill it
+        self.assertEqual(bzd.decompress(self.DATA[:100],
+                                        max_length=0), b'')
+
+        # Retrieve some results, freeing capacity at beginning
+        # of input buffer
+        out.append(bzd.decompress(b'', 2))
+
+        # Add more data that fits into input buffer after
+        # moving existing data to beginning
+        out.append(bzd.decompress(self.DATA[100:105], 15))
+
+        # Decompress rest of data
+        out.append(bzd.decompress(self.DATA[105:]))
+        self.assertEqual(b''.join(out), self.TEXT)
+
+    def test_decompressor_inputbuf_2(self):
+        # Test reusing input buffer by appending data at the
+        # end right away
+        bzd = BZ2Decompressor()
+        out = []
+
+        # Create input buffer and empty it
+        self.assertEqual(bzd.decompress(self.DATA[:200],
+                                        max_length=0), b'')
+        out.append(bzd.decompress(b''))
+
+        # Fill buffer with new data
+        out.append(bzd.decompress(self.DATA[200:280], 2))
+
+        # Append some more data, not enough to require resize
+        out.append(bzd.decompress(self.DATA[280:300], 2))
+
+        # Decompress rest of data
+        out.append(bzd.decompress(self.DATA[300:]))
+        self.assertEqual(b''.join(out), self.TEXT)
+
+    def test_decompressor_inputbuf_3(self):
+        # Test reusing input buffer after extending it
+
+        bzd = BZ2Decompressor()
+        out = []
+
+        # Create almost full input buffer
+        out.append(bzd.decompress(self.DATA[:200], 5))
+
+        # Add even more data to it, requiring resize
+        out.append(bzd.decompress(self.DATA[200:300], 5))
+
+        # Decompress rest of data
+        out.append(bzd.decompress(self.DATA[300:]))
+        self.assertEqual(b''.join(out), self.TEXT)
 
 class CompressDecompressTest(BaseTest):
     def testCompress(self):
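The new decompressor tests exercise the max_length argument and the needs_input/eof attributes added to BZ2Decompressor.decompress(). A short sketch of the incremental-output pattern they rely on (data and chunk size chosen arbitrarily):

    import bz2

    compressed = bz2.compress(b'spam ' * 40000, compresslevel=1)
    dec = bz2.BZ2Decompressor()

    out = [dec.decompress(compressed, max_length=4096)]  # at most 4096 bytes back
    while not dec.eof:
        # b'' means "no new input"; buffered output is drained 4096 bytes at a time
        out.append(dec.decompress(b'', max_length=4096))
    assert b''.join(out) == b'spam ' * 40000
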
diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py
index 89a6e84..715bd73 100644
--- a/Lib/test/test_cgi.py
+++ b/Lib/test/test_cgi.py
@@ -186,9 +186,9 @@
         cgi.initlog("%s", "Testing initlog 1")
         cgi.log("%s", "Testing log 2")
         self.assertEqual(cgi.logfp.getvalue(), "Testing initlog 1\nTesting log 2\n")
-        if os.path.exists("/dev/null"):
+        if os.path.exists(os.devnull):
             cgi.logfp = None
-            cgi.logfile = "/dev/null"
+            cgi.logfile = os.devnull
             cgi.initlog("%s", "Testing log 3")
             self.addCleanup(cgi.closelog)
             cgi.log("Testing log 4")
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
index ebe557e..abaf3bb 100644
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -340,7 +340,8 @@
         # Issue #5319: if stdout.flush() fails at shutdown, an error should
         # be printed out.
         code = """if 1:
-            import os, sys
+            import os, sys, test.support
+            test.support.SuppressCrashReport().__enter__()
             sys.stdout.write('x')
             os.close(sys.stdout.fileno())"""
         rc, out, err = assert_python_ok('-c', code)
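test.support.SuppressCrashReport() temporarily disables OS-level crash reporting (core dumps on Unix, error dialogs on Windows) so the deliberately provoked failure does not leave debris behind. Roughly the pattern used in the spawned script, intended for a throwaway child process rather than an interactive session:

    import os, sys
    import test.support

    with test.support.SuppressCrashReport():
        sys.stdout.write('x')
        os.close(sys.stdout.fileno())   # provoke the flush error at shutdown
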
diff --git a/Lib/test/test_dbm_dumb.py b/Lib/test/test_dbm_dumb.py
index ee5a32f..ff63c88 100644
--- a/Lib/test/test_dbm_dumb.py
+++ b/Lib/test/test_dbm_dumb.py
@@ -225,6 +225,15 @@
         with dumbdbm.open(_fname, 'n') as f:
             self.assertEqual(f.keys(), [])
 
+    def test_eval(self):
+        with open(_fname + '.dir', 'w') as stream:
+            stream.write("str(print('Hacked!')), 0\n")
+        with support.captured_stdout() as stdout:
+            with self.assertRaises(ValueError):
+                with dumbdbm.open(_fname) as f:
+                    pass
+            self.assertEqual(stdout.getvalue(), '')
+
     def tearDown(self):
         _delete_files()
 
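test_eval writes an index line containing a function call and expects a ValueError with nothing printed, i.e. the '.dir' file must no longer be evaluated as arbitrary code. A sketch of the safe-parsing idea, using ast.literal_eval (an assumption about the exact mechanism inside dbm.dumb):

    import ast

    index_line = "str(print('Hacked!')), 0\n"
    try:
        # literal_eval only accepts literal syntax; the call expression is
        # rejected instead of being executed.
        ast.literal_eval(index_line)
    except ValueError:
        print('rejected malicious index entry')
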
diff --git a/Lib/test/test_deque.py b/Lib/test/test_deque.py
index e336e5a..0eebbff 100644
--- a/Lib/test/test_deque.py
+++ b/Lib/test/test_deque.py
@@ -542,7 +542,7 @@
 
     @support.cpython_only
     def test_sizeof(self):
-        BLOCKLEN = 62
+        BLOCKLEN = 64
         basesize = support.calcobjsize('2P4nlP')
         blocksize = struct.calcsize('2P%dP' % BLOCKLEN)
         self.assertEqual(object.__sizeof__(deque()), basesize)
diff --git a/Lib/test/test_float.py b/Lib/test/test_float.py
index e1e1f04..1baa6a9 100644
--- a/Lib/test/test_float.py
+++ b/Lib/test/test_float.py
@@ -6,6 +6,7 @@
 import math
 from math import isinf, isnan, copysign, ldexp
 import operator
+import time
 import random, fractions
 
 INF = float("inf")
@@ -93,10 +94,6 @@
 
     def test_floatconversion(self):
         # Make sure that calls to __float__() work properly
-        class Foo0:
-            def __float__(self):
-                return 42.
-
         class Foo1(object):
             def __float__(self):
                 return 42.
@@ -122,13 +119,17 @@
             def __float__(self):
                 return float(str(self)) + 1
 
-        self.assertAlmostEqual(float(Foo0()), 42.)
         self.assertAlmostEqual(float(Foo1()), 42.)
         self.assertAlmostEqual(float(Foo2()), 42.)
         self.assertAlmostEqual(float(Foo3(21)), 42.)
         self.assertRaises(TypeError, float, Foo4(42))
         self.assertAlmostEqual(float(FooStr('8')), 9.)
 
+        class Foo5:
+            def __float__(self):
+                return ""
+        self.assertRaises(TypeError, time.sleep, Foo5())
+
     def test_is_integer(self):
         self.assertFalse((1.1).is_integer())
         self.assertTrue((1.).is_integer())
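The redundant Foo0 case (identical to Foo1 on Python 3) is dropped, and the new Foo5 case verifies that a __float__ returning a non-float is rejected by C code converting its argument (time.sleep() here). The same TypeError is visible with float() itself:

    class Broken:
        def __float__(self):
            return ""          # not a float

    try:
        float(Broken())
    except TypeError as exc:
        print(exc)             # e.g. "Broken.__float__ returned non-float (type str)"
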
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index fbb43e4..c549ac4 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -131,6 +131,16 @@
         join = self.partial(''.join)
         self.assertEqual(join(data), '0123456789')
 
+    def test_nested_optimization(self):
+        partial = self.partial
+        # Only "true" partial is optimized
+        if partial.__name__ != 'partial':
+            return
+        inner = partial(signature, 'asdf')
+        nested = partial(inner, bar=True)
+        flat = partial(signature, 'asdf', bar=True)
+        self.assertEqual(signature(nested), signature(flat))
+
 
 @unittest.skipUnless(c_functools, 'requires the C _functools module')
 class TestPartialC(TestPartial, unittest.TestCase):
@@ -155,9 +165,9 @@
     def test_repr(self):
         args = (object(), object())
         args_repr = ', '.join(repr(a) for a in args)
-        #kwargs = {'a': object(), 'b': object()}
-        kwargs = {'a': object()}
-        kwargs_repr = ', '.join("%s=%r" % (k, v) for k, v in kwargs.items())
+        kwargs = {'a': object(), 'b': object()}
+        kwargs_reprs = ['a={a!r}, b={b!r}'.format_map(kwargs),
+                        'b={b!r}, a={a!r}'.format_map(kwargs)]
         if self.partial is c_functools.partial:
             name = 'functools.partial'
         else:
@@ -172,12 +182,14 @@
                          repr(f))
 
         f = self.partial(capture, **kwargs)
-        self.assertEqual('{}({!r}, {})'.format(name, capture, kwargs_repr),
-                         repr(f))
+        self.assertIn(repr(f),
+                      ['{}({!r}, {})'.format(name, capture, kwargs_repr)
+                       for kwargs_repr in kwargs_reprs])
 
         f = self.partial(capture, *args, **kwargs)
-        self.assertEqual('{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr),
-                         repr(f))
+        self.assertIn(repr(f),
+                      ['{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr)
+                       for kwargs_repr in kwargs_reprs])
 
     def test_pickle(self):
         f = self.partial(signature, 'asdf', bar=True)
diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py
index aaa5c69..c57875c 100644
--- a/Lib/test/test_gdb.py
+++ b/Lib/test/test_gdb.py
@@ -190,6 +190,8 @@
             'linux-vdso.so',
             'warning: Could not load shared library symbols for '
             'linux-gate.so',
+            'warning: Could not load shared library symbols for '
+            'linux-vdso64.so',
             'Do you need "set solib-search-path" or '
             '"set sysroot"?',
             'warning: Source file is more recent than executable.',
diff --git a/Lib/test/test_getargs2.py b/Lib/test/test_getargs2.py
index cf1279d..71472cd 100644
--- a/Lib/test/test_getargs2.py
+++ b/Lib/test/test_getargs2.py
@@ -34,8 +34,8 @@
 # > ** Changed from previous "range-and-a-half" to "none"; the
 # > range-and-a-half checking wasn't particularly useful.
 #
-# Plus a C API or two, e.g. PyInt_AsLongMask() ->
-# unsigned long and PyInt_AsLongLongMask() -> unsigned
+# Plus a C API or two, e.g. PyLong_AsUnsignedLongMask() ->
+# unsigned long and PyLong_AsUnsignedLongLongMask() -> unsigned
 # long long (if that exists).
 
 LARGE = 0x7FFFFFFF
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index 40ef250..9c1550e 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -920,10 +920,86 @@
             self.remainder = b"".join(data)
             raise
 
+
 class OfflineTest(TestCase):
+    def test_all(self):
+        # Documented objects defined in the module should be in __all__
+        expected = {"responses"}  # White-list documented dict() object
+        # HTTPMessage, parse_headers(), and the HTTP status code constants are
+        # intentionally omitted for simplicity
+        blacklist = {"HTTPMessage", "parse_headers"}
+        for name in dir(client):
+            if name in blacklist:
+                continue
+            module_object = getattr(client, name)
+            if getattr(module_object, "__module__", None) == "http.client":
+                expected.add(name)
+        self.assertCountEqual(client.__all__, expected)
+
     def test_responses(self):
         self.assertEqual(client.responses[client.NOT_FOUND], "Not Found")
 
+    def test_client_constants(self):
+        # Make sure we don't break backward compatibility with 3.4
+        expected = [
+            'CONTINUE',
+            'SWITCHING_PROTOCOLS',
+            'PROCESSING',
+            'OK',
+            'CREATED',
+            'ACCEPTED',
+            'NON_AUTHORITATIVE_INFORMATION',
+            'NO_CONTENT',
+            'RESET_CONTENT',
+            'PARTIAL_CONTENT',
+            'MULTI_STATUS',
+            'IM_USED',
+            'MULTIPLE_CHOICES',
+            'MOVED_PERMANENTLY',
+            'FOUND',
+            'SEE_OTHER',
+            'NOT_MODIFIED',
+            'USE_PROXY',
+            'TEMPORARY_REDIRECT',
+            'BAD_REQUEST',
+            'UNAUTHORIZED',
+            'PAYMENT_REQUIRED',
+            'FORBIDDEN',
+            'NOT_FOUND',
+            'METHOD_NOT_ALLOWED',
+            'NOT_ACCEPTABLE',
+            'PROXY_AUTHENTICATION_REQUIRED',
+            'REQUEST_TIMEOUT',
+            'CONFLICT',
+            'GONE',
+            'LENGTH_REQUIRED',
+            'PRECONDITION_FAILED',
+            'REQUEST_ENTITY_TOO_LARGE',
+            'REQUEST_URI_TOO_LONG',
+            'UNSUPPORTED_MEDIA_TYPE',
+            'REQUESTED_RANGE_NOT_SATISFIABLE',
+            'EXPECTATION_FAILED',
+            'UNPROCESSABLE_ENTITY',
+            'LOCKED',
+            'FAILED_DEPENDENCY',
+            'UPGRADE_REQUIRED',
+            'PRECONDITION_REQUIRED',
+            'TOO_MANY_REQUESTS',
+            'REQUEST_HEADER_FIELDS_TOO_LARGE',
+            'INTERNAL_SERVER_ERROR',
+            'NOT_IMPLEMENTED',
+            'BAD_GATEWAY',
+            'SERVICE_UNAVAILABLE',
+            'GATEWAY_TIMEOUT',
+            'HTTP_VERSION_NOT_SUPPORTED',
+            'INSUFFICIENT_STORAGE',
+            'NOT_EXTENDED',
+            'NETWORK_AUTHENTICATION_REQUIRED',
+        ]
+        for const in expected:
+            with self.subTest(constant=const):
+                self.assertTrue(hasattr(client, const))
+
 
 class SourceAddressTest(TestCase):
     def setUp(self):
@@ -1037,6 +1113,7 @@
                                        context=context)
             h.request('GET', '/')
             resp = h.getresponse()
+            h.close()
             self.assertIn('nginx', resp.getheader('server'))
 
     @support.system_must_validate_cert
@@ -1048,6 +1125,7 @@
             h.request('GET', '/')
             resp = h.getresponse()
             content_type = resp.getheader('content-type')
+            h.close()
             self.assertIn('text/html', content_type)
 
     def test_networked_good_cert(self):
@@ -1062,6 +1140,7 @@
             h.request('GET', '/')
             resp = h.getresponse()
             server_string = resp.getheader('server')
+            h.close()
             self.assertIn('nginx', server_string)
 
     def test_networked_bad_cert(self):
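test_all keeps http.client.__all__ in sync with the public names actually defined in the module, and the networked tests now close their connections explicitly instead of leaking sockets. The __all__ audit pattern, roughly (the underscore filter is an addition for this sketch):

    import http.client as client

    blacklist = {"HTTPMessage", "parse_headers"}   # deliberately not exported
    public = set()
    for name in dir(client):
        if name.startswith('_') or name in blacklist:
            continue
        obj = getattr(client, name)
        if getattr(obj, '__module__', None) == 'http.client':
            public.add(name)
    print(sorted(public - set(client.__all__)))    # ideally empty
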
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index 569341d..fcdfe51 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -6,7 +6,7 @@
 
 from http.server import BaseHTTPRequestHandler, HTTPServer, \
      SimpleHTTPRequestHandler, CGIHTTPRequestHandler
-from http import server
+from http import server, HTTPStatus
 
 import os
 import sys
@@ -79,13 +79,13 @@
         default_request_version = 'HTTP/1.1'
 
         def do_TEST(self):
-            self.send_response(204)
+            self.send_response(HTTPStatus.NO_CONTENT)
             self.send_header('Content-Type', 'text/html')
             self.send_header('Connection', 'close')
             self.end_headers()
 
         def do_KEEP(self):
-            self.send_response(204)
+            self.send_response(HTTPStatus.NO_CONTENT)
             self.send_header('Content-Type', 'text/html')
             self.send_header('Connection', 'keep-alive')
             self.end_headers()
@@ -94,7 +94,7 @@
             self.send_error(999)
 
         def do_NOTFOUND(self):
-            self.send_error(404)
+            self.send_error(HTTPStatus.NOT_FOUND)
 
         def do_EXPLAINERROR(self):
             self.send_error(999, "Short Message",
@@ -122,35 +122,35 @@
     def test_command(self):
         self.con.request('GET', '/')
         res = self.con.getresponse()
-        self.assertEqual(res.status, 501)
+        self.assertEqual(res.status, HTTPStatus.NOT_IMPLEMENTED)
 
     def test_request_line_trimming(self):
         self.con._http_vsn_str = 'HTTP/1.1\n'
         self.con.putrequest('XYZBOGUS', '/')
         self.con.endheaders()
         res = self.con.getresponse()
-        self.assertEqual(res.status, 501)
+        self.assertEqual(res.status, HTTPStatus.NOT_IMPLEMENTED)
 
     def test_version_bogus(self):
         self.con._http_vsn_str = 'FUBAR'
         self.con.putrequest('GET', '/')
         self.con.endheaders()
         res = self.con.getresponse()
-        self.assertEqual(res.status, 400)
+        self.assertEqual(res.status, HTTPStatus.BAD_REQUEST)
 
     def test_version_digits(self):
         self.con._http_vsn_str = 'HTTP/9.9.9'
         self.con.putrequest('GET', '/')
         self.con.endheaders()
         res = self.con.getresponse()
-        self.assertEqual(res.status, 400)
+        self.assertEqual(res.status, HTTPStatus.BAD_REQUEST)
 
     def test_version_none_get(self):
         self.con._http_vsn_str = ''
         self.con.putrequest('GET', '/')
         self.con.endheaders()
         res = self.con.getresponse()
-        self.assertEqual(res.status, 501)
+        self.assertEqual(res.status, HTTPStatus.NOT_IMPLEMENTED)
 
     def test_version_none(self):
         # Test that a valid method is rejected when not HTTP/1.x
@@ -158,7 +158,7 @@
         self.con.putrequest('CUSTOM', '/')
         self.con.endheaders()
         res = self.con.getresponse()
-        self.assertEqual(res.status, 400)
+        self.assertEqual(res.status, HTTPStatus.BAD_REQUEST)
 
     def test_version_invalid(self):
         self.con._http_vsn = 99
@@ -166,21 +166,21 @@
         self.con.putrequest('GET', '/')
         self.con.endheaders()
         res = self.con.getresponse()
-        self.assertEqual(res.status, 505)
+        self.assertEqual(res.status, HTTPStatus.HTTP_VERSION_NOT_SUPPORTED)
 
     def test_send_blank(self):
         self.con._http_vsn_str = ''
         self.con.putrequest('', '')
         self.con.endheaders()
         res = self.con.getresponse()
-        self.assertEqual(res.status, 400)
+        self.assertEqual(res.status, HTTPStatus.BAD_REQUEST)
 
     def test_header_close(self):
         self.con.putrequest('GET', '/')
         self.con.putheader('Connection', 'close')
         self.con.endheaders()
         res = self.con.getresponse()
-        self.assertEqual(res.status, 501)
+        self.assertEqual(res.status, HTTPStatus.NOT_IMPLEMENTED)
 
     def test_head_keep_alive(self):
         self.con._http_vsn_str = 'HTTP/1.1'
@@ -188,12 +188,12 @@
         self.con.putheader('Connection', 'keep-alive')
         self.con.endheaders()
         res = self.con.getresponse()
-        self.assertEqual(res.status, 501)
+        self.assertEqual(res.status, HTTPStatus.NOT_IMPLEMENTED)
 
     def test_handler(self):
         self.con.request('TEST', '/')
         res = self.con.getresponse()
-        self.assertEqual(res.status, 204)
+        self.assertEqual(res.status, HTTPStatus.NO_CONTENT)
 
     def test_return_header_keep_alive(self):
         self.con.request('KEEP', '/')
@@ -230,11 +230,48 @@
         # Issue #16088: standard error responses should have a content-length
         self.con.request('NOTFOUND', '/')
         res = self.con.getresponse()
-        self.assertEqual(res.status, 404)
+        self.assertEqual(res.status, HTTPStatus.NOT_FOUND)
+
         data = res.read()
         self.assertEqual(int(res.getheader('Content-Length')), len(data))
 
 
+class RequestHandlerLoggingTestCase(BaseTestCase):
+    class request_handler(BaseHTTPRequestHandler):
+        protocol_version = 'HTTP/1.1'
+        default_request_version = 'HTTP/1.1'
+
+        def do_GET(self):
+            self.send_response(HTTPStatus.OK)
+            self.end_headers()
+
+        def do_ERROR(self):
+            self.send_error(HTTPStatus.NOT_FOUND, 'File not found')
+
+    def test_get(self):
+        self.con = http.client.HTTPConnection(self.HOST, self.PORT)
+        self.con.connect()
+
+        with support.captured_stderr() as err:
+            self.con.request('GET', '/')
+            self.con.getresponse()
+
+        self.assertTrue(
+            err.getvalue().endswith('"GET / HTTP/1.1" 200 -\n'))
+
+    def test_err(self):
+        self.con = http.client.HTTPConnection(self.HOST, self.PORT)
+        self.con.connect()
+
+        with support.captured_stderr() as err:
+            self.con.request('ERROR', '/')
+            self.con.getresponse()
+
+        lines = err.getvalue().split('\n')
+        self.assertTrue(lines[0].endswith('code 404, message File not found'))
+        self.assertTrue(lines[1].endswith('"ERROR / HTTP/1.1" 404 -'))
+
+
 class SimpleHTTPServerTestCase(BaseTestCase):
     class request_handler(NoLogRequestHandler, SimpleHTTPRequestHandler):
         pass
@@ -285,52 +322,52 @@
                 if name != 'test': # Ignore a filename created in setUp().
                     filename = name
                     break
-        body = self.check_status_and_reason(response, 200)
+        body = self.check_status_and_reason(response, HTTPStatus.OK)
         quotedname = urllib.parse.quote(filename, errors='surrogatepass')
         self.assertIn(('href="%s"' % quotedname)
                       .encode(enc, 'surrogateescape'), body)
         self.assertIn(('>%s<' % html.escape(filename))
                       .encode(enc, 'surrogateescape'), body)
         response = self.request(self.tempdir_name + '/' + quotedname)
-        self.check_status_and_reason(response, 200,
+        self.check_status_and_reason(response, HTTPStatus.OK,
                                      data=support.TESTFN_UNDECODABLE)
 
     def test_get(self):
         #constructs the path relative to the root directory of the HTTPServer
         response = self.request(self.tempdir_name + '/test')
-        self.check_status_and_reason(response, 200, data=self.data)
+        self.check_status_and_reason(response, HTTPStatus.OK, data=self.data)
         # check for trailing "/" which should return 404. See Issue17324
         response = self.request(self.tempdir_name + '/test/')
-        self.check_status_and_reason(response, 404)
+        self.check_status_and_reason(response, HTTPStatus.NOT_FOUND)
         response = self.request(self.tempdir_name + '/')
-        self.check_status_and_reason(response, 200)
+        self.check_status_and_reason(response, HTTPStatus.OK)
         response = self.request(self.tempdir_name)
-        self.check_status_and_reason(response, 301)
+        self.check_status_and_reason(response, HTTPStatus.MOVED_PERMANENTLY)
         response = self.request(self.tempdir_name + '/?hi=2')
-        self.check_status_and_reason(response, 200)
+        self.check_status_and_reason(response, HTTPStatus.OK)
         response = self.request(self.tempdir_name + '?hi=1')
-        self.check_status_and_reason(response, 301)
+        self.check_status_and_reason(response, HTTPStatus.MOVED_PERMANENTLY)
         self.assertEqual(response.getheader("Location"),
                          self.tempdir_name + "/?hi=1")
         response = self.request('/ThisDoesNotExist')
-        self.check_status_and_reason(response, 404)
+        self.check_status_and_reason(response, HTTPStatus.NOT_FOUND)
         response = self.request('/' + 'ThisDoesNotExist' + '/')
-        self.check_status_and_reason(response, 404)
+        self.check_status_and_reason(response, HTTPStatus.NOT_FOUND)
         with open(os.path.join(self.tempdir_name, 'index.html'), 'w') as f:
             response = self.request('/' + self.tempdir_name + '/')
-            self.check_status_and_reason(response, 200)
+            self.check_status_and_reason(response, HTTPStatus.OK)
             # chmod() doesn't work as expected on Windows, and filesystem
             # permissions are ignored by root on Unix.
             if os.name == 'posix' and os.geteuid() != 0:
                 os.chmod(self.tempdir, 0)
                 response = self.request(self.tempdir_name + '/')
-                self.check_status_and_reason(response, 404)
+                self.check_status_and_reason(response, HTTPStatus.NOT_FOUND)
                 os.chmod(self.tempdir, 0o755)
 
     def test_head(self):
         response = self.request(
             self.tempdir_name + '/test', method='HEAD')
-        self.check_status_and_reason(response, 200)
+        self.check_status_and_reason(response, HTTPStatus.OK)
         self.assertEqual(response.getheader('content-length'),
                          str(len(self.data)))
         self.assertEqual(response.getheader('content-type'),
@@ -338,12 +375,12 @@
 
     def test_invalid_requests(self):
         response = self.request('/', method='FOO')
-        self.check_status_and_reason(response, 501)
+        self.check_status_and_reason(response, HTTPStatus.NOT_IMPLEMENTED)
         # requests must be case sensitive, so this should fail too
         response = self.request('/', method='custom')
-        self.check_status_and_reason(response, 501)
+        self.check_status_and_reason(response, HTTPStatus.NOT_IMPLEMENTED)
         response = self.request('/', method='GETs')
-        self.check_status_and_reason(response, 501)
+        self.check_status_and_reason(response, HTTPStatus.NOT_IMPLEMENTED)
 
 
 cgi_file1 = """\
@@ -490,12 +527,13 @@
 
     def test_headers_and_content(self):
         res = self.request('/cgi-bin/file1.py')
-        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
-            (res.read(), res.getheader('Content-type'), res.status))
+        self.assertEqual(
+            (res.read(), res.getheader('Content-type'), res.status),
+            (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK))
 
     def test_issue19435(self):
         res = self.request('///////////nocgi.py/../cgi-bin/nothere.sh')
-        self.assertEqual(res.status, 404)
+        self.assertEqual(res.status, HTTPStatus.NOT_FOUND)
 
     def test_post(self):
         params = urllib.parse.urlencode(
@@ -508,38 +546,43 @@
     def test_invaliduri(self):
         res = self.request('/cgi-bin/invalid')
         res.read()
-        self.assertEqual(res.status, 404)
+        self.assertEqual(res.status, HTTPStatus.NOT_FOUND)
 
     def test_authorization(self):
         headers = {b'Authorization' : b'Basic ' +
                    base64.b64encode(b'username:pass')}
         res = self.request('/cgi-bin/file1.py', 'GET', headers=headers)
-        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
-                (res.read(), res.getheader('Content-type'), res.status))
+        self.assertEqual(
+            (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK),
+            (res.read(), res.getheader('Content-type'), res.status))
 
     def test_no_leading_slash(self):
         # http://bugs.python.org/issue2254
         res = self.request('cgi-bin/file1.py')
-        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
-             (res.read(), res.getheader('Content-type'), res.status))
+        self.assertEqual(
+            (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK),
+            (res.read(), res.getheader('Content-type'), res.status))
 
     def test_os_environ_is_not_altered(self):
         signature = "Test CGI Server"
         os.environ['SERVER_SOFTWARE'] = signature
         res = self.request('/cgi-bin/file1.py')
-        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
-                (res.read(), res.getheader('Content-type'), res.status))
+        self.assertEqual(
+            (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK),
+            (res.read(), res.getheader('Content-type'), res.status))
         self.assertEqual(os.environ['SERVER_SOFTWARE'], signature)
 
     def test_urlquote_decoding_in_cgi_check(self):
         res = self.request('/cgi-bin%2ffile1.py')
-        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
-                (res.read(), res.getheader('Content-type'), res.status))
+        self.assertEqual(
+            (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK),
+            (res.read(), res.getheader('Content-type'), res.status))
 
     def test_nested_cgi_path_issue21323(self):
         res = self.request('/cgi-bin/child-dir/file3.py')
-        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
-                (res.read(), res.getheader('Content-type'), res.status))
+        self.assertEqual(
+            (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK),
+            (res.read(), res.getheader('Content-type'), res.status))
 
 
 class SocketlessRequestHandler(SimpleHTTPRequestHandler):
@@ -549,7 +592,7 @@
 
     def do_GET(self):
         self.get_called = True
-        self.send_response(200)
+        self.send_response(HTTPStatus.OK)
         self.send_header('Content-Type', 'text/html')
         self.end_headers()
         self.wfile.write(b'<html><body>Data</body></html>\r\n')
@@ -559,7 +602,7 @@
 
 class RejectingSocketlessRequestHandler(SocketlessRequestHandler):
     def handle_expect_100(self):
-        self.send_error(417)
+        self.send_error(HTTPStatus.EXPECTATION_FAILED)
         return False
 
 
@@ -616,6 +659,11 @@
         self.verify_expected_headers(result[1:-1])
         self.verify_get_called()
         self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
+        self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1')
+        self.assertEqual(self.handler.command, 'GET')
+        self.assertEqual(self.handler.path, '/')
+        self.assertEqual(self.handler.request_version, 'HTTP/1.1')
+        self.assertSequenceEqual(self.handler.headers.items(), ())
 
     def test_http_1_0(self):
         result = self.send_typical_request(b'GET / HTTP/1.0\r\n\r\n')
@@ -623,6 +671,11 @@
         self.verify_expected_headers(result[1:-1])
         self.verify_get_called()
         self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
+        self.assertEqual(self.handler.requestline, 'GET / HTTP/1.0')
+        self.assertEqual(self.handler.command, 'GET')
+        self.assertEqual(self.handler.path, '/')
+        self.assertEqual(self.handler.request_version, 'HTTP/1.0')
+        self.assertSequenceEqual(self.handler.headers.items(), ())
 
     def test_http_0_9(self):
         result = self.send_typical_request(b'GET / HTTP/0.9\r\n\r\n')
@@ -636,6 +689,12 @@
         self.verify_expected_headers(result[1:-1])
         self.verify_get_called()
         self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
+        self.assertEqual(self.handler.requestline, 'GET / HTTP/1.0')
+        self.assertEqual(self.handler.command, 'GET')
+        self.assertEqual(self.handler.path, '/')
+        self.assertEqual(self.handler.request_version, 'HTTP/1.0')
+        headers = (("Expect", "100-continue"),)
+        self.assertSequenceEqual(self.handler.headers.items(), headers)
 
     def test_with_continue_1_1(self):
         result = self.send_typical_request(b'GET / HTTP/1.1\r\nExpect: 100-continue\r\n\r\n')
@@ -645,6 +704,12 @@
         self.verify_expected_headers(result[2:-1])
         self.verify_get_called()
         self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
+        self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1')
+        self.assertEqual(self.handler.command, 'GET')
+        self.assertEqual(self.handler.path, '/')
+        self.assertEqual(self.handler.request_version, 'HTTP/1.1')
+        headers = (("Expect", "100-continue"),)
+        self.assertSequenceEqual(self.handler.headers.items(), headers)
 
     def test_header_buffering_of_send_error(self):
 
@@ -730,6 +795,7 @@
         result = self.send_typical_request(b'GET ' + b'x' * 65537)
         self.assertEqual(result[0], b'HTTP/1.1 414 Request-URI Too Long\r\n')
         self.assertFalse(self.handler.get_called)
+        self.assertIsInstance(self.handler.requestline, str)
 
     def test_header_length(self):
         # Issue #6791: same for headers
@@ -737,6 +803,22 @@
             b'GET / HTTP/1.1\r\nX-Foo: bar' + b'r' * 65537 + b'\r\n\r\n')
         self.assertEqual(result[0], b'HTTP/1.1 400 Line too long\r\n')
         self.assertFalse(self.handler.get_called)
+        self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1')
+
+    def test_close_connection(self):
+        # handle_one_request() should be repeatedly called until
+        # it sets close_connection
+        def handle_one_request():
+            self.handler.close_connection = next(close_values)
+        self.handler.handle_one_request = handle_one_request
+
+        close_values = iter((True,))
+        self.handler.handle()
+        self.assertRaises(StopIteration, next, close_values)
+
+        close_values = iter((False, False, True))
+        self.handler.handle()
+        self.assertRaises(StopIteration, next, close_values)
 
 class SimpleHTTPRequestHandlerTestCase(unittest.TestCase):
     """ Test url parsing """
@@ -760,15 +842,30 @@
         self.assertEqual(path, self.translated)
 
 
+class MiscTestCase(unittest.TestCase):
+    def test_all(self):
+        expected = []
+        blacklist = {'executable', 'nobody_uid', 'test'}
+        for name in dir(server):
+            if name.startswith('_') or name in blacklist:
+                continue
+            module_object = getattr(server, name)
+            if getattr(module_object, '__module__', None) == 'http.server':
+                expected.append(name)
+        self.assertCountEqual(server.__all__, expected)
+
+
 def test_main(verbose=None):
     cwd = os.getcwd()
     try:
         support.run_unittest(
+            RequestHandlerLoggingTestCase,
             BaseHTTPRequestHandlerTestCase,
             BaseHTTPServerTestCase,
             SimpleHTTPServerTestCase,
             CGIHTTPServerTestCase,
             SimpleHTTPRequestHandlerTestCase,
+            MiscTestCase,
         )
     finally:
         os.chdir(cwd)
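All numeric status codes in the handlers and assertions are replaced with members of the new http.HTTPStatus enum. Because HTTPStatus is an IntEnum, existing comparisons against plain integers keep working:

    from http import HTTPStatus

    assert HTTPStatus.NOT_FOUND == 404           # IntEnum members equal their int value
    assert HTTPStatus.NO_CONTENT == 204
    print(HTTPStatus.NOT_FOUND.phrase)           # 'Not Found'
    print(HTTPStatus.NOT_FOUND.description)      # e.g. 'Nothing matches the given URI'
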
diff --git a/Lib/test/test_importlib/import_/test_path.py b/Lib/test/test_importlib/import_/test_path.py
index 9a3c4fe..c246d69 100644
--- a/Lib/test/test_importlib/import_/test_path.py
+++ b/Lib/test/test_importlib/import_/test_path.py
@@ -3,6 +3,7 @@
 importlib = util.import_importlib('importlib')
 machinery = util.import_importlib('importlib.machinery')
 
+import errno
 import os
 import sys
 import tempfile
@@ -163,8 +164,13 @@
     def test_deleted_cwd(self):
         # Issue #22834
         self.addCleanup(os.chdir, os.getcwd())
-        with tempfile.TemporaryDirectory() as path:
-            os.chdir(path)
+        try:
+            with tempfile.TemporaryDirectory() as path:
+                os.chdir(path)
+        except OSError as exc:
+            if exc.errno == errno.EINVAL:
+                self.skipTest("platform does not allow the deletion of the cwd")
+            raise
         with util.import_state(path=['']):
             # Do not want FileNotFoundError raised.
             self.assertIsNone(self.machinery.PathFinder.find_spec('whatever'))
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index c867502..a0a4670 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -589,13 +589,44 @@
         with self.open(zero, "r") as f:
             self.assertRaises(OverflowError, f.read)
 
-    def test_flush_error_on_close(self):
-        f = self.open(support.TESTFN, "wb", buffering=0)
+    def check_flush_error_on_close(self, *args, **kwargs):
+        # Test that the file is closed despite failed flush
+        # and that flush() is called before file closed.
+        f = self.open(*args, **kwargs)
+        closed = []
         def bad_flush():
+            closed[:] = [f.closed]
             raise OSError()
         f.flush = bad_flush
         self.assertRaises(OSError, f.close) # exception not swallowed
         self.assertTrue(f.closed)
+        self.assertTrue(closed)      # flush() called
+        self.assertFalse(closed[0])  # flush() called before file closed
+        f.flush = lambda: None  # break reference loop
+
+    def test_flush_error_on_close(self):
+        # raw file
+        # Issue #5700: io.FileIO calls flush() after file closed
+        self.check_flush_error_on_close(support.TESTFN, 'wb', buffering=0)
+        fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+        self.check_flush_error_on_close(fd, 'wb', buffering=0)
+        fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+        self.check_flush_error_on_close(fd, 'wb', buffering=0, closefd=False)
+        os.close(fd)
+        # buffered io
+        self.check_flush_error_on_close(support.TESTFN, 'wb')
+        fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+        self.check_flush_error_on_close(fd, 'wb')
+        fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+        self.check_flush_error_on_close(fd, 'wb', closefd=False)
+        os.close(fd)
+        # text io
+        self.check_flush_error_on_close(support.TESTFN, 'w')
+        fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+        self.check_flush_error_on_close(fd, 'w')
+        fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+        self.check_flush_error_on_close(fd, 'w', closefd=False)
+        os.close(fd)
 
     def test_multi_close(self):
         f = self.open(support.TESTFN, "wb", buffering=0)
@@ -784,13 +815,22 @@
         self.assertEqual(repr(b), "<%s name=b'dummy'>" % clsname)
 
     def test_flush_error_on_close(self):
+        # Test that buffered file is closed despite failed flush
+        # and that flush() is called before file closed.
         raw = self.MockRawIO()
+        closed = []
         def bad_flush():
+            closed[:] = [b.closed, raw.closed]
             raise OSError()
         raw.flush = bad_flush
         b = self.tp(raw)
         self.assertRaises(OSError, b.close) # exception not swallowed
         self.assertTrue(b.closed)
+        self.assertTrue(raw.closed)
+        self.assertTrue(closed)      # flush() called
+        self.assertFalse(closed[0])  # flush() called before file closed
+        self.assertFalse(closed[1])
+        raw.flush = lambda: None  # break reference loop
 
     def test_close_error_on_close(self):
         raw = self.MockRawIO()
@@ -2679,12 +2719,21 @@
                 self.assertEqual(content.count("Thread%03d\n" % n), 1)
 
     def test_flush_error_on_close(self):
+        # Test that text file is closed despite failed flush
+        # and that flush() is called before file closed.
         txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
+        closed = []
         def bad_flush():
+            closed[:] = [txt.closed, txt.buffer.closed]
             raise OSError()
         txt.flush = bad_flush
         self.assertRaises(OSError, txt.close) # exception not swallowed
         self.assertTrue(txt.closed)
+        self.assertTrue(txt.buffer.closed)
+        self.assertTrue(closed)      # flush() called
+        self.assertFalse(closed[0])  # flush() called before file closed
+        self.assertFalse(closed[1])
+        txt.flush = lambda: None  # break reference loop
 
     def test_close_error_on_close(self):
         buffer = self.BytesIO(self.testdata)
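The extended checks assert that close() still marks the file as closed when flush() raises, and that flush() runs before the underlying file is closed. The behaviour in miniature, monkey-patching flush the same way the tests do:

    import io

    f = io.TextIOWrapper(io.BytesIO(), encoding='ascii')

    def bad_flush():
        raise OSError('flush failed')

    f.flush = bad_flush
    try:
        f.close()              # the OSError is not swallowed...
    except OSError:
        pass
    print(f.closed)            # ...but the wrapper is closed anyway -> True
    f.flush = lambda: None     # break the reference loop, as the tests do
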
diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py
index e985329..c217d36 100644
--- a/Lib/test/test_ipaddress.py
+++ b/Lib/test/test_ipaddress.py
@@ -11,6 +11,7 @@
 import operator
 import pickle
 import ipaddress
+import weakref
 
 
 class BaseTestCase(unittest.TestCase):
@@ -259,6 +260,9 @@
     def test_pickle(self):
         self.pickle_test('192.0.2.1')
 
+    def test_weakref(self):
+        weakref.ref(self.factory('192.0.2.1'))
+
 
 class AddressTestCase_v6(BaseTestCase, CommonTestMixin_v6):
     factory = ipaddress.IPv6Address
@@ -394,6 +398,9 @@
     def test_pickle(self):
         self.pickle_test('2001:db8::')
 
+    def test_weakref(self):
+        weakref.ref(self.factory('2001:db8::'))
+
 
 class NetmaskTestMixin_v4(CommonTestMixin_v4):
     """Input validation on interfaces and networks is very similar"""
diff --git a/Lib/test/test_linecache.py b/Lib/test/test_linecache.py
index 5fe0554..2fb8662 100644
--- a/Lib/test/test_linecache.py
+++ b/Lib/test/test_linecache.py
@@ -7,6 +7,7 @@
 
 
 FILENAME = linecache.__file__
+NONEXISTENT_FILENAME = FILENAME + '.missing'
 INVALID_NAME = '!@$)(!@#_1'
 EMPTY = ''
 TESTS = 'inspect_fodder inspect_fodder2 mapping_tests'
@@ -126,6 +127,49 @@
                 self.assertEqual(line, getline(source_name, index + 1))
                 source_list.append(line)
 
+    def test_lazycache_no_globals(self):
+        lines = linecache.getlines(FILENAME)
+        linecache.clearcache()
+        self.assertEqual(False, linecache.lazycache(FILENAME, None))
+        self.assertEqual(lines, linecache.getlines(FILENAME))
+
+    def test_lazycache_smoke(self):
+        lines = linecache.getlines(NONEXISTENT_FILENAME, globals())
+        linecache.clearcache()
+        self.assertEqual(
+            True, linecache.lazycache(NONEXISTENT_FILENAME, globals()))
+        self.assertEqual(1, len(linecache.cache[NONEXISTENT_FILENAME]))
+        # Note here that we're looking up a nonexistent filename with no
+        # globals: this would error if the lazy value wasn't resolved.
+        self.assertEqual(lines, linecache.getlines(NONEXISTENT_FILENAME))
+
+    def test_lazycache_provide_after_failed_lookup(self):
+        linecache.clearcache()
+        lines = linecache.getlines(NONEXISTENT_FILENAME, globals())
+        linecache.clearcache()
+        linecache.getlines(NONEXISTENT_FILENAME)
+        linecache.lazycache(NONEXISTENT_FILENAME, globals())
+        self.assertEqual(lines, linecache.updatecache(NONEXISTENT_FILENAME))
+
+    def test_lazycache_check(self):
+        linecache.clearcache()
+        linecache.lazycache(NONEXISTENT_FILENAME, globals())
+        linecache.checkcache()
+
+    def test_lazycache_bad_filename(self):
+        linecache.clearcache()
+        self.assertEqual(False, linecache.lazycache('', globals()))
+        self.assertEqual(False, linecache.lazycache('<foo>', globals()))
+
+    def test_lazycache_already_cached(self):
+        linecache.clearcache()
+        lines = linecache.getlines(NONEXISTENT_FILENAME, globals())
+        self.assertEqual(
+            False,
+            linecache.lazycache(NONEXISTENT_FILENAME, globals()))
+        self.assertEqual(4, len(linecache.cache[NONEXISTENT_FILENAME]))
+
+
 def test_main():
     support.run_unittest(LineCacheTests)
 
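These tests cover the new linecache.lazycache(filename, module_globals) API, which records just enough information (the loader taken from the module globals) to fetch the source later, on first use, rather than immediately. A rough usage sketch:

    import linecache

    linecache.clearcache()
    # Register a lazy entry; nothing is read yet.  Returns False if the name
    # is already cached or cannot be lazily loaded.
    registered = linecache.lazycache(linecache.__file__, vars(linecache))
    print(registered)                                       # True

    # The first real lookup resolves the entry through the loader.
    print(bool(linecache.getline(linecache.__file__, 1)))   # True
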
diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py
index 81fe57d..fae2c3d 100644
--- a/Lib/test/test_locale.py
+++ b/Lib/test/test_locale.py
@@ -511,7 +511,7 @@
             self.skipTest('test needs Turkish locale')
         loc = locale.getlocale(locale.LC_CTYPE)
         if verbose:
-            print('got locale %a' % (loc,))
+            print('testing with %a' % (loc,), end=' ', flush=True)
         locale.setlocale(locale.LC_CTYPE, loc)
         self.assertEqual(loc, locale.getlocale(locale.LC_CTYPE))
 
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index 9674322..c8b6a98 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -3006,6 +3006,25 @@
         self.assertTrue(handler.matches(levelno=logging.WARNING, message='1'))
         self.assertTrue(handler.matches(levelno=logging.ERROR, message='2'))
         self.assertTrue(handler.matches(levelno=logging.CRITICAL, message='3'))
+        handler.close()
+
+        # Now test with respect_handler_level set
+
+        handler = support.TestHandler(support.Matcher())
+        handler.setLevel(logging.CRITICAL)
+        listener = logging.handlers.QueueListener(self.queue, handler,
+                                                  respect_handler_level=True)
+        listener.start()
+        try:
+            self.que_logger.warning(self.next_message())
+            self.que_logger.error(self.next_message())
+            self.que_logger.critical(self.next_message())
+        finally:
+            listener.stop()
+        self.assertFalse(handler.matches(levelno=logging.WARNING, message='4'))
+        self.assertFalse(handler.matches(levelno=logging.ERROR, message='5'))
+        self.assertTrue(handler.matches(levelno=logging.CRITICAL, message='6'))
+
 
 ZERO = datetime.timedelta(0)
 
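The added assertions exercise the new respect_handler_level=True option of QueueListener: records below a handler's level are dropped by the listener instead of being forwarded unconditionally. A minimal setup (logger name chosen for illustration):

    import logging
    import logging.handlers
    import queue

    q = queue.Queue()
    handler = logging.StreamHandler()
    handler.setLevel(logging.ERROR)

    listener = logging.handlers.QueueListener(q, handler,
                                              respect_handler_level=True)
    listener.start()

    logger = logging.getLogger('demo')
    logger.addHandler(logging.handlers.QueueHandler(q))
    logger.warning('dropped: below the handler level')
    logger.error('delivered')

    listener.stop()
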
diff --git a/Lib/test/test_multibytecodec.py b/Lib/test/test_multibytecodec.py
index ce267dd..2929f98 100644
--- a/Lib/test/test_multibytecodec.py
+++ b/Lib/test/test_multibytecodec.py
@@ -44,6 +44,13 @@
         self.assertRaises(IndexError, dec,
                           b'apple\x92ham\x93spam', 'test.cjktest')
 
+    def test_errorcallback_custom_ignore(self):
+        # Issue #23215: MemoryError with custom error handlers and multibyte codecs
+        data = 100 * "\udc00"
+        codecs.register_error("test.ignore", codecs.ignore_errors)
+        for enc in ALL_CJKENCODINGS:
+            self.assertEqual(data.encode(enc, "test.ignore"), b'')
+
     def test_codingspec(self):
         try:
             for enc in ALL_CJKENCODINGS:
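The regression test for issue #23215 makes sure a custom error handler (here just codecs.ignore_errors registered under a different name) works with the CJK codecs instead of hitting a MemoryError. A standalone version of the same check:

    import codecs

    codecs.register_error('demo.ignore', codecs.ignore_errors)
    data = 100 * '\udc00'                        # lone surrogates: never encodable
    print(data.encode('euc-jp', 'demo.ignore'))  # b'' once the handler is honoured
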
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index e28a3db..f0b98e7 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -1075,7 +1075,7 @@
     def test_chown_without_permission(self):
         uid_1, uid_2 = all_users[:2]
         gid = os.stat(support.TESTFN).st_gid
-        with self.assertRaisesRegex(PermissionError, "Operation not permitted"):
+        with self.assertRaises(PermissionError):
             os.chown(support.TESTFN, uid_1, gid)
             os.chown(support.TESTFN, uid_2, gid)
 
@@ -1199,8 +1199,10 @@
         code = """if 1:
             import os
             import sys
+            import test.support
             os.urandom(4)
-            os.closerange(3, 256)
+            with test.support.SuppressCrashReport():
+                os.closerange(3, 256)
             sys.stdout.buffer.write(os.urandom(4))
             """
         rc, out, err = assert_python_ok('-Sc', code)
@@ -1214,16 +1216,18 @@
         code = """if 1:
             import os
             import sys
+            import test.support
             os.urandom(4)
-            for fd in range(3, 256):
-                try:
-                    os.close(fd)
-                except OSError:
-                    pass
-                else:
-                    # Found the urandom fd (XXX hopefully)
-                    break
-            os.closerange(3, 256)
+            with test.support.SuppressCrashReport():
+                for fd in range(3, 256):
+                    try:
+                        os.close(fd)
+                    except OSError:
+                        pass
+                    else:
+                        # Found the urandom fd (XXX hopefully)
+                        break
+                os.closerange(3, 256)
             with open({TESTFN!r}, 'rb') as f:
                 os.dup2(f.fileno(), fd)
                 sys.stdout.buffer.write(os.urandom(4))
@@ -2698,5 +2702,229 @@
         self.assertIn('walk', os.__all__)
 
 
+class TestScandir(unittest.TestCase):
+    def setUp(self):
+        self.path = os.path.realpath(support.TESTFN)
+        self.addCleanup(support.rmtree, self.path)
+        os.mkdir(self.path)
+
+    def create_file(self, name="file.txt"):
+        filename = os.path.join(self.path, name)
+        with open(filename, "wb") as fp:
+            fp.write(b'python')
+        return filename
+
+    def get_entries(self, names):
+        entries = dict((entry.name, entry)
+                       for entry in os.scandir(self.path))
+        self.assertEqual(sorted(entries.keys()), names)
+        return entries
+
+    def assert_stat_equal(self, stat1, stat2, skip_fields):
+        if skip_fields:
+            for attr in dir(stat1):
+                if not attr.startswith("st_"):
+                    continue
+                if attr in ("st_dev", "st_ino", "st_nlink"):
+                    continue
+                self.assertEqual(getattr(stat1, attr),
+                                 getattr(stat2, attr),
+                                 (stat1, stat2, attr))
+        else:
+            self.assertEqual(stat1, stat2)
+
+    def check_entry(self, entry, name, is_dir, is_file, is_symlink):
+        self.assertEqual(entry.name, name)
+        self.assertEqual(entry.path, os.path.join(self.path, name))
+        self.assertEqual(entry.inode(),
+                         os.stat(entry.path, follow_symlinks=False).st_ino)
+
+        entry_stat = os.stat(entry.path)
+        self.assertEqual(entry.is_dir(),
+                         stat.S_ISDIR(entry_stat.st_mode))
+        self.assertEqual(entry.is_file(),
+                         stat.S_ISREG(entry_stat.st_mode))
+        self.assertEqual(entry.is_symlink(),
+                         os.path.islink(entry.path))
+
+        entry_lstat = os.stat(entry.path, follow_symlinks=False)
+        self.assertEqual(entry.is_dir(follow_symlinks=False),
+                         stat.S_ISDIR(entry_lstat.st_mode))
+        self.assertEqual(entry.is_file(follow_symlinks=False),
+                         stat.S_ISREG(entry_lstat.st_mode))
+
+        self.assert_stat_equal(entry.stat(),
+                               entry_stat,
+                               os.name == 'nt' and not is_symlink)
+        self.assert_stat_equal(entry.stat(follow_symlinks=False),
+                               entry_lstat,
+                               os.name == 'nt')
+
+    def test_attributes(self):
+        link = hasattr(os, 'link')
+        symlink = support.can_symlink()
+
+        dirname = os.path.join(self.path, "dir")
+        os.mkdir(dirname)
+        filename = self.create_file("file.txt")
+        if link:
+            os.link(filename, os.path.join(self.path, "link_file.txt"))
+        if symlink:
+            os.symlink(dirname, os.path.join(self.path, "symlink_dir"),
+                       target_is_directory=True)
+            os.symlink(filename, os.path.join(self.path, "symlink_file.txt"))
+
+        names = ['dir', 'file.txt']
+        if link:
+            names.append('link_file.txt')
+        if symlink:
+            names.extend(('symlink_dir', 'symlink_file.txt'))
+        entries = self.get_entries(names)
+
+        entry = entries['dir']
+        self.check_entry(entry, 'dir', True, False, False)
+
+        entry = entries['file.txt']
+        self.check_entry(entry, 'file.txt', False, True, False)
+
+        if link:
+            entry = entries['link_file.txt']
+            self.check_entry(entry, 'link_file.txt', False, True, False)
+
+        if symlink:
+            entry = entries['symlink_dir']
+            self.check_entry(entry, 'symlink_dir', True, False, True)
+
+            entry = entries['symlink_file.txt']
+            self.check_entry(entry, 'symlink_file.txt', False, True, True)
+
+    def get_entry(self, name):
+        entries = list(os.scandir(self.path))
+        self.assertEqual(len(entries), 1)
+
+        entry = entries[0]
+        self.assertEqual(entry.name, name)
+        return entry
+
+    def create_file_entry(self):
+        filename = self.create_file()
+        return self.get_entry(os.path.basename(filename))
+
+    def test_current_directory(self):
+        filename = self.create_file()
+        old_dir = os.getcwd()
+        try:
+            os.chdir(self.path)
+
+            # call scandir() without an argument: it must list the contents
+            # of the current directory
+            entries = dict((entry.name, entry) for entry in os.scandir())
+            self.assertEqual(sorted(entries.keys()),
+                             [os.path.basename(filename)])
+        finally:
+            os.chdir(old_dir)
+
+    def test_repr(self):
+        entry = self.create_file_entry()
+        self.assertEqual(repr(entry), "<DirEntry 'file.txt'>")
+
+    def test_removed_dir(self):
+        path = os.path.join(self.path, 'dir')
+
+        os.mkdir(path)
+        entry = self.get_entry('dir')
+        os.rmdir(path)
+
+        # On POSIX, the is_dir() result depends on whether scandir() filled d_type
+        if os.name == 'nt':
+            self.assertTrue(entry.is_dir())
+        self.assertFalse(entry.is_file())
+        self.assertFalse(entry.is_symlink())
+        if os.name == 'nt':
+            self.assertRaises(FileNotFoundError, entry.inode)
+            # don't fail
+            entry.stat()
+            entry.stat(follow_symlinks=False)
+        else:
+            self.assertGreater(entry.inode(), 0)
+            self.assertRaises(FileNotFoundError, entry.stat)
+            self.assertRaises(FileNotFoundError, entry.stat, follow_symlinks=False)
+
+    def test_removed_file(self):
+        entry = self.create_file_entry()
+        os.unlink(entry.path)
+
+        self.assertFalse(entry.is_dir())
+        # On POSIX, the is_file() result depends on whether scandir() filled d_type
+        if os.name == 'nt':
+            self.assertTrue(entry.is_file())
+        self.assertFalse(entry.is_symlink())
+        if os.name == 'nt':
+            self.assertRaises(FileNotFoundError, entry.inode)
+            # don't fail
+            entry.stat()
+            entry.stat(follow_symlinks=False)
+        else:
+            self.assertGreater(entry.inode(), 0)
+            self.assertRaises(FileNotFoundError, entry.stat)
+            self.assertRaises(FileNotFoundError, entry.stat, follow_symlinks=False)
+
+    def test_broken_symlink(self):
+        if not support.can_symlink():
+            return self.skipTest('cannot create symbolic link')
+
+        filename = self.create_file("file.txt")
+        os.symlink(filename,
+                   os.path.join(self.path, "symlink.txt"))
+        entries = self.get_entries(['file.txt', 'symlink.txt'])
+        entry = entries['symlink.txt']
+        os.unlink(filename)
+
+        self.assertGreater(entry.inode(), 0)
+        self.assertFalse(entry.is_dir())
+        self.assertFalse(entry.is_file())  # broken symlink returns False
+        self.assertFalse(entry.is_dir(follow_symlinks=False))
+        self.assertFalse(entry.is_file(follow_symlinks=False))
+        self.assertTrue(entry.is_symlink())
+        self.assertRaises(FileNotFoundError, entry.stat)
+        # don't fail
+        entry.stat(follow_symlinks=False)
+
+    def test_bytes(self):
+        if os.name == "nt":
+            # On Windows, os.scandir(bytes) must raise an exception
+            self.assertRaises(TypeError, os.scandir, b'.')
+            return
+
+        self.create_file("file.txt")
+
+        path_bytes = os.fsencode(self.path)
+        entries = list(os.scandir(path_bytes))
+        self.assertEqual(len(entries), 1, entries)
+        entry = entries[0]
+
+        self.assertEqual(entry.name, b'file.txt')
+        self.assertEqual(entry.path,
+                         os.fsencode(os.path.join(self.path, 'file.txt')))
+
+    def test_empty_path(self):
+        self.assertRaises(FileNotFoundError, os.scandir, '')
+
+    def test_consume_iterator_twice(self):
+        self.create_file("file.txt")
+        iterator = os.scandir(self.path)
+
+        entries = list(iterator)
+        self.assertEqual(len(entries), 1, entries)
+
+        # check that consuming the iterator twice doesn't raise an exception
+        entries2 = list(iterator)
+        self.assertEqual(len(entries2), 0, entries2)
+
+    def test_bad_path_type(self):
+        for obj in [1234, 1.234, {}, []]:
+            self.assertRaises(TypeError, os.scandir, obj)
+
+
 if __name__ == "__main__":
     unittest.main()
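TestScandir covers os.scandir() and its DirEntry objects (name, path, inode(), is_dir()/is_file()/is_symlink(), stat()), which usually avoid an extra stat() call per entry. Basic usage looks like:

    import os

    for entry in os.scandir('.'):
        kind = 'dir' if entry.is_dir() else 'file'
        # entry.name is the bare name; entry.path joins it with the scanned path
        print('%-40s %s' % (entry.name, kind))
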
diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py
index f4ee519..1c53ab7 100644
--- a/Lib/test/test_pathlib.py
+++ b/Lib/test/test_pathlib.py
@@ -102,31 +102,35 @@
         check = self._check_parse_parts
         # First part is anchored
         check(['c:'],                   ('c:', '', ['c:']))
-        check(['c:\\'],                 ('c:', '\\', ['c:\\']))
-        check(['\\'],                   ('', '\\', ['\\']))
+        check(['c:/'],                  ('c:', '\\', ['c:\\']))
+        check(['/'],                    ('', '\\', ['\\']))
         check(['c:a'],                  ('c:', '', ['c:', 'a']))
-        check(['c:\\a'],                ('c:', '\\', ['c:\\', 'a']))
-        check(['\\a'],                  ('', '\\', ['\\', 'a']))
+        check(['c:/a'],                 ('c:', '\\', ['c:\\', 'a']))
+        check(['/a'],                   ('', '\\', ['\\', 'a']))
         # UNC paths
-        check(['\\\\a\\b'],             ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
-        check(['\\\\a\\b\\'],           ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
-        check(['\\\\a\\b\\c'],          ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
+        check(['//a/b'],                ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
+        check(['//a/b/'],               ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
+        check(['//a/b/c'],              ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
         # Second part is anchored, so that the first part is ignored
         check(['a', 'Z:b', 'c'],        ('Z:', '', ['Z:', 'b', 'c']))
-        check(['a', 'Z:\\b', 'c'],      ('Z:', '\\', ['Z:\\', 'b', 'c']))
-        check(['a', '\\b', 'c'],        ('', '\\', ['\\', 'b', 'c']))
+        check(['a', 'Z:/b', 'c'],       ('Z:', '\\', ['Z:\\', 'b', 'c']))
         # UNC paths
-        check(['a', '\\\\b\\c', 'd'],   ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
+        check(['a', '//b/c', 'd'],      ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
         # Collapsing and stripping excess slashes
-        check(['a', 'Z:\\\\b\\\\c\\', 'd\\'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
+        check(['a', 'Z://b//c/', 'd/'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
         # UNC paths
-        check(['a', '\\\\b\\c\\\\', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
+        check(['a', '//b/c//', 'd'],    ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
         # Extended paths
-        check(['\\\\?\\c:\\'],          ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
-        check(['\\\\?\\c:\\a'],         ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
+        check(['//?/c:/'],              ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
+        check(['//?/c:/a'],             ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
+        check(['//?/c:/a', '/b'],       ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'b']))
         # Extended UNC paths (format is "\\?\UNC\server\share")
-        check(['\\\\?\\UNC\\b\\c'],     ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
-        check(['\\\\?\\UNC\\b\\c\\d'],  ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
+        check(['//?/UNC/b/c'],          ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
+        check(['//?/UNC/b/c/d'],        ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
+        # Second part has a root but not drive
+        check(['a', '/b', 'c'],         ('', '\\', ['\\', 'b', 'c']))
+        check(['Z:/a', '/b', 'c'],      ('Z:', '\\', ['Z:\\', 'b', 'c']))
+        check(['//?/Z:/a', '/b', 'c'],  ('\\\\?\\Z:', '\\', ['\\\\?\\Z:\\', 'b', 'c']))
 
     def test_splitroot(self):
         f = self.flavour.splitroot
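
A quick sketch of the forward-slash handling the rewritten checks above rely on (the path values are made up):

    from pathlib import PureWindowsPath

    # Forward slashes are accepted and normalised to backslashes.
    p = PureWindowsPath('c:/Users/guido')
    assert p == PureWindowsPath('c:\\Users\\guido')
    assert (p.drive, p.root) == ('c:', '\\')
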
diff --git a/Lib/test/test_pprint.py b/Lib/test/test_pprint.py
index ad6a7a1..c056880 100644
--- a/Lib/test/test_pprint.py
+++ b/Lib/test/test_pprint.py
@@ -192,10 +192,52 @@
         o = [o1, o2]
         expected = """\
 [   [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+    {'first': 1, 'second': 2, 'third': 3}]"""
+        self.assertEqual(pprint.pformat(o, indent=4, width=42), expected)
+        expected = """\
+[   [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
     {   'first': 1,
         'second': 2,
         'third': 3}]"""
-        self.assertEqual(pprint.pformat(o, indent=4, width=42), expected)
+        self.assertEqual(pprint.pformat(o, indent=4, width=41), expected)
+
+    def test_width(self):
+        expected = """\
+[[[[[[1, 2, 3],
+     '1 2']]]],
+ {1: [1, 2, 3],
+  2: [12, 34]},
+ 'abc def ghi',
+ ('ab cd ef',),
+ set2({1, 23}),
+ [[[[[1, 2, 3],
+     '1 2']]]]]"""
+        o = eval(expected)
+        self.assertEqual(pprint.pformat(o, width=15), expected)
+        self.assertEqual(pprint.pformat(o, width=16), expected)
+        self.assertEqual(pprint.pformat(o, width=25), expected)
+        self.assertEqual(pprint.pformat(o, width=14), """\
+[[[[[[1,
+      2,
+      3],
+     '1 '
+     '2']]]],
+ {1: [1,
+      2,
+      3],
+  2: [12,
+      34]},
+ 'abc def '
+ 'ghi',
+ ('ab cd '
+  'ef',),
+ set2({1,
+       23}),
+ [[[[[1,
+      2,
+      3],
+     '1 '
+     '2']]]]]""")
 
     def test_sorted_dict(self):
         # Starting in Python 2.5, pprint sorts dict displays by key regardless
@@ -535,13 +577,12 @@
     def test_str_wrap(self):
         # pprint tries to wrap strings intelligently
         fox = 'the quick brown fox jumped over a lazy dog'
-        self.assertEqual(pprint.pformat(fox, width=20), """\
-('the quick '
- 'brown fox '
- 'jumped over a '
- 'lazy dog')""")
+        self.assertEqual(pprint.pformat(fox, width=19), """\
+('the quick brown '
+ 'fox jumped over '
+ 'a lazy dog')""")
         self.assertEqual(pprint.pformat({'a': 1, 'b': fox, 'c': 2},
-                                        width=26), """\
+                                        width=25), """\
 {'a': 1,
  'b': 'the quick brown '
       'fox jumped over '
@@ -553,12 +594,34 @@
         # - non-ASCII is allowed
         # - an apostrophe doesn't disrupt the pprint
         special = "Portons dix bons \"whiskys\"\nà l'avocat goujat\t qui fumait au zoo"
-        self.assertEqual(pprint.pformat(special, width=21), """\
-('Portons dix '
- 'bons "whiskys"\\n'
+        self.assertEqual(pprint.pformat(special, width=68), repr(special))
+        self.assertEqual(pprint.pformat(special, width=31), """\
+('Portons dix bons "whiskys"\\n'
+ "à l'avocat goujat\\t qui "
+ 'fumait au zoo')""")
+        self.assertEqual(pprint.pformat(special, width=20), """\
+('Portons dix bons '
+ '"whiskys"\\n'
  "à l'avocat "
  'goujat\\t qui '
  'fumait au zoo')""")
+        self.assertEqual(pprint.pformat([[[[[special]]]]], width=35), """\
+[[[[['Portons dix bons "whiskys"\\n'
+     "à l'avocat goujat\\t qui "
+     'fumait au zoo']]]]]""")
+        self.assertEqual(pprint.pformat([[[[[special]]]]], width=25), """\
+[[[[['Portons dix bons '
+     '"whiskys"\\n'
+     "à l'avocat "
+     'goujat\\t qui '
+     'fumait au zoo']]]]]""")
+        self.assertEqual(pprint.pformat([[[[[special]]]]], width=23), """\
+[[[[['Portons dix '
+     'bons "whiskys"\\n'
+     "à l'avocat "
+     'goujat\\t qui '
+     'fumait au '
+     'zoo']]]]]""")
         # An unwrappable string is formatted as its repr
         unwrappable = "x" * 100
         self.assertEqual(pprint.pformat(unwrappable, width=80), repr(unwrappable))
@@ -581,7 +644,19 @@
   14, 15],
  [], [0], [0, 1], [0, 1, 2], [0, 1, 2, 3],
  [0, 1, 2, 3, 4]]"""
-        self.assertEqual(pprint.pformat(o, width=48, compact=True), expected)
+        self.assertEqual(pprint.pformat(o, width=47, compact=True), expected)
+
+    def test_compact_width(self):
+        levels = 20
+        number = 10
+        o = [0] * number
+        for i in range(levels - 1):
+            o = [o]
+        for w in range(levels * 2 + 1, levels + 3 * number - 1):
+            lines = pprint.pformat(o, width=w, compact=True).splitlines()
+            maxwidth = max(map(len, lines))
+            self.assertLessEqual(maxwidth, w)
+            self.assertGreater(maxwidth, w - 3)
 
 
 class DottedPrettyPrinter(pprint.PrettyPrinter):
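
A minimal sketch of the width behaviour the adjusted expectations above pin down (the sample data is made up):

    import pprint

    data = [[list(range(10)), '1 2'], {'first': 1, 'second': 2, 'third': 3}]
    # The requested width is an upper bound on line length whenever
    # wrapping is possible.
    print(pprint.pformat(data, width=25, compact=True))
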
diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py
index 6742444..50ed4d5 100644
--- a/Lib/test/test_pydoc.py
+++ b/Lib/test/test_pydoc.py
@@ -2,12 +2,15 @@
 import sys
 import builtins
 import contextlib
+import importlib.util
 import inspect
 import pydoc
+import py_compile
 import keyword
 import _pickle
 import pkgutil
 import re
+import stat
 import string
 import test.support
 import time
@@ -31,6 +34,10 @@
 except ImportError:
     threading = None
 
+class nonascii:
+    'Це не латиниця'
+    pass
+
 if test.support.HAVE_DOCSTRINGS:
     expected_data_docstrings = (
         'dictionary for instance variables (if defined)',
@@ -249,7 +256,10 @@
                                       for s in expected_data_docstrings)
 
 # output pattern for missing module
-missing_pattern = "no Python documentation found for '%s'"
+missing_pattern = '''\
+No Python documentation found for %r.
+Use help() to get the interactive help utility.
+Use help(str) for help on the str class.'''.replace('\n', os.linesep)
 
 # output pattern for module with bad imports
 badimport_pattern = "problem in %s - ImportError: No module named %r"
@@ -457,6 +467,13 @@
         self.assertEqual(expected, result,
             "documentation for missing module found")
 
+    @unittest.skipIf(sys.flags.optimize >= 2,
+                     'Docstrings are omitted with -OO and above')
+    def test_not_ascii(self):
+        result = run_pydoc('test.test_pydoc.nonascii', PYTHONIOENCODING='ascii')
+        encoded = nonascii.__doc__.encode('ascii', 'backslashreplace')
+        self.assertIn(encoded, result)
+
     def test_input_strip(self):
         missing_module = " test.i_am_not_here "
         result = str(run_pydoc(missing_module), 'ascii')
@@ -536,6 +553,8 @@
             synopsis = pydoc.synopsis(TESTFN, {})
             self.assertEqual(synopsis, 'line 1: h\xe9')
 
+    @unittest.skipIf(sys.flags.optimize >= 2,
+                     'Docstrings are omitted with -OO and above')
     def test_synopsis_sourceless(self):
         expected = os.__doc__.splitlines()[0]
         filename = os.__cached__
@@ -543,6 +562,18 @@
 
         self.assertEqual(synopsis, expected)
 
+    def test_synopsis_sourceless_empty_doc(self):
+        with test.support.temp_cwd() as test_dir:
+            init_path = os.path.join(test_dir, 'foomod42.py')
+            cached_path = importlib.util.cache_from_source(init_path)
+            with open(init_path, 'w') as fobj:
+                fobj.write("foo = 1")
+            py_compile.compile(init_path)
+            synopsis = pydoc.synopsis(init_path, {})
+            self.assertIsNone(synopsis)
+            synopsis_cached = pydoc.synopsis(cached_path, {})
+            self.assertIsNone(synopsis_cached)
+
     def test_splitdoc_with_description(self):
         example_string = "I Am A Doc\n\n\nHere is my description"
         self.assertEqual(pydoc.splitdoc(example_string),
@@ -598,6 +629,7 @@
     def setUp(self):
         self.test_dir = os.mkdir(TESTFN)
         self.addCleanup(rmtree, TESTFN)
+        importlib.invalidate_caches()
 
     def test_badimport(self):
         # This tests the fix for issue 5230, where if pydoc found the module
@@ -656,6 +688,22 @@
         self.assertEqual(out.getvalue(), '')
         self.assertEqual(err.getvalue(), '')
 
+    def test_apropos_empty_doc(self):
+        pkgdir = os.path.join(TESTFN, 'walkpkg')
+        os.mkdir(pkgdir)
+        self.addCleanup(rmtree, pkgdir)
+        init_path = os.path.join(pkgdir, '__init__.py')
+        with open(init_path, 'w') as fobj:
+            fobj.write("foo = 1")
+        current_mode = stat.S_IMODE(os.stat(pkgdir).st_mode)
+        try:
+            os.chmod(pkgdir, current_mode & ~stat.S_IEXEC)
+            with self.restrict_walk_packages(path=[TESTFN]), captured_stdout() as stdout:
+                pydoc.apropos('')
+            self.assertIn('walkpkg', stdout.getvalue())
+        finally:
+            os.chmod(pkgdir, current_mode)
+
     @unittest.skip('causes undesireable side-effects (#20128)')
     def test_modules(self):
         # See Helper.listmodules().
diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py
index 2fb4764..0fbf8c5 100644
--- a/Lib/test/test_re.py
+++ b/Lib/test/test_re.py
@@ -604,7 +604,7 @@
         self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
                          "a\n\nb")
 
-    def test_non_consuming(self):
+    def test_lookahead(self):
         self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
         self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
         self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
@@ -618,6 +618,46 @@
         self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
         self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
 
+        # Group reference.
+        self.assertTrue(re.match(r'(a)b(?=\1)a', 'aba'))
+        self.assertIsNone(re.match(r'(a)b(?=\1)c', 'abac'))
+        # Conditional group reference.
+        self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(2)x|c))c', 'abc'))
+        self.assertIsNone(re.match(r'(?:(a)|(x))b(?=(?(2)c|x))c', 'abc'))
+        self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(2)x|c))c', 'abc'))
+        self.assertIsNone(re.match(r'(?:(a)|(x))b(?=(?(1)b|x))c', 'abc'))
+        self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(1)c|x))c', 'abc'))
+        # Group used before defined.
+        self.assertTrue(re.match(r'(a)b(?=(?(2)x|c))(c)', 'abc'))
+        self.assertIsNone(re.match(r'(a)b(?=(?(2)b|x))(c)', 'abc'))
+        self.assertTrue(re.match(r'(a)b(?=(?(1)c|x))(c)', 'abc'))
+
+    def test_lookbehind(self):
+        self.assertTrue(re.match(r'ab(?<=b)c', 'abc'))
+        self.assertIsNone(re.match(r'ab(?<=c)c', 'abc'))
+        self.assertIsNone(re.match(r'ab(?<!b)c', 'abc'))
+        self.assertTrue(re.match(r'ab(?<!c)c', 'abc'))
+        # Group reference.
+        self.assertTrue(re.match(r'(a)a(?<=\1)c', 'aac'))
+        self.assertIsNone(re.match(r'(a)b(?<=\1)a', 'abaa'))
+        self.assertIsNone(re.match(r'(a)a(?<!\1)c', 'aac'))
+        self.assertTrue(re.match(r'(a)b(?<!\1)a', 'abaa'))
+        # Conditional group reference.
+        self.assertIsNone(re.match(r'(?:(a)|(x))b(?<=(?(2)x|c))c', 'abc'))
+        self.assertIsNone(re.match(r'(?:(a)|(x))b(?<=(?(2)b|x))c', 'abc'))
+        self.assertTrue(re.match(r'(?:(a)|(x))b(?<=(?(2)x|b))c', 'abc'))
+        self.assertIsNone(re.match(r'(?:(a)|(x))b(?<=(?(1)c|x))c', 'abc'))
+        self.assertTrue(re.match(r'(?:(a)|(x))b(?<=(?(1)b|x))c', 'abc'))
+        # Group used before defined.
+        self.assertRaises(re.error, re.compile, r'(a)b(?<=(?(2)b|x))(c)')
+        self.assertIsNone(re.match(r'(a)b(?<=(?(1)c|x))(c)', 'abc'))
+        self.assertTrue(re.match(r'(a)b(?<=(?(1)b|x))(c)', 'abc'))
+        # Group defined in the same lookbehind pattern
+        self.assertRaises(re.error, re.compile, r'(a)b(?<=(.)\2)(c)')
+        self.assertRaises(re.error, re.compile, r'(a)b(?<=(?P<a>.)(?P=a))(c)')
+        self.assertRaises(re.error, re.compile, r'(a)b(?<=(a)(?(2)b|x))(c)')
+        self.assertRaises(re.error, re.compile, r'(a)b(?<=(.)(?<=\2))(c)')
+
     def test_ignore_case(self):
         self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
         self.assertEqual(re.match(b"abc", b"ABC", re.I).group(0), b"ABC")
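
Two of the behaviours the new assertions pin down, shown standalone:

    import re

    # A lookbehind may reference a group defined earlier in the pattern...
    assert re.match(r'(a)a(?<=\1)c', 'aac')
    # ...but a group defined and referenced inside the same lookbehind is
    # rejected at compile time.
    try:
        re.compile(r'(a)b(?<=(.)\2)(c)')
    except re.error:
        pass
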
diff --git a/Lib/test/test_smtpnet.py b/Lib/test/test_smtpnet.py
index 03bf93b..15654f2 100644
--- a/Lib/test/test_smtpnet.py
+++ b/Lib/test/test_smtpnet.py
@@ -21,7 +21,7 @@
 
 class SmtpTest(unittest.TestCase):
     testServer = 'smtp.gmail.com'
-    remotePort = 25
+    remotePort = 587
 
     def test_connect_starttls(self):
         support.get_attribute(smtplib, 'SMTP_SSL')
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 2f2e739..ea619fd 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -383,6 +383,8 @@
             self.assertRaises(ssl.CertificateError,
                               ssl.match_hostname, cert, hostname)
 
+        # -- Hostname matching --
+
         cert = {'subject': ((('commonName', 'example.com'),),)}
         ok(cert, 'example.com')
         ok(cert, 'ExAmple.cOm')
@@ -468,6 +470,28 @@
         # Only commonName is considered
         fail(cert, 'California')
 
+        # -- IPv4 matching --
+        cert = {'subject': ((('commonName', 'example.com'),),),
+                'subjectAltName': (('DNS', 'example.com'),
+                                   ('IP Address', '10.11.12.13'),
+                                   ('IP Address', '14.15.16.17'))}
+        ok(cert, '10.11.12.13')
+        ok(cert, '14.15.16.17')
+        fail(cert, '14.15.16.18')
+        fail(cert, 'example.net')
+
+        # -- IPv6 matching --
+        cert = {'subject': ((('commonName', 'example.com'),),),
+                'subjectAltName': (('DNS', 'example.com'),
+                                   ('IP Address', '2001:0:0:0:0:0:0:CAFE\n'),
+                                   ('IP Address', '2003:0:0:0:0:0:0:BABA\n'))}
+        ok(cert, '2001::cafe')
+        ok(cert, '2003::baba')
+        fail(cert, '2003::bebe')
+        fail(cert, 'example.net')
+
+        # -- Miscellaneous --
+
         # Neither commonName nor subjectAltName
         cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT',
                 'subject': ((('countryName', 'US'),),
@@ -794,8 +818,9 @@
                          "verify_flags need OpenSSL > 0.9.8")
     def test_verify_flags(self):
         ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
-        # default value by OpenSSL
-        self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT)
+        # default value
+        tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0)
+        self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT | tf)
         ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF
         self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_LEAF)
         ctx.verify_flags = ssl.VERIFY_CRL_CHECK_CHAIN
@@ -2259,7 +2284,8 @@
             context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
             context.verify_mode = ssl.CERT_REQUIRED
             context.load_verify_locations(SIGNING_CA)
-            self.assertEqual(context.verify_flags, ssl.VERIFY_DEFAULT)
+            tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0)
+            self.assertEqual(context.verify_flags, ssl.VERIFY_DEFAULT | tf)
 
             # VERIFY_DEFAULT should pass
             server = ThreadedEchoServer(context=server_context, chatty=True)
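
A short sketch of the IP-address matching covered above (the certificate dict shape follows the tests; the addresses are made up):

    import ssl

    cert = {'subject': ((('commonName', 'example.com'),),),
            'subjectAltName': (('DNS', 'example.com'),
                               ('IP Address', '10.11.12.13'))}
    ssl.match_hostname(cert, '10.11.12.13')    # matches
    ssl.match_hostname(cert, 'example.com')    # matches
    # ssl.match_hostname(cert, '10.11.12.14')  # would raise CertificateError
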
diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py
index f1da21e..758a481 100644
--- a/Lib/test/test_statistics.py
+++ b/Lib/test/test_statistics.py
@@ -9,7 +9,6 @@
 import math
 import random
 import sys
-import types
 import unittest
 
 from decimal import Decimal
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index 3d34859..e25cccd 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -1922,7 +1922,7 @@
         open_fds = set(fds)
         # add a bunch more fds
         for _ in range(9):
-            fd = os.open("/dev/null", os.O_RDONLY)
+            fd = os.open(os.devnull, os.O_RDONLY)
             self.addCleanup(os.close, fd)
             open_fds.add(fd)
 
@@ -1984,7 +1984,7 @@
         open_fds = set()
         # Add a bunch more fds to pass down.
         for _ in range(40):
-            fd = os.open("/dev/null", os.O_RDONLY)
+            fd = os.open(os.devnull, os.O_RDONLY)
             open_fds.add(fd)
 
         # Leave a two pairs of low ones available for use by the
@@ -2502,6 +2502,22 @@
                                   stderr=subprocess.PIPE) as proc:
                 pass
 
+    def test_broken_pipe_cleanup(self):
+        """Broken pipe error should not prevent wait() (Issue 21619)"""
+        proc = subprocess.Popen([sys.executable, '-c', 'pass'],
+                                stdin=subprocess.PIPE,
+                                bufsize=support.PIPE_MAX_SIZE*2)
+        proc = proc.__enter__()
+        # Prepare to send enough data to overflow any OS pipe buffering and
+        # guarantee a broken pipe error. The data is held in the
+        # BufferedWriter buffer until the pipe is closed.
+        proc.stdin.write(b'x' * support.PIPE_MAX_SIZE)
+        self.assertIsNone(proc.returncode)
+        # EPIPE expected under POSIX; EINVAL under Windows
+        self.assertRaises(OSError, proc.__exit__, None, None, None)
+        self.assertEqual(proc.returncode, 0)
+        self.assertTrue(proc.stdin.closed)
+
 
 def test_main():
     unit_tests = (ProcessTestCase,
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 810b76b..01d1a92 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -1428,6 +1428,88 @@
                    ("longlnk/" * 127) + "longlink_")
 
 
+class CreateTest(TarTest, unittest.TestCase):
+
+    prefix = "x:"
+
+    file_path = os.path.join(TEMPDIR, "spameggs42")
+
+    def setUp(self):
+        support.unlink(tmpname)
+
+    @classmethod
+    def setUpClass(cls):
+        with open(cls.file_path, "wb") as fobj:
+            fobj.write(b"aaa")
+
+    @classmethod
+    def tearDownClass(cls):
+        support.unlink(cls.file_path)
+
+    def test_create(self):
+        with tarfile.open(tmpname, self.mode) as tobj:
+            tobj.add(self.file_path)
+
+        with self.taropen(tmpname) as tobj:
+            names = tobj.getnames()
+        self.assertEqual(len(names), 1)
+        self.assertIn('spameggs42', names[0])
+
+    def test_create_existing(self):
+        with tarfile.open(tmpname, self.mode) as tobj:
+            tobj.add(self.file_path)
+
+        with self.assertRaises(FileExistsError):
+            tobj = tarfile.open(tmpname, self.mode)
+
+        with self.taropen(tmpname) as tobj:
+            names = tobj.getnames()
+        self.assertEqual(len(names), 1)
+        self.assertIn('spameggs42', names[0])
+
+    def test_create_taropen(self):
+        with self.taropen(tmpname, "x") as tobj:
+            tobj.add(self.file_path)
+
+        with self.taropen(tmpname) as tobj:
+            names = tobj.getnames()
+        self.assertEqual(len(names), 1)
+        self.assertIn('spameggs42', names[0])
+
+    def test_create_existing_taropen(self):
+        with self.taropen(tmpname, "x") as tobj:
+            tobj.add(self.file_path)
+
+        with self.assertRaises(FileExistsError):
+            with self.taropen(tmpname, "x"):
+                pass
+
+        with self.taropen(tmpname) as tobj:
+            names = tobj.getnames()
+        self.assertEqual(len(names), 1)
+        self.assertIn("spameggs42", names[0])
+
+
+class GzipCreateTest(GzipTest, CreateTest):
+    pass
+
+
+class Bz2CreateTest(Bz2Test, CreateTest):
+    pass
+
+
+class LzmaCreateTest(LzmaTest, CreateTest):
+    pass
+
+
+class CreateWithXModeTest(CreateTest):
+
+    prefix = "x"
+
+    test_create_taropen = None
+    test_create_existing_taropen = None
+
+
 @unittest.skipUnless(hasattr(os, "link"), "Missing hardlink implementation")
 class HardlinkTest(unittest.TestCase):
     # Test the creation of LNKTYPE (hardlink) members in an archive.
@@ -2006,6 +2088,21 @@
         finally:
             support.unlink(tar_name)
 
+    def test_create_command_compressed(self):
+        files = [support.findfile('tokenize_tests.txt'),
+                 support.findfile('tokenize_tests-no-coding-cookie-'
+                                  'and-utf8-bom-sig-only.txt')]
+        for filetype in (GzipTest, Bz2Test, LzmaTest):
+            if not filetype.open:
+                continue
+            try:
+                tar_name = tmpname + '.' + filetype.suffix
+                out = self.tarfilecmd('-c', tar_name, *files)
+                with filetype.taropen(tar_name) as tar:
+                    tar.getmembers()
+            finally:
+                support.unlink(tar_name)
+
     def test_extract_command(self):
         self.make_simple_tarfile(tmpname)
         for opt in '-e', '--extract':
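
Roughly how the exclusive-creation mode covered by the CreateTest classes above is used (the archive and file names are hypothetical):

    import tarfile

    # "x" creates the archive and refuses to overwrite an existing file.
    with tarfile.open('backup.tar.gz', 'x:gz') as tar:
        tar.add('spameggs42')
    # Opening the same name in "x" mode again raises FileExistsError.
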
diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index 6bd6fa6..d9b73c1 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -1,6 +1,8 @@
 """Test cases for traceback module"""
 
+from collections import namedtuple
 from io import StringIO
+import linecache
 import sys
 import unittest
 import re
@@ -12,6 +14,11 @@
 import traceback
 
 
+test_code = namedtuple('code', ['co_filename', 'co_name'])
+test_frame = namedtuple('frame', ['f_code', 'f_globals', 'f_locals'])
+test_tb = namedtuple('tb', ['tb_frame', 'tb_lineno', 'tb_next'])
+
+
 class SyntaxTracebackCases(unittest.TestCase):
     # For now, a very minimal set of tests.  I want to be sure that
     # formatting of SyntaxErrors works based on changes for 2.1.
@@ -477,6 +484,246 @@
         self.assertEqual(len(inner_frame.f_locals), 0)
 
 
+class TestFrame(unittest.TestCase):
+
+    def test_basics(self):
+        linecache.clearcache()
+        linecache.lazycache("f", globals())
+        f = traceback.FrameSummary("f", 1, "dummy")
+        self.assertEqual(
+            ("f", 1, "dummy", '"""Test cases for traceback module"""'),
+            tuple(f))
+        self.assertEqual(None, f.locals)
+
+    def test_lazy_lines(self):
+        linecache.clearcache()
+        f = traceback.FrameSummary("f", 1, "dummy", lookup_line=False)
+        self.assertEqual(None, f._line)
+        linecache.lazycache("f", globals())
+        self.assertEqual(
+            '"""Test cases for traceback module"""',
+            f.line)
+
+    def test_explicit_line(self):
+        f = traceback.FrameSummary("f", 1, "dummy", line="line")
+        self.assertEqual("line", f.line)
+
+
+class TestStack(unittest.TestCase):
+
+    def test_walk_stack(self):
+        s = list(traceback.walk_stack(None))
+        self.assertGreater(len(s), 10)
+
+    def test_walk_tb(self):
+        try:
+            1/0
+        except Exception:
+            _, _, tb = sys.exc_info()
+        s = list(traceback.walk_tb(tb))
+        self.assertEqual(len(s), 1)
+
+    def test_extract_stack(self):
+        s = traceback.StackSummary.extract(traceback.walk_stack(None))
+        self.assertIsInstance(s, traceback.StackSummary)
+
+    def test_extract_stack_limit(self):
+        s = traceback.StackSummary.extract(traceback.walk_stack(None), limit=5)
+        self.assertEqual(len(s), 5)
+
+    def test_extract_stack_lookup_lines(self):
+        linecache.clearcache()
+        linecache.updatecache('/foo.py', globals())
+        c = test_code('/foo.py', 'method')
+        f = test_frame(c, None, None)
+        s = traceback.StackSummary.extract(iter([(f, 6)]), lookup_lines=True)
+        linecache.clearcache()
+        self.assertEqual(s[0].line, "import sys")
+
+    def test_extract_stackup_deferred_lookup_lines(self):
+        linecache.clearcache()
+        c = test_code('/foo.py', 'method')
+        f = test_frame(c, None, None)
+        s = traceback.StackSummary.extract(iter([(f, 6)]), lookup_lines=False)
+        self.assertEqual({}, linecache.cache)
+        linecache.updatecache('/foo.py', globals())
+        self.assertEqual(s[0].line, "import sys")
+
+    def test_from_list(self):
+        s = traceback.StackSummary.from_list([('foo.py', 1, 'fred', 'line')])
+        self.assertEqual(
+            ['  File "foo.py", line 1, in fred\n    line\n'],
+            s.format())
+
+    def test_format_smoke(self):
+        # For detailed tests see the format_list tests, which consume the same
+        # code.
+        s = traceback.StackSummary.from_list([('foo.py', 1, 'fred', 'line')])
+        self.assertEqual(
+            ['  File "foo.py", line 1, in fred\n    line\n'],
+            s.format())
+
+    def test_locals(self):
+        linecache.updatecache('/foo.py', globals())
+        c = test_code('/foo.py', 'method')
+        f = test_frame(c, globals(), {'something': 1})
+        s = traceback.StackSummary.extract(iter([(f, 6)]), capture_locals=True)
+        self.assertEqual(s[0].locals, {'something': '1'})
+
+    def test_no_locals(self):
+        linecache.updatecache('/foo.py', globals())
+        c = test_code('/foo.py', 'method')
+        f = test_frame(c, globals(), {'something': 1})
+        s = traceback.StackSummary.extract(iter([(f, 6)]))
+        self.assertEqual(s[0].locals, None)
+
+    def test_format_locals(self):
+        def some_inner(k, v):
+            a = 1
+            b = 2
+            return traceback.StackSummary.extract(
+                traceback.walk_stack(None), capture_locals=True, limit=1)
+        s = some_inner(3, 4)
+        self.assertEqual(
+            ['  File "' + __file__ + '", line 585, '
+             'in some_inner\n'
+             '    traceback.walk_stack(None), capture_locals=True, limit=1)\n'
+             '    a = 1\n'
+             '    b = 2\n'
+             '    k = 3\n'
+             '    v = 4\n'
+            ], s.format())
+
+
+
+class TestTracebackException(unittest.TestCase):
+
+    def test_smoke(self):
+        try:
+            1/0
+        except Exception:
+            exc_info = sys.exc_info()
+            exc = traceback.TracebackException(*exc_info)
+            expected_stack = traceback.StackSummary.extract(
+                traceback.walk_tb(exc_info[2]))
+        self.assertEqual(None, exc.__cause__)
+        self.assertEqual(None, exc.__context__)
+        self.assertEqual(False, exc.__suppress_context__)
+        self.assertEqual(expected_stack, exc.stack)
+        self.assertEqual(exc_info[0], exc.exc_type)
+        self.assertEqual(str(exc_info[1]), str(exc))
+
+    def test_from_exception(self):
+        # Check all the parameters are accepted.
+        def foo():
+            1/0
+        try:
+            foo()
+        except Exception as e:
+            exc_info = sys.exc_info()
+            self.expected_stack = traceback.StackSummary.extract(
+                traceback.walk_tb(exc_info[2]), limit=1, lookup_lines=False,
+                capture_locals=True)
+            self.exc = traceback.TracebackException.from_exception(
+                e, limit=1, lookup_lines=False, capture_locals=True)
+        expected_stack = self.expected_stack
+        exc = self.exc
+        self.assertEqual(None, exc.__cause__)
+        self.assertEqual(None, exc.__context__)
+        self.assertEqual(False, exc.__suppress_context__)
+        self.assertEqual(expected_stack, exc.stack)
+        self.assertEqual(exc_info[0], exc.exc_type)
+        self.assertEqual(str(exc_info[1]), str(exc))
+
+    def test_cause(self):
+        try:
+            try:
+                1/0
+            finally:
+                exc_info_context = sys.exc_info()
+                exc_context = traceback.TracebackException(*exc_info_context)
+                cause = Exception("cause")
+                raise Exception("uh oh") from cause
+        except Exception:
+            exc_info = sys.exc_info()
+            exc = traceback.TracebackException(*exc_info)
+            expected_stack = traceback.StackSummary.extract(
+                traceback.walk_tb(exc_info[2]))
+        exc_cause = traceback.TracebackException(Exception, cause, None)
+        self.assertEqual(exc_cause, exc.__cause__)
+        self.assertEqual(exc_context, exc.__context__)
+        self.assertEqual(True, exc.__suppress_context__)
+        self.assertEqual(expected_stack, exc.stack)
+        self.assertEqual(exc_info[0], exc.exc_type)
+        self.assertEqual(str(exc_info[1]), str(exc))
+
+    def test_context(self):
+        try:
+            try:
+                1/0
+            finally:
+                exc_info_context = sys.exc_info()
+                exc_context = traceback.TracebackException(*exc_info_context)
+                raise Exception("uh oh")
+        except Exception:
+            exc_info = sys.exc_info()
+            exc = traceback.TracebackException(*exc_info)
+            expected_stack = traceback.StackSummary.extract(
+                traceback.walk_tb(exc_info[2]))
+        self.assertEqual(None, exc.__cause__)
+        self.assertEqual(exc_context, exc.__context__)
+        self.assertEqual(False, exc.__suppress_context__)
+        self.assertEqual(expected_stack, exc.stack)
+        self.assertEqual(exc_info[0], exc.exc_type)
+        self.assertEqual(str(exc_info[1]), str(exc))
+
+    def test_limit(self):
+        def recurse(n):
+            if n:
+                recurse(n-1)
+            else:
+                1/0
+        try:
+            recurse(10)
+        except Exception:
+            exc_info = sys.exc_info()
+            exc = traceback.TracebackException(*exc_info, limit=5)
+            expected_stack = traceback.StackSummary.extract(
+                traceback.walk_tb(exc_info[2]), limit=5)
+        self.assertEqual(expected_stack, exc.stack)
+
+    def test_lookup_lines(self):
+        linecache.clearcache()
+        e = Exception("uh oh")
+        c = test_code('/foo.py', 'method')
+        f = test_frame(c, None, None)
+        tb = test_tb(f, 6, None)
+        exc = traceback.TracebackException(Exception, e, tb, lookup_lines=False)
+        self.assertEqual({}, linecache.cache)
+        linecache.updatecache('/foo.py', globals())
+        self.assertEqual(exc.stack[0].line, "import sys")
+
+    def test_locals(self):
+        linecache.updatecache('/foo.py', globals())
+        e = Exception("uh oh")
+        c = test_code('/foo.py', 'method')
+        f = test_frame(c, globals(), {'something': 1, 'other': 'string'})
+        tb = test_tb(f, 6, None)
+        exc = traceback.TracebackException(
+            Exception, e, tb, capture_locals=True)
+        self.assertEqual(
+            exc.stack[0].locals, {'something': '1', 'other': "'string'"})
+
+    def test_no_locals(self):
+        linecache.updatecache('/foo.py', globals())
+        e = Exception("uh oh")
+        c = test_code('/foo.py', 'method')
+        f = test_frame(c, globals(), {'something': 1})
+        tb = test_tb(f, 6, None)
+        exc = traceback.TracebackException(Exception, e, tb)
+        self.assertEqual(exc.stack[0].locals, None)
+
+
 def test_main():
     run_unittest(__name__)
 
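
A compact sketch of the TracebackException / StackSummary API these tests target:

    import traceback

    try:
        1 / 0
    except Exception as exc:
        te = traceback.TracebackException.from_exception(exc, capture_locals=True)
        print(''.join(te.format()))    # same text print_exception() would produce

    # A snapshot of the current call stack, limited to the top frames:
    summary = traceback.StackSummary.extract(traceback.walk_stack(None), limit=3)
    print(''.join(summary.format()))
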
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index fcd2073..58ca2a5 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -1298,21 +1298,6 @@
 class Utility_Tests(unittest.TestCase):
     """Testcase to test the various utility functions in the urllib."""
 
-    def test_splitpasswd(self):
-        """Some of password examples are not sensible, but it is added to
-        confirming to RFC2617 and addressing issue4675.
-        """
-        self.assertEqual(('user', 'ab'),urllib.parse.splitpasswd('user:ab'))
-        self.assertEqual(('user', 'a\nb'),urllib.parse.splitpasswd('user:a\nb'))
-        self.assertEqual(('user', 'a\tb'),urllib.parse.splitpasswd('user:a\tb'))
-        self.assertEqual(('user', 'a\rb'),urllib.parse.splitpasswd('user:a\rb'))
-        self.assertEqual(('user', 'a\fb'),urllib.parse.splitpasswd('user:a\fb'))
-        self.assertEqual(('user', 'a\vb'),urllib.parse.splitpasswd('user:a\vb'))
-        self.assertEqual(('user', 'a:b'),urllib.parse.splitpasswd('user:a:b'))
-        self.assertEqual(('user', 'a b'),urllib.parse.splitpasswd('user:a b'))
-        self.assertEqual(('user 2', 'ab'),urllib.parse.splitpasswd('user 2:ab'))
-        self.assertEqual(('user+1', 'a+b'),urllib.parse.splitpasswd('user+1:a+b'))
-
     def test_thishost(self):
         """Test the urllib.request.thishost utility function returns a tuple"""
         self.assertIsInstance(urllib.request.thishost(), tuple)
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 3182390..36d7e87 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -1484,17 +1484,18 @@
     @unittest.skipUnless(support.is_resource_enabled('network'),
                          'test requires network access')
     def test_issue16464(self):
-        opener = urllib.request.build_opener()
-        request = urllib.request.Request("http://www.example.com/")
-        self.assertEqual(None, request.data)
+        with support.transient_internet("http://www.example.com/"):
+            opener = urllib.request.build_opener()
+            request = urllib.request.Request("http://www.example.com/")
+            self.assertEqual(None, request.data)
 
-        opener.open(request, "1".encode("us-ascii"))
-        self.assertEqual(b"1", request.data)
-        self.assertEqual("1", request.get_header("Content-length"))
+            opener.open(request, "1".encode("us-ascii"))
+            self.assertEqual(b"1", request.data)
+            self.assertEqual("1", request.get_header("Content-length"))
 
-        opener.open(request, "1234567890".encode("us-ascii"))
-        self.assertEqual(b"1234567890", request.data)
-        self.assertEqual("10", request.get_header("Content-length"))
+            opener.open(request, "1234567890".encode("us-ascii"))
+            self.assertEqual(b"1234567890", request.data)
+            self.assertEqual("10", request.get_header("Content-length"))
 
     def test_HTTPError_interface(self):
         """
@@ -1645,17 +1646,6 @@
             req = Request(url)
             self.assertEqual(req.get_full_url(), req.full_url)
 
-def test_main(verbose=None):
-    from test import test_urllib2
-    support.run_doctest(test_urllib2, verbose)
-    support.run_doctest(urllib.request, verbose)
-    tests = (TrivialTests,
-             OpenerDirectorTests,
-             HandlerTests,
-             MiscTests,
-             RequestTests,
-             RequestHdrsTests)
-    support.run_unittest(*tests)
 
 if __name__ == "__main__":
-    test_main(verbose=True)
+    unittest.main()
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index cb323d3..0481f0b 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -1,4 +1,3 @@
-from test import support
 import unittest
 import urllib.parse
 
@@ -769,52 +768,6 @@
                                                           errors="ignore")
         self.assertEqual(result, [('key', '\u0141-')])
 
-    def test_splitport(self):
-        splitport = urllib.parse.splitport
-        self.assertEqual(splitport('parrot:88'), ('parrot', '88'))
-        self.assertEqual(splitport('parrot'), ('parrot', None))
-        self.assertEqual(splitport('parrot:'), ('parrot', None))
-        self.assertEqual(splitport('127.0.0.1'), ('127.0.0.1', None))
-        self.assertEqual(splitport('parrot:cheese'), ('parrot:cheese', None))
-
-    def test_splitnport(self):
-        splitnport = urllib.parse.splitnport
-        self.assertEqual(splitnport('parrot:88'), ('parrot', 88))
-        self.assertEqual(splitnport('parrot'), ('parrot', -1))
-        self.assertEqual(splitnport('parrot', 55), ('parrot', 55))
-        self.assertEqual(splitnport('parrot:'), ('parrot', -1))
-        self.assertEqual(splitnport('parrot:', 55), ('parrot', 55))
-        self.assertEqual(splitnport('127.0.0.1'), ('127.0.0.1', -1))
-        self.assertEqual(splitnport('127.0.0.1', 55), ('127.0.0.1', 55))
-        self.assertEqual(splitnport('parrot:cheese'), ('parrot', None))
-        self.assertEqual(splitnport('parrot:cheese', 55), ('parrot', None))
-
-    def test_splitquery(self):
-        # Normal cases are exercised by other tests; ensure that we also
-        # catch cases with no port specified (testcase ensuring coverage)
-        result = urllib.parse.splitquery('http://python.org/fake?foo=bar')
-        self.assertEqual(result, ('http://python.org/fake', 'foo=bar'))
-        result = urllib.parse.splitquery('http://python.org/fake?foo=bar?')
-        self.assertEqual(result, ('http://python.org/fake?foo=bar', ''))
-        result = urllib.parse.splitquery('http://python.org/fake')
-        self.assertEqual(result, ('http://python.org/fake', None))
-
-    def test_splitvalue(self):
-        # Normal cases are exercised by other tests; test pathological cases
-        # with no key/value pairs. (testcase ensuring coverage)
-        result = urllib.parse.splitvalue('foo=bar')
-        self.assertEqual(result, ('foo', 'bar'))
-        result = urllib.parse.splitvalue('foo=')
-        self.assertEqual(result, ('foo', ''))
-        result = urllib.parse.splitvalue('foobar')
-        self.assertEqual(result, ('foobar', None))
-
-    def test_to_bytes(self):
-        result = urllib.parse.to_bytes('http://www.python.org')
-        self.assertEqual(result, 'http://www.python.org')
-        self.assertRaises(UnicodeError, urllib.parse.to_bytes,
-                          'http://www.python.org/medi\u00e6val')
-
     def test_urlencode_sequences(self):
         # Other tests incidentally urlencode things; test non-covered cases:
         # Sequence and object values.
@@ -883,17 +836,139 @@
         self.assertEqual(p1.path, '863-1234')
         self.assertEqual(p1.params, 'phone-context=+1-914-555')
 
-    def test_unwrap(self):
-        url = urllib.parse.unwrap('<URL:type://host/path>')
-        self.assertEqual(url, 'type://host/path')
-
     def test_Quoter_repr(self):
         quoter = urllib.parse.Quoter(urllib.parse._ALWAYS_SAFE)
         self.assertIn('Quoter', repr(quoter))
 
 
-def test_main():
-    support.run_unittest(UrlParseTestCase)
+class Utility_Tests(unittest.TestCase):
+    """Testcase to test the various utility functions in the urllib."""
+    # In Python 2 this test class was in test_urllib.
+
+    def test_splittype(self):
+        splittype = urllib.parse.splittype
+        self.assertEqual(splittype('type:opaquestring'), ('type', 'opaquestring'))
+        self.assertEqual(splittype('opaquestring'), (None, 'opaquestring'))
+        self.assertEqual(splittype(':opaquestring'), (None, ':opaquestring'))
+        self.assertEqual(splittype('type:'), ('type', ''))
+        self.assertEqual(splittype('type:opaque:string'), ('type', 'opaque:string'))
+
+    def test_splithost(self):
+        splithost = urllib.parse.splithost
+        self.assertEqual(splithost('//www.example.org:80/foo/bar/baz.html'),
+                         ('www.example.org:80', '/foo/bar/baz.html'))
+        self.assertEqual(splithost('//www.example.org:80'),
+                         ('www.example.org:80', ''))
+        self.assertEqual(splithost('/foo/bar/baz.html'),
+                         (None, '/foo/bar/baz.html'))
+
+    def test_splituser(self):
+        splituser = urllib.parse.splituser
+        self.assertEqual(splituser('User:Pass@www.python.org:080'),
+                         ('User:Pass', 'www.python.org:080'))
+        self.assertEqual(splituser('@www.python.org:080'),
+                         ('', 'www.python.org:080'))
+        self.assertEqual(splituser('www.python.org:080'),
+                         (None, 'www.python.org:080'))
+        self.assertEqual(splituser('User:Pass@'),
+                         ('User:Pass', ''))
+        self.assertEqual(splituser('User@example.com:Pass@www.python.org:080'),
+                         ('User@example.com:Pass', 'www.python.org:080'))
+
+    def test_splitpasswd(self):
+        # Some of the password examples are not sensible, but they are kept
+        # to conform to RFC 2617 and to address issue4675.
+        splitpasswd = urllib.parse.splitpasswd
+        self.assertEqual(splitpasswd('user:ab'), ('user', 'ab'))
+        self.assertEqual(splitpasswd('user:a\nb'), ('user', 'a\nb'))
+        self.assertEqual(splitpasswd('user:a\tb'), ('user', 'a\tb'))
+        self.assertEqual(splitpasswd('user:a\rb'), ('user', 'a\rb'))
+        self.assertEqual(splitpasswd('user:a\fb'), ('user', 'a\fb'))
+        self.assertEqual(splitpasswd('user:a\vb'), ('user', 'a\vb'))
+        self.assertEqual(splitpasswd('user:a:b'), ('user', 'a:b'))
+        self.assertEqual(splitpasswd('user:a b'), ('user', 'a b'))
+        self.assertEqual(splitpasswd('user 2:ab'), ('user 2', 'ab'))
+        self.assertEqual(splitpasswd('user+1:a+b'), ('user+1', 'a+b'))
+        self.assertEqual(splitpasswd('user:'), ('user', ''))
+        self.assertEqual(splitpasswd('user'), ('user', None))
+        self.assertEqual(splitpasswd(':ab'), ('', 'ab'))
+
+    def test_splitport(self):
+        splitport = urllib.parse.splitport
+        self.assertEqual(splitport('parrot:88'), ('parrot', '88'))
+        self.assertEqual(splitport('parrot'), ('parrot', None))
+        self.assertEqual(splitport('parrot:'), ('parrot', None))
+        self.assertEqual(splitport('127.0.0.1'), ('127.0.0.1', None))
+        self.assertEqual(splitport('parrot:cheese'), ('parrot:cheese', None))
+        self.assertEqual(splitport('[::1]:88'), ('[::1]', '88'))
+        self.assertEqual(splitport('[::1]'), ('[::1]', None))
+        self.assertEqual(splitport(':88'), ('', '88'))
+
+    def test_splitnport(self):
+        splitnport = urllib.parse.splitnport
+        self.assertEqual(splitnport('parrot:88'), ('parrot', 88))
+        self.assertEqual(splitnport('parrot'), ('parrot', -1))
+        self.assertEqual(splitnport('parrot', 55), ('parrot', 55))
+        self.assertEqual(splitnport('parrot:'), ('parrot', -1))
+        self.assertEqual(splitnport('parrot:', 55), ('parrot', 55))
+        self.assertEqual(splitnport('127.0.0.1'), ('127.0.0.1', -1))
+        self.assertEqual(splitnport('127.0.0.1', 55), ('127.0.0.1', 55))
+        self.assertEqual(splitnport('parrot:cheese'), ('parrot', None))
+        self.assertEqual(splitnport('parrot:cheese', 55), ('parrot', None))
+
+    def test_splitquery(self):
+        # Normal cases are exercised by other tests; ensure that we also
+        # catch cases with no query specified (testcase ensuring coverage)
+        splitquery = urllib.parse.splitquery
+        self.assertEqual(splitquery('http://python.org/fake?foo=bar'),
+                         ('http://python.org/fake', 'foo=bar'))
+        self.assertEqual(splitquery('http://python.org/fake?foo=bar?'),
+                         ('http://python.org/fake?foo=bar', ''))
+        self.assertEqual(splitquery('http://python.org/fake'),
+                         ('http://python.org/fake', None))
+        self.assertEqual(splitquery('?foo=bar'), ('', 'foo=bar'))
+
+    def test_splittag(self):
+        splittag = urllib.parse.splittag
+        self.assertEqual(splittag('http://example.com?foo=bar#baz'),
+                         ('http://example.com?foo=bar', 'baz'))
+        self.assertEqual(splittag('http://example.com?foo=bar#'),
+                         ('http://example.com?foo=bar', ''))
+        self.assertEqual(splittag('#baz'), ('', 'baz'))
+        self.assertEqual(splittag('http://example.com?foo=bar'),
+                         ('http://example.com?foo=bar', None))
+        self.assertEqual(splittag('http://example.com?foo=bar#baz#boo'),
+                         ('http://example.com?foo=bar#baz', 'boo'))
+
+    def test_splitattr(self):
+        splitattr = urllib.parse.splitattr
+        self.assertEqual(splitattr('/path;attr1=value1;attr2=value2'),
+                         ('/path', ['attr1=value1', 'attr2=value2']))
+        self.assertEqual(splitattr('/path;'), ('/path', ['']))
+        self.assertEqual(splitattr(';attr1=value1;attr2=value2'),
+                         ('', ['attr1=value1', 'attr2=value2']))
+        self.assertEqual(splitattr('/path'), ('/path', []))
+
+    def test_splitvalue(self):
+        # Normal cases are exercised by other tests; test pathological cases
+        # with no key/value pairs. (testcase ensuring coverage)
+        splitvalue = urllib.parse.splitvalue
+        self.assertEqual(splitvalue('foo=bar'), ('foo', 'bar'))
+        self.assertEqual(splitvalue('foo='), ('foo', ''))
+        self.assertEqual(splitvalue('=bar'), ('', 'bar'))
+        self.assertEqual(splitvalue('foobar'), ('foobar', None))
+        self.assertEqual(splitvalue('foo=bar=baz'), ('foo', 'bar=baz'))
+
+    def test_to_bytes(self):
+        result = urllib.parse.to_bytes('http://www.python.org')
+        self.assertEqual(result, 'http://www.python.org')
+        self.assertRaises(UnicodeError, urllib.parse.to_bytes,
+                          'http://www.python.org/medi\u00e6val')
+
+    def test_unwrap(self):
+        url = urllib.parse.unwrap('<URL:type://host/path>')
+        self.assertEqual(url, 'type://host/path')
+
 
 if __name__ == "__main__":
-    test_main()
+    unittest.main()
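
The consolidated tests above cover the undocumented urllib.parse split* helpers; for example:

    import urllib.parse

    urllib.parse.splittype('type:opaquestring')          # ('type', 'opaquestring')
    urllib.parse.splithost('//www.example.org:80/foo')   # ('www.example.org:80', '/foo')
    urllib.parse.splitport('parrot:88')                  # ('parrot', '88')
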
diff --git a/Lib/test/test_wsgiref.py b/Lib/test/test_wsgiref.py
index 479fcbc..29f64f3 100644
--- a/Lib/test/test_wsgiref.py
+++ b/Lib/test/test_wsgiref.py
@@ -48,6 +48,18 @@
     ])
     return [b"Hello, world!"]
 
+
+def header_app(environ, start_response):
+    start_response("200 OK", [
+        ('Content-Type', 'text/plain'),
+        ('Date', 'Mon, 05 Jun 2006 18:49:54 GMT')
+    ])
+    return [';'.join([
+        environ['HTTP_X_TEST_HEADER'], environ['QUERY_STRING'],
+        environ['PATH_INFO']
+    ]).encode('iso-8859-1')]
+
+
 def run_amock(app=hello_app, data=b"GET / HTTP/1.0\n\n"):
     server = make_server("", 80, app, MockServer, MockHandler)
     inp = BufferedReader(BytesIO(data))
@@ -118,6 +130,19 @@
         out, err = run_amock()
         self.check_hello(out)
 
+    def test_environ(self):
+        request = (
+            b"GET /p%61th/?query=test HTTP/1.0\n"
+            b"X-Test-Header: Python test \n"
+            b"X-Test-Header: Python test 2\n"
+            b"Content-Length: 0\n\n"
+        )
+        out, err = run_amock(header_app, request)
+        self.assertEqual(
+            out.splitlines()[-1],
+            b"Python test,Python test 2;query=test;/path/"
+        )
+
     def test_request_length(self):
         out, err = run_amock(data=b"GET " + (b"x" * 65537) + b" HTTP/1.0\n\n")
         self.assertEqual(out.splitlines()[0],
diff --git a/Lib/test/test_xml_etree_c.py b/Lib/test/test_xml_etree_c.py
index 816aa86..d0df38d 100644
--- a/Lib/test/test_xml_etree_c.py
+++ b/Lib/test/test_xml_etree_c.py
@@ -55,7 +55,7 @@
     def setUp(self):
         self.elementsize = support.calcobjsize('5P')
         # extra
-        self.extra = struct.calcsize('PiiP4P')
+        self.extra = struct.calcsize('PnnP4P')
 
     check_sizeof = support.check_sizeof
 
diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py
index abe80e4..f458f30 100644
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -679,7 +679,14 @@
         if name + 'o' not in namelist:
             self.assertIn(name + 'c', namelist)
 
+    def requiresWriteAccess(self, path):
+        # effective_ids is unavailable on Windows
+        if not os.access(path, os.W_OK,
+                         effective_ids=os.access in os.supports_effective_ids):
+            self.skipTest('requires write access to the installed location')
+
     def test_write_pyfile(self):
+        self.requiresWriteAccess(os.path.dirname(__file__))
         with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
             fn = __file__
             if fn.endswith('.pyc') or fn.endswith('.pyo'):
@@ -711,6 +718,7 @@
     def test_write_python_package(self):
         import email
         packagedir = os.path.dirname(email.__file__)
+        self.requiresWriteAccess(packagedir)
 
         with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
             zipfp.writepy(packagedir)
@@ -724,6 +732,7 @@
     def test_write_filtered_python_package(self):
         import test
         packagedir = os.path.dirname(test.__file__)
+        self.requiresWriteAccess(packagedir)
 
         with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
 
@@ -752,6 +761,7 @@
     def test_write_with_optimization(self):
         import email
         packagedir = os.path.dirname(email.__file__)
+        self.requiresWriteAccess(packagedir)
         # use .pyc if running test in optimization mode,
         # use .pyo if running test in debug mode
         optlevel = 1 if __debug__ else 0
diff --git a/Lib/test/tf_inherit_check.py b/Lib/test/tf_inherit_check.py
index afe50d2..138f25a 100644
--- a/Lib/test/tf_inherit_check.py
+++ b/Lib/test/tf_inherit_check.py
@@ -4,22 +4,24 @@
 
 import sys
 import os
+from test.support import SuppressCrashReport
 
-verbose = (sys.argv[1] == 'v')
-try:
-    fd = int(sys.argv[2])
-
+with SuppressCrashReport():
+    verbose = (sys.argv[1] == 'v')
     try:
-        os.write(fd, b"blat")
-    except OSError:
-        # Success -- could not write to fd.
-        sys.exit(0)
-    else:
-        if verbose:
-            sys.stderr.write("fd %d is open in child" % fd)
-        sys.exit(1)
+        fd = int(sys.argv[2])
 
-except Exception:
-    if verbose:
-        raise
-    sys.exit(1)
+        try:
+            os.write(fd, b"blat")
+        except OSError:
+            # Success -- could not write to fd.
+            sys.exit(0)
+        else:
+            if verbose:
+                sys.stderr.write("fd %d is open in child" % fd)
+            sys.exit(1)
+
+    except Exception:
+        if verbose:
+            raise
+        sys.exit(1)
diff --git a/Lib/traceback.py b/Lib/traceback.py
index c1ab36e..0ac1819 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -14,19 +14,12 @@
 # Formatting and printing lists of traceback lines.
 #
 
-def _format_list_iter(extracted_list):
-    for filename, lineno, name, line in extracted_list:
-        item = '  File "{}", line {}, in {}\n'.format(filename, lineno, name)
-        if line:
-            item = item + '    {}\n'.format(line.strip())
-        yield item
-
 def print_list(extracted_list, file=None):
     """Print the list of tuples as returned by extract_tb() or
     extract_stack() as a formatted stack trace to the given file."""
     if file is None:
         file = sys.stderr
-    for item in _format_list_iter(extracted_list):
+    for item in StackSummary.from_list(extracted_list).format():
         print(item, file=file, end="")
 
 def format_list(extracted_list):
@@ -39,45 +32,12 @@
     the strings may contain internal newlines as well, for those items
     whose source text line is not None.
     """
-    return list(_format_list_iter(extracted_list))
+    return StackSummary.from_list(extracted_list).format()
 
 #
 # Printing and Extracting Tracebacks.
 #
 
-# extractor takes curr and needs to return a tuple of:
-# - Frame object
-# - Line number
-# - Next item (same type as curr)
-# In practice, curr is either a traceback or a frame.
-def _extract_tb_or_stack_iter(curr, limit, extractor):
-    if limit is None:
-        limit = getattr(sys, 'tracebacklimit', None)
-
-    n = 0
-    while curr is not None and (limit is None or n < limit):
-        f, lineno, next_item = extractor(curr)
-        co = f.f_code
-        filename = co.co_filename
-        name = co.co_name
-
-        linecache.checkcache(filename)
-        line = linecache.getline(filename, lineno, f.f_globals)
-
-        if line:
-            line = line.strip()
-        else:
-            line = None
-
-        yield (filename, lineno, name, line)
-        curr = next_item
-        n += 1
-
-def _extract_tb_iter(tb, limit):
-    return _extract_tb_or_stack_iter(
-                tb, limit,
-                operator.attrgetter("tb_frame", "tb_lineno", "tb_next"))
-
 def print_tb(tb, limit=None, file=None):
     """Print up to 'limit' stack trace entries from the traceback 'tb'.
 
@@ -90,7 +50,7 @@
 
 def format_tb(tb, limit=None):
     """A shorthand for 'format_list(extract_tb(tb, limit))'."""
-    return format_list(extract_tb(tb, limit=limit))
+    return extract_tb(tb, limit=limit).format()
 
 def extract_tb(tb, limit=None):
     """Return list of up to limit pre-processed entries from traceback.
@@ -103,7 +63,7 @@
     leading and trailing whitespace stripped; if the source is not
     available it is None.
     """
-    return list(_extract_tb_iter(tb, limit=limit))
+    return StackSummary.extract(walk_tb(tb), limit=limit)
 
 #
 # Exception formatting and output.
@@ -111,47 +71,12 @@
 
 _cause_message = (
     "\nThe above exception was the direct cause "
-    "of the following exception:\n")
+    "of the following exception:\n\n")
 
 _context_message = (
     "\nDuring handling of the above exception, "
-    "another exception occurred:\n")
+    "another exception occurred:\n\n")
 
-def _iter_chain(exc, custom_tb=None, seen=None):
-    if seen is None:
-        seen = set()
-    seen.add(exc)
-    its = []
-    context = exc.__context__
-    cause = exc.__cause__
-    if cause is not None and cause not in seen:
-        its.append(_iter_chain(cause, False, seen))
-        its.append([(_cause_message, None)])
-    elif (context is not None and
-          not exc.__suppress_context__ and
-          context not in seen):
-        its.append(_iter_chain(context, None, seen))
-        its.append([(_context_message, None)])
-    its.append([(exc, custom_tb or exc.__traceback__)])
-    # itertools.chain is in an extension module and may be unavailable
-    for it in its:
-        yield from it
-
-def _format_exception_iter(etype, value, tb, limit, chain):
-    if chain:
-        values = _iter_chain(value, tb)
-    else:
-        values = [(value, tb)]
-
-    for value, tb in values:
-        if isinstance(value, str):
-            # This is a cause/context message line
-            yield value + '\n'
-            continue
-        if tb:
-            yield 'Traceback (most recent call last):\n'
-            yield from _format_list_iter(_extract_tb_iter(tb, limit=limit))
-        yield from _format_exception_only_iter(type(value), value)
 
 def print_exception(etype, value, tb, limit=None, file=None, chain=True):
     """Print exception up to 'limit' stack trace entries from 'tb' to 'file'.
@@ -164,11 +89,16 @@
     occurred with a caret on the next line indicating the approximate
     position of the error.
     """
+    # format_exception has ignored etype for some time, and code such as cgitb
+    # passes in bogus values as a result. For compatibility with such code we
+    # ignore it here (rather than in the new TracebackException API).
     if file is None:
         file = sys.stderr
-    for line in _format_exception_iter(etype, value, tb, limit, chain):
+    for line in TracebackException(
+            type(value), value, tb, limit=limit).format(chain=chain):
         print(line, file=file, end="")
 
+
 def format_exception(etype, value, tb, limit=None, chain=True):
     """Format a stack trace and the exception information.
 
@@ -178,7 +108,12 @@
     these lines are concatenated and printed, exactly the same text is
     printed as does print_exception().
     """
-    return list(_format_exception_iter(etype, value, tb, limit, chain))
+    # format_exception has ignored etype for some time, and code such as cgitb
+    # passes in bogus values as a result. For compatibility with such code we
+    # ignore it here (rather than in the new TracebackException API).
+    return list(TracebackException(
+        type(value), value, tb, limit=limit).format(chain=chain))
+
 
 def format_exception_only(etype, value):
     """Format the exception part of a traceback.
@@ -196,46 +131,14 @@
     string in the list.
 
     """
-    return list(_format_exception_only_iter(etype, value))
+    return list(TracebackException(etype, value, None).format_exception_only())
 
-def _format_exception_only_iter(etype, value):
-    # Gracefully handle (the way Python 2.4 and earlier did) the case of
-    # being called with (None, None).
-    if etype is None:
-        yield _format_final_exc_line(etype, value)
-        return
 
-    stype = etype.__qualname__
-    smod = etype.__module__
-    if smod not in ("__main__", "builtins"):
-        stype = smod + '.' + stype
-
-    if not issubclass(etype, SyntaxError):
-        yield _format_final_exc_line(stype, value)
-        return
-
-    # It was a syntax error; show exactly where the problem was found.
-    filename = value.filename or "<string>"
-    lineno = str(value.lineno) or '?'
-    yield '  File "{}", line {}\n'.format(filename, lineno)
-
-    badline = value.text
-    offset = value.offset
-    if badline is not None:
-        yield '    {}\n'.format(badline.strip())
-        if offset is not None:
-            caretspace = badline.rstrip('\n')
-            offset = min(len(caretspace), offset) - 1
-            caretspace = caretspace[:offset].lstrip()
-            # non-space whitespace (likes tabs) must be kept for alignment
-            caretspace = ((c.isspace() and c or ' ') for c in caretspace)
-            yield '    {}^\n'.format(''.join(caretspace))
-    msg = value.msg or "<no detail available>"
-    yield "{}: {}\n".format(stype, msg)
+# -- not official API but folks probably use these two functions.
 
 def _format_final_exc_line(etype, value):
     valuestr = _some_str(value)
-    if value is None or not valuestr:
+    if value == 'None' or value is None or not valuestr:
         line = "%s\n" % etype
     else:
         line = "%s: %s\n" % (etype, valuestr)
@@ -247,6 +150,8 @@
     except:
         return '<unprintable %s object>' % type(value).__name__
 
+# --
+
 def print_exc(limit=None, file=None, chain=True):
     """Shorthand for 'print_exception(*sys.exc_info(), limit, file)'."""
     print_exception(*sys.exc_info(), limit=limit, file=file, chain=chain)
@@ -267,15 +172,6 @@
 # Printing and Extracting Stacks.
 #
 
-def _extract_stack_iter(f, limit=None):
-    return _extract_tb_or_stack_iter(
-                f, limit, lambda f: (f, f.f_lineno, f.f_back))
-
-def _get_stack(f):
-    if f is None:
-        f = sys._getframe().f_back.f_back
-    return f
-
 def print_stack(f=None, limit=None, file=None):
     """Print a stack trace from its invocation point.
 
@@ -283,11 +179,13 @@
     stack frame at which to start. The optional 'limit' and 'file'
     arguments have the same meaning as for print_exception().
     """
-    print_list(extract_stack(_get_stack(f), limit=limit), file=file)
+    print_list(extract_stack(f, limit=limit), file=file)
+
 
 def format_stack(f=None, limit=None):
     """Shorthand for 'format_list(extract_stack(f, limit))'."""
-    return format_list(extract_stack(_get_stack(f), limit=limit))
+    return format_list(extract_stack(f, limit=limit))
+
 
 def extract_stack(f=None, limit=None):
     """Extract the raw traceback from the current stack frame.
@@ -298,10 +196,11 @@
     line number, function name, text), and the entries are in order
     from oldest to newest stack frame.
     """
-    stack = list(_extract_stack_iter(_get_stack(f), limit=limit))
+    stack = StackSummary.extract(walk_stack(f), limit=limit)
     stack.reverse()
     return stack
 
+
 def clear_frames(tb):
     "Clear all references to local variables in the frames of a traceback."
     while tb is not None:
@@ -311,3 +210,344 @@
             # Ignore the exception raised if the frame is still executing.
             pass
         tb = tb.tb_next
+
+
+class FrameSummary:
+    """A single frame from a traceback.
+
+    - :attr:`filename` The filename for the frame.
+    - :attr:`lineno` The line within filename for the frame that was
+      active when the frame was captured.
+    - :attr:`name` The name of the function or method that was executing
+      when the frame was captured.
+    - :attr:`line` The text from the linecache module for the line
+      of code that was running when the frame was captured.
+    - :attr:`locals` Either None if locals were not supplied, or a dict
+      mapping the name to the repr() of the variable.
+    """
+
+    __slots__ = ('filename', 'lineno', 'name', '_line', 'locals')
+
+    def __init__(self, filename, lineno, name, *, lookup_line=True,
+            locals=None, line=None):
+        """Construct a FrameSummary.
+
+        :param lookup_line: If True, `linecache` is consulted for the source
+            code line. Otherwise, the line will be looked up when first needed.
+        :param locals: If supplied, the frame locals, which will be captured as
+            object representations.
+        :param line: If provided, use this instead of looking up the line in
+            the linecache.
+        """
+        self.filename = filename
+        self.lineno = lineno
+        self.name = name
+        self._line = line
+        if lookup_line:
+            self.line
+        self.locals = \
+            dict((k, repr(v)) for k, v in locals.items()) if locals else None
+
+    def __eq__(self, other):
+        return (self.filename == other.filename and
+                self.lineno == other.lineno and
+                self.name == other.name and
+                self.locals == other.locals)
+
+    def __getitem__(self, pos):
+        return (self.filename, self.lineno, self.name, self.line)[pos]
+
+    def __iter__(self):
+        return iter([self.filename, self.lineno, self.name, self.line])
+
+    def __repr__(self):
+        return "<FrameSummary file {filename}, line {lineno} in {name}>".format(
+            filename=self.filename, lineno=self.lineno, name=self.name)
+
+    @property
+    def line(self):
+        if self._line is None:
+            self._line = linecache.getline(self.filename, self.lineno).strip()
+        return self._line
+
+
+def walk_stack(f):
+    """Walk a stack yielding the frame and line number for each frame.
+
+    This will follow f.f_back from the given frame. If no frame is given, the
+    current stack is used. Usually used with StackSummary.extract.
+    """
+    if f is None:
+        f = sys._getframe().f_back.f_back
+    while f is not None:
+        yield f, f.f_lineno
+        f = f.f_back
+
+
+def walk_tb(tb):
+    """Walk a traceback yielding the frame and line number for each frame.
+
+    This will follow tb.tb_next (and thus is in the opposite order to
+    walk_stack). Usually used with StackSummary.extract.
+    """
+    while tb is not None:
+        yield tb.tb_frame, tb.tb_lineno
+        tb = tb.tb_next
+
+
+class StackSummary(list):
+    """A stack of frames."""
+
+    @classmethod
+    def extract(klass, frame_gen, *, limit=None, lookup_lines=True,
+            capture_locals=False):
+        """Create a StackSummary from a traceback or stack object.
+
+        :param frame_gen: A generator that yields (frame, lineno) tuples to
+            include in the stack.
+        :param limit: None to include all frames or the number of frames to
+            include.
+        :param lookup_lines: If True, lookup lines for each frame immediately,
+            otherwise lookup is deferred until the frame is rendered.
+        :param capture_locals: If True, the local variables from each frame will
+            be captured as object representations into the FrameSummary.
+        """
+        if limit is None:
+            limit = getattr(sys, 'tracebacklimit', None)
+
+        result = klass()
+        fnames = set()
+        for pos, (f, lineno) in enumerate(frame_gen):
+            if limit is not None and pos >= limit:
+                break
+            co = f.f_code
+            filename = co.co_filename
+            name = co.co_name
+
+            fnames.add(filename)
+            linecache.lazycache(filename, f.f_globals)
+            # Must defer line lookups until we have called checkcache.
+            if capture_locals:
+                f_locals = f.f_locals
+            else:
+                f_locals = None
+            result.append(FrameSummary(
+                filename, lineno, name, lookup_line=False, locals=f_locals))
+        for filename in fnames:
+            linecache.checkcache(filename)
+        # If immediate lookup was desired, trigger lookups now.
+        if lookup_lines:
+            for f in result:
+                f.line
+        return result
+
+    @classmethod
+    def from_list(klass, a_list):
+        """Create a StackSummary from a simple list of tuples.
+
+        This method supports the older Python API. Each tuple should be a
+        4-tuple with (filename, lineno, name, line) elements.
+        """
+        if isinstance(a_list, StackSummary):
+            return StackSummary(a_list)
+        result = StackSummary()
+        for filename, lineno, name, line in a_list:
+            result.append(FrameSummary(filename, lineno, name, line=line))
+        return result
+
+    def format(self):
+        """Format the stack ready for printing.
+
+        Returns a list of strings ready for printing.  Each string in the
+        resulting list corresponds to a single frame from the stack.
+        Each string ends in a newline; the strings may contain internal
+        newlines as well, for those items with source text lines.
+        """
+        result = []
+        for frame in self:
+            row = []
+            row.append('  File "{}", line {}, in {}\n'.format(
+                frame.filename, frame.lineno, frame.name))
+            if frame.line:
+                row.append('    {}\n'.format(frame.line.strip()))
+            if frame.locals:
+                for name, value in sorted(frame.locals.items()):
+                    row.append('    {name} = {value}\n'.format(name=name, value=value))
+            result.append(''.join(row))
+        return result
+
+
+class TracebackException:
+    """An exception ready for rendering.
+
+    The traceback module captures enough attributes from the original exception
+    into this intermediary form to ensure that no references are held, while
+    still being able to fully print or format it.
+
+    Use `from_exception` to create TracebackException instances from exception
+    objects, or the constructor to create TracebackException instances from
+    individual components.
+
+    - :attr:`__cause__` A TracebackException of the original *__cause__*.
+    - :attr:`__context__` A TracebackException of the original *__context__*.
+    - :attr:`__suppress_context__` The *__suppress_context__* value from the
+      original exception.
+    - :attr:`stack` A `StackSummary` representing the traceback.
+    - :attr:`exc_type` The class of the original exception.
+    - :attr:`filename` For syntax errors - the filename where the error
+      occurred.
+    - :attr:`lineno` For syntax errors - the line number where the error
+      occurred.
+    - :attr:`text` For syntax errors - the text where the error
+      occurred.
+    - :attr:`offset` For syntax errors - the offset into the text where the
+      error occurred.
+    - :attr:`msg` For syntax errors - the compiler error message.
+    """
+
+    def __init__(self, exc_type, exc_value, exc_traceback, *, limit=None,
+            lookup_lines=True, capture_locals=False, _seen=None):
+        # NB: we need to accept exc_type, exc_value, exc_traceback to
+        # permit backwards compat with the existing API, otherwise we
+        # need stub thunk objects just to glue it together.
+        # Handle loops in __cause__ or __context__.
+        if _seen is None:
+            _seen = set()
+        _seen.add(exc_value)
+        # Gracefully handle (the way Python 2.4 and earlier did) the case of
+        # being called with no type or value (None, None, None).
+        if (exc_value and exc_value.__cause__ is not None
+            and exc_value.__cause__ not in _seen):
+            cause = TracebackException(
+                type(exc_value.__cause__),
+                exc_value.__cause__,
+                exc_value.__cause__.__traceback__,
+                limit=limit,
+                lookup_lines=False,
+                capture_locals=capture_locals,
+                _seen=_seen)
+        else:
+            cause = None
+        if (exc_value and exc_value.__context__ is not None
+            and exc_value.__context__ not in _seen):
+            context = TracebackException(
+                type(exc_value.__context__),
+                exc_value.__context__,
+                exc_value.__context__.__traceback__,
+                limit=limit,
+                lookup_lines=False,
+                capture_locals=capture_locals,
+                _seen=_seen)
+        else:
+            context = None
+        self.__cause__ = cause
+        self.__context__ = context
+        self.__suppress_context__ = \
+            exc_value.__suppress_context__ if exc_value else False
+        # TODO: locals.
+        self.stack = StackSummary.extract(
+            walk_tb(exc_traceback), limit=limit, lookup_lines=lookup_lines,
+            capture_locals=capture_locals)
+        self.exc_type = exc_type
+        # Capture now to permit freeing resources: only complication is in the
+        # unofficial API _format_final_exc_line
+        self._str = _some_str(exc_value)
+        if exc_type and issubclass(exc_type, SyntaxError):
+            # Handle SyntaxError's specially
+            self.filename = exc_value.filename
+            self.lineno = str(exc_value.lineno)
+            self.text = exc_value.text
+            self.offset = exc_value.offset
+            self.msg = exc_value.msg
+        if lookup_lines:
+            self._load_lines()
+
+    @classmethod
+    def from_exception(self, exc, *args, **kwargs):
+        """Create a TracebackException from an exception."""
+        return TracebackException(
+            type(exc), exc, exc.__traceback__, *args, **kwargs)
+
+    def _load_lines(self):
+        """Private API. force all lines in the stack to be loaded."""
+        for frame in self.stack:
+            frame.line
+        if self.__context__:
+            self.__context__._load_lines()
+        if self.__cause__:
+            self.__cause__._load_lines()
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
+    def __str__(self):
+        return self._str
+
+    def format_exception_only(self):
+        """Format the exception part of the traceback.
+
+        The return value is a generator of strings, each ending in a newline.
+
+        Normally, the generator emits a single string; however, for
+        SyntaxError exceptions, it emits several lines that (when
+        printed) display detailed information about where the syntax
+        error occurred.
+
+        The message indicating which exception occurred is always the last
+        string in the output.
+        """
+        if self.exc_type is None:
+            yield _format_final_exc_line(None, self._str)
+            return
+
+        stype = self.exc_type.__qualname__
+        smod = self.exc_type.__module__
+        if smod not in ("__main__", "builtins"):
+            stype = smod + '.' + stype
+
+        if not issubclass(self.exc_type, SyntaxError):
+            yield _format_final_exc_line(stype, self._str)
+            return
+
+        # It was a syntax error; show exactly where the problem was found.
+        filename = self.filename or "<string>"
+        lineno = str(self.lineno) or '?'
+        yield '  File "{}", line {}\n'.format(filename, lineno)
+
+        badline = self.text
+        offset = self.offset
+        if badline is not None:
+            yield '    {}\n'.format(badline.strip())
+            if offset is not None:
+                caretspace = badline.rstrip('\n')
+                offset = min(len(caretspace), offset) - 1
+                caretspace = caretspace[:offset].lstrip()
+                # non-space whitespace (like tabs) must be kept for alignment
+                caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+                yield '    {}^\n'.format(''.join(caretspace))
+        msg = self.msg or "<no detail available>"
+        yield "{}: {}\n".format(stype, msg)
+
+    def format(self, *, chain=True):
+        """Format the exception.
+
+        If chain is not *True*, *__cause__* and *__context__* will not be formatted.
+
+        The return value is a generator of strings, each ending in a newline and
+        some containing internal newlines. `print_exception` is a wrapper around
+        this method which just prints the lines to a file.
+
+        The message indicating which exception occurred is always the last
+        string in the output.
+        """
+        if chain:
+            if self.__cause__ is not None:
+                yield from self.__cause__.format(chain=chain)
+                yield _cause_message
+            elif (self.__context__ is not None and
+                not self.__suppress_context__):
+                yield from self.__context__.format(chain=chain)
+                yield _context_message
+        yield 'Traceback (most recent call last):\n'
+        yield from self.stack.format()
+        yield from self.format_exception_only()
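
Taken together, the new traceback classes replace the removed private
iterators with a public API: walk_tb() and walk_stack() yield (frame, lineno)
pairs, StackSummary.extract() turns them into FrameSummary objects, and
TracebackException snapshots a whole exception chain without holding frame
references.  A small usage sketch based only on the API added above (the
failing function and its local variable are illustrative):

    import traceback

    def fail():
        spam = 'eggs'        # shown in the output when capture_locals=True
        return 1 / 0

    try:
        fail()
    except ZeroDivisionError as exc:
        # Snapshot the exception (and its __cause__/__context__ chain)
        # without keeping references to the frames alive.
        tbe = traceback.TracebackException.from_exception(
            exc, capture_locals=True)
        # extract_tb() now returns a StackSummary of FrameSummary objects;
        # attribute access works alongside the old 4-tuple indexing.
        last = traceback.extract_tb(exc.__traceback__)[-1]
        print(last.filename, last.lineno, last.name, last.line)

    # Render later, long after the frames have been released.
    print(''.join(tbe.format()))    # same text print_exception() would write
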
diff --git a/Lib/turtle.py b/Lib/turtle.py
index f4400c9..cbd4f47 100644
--- a/Lib/turtle.py
+++ b/Lib/turtle.py
@@ -1288,7 +1288,7 @@
     def _incrementudc(self):
         """Increment update counter."""
         if not TurtleScreen._RUNNING:
-            TurtleScreen._RUNNNING = True
+            TurtleScreen._RUNNING = True
             raise Terminator
         if self._tracing > 0:
             self._updatecounter += 1
@@ -3754,7 +3754,7 @@
             Turtle._screen = None
             _Screen._root = None
             _Screen._canvas = None
-        TurtleScreen._RUNNING = True
+        TurtleScreen._RUNNING = False
         root.destroy()
 
     def bye(self):
@@ -3795,7 +3795,6 @@
         except AttributeError:
             exit(0)
 
-
 class Turtle(RawTurtle):
     """RawTurtle auto-creating (scrolled) canvas.
 
@@ -3818,18 +3817,6 @@
 
 Pen = Turtle
 
-def _getpen():
-    """Create the 'anonymous' turtle if not already present."""
-    if Turtle._pen is None:
-        Turtle._pen = Turtle()
-    return Turtle._pen
-
-def _getscreen():
-    """Create a TurtleScreen if not already present."""
-    if Turtle._screen is None:
-        Turtle._screen = Screen()
-    return Turtle._screen
-
 def write_docstringdict(filename="turtle_docstringdict"):
     """Create and write docstring-dictionary to file.
 
@@ -3952,26 +3939,38 @@
 ## as functions. So we can enhance, change, add, delete methods to these
 ## classes and do not need to change anything here.
 
+__func_body = """\
+def {name}{paramslist}:
+    if {obj} is None:
+        if not TurtleScreen._RUNNING:
+            TurtleScreen._RUNNING = True
+            raise Terminator
+        {obj} = {init}
+    try:
+        return {obj}.{name}{argslist}
+    except TK.TclError:
+        if not TurtleScreen._RUNNING:
+            TurtleScreen._RUNNING = True
+            raise Terminator
+        raise
+"""
 
-for methodname in _tg_screen_functions:
-    pl1, pl2 = getmethparlist(eval('_Screen.' + methodname))
-    if pl1 == "":
-        print(">>>>>>", pl1, pl2)
-        continue
-    defstr = ("def %(key)s%(pl1)s: return _getscreen().%(key)s%(pl2)s" %
-                                   {'key':methodname, 'pl1':pl1, 'pl2':pl2})
-    exec(defstr)
-    eval(methodname).__doc__ = _screen_docrevise(eval('_Screen.'+methodname).__doc__)
+def _make_global_funcs(functions, cls, obj, init, docrevise):
+    for methodname in functions:
+        method = getattr(cls, methodname)
+        pl1, pl2 = getmethparlist(method)
+        if pl1 == "":
+            print(">>>>>>", pl1, pl2)
+            continue
+        defstr = __func_body.format(obj=obj, init=init, name=methodname,
+                                    paramslist=pl1, argslist=pl2)
+        exec(defstr, globals())
+        globals()[methodname].__doc__ = docrevise(method.__doc__)
 
-for methodname in _tg_turtle_functions:
-    pl1, pl2 = getmethparlist(eval('Turtle.' + methodname))
-    if pl1 == "":
-        print(">>>>>>", pl1, pl2)
-        continue
-    defstr = ("def %(key)s%(pl1)s: return _getpen().%(key)s%(pl2)s" %
-                                   {'key':methodname, 'pl1':pl1, 'pl2':pl2})
-    exec(defstr)
-    eval(methodname).__doc__ = _turtle_docrevise(eval('Turtle.'+methodname).__doc__)
+_make_global_funcs(_tg_screen_functions, _Screen,
+                   'Turtle._screen', 'Screen()', _screen_docrevise)
+_make_global_funcs(_tg_turtle_functions, Turtle,
+                   'Turtle._pen', 'Turtle()', _turtle_docrevise)
 
 
 done = mainloop
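
For reference, the generated module-level wrappers now create the anonymous
turtle or screen lazily inside the wrapper itself (the removed _getpen() and
_getscreen() helpers are gone), and a TclError raised after the window has
been closed is turned into Terminator.  Roughly what the template above
expands to for forward() -- a paraphrase of the exec'd code, not a verbatim
copy:

    # name='forward', obj='Turtle._pen', init='Turtle()' in __func_body
    def forward(distance):
        if Turtle._pen is None:
            if not TurtleScreen._RUNNING:
                TurtleScreen._RUNNING = True
                raise Terminator
            Turtle._pen = Turtle()
        try:
            return Turtle._pen.forward(distance)
        except TK.TclError:
            if not TurtleScreen._RUNNING:
                TurtleScreen._RUNNING = True
                raise Terminator
            raise
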
diff --git a/Lib/turtledemo/__main__.py b/Lib/turtledemo/__main__.py
old mode 100755
new mode 100644
index 6280c84..106d058
--- a/Lib/turtledemo/__main__.py
+++ b/Lib/turtledemo/__main__.py
@@ -344,6 +344,8 @@
             else:
                 self.state = DONE
         except turtle.Terminator:
+            if self.root is None:
+                return
             self.state = DONE
             result = "stopped!"
         if self.state == DONE:
@@ -369,7 +371,9 @@
         turtle.TurtleScreen._RUNNING = False
 
     def _destroy(self):
+        turtle.TurtleScreen._RUNNING = False
         self.root.destroy()
+        self.root = None
 
 
 def main():
diff --git a/Lib/turtledemo/sorting_animate.py b/Lib/turtledemo/sorting_animate.py
new file mode 100644
index 0000000..d25a0ab
--- /dev/null
+++ b/Lib/turtledemo/sorting_animate.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python3
+"""
+
+         sorting_animate.py
+
+A minimal sorting algorithm animation:
+Sorts a shelf of 10 blocks using insertion
+sort, selection sort and quicksort.
+
+Shelves are implemented using built-in lists.
+
+Blocks are turtles with shape "square", but
+stretched to rectangles by shapesize()
+ ---------------------------------------
+       To exit press the space bar
+ ---------------------------------------
+"""
+from turtle import *
+import random
+
+
+class Block(Turtle):
+
+    def __init__(self, size):
+        self.size = size
+        Turtle.__init__(self, shape="square", visible=False)
+        self.pu()
+        self.shapesize(size * 1.5, 1.5, 2) # square-->rectangle
+        self.fillcolor("black")
+        self.st()
+
+    def glow(self):
+        self.fillcolor("red")
+
+    def unglow(self):
+        self.fillcolor("black")
+
+    def __repr__(self):
+        return "Block size: {0}".format(self.size)
+
+
+class Shelf(list):
+
+    def __init__(self, y):
+        "create a shelf. y is y-position of first block"
+        self.y = y
+        self.x = -150
+
+    def push(self, d):
+        width, _, _ = d.shapesize()
+        # align blocks by the bottom edge
+        y_offset = width / 2 * 20
+        d.sety(self.y + y_offset)
+        d.setx(self.x + 34 * len(self))
+        self.append(d)
+
+    def _close_gap_from_i(self, i):
+        for b in self[i:]:
+            xpos, _ = b.pos()
+            b.setx(xpos - 34)
+
+    def _open_gap_from_i(self, i):
+        for b in self[i:]:
+            xpos, _ = b.pos()
+            b.setx(xpos + 34)
+
+    def pop(self, key):
+        b = list.pop(self, key)
+        b.glow()
+        b.sety(200)
+        self._close_gap_from_i(key)
+        return b
+
+    def insert(self, key, b):
+        self._open_gap_from_i(key)
+        list.insert(self, key, b)
+        b.setx(self.x + 34 * key)
+        width, _, _ = b.shapesize()
+        # align blocks by the bottom edge
+        y_offset = width / 2 * 20
+        b.sety(self.y + y_offset)
+        b.unglow()
+
+def isort(shelf):
+    length = len(shelf)
+    for i in range(1, length):
+        hole = i
+        while hole > 0 and shelf[i].size < shelf[hole - 1].size:
+            hole = hole - 1
+        shelf.insert(hole, shelf.pop(i))
+    return
+
+def ssort(shelf):
+    length = len(shelf)
+    for j in range(0, length - 1):
+        imin = j
+        for i in range(j + 1, length):
+            if shelf[i].size < shelf[imin].size:
+                imin = i
+        if imin != j:
+            shelf.insert(j, shelf.pop(imin))
+
+def partition(shelf, left, right, pivot_index):
+    pivot = shelf[pivot_index]
+    shelf.insert(right, shelf.pop(pivot_index))
+    store_index = left
+    for i in range(left, right): # range is non-inclusive of ending value
+        if shelf[i].size < pivot.size:
+            shelf.insert(store_index, shelf.pop(i))
+            store_index = store_index + 1
+    shelf.insert(store_index, shelf.pop(right)) # move pivot to correct position
+    return store_index
+
+def qsort(shelf, left, right):
+    if left < right:
+        pivot_index = left
+        pivot_new_index = partition(shelf, left, right, pivot_index)
+        qsort(shelf, left, pivot_new_index - 1)
+        qsort(shelf, pivot_new_index + 1, right)
+
+def randomize():
+    disable_keys()
+    clear()
+    target = list(range(10))
+    random.shuffle(target)
+    for i, t in enumerate(target):
+        for j in range(i, len(s)):
+            if s[j].size == t + 1:
+                s.insert(i, s.pop(j))
+    show_text(instructions1)
+    show_text(instructions2, line=1)
+    enable_keys()
+
+def show_text(text, line=0):
+    line = 20 * line
+    goto(0,-250 - line)
+    write(text, align="center", font=("Courier", 16, "bold"))
+
+def start_ssort():
+    disable_keys()
+    clear()
+    show_text("Selection Sort")
+    ssort(s)
+    clear()
+    show_text(instructions1)
+    show_text(instructions2, line=1)
+    enable_keys()
+
+def start_isort():
+    disable_keys()
+    clear()
+    show_text("Insertion Sort")
+    isort(s)
+    clear()
+    show_text(instructions1)
+    show_text(instructions2, line=1)
+    enable_keys()
+
+def start_qsort():
+    disable_keys()
+    clear()
+    show_text("Quicksort")
+    qsort(s, 0, len(s) - 1)
+    clear()
+    show_text(instructions1)
+    show_text(instructions2, line=1)
+    enable_keys()
+
+def init_shelf():
+    global s
+    s = Shelf(-200)
+    vals = (4, 2, 8, 9, 1, 5, 10, 3, 7, 6)
+    for i in vals:
+        s.push(Block(i))
+
+def disable_keys():
+    onkey(None, "s")
+    onkey(None, "i")
+    onkey(None, "q")
+    onkey(None, "r")
+
+def enable_keys():
+    onkey(start_isort, "i")
+    onkey(start_ssort, "s")
+    onkey(start_qsort, "q")
+    onkey(randomize, "r")
+    onkey(bye, "space")
+
+def main():
+    getscreen().clearscreen()
+    ht(); penup()
+    init_shelf()
+    show_text(instructions1)
+    show_text(instructions2, line=1)
+    enable_keys()
+    listen()
+    return "EVENTLOOP"
+
+instructions1 = "press i for insertion sort, s for selection sort, q for quicksort"
+instructions2 = "spacebar to quit, r to randomize"
+
+if __name__=="__main__":
+    msg = main()
+    mainloop()
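
The Shelf methods above animate every move, but partition() and qsort()
implement an ordinary in-place quicksort; stripped of the turtle calls the
same control flow works on a plain list (an illustrative adaptation, not part
of the patch):

    def partition(items, left, right, pivot_index):
        pivot = items[pivot_index]
        items.insert(right, items.pop(pivot_index))    # park pivot at 'right'
        store_index = left
        for i in range(left, right):
            if items[i] < pivot:
                items.insert(store_index, items.pop(i))
                store_index += 1
        items.insert(store_index, items.pop(right))    # pivot to final place
        return store_index

    def qsort(items, left, right):
        if left < right:
            p = partition(items, left, right, left)
            qsort(items, left, p - 1)
            qsort(items, p + 1, right)

    data = [4, 2, 8, 9, 1, 5, 10, 3, 7, 6]
    qsort(data, 0, len(data) - 1)
    assert data == sorted(data)

The demo itself is meant to be launched from the turtledemo viewer
(python -m turtledemo) like the other entries in that package.
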
diff --git a/Lib/types.py b/Lib/types.py
index 7e4fec2..4fb2def 100644
--- a/Lib/types.py
+++ b/Lib/types.py
@@ -156,3 +156,6 @@
         result = type(self)(self.fget, self.fset, fdel, self.__doc__)
         result.overwrite_doc = self.overwrite_doc
         return result
+
+
+__all__ = [n for n in globals() if n[:1] != '_']
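
The new __all__ is computed from the module's globals at the end of the file,
so every public top-level name defined above it is exported by
"from types import *", while underscore-prefixed names (including the module
dunder attributes) are skipped.  The same idiom in a toy module (the names
here are made up for illustration):

    CONSTANT = 1

    def public_helper():
        return CONSTANT

    def _private_helper():
        return -CONSTANT

    # Keep every already-defined name that does not start with an underscore.
    __all__ = [n for n in globals() if n[:1] != '_']
    # -> ['CONSTANT', 'public_helper'] on CPython (definition order)
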
diff --git a/Lib/unittest/main.py b/Lib/unittest/main.py
index 180df86..b209a3a 100644
--- a/Lib/unittest/main.py
+++ b/Lib/unittest/main.py
@@ -58,7 +58,7 @@
     def __init__(self, module='__main__', defaultTest=None, argv=None,
                     testRunner=None, testLoader=loader.defaultTestLoader,
                     exit=True, verbosity=1, failfast=None, catchbreak=None,
-                    buffer=None, warnings=None):
+                    buffer=None, warnings=None, *, tb_locals=False):
         if isinstance(module, str):
             self.module = __import__(module)
             for part in module.split('.')[1:]:
@@ -73,8 +73,9 @@
         self.catchbreak = catchbreak
         self.verbosity = verbosity
         self.buffer = buffer
+        self.tb_locals = tb_locals
         if warnings is None and not sys.warnoptions:
-            # even if DreprecationWarnings are ignored by default
+            # even if DeprecationWarnings are ignored by default
             # print them anyway unless other warnings settings are
             # specified by the warnings arg or the -W python flag
             self.warnings = 'default'
@@ -159,7 +160,9 @@
         parser.add_argument('-q', '--quiet', dest='verbosity',
                             action='store_const', const=0,
                             help='Quiet output')
-
+        parser.add_argument('--locals', dest='tb_locals',
+                            action='store_true',
+                            help='Show local variables in tracebacks')
         if self.failfast is None:
             parser.add_argument('-f', '--failfast', dest='failfast',
                                 action='store_true',
@@ -231,10 +234,18 @@
             self.testRunner = runner.TextTestRunner
         if isinstance(self.testRunner, type):
             try:
-                testRunner = self.testRunner(verbosity=self.verbosity,
-                                             failfast=self.failfast,
-                                             buffer=self.buffer,
-                                             warnings=self.warnings)
+                try:
+                    testRunner = self.testRunner(verbosity=self.verbosity,
+                                                 failfast=self.failfast,
+                                                 buffer=self.buffer,
+                                                 warnings=self.warnings,
+                                                 tb_locals=self.tb_locals)
+                except TypeError:
+                    # didn't accept the tb_locals argument
+                    testRunner = self.testRunner(verbosity=self.verbosity,
+                                                 failfast=self.failfast,
+                                                 buffer=self.buffer,
+                                                 warnings=self.warnings)
             except TypeError:
                 # didn't accept the verbosity, buffer or failfast arguments
                 testRunner = self.testRunner()
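
With tb_locals threaded through TestProgram and the new --locals flag, local
variables can be included in failure tracebacks.  A hedged usage sketch (the
test module name is a placeholder):

    # From the command line:
    #   python -m unittest --locals test_spam
    #
    # Or programmatically; tb_locals is keyword-only and defaults to False.
    import unittest
    unittest.main(module='test_spam', tb_locals=True, exit=False)
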
diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py
index 8e0a643..a18f11b 100644
--- a/Lib/unittest/result.py
+++ b/Lib/unittest/result.py
@@ -45,6 +45,7 @@
         self.unexpectedSuccesses = []
         self.shouldStop = False
         self.buffer = False
+        self.tb_locals = False
         self._stdout_buffer = None
         self._stderr_buffer = None
         self._original_stdout = sys.stdout
@@ -179,9 +180,11 @@
         if exctype is test.failureException:
             # Skip assert*() traceback levels
             length = self._count_relevant_tb_levels(tb)
-            msgLines = traceback.format_exception(exctype, value, tb, length)
         else:
-            msgLines = traceback.format_exception(exctype, value, tb)
+            length = None
+        tb_e = traceback.TracebackException(
+            exctype, value, tb, limit=length, capture_locals=self.tb_locals)
+        msgLines = list(tb_e.format())
 
         if self.buffer:
             output = sys.stdout.getvalue()
diff --git a/Lib/unittest/runner.py b/Lib/unittest/runner.py
index 28b8865..2112262 100644
--- a/Lib/unittest/runner.py
+++ b/Lib/unittest/runner.py
@@ -126,7 +126,13 @@
     resultclass = TextTestResult
 
     def __init__(self, stream=None, descriptions=True, verbosity=1,
-                 failfast=False, buffer=False, resultclass=None, warnings=None):
+                 failfast=False, buffer=False, resultclass=None, warnings=None,
+                 *, tb_locals=False):
+        """Construct a TextTestRunner.
+
+        Subclasses should accept **kwargs to ensure compatibility as the
+        interface changes.
+        """
         if stream is None:
             stream = sys.stderr
         self.stream = _WritelnDecorator(stream)
@@ -134,6 +140,7 @@
         self.verbosity = verbosity
         self.failfast = failfast
         self.buffer = buffer
+        self.tb_locals = tb_locals
         self.warnings = warnings
         if resultclass is not None:
             self.resultclass = resultclass
@@ -147,6 +154,7 @@
         registerResult(result)
         result.failfast = self.failfast
         result.buffer = self.buffer
+        result.tb_locals = self.tb_locals
         with warnings.catch_warnings():
             if self.warnings:
                 # if self.warnings is set, use it to filter all the warnings
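
On the runner side, tb_locals is likewise keyword-only and is copied onto the
result object in run(), which is why the new docstring advises subclasses to
accept **kwargs.  A minimal sketch of both points:

    import io
    import unittest

    class MyRunner(unittest.TextTestRunner):
        def __init__(self, **kwargs):
            # Forward unknown keyword arguments so future keyword-only
            # options (like tb_locals here) do not break the subclass.
            super().__init__(**kwargs)

    runner = MyRunner(stream=io.StringIO(), tb_locals=True)
    result = runner.run(unittest.TestSuite())
    print(result.tb_locals)    # True -- copied onto the TestResult by run()
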
diff --git a/Lib/unittest/test/test_break.py b/Lib/unittest/test/test_break.py
index 0bf1a22..2c75019 100644
--- a/Lib/unittest/test/test_break.py
+++ b/Lib/unittest/test/test_break.py
@@ -211,6 +211,7 @@
                 self.verbosity = verbosity
                 self.failfast = failfast
                 self.catchbreak = catchbreak
+                self.tb_locals = False
                 self.testRunner = FakeRunner
                 self.test = test
                 self.result = None
@@ -221,6 +222,7 @@
         self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
                                                      'verbosity': verbosity,
                                                      'failfast': failfast,
+                                                     'tb_locals': False,
                                                      'warnings': None})])
         self.assertEqual(FakeRunner.runArgs, [test])
         self.assertEqual(p.result, result)
@@ -235,6 +237,7 @@
         self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
                                                      'verbosity': verbosity,
                                                      'failfast': failfast,
+                                                     'tb_locals': False,
                                                      'warnings': None})])
         self.assertEqual(FakeRunner.runArgs, [test])
         self.assertEqual(p.result, result)
diff --git a/Lib/unittest/test/test_program.py b/Lib/unittest/test/test_program.py
index 725d67f..1cfc179 100644
--- a/Lib/unittest/test/test_program.py
+++ b/Lib/unittest/test/test_program.py
@@ -134,6 +134,7 @@
     result = None
     verbosity = 1
     defaultTest = None
+    tb_locals = False
     testRunner = None
     testLoader = unittest.defaultTestLoader
     module = '__main__'
@@ -147,18 +148,19 @@
 class FakeRunner(object):
     initArgs = None
     test = None
-    raiseError = False
+    raiseError = 0
 
     def __init__(self, **kwargs):
         FakeRunner.initArgs = kwargs
         if FakeRunner.raiseError:
-            FakeRunner.raiseError = False
+            FakeRunner.raiseError -= 1
             raise TypeError
 
     def run(self, test):
         FakeRunner.test = test
         return RESULT
 
+
 class TestCommandLineArgs(unittest.TestCase):
 
     def setUp(self):
@@ -166,7 +168,7 @@
         self.program.createTests = lambda: None
         FakeRunner.initArgs = None
         FakeRunner.test = None
-        FakeRunner.raiseError = False
+        FakeRunner.raiseError = 0
 
     def testVerbosity(self):
         program = self.program
@@ -256,6 +258,7 @@
         self.assertEqual(FakeRunner.initArgs, {'verbosity': 'verbosity',
                                                 'failfast': 'failfast',
                                                 'buffer': 'buffer',
+                                                'tb_locals': False,
                                                 'warnings': 'warnings'})
         self.assertEqual(FakeRunner.test, 'test')
         self.assertIs(program.result, RESULT)
@@ -274,10 +277,25 @@
         self.assertEqual(FakeRunner.test, 'test')
         self.assertIs(program.result, RESULT)
 
+    def test_locals(self):
+        program = self.program
+
+        program.testRunner = FakeRunner
+        program.parseArgs([None, '--locals'])
+        self.assertEqual(True, program.tb_locals)
+        program.runTests()
+        self.assertEqual(FakeRunner.initArgs, {'buffer': False,
+                                               'failfast': False,
+                                               'tb_locals': True,
+                                               'verbosity': 1,
+                                               'warnings': None})
+
     def testRunTestsOldRunnerClass(self):
         program = self.program
 
-        FakeRunner.raiseError = True
+        # Two TypeErrors are needed to fall all the way back to old-style
+        # runners - one to fail tb_locals, one to fail buffer etc.
+        FakeRunner.raiseError = 2
         program.testRunner = FakeRunner
         program.verbosity = 'verbosity'
         program.failfast = 'failfast'
diff --git a/Lib/unittest/test/test_result.py b/Lib/unittest/test/test_result.py
index 489fe17..e39e2ea 100644
--- a/Lib/unittest/test/test_result.py
+++ b/Lib/unittest/test/test_result.py
@@ -8,6 +8,20 @@
 import unittest
 
 
+class MockTraceback(object):
+    class TracebackException:
+        def __init__(self, *args, **kwargs):
+            self.capture_locals = kwargs.get('capture_locals', False)
+        def format(self):
+            result = ['A traceback']
+            if self.capture_locals:
+                result.append('locals')
+            return result
+
+def restore_traceback():
+    unittest.result.traceback = traceback
+
+
 class Test_TestResult(unittest.TestCase):
     # Note: there are not separate tests for TestResult.wasSuccessful(),
     # TestResult.errors, TestResult.failures, TestResult.testsRun or
@@ -227,6 +241,25 @@
         self.assertIs(test_case, test)
         self.assertIsInstance(formatted_exc, str)
 
+    def test_addError_locals(self):
+        class Foo(unittest.TestCase):
+            def test_1(self):
+                1/0
+
+        test = Foo('test_1')
+        result = unittest.TestResult()
+        result.tb_locals = True
+
+        unittest.result.traceback = MockTraceback
+        self.addCleanup(restore_traceback)
+        result.startTestRun()
+        test.run(result)
+        result.stopTestRun()
+
+        self.assertEqual(len(result.errors), 1)
+        test_case, formatted_exc = result.errors[0]
+        self.assertEqual('A tracebacklocals', formatted_exc)
+
     def test_addSubTest(self):
         class Foo(unittest.TestCase):
             def test_1(self):
@@ -398,6 +431,7 @@
     self.testsRun = 0
     self.shouldStop = False
     self.buffer = False
+    self.tb_locals = False
 
 classDict['__init__'] = __init__
 OldResult = type('OldResult', (object,), classDict)
@@ -454,15 +488,6 @@
         runner.run(Test('testFoo'))
 
 
-class MockTraceback(object):
-    @staticmethod
-    def format_exception(*_):
-        return ['A traceback']
-
-def restore_traceback():
-    unittest.result.traceback = traceback
-
-
 class TestOutputBuffering(unittest.TestCase):
 
     def setUp(self):
diff --git a/Lib/unittest/test/test_runner.py b/Lib/unittest/test/test_runner.py
index 7c0bd51..9cbc260 100644
--- a/Lib/unittest/test/test_runner.py
+++ b/Lib/unittest/test/test_runner.py
@@ -158,7 +158,7 @@
         self.assertEqual(runner.warnings, None)
         self.assertTrue(runner.descriptions)
         self.assertEqual(runner.resultclass, unittest.TextTestResult)
-
+        self.assertFalse(runner.tb_locals)
 
     def test_multiple_inheritance(self):
         class AResult(unittest.TestResult):
@@ -172,14 +172,13 @@
         # on arguments in its __init__ super call
         ATextResult(None, None, 1)
 
-
     def testBufferAndFailfast(self):
         class Test(unittest.TestCase):
             def testFoo(self):
                 pass
         result = unittest.TestResult()
         runner = unittest.TextTestRunner(stream=io.StringIO(), failfast=True,
-                                           buffer=True)
+                                         buffer=True)
         # Use our result object
         runner._makeResult = lambda: result
         runner.run(Test('testFoo'))
@@ -187,6 +186,11 @@
         self.assertTrue(result.failfast)
         self.assertTrue(result.buffer)
 
+    def test_locals(self):
+        runner = unittest.TextTestRunner(stream=io.StringIO(), tb_locals=True)
+        result = runner.run(unittest.TestSuite())
+        self.assertEqual(True, result.tb_locals)
+
     def testRunnerRegistersResult(self):
         class Test(unittest.TestCase):
             def testFoo(self):
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
index 821dae6..66420d2 100644
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -869,12 +869,12 @@
     """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
     global _typeprog
     if _typeprog is None:
-        _typeprog = re.compile('^([^/:]+):')
+        _typeprog = re.compile('([^/:]+):(.*)', re.DOTALL)
 
     match = _typeprog.match(url)
     if match:
-        scheme = match.group(1)
-        return scheme.lower(), url[len(scheme) + 1:]
+        scheme, data = match.groups()
+        return scheme.lower(), data
     return None, url
 
 _hostprog = None
@@ -882,38 +882,25 @@
     """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
     global _hostprog
     if _hostprog is None:
-        _hostprog = re.compile('^//([^/?]*)(.*)$')
+        _hostprog = re.compile('//([^/?]*)(.*)', re.DOTALL)
 
     match = _hostprog.match(url)
     if match:
-        host_port = match.group(1)
-        path = match.group(2)
-        if path and not path.startswith('/'):
+        host_port, path = match.groups()
+        if path and path[0] != '/':
             path = '/' + path
         return host_port, path
     return None, url
 
-_userprog = None
 def splituser(host):
     """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
-    global _userprog
-    if _userprog is None:
-        _userprog = re.compile('^(.*)@(.*)$')
+    user, delim, host = host.rpartition('@')
+    return (user if delim else None), host
 
-    match = _userprog.match(host)
-    if match: return match.group(1, 2)
-    return None, host
-
-_passwdprog = None
 def splitpasswd(user):
     """splitpasswd('user:passwd') -> 'user', 'passwd'."""
-    global _passwdprog
-    if _passwdprog is None:
-        _passwdprog = re.compile('^([^:]*):(.*)$',re.S)
-
-    match = _passwdprog.match(user)
-    if match: return match.group(1, 2)
-    return user, None
+    user, delim, passwd = user.partition(':')
+    return user, (passwd if delim else None)
 
 # splittag('/path#tag') --> '/path', 'tag'
 _portprog = None
@@ -921,7 +908,7 @@
     """splitport('host:port') --> 'host', 'port'."""
     global _portprog
     if _portprog is None:
-        _portprog = re.compile('^(.*):([0-9]*)$')
+        _portprog = re.compile('(.*):([0-9]*)$', re.DOTALL)
 
     match = _portprog.match(host)
     if match:
@@ -930,47 +917,34 @@
             return host, port
     return host, None
 
-_nportprog = None
 def splitnport(host, defport=-1):
     """Split host and port, returning numeric port.
     Return given default port if no ':' found; defaults to -1.
     Return numerical port if a valid number are found after ':'.
     Return None if ':' but not a valid number."""
-    global _nportprog
-    if _nportprog is None:
-        _nportprog = re.compile('^(.*):(.*)$')
-
-    match = _nportprog.match(host)
-    if match:
-        host, port = match.group(1, 2)
-        if port:
-            try:
-                nport = int(port)
-            except ValueError:
-                nport = None
-            return host, nport
+    host, delim, port = host.rpartition(':')
+    if not delim:
+        host = port
+    elif port:
+        try:
+            nport = int(port)
+        except ValueError:
+            nport = None
+        return host, nport
     return host, defport
 
-_queryprog = None
 def splitquery(url):
     """splitquery('/path?query') --> '/path', 'query'."""
-    global _queryprog
-    if _queryprog is None:
-        _queryprog = re.compile('^(.*)\?([^?]*)$')
-
-    match = _queryprog.match(url)
-    if match: return match.group(1, 2)
+    path, delim, query = url.rpartition('?')
+    if delim:
+        return path, query
     return url, None
 
-_tagprog = None
 def splittag(url):
     """splittag('/path#tag') --> '/path', 'tag'."""
-    global _tagprog
-    if _tagprog is None:
-        _tagprog = re.compile('^(.*)#([^#]*)$')
-
-    match = _tagprog.match(url)
-    if match: return match.group(1, 2)
+    path, delim, tag = url.rpartition('#')
+    if delim:
+        return path, tag
     return url, None
 
 def splitattr(url):
@@ -979,13 +953,7 @@
     words = url.split(';')
     return words[0], words[1:]
 
-_valueprog = None
 def splitvalue(attr):
     """splitvalue('attr=value') --> 'attr', 'value'."""
-    global _valueprog
-    if _valueprog is None:
-        _valueprog = re.compile('^([^=]*)=(.*)$')
-
-    match = _valueprog.match(attr)
-    if match: return match.group(1, 2)
-    return attr, None
+    attr, delim, value = attr.partition('=')
+    return attr, (value if delim else None)
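
The rewritten helpers rely on str.partition()/str.rpartition() returning an
empty separator when the split character is absent, which is how they keep
the old distinction between "no delimiter" (None) and an empty component.
A few illustrative calls against the code above (these are undocumented
internal helpers, so only the behaviour shown in this patch is assumed):

    from urllib.parse import (splituser, splitpasswd, splitnport,
                              splitquery, splittag, splitvalue)

    splituser('user:pwd@example.com:8080')  # ('user:pwd', 'example.com:8080')
    splituser('example.com')                # (None, 'example.com') - no '@'
    splitpasswd('user:pwd')                 # ('user', 'pwd')
    splitnport('example.com:80')            # ('example.com', 80)
    splitnport('example.com:x')             # ('example.com', None) - bad port
    splitquery('/path?query=1')             # ('/path', 'query=1')
    splittag('/path#frag')                  # ('/path', 'frag')
    splitvalue('attr=value')                # ('attr', 'value')
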
diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py
index 16576d5..0ae1cb6 100755
--- a/Mac/BuildScript/build-installer.py
+++ b/Mac/BuildScript/build-installer.py
@@ -237,9 +237,9 @@
 
         result.extend([
           dict(
-              name="OpenSSL 1.0.1l",
-              url="https://www.openssl.org/source/openssl-1.0.1l.tar.gz",
-              checksum='cdb22925fc9bc97ccbf1e007661f2aa6',
+              name="OpenSSL 1.0.2",
+              url="https://www.openssl.org/source/openssl-1.0.2.tar.gz",
+              checksum='38373013fc85c790aabf8837969c5eba',
               patches=[
                   "openssl_sdk_makedepend.patch",
                    ],
diff --git a/Mac/BuildScript/openssl_sdk_makedepend.patch b/Mac/BuildScript/openssl_sdk_makedepend.patch
index a72f5a3..c2e66c2 100644
--- a/Mac/BuildScript/openssl_sdk_makedepend.patch
+++ b/Mac/BuildScript/openssl_sdk_makedepend.patch
@@ -1,16 +1,18 @@
+# HG changeset patch
+# Parent  973741cf5a045b2ba895094d98b2c3649dc00b61
 # openssl_sdk_makedepend.patch
 #
-# 	using openssl 1.0.1k
+# 	using openssl 1.0.2
 #
 # - support building with an OS X SDK
 # - allow "make depend" to use compilers with names other than "gcc"
 
 diff Configure
 
-diff -r 99ae439a07f1 Configure
---- a/Configure	Fri Jan 09 12:50:43 2015 -0800
-+++ b/Configure	Fri Jan 09 12:53:52 2015 -0800
-@@ -577,11 +577,11 @@
+diff -r 973741cf5a04 Configure
+--- a/Configure	Thu Mar 05 13:45:59 2015 -0800
++++ b/Configure	Thu Mar 05 14:00:20 2015 -0800
+@@ -617,12 +617,12 @@
  
  ##### MacOS X (a.k.a. Rhapsody or Darwin) setup
  "rhapsody-ppc-cc","cc:-O3 -DB_ENDIAN::(unknown):MACOSX_RHAPSODY::BN_LLONG RC4_CHAR RC4_CHUNK DES_UNROLL BF_PTR:${no_asm}::",
@@ -18,16 +20,18 @@
 -"darwin64-ppc-cc","cc:-arch ppc64 -O3 -DB_ENDIAN::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:SIXTY_FOUR_BIT_LONG RC4_CHAR RC4_CHUNK DES_UNROLL BF_PTR:${ppc64_asm}:osx64:dlfcn:darwin-shared:-fPIC -fno-common:-arch ppc64 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
 -"darwin-i386-cc","cc:-arch i386 -O3 -fomit-frame-pointer -DL_ENDIAN::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:BN_LLONG RC4_INT RC4_CHUNK DES_UNROLL BF_PTR:".eval{my $asm=$x86_asm;$asm=~s/cast\-586\.o//;$asm}.":macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch i386 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
 -"debug-darwin-i386-cc","cc:-arch i386 -g3 -DL_ENDIAN::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:BN_LLONG RC4_INT RC4_CHUNK DES_UNROLL BF_PTR:${x86_asm}:macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch i386 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
--"darwin64-x86_64-cc","cc:-arch x86_64 -O3 -DL_ENDIAN -Wall::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:SIXTY_FOUR_BIT_LONG RC4_CHAR RC4_CHUNK DES_INT DES_UNROLL:".eval{my $asm=$x86_64_asm;$asm=~s/rc4\-[^:]+//;$asm}.":macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch x86_64 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
+-"darwin64-x86_64-cc","cc:-arch x86_64 -O3 -DL_ENDIAN -Wall::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:SIXTY_FOUR_BIT_LONG RC4_CHUNK DES_INT DES_UNROLL:".eval{my $asm=$x86_64_asm;$asm=~s/rc4\-[^:]+//;$asm}.":macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch x86_64 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
+-"debug-darwin64-x86_64-cc","cc:-arch x86_64 -ggdb -g2 -O0 -DL_ENDIAN -Wall::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:SIXTY_FOUR_BIT_LONG RC4_CHUNK DES_INT DES_UNROLL:".eval{my $asm=$x86_64_asm;$asm=~s/rc4\-[^:]+//;$asm}.":macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch x86_64 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
 +"darwin-ppc-cc","cc:-arch ppc -isysroot \$(OSX_SDK) -O3 -DB_ENDIAN -Wa,-force_cpusubtype_ALL::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:BN_LLONG RC4_CHAR RC4_CHUNK DES_UNROLL BF_PTR:${ppc32_asm}:osx32:dlfcn:darwin-shared:-fPIC -fno-common:-arch ppc -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
 +"darwin64-ppc-cc","cc:-arch ppc64 -isysroot \$(OSX_SDK) -O3 -DB_ENDIAN::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:SIXTY_FOUR_BIT_LONG RC4_CHAR RC4_CHUNK DES_UNROLL BF_PTR:${ppc64_asm}:osx64:dlfcn:darwin-shared:-fPIC -fno-common:-arch ppc64 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
 +"darwin-i386-cc","cc:-arch i386 -isysroot \$(OSX_SDK) -O3 -fomit-frame-pointer -DL_ENDIAN::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:BN_LLONG RC4_INT RC4_CHUNK DES_UNROLL BF_PTR:".eval{my $asm=$x86_asm;$asm=~s/cast\-586\.o//;$asm}.":macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch i386 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
 +"debug-darwin-i386-cc","cc:-arch i386 -isysroot \$(OSX_SDK) -g3 -DL_ENDIAN::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:BN_LLONG RC4_INT RC4_CHUNK DES_UNROLL BF_PTR:${x86_asm}:macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch i386 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
-+"darwin64-x86_64-cc","cc:-arch x86_64 -isysroot \$(OSX_SDK) -O3 -DL_ENDIAN -Wall::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:SIXTY_FOUR_BIT_LONG RC4_CHAR RC4_CHUNK DES_INT DES_UNROLL:".eval{my $asm=$x86_64_asm;$asm=~s/rc4\-[^:]+//;$asm}.":macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch x86_64 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
++"darwin64-x86_64-cc","cc:-arch x86_64 -isysroot \$(OSX_SDK) -O3 -DL_ENDIAN -Wall::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:SIXTY_FOUR_BIT_LONG RC4_CHUNK DES_INT DES_UNROLL:".eval{my $asm=$x86_64_asm;$asm=~s/rc4\-[^:]+//;$asm}.":macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch x86_64 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
++"debug-darwin64-x86_64-cc","cc:-arch x86_64 -isysroot \$(OSX_SDK) -ggdb -g2 -O0 -DL_ENDIAN -Wall::-D_REENTRANT:MACOSX:-Wl,-search_paths_first%:SIXTY_FOUR_BIT_LONG RC4_CHUNK DES_INT DES_UNROLL:".eval{my $asm=$x86_64_asm;$asm=~s/rc4\-[^:]+//;$asm}.":macosx:dlfcn:darwin-shared:-fPIC -fno-common:-arch x86_64 -dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
  "debug-darwin-ppc-cc","cc:-DBN_DEBUG -DREF_CHECK -DCONF_DEBUG -DCRYPTO_MDEBUG -DB_ENDIAN -g -Wall -O::-D_REENTRANT:MACOSX::BN_LLONG RC4_CHAR RC4_CHUNK DES_UNROLL BF_PTR:${ppc32_asm}:osx32:dlfcn:darwin-shared:-fPIC:-dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
  # iPhoneOS/iOS
  "iphoneos-cross","llvm-gcc:-O3 -isysroot \$(CROSS_TOP)/SDKs/\$(CROSS_SDK) -fomit-frame-pointer -fno-common::-D_REENTRANT:iOS:-Wl,-search_paths_first%:BN_LLONG RC4_CHAR RC4_CHUNK DES_UNROLL BF_PTR:${no_asm}:dlfcn:darwin-shared:-fPIC -fno-common:-dynamiclib:.\$(SHLIB_MAJOR).\$(SHLIB_MINOR).dylib",
-@@ -1629,7 +1629,7 @@
+@@ -1685,7 +1685,7 @@
  		s/^CC=.*$/CC= $cc/;
  		s/^AR=\s*ar/AR= $ar/;
  		s/^RANLIB=.*/RANLIB= $ranlib/;
@@ -36,9 +40,9 @@
  		}
  	s/^CFLAG=.*$/CFLAG= $cflags/;
  	s/^DEPFLAG=.*$/DEPFLAG=$depflags/;
-diff -r 99ae439a07f1 util/domd
---- a/util/domd	Fri Jan 09 12:50:43 2015 -0800
-+++ b/util/domd	Fri Jan 09 12:53:52 2015 -0800
+diff -r 973741cf5a04 util/domd
+--- a/util/domd	Thu Mar 05 13:45:59 2015 -0800
++++ b/util/domd	Thu Mar 05 14:00:20 2015 -0800
 @@ -14,7 +14,7 @@
  cp Makefile Makefile.save
  # fake the presence of Kerberos
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 55032d0..d9ee777 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -1143,6 +1143,7 @@
 		test/audiodata \
 		test/capath test/data \
 		test/cjkencodings test/decimaltestdata test/xmltestdata \
+		test/eintrdata \
 		test/imghdrdata \
 		test/subprocessdata test/sndhdrdata test/support \
 		test/tracedmodules test/encoded_modules \
diff --git a/Misc/ACKS b/Misc/ACKS
index c1d0dab..7707f67 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -513,6 +513,7 @@
 Dag Gruneau
 Filip Gruszczyński
 Thomas Guettler
+Yuyang Guo
 Anuj Gupta
 Michael Guravage
 Lars Gustäbel
@@ -788,6 +789,7 @@
 Simon Law
 Julia Lawall
 Chris Lawrence
+Mark Lawrence
 Brian Leair
 Mathieu Leduc-Hamel
 Amandine Lee
@@ -1269,6 +1271,7 @@
 Pete Shinners
 Michael Shiplett
 John W. Shipman
+Alex Shkop
 Joel Shprentz
 Yue Shuaijie
 Terrel Shumway
diff --git a/Misc/NEWS b/Misc/NEWS
index 1a9c481..343cf8f 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -2,6 +2,143 @@
 Python News
 +++++++++++
 
+What's New in Python 3.5 alpha 2?
+=================================
+
+Release date: 2015-03-08
+
+Core and Builtins
+-----------------
+
+- Issue #23571: PyObject_Call() and PyCFunction_Call() now raise a SystemError
+  if a function returns a result and raises an exception. The SystemError is
+  chained to the previous exception.
+
+Library
+-------
+
+- Issue #22524: New os.scandir() function, part of the PEP 471: "os.scandir()
+  function -- a better and faster directory iterator". Patch written by Ben
+  Hoyt.
+
+- Issue #23103: Reduced the memory consumption of IPv4Address and IPv6Address.
+
+- Issue #21793: BaseHTTPRequestHandler again logs response code as numeric,
+  not as stringified enum.  Patch by Demian Brecht.
+
+- Issue #23476: In the ssl module, enable OpenSSL's X509_V_FLAG_TRUSTED_FIRST
+  flag on certificate stores when it is available.
+
+- Issue #23576: Avoid stalling in SSL reads when EOF has been reached in the
+  SSL layer but the underlying connection hasn't been closed.
+
+- Issue #23504: Added an __all__ to the types module.
+
+- Issue #23563: Optimized utility functions in urllib.parse.
+
+- Issue #7830: Flatten nested functools.partial.
+
+- Issue #20204: Added the __module__ attribute to _tkinter classes.
+
+- Issue #19980: Improved help() for non-recognized strings.  help('') now
+  shows the help on str.  help('help') now shows the help on help().
+  Original patch by Mark Lawrence.
+
+- Issue #23521: Corrected pure python implementation of timedelta division.
+
+ * Eliminated OverflowError from timedelta * float for some floats;
+ * Corrected rounding in timedelta true division.
+
+- Issue #21619: Popen objects no longer leave a zombie after exit in the with
+  statement if the pipe was broken.  Patch by Martin Panter.
+
+- Issue #22936: Make it possible to show local variables in tracebacks for
+  both the traceback module and unittest.
+
+- Issue #15955: Add an option to limit the output size in bz2.decompress().
+  Patch by Nikolaus Rath.
+
+- Issue #6639: Module-level turtle functions no longer raise TclError after
+  closing the window.
+
+- Issues #814253, #9179: Group references and conditional group references now
+  work in lookbehind assertions in regular expressions.
+
+- Issue #23215: Multibyte codecs with custom error handlers that ignore errors
+  consumed too much memory and raised SystemError or MemoryError.
+  Original patch by Aleksi Torhamo.
+
+- Issue #5700: io.FileIO() called flush() after closing the file.
+  flush() was not called in close() if closefd=False.
+
+- Issue #23374: Fixed pydoc failure with non-ASCII files when stdout encoding
+  differs from file system encoding (e.g. on Mac OS).
+
+- Issue #23481: Remove RC4 from the SSL module's default cipher list.
+
+- Issue #21548: Fix pydoc.synopsis() and pydoc.apropos() on modules with empty
+  docstrings.
+
+- Issue #22885: Fixed arbitrary code execution vulnerability in the dbm.dumb
+  module.  Original patch by Claudiu Popa.
+
+- Issue #23239: ssl.match_hostname() now supports matching of IP addresses.
+
+- Issue #23146: Fix mishandling of absolute Windows paths with forward
+  slashes in pathlib.
+
+- Issue #23096: Pickle representation of floats with protocol 0 now is the same
+  for both Python and C implementations.
+
+- Issue #19105: pprint now more efficiently uses free space at the right.
+
+- Issue #14910: Add allow_abbrev parameter to argparse.ArgumentParser. Patch by
+  Jonathan Paugh, Steven Bethard, paul j3 and Daniel Eriksson.  (A usage
+  sketch follows this Misc/NEWS diff.)
+
+- Issue #21717: tarfile.open() now supports 'x' (exclusive creation) mode.
+  (A usage sketch follows this Misc/NEWS diff.)
+
+- Issue #23344: marshal.dumps() is now 20-25% faster on average.
+
+- Issue #20416: marshal.dumps() with protocols 3 and 4 is now 40-50% faster on
+  average.
+
+- Issue #23421: Fixed compression in tarfile CLI.  Patch by wdv4758h.
+
+- Issue #23367: Fix possible overflows in the unicodedata module.
+
+- Issue #23361: Fix possible overflow in Windows subprocess creation code.
+
+- logging.handlers.QueueListener now takes a respect_handler_level keyword
+  argument which, if set to True, makes the listener take each handler's level
+  into account when deciding whether to pass it a message.
+
+- Issue #19705: turtledemo now has a visual sorting algorithm demo.  Original
+  patch from Jason Yeo.
+
+Build
+-----
+
+- Issue #23445: pydebug builds now use "gcc -Og" where possible, to make
+  the resulting executable faster.
+
+- Issue #23593: Update OS X 10.5 installer build to use OpenSSL 1.0.2.
+
+C API
+-----
+
+- Issue #20204: Deprecation warning is now raised for builtin types without the
+  __module__ attribute.
+
+Windows
+-------
+
+- Issue #23465: Implement PEP 486 - Make the Python Launcher aware of virtual
+  environments. Patch by Paul Moore.
+
+- Issue #23437: Make user scripts directory versioned on Windows. Patch by Paul
+  Moore.
+
+
 What's New in Python 3.5 alpha 1?
 =================================
 
@@ -263,8 +400,6 @@
 
 - Issue #23366: Fixed possible integer overflow in itertools.combinations.
 
-- Issue #23366: Fixed possible integer overflow in itertools.combinations.
-
 - Issue #23369: Fixed possible integer overflow in
   _json.encode_basestring_ascii.
 
@@ -294,7 +429,7 @@
   __ne__() now works correctly.
 
 - Issue #19996: :class:`email.feedparser.FeedParser` now handles (malformed)
-  headers with no key rather than amusing the body has started.
+  headers with no key rather than assuming the body has started.
 
 - Issue #20188: Support Application-Layer Protocol Negotiation (ALPN) in the ssl
   module.
@@ -327,6 +462,13 @@
   now clears its internal reference to the selector mapping to break a
   reference cycle. Initial patch written by Martin Richard.
 
+- Issue #17911: Provide a way to seed the linecache for a PEP-302 module
+  without actually loading the code.
+
+- Issue #17911: Provide a new object API for traceback, including the ability
+  to not lookup lines at all until the traceback is actually rendered, without
+  any trace of the original objects being kept alive.
+
 - Issue #19777: Provide a home() classmethod on Path objects.  Contributed
   by Victor Salgado and Mayank Tripathi.
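
A minimal usage sketch for the os.scandir() entry in the Misc/NEWS hunk above
(assumes Python 3.5+; per PEP 471, os.scandir() yields os.DirEntry objects
whose name, is_file() and stat() are used here; the directory '.' is
illustrative):

    import os

    # Collect (name, size) pairs for regular files; DirEntry.is_file() can
    # often answer from the directory listing without an extra stat() call.
    files = []
    for entry in os.scandir('.'):
        if entry.is_file():
            files.append((entry.name, entry.stat().st_size))
    print(files)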
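
A sketch of the new argparse allow_abbrev parameter mentioned above (assumes
the Python 3.5 signature ArgumentParser(allow_abbrev=False); the option name
is illustrative):

    import argparse

    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('--verbose', action='store_true')

    # The full option still works; an abbreviation such as --verb is now
    # rejected instead of being silently expanded to --verbose.
    print(parser.parse_args(['--verbose']).verbose)   # True
    # parser.parse_args(['--verb'])  # error: unrecognized arguments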
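
A sketch of tarfile's new 'x' (exclusive creation) mode mentioned above
(assumes Python 3.5; like built-in open(), 'x' raises FileExistsError if the
archive already exists; the file names are illustrative):

    import tarfile

    try:
        # 'x:gz' creates a new gzip-compressed archive, failing if it exists.
        with tarfile.open('backup.tar.gz', 'x:gz') as tar:
            tar.add('Misc/NEWS')
    except FileExistsError:
        print('backup.tar.gz already exists; not overwriting')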
 
diff --git a/Misc/README b/Misc/README
index e7780a2..ddb8f3f 100644
--- a/Misc/README
+++ b/Misc/README
@@ -21,7 +21,6 @@
 README.AIX              Information about using Python on AIX
 README.coverity         Information about running Coverity's Prevent on Python
 README.valgrind         Information for Valgrind users, see valgrind-python.supp
-RPM                     (Old) tools to build RPMs
 SpecialBuilds.txt       Describes extra symbols you can set for debug builds
 svnmap.txt              Map of old SVN revs and branches to hg changeset ids
 valgrind-python.supp    Valgrind suppression file, see README.valgrind
diff --git a/Misc/RPM/README b/Misc/RPM/README
deleted file mode 100644
index d883c95..0000000
--- a/Misc/RPM/README
+++ /dev/null
@@ -1,33 +0,0 @@
-This directory contains support file used to build RPM releases of
-Python.  Its contents are maintained by Sean Reifschneider
-<jafo@tummy.com>.
-
-If you wish to build RPMs from the base Python release tar-file, note
-that you will have to download the
-"doc/<version>/html-<version>.tar.bz2"
-file from python.org and place it into your "SOURCES" directory for
-the build to complete.  This is the same directory that you place the
-Python-2.3.1 release tar-file in.  You can then use the ".spec" file in
-this directory to build RPMs.
-
-You may also wish to pursue RPMs provided by distribution makers to see if
-they have one suitable for your uses.  If, for example, you just want a
-slightly newer version of Python than what the distro provides, you could
-pick up the closest SRPM your distro provides, and then modify it to
-the newer version, and build that.  It may be as simple as just changing
-the "version" information in the spec file (or it may require fixing
-patches).
-
-NOTE: I am *NOT* recommending just using the binary RPM, and never do an
-install with "--force" or "--nodeps".
-
-Also worth pursuing may be newer versions provided by similar distros.  For
-example, a Python 3 SRPM from Fedora may be a good baseline to try building
-on CentOS.
-
-Many newer SRPMs won't install on older distros because of format changes.
-You can manually extract these SRPMS with:
-
-   mkdir foo
-   cd foo
-   rpm2cpio <../python3-*.src.rpm | cpio -ivd
diff --git a/Misc/RPM/python-3.5.spec b/Misc/RPM/python-3.5.spec
deleted file mode 100644
index 8efda0e..0000000
--- a/Misc/RPM/python-3.5.spec
+++ /dev/null
@@ -1,389 +0,0 @@
-##########################
-#  User-modifiable configs
-##########################
-
-#  Is the resulting package and the installed binary named "python" or
-#  "python2"?
-#WARNING: Commenting out doesn't work.  Last line is what's used.
-%define config_binsuffix none
-%define config_binsuffix 2.6
-
-#  Build tkinter?  "auto" enables it if /usr/bin/wish exists.
-#WARNING: Commenting out doesn't work.  Last line is what's used.
-%define config_tkinter no
-%define config_tkinter yes
-%define config_tkinter auto
-
-#  Use pymalloc?  The last line (commented or not) determines wether
-#  pymalloc is used.
-#WARNING: Commenting out doesn't work.  Last line is what's used.
-%define config_pymalloc no
-%define config_pymalloc yes
-
-#  Enable IPV6?
-#WARNING: Commenting out doesn't work.  Last line is what's used.
-%define config_ipv6 yes
-%define config_ipv6 no
-
-#  Build shared libraries or .a library?
-#WARNING: Commenting out doesn't work.  Last line is what's used.
-%define config_sharedlib no
-%define config_sharedlib yes
-
-#  Location of the HTML directory.
-%define config_htmldir /var/www/html/python
-
-#################################
-#  End of user-modifiable configs
-#################################
-
-%define name python
-#--start constants--
-%define version 3.5.0a1
-%define libvers 3.5
-#--end constants--
-%define release 1pydotorg
-%define __prefix /usr
-
-#  kludge to get around rpm <percent>define weirdness
-%define ipv6 %(if [ "%{config_ipv6}" = yes ]; then echo --enable-ipv6; else echo --disable-ipv6; fi)
-%define pymalloc %(if [ "%{config_pymalloc}" = yes ]; then echo --with-pymalloc; else echo --without-pymalloc; fi)
-%define binsuffix %(if [ "%{config_binsuffix}" = none ]; then echo ; else echo "%{config_binsuffix}"; fi)
-%define include_tkinter %(if [ \\( "%{config_tkinter}" = auto -a -f /usr/bin/wish \\) -o "%{config_tkinter}" = yes ]; then echo 1; else echo 0; fi)
-%define libdirname %(( uname -m | egrep -q '_64$' && [ -d /usr/lib64 ] && echo lib64 ) || echo lib)
-%define sharedlib %(if [ "%{config_sharedlib}" = yes ]; then echo --enable-shared; else echo ; fi)
-%define include_sharedlib %(if [ "%{config_sharedlib}" = yes ]; then echo 1; else echo 0; fi)
-
-#  detect if documentation is available
-%define include_docs %(if [ -f "%{_sourcedir}/html-%{version}.tar.bz2" ]; then echo 1; else echo 0; fi)
-
-Summary: An interpreted, interactive, object-oriented programming language.
-Name: %{name}%{binsuffix}
-Version: %{version}
-Release: %{release}
-License: PSF
-Group: Development/Languages
-Source: Python-%{version}.tar.bz2
-%if %{include_docs}
-Source1: html-%{version}.tar.bz2
-%endif
-BuildRoot: %{_tmppath}/%{name}-%{version}-root
-BuildPrereq: expat-devel
-BuildPrereq: db4-devel
-BuildPrereq: gdbm-devel
-BuildPrereq: sqlite-devel
-Prefix: %{__prefix}
-Packager: Sean Reifschneider <jafo-rpms@tummy.com>
-
-%description
-Python is an interpreted, interactive, object-oriented programming
-language.  It incorporates modules, exceptions, dynamic typing, very high
-level dynamic data types, and classes. Python combines remarkable power
-with very clear syntax. It has interfaces to many system calls and
-libraries, as well as to various window systems, and is extensible in C or
-C++. It is also usable as an extension language for applications that need
-a programmable interface.  Finally, Python is portable: it runs on many
-brands of UNIX, on PCs under Windows, MS-DOS, and on the Mac.
-
-%package devel
-Summary: The libraries and header files needed for Python extension development.
-Prereq: python%{binsuffix} = %{PACKAGE_VERSION}
-Group: Development/Libraries
-
-%description devel
-The Python programming language's interpreter can be extended with
-dynamically loaded extensions and can be embedded in other programs.
-This package contains the header files and libraries needed to do
-these types of tasks.
-
-Install python-devel if you want to develop Python extensions.  The
-python package will also need to be installed.  You'll probably also
-want to install the python-docs package, which contains Python
-documentation.
-
-%if %{include_tkinter}
-%package tkinter
-Summary: A graphical user interface for the Python scripting language.
-Group: Development/Languages
-Prereq: python%{binsuffix} = %{PACKAGE_VERSION}-%{release}
-
-%description tkinter
-The Tkinter (Tk interface) program is an graphical user interface for
-the Python scripting language.
-
-You should install the tkinter package if you'd like to use a graphical
-user interface for Python programming.
-%endif
-
-%package tools
-Summary: A collection of development tools included with Python.
-Group: Development/Tools
-Prereq: python%{binsuffix} = %{PACKAGE_VERSION}-%{release}
-
-%description tools
-The Python package includes several development tools that are used
-to build python programs.  This package contains a selection of those
-tools, including the IDLE Python IDE.
-
-Install python-tools if you want to use these tools to develop
-Python programs.  You will also need to install the python and
-tkinter packages.
-
-%if %{include_docs}
-%package docs
-Summary: Python-related documentation.
-Group: Development/Documentation
-
-%description docs
-Documentation relating to the Python programming language in HTML and info
-formats.
-%endif
-
-%changelog
-* Mon Dec 20 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.4-2pydotorg]
-- Changing the idle wrapper so that it passes arguments to idle.
-
-* Tue Oct 19 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.4b1-1pydotorg]
-- Updating to 2.4.
-
-* Thu Jul 22 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.4-3pydotorg]
-- Paul Tiemann fixes for %{prefix}.
-- Adding permission changes for directory as suggested by reimeika.ca
-- Adding code to detect when it should be using lib64.
-- Adding a define for the location of /var/www/html for docs.
-
-* Thu May 27 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.4-2pydotorg]
-- Including changes from Ian Holsman to build under Red Hat 7.3.
-- Fixing some problems with the /usr/local path change.
-
-* Sat Mar 27 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.2-3pydotorg]
-- Being more agressive about finding the paths to fix for
-  #!/usr/local/bin/python.
-
-* Sat Feb 07 2004 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.3-2pydotorg]
-- Adding code to remove "#!/usr/local/bin/python" from particular files and
-  causing the RPM build to terminate if there are any unexpected files
-  which have that line in them.
-
-* Mon Oct 13 2003 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.2-1pydotorg]
-- Adding code to detect wether documentation is available to build.
-
-* Fri Sep 19 2003 Sean Reifschneider <jafo-rpms@tummy.com> [2.3.1-1pydotorg]
-- Updating to the 2.3.1 release.
-
-* Mon Feb 24 2003 Sean Reifschneider <jafo-rpms@tummy.com> [2.3b1-1pydotorg]
-- Updating to 2.3b1 release.
-
-* Mon Feb 17 2003 Sean Reifschneider <jafo-rpms@tummy.com> [2.3a1-1]
-- Updating to 2.3 release.
-
-* Sun Dec 23 2001 Sean Reifschneider <jafo-rpms@tummy.com>
-[Release 2.2-2]
-- Added -docs package.
-- Added "auto" config_tkinter setting which only enables tk if
-  /usr/bin/wish exists.
-
-* Sat Dec 22 2001 Sean Reifschneider <jafo-rpms@tummy.com>
-[Release 2.2-1]
-- Updated to 2.2.
-- Changed the extension to "2" from "2.2".
-
-* Tue Nov 18 2001 Sean Reifschneider <jafo-rpms@tummy.com>
-[Release 2.2c1-1]
-- Updated to 2.2c1.
-
-* Thu Nov  1 2001 Sean Reifschneider <jafo-rpms@tummy.com>
-[Release 2.2b1-3]
-- Changed the way the sed for fixing the #! in pydoc works.
-
-* Wed Oct  24 2001 Sean Reifschneider <jafo-rpms@tummy.com>
-[Release 2.2b1-2]
-- Fixed missing "email" package, thanks to anonymous report on sourceforge.
-- Fixed missing "compiler" package.
-
-* Mon Oct 22 2001 Sean Reifschneider <jafo-rpms@tummy.com>
-[Release 2.2b1-1]
-- Updated to 2.2b1.
-
-* Mon Oct  9 2001 Sean Reifschneider <jafo-rpms@tummy.com>
-[Release 2.2a4-4]
-- otto@balinor.mat.unimi.it mentioned that the license file is missing.
-
-* Sun Sep 30 2001 Sean Reifschneider <jafo-rpms@tummy.com>
-[Release 2.2a4-3]
-- Ignacio Vazquez-Abrams pointed out that I had a spruious double-quote in
-  the spec files.  Thanks.
-
-* Wed Jul 25 2001 Sean Reifschneider <jafo-rpms@tummy.com>
-[Release 2.2a1-1]
-- Updated to 2.2a1 release.
-- Changed idle and pydoc to use binsuffix macro
-
-#######
-#  PREP
-#######
-%prep
-%setup -n Python-%{version}
-
-########
-#  BUILD
-########
-%build
-echo "Setting for ipv6: %{ipv6}"
-echo "Setting for pymalloc: %{pymalloc}"
-echo "Setting for binsuffix: %{binsuffix}"
-echo "Setting for include_tkinter: %{include_tkinter}"
-echo "Setting for libdirname: %{libdirname}"
-echo "Setting for sharedlib: %{sharedlib}"
-echo "Setting for include_sharedlib: %{include_sharedlib}"
-./configure --enable-unicode=ucs4 %{sharedlib} %{ipv6} %{pymalloc} --prefix=%{__prefix}
-make
-
-##########
-#  INSTALL
-##########
-%install
-#  set the install path
-echo '[install_scripts]' >setup.cfg
-echo 'install_dir='"${RPM_BUILD_ROOT}%{__prefix}/bin" >>setup.cfg
-
-[ -d "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != "/" ] && rm -rf $RPM_BUILD_ROOT
-mkdir -p $RPM_BUILD_ROOT%{__prefix}/%{libdirname}/python%{libvers}/lib-dynload
-make prefix=$RPM_BUILD_ROOT%{__prefix} install
-
-#  REPLACE PATH IN PYDOC
-if [ ! -z "%{binsuffix}" ]
-then
-   (
-      cd $RPM_BUILD_ROOT%{__prefix}/bin
-      mv pydoc pydoc.old
-      sed 's|#!.*|#!%{__prefix}/bin/env python'%{binsuffix}'|' \
-            pydoc.old >pydoc
-      chmod 755 pydoc
-      rm -f pydoc.old
-   )
-fi
-
-#  add the binsuffix
-if [ ! -z "%{binsuffix}" ]
-then
-   rm -f $RPM_BUILD_ROOT%{__prefix}/bin/python[0-9a-zA-Z]*
-   ( cd $RPM_BUILD_ROOT%{__prefix}/bin; 
-      for file in *; do mv "$file" "$file"%{binsuffix}; done )
-   ( cd $RPM_BUILD_ROOT%{_mandir}/man1; mv python.1 python%{binsuffix}.1 )
-fi
-
-########
-#  Tools
-echo '#!%{__prefix}/bin/env python%{binsuffix}' >${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
-echo 'import os, sys' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
-echo 'os.execvp("%{__prefix}/bin/python%{binsuffix}", ["%{__prefix}/bin/python%{binsuffix}", "%{__prefix}/lib/python%{libvers}/idlelib/idle.py"] + sys.argv[1:])' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
-echo 'print "Failed to exec Idle"' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
-echo 'sys.exit(1)' >>${RPM_BUILD_ROOT}%{__prefix}/bin/idle%{binsuffix}
-chmod 755 $RPM_BUILD_ROOT%{__prefix}/bin/idle%{binsuffix}
-cp -a Tools $RPM_BUILD_ROOT%{__prefix}/%{libdirname}/python%{libvers}
-
-#  MAKE FILE LISTS
-rm -f mainpkg.files
-find "$RPM_BUILD_ROOT""%{__prefix}"/%{libdirname}/python%{libvers} -type f |
-	sed "s|^${RPM_BUILD_ROOT}|/|" |
-	grep -v -e '/python%{libvers}/config$' -e '_tkinter.so$' >mainpkg.files
-find "$RPM_BUILD_ROOT""%{__prefix}"/bin -type f -o -type l |
-	sed "s|^${RPM_BUILD_ROOT}|/|" |
-	grep -v -e '/bin/2to3%{binsuffix}$' |
-	grep -v -e '/bin/pydoc%{binsuffix}$' |
-	grep -v -e '/bin/smtpd.py%{binsuffix}$' |
-	grep -v -e '/bin/idle%{binsuffix}$' >>mainpkg.files
-
-rm -f tools.files
-find "$RPM_BUILD_ROOT""%{__prefix}"/%{libdirname}/python%{libvers}/idlelib \
-      "$RPM_BUILD_ROOT""%{__prefix}"/%{libdirname}/python%{libvers}/Tools -type f |
-      sed "s|^${RPM_BUILD_ROOT}|/|" >tools.files
-echo "%{__prefix}"/bin/2to3%{binsuffix} >>tools.files
-echo "%{__prefix}"/bin/pydoc%{binsuffix} >>tools.files
-echo "%{__prefix}"/bin/smtpd.py%{binsuffix} >>tools.files
-echo "%{__prefix}"/bin/idle%{binsuffix} >>tools.files
-
-######
-# Docs
-%if %{include_docs}
-mkdir -p "$RPM_BUILD_ROOT"%{config_htmldir}
-(
-   cd "$RPM_BUILD_ROOT"%{config_htmldir}
-   bunzip2 < %{SOURCE1} | tar x
-)
-%endif
-
-#  fix the #! line in installed files
-find "$RPM_BUILD_ROOT" -type f -print0 |
-      xargs -0 grep -l /usr/local/bin/python | while read file
-do
-   FIXFILE="$file"
-   sed 's|^#!.*python|#!%{__prefix}/bin/env python'"%{binsuffix}"'|' \
-         "$FIXFILE" >/tmp/fix-python-path.$$
-   cat /tmp/fix-python-path.$$ >"$FIXFILE"
-   rm -f /tmp/fix-python-path.$$
-done
-
-#  check to see if there are any straggling #! lines
-find "$RPM_BUILD_ROOT" -type f | xargs egrep -n '^#! */usr/local/bin/python' \
-      | grep ':1:#!' >/tmp/python-rpm-files.$$ || true
-if [ -s /tmp/python-rpm-files.$$ ]
-then
-   echo '*****************************************************'
-   cat /tmp/python-rpm-files.$$
-   cat <<@EOF
-   *****************************************************
-     There are still files referencing /usr/local/bin/python in the
-     install directory.  They are listed above.  Please fix the .spec
-     file and try again.  If you are an end-user, you probably want
-     to report this to jafo-rpms@tummy.com as well.
-   *****************************************************
-@EOF
-   rm -f /tmp/python-rpm-files.$$
-   exit 1
-fi
-rm -f /tmp/python-rpm-files.$$
-
-########
-#  CLEAN
-########
-%clean
-[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf $RPM_BUILD_ROOT
-rm -f mainpkg.files tools.files
-
-########
-#  FILES
-########
-%files -f mainpkg.files
-%defattr(-,root,root)
-%doc Misc/README Misc/cheatsheet Misc/Porting
-%doc LICENSE Misc/ACKS Misc/HISTORY Misc/NEWS
-%{_mandir}/man1/python%{binsuffix}.1*
-
-%attr(755,root,root) %dir %{__prefix}/include/python%{libvers}
-%attr(755,root,root) %dir %{__prefix}/%{libdirname}/python%{libvers}/
-%if %{include_sharedlib}
-%{__prefix}/%{libdirname}/libpython*
-%endif
-
-%files devel
-%defattr(-,root,root)
-%{__prefix}/include/python%{libvers}/*.h
-%{__prefix}/%{libdirname}/python%{libvers}/config
-
-%files -f tools.files tools
-%defattr(-,root,root)
-
-%if %{include_tkinter}
-%files tkinter
-%defattr(-,root,root)
-%{__prefix}/%{libdirname}/python%{libvers}/tkinter
-%{__prefix}/%{libdirname}/python%{libvers}/lib-dynload/_tkinter.so*
-%endif
-
-%if %{include_docs}
-%files docs
-%defattr(-,root,root)
-%{config_htmldir}/*
-%endif
diff --git a/Misc/SpecialBuilds.txt b/Misc/SpecialBuilds.txt
index 732cb00..3004174 100644
--- a/Misc/SpecialBuilds.txt
+++ b/Misc/SpecialBuilds.txt
@@ -216,12 +216,11 @@
 
 Compile in support for Low Level TRACE-ing of the main interpreter loop.
 
-When this preprocessor symbol is defined, before PyEval_EvalFrame (eval_frame in
-2.3 and 2.2, eval_code2 before that) executes a frame's code it checks the
-frame's global namespace for a variable "__lltrace__".  If such a variable is
-found, mounds of information about what the interpreter is doing are sprayed to
-stdout, such as every opcode and opcode argument and values pushed onto and
-popped off the value stack.
+When this preprocessor symbol is defined, before PyEval_EvalFrame executes a
+frame's code it checks the frame's global namespace for a variable
+"__lltrace__".  If such a variable is found, mounds of information about what
+the interpreter is doing are sprayed to stdout, such as every opcode and opcode
+argument and values pushed onto and popped off the value stack.
 
 Not useful very often, but very useful when needed.
 
diff --git a/Modules/_bz2module.c b/Modules/_bz2module.c
index 4f2afda..f284cd6 100644
--- a/Modules/_bz2module.c
+++ b/Modules/_bz2module.c
@@ -51,6 +51,14 @@
     bz_stream bzs;
     char eof;           /* T_BOOL expects a char */
     PyObject *unused_data;
+    char needs_input;
+    char *input_buffer;
+    size_t input_buffer_size;
+
+    /* bzs->avail_in is only 32 bit, so we store the true length
+       separately. Conversion and looping is encapsulated in
+       decompress_buf() */
+    size_t bzs_avail_in_real;
 #ifdef WITH_THREAD
     PyThread_type_lock lock;
 #endif
@@ -111,19 +119,23 @@
 }
 
 #if BUFSIZ < 8192
-#define SMALLCHUNK 8192
+#define INITIAL_BUFFER_SIZE 8192
 #else
-#define SMALLCHUNK BUFSIZ
+#define INITIAL_BUFFER_SIZE BUFSIZ
 #endif
 
 static int
-grow_buffer(PyObject **buf)
+grow_buffer(PyObject **buf, Py_ssize_t max_length)
 {
     /* Expand the buffer by an amount proportional to the current size,
        giving us amortized linear-time behavior. Use a less-than-double
        growth factor to avoid excessive allocation. */
     size_t size = PyBytes_GET_SIZE(*buf);
     size_t new_size = size + (size >> 3) + 6;
+
+    if (max_length > 0 && new_size > (size_t) max_length)
+        new_size = (size_t) max_length;
+
     if (new_size > size) {
         return _PyBytes_Resize(buf, new_size);
     } else {  /* overflow */
@@ -142,14 +154,14 @@
     size_t data_size = 0;
     PyObject *result;
 
-    result = PyBytes_FromStringAndSize(NULL, SMALLCHUNK);
+    result = PyBytes_FromStringAndSize(NULL, INITIAL_BUFFER_SIZE);
     if (result == NULL)
         return NULL;
 
     c->bzs.next_in = data;
     c->bzs.avail_in = 0;
     c->bzs.next_out = PyBytes_AS_STRING(result);
-    c->bzs.avail_out = SMALLCHUNK;
+    c->bzs.avail_out = INITIAL_BUFFER_SIZE;
     for (;;) {
         char *this_out;
         int bzerror;
@@ -168,7 +180,7 @@
         if (c->bzs.avail_out == 0) {
             size_t buffer_left = PyBytes_GET_SIZE(result) - data_size;
             if (buffer_left == 0) {
-                if (grow_buffer(&result) < 0)
+                if (grow_buffer(&result, -1) < 0)
                     goto error;
                 c->bzs.next_out = PyBytes_AS_STRING(result) + data_size;
                 buffer_left = PyBytes_GET_SIZE(result) - data_size;
@@ -402,64 +414,176 @@
 
 /* BZ2Decompressor class. */
 
-static PyObject *
-decompress(BZ2Decompressor *d, char *data, size_t len)
+/* Decompress data of length d->bzs_avail_in_real in d->bzs.next_in.  The output
+   buffer is allocated dynamically and returned.  At most max_length bytes are
+   returned, so some of the input may not be consumed. d->bzs.next_in and
+   d->bzs_avail_in_real are updated to reflect the consumed input. */
+static PyObject*
+decompress_buf(BZ2Decompressor *d, Py_ssize_t max_length)
 {
-    size_t data_size = 0;
+    /* data_size is strictly positive, but because we repeatedly have to
+       compare against max_length and PyBytes_GET_SIZE we declare it as
+       signed */
+    Py_ssize_t data_size = 0;
     PyObject *result;
+    bz_stream *bzs = &d->bzs;
 
-    result = PyBytes_FromStringAndSize(NULL, SMALLCHUNK);
+    if (max_length < 0 || max_length >= INITIAL_BUFFER_SIZE)
+        result = PyBytes_FromStringAndSize(NULL, INITIAL_BUFFER_SIZE);
+    else
+        result = PyBytes_FromStringAndSize(NULL, max_length);
     if (result == NULL)
-        return result;
-    d->bzs.next_in = data;
-    /* On a 64-bit system, len might not fit in avail_in (an unsigned int).
-       Do decompression in chunks of no more than UINT_MAX bytes each. */
-    d->bzs.avail_in = (unsigned int)Py_MIN(len, UINT_MAX);
-    len -= d->bzs.avail_in;
-    d->bzs.next_out = PyBytes_AS_STRING(result);
-    d->bzs.avail_out = SMALLCHUNK;
+        return NULL;
+
+    bzs->next_out = PyBytes_AS_STRING(result);
     for (;;) {
-        char *this_out;
-        int bzerror;
+        int bzret;
+        size_t avail;
+
+        /* On a 64-bit system, buffer length might not fit in avail_out, so we
+           do decompression in chunks of no more than UINT_MAX bytes
+           each. Note that the expression for `avail` is guaranteed to be
+           positive, so the cast is safe. */
+        avail = (size_t) (PyBytes_GET_SIZE(result) - data_size);
+        bzs->avail_out = (unsigned int)Py_MIN(avail, UINT_MAX);
+        bzs->avail_in = (unsigned int)Py_MIN(d->bzs_avail_in_real, UINT_MAX);
+        d->bzs_avail_in_real -= bzs->avail_in;
 
         Py_BEGIN_ALLOW_THREADS
-        this_out = d->bzs.next_out;
-        bzerror = BZ2_bzDecompress(&d->bzs);
-        data_size += d->bzs.next_out - this_out;
+        bzret = BZ2_bzDecompress(bzs);
+        data_size = bzs->next_out - PyBytes_AS_STRING(result);
+        d->bzs_avail_in_real += bzs->avail_in;
         Py_END_ALLOW_THREADS
-        if (catch_bz2_error(bzerror))
+        if (catch_bz2_error(bzret))
             goto error;
-        if (bzerror == BZ_STREAM_END) {
+        if (bzret == BZ_STREAM_END) {
             d->eof = 1;
-            len += d->bzs.avail_in;
-            if (len > 0) { /* Save leftover input to unused_data */
-                Py_CLEAR(d->unused_data);
-                d->unused_data = PyBytes_FromStringAndSize(d->bzs.next_in, len);
-                if (d->unused_data == NULL)
-                    goto error;
-            }
             break;
-        }
-        if (d->bzs.avail_in == 0) {
-            if (len == 0)
+        } else if (d->bzs_avail_in_real == 0) {
+            break;
+        } else if (bzs->avail_out == 0) {
+            if (data_size == max_length)
                 break;
-            d->bzs.avail_in = (unsigned int)Py_MIN(len, UINT_MAX);
-            len -= d->bzs.avail_in;
-        }
-        if (d->bzs.avail_out == 0) {
-            size_t buffer_left = PyBytes_GET_SIZE(result) - data_size;
-            if (buffer_left == 0) {
-                if (grow_buffer(&result) < 0)
-                    goto error;
-                d->bzs.next_out = PyBytes_AS_STRING(result) + data_size;
-                buffer_left = PyBytes_GET_SIZE(result) - data_size;
-            }
-            d->bzs.avail_out = (unsigned int)Py_MIN(buffer_left, UINT_MAX);
+            if (data_size == PyBytes_GET_SIZE(result) &&
+                grow_buffer(&result, max_length) == -1)
+                goto error;
+            bzs->next_out = PyBytes_AS_STRING(result) + data_size;
         }
     }
-    if (data_size != (size_t)PyBytes_GET_SIZE(result))
-        if (_PyBytes_Resize(&result, data_size) < 0)
+    if (data_size != PyBytes_GET_SIZE(result))
+        if (_PyBytes_Resize(&result, data_size) == -1)
             goto error;
+
+    return result;
+
+error:
+    Py_XDECREF(result);
+    return NULL;
+}
+
+
+static PyObject *
+decompress(BZ2Decompressor *d, char *data, size_t len, Py_ssize_t max_length)
+{
+    char input_buffer_in_use;
+    PyObject *result;
+    bz_stream *bzs = &d->bzs;
+
+    /* Prepend unconsumed input if necessary */
+    if (bzs->next_in != NULL) {
+        size_t avail_now, avail_total;
+
+        /* Number of bytes we can append to input buffer */
+        avail_now = (d->input_buffer + d->input_buffer_size)
+            - (bzs->next_in + d->bzs_avail_in_real);
+
+        /* Number of bytes we can append if we move existing
+           contents to beginning of buffer (overwriting
+           consumed input) */
+        avail_total = d->input_buffer_size - d->bzs_avail_in_real;
+
+        if (avail_total < len) {
+            size_t offset = bzs->next_in - d->input_buffer;
+            char *tmp;
+            size_t new_size = d->input_buffer_size + len - avail_now;
+
+            /* Assign to temporary variable first, so we don't
+               lose address of allocated buffer if realloc fails */
+            tmp = PyMem_Realloc(d->input_buffer, new_size);
+            if (tmp == NULL) {
+                PyErr_SetNone(PyExc_MemoryError);
+                return NULL;
+            }
+            d->input_buffer = tmp;
+            d->input_buffer_size = new_size;
+
+            bzs->next_in = d->input_buffer + offset;
+        }
+        else if (avail_now < len) {
+            memmove(d->input_buffer, bzs->next_in,
+                    d->bzs_avail_in_real);
+            bzs->next_in = d->input_buffer;
+        }
+        memcpy((void*)(bzs->next_in + d->bzs_avail_in_real), data, len);
+        d->bzs_avail_in_real += len;
+        input_buffer_in_use = 1;
+    }
+    else {
+        bzs->next_in = data;
+        d->bzs_avail_in_real = len;
+        input_buffer_in_use = 0;
+    }
+
+    result = decompress_buf(d, max_length);
+    if(result == NULL)
+        return NULL;
+
+    if (d->eof) {
+        d->needs_input = 0;
+        if (d->bzs_avail_in_real > 0) {
+            Py_CLEAR(d->unused_data);
+            d->unused_data = PyBytes_FromStringAndSize(
+                bzs->next_in, d->bzs_avail_in_real);
+            if (d->unused_data == NULL)
+                goto error;
+        }
+    }
+    else if (d->bzs_avail_in_real == 0) {
+        bzs->next_in = NULL;
+        d->needs_input = 1;
+    }
+    else {
+        d->needs_input = 0;
+
+        /* If we did not use the input buffer, we now have
+           to copy the tail from the caller's buffer into the
+           input buffer */
+        if (!input_buffer_in_use) {
+
+            /* Discard buffer if it's too small
+               (resizing it may needlessly copy the current contents) */
+            if (d->input_buffer != NULL &&
+                d->input_buffer_size < d->bzs_avail_in_real) {
+                PyMem_Free(d->input_buffer);
+                d->input_buffer = NULL;
+            }
+
+            /* Allocate if necessary */
+            if (d->input_buffer == NULL) {
+                d->input_buffer = PyMem_Malloc(d->bzs_avail_in_real);
+                if (d->input_buffer == NULL) {
+                    PyErr_SetNone(PyExc_MemoryError);
+                    goto error;
+                }
+                d->input_buffer_size = d->bzs_avail_in_real;
+            }
+
+            /* Copy tail */
+            memcpy(d->input_buffer, bzs->next_in, d->bzs_avail_in_real);
+            bzs->next_in = d->input_buffer;
+        }
+    }
+
     return result;
 
 error:
@@ -470,21 +594,29 @@
 /*[clinic input]
 _bz2.BZ2Decompressor.decompress
 
+    self: self(type="BZ2Decompressor *")
     data: Py_buffer
-    /
+    max_length: Py_ssize_t=-1
 
-Provide data to the decompressor object.
+Decompress *data*, returning uncompressed data as bytes.
 
-Returns a chunk of decompressed data if possible, or b'' otherwise.
+If *max_length* is nonnegative, returns at most *max_length* bytes of
+decompressed data. If this limit is reached and further output can be
+produced, *self.needs_input* will be set to ``False``. In this case, the next
+call to *decompress()* may provide *data* as b'' to obtain more of the output.
 
-Attempting to decompress data after the end of stream is reached
-raises an EOFError.  Any data found after the end of the stream
-is ignored and saved in the unused_data attribute.
+If all of the input data was decompressed and returned (either because this
+was less than *max_length* bytes, or because *max_length* was negative),
+*self.needs_input* will be set to True.
+
+Attempting to decompress data after the end of stream is reached raises an
+EOFError.  Any data found after the end of the stream is ignored and saved in
+the unused_data attribute.
 [clinic start generated code]*/
 
 static PyObject *
-_bz2_BZ2Decompressor_decompress_impl(BZ2Decompressor *self, Py_buffer *data)
-/*[clinic end generated code: output=086e4b99e60cb3f6 input=616c2a6db5269961]*/
+_bz2_BZ2Decompressor_decompress_impl(BZ2Decompressor *self, Py_buffer *data, Py_ssize_t max_length)
+/*[clinic end generated code: output=7eeb5794035a2ca3 input=9558b424c8b00516]*/
 {
     PyObject *result = NULL;
 
@@ -492,7 +624,7 @@
     if (self->eof)
         PyErr_SetString(PyExc_EOFError, "End of stream already reached");
     else
-        result = decompress(self, data->buf, data->len);
+        result = decompress(self, data->buf, data->len, max_length);
     RELEASE_LOCK(self);
     return result;
 }
@@ -527,10 +659,14 @@
     }
 #endif
 
-    self->unused_data = PyBytes_FromStringAndSize("", 0);
+    self->needs_input = 1;
+    self->bzs_avail_in_real = 0;
+    self->input_buffer = NULL;
+    self->input_buffer_size = 0;
+    self->unused_data = PyBytes_FromStringAndSize(NULL, 0);
     if (self->unused_data == NULL)
         goto error;
-
+    
     bzerror = BZ2_bzDecompressInit(&self->bzs, 0, 0);
     if (catch_bz2_error(bzerror))
         goto error;
@@ -549,6 +685,8 @@
 static void
 BZ2Decompressor_dealloc(BZ2Decompressor *self)
 {
+    if(self->input_buffer != NULL)
+        PyMem_Free(self->input_buffer);
     BZ2_bzDecompressEnd(&self->bzs);
     Py_CLEAR(self->unused_data);
 #ifdef WITH_THREAD
@@ -570,11 +708,16 @@
 PyDoc_STRVAR(BZ2Decompressor_unused_data__doc__,
 "Data found after the end of the compressed stream.");
 
+PyDoc_STRVAR(BZ2Decompressor_needs_input_doc,
+"True if more input is needed before more decompressed data can be produced.");
+
 static PyMemberDef BZ2Decompressor_members[] = {
     {"eof", T_BOOL, offsetof(BZ2Decompressor, eof),
      READONLY, BZ2Decompressor_eof__doc__},
     {"unused_data", T_OBJECT_EX, offsetof(BZ2Decompressor, unused_data),
      READONLY, BZ2Decompressor_unused_data__doc__},
+    {"needs_input", T_BOOL, offsetof(BZ2Decompressor, needs_input), READONLY,
+     BZ2Decompressor_needs_input_doc},
     {NULL}
 };
 
diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c
index 69d93ae..d4794be 100644
--- a/Modules/_collectionsmodule.c
+++ b/Modules/_collectionsmodule.c
@@ -1,23 +1,26 @@
 #include "Python.h"
 #include "structmember.h"
 
+#ifdef STDC_HEADERS
+#include <stddef.h>
+#else
+#include <sys/types.h>          /* For size_t */
+#endif
+
 /* collections module implementation of a deque() datatype
    Written and maintained by Raymond D. Hettinger <python@rcn.com>
-   Copyright (c) 2004-2014 Python Software Foundation.
+   Copyright (c) 2004-2015 Python Software Foundation.
    All rights reserved.
 */
 
 /* The block length may be set to any number over 1.  Larger numbers
  * reduce the number of calls to the memory allocator, give faster
- * indexing and rotation, and reduce the link::data overhead ratio.
- *
- * Ideally, the block length will be set to two less than some
- * multiple of the cache-line length (so that the full block
- * including the leftlink and rightlink will fit neatly into
- * cache lines).
+ * indexing and rotation, and reduce the link to data overhead ratio.
+ * Making the block length a power of two speeds-up the modulo
+ * and division calculations in deque_item() and deque_ass_item().
  */
 
-#define BLOCKLEN 62
+#define BLOCKLEN 64
 #define CENTER ((BLOCKLEN - 1) / 2)
 
 /* A `dequeobject` is composed of a doubly-linked list of `block` nodes.
@@ -53,6 +56,19 @@
     struct BLOCK *rightlink;
 } block;
 
+typedef struct {
+    PyObject_VAR_HEAD
+    block *leftblock;
+    block *rightblock;
+    Py_ssize_t leftindex;       /* in range(BLOCKLEN) */
+    Py_ssize_t rightindex;      /* in range(BLOCKLEN) */
+    size_t state;               /* incremented whenever the indices move */
+    Py_ssize_t maxlen;
+    PyObject *weakreflist; /* List of weak references */
+} dequeobject;
+
+static PyTypeObject deque_type;
+
 /* For debug builds, add error checking to track the endpoints
  * in the chain of links.  The goal is to make sure that link
  * assignments only take place at endpoints so that links already
@@ -116,36 +132,7 @@
     }
 }
 
-typedef struct {
-    PyObject_VAR_HEAD
-    block *leftblock;
-    block *rightblock;
-    Py_ssize_t leftindex;       /* in range(BLOCKLEN) */
-    Py_ssize_t rightindex;      /* in range(BLOCKLEN) */
-    long state;                 /* incremented whenever the indices move */
-    Py_ssize_t maxlen;
-    PyObject *weakreflist; /* List of weak references */
-} dequeobject;
-
-/* The deque's size limit is d.maxlen.  The limit can be zero or positive.
- * If there is no limit, then d.maxlen == -1.
- *
- * After an item is added to a deque, we check to see if the size has grown past
- * the limit. If it has, we get the size back down to the limit by popping an
- * item off of the opposite end.  The methods that can trigger this are append(),
- * appendleft(), extend(), and extendleft().
- */
-
-#define TRIM(d, popfunction)                                    \
-    if (d->maxlen != -1 && Py_SIZE(d) > d->maxlen) {       \
-        PyObject *rv = popfunction(d, NULL);                \
-        assert(rv != NULL  &&  Py_SIZE(d) <= d->maxlen);    \
-        Py_DECREF(rv);                                      \
-    }
-
-static PyTypeObject deque_type;
-
-/* XXX Todo: 
+/* XXX Todo:
    If aligned memory allocations become available, make the
    deque object 64 byte aligned so that all of the fields
    can be retrieved or updated in a single cache line.
@@ -171,14 +158,14 @@
     MARK_END(b->rightlink);
 
     assert(BLOCKLEN >= 2);
+    Py_SIZE(deque) = 0;
     deque->leftblock = b;
     deque->rightblock = b;
     deque->leftindex = CENTER + 1;
     deque->rightindex = CENTER;
-    Py_SIZE(deque) = 0;
     deque->state = 0;
-    deque->weakreflist = NULL;
     deque->maxlen = -1;
+    deque->weakreflist = NULL;
 
     return (PyObject *)deque;
 }
@@ -258,6 +245,37 @@
 
 PyDoc_STRVAR(popleft_doc, "Remove and return the leftmost element.");
 
+/* The deque's size limit is d.maxlen.  The limit can be zero or positive.
+ * If there is no limit, then d.maxlen == -1.
+ *
+ * After an item is added to a deque, we check to see if the size has grown past
+ * the limit. If it has, we get the size back down to the limit by popping an
+ * item off of the opposite end.  The methods that can trigger this are append(),
+ * appendleft(), extend(), and extendleft().
+ */
+
+static void
+deque_trim_right(dequeobject *deque)
+{
+    if (deque->maxlen != -1 && Py_SIZE(deque) > deque->maxlen) {
+        PyObject *rv = deque_pop(deque, NULL);
+        assert(rv != NULL);
+        assert(Py_SIZE(deque) <= deque->maxlen);
+        Py_DECREF(rv);
+    }
+}
+
+static void
+deque_trim_left(dequeobject *deque)
+{
+    if (deque->maxlen != -1 && Py_SIZE(deque) > deque->maxlen) {
+        PyObject *rv = deque_popleft(deque, NULL);
+        assert(rv != NULL);
+        assert(Py_SIZE(deque) <= deque->maxlen);
+        Py_DECREF(rv);
+    }
+}
+
 static PyObject *
 deque_append(dequeobject *deque, PyObject *item)
 {
@@ -277,7 +295,7 @@
     Py_SIZE(deque)++;
     deque->rightindex++;
     deque->rightblock->data[deque->rightindex] = item;
-    TRIM(deque, deque_popleft);
+    deque_trim_left(deque);
     Py_RETURN_NONE;
 }
 
@@ -302,7 +320,7 @@
     Py_SIZE(deque)++;
     deque->leftindex--;
     deque->leftblock->data[deque->leftindex] = item;
-    TRIM(deque, deque_pop);
+    deque_trim_right(deque);
     Py_RETURN_NONE;
 }
 
@@ -375,7 +393,7 @@
         Py_SIZE(deque)++;
         deque->rightindex++;
         deque->rightblock->data[deque->rightindex] = item;
-        TRIM(deque, deque_popleft);
+        deque_trim_left(deque);
     }
     Py_DECREF(it);
     if (PyErr_Occurred())
@@ -436,7 +454,7 @@
         Py_SIZE(deque)++;
         deque->leftindex--;
         deque->leftblock->data[deque->leftindex] = item;
-        TRIM(deque, deque_pop);
+        deque_trim_right(deque);
     }
     Py_DECREF(it);
     if (PyErr_Occurred())
@@ -674,8 +692,8 @@
     Py_ssize_t n = Py_SIZE(deque);
     Py_ssize_t i;
     Py_ssize_t count = 0;
+    size_t start_state = deque->state;
     PyObject *item;
-    long start_state = deque->state;
     int cmp;
 
     for (i=0 ; i<n ; i++) {
@@ -757,9 +775,17 @@
         assert (item != NULL);
         Py_DECREF(item);
     }
-    assert(deque->leftblock == deque->rightblock &&
-           deque->leftindex - 1 == deque->rightindex &&
-           Py_SIZE(deque) == 0);
+    assert(deque->leftblock == deque->rightblock);
+    assert(deque->leftindex - 1 == deque->rightindex);
+    assert(Py_SIZE(deque) == 0);
+}
+
+static int
+valid_index(Py_ssize_t i, Py_ssize_t limit)
+{
+    /* The cast to size_t lets us use just a single comparison
+       to check whether i is in the range: 0 <= i < limit */
+    return (size_t) i < (size_t) limit;
 }
 
 static PyObject *
@@ -769,9 +795,8 @@
     PyObject *item;
     Py_ssize_t n, index=i;
 
-    if (i < 0 || i >= Py_SIZE(deque)) {
-        PyErr_SetString(PyExc_IndexError,
-                        "deque index out of range");
+    if (!valid_index(i, Py_SIZE(deque))) {
+        PyErr_SetString(PyExc_IndexError, "deque index out of range");
         return NULL;
     }
 
@@ -783,14 +808,16 @@
         b = deque->rightblock;
     } else {
         i += deque->leftindex;
-        n = i / BLOCKLEN;
-        i %= BLOCKLEN;
+        n = (Py_ssize_t)((size_t) i / BLOCKLEN);
+        i = (Py_ssize_t)((size_t) i % BLOCKLEN);
         if (index < (Py_SIZE(deque) >> 1)) {
             b = deque->leftblock;
             while (n--)
                 b = b->rightlink;
         } else {
-            n = (deque->leftindex + Py_SIZE(deque) - 1) / BLOCKLEN - n;
+            n = (Py_ssize_t)(
+                    ((size_t)(deque->leftindex + Py_SIZE(deque) - 1))
+                    / BLOCKLEN - n);
             b = deque->rightblock;
             while (n--)
                 b = b->leftlink;
@@ -831,23 +858,24 @@
     block *b;
     Py_ssize_t n, len=Py_SIZE(deque), halflen=(len+1)>>1, index=i;
 
-    if (i < 0 || i >= len) {
-        PyErr_SetString(PyExc_IndexError,
-                        "deque index out of range");
+    if (!valid_index(i, len)) {
+        PyErr_SetString(PyExc_IndexError, "deque index out of range");
         return -1;
     }
     if (v == NULL)
         return deque_del_item(deque, i);
 
     i += deque->leftindex;
-    n = i / BLOCKLEN;
-    i %= BLOCKLEN;
+    n = (Py_ssize_t)((size_t) i / BLOCKLEN);
+    i = (Py_ssize_t)((size_t) i % BLOCKLEN);
     if (index <= halflen) {
         b = deque->leftblock;
         while (n--)
             b = b->rightlink;
     } else {
-        n = (deque->leftindex + len - 1) / BLOCKLEN - n;
+        n = (Py_ssize_t)(
+                ((size_t)(deque->leftindex + Py_SIZE(deque) - 1))
+                / BLOCKLEN - n);
         b = deque->rightblock;
         while (n--)
             b = b->leftlink;
@@ -1124,10 +1152,10 @@
     0,                                  /* sq_repeat */
     (ssizeargfunc)deque_item,           /* sq_item */
     0,                                  /* sq_slice */
-    (ssizeobjargproc)deque_ass_item,            /* sq_ass_item */
+    (ssizeobjargproc)deque_ass_item,    /* sq_ass_item */
     0,                                  /* sq_ass_slice */
     0,                                  /* sq_contains */
-    (binaryfunc)deque_inplace_concat,           /* sq_inplace_concat */
+    (binaryfunc)deque_inplace_concat,   /* sq_inplace_concat */
     0,                                  /* sq_inplace_repeat */
 
 };
@@ -1226,10 +1254,10 @@
 
 typedef struct {
     PyObject_HEAD
-    Py_ssize_t index;
     block *b;
+    Py_ssize_t index;
     dequeobject *deque;
-    long state;         /* state when the iterator is created */
+    size_t state;          /* state when the iterator is created */
     Py_ssize_t counter;    /* number of items remaining for iteration */
 } dequeiterobject;
 
@@ -1346,7 +1374,7 @@
 
 static PyTypeObject dequeiter_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "_collections._deque_iterator",              /* tp_name */
+    "_collections._deque_iterator",             /* tp_name */
     sizeof(dequeiterobject),                    /* tp_basicsize */
     0,                                          /* tp_itemsize */
     /* methods */
@@ -1365,7 +1393,7 @@
     PyObject_GenericGetAttr,                    /* tp_getattro */
     0,                                          /* tp_setattro */
     0,                                          /* tp_as_buffer */
-    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
     0,                                          /* tp_doc */
     (traverseproc)dequeiter_traverse,           /* tp_traverse */
     0,                                          /* tp_clear */
@@ -1468,7 +1496,7 @@
 
 static PyTypeObject dequereviter_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "_collections._deque_reverse_iterator",      /* tp_name */
+    "_collections._deque_reverse_iterator",     /* tp_name */
     sizeof(dequeiterobject),                    /* tp_basicsize */
     0,                                          /* tp_itemsize */
     /* methods */
@@ -1487,7 +1515,7 @@
     PyObject_GenericGetAttr,                    /* tp_getattro */
     0,                                          /* tp_setattro */
     0,                                          /* tp_as_buffer */
-    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
     0,                                          /* tp_doc */
     (traverseproc)dequeiter_traverse,           /* tp_traverse */
     0,                                          /* tp_clear */
@@ -1851,7 +1879,7 @@
                 (hash = ((PyASCIIObject *) key)->hash) == -1)
             {
                 hash = PyObject_Hash(key);
-                if (hash == -1) 
+                if (hash == -1)
                     goto done;
             }
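
A quick Python illustration of the maxlen trimming behaviour described in the
deque_trim_left()/deque_trim_right() comment above (growth on one end pops an
item from the opposite end once maxlen is exceeded):

    from collections import deque

    d = deque([1, 2, 3], maxlen=3)
    d.append(4)        # grew on the right, so the leftmost item is trimmed
    print(d)           # deque([2, 3, 4], maxlen=3)
    d.appendleft(0)    # grew on the left, so the rightmost item is trimmed
    print(d)           # deque([0, 2, 3], maxlen=3)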
 
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
index c2889d2..6531aec 100644
--- a/Modules/_ctypes/_ctypes.c
+++ b/Modules/_ctypes/_ctypes.c
@@ -301,7 +301,7 @@
     char *new_prefix;
     char *result;
     char buf[32];
-    int prefix_len;
+    Py_ssize_t prefix_len;
     int k;
 
     prefix_len = 32 * ndim + 3;
@@ -2819,8 +2819,9 @@
                src->b_ptr,
                size);
 
-        if (PyCPointerTypeObject_Check(type))
-            /* XXX */;
+        if (PyCPointerTypeObject_Check(type)) {
+            /* XXX */
+        }
 
         value = GetKeepedObjects(src);
         if (value == NULL)
@@ -4305,8 +4306,11 @@
                                               slicelen);
             }
 
-            dest = (wchar_t *)PyMem_Malloc(
-                                    slicelen * sizeof(wchar_t));
+            dest = PyMem_New(wchar_t, slicelen);
+            if (dest == NULL) {
+                PyErr_NoMemory();
+                return NULL;
+            }
 
             for (cur = start, i = 0; i < slicelen;
                  cur += step, i++) {
@@ -4986,7 +4990,7 @@
                 return PyUnicode_FromWideChar(ptr + start,
                                               len);
             }
-            dest = (wchar_t *)PyMem_Malloc(len * sizeof(wchar_t));
+            dest = PyMem_New(wchar_t, len);
             if (dest == NULL)
                 return PyErr_NoMemory();
             for (cur = start, i = 0; i < len; cur += step, i++) {
diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c
index 728f751..879afb8 100644
--- a/Modules/_ctypes/stgdict.c
+++ b/Modules/_ctypes/stgdict.c
@@ -76,14 +76,18 @@
 
     if (src->format) {
         dst->format = PyMem_Malloc(strlen(src->format) + 1);
-        if (dst->format == NULL)
+        if (dst->format == NULL) {
+            PyErr_NoMemory();
             return -1;
+        }
         strcpy(dst->format, src->format);
     }
     if (src->shape) {
         dst->shape = PyMem_Malloc(sizeof(Py_ssize_t) * src->ndim);
-        if (dst->shape == NULL)
+        if (dst->shape == NULL) {
+            PyErr_NoMemory();
             return -1;
+        }
         memcpy(dst->shape, src->shape,
                sizeof(Py_ssize_t) * src->ndim);
     }
@@ -380,7 +384,7 @@
         union_size = 0;
         total_align = align ? align : 1;
         stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT;
-        stgdict->ffi_type_pointer.elements = PyMem_Malloc(sizeof(ffi_type *) * (basedict->length + len + 1));
+        stgdict->ffi_type_pointer.elements = PyMem_New(ffi_type *, basedict->length + len + 1);
         if (stgdict->ffi_type_pointer.elements == NULL) {
             PyErr_NoMemory();
             return -1;
@@ -398,7 +402,7 @@
         union_size = 0;
         total_align = 1;
         stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT;
-        stgdict->ffi_type_pointer.elements = PyMem_Malloc(sizeof(ffi_type *) * (len + 1));
+        stgdict->ffi_type_pointer.elements = PyMem_New(ffi_type *, len + 1);
         if (stgdict->ffi_type_pointer.elements == NULL) {
             PyErr_NoMemory();
             return -1;
diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c
index 701c587..09285d9 100644
--- a/Modules/_datetimemodule.c
+++ b/Modules/_datetimemodule.c
@@ -5020,8 +5020,7 @@
 
     {"utcfromtimestamp", (PyCFunction)datetime_utcfromtimestamp,
      METH_VARARGS | METH_CLASS,
-     PyDoc_STR("timestamp -> UTC datetime from a POSIX timestamp "
-               "(like time.time()).")},
+     PyDoc_STR("Construct a naive UTC datetime from a POSIX timestamp.")},
 
     {"strptime", (PyCFunction)datetime_strptime,
      METH_VARARGS | METH_CLASS,
diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c
index 9b3e9ed..bf4bc4a 100644
--- a/Modules/_elementtree.c
+++ b/Modules/_elementtree.c
@@ -11,6 +11,8 @@
  *--------------------------------------------------------------------
  */
 
+#define PY_SSIZE_T_CLEAN
+
 #include "Python.h"
 #include "structmember.h"
 
@@ -185,8 +187,8 @@
     PyObject* attrib;
 
     /* child elements */
-    int length; /* actual number of items */
-    int allocated; /* allocated items */
+    Py_ssize_t length; /* actual number of items */
+    Py_ssize_t allocated; /* allocated items */
 
     /* this either points to _children or to a malloced buffer */
     PyObject* *children;
@@ -251,7 +253,7 @@
 dealloc_extra(ElementObject* self)
 {
     ElementObjectExtra *myextra;
-    int i;
+    Py_ssize_t i;
 
     if (!self->extra)
         return;
@@ -429,9 +431,9 @@
 }
 
 LOCAL(int)
-element_resize(ElementObject* self, int extra)
+element_resize(ElementObject* self, Py_ssize_t extra)
 {
-    int size;
+    Py_ssize_t size;
     PyObject* *children;
 
     /* make sure self->children can hold the given number of extra
@@ -442,7 +444,7 @@
             return -1;
     }
 
-    size = self->extra->length + extra;
+    size = self->extra->length + extra;  /* never overflows */
 
     if (size > self->extra->allocated) {
         /* use Python 2.4's list growth strategy */
@@ -453,6 +455,8 @@
          * be safe.
          */
         size = size ? size : 1;
+        if ((size_t)size > PY_SSIZE_T_MAX/sizeof(PyObject*))
+            goto nomemory;
         if (self->extra->children != self->extra->_children) {
             /* Coverity CID #182 size_error: Allocating 1 bytes to pointer
              * "children", which needs at least 4 bytes. Although it's a
@@ -613,7 +617,7 @@
     Py_VISIT(JOIN_OBJ(self->tail));
 
     if (self->extra) {
-        int i;
+        Py_ssize_t i;
         Py_VISIT(self->extra->attrib);
 
         for (i = 0; i < self->extra->length; ++i)
@@ -689,7 +693,7 @@
 static PyObject*
 element_copy(ElementObject* self, PyObject* args)
 {
-    int i;
+    Py_ssize_t i;
     ElementObject* element;
 
     if (!PyArg_ParseTuple(args, ":__copy__"))
@@ -728,7 +732,7 @@
 static PyObject*
 element_deepcopy(ElementObject* self, PyObject* args)
 {
-    int i;
+    Py_ssize_t i;
     ElementObject* element;
     PyObject* tag;
     PyObject* attrib;
@@ -839,7 +843,7 @@
 static PyObject *
 element_getstate(ElementObject *self)
 {
-    int i, noattrib;
+    Py_ssize_t i, noattrib;
     PyObject *instancedict = NULL, *children;
 
     /* Build a list of children. */
@@ -1077,7 +1081,7 @@
 static PyObject*
 element_find(ElementObject *self, PyObject *args, PyObject *kwds)
 {
-    int i;
+    Py_ssize_t i;
     PyObject* tag;
     PyObject* namespaces = Py_None;
     static char *kwlist[] = {"path", "namespaces", 0};
@@ -1112,7 +1116,7 @@
 static PyObject*
 element_findtext(ElementObject *self, PyObject *args, PyObject *kwds)
 {
-    int i;
+    Py_ssize_t i;
     PyObject* tag;
     PyObject* default_value = Py_None;
     PyObject* namespaces = Py_None;
@@ -1153,7 +1157,7 @@
 static PyObject*
 element_findall(ElementObject *self, PyObject *args, PyObject *kwds)
 {
-    int i;
+    Py_ssize_t i;
     PyObject* out;
     PyObject* tag;
     PyObject* namespaces = Py_None;
@@ -1238,7 +1242,7 @@
 static PyObject*
 element_getchildren(ElementObject* self, PyObject* args)
 {
-    int i;
+    Py_ssize_t i;
     PyObject* list;
 
     /* FIXME: report as deprecated? */
@@ -1310,11 +1314,9 @@
 static PyObject*
 element_insert(ElementObject* self, PyObject* args)
 {
-    int i;
-
-    int index;
+    Py_ssize_t index, i;
     PyObject* element;
-    if (!PyArg_ParseTuple(args, "iO!:insert", &index,
+    if (!PyArg_ParseTuple(args, "nO!:insert", &index,
                           &Element_Type, &element))
         return NULL;
 
@@ -1402,7 +1404,7 @@
 static PyObject*
 element_remove(ElementObject* self, PyObject* args)
 {
-    int i;
+    Py_ssize_t i;
 
     PyObject* element;
     if (!PyArg_ParseTuple(args, "O!:remove", &Element_Type, &element))
@@ -1481,7 +1483,7 @@
 element_setitem(PyObject* self_, Py_ssize_t index, PyObject* item)
 {
     ElementObject* self = (ElementObject*) self_;
-    int i;
+    Py_ssize_t i;
     PyObject* old;
 
     if (!self->extra || index < 0 || index >= self->extra->length) {
@@ -2819,12 +2821,13 @@
  * message string is the default for the given error_code.
 */
 static void
-expat_set_error(enum XML_Error error_code, int line, int column, char *message)
+expat_set_error(enum XML_Error error_code, Py_ssize_t line, Py_ssize_t column,
+                const char *message)
 {
     PyObject *errmsg, *error, *position, *code;
     elementtreestate *st = ET_STATE_GLOBAL;
 
-    errmsg = PyUnicode_FromFormat("%s: line %d, column %d",
+    errmsg = PyUnicode_FromFormat("%s: line %zd, column %zd",
                 message ? message : EXPAT(ErrorString)(error_code),
                 line, column);
     if (errmsg == NULL)
@@ -2848,7 +2851,7 @@
     }
     Py_DECREF(code);
 
-    position = Py_BuildValue("(ii)", line, column);
+    position = Py_BuildValue("(nn)", line, column);
     if (!position) {
         Py_DECREF(error);
         return;
@@ -3477,8 +3480,14 @@
             break;
         }
 
+        if (PyBytes_GET_SIZE(buffer) > INT_MAX) {
+            Py_DECREF(buffer);
+            Py_DECREF(reader);
+            PyErr_SetString(PyExc_OverflowError, "size does not fit in an int");
+            return NULL;
+        }
         res = expat_parse(
-            self, PyBytes_AS_STRING(buffer), PyBytes_GET_SIZE(buffer), 0
+            self, PyBytes_AS_STRING(buffer), (int)PyBytes_GET_SIZE(buffer), 0
             );
 
         Py_DECREF(buffer);
diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c
index 57dfba0..3413b12 100644
--- a/Modules/_functoolsmodule.c
+++ b/Modules/_functoolsmodule.c
@@ -25,7 +25,7 @@
 static PyObject *
 partial_new(PyTypeObject *type, PyObject *args, PyObject *kw)
 {
-    PyObject *func;
+    PyObject *func, *pargs, *nargs, *pkw;
     partialobject *pto;
 
     if (PyTuple_GET_SIZE(args) < 1) {
@@ -34,7 +34,16 @@
         return NULL;
     }
 
+    pargs = pkw = Py_None;
     func = PyTuple_GET_ITEM(args, 0);
+    if (Py_TYPE(func) == &partial_type && type == &partial_type) {
+        partialobject *part = (partialobject *)func;
+        if (part->dict == NULL) {
+            pargs = part->args;
+            pkw = part->kw;
+            func = part->fn;
+        }
+    }
     if (!PyCallable_Check(func)) {
         PyErr_SetString(PyExc_TypeError,
                         "the first argument must be callable");
@@ -48,21 +57,53 @@
 
     pto->fn = func;
     Py_INCREF(func);
-    pto->args = PyTuple_GetSlice(args, 1, PY_SSIZE_T_MAX);
-    if (pto->args == NULL) {
+
+    nargs = PyTuple_GetSlice(args, 1, PY_SSIZE_T_MAX);
+    if (nargs == NULL) {
+        pto->args = NULL;
         pto->kw = NULL;
         Py_DECREF(pto);
         return NULL;
     }
+    if (pargs == Py_None || PyTuple_GET_SIZE(pargs) == 0) {
+        pto->args = nargs;
+        Py_INCREF(nargs);
+    }
+    else if (PyTuple_GET_SIZE(nargs) == 0) {
+        pto->args = pargs;
+        Py_INCREF(pargs);
+    }
+    else {
+        pto->args = PySequence_Concat(pargs, nargs);
+        if (pto->args == NULL) {
+            pto->kw = NULL;
+            Py_DECREF(pto);
+            return NULL;
+        }
+    }
+    Py_DECREF(nargs);
+
     if (kw != NULL) {
-        pto->kw = PyDict_Copy(kw);
+        if (pkw == Py_None) {
+            pto->kw = PyDict_Copy(kw);
+        }
+        else {
+            pto->kw = PyDict_Copy(pkw);
+            if (pto->kw != NULL) {
+                if (PyDict_Merge(pto->kw, kw, 1) != 0) {
+                    Py_DECREF(pto);
+                    return NULL;
+                }
+            }
+        }
         if (pto->kw == NULL) {
             Py_DECREF(pto);
             return NULL;
         }
-    } else {
-        pto->kw = Py_None;
-        Py_INCREF(Py_None);
+    }
+    else {
+        pto->kw = pkw;
+        Py_INCREF(pkw);
     }
 
     pto->weakreflist = NULL;
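
The partial_new() hunk above makes the constructor flatten one level of nesting:
when the first argument is itself a plain functools.partial instance (exact type,
no instance __dict__), its wrapped callable, positional arguments and keywords are
reused and merged instead of wrapping the partial object a second time. A short
Python-level sketch of the behaviour this implies (illustrative only, assuming the
merged constructor path above; not part of the patch):

    from functools import partial

    def add(a, b, c):
        return a + b + c

    inner = partial(add, 1)
    outer = partial(inner, 2, c=3)

    # The outer partial now refers to add directly and carries the
    # combined positional arguments and keywords.
    print(outer.func is add)   # True
    print(outer.args)          # (1, 2)
    print(outer.keywords)      # {'c': 3}
    print(outer())             # 6
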
diff --git a/Modules/_io/_iomodule.h b/Modules/_io/_iomodule.h
index 8927864..9d5205e 100644
--- a/Modules/_io/_iomodule.h
+++ b/Modules/_io/_iomodule.h
@@ -69,7 +69,7 @@
  * Offset type for positioning.
  */
 
-/* Printing a variable of type off_t (with e.g., PyString_FromFormat)
+/* Printing a variable of type off_t (with e.g., PyUnicode_FromFormat)
    correctly and without producing compiler warnings is surprisingly painful.
    We identify an integer type whose size matches off_t and then: (1) cast the
    off_t to that integer type and (2) use the appropriate conversion
diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c
index 692ce41..370bb5e 100644
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -680,11 +680,7 @@
 _set_BlockingIOError(char *msg, Py_ssize_t written)
 {
     PyObject *err;
-#ifdef Py_DEBUG
-    /* in debug mode, PyEval_EvalFrameEx() fails with an assertion error
-       if an exception is set when it is called */
     PyErr_Clear();
-#endif
     err = PyObject_CallFunction(PyExc_BlockingIOError, "isn",
                                 errno, msg, written);
     if (err)
diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c
index 2435513..0f226ea 100644
--- a/Modules/_io/fileio.c
+++ b/Modules/_io/fileio.c
@@ -127,11 +127,18 @@
 static PyObject *
 fileio_close(fileio *self)
 {
+    PyObject *res;
+    PyObject *exc, *val, *tb;
+    int rc;
     _Py_IDENTIFIER(close);
+    res = _PyObject_CallMethodId((PyObject*)&PyRawIOBase_Type,
+                                 &PyId_close, "O", self);
     if (!self->closefd) {
         self->fd = -1;
-        Py_RETURN_NONE;
+        return res;
     }
+    if (res == NULL)
+        PyErr_Fetch(&exc, &val, &tb);
     if (self->finalizing) {
         PyObject *r = fileio_dealloc_warn(self, (PyObject *) self);
         if (r)
@@ -139,12 +146,12 @@
         else
             PyErr_Clear();
     }
-    errno = internal_close(self);
-    if (errno < 0)
-        return NULL;
-
-    return _PyObject_CallMethodId((PyObject*)&PyRawIOBase_Type,
-                                  &PyId_close, "O", self);
+    rc = internal_close(self);
+    if (res == NULL)
+        _PyErr_ChainExceptions(exc, val, tb);
+    if (rc < 0)
+        Py_CLEAR(res);
+    return res;
 }
 
 static PyObject *
@@ -173,9 +180,15 @@
 static int
 check_fd(int fd)
 {
-#if defined(HAVE_FSTAT)
-    struct stat buf;
-    if (!_PyVerify_fd(fd) || (fstat(fd, &buf) < 0 && errno == EBADF)) {
+#if defined(HAVE_FSTAT) || defined(MS_WINDOWS)
+    struct _Py_stat_struct buf;
+    if (_Py_fstat(fd, &buf) < 0 &&
+#ifdef MS_WINDOWS
+        GetLastError() == ERROR_INVALID_HANDLE
+#else
+        errno == EBADF
+#endif
+        ) {
         PyObject *exc;
         char *msg = strerror(EBADF);
         exc = PyObject_CallFunction(PyExc_OSError, "(is)",
@@ -215,8 +228,8 @@
 #elif !defined(MS_WINDOWS)
     int *atomic_flag_works = NULL;
 #endif
-#ifdef HAVE_FSTAT
-    struct stat fdfstat;
+#if defined(HAVE_FSTAT) || defined(MS_WINDOWS)
+    struct _Py_stat_struct fdfstat;
 #endif
     int async_err = 0;
 
@@ -369,10 +382,12 @@
                 else
 #endif
                     self->fd = open(name, flags, 0666);
-
                 Py_END_ALLOW_THREADS
             } while (self->fd < 0 && errno == EINTR &&
                      !(async_err = PyErr_CheckSignals()));
+
+            if (async_err)
+                goto error;
         }
         else {
             PyObject *fdobj;
@@ -401,8 +416,7 @@
 
         fd_is_own = 1;
         if (self->fd < 0) {
-            if (!async_err)
-                PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, nameobj);
+            PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, nameobj);
             goto error;
         }
 
@@ -413,9 +427,11 @@
     }
 
     self->blksize = DEFAULT_BUFFER_SIZE;
-#ifdef HAVE_FSTAT
-    if (fstat(self->fd, &fdfstat) < 0)
+#if defined(HAVE_FSTAT) || defined(MS_WINDOWS)
+    if (_Py_fstat(self->fd, &fdfstat) < 0) {
+        PyErr_SetFromErrno(PyExc_OSError);
         goto error;
+    }
 #if defined(S_ISDIR) && defined(EISDIR)
     /* On Unix, open will succeed for directories.
        In Python, there should be no file objects referring to
@@ -430,7 +446,7 @@
     if (fdfstat.st_blksize > 1)
         self->blksize = fdfstat.st_blksize;
 #endif /* HAVE_STRUCT_STAT_ST_BLKSIZE */
-#endif /* HAVE_FSTAT */
+#endif /* HAVE_FSTAT || MS_WINDOWS */
 
 #if defined(MS_WINDOWS) || defined(__CYGWIN__)
     /* don't translate newlines (\r\n <=> \n) */
@@ -567,12 +583,15 @@
 
     if (_PyVerify_fd(self->fd)) {
         len = pbuf.len;
+#ifdef MS_WINDOWS
+        if (len > INT_MAX)
+            len = INT_MAX;
+#endif
+
         do {
             Py_BEGIN_ALLOW_THREADS
             errno = 0;
 #ifdef MS_WINDOWS
-            if (len > INT_MAX)
-                len = INT_MAX;
             n = read(self->fd, pbuf.buf, (int)len);
 #else
             n = read(self->fd, pbuf.buf, len);
@@ -580,6 +599,9 @@
             Py_END_ALLOW_THREADS
         } while (n < 0 && errno == EINTR &&
                  !(async_err = PyErr_CheckSignals()));
+
+        if (async_err)
+            return NULL;
     } else
         n = -1;
     err = errno;
@@ -588,25 +610,14 @@
         if (err == EAGAIN)
             Py_RETURN_NONE;
         errno = err;
-        if (!async_err)
-            PyErr_SetFromErrno(PyExc_IOError);
+        PyErr_SetFromErrno(PyExc_IOError);
         return NULL;
     }
 
     return PyLong_FromSsize_t(n);
 }
 
-#ifndef HAVE_FSTAT
-
-static PyObject *
-fileio_readall(fileio *self)
-{
-    _Py_IDENTIFIER(readall);
-    return _PyObject_CallMethodId((PyObject*)&PyRawIOBase_Type,
-                                  &PyId_readall, "O", self);
-}
-
-#else
+#if defined(HAVE_FSTAT) || defined(MS_WINDOWS)
 
 static size_t
 new_buffersize(fileio *self, size_t currentsize)
@@ -630,11 +641,11 @@
 static PyObject *
 fileio_readall(fileio *self)
 {
-    struct stat st;
+    struct _Py_stat_struct st;
     Py_off_t pos, end;
     PyObject *result;
     Py_ssize_t bytes_read = 0;
-    Py_ssize_t n;
+    Py_ssize_t len, n;
     size_t bufsize;
     int async_err = 0;
 
@@ -648,7 +659,7 @@
 #else
     pos = lseek(self->fd, 0L, SEEK_CUR);
 #endif
-    if (fstat(self->fd, &st) == 0)
+    if (_Py_fstat(self->fd, &st) == 0)
         end = st.st_size;
     else
         end = (Py_off_t)-1;
@@ -683,20 +694,26 @@
                     return NULL;
             }
         }
+
+        len = bufsize - bytes_read;
+#ifdef MS_WINDOWS
+        if (len > INT_MAX)
+            len = INT_MAX;
+#endif
         do {
             Py_BEGIN_ALLOW_THREADS
             errno = 0;
-            n = bufsize - bytes_read;
 #ifdef MS_WINDOWS
-            if (n > INT_MAX)
-                n = INT_MAX;
-            n = read(self->fd, PyBytes_AS_STRING(result) + bytes_read, (int)n);
+            n = read(self->fd, PyBytes_AS_STRING(result) + bytes_read, (int)len);
 #else
-            n = read(self->fd, PyBytes_AS_STRING(result) + bytes_read, n);
+            n = read(self->fd, PyBytes_AS_STRING(result) + bytes_read, len);
 #endif
             Py_END_ALLOW_THREADS
         } while (n < 0 && errno == EINTR &&
                  !(async_err = PyErr_CheckSignals()));
+
+        if (async_err)
+            return NULL;
         if (n == 0)
             break;
         if (n < 0) {
@@ -707,8 +724,7 @@
                 Py_RETURN_NONE;
             }
             Py_DECREF(result);
-            if (!async_err)
-                PyErr_SetFromErrno(PyExc_IOError);
+            PyErr_SetFromErrno(PyExc_IOError);
             return NULL;
         }
         bytes_read += n;
@@ -722,7 +738,17 @@
     return result;
 }
 
-#endif /* HAVE_FSTAT */
+#else
+
+static PyObject *
+fileio_readall(fileio *self)
+{
+    _Py_IDENTIFIER(readall);
+    return _PyObject_CallMethodId((PyObject*)&PyRawIOBase_Type,
+                                  &PyId_readall, "O", self);
+}
+
+#endif /* HAVE_FSTAT || MS_WINDOWS */
 
 static PyObject *
 fileio_read(fileio *self, PyObject *args)
@@ -766,6 +792,9 @@
             Py_END_ALLOW_THREADS
         } while (n < 0 && errno == EINTR &&
                  !(async_err = PyErr_CheckSignals()));
+
+        if (async_err)
+            return NULL;
     } else
         n = -1;
 
@@ -775,8 +804,7 @@
         if (err == EAGAIN)
             Py_RETURN_NONE;
         errno = err;
-        if (!async_err)
-            PyErr_SetFromErrno(PyExc_IOError);
+        PyErr_SetFromErrno(PyExc_IOError);
         return NULL;
     }
 
@@ -806,19 +834,22 @@
         return NULL;
 
     if (_PyVerify_fd(self->fd)) {
+        len = pbuf.len;
+#ifdef MS_WINDOWS
+        if (len > 32767 && isatty(self->fd)) {
+            /* Issue #11395: the Windows console returns an error (12: not
+               enough space error) on writing into stdout if stdout mode is
+               binary and the length is greater than 66,000 bytes (or less,
+               depending on heap usage). */
+            len = 32767;
+        } else if (len > INT_MAX)
+            len = INT_MAX;
+#endif
+
         do {
             Py_BEGIN_ALLOW_THREADS
             errno = 0;
-            len = pbuf.len;
 #ifdef MS_WINDOWS
-            if (len > 32767 && isatty(self->fd)) {
-                /* Issue #11395: the Windows console returns an error (12: not
-                   enough space error) on writing into stdout if stdout mode is
-                   binary and the length is greater than 66,000 bytes (or less,
-                   depending on heap usage). */
-                len = 32767;
-            } else if (len > INT_MAX)
-                len = INT_MAX;
             n = write(self->fd, pbuf.buf, (int)len);
 #else
             n = write(self->fd, pbuf.buf, len);
@@ -826,6 +857,9 @@
             Py_END_ALLOW_THREADS
         } while (n < 0 && errno == EINTR &&
                  !(async_err = PyErr_CheckSignals()));
+
+        if (async_err)
+            return NULL;
     } else
         n = -1;
     err = errno;
@@ -836,8 +870,7 @@
         if (err == EAGAIN)
             Py_RETURN_NONE;
         errno = err;
-        if (!async_err)
-            PyErr_SetFromErrno(PyExc_IOError);
+        PyErr_SetFromErrno(PyExc_IOError);
         return NULL;
     }
 
diff --git a/Modules/_json.c b/Modules/_json.c
index 031471e..076859f 100644
--- a/Modules/_json.c
+++ b/Modules/_json.c
@@ -827,7 +827,7 @@
 
 static PyObject *
 _parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
-    /* Read a JSON array from PyString pystr.
+    /* Read a JSON array from PyUnicode pystr.
     idx is the index of the first character after the opening brace.
     *next_idx_ptr is a return-by-reference index to the first character after
         the closing brace.
@@ -899,8 +899,8 @@
 }
 
 static PyObject *
-_parse_constant(PyScannerObject *s, char *constant, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
-    /* Read a JSON constant from PyString pystr.
+_parse_constant(PyScannerObject *s, const char *constant, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
+    /* Read a JSON constant.
     constant is the constant string that was found
         ("NaN", "Infinity", "-Infinity").
     idx is the index of the first character of the constant
@@ -932,7 +932,7 @@
         the number.
 
     Returns a new PyObject representation of that number:
-        PyInt, PyLong, or PyFloat.
+        PyLong, or PyFloat.
         May return other types if parse_int or parse_float are set
     */
     void *str;
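
The docstring fragment above describes the scanner's number handling: integer
literals produce PyLong and reals produce PyFloat, unless the caller installed
parse_int/parse_float hooks. At the Python level those hooks are the documented
json.loads() keyword arguments, for example (illustrative sketch only):

    import json
    from decimal import Decimal

    # Default behaviour: int and float objects.
    print(json.loads('{"n": 1, "x": 1.5}'))
    # -> {'n': 1, 'x': 1.5}

    # parse_float replaces the float path with a user-supplied callable.
    print(json.loads('{"x": 1.5}', parse_float=Decimal))
    # -> {'x': Decimal('1.5')}
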
diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c
index 400c344..b1d6add 100644
--- a/Modules/_localemodule.c
+++ b/Modules/_localemodule.c
@@ -254,7 +254,7 @@
 
     /* assume no change in size, first */
     n1 = n1 + 1;
-    buf = PyMem_Malloc(n1 * sizeof(wchar_t));
+    buf = PyMem_New(wchar_t, n1);
     if (!buf) {
         PyErr_NoMemory();
         goto exit;
diff --git a/Modules/_lzmamodule.c b/Modules/_lzmamodule.c
index fb15233..bae7df6 100644
--- a/Modules/_lzmamodule.c
+++ b/Modules/_lzmamodule.c
@@ -1087,7 +1087,7 @@
 
 static PyObject *
 _lzma_LZMADecompressor_decompress_impl(Decompressor *self, Py_buffer *data, Py_ssize_t max_length)
-/*[clinic end generated code: output=1532a5bb23629001 input=262e4e217f49039b]*/
+/*[clinic end generated code: output=1532a5bb23629001 input=f2bb902cc1caf203]*/
 {
     PyObject *result = NULL;
 
diff --git a/Modules/_pickle.c b/Modules/_pickle.c
index 7a234f1..51e2f83 100644
--- a/Modules/_pickle.c
+++ b/Modules/_pickle.c
@@ -1979,7 +1979,7 @@
         if (_Pickler_Write(self, &op, 1) < 0)
             goto done;
 
-        buf = PyOS_double_to_string(x, 'g', 17, 0, NULL);
+        buf = PyOS_double_to_string(x, 'r', 0, Py_DTSF_ADD_DOT_0, NULL);
         if (!buf) {
             PyErr_NoMemory();
             goto done;
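
The hunk above changes how the protocol-0 FLOAT opcode renders its argument:
instead of a fixed 17-significant-digit '%.17g' conversion it now uses the
shortest-repr mode ('r'), with Py_DTSF_ADD_DOT_0 forcing a trailing ".0" for
integral values. A rough Python-level illustration (a sketch; the exact bytes
depend on the value being pickled):

    import pickle
    import pickletools

    data = pickle.dumps(1.1, protocol=0)
    # The old fixed-precision form would typically embed b'F1.1000000000000001\n';
    # the shortest-repr form embeds b'F1.1\n'.
    pickletools.dis(data)   # shows the FLOAT opcode with the repr text
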
diff --git a/Modules/_randommodule.c b/Modules/_randommodule.c
index 4377ee0..af86182 100644
--- a/Modules/_randommodule.c
+++ b/Modules/_randommodule.c
@@ -78,8 +78,8 @@
 
 typedef struct {
     PyObject_HEAD
-    unsigned long state[N];
     int index;
+    unsigned long state[N];
 } RandomObject;
 
 static PyTypeObject Random_Type;
diff --git a/Modules/_sqlite/connection.h b/Modules/_sqlite/connection.h
index 0c9734c..fbd9063 100644
--- a/Modules/_sqlite/connection.h
+++ b/Modules/_sqlite/connection.h
@@ -52,7 +52,7 @@
      * first get called with count=0? */
     double timeout_started;
 
-    /* None for autocommit, otherwise a PyString with the isolation level */
+    /* None for autocommit, otherwise a PyUnicode with the isolation level */
     PyObject* isolation_level;
 
     /* NULL for autocommit, otherwise a string with the BEGIN statement; will be
diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c
index 7fe00e3..c1599c0 100644
--- a/Modules/_sqlite/cursor.c
+++ b/Modules/_sqlite/cursor.c
@@ -334,11 +334,7 @@
                 if (self->connection->text_factory == (PyObject*)&PyUnicode_Type) {
                     converted = PyUnicode_FromStringAndSize(val_str, nbytes);
                     if (!converted) {
-#ifdef Py_DEBUG
-                        /* in debug mode, type_call() fails with an assertion
-                           error if an exception is set when it is called */
                         PyErr_Clear();
-#endif
                         colname = sqlite3_column_name(self->statement->st, i);
                         if (!colname) {
                             colname = "<unknown column name>";
diff --git a/Modules/_sqlite/row.c b/Modules/_sqlite/row.c
index 64dfd06..ed8ad47 100644
--- a/Modules/_sqlite/row.c
+++ b/Modules/_sqlite/row.c
@@ -159,7 +159,7 @@
 PyObject* pysqlite_row_keys(pysqlite_Row* self, PyObject* args, PyObject* kwargs)
 {
     PyObject* list;
-    int nitems, i;
+    Py_ssize_t nitems, i;
 
     list = PyList_New(0);
     if (!list) {
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index 437d2b2..8596225 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -1841,26 +1841,6 @@
         BIO_set_nbio(SSL_get_wbio(self->ssl), nonblocking);
     }
 
-    /* first check if there are bytes ready to be read */
-    PySSL_BEGIN_ALLOW_THREADS
-    count = SSL_pending(self->ssl);
-    PySSL_END_ALLOW_THREADS
-
-    if (!count) {
-        sockstate = check_socket_and_wait_for_timeout(sock, 0);
-        if (sockstate == SOCKET_HAS_TIMED_OUT) {
-            PyErr_SetString(PySocketModule.timeout_error,
-                            "The read operation timed out");
-            goto error;
-        } else if (sockstate == SOCKET_TOO_LARGE_FOR_SELECT) {
-            PyErr_SetString(PySSLErrorObject,
-                            "Underlying socket too large for select().");
-            goto error;
-        } else if (sockstate == SOCKET_HAS_BEEN_CLOSED) {
-            count = 0;
-            goto done;
-        }
-    }
     do {
         PySSL_BEGIN_ALLOW_THREADS
         count = SSL_read(self->ssl, mem, len);
@@ -2219,6 +2199,15 @@
                                    sizeof(SID_CTX));
 #undef SID_CTX
 
+#ifdef X509_V_FLAG_TRUSTED_FIRST
+    {
+        /* Improve trust chain building when cross-signed intermediate
+           certificates are present. See https://bugs.python.org/issue23476. */
+        X509_STORE *store = SSL_CTX_get_cert_store(self->ctx);
+        X509_STORE_set_flags(store, X509_V_FLAG_TRUSTED_FIRST);
+    }
+#endif
+
     return (PyObject *)self;
 }
 
@@ -4287,10 +4276,11 @@
 
     if (_ssl_locks == NULL) {
         _ssl_locks_count = CRYPTO_num_locks();
-        _ssl_locks = (PyThread_type_lock *)
-            PyMem_Malloc(sizeof(PyThread_type_lock) * _ssl_locks_count);
-        if (_ssl_locks == NULL)
+        _ssl_locks = PyMem_New(PyThread_type_lock, _ssl_locks_count);
+        if (_ssl_locks == NULL) {
+            PyErr_NoMemory();
             return 0;
+        }
         memset(_ssl_locks, 0,
                sizeof(PyThread_type_lock) * _ssl_locks_count);
         for (i = 0;  i < _ssl_locks_count;  i++) {
@@ -4468,6 +4458,10 @@
                             X509_V_FLAG_CRL_CHECK|X509_V_FLAG_CRL_CHECK_ALL);
     PyModule_AddIntConstant(m, "VERIFY_X509_STRICT",
                             X509_V_FLAG_X509_STRICT);
+#ifdef X509_V_FLAG_TRUSTED_FIRST
+    PyModule_AddIntConstant(m, "VERIFY_X509_TRUSTED_FIRST",
+                            X509_V_FLAG_TRUSTED_FIRST);
+#endif
 
     /* Alert Descriptions from ssl.h */
     /* note RESERVED constants no longer intended for use have been removed */
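
The two _ssl.c hunks above turn on X509_V_FLAG_TRUSTED_FIRST for the certificate
store of every new context (when the linked OpenSSL defines the flag) and expose
it to Python as ssl.VERIFY_X509_TRUSTED_FIRST. A small sketch of how this surfaces
at the Python level (the constant only exists when OpenSSL is new enough):

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    if hasattr(ssl, "VERIFY_X509_TRUSTED_FIRST"):
        # The flag is applied to the context's X509_STORE by default, so it
        # should also show up in the context's verify_flags bitmask.
        print(bool(ctx.verify_flags & ssl.VERIFY_X509_TRUSTED_FIRST))
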
diff --git a/Modules/_testbuffer.c b/Modules/_testbuffer.c
index a563a04..de7b567 100644
--- a/Modules/_testbuffer.c
+++ b/Modules/_testbuffer.c
@@ -850,7 +850,7 @@
     Py_ssize_t *dest;
     Py_ssize_t x, i;
 
-    dest = PyMem_Malloc(len * (sizeof *dest));
+    dest = PyMem_New(Py_ssize_t, len);
     if (dest == NULL) {
         PyErr_NoMemory();
         return NULL;
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index bed6568..a8ce0dc 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -69,6 +69,10 @@
 static PyObject*
 test_sizeof_c_types(PyObject *self)
 {
+#if defined(__GNUC__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 5)))
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wtype-limits"
+#endif
 #define CHECK_SIZEOF(TYPE, EXPECTED)         \
     if (EXPECTED != sizeof(TYPE))  {         \
         PyErr_Format(TestError,              \
@@ -126,6 +130,9 @@
 #undef IS_SIGNED
 #undef CHECK_SIGNESS
 #undef CHECK_SIZEOF
+#if defined(__GNUC__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 5)))
+#pragma GCC diagnostic pop
+#endif
 }
 
 
@@ -1517,7 +1524,7 @@
 
     if (!PyArg_ParseTuple(args, "Un", &unicode, &buflen))
         return NULL;
-    buffer = PyMem_Malloc(buflen * sizeof(wchar_t));
+    buffer = PyMem_New(wchar_t, buflen);
     if (buffer == NULL)
         return PyErr_NoMemory();
 
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c
index d5396f6..52069ec 100644
--- a/Modules/_tkinter.c
+++ b/Modules/_tkinter.c
@@ -21,6 +21,7 @@
 
 */
 
+#define PY_SSIZE_T_CLEAN
 
 #include "Python.h"
 #include <ctype.h>
@@ -34,7 +35,7 @@
 #endif
 
 #define CHECK_SIZE(size, elemsize) \
-    ((size_t)(size) <= Py_MAX((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize)))
+    ((size_t)(size) <= Py_MIN((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize)))
 
 /* If Tcl is compiled for threads, we must also define TCL_THREAD. We define
    it always; if Tcl is not threaded, the thread functions in
@@ -409,7 +410,7 @@
 SplitObj(PyObject *arg)
 {
     if (PyTuple_Check(arg)) {
-        int i, size;
+        Py_ssize_t i, size;
         PyObject *elem, *newelem, *result;
 
         size = PyTuple_Size(arg);
@@ -425,7 +426,7 @@
                 return NULL;
             }
             if (!result) {
-                int k;
+                Py_ssize_t k;
                 if (newelem == elem) {
                     Py_DECREF(newelem);
                     continue;
@@ -446,7 +447,7 @@
         /* Fall through, returning arg. */
     }
     else if (PyList_Check(arg)) {
-        int i, size;
+        Py_ssize_t i, size;
         PyObject *elem, *newelem, *result;
 
         size = PyList_GET_SIZE(arg);
@@ -632,12 +633,12 @@
     /* some initial arguments need to be in argv */
     if (sync || use) {
         char *args;
-        int len = 0;
+        Py_ssize_t len = 0;
 
         if (sync)
             len += sizeof "-sync";
         if (use)
-            len += strlen(use) + sizeof "-use ";
+            len += strlen(use) + sizeof "-use ";  /* never overflows */
 
         args = (char*)PyMem_Malloc(len);
         if (!args) {
@@ -887,9 +888,14 @@
     long longVal;
     int overflow;
 
-    if (PyBytes_Check(value))
+    if (PyBytes_Check(value)) {
+        if (PyBytes_GET_SIZE(value) >= INT_MAX) {
+            PyErr_SetString(PyExc_OverflowError, "bytes object is too long");
+            return NULL;
+        }
         return Tcl_NewByteArrayObj((unsigned char *)PyBytes_AS_STRING(value),
-                                   PyBytes_GET_SIZE(value));
+                                   (int)PyBytes_GET_SIZE(value));
+    }
     else if (PyBool_Check(value))
         return Tcl_NewBooleanObj(PyObject_IsTrue(value));
     else if (PyLong_CheckExact(value) &&
@@ -921,7 +927,7 @@
         }
         for (i = 0; i < size; i++)
           argv[i] = AsObj(PySequence_Fast_GET_ITEM(value,i));
-        result = Tcl_NewListObj(size, argv);
+        result = Tcl_NewListObj((int)size, argv);
         PyMem_Free(argv);
         return result;
     }
@@ -946,7 +952,7 @@
         }
         kind = PyUnicode_KIND(value);
         if (kind == sizeof(Tcl_UniChar))
-            return Tcl_NewUnicodeObj(inbuf, size);
+            return Tcl_NewUnicodeObj(inbuf, (int)size);
         allocsize = ((size_t)size) * sizeof(Tcl_UniChar);
         outbuf = (Tcl_UniChar*)PyMem_Malloc(allocsize);
         /* Else overflow occurred, and we take the next exit */
@@ -971,7 +977,7 @@
 #endif
             outbuf[i] = ch;
         }
-        result = Tcl_NewUnicodeObj(outbuf, size);
+        result = Tcl_NewUnicodeObj(outbuf, (int)size);
         PyMem_Free(outbuf);
         return result;
     }
@@ -1139,10 +1145,10 @@
             Tcl_IncrRefCount(objv[i]);
         }
     }
-    *pobjc = objc;
+    *pobjc = (int)objc;
     return objv;
 finally:
-    Tkapp_CallDeallocArgs(objv, objStore, objc);
+    Tkapp_CallDeallocArgs(objv, objStore, (int)objc);
     return NULL;
 }
 
@@ -1495,7 +1501,6 @@
 #ifdef WITH_THREAD
     TkappObject *self = (TkappObject*)selfptr;
     if (self->threaded && self->thread_id != Tcl_GetCurrentThread()) {
-        TkappObject *self = (TkappObject*)selfptr;
         VarEvent *ev;
         PyObject *res, *exc_type, *exc_val;
         Tcl_Condition cond = NULL;
@@ -2404,7 +2409,7 @@
 };
 
 static PyType_Spec Tktt_Type_spec = {
-    "tktimertoken",
+    "_tkinter.tktimertoken",
     sizeof(TkttObject),
     0,
     Py_TPFLAGS_DEFAULT,
@@ -2708,7 +2713,7 @@
 
 
 static PyType_Spec Tkapp_Type_spec = {
-    "tkapp",
+    "_tkinter.tkapp",
     sizeof(TkappObject),
     0,
     Py_TPFLAGS_DEFAULT,
@@ -2721,20 +2726,20 @@
 
 typedef struct {
     PyObject* tuple;
-    int size; /* current size */
-    int maxsize; /* allocated size */
+    Py_ssize_t size; /* current size */
+    Py_ssize_t maxsize; /* allocated size */
 } FlattenContext;
 
 static int
-_bump(FlattenContext* context, int size)
+_bump(FlattenContext* context, Py_ssize_t size)
 {
     /* expand tuple to hold (at least) size new items.
        return true if successful, false if an exception was raised */
 
-    int maxsize = context->maxsize * 2;
+    Py_ssize_t maxsize = context->maxsize * 2;  /* never overflows */
 
     if (maxsize < context->size + size)
-        maxsize = context->size + size;
+        maxsize = context->size + size;  /* never overflows */
 
     context->maxsize = maxsize;
 
@@ -2746,7 +2751,7 @@
 {
     /* add tuple or list to argument tuple (recursively) */
 
-    int i, size;
+    Py_ssize_t i, size;
 
     if (depth > 1000) {
         PyErr_SetString(PyExc_ValueError,
diff --git a/Modules/_winapi.c b/Modules/_winapi.c
index f118436..0274874 100644
--- a/Modules/_winapi.c
+++ b/Modules/_winapi.c
@@ -670,13 +670,23 @@
                 "environment can only contain strings");
             goto error;
         }
+        if (totalsize > PY_SSIZE_T_MAX - PyUnicode_GET_LENGTH(key) - 1) {
+            PyErr_SetString(PyExc_OverflowError, "environment too long");
+            goto error;
+        }
         totalsize += PyUnicode_GET_LENGTH(key) + 1;    /* +1 for '=' */
+        if (totalsize > PY_SSIZE_T_MAX - PyUnicode_GET_LENGTH(value) - 1) {
+            PyErr_SetString(PyExc_OverflowError, "environment too long");
+            goto error;
+        }
         totalsize += PyUnicode_GET_LENGTH(value) + 1;  /* +1 for '\0' */
     }
 
-    buffer = PyMem_Malloc(totalsize * sizeof(Py_UCS4));
-    if (! buffer)
+    buffer = PyMem_NEW(Py_UCS4, totalsize);
+    if (! buffer) {
+        PyErr_NoMemory();
         goto error;
+    }
     p = buffer;
     end = buffer + totalsize;
 
@@ -991,6 +1001,12 @@
 \n\
 Return the version number of the current operating system.");
 
+/* Disable deprecation warnings about GetVersionEx as the result is
+   being passed straight through to the caller, who is responsible for
+   using it correctly. */
+#pragma warning(push)
+#pragma warning(disable:4996)
+
 static PyObject *
 winapi_GetVersion(PyObject* self, PyObject* args)
 {
@@ -1000,6 +1016,8 @@
     return PyLong_FromUnsignedLong(GetVersion());
 }
 
+#pragma warning(pop)
+
 static PyObject *
 winapi_OpenProcess(PyObject *self, PyObject *args)
 {
diff --git a/Modules/binascii.c b/Modules/binascii.c
index 86b63bb..4e6953b 100644
--- a/Modules/binascii.c
+++ b/Modules/binascii.c
@@ -228,13 +228,13 @@
     if (PyObject_GetBuffer(arg, buf, PyBUF_SIMPLE) != 0) {
         PyErr_Format(PyExc_TypeError,
                      "argument should be bytes, buffer or ASCII string, "
-                     "not %R", Py_TYPE(arg));
+                     "not '%.100s'", Py_TYPE(arg)->tp_name);
         return 0;
     }
     if (!PyBuffer_IsContiguous(buf, 'C')) {
         PyErr_Format(PyExc_TypeError,
                      "argument should be a contiguous buffer, "
-                     "not %R", Py_TYPE(arg));
+                     "not '%.100s'", Py_TYPE(arg)->tp_name);
         PyBuffer_Release(buf);
         return 0;
     }
diff --git a/Modules/cjkcodecs/_codecs_iso2022.c b/Modules/cjkcodecs/_codecs_iso2022.c
index 5c401aa..1ce4218 100644
--- a/Modules/cjkcodecs/_codecs_iso2022.c
+++ b/Modules/cjkcodecs/_codecs_iso2022.c
@@ -292,7 +292,7 @@
                   const unsigned char **inbuf, Py_ssize_t *inleft)
 {
     unsigned char charset, designation;
-    Py_ssize_t i, esclen;
+    Py_ssize_t i, esclen = 0;
 
     for (i = 1;i < MAX_ESCSEQLEN;i++) {
         if (i >= *inleft)
@@ -307,10 +307,9 @@
         }
     }
 
-    if (i >= MAX_ESCSEQLEN)
-        return 1; /* unterminated escape sequence */
-
     switch (esclen) {
+    case 0:
+        return 1; /* unterminated escape sequence */
     case 3:
         if (INBYTE2 == '$') {
             charset = INBYTE3 | CHARSET_DBCS;
diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c
index 31bb35f..fe5b362 100644
--- a/Modules/cjkcodecs/multibytecodec.c
+++ b/Modules/cjkcodecs/multibytecodec.c
@@ -174,8 +174,10 @@
     orgsize = PyBytes_GET_SIZE(buf->outobj);
     incsize = (esize < (orgsize >> 1) ? (orgsize >> 1) | 1 : esize);
 
-    if (orgsize > PY_SSIZE_T_MAX - incsize)
+    if (orgsize > PY_SSIZE_T_MAX - incsize) {
+        PyErr_NoMemory();
         return -1;
+    }
 
     if (_PyBytes_Resize(&buf->outobj, orgsize + incsize) == -1)
         return -1;
@@ -186,11 +188,11 @@
 
     return 0;
 }
-#define REQUIRE_ENCODEBUFFER(buf, s) {                                  \
-    if ((s) < 1 || (buf)->outbuf + (s) > (buf)->outbuf_end)             \
+#define REQUIRE_ENCODEBUFFER(buf, s) do {                               \
+    if ((s) < 0 || (s) > (buf)->outbuf_end - (buf)->outbuf)             \
         if (expand_encodebuffer(buf, s) == -1)                          \
             goto errorexit;                                             \
-}
+} while(0)
 
 
 /**
@@ -324,10 +326,11 @@
 
     assert(PyBytes_Check(retstr));
     retstrsize = PyBytes_GET_SIZE(retstr);
-    REQUIRE_ENCODEBUFFER(buf, retstrsize);
-
-    memcpy(buf->outbuf, PyBytes_AS_STRING(retstr), retstrsize);
-    buf->outbuf += retstrsize;
+    if (retstrsize > 0) {
+        REQUIRE_ENCODEBUFFER(buf, retstrsize);
+        memcpy(buf->outbuf, PyBytes_AS_STRING(retstr), retstrsize);
+        buf->outbuf += retstrsize;
+    }
 
     newpos = PyLong_AsSsize_t(PyTuple_GET_ITEM(retobj, 1));
     if (newpos < 0 && !PyErr_Occurred())
diff --git a/Modules/clinic/_bz2module.c.h b/Modules/clinic/_bz2module.c.h
index 8a201a0..5201432 100644
--- a/Modules/clinic/_bz2module.c.h
+++ b/Modules/clinic/_bz2module.c.h
@@ -95,34 +95,43 @@
 }
 
 PyDoc_STRVAR(_bz2_BZ2Decompressor_decompress__doc__,
-"decompress($self, data, /)\n"
+"decompress($self, /, data, max_length=-1)\n"
 "--\n"
 "\n"
-"Provide data to the decompressor object.\n"
+"Decompress *data*, returning uncompressed data as bytes.\n"
 "\n"
-"Returns a chunk of decompressed data if possible, or b\'\' otherwise.\n"
+"If *max_length* is nonnegative, returns at most *max_length* bytes of\n"
+"decompressed data. If this limit is reached and further output can be\n"
+"produced, *self.needs_input* will be set to ``False``. In this case, the next\n"
+"call to *decompress()* may provide *data* as b\'\' to obtain more of the output.\n"
 "\n"
-"Attempting to decompress data after the end of stream is reached\n"
-"raises an EOFError.  Any data found after the end of the stream\n"
-"is ignored and saved in the unused_data attribute.");
+"If all of the input data was decompressed and returned (either because this\n"
+"was less than *max_length* bytes, or because *max_length* was negative),\n"
+"*self.needs_input* will be set to True.\n"
+"\n"
+"Attempting to decompress data after the end of stream is reached raises an\n"
+"EOFError.  Any data found after the end of the stream is ignored and saved in\n"
+"the unused_data attribute.");
 
 #define _BZ2_BZ2DECOMPRESSOR_DECOMPRESS_METHODDEF    \
-    {"decompress", (PyCFunction)_bz2_BZ2Decompressor_decompress, METH_VARARGS, _bz2_BZ2Decompressor_decompress__doc__},
+    {"decompress", (PyCFunction)_bz2_BZ2Decompressor_decompress, METH_VARARGS|METH_KEYWORDS, _bz2_BZ2Decompressor_decompress__doc__},
 
 static PyObject *
-_bz2_BZ2Decompressor_decompress_impl(BZ2Decompressor *self, Py_buffer *data);
+_bz2_BZ2Decompressor_decompress_impl(BZ2Decompressor *self, Py_buffer *data, Py_ssize_t max_length);
 
 static PyObject *
-_bz2_BZ2Decompressor_decompress(BZ2Decompressor *self, PyObject *args)
+_bz2_BZ2Decompressor_decompress(BZ2Decompressor *self, PyObject *args, PyObject *kwargs)
 {
     PyObject *return_value = NULL;
+    static char *_keywords[] = {"data", "max_length", NULL};
     Py_buffer data = {NULL, NULL};
+    Py_ssize_t max_length = -1;
 
-    if (!PyArg_ParseTuple(args,
-        "y*:decompress",
-        &data))
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+        "y*|n:decompress", _keywords,
+        &data, &max_length))
         goto exit;
-    return_value = _bz2_BZ2Decompressor_decompress_impl(self, &data);
+    return_value = _bz2_BZ2Decompressor_decompress_impl(self, &data, max_length);
 
 exit:
     /* Cleanup for data */
@@ -159,4 +168,4 @@
 exit:
     return return_value;
 }
-/*[clinic end generated code: output=21ca4405519a0931 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=8e65e3953430bc3d input=a9049054013a1b77]*/
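
The regenerated clinic code above corresponds to the new max_length parameter of
BZ2Decompressor.decompress() described in the docstring. A minimal Python-level
sketch of the documented behaviour (illustration only):

    import bz2

    payload = bz2.compress(b"x" * 100000)
    d = bz2.BZ2Decompressor()

    out = d.decompress(payload, max_length=1024)
    print(len(out))        # at most 1024 bytes returned
    print(d.needs_input)   # False: more decompressed data is buffered

    # As the docstring says, pass b'' to retrieve the buffered output.
    out += d.decompress(b"", max_length=-1)
    print(d.eof)           # True once the end of the stream is reached
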
diff --git a/Modules/clinic/_lzmamodule.c.h b/Modules/clinic/_lzmamodule.c.h
index a46a152..6364271 100644
--- a/Modules/clinic/_lzmamodule.c.h
+++ b/Modules/clinic/_lzmamodule.c.h
@@ -65,7 +65,7 @@
 "decompress($self, /, data, max_length=-1)\n"
 "--\n"
 "\n"
-"Decompresses *data*, returning uncompressed data as bytes.\n"
+"Decompress *data*, returning uncompressed data as bytes.\n"
 "\n"
 "If *max_length* is nonnegative, returns at most *max_length* bytes of\n"
 "decompressed data. If this limit is reached and further output can be\n"
@@ -251,4 +251,4 @@
 
     return return_value;
 }
-/*[clinic end generated code: output=d17fac38b09626d8 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=dc42b73890609369 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/cmathmodule.c.h b/Modules/clinic/cmathmodule.c.h
index 83de518..d0c48d3 100644
--- a/Modules/clinic/cmathmodule.c.h
+++ b/Modules/clinic/cmathmodule.c.h
@@ -848,4 +848,4 @@
 exit:
     return return_value;
 }
-/*[clinic end generated code: output=4407f898ae07c83d input=a9049054013a1b77]*/
+/*[clinic end generated code: output=9b6d81711e4e3c4b input=a9049054013a1b77]*/
diff --git a/Modules/cmathmodule.c b/Modules/cmathmodule.c
index 5746db1..67161ad 100644
--- a/Modules/cmathmodule.c
+++ b/Modules/cmathmodule.c
@@ -212,7 +212,7 @@
 
 static Py_complex
 cmath_acosh_impl(PyModuleDef *module, Py_complex z)
-/*[clinic end generated code: output=c23c776429def981 input=bc016412080bb3e9]*/
+/*[clinic end generated code: output=c23c776429def981 input=3f61bee7d703e53c]*/
 {
     Py_complex s1, s2, r;
 
@@ -267,7 +267,7 @@
 
 static Py_complex
 cmath_asinh_impl(PyModuleDef *module, Py_complex z)
-/*[clinic end generated code: output=0c6664823c7b1b35 input=5a21fa0242928c9b]*/
+/*[clinic end generated code: output=0c6664823c7b1b35 input=5c09448fcfc89a79]*/
 {
     Py_complex s1, s2, r;
 
@@ -358,7 +358,7 @@
 
 static Py_complex
 cmath_atanh_impl(PyModuleDef *module, Py_complex z)
-/*[clinic end generated code: output=279e0b9fefc8da7c input=df19cdc9f9d431c9]*/
+/*[clinic end generated code: output=279e0b9fefc8da7c input=2b3fdb82fb34487b]*/
 {
     Py_complex r;
     double ay, h;
diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c
index 568724b..4643c0e 100644
--- a/Modules/faulthandler.c
+++ b/Modules/faulthandler.c
@@ -458,7 +458,7 @@
         assert(st == PY_LOCK_FAILURE);
 
         /* get the thread holding the GIL, NULL if no thread hold the GIL */
-        current = _Py_atomic_load_relaxed(&_PyThreadState_Current);
+        current = (PyThreadState*)_Py_atomic_load_relaxed(&_PyThreadState_Current);
 
         write(thread.fd, thread.header, (int)thread.header_len);
 
@@ -927,12 +927,12 @@
 }
 
 #if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION)
-static void*
-stack_overflow(void *min_sp, void *max_sp, size_t *depth)
+static Py_uintptr_t
+stack_overflow(Py_uintptr_t min_sp, Py_uintptr_t max_sp, size_t *depth)
 {
     /* allocate 4096 bytes on the stack at each call */
     unsigned char buffer[4096];
-    void *sp = &buffer;
+    Py_uintptr_t sp = (Py_uintptr_t)&buffer;
     *depth += 1;
     if (sp < min_sp || max_sp < sp)
         return sp;
@@ -945,7 +945,8 @@
 faulthandler_stack_overflow(PyObject *self)
 {
     size_t depth, size;
-    char *sp = (char *)&depth, *stop;
+    Py_uintptr_t sp = (Py_uintptr_t)&depth;
+    Py_uintptr_t stop;
 
     faulthandler_suppress_crash_report();
     depth = 0;
diff --git a/Modules/fcntlmodule.c b/Modules/fcntlmodule.c
index 1f1cef9..780e222 100644
--- a/Modules/fcntlmodule.c
+++ b/Modules/fcntlmodule.c
@@ -152,11 +152,8 @@
 /*[clinic end generated code: output=ad47738c118622bf input=a55a6ee8e494c449]*/
 {
 #define IOCTL_BUFSZ 1024
-    /* We use the unsigned non-checked 'I'
-       format for the 'code' parameter because Python turns 0x8000000
-       into either a large positive number (PyLong or PyInt on 64-bit
-       platforms) or a negative number on others (32-bit PyInt)
-       whereas the system expects it to be a 32bit bit field value
+    /* We use the unsigned non-checked 'I' format for the 'code' parameter
+       because the system expects it to be a 32bit bit field value
        regardless of it being passed as an int or unsigned long on
        various platforms.  See the termios.TIOCSWINSZ constant across
        platforms for an example of this.
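
The rewritten comment above explains why the ioctl request code is parsed with the
unsigned, non-range-checked 'I' format: on some platforms constants such as
termios.TIOCSWINSZ have the high bit set and would not fit in a signed int, yet the
kernel simply treats them as a 32-bit bit field. From Python the call looks the same
either way; for instance, querying the terminal size with the companion TIOCGWINSZ
request (a sketch; Unix-only, and assumes stdout is attached to a terminal):

    import fcntl
    import struct
    import sys
    import termios

    packed = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ,
                         struct.pack("HHHH", 0, 0, 0, 0))
    rows, cols, _, _ = struct.unpack("HHHH", packed)
    print(rows, cols)
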
diff --git a/Modules/getpath.c b/Modules/getpath.c
index 11eece4..3564d72 100644
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -735,7 +735,7 @@
     bufsz += wcslen(zip_path) + 1;
     bufsz += wcslen(exec_prefix) + 1;
 
-    buf = (wchar_t *)PyMem_Malloc(bufsz * sizeof(wchar_t));
+    buf = PyMem_New(wchar_t, bufsz);
     if (buf == NULL) {
         Py_FatalError(
             "Not enough memory for dynamic PYTHONPATH");
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
index 1f42022..afff7e4 100644
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -3874,7 +3874,7 @@
 
 fast_mode:  when cnt an integer < PY_SSIZE_T_MAX and no step is specified.
 
-    assert(cnt != PY_SSIZE_T_MAX && long_cnt == NULL && long_step==PyInt(1));
+    assert(cnt != PY_SSIZE_T_MAX && long_cnt == NULL && long_step==PyLong(1));
     Advances with:  cnt += 1
     When count hits PY_SSIZE_T_MAX, switch to slow_mode.
 
diff --git a/Modules/main.c b/Modules/main.c
index c4883c9..74e512b 100644
--- a/Modules/main.c
+++ b/Modules/main.c
@@ -520,16 +520,16 @@
 #ifdef MS_WINDOWS
     if (!Py_IgnoreEnvironmentFlag && (wp = _wgetenv(L"PYTHONWARNINGS")) &&
         *wp != L'\0') {
-        wchar_t *buf, *warning;
+        wchar_t *buf, *warning, *context = NULL;
 
         buf = (wchar_t *)PyMem_RawMalloc((wcslen(wp) + 1) * sizeof(wchar_t));
         if (buf == NULL)
             Py_FatalError(
                "not enough memory to copy PYTHONWARNINGS");
         wcscpy(buf, wp);
-        for (warning = wcstok(buf, L",");
+        for (warning = wcstok_s(buf, L",", &context);
              warning != NULL;
-             warning = wcstok(NULL, L",")) {
+             warning = wcstok_s(NULL, L",", &context)) {
             PySys_AddWarnOption(warning);
         }
         PyMem_RawFree(buf);
@@ -752,9 +752,8 @@
                 }
             }
             {
-                /* XXX: does this work on Win/Win64? (see posix_fstat) */
-                struct stat sb;
-                if (fstat(fileno(fp), &sb) == 0 &&
+                struct _Py_stat_struct sb;
+                if (_Py_fstat(fileno(fp), &sb) == 0 &&
                     S_ISDIR(sb.st_mode)) {
                     fprintf(stderr, "%ls: '%ls' is a directory, cannot continue\n", argv[0], filename);
                     fclose(fp);
diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c
index 1371424..ac134b8 100644
--- a/Modules/mmapmodule.c
+++ b/Modules/mmapmodule.c
@@ -459,8 +459,8 @@
 
 #ifdef UNIX
     {
-        struct stat buf;
-        if (-1 == fstat(self->fd, &buf)) {
+        struct _Py_stat_struct buf;
+        if (-1 == _Py_fstat(self->fd, &buf)) {
             PyErr_SetFromErrno(PyExc_OSError);
             return NULL;
         }
@@ -1107,7 +1107,7 @@
 new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict)
 {
 #ifdef HAVE_FSTAT
-    struct stat st;
+    struct _Py_stat_struct st;
 #endif
     mmap_object *m_obj;
     PyObject *map_size_obj = NULL;
@@ -1174,7 +1174,7 @@
         (void)fcntl(fd, F_FULLFSYNC);
 #endif
 #ifdef HAVE_FSTAT
-    if (fd != -1 && fstat(fd, &st) == 0 && S_ISREG(st.st_mode)) {
+    if (fd != -1 && _Py_fstat(fd, &st) == 0 && S_ISREG(st.st_mode)) {
         if (map_size == 0) {
             if (st.st_size == 0) {
                 PyErr_SetString(PyExc_ValueError,
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index 118a380..e47bd84 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -25,6 +25,7 @@
 #define PY_SSIZE_T_CLEAN
 
 #include "Python.h"
+#include "structmember.h"
 #ifndef MS_WINDOWS
 #include "posixmodule.h"
 #else
@@ -350,8 +351,8 @@
 #ifdef MS_WINDOWS
 #       define STAT win32_stat
 #       define LSTAT win32_lstat
-#       define FSTAT win32_fstat
-#       define STRUCT_STAT struct win32_stat
+#       define FSTAT _Py_fstat
+#       define STRUCT_STAT struct _Py_stat_struct
 #else
 #       define STAT stat
 #       define LSTAT lstat
@@ -372,6 +373,20 @@
 
 #define DWORD_MAX 4294967295U
 
+#ifdef MS_WINDOWS
+#define INITFUNC PyInit_nt
+#define MODNAME "nt"
+#else
+#define INITFUNC PyInit_posix
+#define MODNAME "posix"
+#endif
+
+#ifdef MS_WINDOWS
+/* defined in fileutils.c */
+PyAPI_FUNC(void) _Py_time_t_to_FILE_TIME(time_t, int, FILETIME *);
+PyAPI_FUNC(void) _Py_attribute_data_to_stat(BY_HANDLE_FILE_INFORMATION *,
+                                            ULONG, struct _Py_stat_struct *);
+#endif
 
 #ifdef MS_WINDOWS
 static int
@@ -1045,99 +1060,16 @@
 }
 
 
-#if defined _MSC_VER && _MSC_VER >= 1400
-/* Microsoft CRT in VS2005 and higher will verify that a filehandle is
- * valid and raise an assertion if it isn't.
- * Normally, an invalid fd is likely to be a C program error and therefore
- * an assertion can be useful, but it does contradict the POSIX standard
- * which for write(2) states:
- *    "Otherwise, -1 shall be returned and errno set to indicate the error."
- *    "[EBADF] The fildes argument is not a valid file descriptor open for
- *     writing."
- * Furthermore, python allows the user to enter any old integer
- * as a fd and should merely raise a python exception on error.
- * The Microsoft CRT doesn't provide an official way to check for the
- * validity of a file descriptor, but we can emulate its internal behaviour
- * by using the exported __pinfo data member and knowledge of the
- * internal structures involved.
- * The structures below must be updated for each version of visual studio
- * according to the file internal.h in the CRT source, until MS comes
- * up with a less hacky way to do this.
- * (all of this is to avoid globally modifying the CRT behaviour using
- * _set_invalid_parameter_handler() and _CrtSetReportMode())
+#if defined _MSC_VER && _MSC_VER >= 1400 && _MSC_VER < 1900
+/* Legacy implementation of _PyVerify_fd_dup2 while transitioning to
+ * MSVC 14.0. This should eventually be removed. (issue23524)
  */
-/* The actual size of the structure is determined at runtime.
- * Only the first items must be present.
- */
-
-#if _MSC_VER >= 1900
-
-typedef struct {
-    CRITICAL_SECTION lock;
-    intptr_t osfhnd;
-    __int64 startpos;
-    char osfile;
-} my_ioinfo;
-
-#define IOINFO_L2E 6
-#define IOINFO_ARRAYS 128
-
-#else
-
-typedef struct {
-    intptr_t osfhnd;
-    char osfile;
-} my_ioinfo;
-
 #define IOINFO_L2E 5
 #define IOINFO_ARRAYS 64
-
-#endif
-
-extern __declspec(dllimport) char * __pioinfo[];
 #define IOINFO_ARRAY_ELTS   (1 << IOINFO_L2E)
 #define _NHANDLE_           (IOINFO_ARRAYS * IOINFO_ARRAY_ELTS)
-#define FOPEN 0x01
 #define _NO_CONSOLE_FILENO (intptr_t)-2
 
-/* This function emulates what the windows CRT does to validate file handles */
-int
-_PyVerify_fd(int fd)
-{
-    const int i1 = fd >> IOINFO_L2E;
-    const int i2 = fd & ((1 << IOINFO_L2E) - 1);
-
-    static size_t sizeof_ioinfo = 0;
-
-    /* Determine the actual size of the ioinfo structure,
-     * as used by the CRT loaded in memory
-     */
-    if (sizeof_ioinfo == 0 && __pioinfo[0] != NULL) {
-        sizeof_ioinfo = _msize(__pioinfo[0]) / IOINFO_ARRAY_ELTS;
-    }
-    if (sizeof_ioinfo == 0) {
-        /* This should not happen... */
-        goto fail;
-    }
-
-    /* See that it isn't a special CLEAR fileno */
-    if (fd != _NO_CONSOLE_FILENO) {
-        /* Microsoft CRT would check that 0<=fd<_nhandle but we can't do that.  Instead
-         * we check pointer validity and other info
-         */
-        if (0 <= i1 && i1 < IOINFO_ARRAYS && __pioinfo[i1] != NULL) {
-            /* finally, check that the file is open */
-            my_ioinfo* info = (my_ioinfo*)(__pioinfo[i1] + i2 * sizeof_ioinfo);
-            if (info->osfile & FOPEN) {
-                return 1;
-            }
-        }
-    }
-  fail:
-    errno = EBADF;
-    return 0;
-}
-
 /* the special case of checking dup2.  The target fd must be in a sensible range */
 static int
 _PyVerify_fd_dup2(int fd1, int fd2)
@@ -1152,8 +1084,7 @@
         return 0;
 }
 #else
-/* dummy version. _PyVerify_fd() is already defined in fileobject.h */
-#define _PyVerify_fd_dup2(A, B) (1)
+#define _PyVerify_fd_dup2(fd1, fd2) (_PyVerify_fd(fd1) && (fd2) >= 0)
 #endif
 
 #ifdef MS_WINDOWS
@@ -1451,91 +1382,6 @@
 #define HAVE_STAT_NSEC 1
 #define HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES 1
 
-struct win32_stat{
-    unsigned long st_dev;
-    __int64 st_ino;
-    unsigned short st_mode;
-    int st_nlink;
-    int st_uid;
-    int st_gid;
-    unsigned long st_rdev;
-    __int64 st_size;
-    time_t st_atime;
-    int st_atime_nsec;
-    time_t st_mtime;
-    int st_mtime_nsec;
-    time_t st_ctime;
-    int st_ctime_nsec;
-    unsigned long st_file_attributes;
-};
-
-static __int64 secs_between_epochs = 11644473600; /* Seconds between 1.1.1601 and 1.1.1970 */
-
-static void
-FILE_TIME_to_time_t_nsec(FILETIME *in_ptr, time_t *time_out, int* nsec_out)
-{
-    /* XXX endianness. Shouldn't matter, as all Windows implementations are little-endian */
-    /* Cannot simply cast and dereference in_ptr,
-       since it might not be aligned properly */
-    __int64 in;
-    memcpy(&in, in_ptr, sizeof(in));
-    *nsec_out = (int)(in % 10000000) * 100; /* FILETIME is in units of 100 nsec. */
-    *time_out = Py_SAFE_DOWNCAST((in / 10000000) - secs_between_epochs, __int64, time_t);
-}
-
-static void
-time_t_to_FILE_TIME(time_t time_in, int nsec_in, FILETIME *out_ptr)
-{
-    /* XXX endianness */
-    __int64 out;
-    out = time_in + secs_between_epochs;
-    out = out * 10000000 + nsec_in / 100;
-    memcpy(out_ptr, &out, sizeof(out));
-}
-
-/* Below, we *know* that ugo+r is 0444 */
-#if _S_IREAD != 0400
-#error Unsupported C library
-#endif
-static int
-attributes_to_mode(DWORD attr)
-{
-    int m = 0;
-    if (attr & FILE_ATTRIBUTE_DIRECTORY)
-        m |= _S_IFDIR | 0111; /* IFEXEC for user,group,other */
-    else
-        m |= _S_IFREG;
-    if (attr & FILE_ATTRIBUTE_READONLY)
-        m |= 0444;
-    else
-        m |= 0666;
-    return m;
-}
-
-static int
-attribute_data_to_stat(BY_HANDLE_FILE_INFORMATION *info, ULONG reparse_tag, struct win32_stat *result)
-{
-    memset(result, 0, sizeof(*result));
-    result->st_mode = attributes_to_mode(info->dwFileAttributes);
-    result->st_size = (((__int64)info->nFileSizeHigh)<<32) + info->nFileSizeLow;
-    result->st_dev = info->dwVolumeSerialNumber;
-    result->st_rdev = result->st_dev;
-    FILE_TIME_to_time_t_nsec(&info->ftCreationTime, &result->st_ctime, &result->st_ctime_nsec);
-    FILE_TIME_to_time_t_nsec(&info->ftLastWriteTime, &result->st_mtime, &result->st_mtime_nsec);
-    FILE_TIME_to_time_t_nsec(&info->ftLastAccessTime, &result->st_atime, &result->st_atime_nsec);
-    result->st_nlink = info->nNumberOfLinks;
-    result->st_ino = (((__int64)info->nFileIndexHigh)<<32) + info->nFileIndexLow;
-    if (reparse_tag == IO_REPARSE_TAG_SYMLINK) {
-        /* first clear the S_IFMT bits */
-        result->st_mode ^= (result->st_mode & S_IFMT);
-        /* now set the bits that make this a symlink */
-        result->st_mode |= S_IFLNK;
-    }
-    result->st_file_attributes = info->dwFileAttributes;
-
-    return 0;
-}
-
 static BOOL
 attributes_from_dir(LPCSTR pszFile, BY_HANDLE_FILE_INFORMATION *info, ULONG *reparse_tag)
 {
@@ -1559,6 +1405,25 @@
     return TRUE;
 }
 
+static void
+find_data_to_file_info_w(WIN32_FIND_DATAW *pFileData,
+                         BY_HANDLE_FILE_INFORMATION *info,
+                         ULONG *reparse_tag)
+{
+    memset(info, 0, sizeof(*info));
+    info->dwFileAttributes = pFileData->dwFileAttributes;
+    info->ftCreationTime   = pFileData->ftCreationTime;
+    info->ftLastAccessTime = pFileData->ftLastAccessTime;
+    info->ftLastWriteTime  = pFileData->ftLastWriteTime;
+    info->nFileSizeHigh    = pFileData->nFileSizeHigh;
+    info->nFileSizeLow     = pFileData->nFileSizeLow;
+/*  info->nNumberOfLinks   = 1; */
+    if (pFileData->dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)
+        *reparse_tag = pFileData->dwReserved0;
+    else
+        *reparse_tag = 0;
+}
+
 static BOOL
 attributes_from_dir_w(LPCWSTR pszFile, BY_HANDLE_FILE_INFORMATION *info, ULONG *reparse_tag)
 {
@@ -1568,17 +1433,7 @@
     if (hFindFile == INVALID_HANDLE_VALUE)
         return FALSE;
     FindClose(hFindFile);
-    memset(info, 0, sizeof(*info));
-    *reparse_tag = 0;
-    info->dwFileAttributes = FileData.dwFileAttributes;
-    info->ftCreationTime   = FileData.ftCreationTime;
-    info->ftLastAccessTime = FileData.ftLastAccessTime;
-    info->ftLastWriteTime  = FileData.ftLastWriteTime;
-    info->nFileSizeHigh    = FileData.nFileSizeHigh;
-    info->nFileSizeLow     = FileData.nFileSizeLow;
-/*  info->nNumberOfLinks   = 1; */
-    if (FileData.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)
-        *reparse_tag = FileData.dwReserved0;
+    find_data_to_file_info_w(&FileData, info, reparse_tag);
     return TRUE;
 }
 
@@ -1620,7 +1475,7 @@
     if(!buf_size)
         return FALSE;
 
-    buf = (wchar_t *)PyMem_Malloc((buf_size+1)*sizeof(wchar_t));
+    buf = PyMem_New(wchar_t, buf_size+1);
     if (!buf) {
         SetLastError(ERROR_OUTOFMEMORY);
         return FALSE;
@@ -1646,10 +1501,10 @@
 }
 
 static int
-win32_xstat_impl_w(const wchar_t *path, struct win32_stat *result,
+win32_xstat_impl_w(const wchar_t *path, struct _Py_stat_struct *result,
                    BOOL traverse);
 static int
-win32_xstat_impl(const char *path, struct win32_stat *result,
+win32_xstat_impl(const char *path, struct _Py_stat_struct *result,
                  BOOL traverse)
 {
     int code;
@@ -1732,7 +1587,7 @@
         } else
             CloseHandle(hFile);
     }
-    attribute_data_to_stat(&info, reparse_tag, result);
+    _Py_attribute_data_to_stat(&info, reparse_tag, result);
 
     /* Set S_IEXEC if it is an .exe, .bat, ... */
     dot = strrchr(path, '.');
@@ -1745,7 +1600,7 @@
 }
 
 static int
-win32_xstat_impl_w(const wchar_t *path, struct win32_stat *result,
+win32_xstat_impl_w(const wchar_t *path, struct _Py_stat_struct *result,
                    BOOL traverse)
 {
     int code;
@@ -1828,7 +1683,7 @@
         } else
             CloseHandle(hFile);
     }
-    attribute_data_to_stat(&info, reparse_tag, result);
+    _Py_attribute_data_to_stat(&info, reparse_tag, result);
 
     /* Set S_IEXEC if it is an .exe, .bat, ... */
     dot = wcsrchr(path, '.');
@@ -1841,7 +1696,7 @@
 }
 
 static int
-win32_xstat(const char *path, struct win32_stat *result, BOOL traverse)
+win32_xstat(const char *path, struct _Py_stat_struct *result, BOOL traverse)
 {
     /* Protocol violation: we explicitly clear errno, instead of
        setting it to a POSIX error. Callers should use GetLastError. */
@@ -1851,7 +1706,7 @@
 }
 
 static int
-win32_xstat_w(const wchar_t *path, struct win32_stat *result, BOOL traverse)
+win32_xstat_w(const wchar_t *path, struct _Py_stat_struct *result, BOOL traverse)
 {
     /* Protocol violation: we explicitly clear errno, instead of
        setting it to a POSIX error. Callers should use GetLastError. */
@@ -1873,80 +1728,29 @@
    The _w represent Unicode equivalents of the aforementioned ANSI functions. */
 
 static int
-win32_lstat(const char* path, struct win32_stat *result)
+win32_lstat(const char* path, struct _Py_stat_struct *result)
 {
     return win32_xstat(path, result, FALSE);
 }
 
 static int
-win32_lstat_w(const wchar_t* path, struct win32_stat *result)
+win32_lstat_w(const wchar_t* path, struct _Py_stat_struct *result)
 {
     return win32_xstat_w(path, result, FALSE);
 }
 
 static int
-win32_stat(const char* path, struct win32_stat *result)
+win32_stat(const char* path, struct _Py_stat_struct *result)
 {
     return win32_xstat(path, result, TRUE);
 }
 
 static int
-win32_stat_w(const wchar_t* path, struct win32_stat *result)
+win32_stat_w(const wchar_t* path, struct _Py_stat_struct *result)
 {
     return win32_xstat_w(path, result, TRUE);
 }
 
-static int
-win32_fstat(int file_number, struct win32_stat *result)
-{
-    BY_HANDLE_FILE_INFORMATION info;
-    HANDLE h;
-    int type;
-
-    if (!_PyVerify_fd(file_number))
-        h = INVALID_HANDLE_VALUE;
-    else
-        h = (HANDLE)_get_osfhandle(file_number);
-
-    /* Protocol violation: we explicitly clear errno, instead of
-       setting it to a POSIX error. Callers should use GetLastError. */
-    errno = 0;
-
-    if (h == INVALID_HANDLE_VALUE) {
-        /* This is really a C library error (invalid file handle).
-           We set the Win32 error to the closes one matching. */
-        SetLastError(ERROR_INVALID_HANDLE);
-        return -1;
-    }
-    memset(result, 0, sizeof(*result));
-
-    type = GetFileType(h);
-    if (type == FILE_TYPE_UNKNOWN) {
-        DWORD error = GetLastError();
-        if (error != 0) {
-            return -1;
-        }
-        /* else: valid but unknown file */
-    }
-
-    if (type != FILE_TYPE_DISK) {
-        if (type == FILE_TYPE_CHAR)
-            result->st_mode = _S_IFCHR;
-        else if (type == FILE_TYPE_PIPE)
-            result->st_mode = _S_IFIFO;
-        return 0;
-    }
-
-    if (!GetFileInformationByHandle(h, &info)) {
-        return -1;
-    }
-
-    attribute_data_to_stat(&info, 0, result);
-    /* specific to fstat() */
-    result->st_ino = (((__int64)info.nFileIndexHigh)<<32) + info.nFileIndexLow;
-    return 0;
-}
-
 #endif /* MS_WINDOWS */
 
 PyDoc_STRVAR(stat_result__doc__,
@@ -4472,7 +4276,7 @@
             len = wcslen(path->wide);
         }
         /* The +5 is so we can append "\\*.*\0" */
-        wnamebuf = PyMem_Malloc((len + 5) * sizeof(wchar_t));
+        wnamebuf = PyMem_New(wchar_t, len + 5);
         if (!wnamebuf) {
             PyErr_NoMemory();
             goto exit;
@@ -4809,7 +4613,7 @@
                                   Py_ARRAY_LENGTH(woutbuf),
                                   woutbuf, &wtemp);
         if (result > Py_ARRAY_LENGTH(woutbuf)) {
-            woutbufp = PyMem_Malloc(result * sizeof(wchar_t));
+            woutbufp = PyMem_New(wchar_t, result);
             if (!woutbufp)
                 return PyErr_NoMemory();
             result = GetFullPathNameW(wpath, result, woutbufp, &wtemp);
@@ -4923,7 +4727,7 @@
     if(!buf_size)
         return win32_error_object("GetFinalPathNameByHandle", path);
 
-    target_path = (wchar_t *)PyMem_Malloc((buf_size+1)*sizeof(wchar_t));
+    target_path = PyMem_New(wchar_t, buf_size+1);
     if(!target_path)
         return PyErr_NoMemory();
 
@@ -5041,7 +4845,7 @@
         return NULL;
     }
 
-    mountpath = (wchar_t *)PyMem_Malloc(buflen * sizeof(wchar_t));
+    mountpath = PyMem_New(wchar_t, buflen);
     if (mountpath == NULL)
         return PyErr_NoMemory();
 
@@ -6436,8 +6240,8 @@
         atime = mtime;
     }
     else {
-        time_t_to_FILE_TIME(utime.atime_s, utime.atime_ns, &atime);
-        time_t_to_FILE_TIME(utime.mtime_s, utime.mtime_ns, &mtime);
+        _Py_time_t_to_FILE_TIME(utime.atime_s, utime.atime_ns, &atime);
+        _Py_time_t_to_FILE_TIME(utime.mtime_s, utime.mtime_ns, &mtime);
     }
     if (!SetFileTime(hFile, NULL, &atime, &mtime)) {
         /* Avoid putting the file name into the error here,
@@ -8421,9 +8225,9 @@
 #endif
 
 #ifdef __APPLE__
-    groups = PyMem_Malloc(ngroups * sizeof(int));
+    groups = PyMem_New(int, ngroups);
 #else
-    groups = PyMem_Malloc(ngroups * sizeof(gid_t));
+    groups = PyMem_New(gid_t, ngroups);
 #endif
     if (groups == NULL)
         return PyErr_NoMemory();
@@ -8523,7 +8327,7 @@
         /* groups will fit in existing array */
         alt_grouplist = grouplist;
     } else {
-        alt_grouplist = PyMem_Malloc(n * sizeof(gid_t));
+        alt_grouplist = PyMem_New(gid_t, n);
         if (alt_grouplist == NULL) {
             errno = EINVAL;
             return posix_error();
@@ -8549,7 +8353,7 @@
                 /* Avoid malloc(0) */
                 alt_grouplist = grouplist;
             } else {
-                alt_grouplist = PyMem_Malloc(n * sizeof(gid_t));
+                alt_grouplist = PyMem_New(gid_t, n);
                 if (alt_grouplist == NULL) {
                     errno = EINVAL;
                     return posix_error();
@@ -16544,6 +16348,782 @@
 #endif   /* !MS_WINDOWS */
 
 
+PyDoc_STRVAR(posix_scandir__doc__,
+"scandir(path='.') -> iterator of DirEntry objects for given path");
+
+static char *follow_symlinks_keywords[] = {"follow_symlinks", NULL};
+
+typedef struct {
+    PyObject_HEAD
+    PyObject *name;
+    PyObject *path;
+    PyObject *stat;
+    PyObject *lstat;
+#ifdef MS_WINDOWS
+    struct _Py_stat_struct win32_lstat;
+    __int64 win32_file_index;
+    int got_file_index;
+#else /* POSIX */
+#ifdef HAVE_DIRENT_D_TYPE
+    unsigned char d_type;
+#endif
+    ino_t d_ino;
+#endif
+} DirEntry;
+
+static void
+DirEntry_dealloc(DirEntry *entry)
+{
+    Py_XDECREF(entry->name);
+    Py_XDECREF(entry->path);
+    Py_XDECREF(entry->stat);
+    Py_XDECREF(entry->lstat);
+    Py_TYPE(entry)->tp_free((PyObject *)entry);
+}
+
+/* Forward reference */
+static int
+DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits);
+
+/* Set exception and return -1 on error, 0 for False, 1 for True */
+static int
+DirEntry_is_symlink(DirEntry *self)
+{
+#ifdef MS_WINDOWS
+    return (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK;
+#elif defined(HAVE_DIRENT_D_TYPE)
+    /* POSIX */
+    if (self->d_type != DT_UNKNOWN)
+        return self->d_type == DT_LNK;
+    else
+        return DirEntry_test_mode(self, 0, S_IFLNK);
+#else
+    /* POSIX without d_type */
+    return DirEntry_test_mode(self, 0, S_IFLNK);
+#endif
+}
+
+static PyObject *
+DirEntry_py_is_symlink(DirEntry *self)
+{
+    int result;
+
+    result = DirEntry_is_symlink(self);
+    if (result == -1)
+        return NULL;
+    return PyBool_FromLong(result);
+}
+
+static PyObject *
+DirEntry_fetch_stat(DirEntry *self, int follow_symlinks)
+{
+    int result;
+    struct _Py_stat_struct st;
+
+#ifdef MS_WINDOWS
+    wchar_t *path;
+
+    path = PyUnicode_AsUnicode(self->path);
+    if (!path)
+        return NULL;
+
+    if (follow_symlinks)
+        result = win32_stat_w(path, &st);
+    else
+        result = win32_lstat_w(path, &st);
+
+    if (result != 0) {
+        return PyErr_SetExcFromWindowsErrWithFilenameObject(PyExc_OSError,
+                                                            0, self->path);
+    }
+#else /* POSIX */
+    PyObject *bytes;
+    char *path;
+
+    if (!PyUnicode_FSConverter(self->path, &bytes))
+        return NULL;
+    path = PyBytes_AS_STRING(bytes);
+
+    if (follow_symlinks)
+        result = STAT(path, &st);
+    else
+        result = LSTAT(path, &st);
+    Py_DECREF(bytes);
+
+    if (result != 0)
+        return PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, self->path);
+#endif
+
+    return _pystat_fromstructstat(&st);
+}
+
+static PyObject *
+DirEntry_get_lstat(DirEntry *self)
+{
+    if (!self->lstat) {
+#ifdef MS_WINDOWS
+        self->lstat = _pystat_fromstructstat(&self->win32_lstat);
+#else /* POSIX */
+        self->lstat = DirEntry_fetch_stat(self, 0);
+#endif
+    }
+    Py_XINCREF(self->lstat);
+    return self->lstat;
+}
+
+static PyObject *
+DirEntry_get_stat(DirEntry *self, int follow_symlinks)
+{
+    if (!follow_symlinks)
+        return DirEntry_get_lstat(self);
+
+    if (!self->stat) {
+        int result = DirEntry_is_symlink(self);
+        if (result == -1)
+            return NULL;
+        else if (result)
+            self->stat = DirEntry_fetch_stat(self, 1);
+        else
+            self->stat = DirEntry_get_lstat(self);
+    }
+
+    Py_XINCREF(self->stat);
+    return self->stat;
+}
+
+static PyObject *
+DirEntry_stat(DirEntry *self, PyObject *args, PyObject *kwargs)
+{
+    int follow_symlinks = 1;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|$p:DirEntry.stat",
+                                     follow_symlinks_keywords, &follow_symlinks))
+        return NULL;
+
+    return DirEntry_get_stat(self, follow_symlinks);
+}
+
+/* Set exception and return -1 on error, 0 for False, 1 for True */
+static int
+DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits)
+{
+    PyObject *stat = NULL;
+    PyObject *st_mode = NULL;
+    long mode;
+    int result;
+#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
+    int is_symlink;
+    int need_stat;
+#endif
+#ifdef MS_WINDOWS
+    unsigned long dir_bits;
+#endif
+    _Py_IDENTIFIER(st_mode);
+
+#ifdef MS_WINDOWS
+    is_symlink = (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK;
+    need_stat = follow_symlinks && is_symlink;
+#elif defined(HAVE_DIRENT_D_TYPE)
+    is_symlink = self->d_type == DT_LNK;
+    need_stat = self->d_type == DT_UNKNOWN || (follow_symlinks && is_symlink);
+#endif
+
+#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
+    if (need_stat) {
+#endif
+        stat = DirEntry_get_stat(self, follow_symlinks);
+        if (!stat) {
+            if (PyErr_ExceptionMatches(PyExc_FileNotFoundError)) {
+                /* If file doesn't exist (anymore), then return False
+                   (i.e., say it's not a file/directory) */
+                PyErr_Clear();
+                return 0;
+            }
+            goto error;
+        }
+        st_mode = _PyObject_GetAttrId(stat, &PyId_st_mode);
+        if (!st_mode)
+            goto error;
+
+        mode = PyLong_AsLong(st_mode);
+        if (mode == -1 && PyErr_Occurred())
+            goto error;
+        Py_CLEAR(st_mode);
+        Py_CLEAR(stat);
+        result = (mode & S_IFMT) == mode_bits;
+#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
+    }
+    else if (is_symlink) {
+        assert(mode_bits != S_IFLNK);
+        result = 0;
+    }
+    else {
+        assert(mode_bits == S_IFDIR || mode_bits == S_IFREG);
+#ifdef MS_WINDOWS
+        dir_bits = self->win32_lstat.st_file_attributes & FILE_ATTRIBUTE_DIRECTORY;
+        if (mode_bits == S_IFDIR)
+            result = dir_bits != 0;
+        else
+            result = dir_bits == 0;
+#else /* POSIX */
+        if (mode_bits == S_IFDIR)
+            result = self->d_type == DT_DIR;
+        else
+            result = self->d_type == DT_REG;
+#endif
+    }
+#endif
+
+    return result;
+
+error:
+    Py_XDECREF(st_mode);
+    Py_XDECREF(stat);
+    return -1;
+}
+
+static PyObject *
+DirEntry_py_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits)
+{
+    int result;
+
+    result = DirEntry_test_mode(self, follow_symlinks, mode_bits);
+    if (result == -1)
+        return NULL;
+    return PyBool_FromLong(result);
+}
+
+static PyObject *
+DirEntry_is_dir(DirEntry *self, PyObject *args, PyObject *kwargs)
+{
+    int follow_symlinks = 1;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|$p:DirEntry.is_dir",
+                                     follow_symlinks_keywords, &follow_symlinks))
+        return NULL;
+
+    return DirEntry_py_test_mode(self, follow_symlinks, S_IFDIR);
+}
+
+static PyObject *
+DirEntry_is_file(DirEntry *self, PyObject *args, PyObject *kwargs)
+{
+    int follow_symlinks = 1;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|$p:DirEntry.is_file",
+                                     follow_symlinks_keywords, &follow_symlinks))
+        return NULL;
+
+    return DirEntry_py_test_mode(self, follow_symlinks, S_IFREG);
+}
+
+static PyObject *
+DirEntry_inode(DirEntry *self)
+{
+#ifdef MS_WINDOWS
+    if (!self->got_file_index) {
+        wchar_t *path;
+        struct _Py_stat_struct stat;
+
+        path = PyUnicode_AsUnicode(self->path);
+        if (!path)
+            return NULL;
+
+        if (win32_lstat_w(path, &stat) != 0) {
+            return PyErr_SetExcFromWindowsErrWithFilenameObject(PyExc_OSError,
+                                                                0, self->path);
+        }
+
+        self->win32_file_index = stat.st_ino;
+        self->got_file_index = 1;
+    }
+    return PyLong_FromLongLong((PY_LONG_LONG)self->win32_file_index);
+#else /* POSIX */
+#ifdef HAVE_LARGEFILE_SUPPORT
+    return PyLong_FromLongLong((PY_LONG_LONG)self->d_ino);
+#else
+    return PyLong_FromLong((long)self->d_ino);
+#endif
+#endif
+}
+
+static PyObject *
+DirEntry_repr(DirEntry *self)
+{
+    return PyUnicode_FromFormat("<DirEntry %R>", self->name);
+}
+
+static PyMemberDef DirEntry_members[] = {
+    {"name", T_OBJECT_EX, offsetof(DirEntry, name), READONLY,
+     "the entry's base filename, relative to scandir() \"path\" argument"},
+    {"path", T_OBJECT_EX, offsetof(DirEntry, path), READONLY,
+     "the entry's full path name; equivalent to os.path.join(scandir_path, entry.name)"},
+    {NULL}
+};
+
+static PyMethodDef DirEntry_methods[] = {
+    {"is_dir", (PyCFunction)DirEntry_is_dir, METH_VARARGS | METH_KEYWORDS,
+     "return True if the entry is a directory; cached per entry"
+    },
+    {"is_file", (PyCFunction)DirEntry_is_file, METH_VARARGS | METH_KEYWORDS,
+     "return True if the entry is a file; cached per entry"
+    },
+    {"is_symlink", (PyCFunction)DirEntry_py_is_symlink, METH_NOARGS,
+     "return True if the entry is a symbolic link; cached per entry"
+    },
+    {"stat", (PyCFunction)DirEntry_stat, METH_VARARGS | METH_KEYWORDS,
+     "return stat_result object for the entry; cached per entry"
+    },
+    {"inode", (PyCFunction)DirEntry_inode, METH_NOARGS,
+     "return inode of the entry; cached per entry",
+    },
+    {NULL}
+};
+
+PyTypeObject DirEntryType = {
+    PyVarObject_HEAD_INIT(NULL, 0)
+    MODNAME ".DirEntry",                    /* tp_name */
+    sizeof(DirEntry),                       /* tp_basicsize */
+    0,                                      /* tp_itemsize */
+    /* methods */
+    (destructor)DirEntry_dealloc,           /* tp_dealloc */
+    0,                                      /* tp_print */
+    0,                                      /* tp_getattr */
+    0,                                      /* tp_setattr */
+    0,                                      /* tp_compare */
+    (reprfunc)DirEntry_repr,                /* tp_repr */
+    0,                                      /* tp_as_number */
+    0,                                      /* tp_as_sequence */
+    0,                                      /* tp_as_mapping */
+    0,                                      /* tp_hash */
+    0,                                      /* tp_call */
+    0,                                      /* tp_str */
+    0,                                      /* tp_getattro */
+    0,                                      /* tp_setattro */
+    0,                                      /* tp_as_buffer */
+    Py_TPFLAGS_DEFAULT,                     /* tp_flags */
+    0,                                      /* tp_doc */
+    0,                                      /* tp_traverse */
+    0,                                      /* tp_clear */
+    0,                                      /* tp_richcompare */
+    0,                                      /* tp_weaklistoffset */
+    0,                                      /* tp_iter */
+    0,                                      /* tp_iternext */
+    DirEntry_methods,                       /* tp_methods */
+    DirEntry_members,                       /* tp_members */
+};
+
+#ifdef MS_WINDOWS
+
+static wchar_t *
+join_path_filenameW(wchar_t *path_wide, wchar_t* filename)
+{
+    Py_ssize_t path_len;
+    Py_ssize_t size;
+    wchar_t *result;
+    wchar_t ch;
+
+    if (!path_wide) { /* Default arg: "." */
+        path_wide = L".";
+        path_len = 1;
+    }
+    else {
+        path_len = wcslen(path_wide);
+    }
+
+    /* The +1's are for the path separator and the NUL */
+    size = path_len + 1 + wcslen(filename) + 1;
+    result = PyMem_New(wchar_t, size);
+    if (!result) {
+        PyErr_NoMemory();
+        return NULL;
+    }
+    wcscpy(result, path_wide);
+    if (path_len > 0) {
+        ch = result[path_len - 1];
+        if (ch != SEP && ch != ALTSEP && ch != L':')
+            result[path_len++] = SEP;
+        wcscpy(result + path_len, filename);
+    }
+    return result;
+}
+
+static PyObject *
+DirEntry_from_find_data(path_t *path, WIN32_FIND_DATAW *dataW)
+{
+    DirEntry *entry;
+    BY_HANDLE_FILE_INFORMATION file_info;
+    ULONG reparse_tag;
+    wchar_t *joined_path;
+
+    entry = PyObject_New(DirEntry, &DirEntryType);
+    if (!entry)
+        return NULL;
+    entry->name = NULL;
+    entry->path = NULL;
+    entry->stat = NULL;
+    entry->lstat = NULL;
+    entry->got_file_index = 0;
+
+    entry->name = PyUnicode_FromWideChar(dataW->cFileName, -1);
+    if (!entry->name)
+        goto error;
+
+    joined_path = join_path_filenameW(path->wide, dataW->cFileName);
+    if (!joined_path)
+        goto error;
+
+    entry->path = PyUnicode_FromWideChar(joined_path, -1);
+    PyMem_Free(joined_path);
+    if (!entry->path)
+        goto error;
+
+    find_data_to_file_info_w(dataW, &file_info, &reparse_tag);
+    _Py_attribute_data_to_stat(&file_info, reparse_tag, &entry->win32_lstat);
+
+    return (PyObject *)entry;
+
+error:
+    Py_DECREF(entry);
+    return NULL;
+}
+
+#else /* POSIX */
+
+static char *
+join_path_filename(char *path_narrow, char* filename, Py_ssize_t filename_len)
+{
+    Py_ssize_t path_len;
+    Py_ssize_t size;
+    char *result;
+
+    if (!path_narrow) { /* Default arg: "." */
+        path_narrow = ".";
+        path_len = 1;
+    }
+    else {
+        path_len = strlen(path_narrow);
+    }
+
+    if (filename_len == -1)
+        filename_len = strlen(filename);
+
+    /* The +1's are for the path separator and the NUL */
+    size = path_len + 1 + filename_len + 1;
+    result = PyMem_New(char, size);
+    if (!result) {
+        PyErr_NoMemory();
+        return NULL;
+    }
+    strcpy(result, path_narrow);
+    if (path_len > 0 && result[path_len - 1] != '/')
+        result[path_len++] = '/';
+    strcpy(result + path_len, filename);
+    return result;
+}
+
+static PyObject *
+DirEntry_from_posix_info(path_t *path, char *name, Py_ssize_t name_len,
+                         ino_t d_ino
+#ifdef HAVE_DIRENT_D_TYPE
+                         , unsigned char d_type
+#endif
+                         )
+{
+    DirEntry *entry;
+    char *joined_path;
+
+    entry = PyObject_New(DirEntry, &DirEntryType);
+    if (!entry)
+        return NULL;
+    entry->name = NULL;
+    entry->path = NULL;
+    entry->stat = NULL;
+    entry->lstat = NULL;
+
+    joined_path = join_path_filename(path->narrow, name, name_len);
+    if (!joined_path)
+        goto error;
+
+    if (!path->narrow || !PyBytes_Check(path->object)) {
+        entry->name = PyUnicode_DecodeFSDefaultAndSize(name, name_len);
+        entry->path = PyUnicode_DecodeFSDefault(joined_path);
+    }
+    else {
+        entry->name = PyBytes_FromStringAndSize(name, name_len);
+        entry->path = PyBytes_FromString(joined_path);
+    }
+    PyMem_Free(joined_path);
+    if (!entry->name || !entry->path)
+        goto error;
+
+#ifdef HAVE_DIRENT_D_TYPE
+    entry->d_type = d_type;
+#endif
+    entry->d_ino = d_ino;
+
+    return (PyObject *)entry;
+
+error:
+    Py_XDECREF(entry);
+    return NULL;
+}
+
+#endif
+
+
+typedef struct {
+    PyObject_HEAD
+    path_t path;
+#ifdef MS_WINDOWS
+    HANDLE handle;
+    WIN32_FIND_DATAW file_data;
+    int first_time;
+#else /* POSIX */
+    DIR *dirp;
+#endif
+} ScandirIterator;
+
+#ifdef MS_WINDOWS
+
+static void
+ScandirIterator_close(ScandirIterator *iterator)
+{
+    if (iterator->handle == INVALID_HANDLE_VALUE)
+        return;
+
+    Py_BEGIN_ALLOW_THREADS
+    FindClose(iterator->handle);
+    Py_END_ALLOW_THREADS
+    iterator->handle = INVALID_HANDLE_VALUE;
+}
+
+static PyObject *
+ScandirIterator_iternext(ScandirIterator *iterator)
+{
+    WIN32_FIND_DATAW *file_data = &iterator->file_data;
+    BOOL success;
+
+    /* Happens if the iterator is iterated twice */
+    if (iterator->handle == INVALID_HANDLE_VALUE) {
+        PyErr_SetNone(PyExc_StopIteration);
+        return NULL;
+    }
+
+    while (1) {
+        if (!iterator->first_time) {
+            Py_BEGIN_ALLOW_THREADS
+            success = FindNextFileW(iterator->handle, file_data);
+            Py_END_ALLOW_THREADS
+            if (!success) {
+                if (GetLastError() != ERROR_NO_MORE_FILES)
+                    return path_error(&iterator->path);
+                /* No more files found in directory, stop iterating */
+                break;
+            }
+        }
+        iterator->first_time = 0;
+
+        /* Skip over . and .. */
+        if (wcscmp(file_data->cFileName, L".") != 0 &&
+                wcscmp(file_data->cFileName, L"..") != 0)
+            return DirEntry_from_find_data(&iterator->path, file_data);
+
+        /* Loop until we get a non-dot entry or finish iterating */
+    }
+
+    ScandirIterator_close(iterator);
+
+    PyErr_SetNone(PyExc_StopIteration);
+    return NULL;
+}
+
+#else /* POSIX */
+
+static void
+ScandirIterator_close(ScandirIterator *iterator)
+{
+    if (!iterator->dirp)
+        return;
+
+    Py_BEGIN_ALLOW_THREADS
+    closedir(iterator->dirp);
+    Py_END_ALLOW_THREADS
+    iterator->dirp = NULL;
+    return;
+}
+
+static PyObject *
+ScandirIterator_iternext(ScandirIterator *iterator)
+{
+    struct dirent *direntp;
+    Py_ssize_t name_len;
+    int is_dot;
+
+    /* Happens if the iterator is iterated twice */
+    if (!iterator->dirp) {
+        PyErr_SetNone(PyExc_StopIteration);
+        return NULL;
+    }
+
+    while (1) {
+        errno = 0;
+        Py_BEGIN_ALLOW_THREADS
+        direntp = readdir(iterator->dirp);
+        Py_END_ALLOW_THREADS
+
+        if (!direntp) {
+            if (errno != 0)
+                return path_error(&iterator->path);
+            /* No more files found in directory, stop iterating */
+            break;
+        }
+
+        /* Skip over . and .. */
+        name_len = NAMLEN(direntp);
+        is_dot = direntp->d_name[0] == '.' &&
+                 (name_len == 1 || (direntp->d_name[1] == '.' && name_len == 2));
+        if (!is_dot) {
+            return DirEntry_from_posix_info(&iterator->path, direntp->d_name,
+                                            name_len, direntp->d_ino
+#ifdef HAVE_DIRENT_D_TYPE
+                                            , direntp->d_type
+#endif
+                                            );
+        }
+
+        /* Loop until we get a non-dot entry or finish iterating */
+    }
+
+    ScandirIterator_close(iterator);
+
+    PyErr_SetNone(PyExc_StopIteration);
+    return NULL;
+}
+
+#endif
+
+static void
+ScandirIterator_dealloc(ScandirIterator *iterator)
+{
+    ScandirIterator_close(iterator);
+    Py_XDECREF(iterator->path.object);
+    path_cleanup(&iterator->path);
+    Py_TYPE(iterator)->tp_free((PyObject *)iterator);
+}
+
+PyTypeObject ScandirIteratorType = {
+    PyVarObject_HEAD_INIT(NULL, 0)
+    MODNAME ".ScandirIterator",             /* tp_name */
+    sizeof(ScandirIterator),                /* tp_basicsize */
+    0,                                      /* tp_itemsize */
+    /* methods */
+    (destructor)ScandirIterator_dealloc,    /* tp_dealloc */
+    0,                                      /* tp_print */
+    0,                                      /* tp_getattr */
+    0,                                      /* tp_setattr */
+    0,                                      /* tp_compare */
+    0,                                      /* tp_repr */
+    0,                                      /* tp_as_number */
+    0,                                      /* tp_as_sequence */
+    0,                                      /* tp_as_mapping */
+    0,                                      /* tp_hash */
+    0,                                      /* tp_call */
+    0,                                      /* tp_str */
+    0,                                      /* tp_getattro */
+    0,                                      /* tp_setattro */
+    0,                                      /* tp_as_buffer */
+    Py_TPFLAGS_DEFAULT,                     /* tp_flags */
+    0,                                      /* tp_doc */
+    0,                                      /* tp_traverse */
+    0,                                      /* tp_clear */
+    0,                                      /* tp_richcompare */
+    0,                                      /* tp_weaklistoffset */
+    PyObject_SelfIter,                      /* tp_iter */
+    (iternextfunc)ScandirIterator_iternext, /* tp_iternext */
+};
+
+static PyObject *
+posix_scandir(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+    ScandirIterator *iterator;
+    static char *keywords[] = {"path", NULL};
+#ifdef MS_WINDOWS
+    wchar_t *path_strW;
+#else
+    char *path;
+#endif
+
+    iterator = PyObject_New(ScandirIterator, &ScandirIteratorType);
+    if (!iterator)
+        return NULL;
+    memset(&iterator->path, 0, sizeof(path_t));
+    iterator->path.function_name = "scandir";
+    iterator->path.nullable = 1;
+
+#ifdef MS_WINDOWS
+    iterator->handle = INVALID_HANDLE_VALUE;
+#else
+    iterator->dirp = NULL;
+#endif
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O&:scandir", keywords,
+                                     path_converter, &iterator->path))
+        goto error;
+
+    /* path_converter doesn't keep path.object around, so do it
+       manually for the lifetime of the iterator here (the refcount
+       is decremented in ScandirIterator_dealloc)
+    */
+    Py_XINCREF(iterator->path.object);
+
+#ifdef MS_WINDOWS
+    if (iterator->path.narrow) {
+        PyErr_SetString(PyExc_TypeError,
+                        "os.scandir() doesn't support bytes path on Windows, use Unicode instead");
+        goto error;
+    }
+    iterator->first_time = 1;
+
+    path_strW = join_path_filenameW(iterator->path.wide, L"*.*");
+    if (!path_strW)
+        goto error;
+
+    Py_BEGIN_ALLOW_THREADS
+    iterator->handle = FindFirstFileW(path_strW, &iterator->file_data);
+    Py_END_ALLOW_THREADS
+
+    PyMem_Free(path_strW);
+
+    if (iterator->handle == INVALID_HANDLE_VALUE) {
+        path_error(&iterator->path);
+        goto error;
+    }
+#else /* POSIX */
+    if (iterator->path.narrow)
+        path = iterator->path.narrow;
+    else
+        path = ".";
+
+    errno = 0;
+    Py_BEGIN_ALLOW_THREADS
+    iterator->dirp = opendir(path);
+    Py_END_ALLOW_THREADS
+
+    if (!iterator->dirp) {
+        path_error(&iterator->path);
+        goto error;
+    }
+#endif
+
+    return (PyObject *)iterator;
+
+error:
+    Py_DECREF(iterator);
+    return NULL;
+}
+
+
 /*[clinic input]
 dump buffer
 [clinic start generated code]*/
@@ -17216,6 +17796,9 @@
     {"get_blocking", posix_get_blocking, METH_VARARGS, get_blocking__doc__},
     {"set_blocking", posix_set_blocking, METH_VARARGS, set_blocking__doc__},
 #endif
+    {"scandir",         (PyCFunction)posix_scandir,
+                        METH_VARARGS | METH_KEYWORDS,
+                        posix_scandir__doc__},
     {NULL,              NULL}            /* Sentinel */
 };
 
@@ -17658,15 +18241,6 @@
 }
 
 
-#ifdef MS_WINDOWS
-#define INITFUNC PyInit_nt
-#define MODNAME "nt"
-
-#else
-#define INITFUNC PyInit_posix
-#define MODNAME "posix"
-#endif
-
 static struct PyModuleDef posixmodule = {
     PyModuleDef_HEAD_INIT,
     MODNAME,
@@ -17887,6 +18461,12 @@
         if (PyStructSequence_InitType2(&TerminalSizeType,
                                        &TerminalSize_desc) < 0)
             return NULL;
+
+        /* initialize scandir types */
+        if (PyType_Ready(&ScandirIteratorType) < 0)
+            return NULL;
+        if (PyType_Ready(&DirEntryType) < 0)
+            return NULL;
     }
 #if defined(HAVE_WAITID) && !defined(__APPLE__)
     Py_INCREF((PyObject*) &WaitidResultType);
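The large block added above implements PEP 471's os.scandir(): ScandirIterator wraps FindFirstFileW()/FindNextFileW() on Windows and opendir()/readdir() elsewhere, and each yielded DirEntry caches its name, path and any stat information already obtained, so is_dir()/is_file()/is_symlink()/stat() usually avoid an extra system call. A hedged sketch of driving the new iterator from C purely through the public abstract APIs (the helper name is made up; error handling is abbreviated):

    #include <Python.h>

    /* Count the entries under "path" by iterating os.scandir(path). */
    static Py_ssize_t
    count_entries(const char *path)
    {
        Py_ssize_t count = 0;
        PyObject *os = PyImport_ImportModule("os");
        if (os == NULL)
            return -1;
        PyObject *iter = PyObject_CallMethod(os, "scandir", "s", path);
        Py_DECREF(os);
        if (iter == NULL)
            return -1;

        PyObject *entry;
        while ((entry = PyIter_Next(iter)) != NULL) {
            /* DirEntry.is_dir()/stat() reuse data fetched during iteration
               where possible, so calling them here is typically cheap. */
            count++;
            Py_DECREF(entry);
        }
        Py_DECREF(iter);
        if (PyErr_Occurred())
            return -1;
        return count;
    }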
diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c
index 3f34e63..21cb04a 100644
--- a/Modules/pyexpat.c
+++ b/Modules/pyexpat.c
@@ -1093,7 +1093,7 @@
     for (i = 0; handler_info[i].name != NULL; i++)
         /* do nothing */;
 
-    new_parser->handlers = PyMem_Malloc(sizeof(PyObject *) * i);
+    new_parser->handlers = PyMem_New(PyObject *, i);
     if (!new_parser->handlers) {
         Py_DECREF(new_parser);
         return PyErr_NoMemory();
@@ -1416,7 +1416,7 @@
     for (i = 0; handler_info[i].name != NULL; i++)
         /* do nothing */;
 
-    self->handlers = PyMem_Malloc(sizeof(PyObject *) * i);
+    self->handlers = PyMem_New(PyObject *, i);
     if (!self->handlers) {
         Py_DECREF(self);
         return PyErr_NoMemory();
diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c
index eabe2f0..3ad8ebb 100644
--- a/Modules/signalmodule.c
+++ b/Modules/signalmodule.c
@@ -503,24 +503,23 @@
 static PyObject *
 signal_set_wakeup_fd(PyObject *self, PyObject *args)
 {
+    struct _Py_stat_struct st;
 #ifdef MS_WINDOWS
     PyObject *fdobj;
-    SOCKET_T fd, old_fd;
+    SOCKET_T sockfd, old_sockfd;
     int res;
     int res_size = sizeof res;
     PyObject *mod;
-    struct stat st;
     int is_socket;
 
     if (!PyArg_ParseTuple(args, "O:set_wakeup_fd", &fdobj))
         return NULL;
 
-    fd = PyLong_AsSocket_t(fdobj);
-    if (fd == (SOCKET_T)(-1) && PyErr_Occurred())
+    sockfd = PyLong_AsSocket_t(fdobj);
+    if (sockfd == (SOCKET_T)(-1) && PyErr_Occurred())
         return NULL;
 #else
     int fd, old_fd;
-    struct stat st;
 
     if (!PyArg_ParseTuple(args, "i:set_wakeup_fd", &fd))
         return NULL;
@@ -536,7 +535,7 @@
 
 #ifdef MS_WINDOWS
     is_socket = 0;
-    if (fd != INVALID_FD) {
+    if (sockfd != INVALID_FD) {
         /* Import the _socket module to call WSAStartup() */
         mod = PyImport_ImportModuleNoBlock("_socket");
         if (mod == NULL)
@@ -544,21 +543,24 @@
         Py_DECREF(mod);
 
         /* test the socket */
-        if (getsockopt(fd, SOL_SOCKET, SO_ERROR,
+        if (getsockopt(sockfd, SOL_SOCKET, SO_ERROR,
                        (char *)&res, &res_size) != 0) {
-            int err = WSAGetLastError();
+            int fd, err;
+
+            err = WSAGetLastError();
             if (err != WSAENOTSOCK) {
                 PyErr_SetExcFromWindowsErr(PyExc_OSError, err);
                 return NULL;
             }
 
-            if (!_PyVerify_fd(fd)) {
+            fd = (int)sockfd;
+            if ((SOCKET_T)fd != sockfd || !_PyVerify_fd(fd)) {
                 PyErr_SetString(PyExc_ValueError, "invalid fd");
                 return NULL;
             }
 
-            if (fstat(fd, &st) != 0) {
-                PyErr_SetFromErrno(PyExc_OSError);
+            if (_Py_fstat(fd, &st) != 0) {
+                PyErr_SetExcFromWindowsErr(PyExc_OSError, GetLastError());
                 return NULL;
             }
 
@@ -572,12 +574,12 @@
         }
     }
 
-    old_fd = wakeup.fd;
-    wakeup.fd = fd;
+    old_sockfd = wakeup.fd;
+    wakeup.fd = sockfd;
     wakeup.use_send = is_socket;
 
-    if (old_fd != INVALID_FD)
-        return PyLong_FromSocket_t(old_fd);
+    if (old_sockfd != INVALID_FD)
+        return PyLong_FromSocket_t(old_sockfd);
     else
         return PyLong_FromLong(-1);
 #else
@@ -589,7 +591,7 @@
             return NULL;
         }
 
-        if (fstat(fd, &st) != 0) {
+        if (_Py_fstat(fd, &st) != 0) {
             PyErr_SetFromErrno(PyExc_OSError);
             return NULL;
         }
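signal_set_wakeup_fd() now uses the private _Py_fstat() helper together with struct _Py_stat_struct, the same portable stat structure the new posixmodule code above uses for its win32_stat()/win32_lstat() wrappers. As the hunks show, the helper only fills the structure and returns non-zero on failure; the caller decides how to report the error (errno on POSIX, GetLastError() on Windows). A small sketch under that assumption; the helper name fd_is_fifo is illustrative:

    /* Sketch: classify a descriptor via the portable stat structure. */
    static int
    fd_is_fifo(int fd)
    {
        struct _Py_stat_struct st;
        if (_Py_fstat(fd, &st) != 0)
            return -1;                 /* caller reports the error */
        return (st.st_mode & S_IFMT) == S_IFIFO;
    }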
diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c
index 5913e65..057430b 100644
--- a/Modules/socketmodule.c
+++ b/Modules/socketmodule.c
@@ -1966,8 +1966,15 @@
     #pragma clang diagnostic push
     #pragma clang diagnostic ignored "-Wtautological-compare"
     #endif
+    #if defined(__GNUC__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 5)))
+    #pragma GCC diagnostic push
+    #pragma GCC diagnostic ignored "-Wtype-limits"
+    #endif
     if (msg->msg_controllen < 0)
         return 0;
+    #if defined(__GNUC__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 5)))
+    #pragma GCC diagnostic pop
+    #endif
     #ifdef __clang__
     #pragma clang diagnostic pop
     #endif
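The comparison guarded above (msg->msg_controllen < 0) is deliberately kept even though msg_controllen is unsigned on many platforms, where GCC's -Wtype-limits flags it as always false; the new pragmas silence that one warning locally, mirroring the existing clang treatment. The version test matters because #pragma GCC diagnostic push/pop only became available (and usable inside functions) with GCC 4.6. A self-contained sketch of the pattern around an illustrative function:

    #include <stddef.h>

    static int
    control_len_ok(size_t controllen, size_t limit)
    {
    #if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
    #pragma GCC diagnostic push
    #pragma GCC diagnostic ignored "-Wtype-limits"
    #endif
        /* Always false for an unsigned type; kept for platforms where the
           field is signed.  The pragmas keep -Wtype-limits quiet here only. */
        if (controllen < 0)
            return 0;
    #if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
    #pragma GCC diagnostic pop
    #endif
        return controllen <= limit;
    }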
@@ -4213,9 +4220,11 @@
 
     /* MSDN says ERROR_MORE_DATA may occur because DNS allows longer
        names */
-    name = PyMem_Malloc(size * sizeof(wchar_t));
-    if (!name)
+    name = PyMem_New(wchar_t, size);
+    if (!name) {
+        PyErr_NoMemory();
         return NULL;
+    }
     if (!GetComputerNameExW(ComputerNamePhysicalDnsHostname,
                            name,
                            &size))
diff --git a/Modules/socketmodule.h b/Modules/socketmodule.h
index b83f9af..4b6a10e 100644
--- a/Modules/socketmodule.h
+++ b/Modules/socketmodule.h
@@ -14,6 +14,13 @@
 
 #else /* MS_WINDOWS */
 # include <winsock2.h>
+/* Windows 'supports' CMSG_LEN, but does not follow the POSIX standard
+ * interface at all, so there is no point including the code that
+ * attempts to use it.
+ */
+# ifdef PySocket_BUILDING_SOCKET
+#  undef CMSG_LEN
+# endif
 # include <ws2tcpip.h>
 /* VC6 is shipped with old platform headers, and does not have MSTcpIP.h
  * Separate SDKs have all the functions we want, but older ones don't have
diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c
index ec70e7a..507cef3 100644
--- a/Modules/unicodedata.c
+++ b/Modules/unicodedata.c
@@ -553,10 +553,17 @@
 
     stackptr = 0;
     isize = PyUnicode_GET_LENGTH(input);
+    space = isize;
     /* Overallocate at most 10 characters. */
-    space = (isize > 10 ? 10 : isize) + isize;
+    if (space > 10) {
+        if (space <= PY_SSIZE_T_MAX - 10)
+            space += 10;
+    }
+    else {
+        space *= 2;
+    }
     osize = space;
-    output = PyMem_Malloc(space * sizeof(Py_UCS4));
+    output = PyMem_NEW(Py_UCS4, space);
     if (!output) {
         PyErr_NoMemory();
         return NULL;
@@ -703,7 +710,7 @@
     /* We allocate a buffer for the output.
        If we find that we made no changes, we still return
        the NFD result. */
-    output = PyMem_Malloc(len * sizeof(Py_UCS4));
+    output = PyMem_NEW(Py_UCS4, len);
     if (!output) {
         PyErr_NoMemory();
         Py_DECREF(result);
diff --git a/Modules/zipimport.c b/Modules/zipimport.c
index 8fe9195..f2cc245 100644
--- a/Modules/zipimport.c
+++ b/Modules/zipimport.c
@@ -233,7 +233,7 @@
     Py_ssize_t len;
 
     len = PyUnicode_GET_LENGTH(prefix) + PyUnicode_GET_LENGTH(name) + 1;
-    p = buf = PyMem_Malloc(sizeof(Py_UCS4) * len);
+    p = buf = PyMem_New(Py_UCS4, len);
     if (buf == NULL) {
         PyErr_NoMemory();
         return NULL;
diff --git a/Objects/abstract.c b/Objects/abstract.c
index 06e3382..ab13476 100644
--- a/Objects/abstract.c
+++ b/Objects/abstract.c
@@ -2073,37 +2073,70 @@
     return PyEval_CallObjectWithKeywords(o, a, NULL);
 }
 
+PyObject*
+_Py_CheckFunctionResult(PyObject *result, const char *func_name)
+{
+    int err_occurred = (PyErr_Occurred() != NULL);
+
+#ifndef NDEBUG
+    /* In debug mode: abort() with an assertion error. Use two different
+       assertions, so if an assertion fails, it's possible to know
+       if result was set or not and if an exception was raised or not. */
+    if (result != NULL)
+        assert(!err_occurred);
+    else
+        assert(err_occurred);
+#endif
+
+    if (result == NULL) {
+        if (!err_occurred) {
+            PyErr_Format(PyExc_SystemError,
+                         "NULL result without error in %s", func_name);
+            return NULL;
+        }
+    }
+    else {
+        if (err_occurred) {
+            PyObject *exc, *val, *tb;
+            PyErr_Fetch(&exc, &val, &tb);
+
+            Py_DECREF(result);
+
+            PyErr_Format(PyExc_SystemError,
+                         "result with error in %s", func_name);
+            _PyErr_ChainExceptions(exc, val, tb);
+            return NULL;
+        }
+    }
+    return result;
+}
+
 PyObject *
 PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw)
 {
     ternaryfunc call;
+    PyObject *result;
 
     /* PyObject_Call() must not be called with an exception set,
        because it may clear it (directly or indirectly) and so the
        caller looses its exception */
     assert(!PyErr_Occurred());
 
-    if ((call = func->ob_type->tp_call) != NULL) {
-        PyObject *result;
-        if (Py_EnterRecursiveCall(" while calling a Python object"))
-            return NULL;
-        result = (*call)(func, arg, kw);
-        Py_LeaveRecursiveCall();
-#ifdef NDEBUG
-        if (result == NULL && !PyErr_Occurred()) {
-            PyErr_SetString(
-                PyExc_SystemError,
-                "NULL result without error in PyObject_Call");
-        }
-#else
-        assert((result != NULL && !PyErr_Occurred())
-                || (result == NULL && PyErr_Occurred()));
-#endif
-        return result;
+    call = func->ob_type->tp_call;
+    if (call == NULL) {
+        PyErr_Format(PyExc_TypeError, "'%.200s' object is not callable",
+                     func->ob_type->tp_name);
+        return NULL;
     }
-    PyErr_Format(PyExc_TypeError, "'%.200s' object is not callable",
-                 func->ob_type->tp_name);
-    return NULL;
+
+    if (Py_EnterRecursiveCall(" while calling a Python object"))
+        return NULL;
+
+    result = (*call)(func, arg, kw);
+
+    Py_LeaveRecursiveCall();
+
+    return _Py_CheckFunctionResult(result, "PyObject_Call");
 }
 
 static PyObject*
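PyObject_Call() now routes every result through the new _Py_CheckFunctionResult(), which turns the two classic C-extension contract violations into a SystemError even in release builds: returning NULL without setting an exception, and returning a value while an exception is set (the latter case chains the original exception via _PyErr_ChainExceptions()). A hedged sketch of the kind of buggy callable this now diagnoses; the function is deliberately wrong and its name is made up:

    #include <Python.h>

    /* BUG on purpose: reports failure by returning NULL but never calls a
       PyErr_Set* function.  Invoked through the call machinery patched above,
       the caller now receives a SystemError complaining about a NULL result
       without an error, rather than a silent NULL in release builds. */
    static PyObject *
    demo_broken(PyObject *self, PyObject *args)
    {
        int ok = 0;                    /* pretend some operation failed */
        if (!ok)
            return NULL;               /* missing: PyErr_SetString(...) */
        Py_RETURN_NONE;
    }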
diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c
index b2bf098..b015974 100644
--- a/Objects/bytesobject.c
+++ b/Objects/bytesobject.c
@@ -433,105 +433,46 @@
     return result;
 }
 
-/* format_long emulates the format codes d, u, o, x and X, and
- * the F_ALT flag, for Python's long (unbounded) ints.  It's not used for
- * Python's regular ints.
- * Return value:  a new PyBytes*, or NULL if error.
- *  .  *pbuf is set to point into it,
- *     *plen set to the # of chars following that.
- *     Caller must decref it when done using pbuf.
- *     The string starting at *pbuf is of the form
- *         "-"? ("0x" | "0X")? digit+
- *     "0x"/"0X" are present only for x and X conversions, with F_ALT
- *         set in flags.  The case of hex digits will be correct,
- *     There will be at least prec digits, zero-filled on the left if
- *         necessary to get that many.
- * val          object to be converted
- * flags        bitmask of format flags; only F_ALT is looked at
- * prec         minimum number of digits; 0-fill on left if needed
- * type         a character in [duoxX]; u acts the same as d
- *
- * CAUTION:  o, x and X conversions on regular ints can never
- * produce a '-' sign, but can for Python's unbounded ints.
- */
-
-static PyObject *
-format_long(PyObject *val, int flags, int prec, int type,
-	    char **pbuf, int *plen)
-{
-    PyObject *s;
-    PyObject *result = NULL;
-
-    s = _PyUnicode_FormatLong(val, flags & F_ALT, prec, type);
-    if (!s)
-        return NULL;
-    result = _PyUnicode_AsASCIIString(s, "strict");
-    Py_DECREF(s);
-    if (!result)
-        return NULL;
-    *pbuf = PyBytes_AS_STRING(result);
-    *plen = PyBytes_GET_SIZE(result);
-    return result;
-}
-
 Py_LOCAL_INLINE(int)
-formatchar(char *buf, size_t buflen, PyObject *v)
+byte_converter(PyObject *arg, char *p)
 {
-    PyObject *w = NULL;
-    /* convert bytearray to bytes */
-    if (PyByteArray_Check(v)) {
-        w = PyBytes_FromObject(v);
-        if (w == NULL)
-            goto error;
-        v = w;
+    if (PyBytes_Check(arg) && PyBytes_Size(arg) == 1) {
+        *p = PyBytes_AS_STRING(arg)[0];
+        return 1;
     }
-    /* presume that the buffer is at least 2 characters long */
-    if (PyBytes_Check(v)) {
-        if (!PyArg_Parse(v, "c;%c requires an integer in range(256) or a single byte", &buf[0]))
-            goto error;
+    else if (PyByteArray_Check(arg) && PyByteArray_Size(arg) == 1) {
+        *p = PyByteArray_AS_STRING(arg)[0];
+        return 1;
     }
     else {
-        long ival = PyLong_AsLong(v);
-        if (ival == -1 && PyErr_Occurred()) {
-            PyErr_SetString(PyExc_TypeError,
-                "%c requires an integer in range(256) or a single byte");
-            goto error;
+        long ival = PyLong_AsLong(arg);
+        if (0 <= ival && ival <= 255) {
+            *p = (char)ival;
+            return 1;
         }
-        if (ival < 0 || ival > 255) {
-            PyErr_SetString(PyExc_TypeError,
-                "%c requires an integer in range(256) or a single byte");
-            goto error;
-        }
-        buf[0] = (char)ival;
     }
-    Py_XDECREF(w);
-    buf[1] = '\0';
-    return 1;
-
- error:
-    Py_XDECREF(w);
-    return -1;
+    PyErr_SetString(PyExc_TypeError,
+        "%c requires an integer in range(256) or a single byte");
+    return 0;
 }
 
 static PyObject *
-format_obj(PyObject *v)
+format_obj(PyObject *v, const char **pbuf, Py_ssize_t *plen)
 {
-    PyObject *result = NULL, *w = NULL;
-    PyObject *func;
+    PyObject *func, *result;
     _Py_IDENTIFIER(__bytes__);
-    /* convert bytearray to bytes */
-    if (PyByteArray_Check(v)) {
-        w = PyBytes_FromObject(v);
-        if (w == NULL)
-            return NULL;
-        v = w;
-    }
     /* is it a bytes object? */
     if (PyBytes_Check(v)) {
-        result = v;
+        *pbuf = PyBytes_AS_STRING(v);
+        *plen = PyBytes_GET_SIZE(v);
         Py_INCREF(v);
-        Py_XDECREF(w);
-        return result;
+        return v;
+    }
+    if (PyByteArray_Check(v)) {
+        *pbuf = PyByteArray_AS_STRING(v);
+        *plen = PyByteArray_GET_SIZE(v);
+        Py_INCREF(v);
+        return v;
     }
     /* does it support __bytes__? */
     func = _PyObject_LookupSpecial(v, &PyId___bytes__);
@@ -547,6 +488,8 @@
             Py_DECREF(result);
             return NULL;
         }
+        *pbuf = PyBytes_AS_STRING(result);
+        *plen = PyBytes_GET_SIZE(result);
         return result;
     }
     PyErr_Format(PyExc_TypeError,
@@ -573,7 +516,6 @@
     Py_ssize_t reslen, rescnt, fmtcnt;
     int args_owned = 0;
     PyObject *result;
-    PyObject *repr;
     PyObject *dict = NULL;
     if (format == NULL || !PyBytes_Check(format) || args == NULL) {
         PyErr_BadInternalCall();
@@ -619,15 +561,13 @@
             int prec = -1;
             int c = '\0';
             int fill;
-            int isnumok;
+            PyObject *iobj;
             PyObject *v = NULL;
             PyObject *temp = NULL;
-            Py_buffer buf = {NULL, NULL};
-            char *pbuf;
+            const char *pbuf = NULL;
             int sign;
-            Py_ssize_t len;
-            char formatbuf[FORMATBUFLEN];
-                 /* For format{int,char}() */
+            Py_ssize_t len = 0;
+            char onechar; /* For byte_converter() */
 
             fmt++;
             if (*fmt == '(') {
@@ -733,7 +673,7 @@
                             "* wants int");
                         goto error;
                     }
-                    prec = PyLong_AsSsize_t(v);
+                    prec = _PyLong_AsInt(v);
                     if (prec == -1 && PyErr_Occurred())
                         goto error;
                     if (prec < 0)
@@ -781,37 +721,21 @@
                 len = 1;
                 break;
             case 'a':
-                temp = PyObject_Repr(v);
+                temp = PyObject_ASCII(v);
                 if (temp == NULL)
                     goto error;
-                repr = PyUnicode_AsEncodedObject(temp, "ascii", "backslashreplace");
-                if (repr == NULL) {
-                    Py_DECREF(temp);
-                    goto error;
-                }
-                if (PyObject_GetBuffer(repr, &buf, PyBUF_SIMPLE) != 0) {
-                    temp = format_obj(repr);
-                    if (temp == NULL) {
-                        Py_DECREF(repr);
-                        goto error;
-                    }
-                    Py_DECREF(repr);
-                    repr = temp;
-                }
-                pbuf = PyBytes_AS_STRING(repr);
-                len = PyBytes_GET_SIZE(repr);
-                Py_DECREF(repr);
+                assert(PyUnicode_IS_ASCII(temp));
+                pbuf = (const char *)PyUnicode_1BYTE_DATA(temp);
+                len = PyUnicode_GET_LENGTH(temp);
                 if (prec >= 0 && len > prec)
                     len = prec;
                 break;
             case 's':
                 // %s is only for 2/3 code; 3 only code should use %b
             case 'b':
-                temp = format_obj(v);
+                temp = format_obj(v, &pbuf, &len);
                 if (temp == NULL)
                     goto error;
-                pbuf = PyBytes_AS_STRING(temp);
-                len = PyBytes_GET_SIZE(temp);
                 if (prec >= 0 && len > prec)
                     len = prec;
                 break;
@@ -823,41 +747,32 @@
             case 'X':
                 if (c == 'i')
                     c = 'd';
-                isnumok = 0;
+                iobj = NULL;
                 if (PyNumber_Check(v)) {
-                    PyObject *iobj=NULL;
-
                     if ((PyLong_Check(v))) {
                         iobj = v;
                         Py_INCREF(iobj);
                     }
                     else {
                         iobj = PyNumber_Long(v);
-                    }
-                    if (iobj!=NULL) {
-                        if (PyLong_Check(iobj)) {
-                            int ilen;
-
-                            isnumok = 1;
-                            temp = format_long(iobj, flags, prec, c,
-                                    &pbuf, &ilen);
-                            Py_DECREF(iobj);
-                            if (!temp)
-                                goto error;
-                            len = ilen;
-                            sign = 1;
-                        }
-                        else {
-                            Py_DECREF(iobj);
-                        }
+                        if (iobj != NULL && !PyLong_Check(iobj))
+                            Py_CLEAR(iobj);
                     }
                 }
-                if (!isnumok) {
+                if (iobj == NULL) {
                     PyErr_Format(PyExc_TypeError,
                         "%%%c format: a number is required, "
                         "not %.200s", c, Py_TYPE(v)->tp_name);
                     goto error;
                 }
+                temp = _PyUnicode_FormatLong(iobj, flags & F_ALT, prec, c);
+                Py_DECREF(iobj);
+                if (!temp)
+                    goto error;
+                assert(PyUnicode_IS_ASCII(temp));
+                pbuf = (const char *)PyUnicode_1BYTE_DATA(temp);
+                len = PyUnicode_GET_LENGTH(temp);
+                sign = 1;
                 if (flags & F_ZERO)
                     fill = '0';
                 break;
@@ -877,9 +792,9 @@
                     fill = '0';
                 break;
             case 'c':
-                pbuf = formatbuf;
-                len = formatchar(pbuf, sizeof(formatbuf), v);
-                if (len < 0)
+                pbuf = &onechar;
+                len = byte_converter(v, &onechar);
+                if (!len)
                     goto error;
                 break;
             default:
@@ -911,12 +826,10 @@
                 reslen += rescnt;
                 if (reslen < 0) {
                     Py_DECREF(result);
-                    PyBuffer_Release(&buf);
                     Py_XDECREF(temp);
                     return PyErr_NoMemory();
                 }
                 if (_PyBytes_Resize(&result, reslen)) {
-                    PyBuffer_Release(&buf);
                     Py_XDECREF(temp);
                     return NULL;
                 }
@@ -970,11 +883,9 @@
             if (dict && (argidx < arglen) && c != '%') {
                 PyErr_SetString(PyExc_TypeError,
                            "not all arguments converted during bytes formatting");
-                PyBuffer_Release(&buf);
                 Py_XDECREF(temp);
                 goto error;
             }
-            PyBuffer_Release(&buf);
             Py_XDECREF(temp);
         } /* '%' */
     } /* until end */
diff --git a/Objects/floatobject.c b/Objects/floatobject.c
index 8d85121..1d369f9 100644
--- a/Objects/floatobject.c
+++ b/Objects/floatobject.c
@@ -220,6 +220,7 @@
     if (fo == NULL)
         return -1;
     if (!PyFloat_Check(fo)) {
+        Py_DECREF(fo);
         PyErr_SetString(PyExc_TypeError,
                         "nb_float should return float object");
         return -1;
diff --git a/Objects/listobject.c b/Objects/listobject.c
index e7c4c82..8f88d18 100644
--- a/Objects/listobject.c
+++ b/Objects/listobject.c
@@ -1832,7 +1832,8 @@
     assert(ms);
     while (ms->n > 1) {
         Py_ssize_t n = ms->n - 2;
-        if (n > 0 && p[n-1].len <= p[n].len + p[n+1].len) {
+        if ((n > 0 && p[n-1].len <= p[n].len + p[n+1].len) ||
+            (n > 1 && p[n-2].len <= p[n-1].len + p[n].len)) {
             if (p[n-1].len < p[n+1].len)
                 --n;
             if (merge_at(ms, n) < 0)
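The merge_collapse() change in listobject.c strengthens the test that decides when pending runs must be merged: timsort relies on every run on the stack being longer than the sum of the two runs above it, but the old code only re-checked the topmost three runs, which can leave the invariant broken one level further down after a merge. The new condition therefore also inspects p[n-2]. For reference, a small sketch of the whole-stack invariant the loop is meant to restore (run lengths listed bottom to top; the helper name is illustrative):

    #include <Python.h>

    /* Sketch: verify the timsort run-stack invariants over all entries. */
    static int
    runs_invariant_ok(const Py_ssize_t *len, Py_ssize_t n)
    {
        for (Py_ssize_t i = 0; i + 2 < n; i++) {
            if (len[i] <= len[i + 1] + len[i + 2])
                return 0;   /* a run is not longer than the two above it */
        }
        for (Py_ssize_t i = 0; i + 1 < n; i++) {
            if (len[i] <= len[i + 1])
                return 0;   /* run lengths must also strictly decrease */
        }
        return 1;
    }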
diff --git a/Objects/methodobject.c b/Objects/methodobject.c
index 686baf9..85b413f 100644
--- a/Objects/methodobject.c
+++ b/Objects/methodobject.c
@@ -78,68 +78,71 @@
 }
 
 PyObject *
-PyCFunction_Call(PyObject *func, PyObject *arg, PyObject *kw)
+PyCFunction_Call(PyObject *func, PyObject *args, PyObject *kwds)
 {
-#define CHECK_RESULT(res) assert(res != NULL || PyErr_Occurred())
-
     PyCFunctionObject* f = (PyCFunctionObject*)func;
     PyCFunction meth = PyCFunction_GET_FUNCTION(func);
     PyObject *self = PyCFunction_GET_SELF(func);
-    PyObject *res;
+    PyObject *arg, *res;
     Py_ssize_t size;
+    int flags;
 
-    switch (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST)) {
-    case METH_VARARGS:
-        if (kw == NULL || PyDict_Size(kw) == 0) {
-            res = (*meth)(self, arg);
-            CHECK_RESULT(res);
-            return res;
-        }
-        break;
-    case METH_VARARGS | METH_KEYWORDS:
-        res = (*(PyCFunctionWithKeywords)meth)(self, arg, kw);
-        CHECK_RESULT(res);
-        return res;
-    case METH_NOARGS:
-        if (kw == NULL || PyDict_Size(kw) == 0) {
-            size = PyTuple_GET_SIZE(arg);
-            if (size == 0) {
-                res = (*meth)(self, NULL);
-                CHECK_RESULT(res);
-                return res;
-            }
-            PyErr_Format(PyExc_TypeError,
-                "%.200s() takes no arguments (%zd given)",
-                f->m_ml->ml_name, size);
-            return NULL;
-        }
-        break;
-    case METH_O:
-        if (kw == NULL || PyDict_Size(kw) == 0) {
-            size = PyTuple_GET_SIZE(arg);
-            if (size == 1) {
-                res = (*meth)(self, PyTuple_GET_ITEM(arg, 0));
-                CHECK_RESULT(res);
-                return res;
-            }
-            PyErr_Format(PyExc_TypeError,
-                "%.200s() takes exactly one argument (%zd given)",
-                f->m_ml->ml_name, size);
-            return NULL;
-        }
-        break;
-    default:
-        PyErr_SetString(PyExc_SystemError, "Bad call flags in "
-                        "PyCFunction_Call. METH_OLDARGS is no "
-                        "longer supported!");
+    /* PyCFunction_Call() must not be called with an exception set,
+       because it may clear it (directly or indirectly) and so the
+       caller loses its exception */
+    assert(!PyErr_Occurred());
 
-        return NULL;
+    flags = PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST);
+
+    if (flags == (METH_VARARGS | METH_KEYWORDS)) {
+        res = (*(PyCFunctionWithKeywords)meth)(self, args, kwds);
     }
-    PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments",
-                 f->m_ml->ml_name);
-    return NULL;
+    else {
+        if (kwds != NULL && PyDict_Size(kwds) != 0) {
+            PyErr_Format(PyExc_TypeError, "%.200s() takes no keyword arguments",
+                         f->m_ml->ml_name);
+            return NULL;
+        }
 
-#undef CHECK_RESULT
+        switch (flags) {
+        case METH_VARARGS:
+            res = (*meth)(self, args);
+            break;
+
+        case METH_NOARGS:
+            size = PyTuple_GET_SIZE(args);
+            if (size != 0) {
+                PyErr_Format(PyExc_TypeError,
+                    "%.200s() takes no arguments (%zd given)",
+                    f->m_ml->ml_name, size);
+                return NULL;
+            }
+
+            res = (*meth)(self, NULL);
+            break;
+
+        case METH_O:
+            size = PyTuple_GET_SIZE(args);
+            if (size != 1) {
+                PyErr_Format(PyExc_TypeError,
+                    "%.200s() takes exactly one argument (%zd given)",
+                    f->m_ml->ml_name, size);
+                return NULL;
+            }
+
+            arg = PyTuple_GET_ITEM(args, 0);
+            res = (*meth)(self, arg);
+            break;
+
+        default:
+            PyErr_SetString(PyExc_SystemError,
+                            "Bad call flags in PyCFunction_Call. "
+                            "METH_OLDARGS is no longer supported!");
+            return NULL;
+        }
+    }
+
+    return _Py_CheckFunctionResult(res, "PyCFunction_Call");
 }
 
 /* Methods (the standard built-in methods, that is) */
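PyCFunction_Call() is reorganized so the keyword-argument rejection happens once up front and a switch then dispatches on the calling-convention flag; like PyObject_Call() it now funnels its result through _Py_CheckFunctionResult(). For reference, a minimal method table exercising the three conventions the switch handles (all names are illustrative):

    #include <Python.h>

    static PyObject *demo_noargs(PyObject *self, PyObject *unused)
    { Py_RETURN_NONE; }                                      /* METH_NOARGS */

    static PyObject *demo_one(PyObject *self, PyObject *arg)
    { Py_INCREF(arg); return arg; }                          /* METH_O */

    static PyObject *demo_many(PyObject *self, PyObject *args)
    { return PyLong_FromSsize_t(PyTuple_GET_SIZE(args)); }   /* METH_VARARGS */

    static PyMethodDef demo_methods[] = {
        {"noargs", demo_noargs, METH_NOARGS,  "takes no arguments"},
        {"one",    demo_one,    METH_O,       "takes exactly one argument"},
        {"many",   demo_many,   METH_VARARGS, "takes positional arguments"},
        {NULL, NULL, 0, NULL}
    };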
diff --git a/Objects/object.c b/Objects/object.c
index 42cbbcd..c1d7a05 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -1825,9 +1825,6 @@
 
 #endif
 
-/* Hack to force loading of pycapsule.o */
-PyTypeObject *_PyCapsule_hack = &PyCapsule_Type;
-
 
 /* Hack to force loading of abstract.o */
 Py_ssize_t (*_Py_abstract_hack)(PyObject *) = PyObject_Size;
diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c
index e900cc3..7cc889f 100644
--- a/Objects/obmalloc.c
+++ b/Objects/obmalloc.c
@@ -1339,7 +1339,7 @@
         pool = (poolp)usable_arenas->pool_address;
         assert((block*)pool <= (block*)usable_arenas->address +
                                ARENA_SIZE - POOL_SIZE);
-        pool->arenaindex = usable_arenas - arenas;
+        pool->arenaindex = (uint)(usable_arenas - arenas);
         assert(&arenas[pool->arenaindex] == usable_arenas);
         pool->szidx = DUMMY_SIZE_IDX;
         usable_arenas->pool_address += POOL_SIZE;
diff --git a/Objects/setobject.c b/Objects/setobject.c
index 23ab95c..8197cd9 100644
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -84,8 +84,9 @@
                 return set_lookkey(so, key, hash);
             if (cmp > 0)                                          /* likely */
                 return entry;
+            mask = so->mask;                 /* help avoid a register spill */
         }
-        if (entry->key == dummy && freeslot == NULL)
+        if (entry->hash == -1 && freeslot == NULL)
             freeslot = entry;
 
         if (i + LINEAR_PROBES <= mask) {
@@ -111,8 +112,9 @@
                         return set_lookkey(so, key, hash);
                     if (cmp > 0)
                         return entry;
+                    mask = so->mask;
                 }
-                if (entry->key == dummy && freeslot == NULL)
+                if (entry->hash == -1 && freeslot == NULL)
                     freeslot = entry;
             }
         }
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index f0ad7fd..be53868 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -2808,6 +2808,12 @@
         _PyDict_SetItemId(type->tp_dict, &PyId___module__,
             PyUnicode_FromStringAndSize(
                 spec->name, (Py_ssize_t)(s - spec->name)));
+    else {
+        if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
+                "builtin type %.200s has no __module__ attribute",
+                spec->name))
+            goto fail;
+    }
 
     return (PyObject*)res;
 
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index 101bfbc..0dea83e 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -519,10 +519,6 @@
         return _PyUnicode_Copy(unicode);
 }
 
-#ifdef HAVE_MBCS
-static OSVERSIONINFOEX winver;
-#endif
-
 /* --- Bloom Filters ----------------------------------------------------- */
 
 /* stuff to implement simple "bloom filters" for Unicode characters.
@@ -1535,6 +1531,10 @@
         /* in case the native representation is 2-bytes, we need to allocate a
            new normalized 4-byte version. */
         length_wo_surrogates = _PyUnicode_WSTR_LENGTH(unicode) - num_surrogates;
+        if (length_wo_surrogates > PY_SSIZE_T_MAX / 4 - 1) {
+            PyErr_NoMemory();
+            return -1;
+        }
         _PyUnicode_DATA_ANY(unicode) = PyObject_MALLOC(4 * (length_wo_surrogates + 1));
         if (!_PyUnicode_DATA_ANY(unicode)) {
             PyErr_NoMemory();
@@ -2186,7 +2186,7 @@
     }
     switch (kind) {
     case PyUnicode_2BYTE_KIND:
-        result = PyMem_Malloc(len * sizeof(Py_UCS2));
+        result = PyMem_New(Py_UCS2, len);
         if (!result)
             return PyErr_NoMemory();
         assert(skind == PyUnicode_1BYTE_KIND);
@@ -2197,7 +2197,7 @@
             result);
         return result;
     case PyUnicode_4BYTE_KIND:
-        result = PyMem_Malloc(len * sizeof(Py_UCS4));
+        result = PyMem_New(Py_UCS4, len);
         if (!result)
             return PyErr_NoMemory();
         if (skind == PyUnicode_2BYTE_KIND) {
@@ -2239,11 +2239,7 @@
     if (copy_null)
         targetlen++;
     if (!target) {
-        if (PY_SSIZE_T_MAX / (Py_ssize_t)sizeof(Py_UCS4) < targetlen) {
-            PyErr_NoMemory();
-            return NULL;
-        }
-        target = PyMem_Malloc(targetlen * sizeof(Py_UCS4));
+        target = PyMem_New(Py_UCS4, targetlen);
         if (!target) {
             PyErr_NoMemory();
             return NULL;
@@ -2817,12 +2813,7 @@
     buflen = unicode_aswidechar(unicode, NULL, 0);
     if (buflen == -1)
         return NULL;
-    if (PY_SSIZE_T_MAX / (Py_ssize_t)sizeof(wchar_t) < buflen) {
-        PyErr_NoMemory();
-        return NULL;
-    }
-
-    buffer = PyMem_MALLOC(buflen * sizeof(wchar_t));
+    buffer = PyMem_NEW(wchar_t, buflen);
     if (buffer == NULL) {
         PyErr_NoMemory();
         return NULL;
@@ -3515,10 +3506,7 @@
             wstr = smallbuf;
         }
         else {
-            if (wlen > PY_SSIZE_T_MAX / sizeof(wchar_t) - 1)
-                return PyErr_NoMemory();
-
-            wstr = PyMem_Malloc((wlen+1) * sizeof(wchar_t));
+            wstr = PyMem_New(wchar_t, wlen+1);
             if (!wstr)
                 return PyErr_NoMemory();
         }
@@ -3823,6 +3811,11 @@
 #endif
         }
         else {
+            if ((size_t)_PyUnicode_LENGTH(unicode) >
+                    PY_SSIZE_T_MAX / sizeof(wchar_t) - 1) {
+                PyErr_NoMemory();
+                return NULL;
+            }
             _PyUnicode_WSTR(unicode) = (wchar_t *) PyObject_MALLOC(sizeof(wchar_t) *
                                                   (_PyUnicode_LENGTH(unicode) + 1));
             if (!_PyUnicode_WSTR(unicode)) {
@@ -7115,13 +7108,7 @@
 encode_code_page_flags(UINT code_page, const char *errors)
 {
     if (code_page == CP_UTF8) {
-        if (winver.dwMajorVersion >= 6)
-            /* CP_UTF8 supports WC_ERR_INVALID_CHARS on Windows Vista
-               and later */
-            return WC_ERR_INVALID_CHARS;
-        else
-            /* CP_UTF8 only supports flags=0 on Windows older than Vista */
-            return 0;
+        return WC_ERR_INVALID_CHARS;
     }
     else if (code_page == CP_UTF7) {
         /* CP_UTF7 only supports flags=0 */
@@ -14979,13 +14966,6 @@
     if (PyType_Ready(&PyFormatterIter_Type) < 0)
         Py_FatalError("Can't initialize formatter iter type");
 
-#ifdef HAVE_MBCS
-    winver.dwOSVersionInfoSize = sizeof(winver);
-    if (!GetVersionEx((OSVERSIONINFO*)&winver)) {
-        PyErr_SetFromWindowsErr(0);
-        return -1;
-    }
-#endif
     return 0;
 }
 
diff --git a/PC/getpathp.c b/PC/getpathp.c
index deb40e7..d4f4574 100644
--- a/PC/getpathp.c
+++ b/PC/getpathp.c
@@ -451,11 +451,12 @@
                                      tmpbuffer, MAXPATHLEN * 2);
             Py_DECREF(decoded);
             if (k >= 0) {
-                wchar_t * tok = wcstok(tmpbuffer, L" \t\r\n");
+                wchar_t * context = NULL;
+                wchar_t * tok = wcstok_s(tmpbuffer, L" \t\r\n", &context);
                 if ((tok != NULL) && !wcscmp(tok, key)) {
-                    tok = wcstok(NULL, L" \t");
+                    tok = wcstok_s(NULL, L" \t", &context);
                     if ((tok != NULL) && !wcscmp(tok, L"=")) {
-                        tok = wcstok(NULL, L"\r\n");
+                        tok = wcstok_s(NULL, L"\r\n", &context);
                         if (tok != NULL) {
                             wcsncpy(value, tok, MAXPATHLEN);
                             result = 1;
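
wcstok_s here is the three-argument Microsoft CRT variant of wcstok: the caller owns the tokenizer state through the context pointer instead of sharing hidden static state across calls. A small usage sketch, assuming an invented "key = value" line:

#include <wchar.h>

/* Sketch: tokenize a "key = value" line the same way the hunk above does,
   keeping the parser state in a local context pointer. */
static void
parse_line_sketch(void)
{
    wchar_t line[] = L"pythonpath = C:\\Spam\\Lib";
    wchar_t *context = NULL;
    wchar_t *key = wcstok_s(line, L" \t\r\n", &context);   /* L"pythonpath"    */
    wchar_t *eq  = wcstok_s(NULL, L" \t",     &context);   /* L"="             */
    wchar_t *val = wcstok_s(NULL, L"\r\n",    &context);   /* L"C:\\Spam\\Lib" */
    (void)key; (void)eq; (void)val;
}
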
diff --git a/PC/invalid_parameter_handler.c b/PC/invalid_parameter_handler.c
new file mode 100644
index 0000000..3bc0104
--- /dev/null
+++ b/PC/invalid_parameter_handler.c
@@ -0,0 +1,22 @@
+#ifdef _MSC_VER
+
+#include <stdlib.h>
+
+#if _MSC_VER >= 1900
+/* pyconfig.h uses this function in the _Py_BEGIN/END_SUPPRESS_IPH
+ * macros. It does not need to be defined when building using MSVC
+ * earlier than 14.0 (_MSC_VER == 1900).
+ */
+
+static void __cdecl _silent_invalid_parameter_handler(
+    wchar_t const* expression,
+    wchar_t const* function,
+    wchar_t const* file,
+    unsigned int line,
+    uintptr_t pReserved) { }
+
+void *_Py_silent_invalid_parameter_handler =
+    (void*)_silent_invalid_parameter_handler;
+#endif
+
+#endif
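
This handler is installed per-thread in Python/pystate.c further down, via _set_thread_local_invalid_parameter_handler(). The pyconfig.h macros mentioned in the comment are not part of this patch; the following is only a rough sketch of what such suppression macros could look like, to illustrate the mechanism:

#include <stdlib.h>

extern void *_Py_silent_invalid_parameter_handler;  /* from PC/invalid_parameter_handler.c */

#if defined(_MSC_VER) && _MSC_VER >= 1900
/* Hypothetical expansion: temporarily install the silent handler around a
   CRT call that may be handed an invalid file descriptor, then restore the
   previous per-thread handler. */
#define _Py_BEGIN_SUPPRESS_IPH \
    { _invalid_parameter_handler _old_iph = \
          _set_thread_local_invalid_parameter_handler( \
              (_invalid_parameter_handler)_Py_silent_invalid_parameter_handler);
#define _Py_END_SUPPRESS_IPH \
      _set_thread_local_invalid_parameter_handler(_old_iph); }
#else
#define _Py_BEGIN_SUPPRESS_IPH
#define _Py_END_SUPPRESS_IPH
#endif
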
diff --git a/PC/launcher.c b/PC/launcher.c
index d450d9f..33dd5da 100644
--- a/PC/launcher.c
+++ b/PC/launcher.c
@@ -384,6 +384,31 @@
 }
 
 
+static wchar_t *
+find_python_by_venv()
+{
+    static wchar_t venv_python[MAX_PATH];
+    wchar_t *virtual_env = get_env(L"VIRTUAL_ENV");
+    DWORD attrs;
+
+    /* Check for VIRTUAL_ENV environment variable */
+    if (virtual_env == NULL || virtual_env[0] == L'\0') {
+        return NULL;
+    }
+
+    /* Check for a python executable in the venv */
+    debug(L"Checking for Python executable in virtual env '%ls'\n", virtual_env);
+    _snwprintf_s(venv_python, MAX_PATH, _TRUNCATE,
+            L"%ls\\Scripts\\%ls", virtual_env, PYTHON_EXECUTABLE);
+    attrs = GetFileAttributesW(venv_python);
+    if (attrs == INVALID_FILE_ATTRIBUTES) {
+        debug(L"Python executable %ls missing from virtual env\n", venv_python);
+        return NULL;
+    }
+
+    return venv_python;
+}
+
 static wchar_t appdata_ini_path[MAX_PATH];
 static wchar_t launcher_ini_path[MAX_PATH];
 
@@ -1309,6 +1334,7 @@
 {
     wchar_t * wp;
     wchar_t * command;
+    wchar_t * executable;
     wchar_t * p;
     int rc = 0;
     size_t plen;
@@ -1453,6 +1479,7 @@
             if (ip == NULL)
                 error(RC_NO_PYTHON, L"Requested Python version (%ls) not \
 installed", &p[1]);
+            executable = ip->executable;
             command += wcslen(p);
             command = skip_whitespace(command);
         }
@@ -1470,9 +1497,16 @@
 #endif
 
     if (!valid) {
-        ip = locate_python(L"");
-        if (ip == NULL)
-            error(RC_NO_PYTHON, L"Can't find a default Python.");
+        /* Look for an active virtualenv */
+        executable = find_python_by_venv();
+
+        /* If we didn't find one, look for the default Python */
+        if (executable == NULL) {
+            ip = locate_python(L"");
+            if (ip == NULL)
+                error(RC_NO_PYTHON, L"Can't find a default Python.");
+            executable = ip->executable;
+        }
         if ((argc == 2) && (!_wcsicmp(p, L"-h") || !_wcsicmp(p, L"--help"))) {
 #if defined(_M_X64)
             BOOL canDo64bit = TRUE;
@@ -1500,7 +1534,7 @@
             fflush(stdout);
         }
     }
-    invoke_child(ip->executable, NULL, command);
+    invoke_child(executable, NULL, command);
     return rc;
 }
 
diff --git a/PC/pyconfig.h b/PC/pyconfig.h
index 328be0f..324a130 100644
--- a/PC/pyconfig.h
+++ b/PC/pyconfig.h
@@ -156,9 +156,9 @@
 #endif /* MS_WIN64 */
 
 /* set the version macros for the windows headers */
-/* Python 3.4+ requires Windows XP or greater */
-#define Py_WINVER 0x0501 /* _WIN32_WINNT_WINXP */
-#define Py_NTDDI NTDDI_WINXP
+/* Python 3.5+ requires Windows Vista or greater */
+#define Py_WINVER 0x0600 /* _WIN32_WINNT_VISTA */
+#define Py_NTDDI NTDDI_VISTA
 
 /* We only set these values when building Python - we don't want to force
    these values on extensions, as that will affect the prototypes and
diff --git a/PC/python_ver_rc.h b/PC/python_ver_rc.h
index 1f1bebc..827e9be 100644
--- a/PC/python_ver_rc.h
+++ b/PC/python_ver_rc.h
@@ -4,7 +4,7 @@
 #include "winver.h"
 
 #define PYTHON_COMPANY   "Python Software Foundation"
-#define PYTHON_COPYRIGHT "Copyright © 2001-2015 Python Software Foundation. Copyright © 2000 BeOpen.com. Copyright © 1995-2001 CNRI. Copyright © 1991-1995 SMC."
+#define PYTHON_COPYRIGHT "Copyright \xA9 2001-2015 Python Software Foundation. Copyright \xA9 2000 BeOpen.com. Copyright \xA9 1995-2001 CNRI. Copyright \xA9 1991-1995 SMC."
 
 #define MS_WINDOWS
 #include "modsupport.h"
diff --git a/PC/winreg.c b/PC/winreg.c
index 63c437e..19d5a70 100644
--- a/PC/winreg.c
+++ b/PC/winreg.c
@@ -939,7 +939,7 @@
                 wchar_t *data = (wchar_t *)retDataBuf;
                 int len = retDataSize / 2;
                 int s = countStrings(data, len);
-                wchar_t **str = (wchar_t **)PyMem_Malloc(sizeof(wchar_t *)*s);
+                wchar_t **str = PyMem_New(wchar_t *, s);
                 if (str == NULL)
                     return PyErr_NoMemory();
 
@@ -1206,7 +1206,7 @@
     ++retDataSize;
     bufDataSize = retDataSize;
     bufValueSize = retValueSize;
-    retValueBuf = (wchar_t *)PyMem_Malloc(sizeof(wchar_t) * retValueSize);
+    retValueBuf = PyMem_New(wchar_t, retValueSize);
     if (retValueBuf == NULL)
         return PyErr_NoMemory();
     retDataBuf = (BYTE *)PyMem_Malloc(retDataSize);
@@ -1277,7 +1277,7 @@
         return PyErr_SetFromWindowsErrWithFunction(retValueSize,
                                         "ExpandEnvironmentStrings");
     }
-    retValue = (wchar_t *)PyMem_Malloc(retValueSize * sizeof(wchar_t));
+    retValue = PyMem_New(wchar_t, retValueSize);
     if (retValue == NULL) {
         return PyErr_NoMemory();
     }
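
The allocation changes in Objects/unicodeobject.c and PC/winreg.c above lean on the fact that PyMem_New(type, n) refuses requests where n * sizeof(type) would overflow, which is why the hand-written PY_SSIZE_T_MAX guards can be deleted. A minimal sketch of the pattern; the helper name alloc_ucs4_buffer is invented for illustration:

#include "Python.h"

/* Sketch only: PyMem_New(T, n) yields NULL when n > PY_SSIZE_T_MAX / sizeof(T),
   so the explicit guard that used to precede PyMem_Malloc(n * sizeof(T))
   becomes redundant. */
static Py_UCS4 *
alloc_ucs4_buffer(Py_ssize_t n)
{
    Py_UCS4 *buf = PyMem_New(Py_UCS4, n);   /* overflow-checked allocation */
    if (buf == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    return buf;
}
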
diff --git a/PCbuild/_freeze_importlib.vcxproj b/PCbuild/_freeze_importlib.vcxproj
index 7f4e419..f7714c0 100644
--- a/PCbuild/_freeze_importlib.vcxproj
+++ b/PCbuild/_freeze_importlib.vcxproj
@@ -85,8 +85,8 @@
     <Exec Command='"$(TargetPath)" "$(PySourcePath)Lib\importlib\_bootstrap.py" "$(IntDir)importlib.g.h"' />
     
     <PropertyGroup>
-      <_OldContent Condition="Exists('$(PySourcePath)Python\importlib.h')">$([System.IO.File]::ReadAllText('$(PySourcePath)Python\importlib.h'))</_OldContent>
-      <_NewContent Condition="Exists('$(IntDir)importlib.g.h')">$([System.IO.File]::ReadAllText('$(IntDir)importlib.g.h'))</_NewContent>
+      <_OldContent Condition="Exists('$(PySourcePath)Python\importlib.h')">$([System.IO.File]::ReadAllText('$(PySourcePath)Python\importlib.h').Replace(`&#x0D;&#x0A;`, `&#x0A;`))</_OldContent>
+      <_NewContent Condition="Exists('$(IntDir)importlib.g.h')">$([System.IO.File]::ReadAllText('$(IntDir)importlib.g.h').Replace(`&#x0D;&#x0A;`, `&#x0A;`))</_NewContent>
     </PropertyGroup>
     
     <Copy SourceFiles="$(IntDir)importlib.g.h"
diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj
index 479f68d..a5690f6 100644
--- a/PCbuild/pythoncore.vcxproj
+++ b/PCbuild/pythoncore.vcxproj
@@ -333,6 +333,7 @@
     <ClCompile Include="..\Parser\parser.c" />
     <ClCompile Include="..\Parser\parsetok.c" />
     <ClCompile Include="..\Parser\tokenizer.c" />
+    <ClCompile Include="..\PC\invalid_parameter_handler.c" />
     <ClCompile Include="..\PC\winreg.c" />
     <ClCompile Include="..\PC\config.c" />
     <ClCompile Include="..\PC\getpathp.c" />
@@ -394,25 +395,21 @@
   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
   <ImportGroup Label="ExtensionTargets">
   </ImportGroup>
-  
   <Target Name="_GetBuildInfo" BeforeTargets="PrepareForBuild">
-    <Exec Command='hg id -b &gt; "$(IntDir)hgbranch.txt"' ContinueOnError="true" />
-    <Exec Command='hg id -i &gt; "$(IntDir)hgversion.txt"' ContinueOnError="true" />
-    <Exec Command='hg id -t &gt; "$(IntDir)hgtag.txt"' ContinueOnError="true" />
-    
+    <Exec Command="hg id -b &gt; &quot;$(IntDir)hgbranch.txt&quot;" ContinueOnError="true" />
+    <Exec Command="hg id -i &gt; &quot;$(IntDir)hgversion.txt&quot;" ContinueOnError="true" />
+    <Exec Command="hg id -t &gt; &quot;$(IntDir)hgtag.txt&quot;" ContinueOnError="true" />
     <PropertyGroup>
       <HgBranch Condition="Exists('$(IntDir)hgbranch.txt')">$([System.IO.File]::ReadAllText('$(IntDir)hgbranch.txt').Trim())</HgBranch>
       <HgVersion Condition="Exists('$(IntDir)hgversion.txt')">$([System.IO.File]::ReadAllText('$(IntDir)hgversion.txt').Trim())</HgVersion>
       <HgTag Condition="Exists('$(IntDir)hgtag.txt')">$([System.IO.File]::ReadAllText('$(IntDir)hgtag.txt').Trim())</HgTag>
     </PropertyGroup>
-    
     <ItemGroup>
       <ClCompile Include="..\Modules\getbuildinfo.c">
         <PreprocessorDefinitions>HGVERSION="$(HgVersion)";HGTAG="$(HgTag)";HGBRANCH="$(HgBranch)";%(PreprocessorDefinitions)</PreprocessorDefinitions>
       </ClCompile>
     </ItemGroup>
   </Target>
-
   <Target Name="_WarnAboutToolset" BeforeTargets="PrepareForBuild" Condition="$(PlatformToolset) != 'v140'">
     <Warning Text="Toolset $(PlatformToolset) is not used for official builds. Your build may have errors or incompatibilities." />
   </Target>
diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters
index 174140a..b415405 100644
--- a/PCbuild/pythoncore.vcxproj.filters
+++ b/PCbuild/pythoncore.vcxproj.filters
@@ -959,6 +959,9 @@
     <ClCompile Include="..\Modules\hashtable.c">
       <Filter>Modules</Filter>
     </ClCompile>
+    <ClCompile Include="..\PC\invalid_parameter_handler.c">
+      <Filter>PC</Filter>
+    </ClCompile>
   </ItemGroup>
   <ItemGroup>
     <ResourceCompile Include="..\PC\python_nt.rc">
diff --git a/Programs/_freeze_importlib.c b/Programs/_freeze_importlib.c
index b8899ec..31b3d31 100644
--- a/Programs/_freeze_importlib.c
+++ b/Programs/_freeze_importlib.c
@@ -17,7 +17,7 @@
    of frozen modules instead, left deliberately blank so as to avoid
    unintentional import of a stale version of _frozen_importlib. */
 
-const static struct _frozen _PyImport_FrozenModules[] = {
+static const struct _frozen _PyImport_FrozenModules[] = {
     {0, 0, 0} /* sentinel */
 };
 
@@ -35,7 +35,7 @@
 {
     char *inpath, *outpath;
     FILE *infile = NULL, *outfile = NULL;
-    struct stat st;
+    struct _Py_stat_struct st;
     size_t text_size, data_size, n;
     char *text = NULL;
     unsigned char *data;
@@ -54,7 +54,7 @@
         fprintf(stderr, "cannot open '%s' for reading\n", inpath);
         goto error;
     }
-    if (fstat(fileno(infile), &st)) {
+    if (_Py_fstat(fileno(infile), &st)) {
         fprintf(stderr, "cannot fstat '%s'\n", inpath);
         goto error;
     }
diff --git a/Python/ceval.c b/Python/ceval.c
index e09ff34..1c6089d 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -3192,8 +3192,7 @@
     if (why != WHY_RETURN)
         retval = NULL;
 
-    assert((retval != NULL && !PyErr_Occurred())
-            || (retval == NULL && PyErr_Occurred()));
+    assert((retval != NULL) ^ (PyErr_Occurred() != NULL));
 
 fast_yield:
     if (co->co_flags & CO_GENERATOR) {
@@ -3254,7 +3253,7 @@
     f->f_executing = 0;
     tstate->frame = f->f_back;
 
-    return retval;
+    return _Py_CheckFunctionResult(retval, "PyEval_EvalFrameEx");
 }
 
 static void
@@ -4119,13 +4118,6 @@
 {
     PyObject *result;
 
-#ifdef Py_DEBUG
-    /* PyEval_CallObjectWithKeywords() must not be called with an exception
-       set, because it may clear it (directly or indirectly)
-       and so the caller looses its exception */
-    assert(!PyErr_Occurred());
-#endif
-
     if (arg == NULL) {
         arg = PyTuple_New(0);
         if (arg == NULL)
@@ -4149,8 +4141,6 @@
     result = PyObject_Call(func, arg, kw);
     Py_DECREF(arg);
 
-    assert((result != NULL && !PyErr_Occurred())
-           || (result == NULL && PyErr_Occurred()));
     return result;
 }
 
@@ -4253,11 +4243,15 @@
             PyObject *self = PyCFunction_GET_SELF(func);
             if (flags & METH_NOARGS && na == 0) {
                 C_TRACE(x, (*meth)(self,NULL));
+
+                x = _Py_CheckFunctionResult(x, "call_function");
             }
             else if (flags & METH_O && na == 1) {
                 PyObject *arg = EXT_POP(*pp_stack);
                 C_TRACE(x, (*meth)(self,arg));
                 Py_DECREF(arg);
+
+                x = _Py_CheckFunctionResult(x, "call_function");
             }
             else {
                 err_args(func, flags, na);
@@ -4277,7 +4271,8 @@
                 x = NULL;
             }
         }
-    } else {
+    }
+    else {
         if (PyMethod_Check(func) && PyMethod_GET_SELF(func) != NULL) {
             /* optimize access to bound methods */
             PyObject *self = PyMethod_GET_SELF(func);
@@ -4299,9 +4294,9 @@
             x = do_call(func, pp_stack, na, nk);
         READ_TIMESTAMP(*pintr1);
         Py_DECREF(func);
+
+        assert((x != NULL) ^ (PyErr_Occurred() != NULL));
     }
-    assert((x != NULL && !PyErr_Occurred())
-           || (x == NULL && PyErr_Occurred()));
 
     /* Clear the stack of the function object.  Also removes
        the arguments in case they weren't consumed already
@@ -4313,8 +4308,7 @@
         PCALL(PCALL_POP);
     }
 
-    assert((x != NULL && !PyErr_Occurred())
-           || (x == NULL && PyErr_Occurred()));
+    assert((x != NULL) ^ (PyErr_Occurred() != NULL));
     return x;
 }
 
@@ -4601,12 +4595,10 @@
     Py_XDECREF(callargs);
     Py_XDECREF(kwdict);
     Py_XDECREF(stararg);
-    assert((result != NULL && !PyErr_Occurred())
-           || (result == NULL && PyErr_Occurred()));
     return result;
 }
 
-/* Extract a slice index from a PyInt or PyLong or an object with the
+/* Extract a slice index from a PyLong or an object with the
    nb_index slot defined, and store in *pi.
    Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
    and silently boost values less than -PY_SSIZE_T_MAX-1 to -PY_SSIZE_T_MAX-1.
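
The assert rewrites and the new _Py_CheckFunctionResult() calls above all enforce one invariant: a call into C either returns a non-NULL object with no exception set, or NULL with an exception set; never both, never neither. A hedged sketch of such a check (an illustration of the idea, not the actual helper):

#include "Python.h"

/* Sketch only: turn a violation of the "result XOR exception" invariant
   into a SystemError instead of silently propagating bad state. */
static PyObject *
check_result_sketch(PyObject *result, const char *where)
{
    if (result == NULL) {
        if (!PyErr_Occurred())
            PyErr_Format(PyExc_SystemError,
                         "%s returned NULL without setting an error", where);
    }
    else if (PyErr_Occurred()) {
        Py_DECREF(result);
        PyErr_Format(PyExc_SystemError,
                     "%s returned a result with an error set", where);
        return NULL;
    }
    return result;
}
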
diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h
index 2702d5c..4db56b6 100644
--- a/Python/ceval_gil.h
+++ b/Python/ceval_gil.h
@@ -191,7 +191,7 @@
     if (_Py_atomic_load_relaxed(&gil_drop_request) && tstate != NULL) {
         MUTEX_LOCK(switch_mutex);
         /* Not switched yet => wait */
-        if (_Py_atomic_load_relaxed(&gil_last_holder) == tstate) {
+        if ((PyThreadState*)_Py_atomic_load_relaxed(&gil_last_holder) == tstate) {
         RESET_GIL_DROP_REQUEST();
             /* NOTE: if COND_WAIT does not atomically start waiting when
                releasing the mutex, another thread can run through, take
@@ -239,7 +239,7 @@
     _Py_atomic_store_relaxed(&gil_locked, 1);
     _Py_ANNOTATE_RWLOCK_ACQUIRED(&gil_locked, /*is_write=*/1);
 
-    if (tstate != _Py_atomic_load_relaxed(&gil_last_holder)) {
+    if (tstate != (PyThreadState*)_Py_atomic_load_relaxed(&gil_last_holder)) {
         _Py_atomic_store_relaxed(&gil_last_holder, tstate);
         ++gil_switch_number;
     }
diff --git a/Python/codecs.c b/Python/codecs.c
index a558859..64fc3d6 100644
--- a/Python/codecs.c
+++ b/Python/codecs.c
@@ -1006,7 +1006,7 @@
             c = PyUnicode_READ_CHAR(object, i);
             if (ucnhash_CAPI &&
                 ucnhash_CAPI->getname(NULL, c, buffer, sizeof(buffer), 1)) {
-                replsize = 1+1+1+strlen(buffer)+1;
+                replsize = 1+1+1+(int)strlen(buffer)+1;
             }
             else if (c >= 0x10000) {
                 replsize = 1+1+8;
diff --git a/Python/dynload_shlib.c b/Python/dynload_shlib.c
index 5cd1efd..659adac 100644
--- a/Python/dynload_shlib.c
+++ b/Python/dynload_shlib.c
@@ -71,8 +71,8 @@
 
     if (fp != NULL) {
         int i;
-        struct stat statb;
-        if (fstat(fileno(fp), &statb) == -1) {
+        struct _Py_stat_struct statb;
+        if (_Py_fstat(fileno(fp), &statb) == -1) {
             PyErr_SetFromErrno(PyExc_IOError);
             return NULL;
         }
diff --git a/Python/fileutils.c b/Python/fileutils.c
index c5c8d4e..6502823 100644
--- a/Python/fileutils.c
+++ b/Python/fileutils.c
@@ -3,6 +3,7 @@
 #include <locale.h>
 
 #ifdef MS_WINDOWS
+#  include <malloc.h>
 #  include <windows.h>
 #endif
 
@@ -544,8 +545,143 @@
 }
 #endif
 
-#ifdef HAVE_STAT
 
+#if defined(HAVE_FSTAT) || defined(MS_WINDOWS)
+
+#ifdef MS_WINDOWS
+static __int64 secs_between_epochs = 11644473600; /* Seconds between 1.1.1601 and 1.1.1970 */
+
+static void
+FILE_TIME_to_time_t_nsec(FILETIME *in_ptr, time_t *time_out, int* nsec_out)
+{
+    /* XXX endianness. Shouldn't matter, as all Windows implementations are little-endian */
+    /* Cannot simply cast and dereference in_ptr,
+       since it might not be aligned properly */
+    __int64 in;
+    memcpy(&in, in_ptr, sizeof(in));
+    *nsec_out = (int)(in % 10000000) * 100; /* FILETIME is in units of 100 nsec. */
+    *time_out = Py_SAFE_DOWNCAST((in / 10000000) - secs_between_epochs, __int64, time_t);
+}
+
+void
+_Py_time_t_to_FILE_TIME(time_t time_in, int nsec_in, FILETIME *out_ptr)
+{
+    /* XXX endianness */
+    __int64 out;
+    out = time_in + secs_between_epochs;
+    out = out * 10000000 + nsec_in / 100;
+    memcpy(out_ptr, &out, sizeof(out));
+}
+
+/* Below, we *know* that ugo+r is 0444 */
+#if _S_IREAD != 0400
+#error Unsupported C library
+#endif
+static int
+attributes_to_mode(DWORD attr)
+{
+    int m = 0;
+    if (attr & FILE_ATTRIBUTE_DIRECTORY)
+        m |= _S_IFDIR | 0111; /* IFEXEC for user,group,other */
+    else
+        m |= _S_IFREG;
+    if (attr & FILE_ATTRIBUTE_READONLY)
+        m |= 0444;
+    else
+        m |= 0666;
+    return m;
+}
+
+void
+_Py_attribute_data_to_stat(BY_HANDLE_FILE_INFORMATION *info, ULONG reparse_tag, struct _Py_stat_struct *result)
+{
+    memset(result, 0, sizeof(*result));
+    result->st_mode = attributes_to_mode(info->dwFileAttributes);
+    result->st_size = (((__int64)info->nFileSizeHigh)<<32) + info->nFileSizeLow;
+    result->st_dev = info->dwVolumeSerialNumber;
+    result->st_rdev = result->st_dev;
+    FILE_TIME_to_time_t_nsec(&info->ftCreationTime, &result->st_ctime, &result->st_ctime_nsec);
+    FILE_TIME_to_time_t_nsec(&info->ftLastWriteTime, &result->st_mtime, &result->st_mtime_nsec);
+    FILE_TIME_to_time_t_nsec(&info->ftLastAccessTime, &result->st_atime, &result->st_atime_nsec);
+    result->st_nlink = info->nNumberOfLinks;
+    result->st_ino = (((__int64)info->nFileIndexHigh)<<32) + info->nFileIndexLow;
+    if (reparse_tag == IO_REPARSE_TAG_SYMLINK) {
+        /* first clear the S_IFMT bits */
+        result->st_mode ^= (result->st_mode & S_IFMT);
+        /* now set the bits that make this a symlink */
+        result->st_mode |= S_IFLNK;
+    }
+    result->st_file_attributes = info->dwFileAttributes;
+}
+#endif
+
+/* Return information about a file.
+
+   On POSIX, use fstat().
+
+   On Windows, use GetFileType() and GetFileInformationByHandle() which support
+   files larger than 2 GB.  fstat() may fail with EOVERFLOW on files larger
+   than 2 GB because the file size type is a signed 32-bit integer: see issue
+   #23152.
+   */
+int
+_Py_fstat(int fd, struct _Py_stat_struct *result)
+{
+#ifdef MS_WINDOWS
+    BY_HANDLE_FILE_INFORMATION info;
+    HANDLE h;
+    int type;
+
+    if (!_PyVerify_fd(fd))
+        h = INVALID_HANDLE_VALUE;
+    else
+        h = (HANDLE)_get_osfhandle(fd);
+
+    /* Protocol violation: we explicitly clear errno, instead of
+       setting it to a POSIX error. Callers should use GetLastError. */
+    errno = 0;
+
+    if (h == INVALID_HANDLE_VALUE) {
+        /* This is really a C library error (invalid file handle).
+           We set the Win32 error to the closest matching one. */
+        SetLastError(ERROR_INVALID_HANDLE);
+        return -1;
+    }
+    memset(result, 0, sizeof(*result));
+
+    type = GetFileType(h);
+    if (type == FILE_TYPE_UNKNOWN) {
+        DWORD error = GetLastError();
+        if (error != 0) {
+            return -1;
+        }
+        /* else: valid but unknown file */
+    }
+
+    if (type != FILE_TYPE_DISK) {
+        if (type == FILE_TYPE_CHAR)
+            result->st_mode = _S_IFCHR;
+        else if (type == FILE_TYPE_PIPE)
+            result->st_mode = _S_IFIFO;
+        return 0;
+    }
+
+    if (!GetFileInformationByHandle(h, &info)) {
+        return -1;
+    }
+
+    _Py_attribute_data_to_stat(&info, 0, result);
+    /* specific to fstat() */
+    result->st_ino = (((__int64)info.nFileIndexHigh)<<32) + info.nFileIndexLow;
+    return 0;
+#else
+    return fstat(fd, result);
+#endif
+}
+#endif   /* HAVE_FSTAT || MS_WINDOWS */
+
+
+#ifdef HAVE_STAT
 /* Call _wstat() on Windows, or encode the path to the filesystem encoding and
    call stat() otherwise. Only fill st_mode attribute on Windows.
 
@@ -578,7 +714,8 @@
 #endif
 }
 
-#endif
+#endif   /* HAVE_STAT */
+
 
 static int
 get_inheritable(int fd, int raise)
@@ -1131,3 +1268,102 @@
 }
 #endif
 
+#ifdef _MSC_VER
+#if _MSC_VER >= 1900
+
+/* This function lets the Windows CRT validate the file handle without
+   terminating the process if it's invalid. */
+int
+_PyVerify_fd(int fd)
+{
+    intptr_t osh;
+    /* Fast check for the only condition we know */
+    if (fd < 0) {
+        _set_errno(EBADF);
+        return 0;
+    }
+    osh = _get_osfhandle(fd);
+    return osh != (intptr_t)-1;
+}
+
+#elif _MSC_VER >= 1400
+/* Legacy implementation of _PyVerify_fd while transitioning to
+ * MSVC 14.0. This should eventually be removed. (issue23524)
+ */
+
+/* Microsoft CRT in VS2005 and higher will verify that a filehandle is
+ * valid and raise an assertion if it isn't.
+ * Normally, an invalid fd is likely to be a C program error and therefore
+ * an assertion can be useful, but it does contradict the POSIX standard
+ * which for write(2) states:
+ *    "Otherwise, -1 shall be returned and errno set to indicate the error."
+ *    "[EBADF] The fildes argument is not a valid file descriptor open for
+ *     writing."
+ * Furthermore, python allows the user to enter any old integer
+ * as a fd and should merely raise a python exception on error.
+ * The Microsoft CRT doesn't provide an official way to check for the
+ * validity of a file descriptor, but we can emulate its internal behaviour
+ * by using the exported __pinfo data member and knowledge of the
+ * internal structures involved.
+ * The structures below must be updated for each version of visual studio
+ * according to the file internal.h in the CRT source, until MS comes
+ * up with a less hacky way to do this.
+ * (all of this is to avoid globally modifying the CRT behaviour using
+ * _set_invalid_parameter_handler() and _CrtSetReportMode())
+ */
+/* The actual size of the structure is determined at runtime.
+ * Only the first items must be present.
+ */
+typedef struct {
+    intptr_t osfhnd;
+    char osfile;
+} my_ioinfo;
+
+extern __declspec(dllimport) char * __pioinfo[];
+#define IOINFO_L2E 5
+#define IOINFO_ARRAYS 64
+#define IOINFO_ARRAY_ELTS   (1 << IOINFO_L2E)
+#define _NHANDLE_           (IOINFO_ARRAYS * IOINFO_ARRAY_ELTS)
+#define FOPEN 0x01
+#define _NO_CONSOLE_FILENO (intptr_t)-2
+
+/* This function emulates what the windows CRT does to validate file handles */
+int
+_PyVerify_fd(int fd)
+{
+    const int i1 = fd >> IOINFO_L2E;
+    const int i2 = fd & ((1 << IOINFO_L2E) - 1);
+
+    static size_t sizeof_ioinfo = 0;
+
+    /* Determine the actual size of the ioinfo structure,
+     * as used by the CRT loaded in memory
+     */
+    if (sizeof_ioinfo == 0 && __pioinfo[0] != NULL) {
+        sizeof_ioinfo = _msize(__pioinfo[0]) / IOINFO_ARRAY_ELTS;
+    }
+    if (sizeof_ioinfo == 0) {
+        /* This should not happen... */
+        goto fail;
+    }
+
+    /* See that it isn't a special CLEAR fileno */
+    if (fd != _NO_CONSOLE_FILENO) {
+        /* Microsoft CRT would check that 0<=fd<_nhandle but we can't do that.  Instead
+         * we check pointer validity and other info
+         */
+        if (0 <= i1 && i1 < IOINFO_ARRAYS && __pioinfo[i1] != NULL) {
+            /* finally, check that the file is open */
+            my_ioinfo* info = (my_ioinfo*)(__pioinfo[i1] + i2 * sizeof_ioinfo);
+            if (info->osfile & FOPEN) {
+                return 1;
+            }
+        }
+    }
+  fail:
+    errno = EBADF;
+    return 0;
+}
+
+#endif /* _MSC_VER >= 1900 || _MSC_VER >= 1400 */
+#endif /* defined _MSC_VER */
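
A worked example of the FILETIME arithmetic used in FILE_TIME_to_time_t_nsec() above: FILETIME counts 100-nanosecond ticks since 1601-01-01, and 11644473600 is the number of seconds between that epoch and the Unix epoch. The tick value below is invented and decodes to 2015-01-01 00:00:00 UTC:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    int64_t ticks = 130645440001234567LL;      /* 100 ns units since 1601-01-01 */
    int64_t epoch_delta = 11644473600LL;       /* seconds from 1601 to 1970 */
    int64_t unix_secs = ticks / 10000000 - epoch_delta;
    int64_t nsec = (ticks % 10000000) * 100;
    printf("%lld s, %lld ns\n", (long long)unix_secs, (long long)nsec);
    /* Prints: 1420070400 s, 123456700 ns */
    return 0;
}
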
diff --git a/Python/frozenmain.c b/Python/frozenmain.c
index cb84ed5..de8bd35 100644
--- a/Python/frozenmain.c
+++ b/Python/frozenmain.c
@@ -24,11 +24,13 @@
     /* We need a second copy, as Python might modify the first one. */
     wchar_t **argv_copy2 = NULL;
 
-    argv_copy = PyMem_RawMalloc(sizeof(wchar_t*) * argc);
-    argv_copy2 = PyMem_RawMalloc(sizeof(wchar_t*) * argc);
-    if (!argv_copy || !argv_copy2) {
-        fprintf(stderr, "out of memory\n");
-        goto error;
+    if (argc > 0) {
+        argv_copy = PyMem_RawMalloc(sizeof(wchar_t*) * argc);
+        argv_copy2 = PyMem_RawMalloc(sizeof(wchar_t*) * argc);
+        if (!argv_copy || !argv_copy2) {
+            fprintf(stderr, "out of memory\n");
+            goto error;
+        }
     }
 
     Py_FrozenFlag = 1; /* Suppress errors from getpath.c */
@@ -68,7 +70,8 @@
 #ifdef MS_WINDOWS
     PyInitFrozenExtensions();
 #endif /* MS_WINDOWS */
-    Py_SetProgramName(argv_copy[0]);
+    if (argc >= 1)
+        Py_SetProgramName(argv_copy[0]);
     Py_Initialize();
 #ifdef MS_WINDOWS
     PyWinFreeze_ExeInit();
diff --git a/Python/marshal.c b/Python/marshal.c
index 5acf0de..93d9d1d 100644
--- a/Python/marshal.c
+++ b/Python/marshal.c
@@ -12,6 +12,7 @@
 #include "longintrepr.h"
 #include "code.h"
 #include "marshal.h"
+#include "../Modules/hashtable.h"
 
 /* High water mark to determine when the marshalled object is dangerously deep
  * and risks coring the interpreter.  When the object stack gets this deep,
@@ -64,54 +65,83 @@
     FILE *fp;
     int error;  /* see WFERR_* values */
     int depth;
-    /* If fp == NULL, the following are valid: */
-    PyObject *readable;    /* Stream-like object being read from */
     PyObject *str;
-    PyObject *current_filename;
     char *ptr;
     char *end;
     char *buf;
-    Py_ssize_t buf_size;
-    PyObject *refs; /* dict on marshal, list on unmarshal */
+    _Py_hashtable_t *hashtable;
     int version;
 } WFILE;
 
-#define w_byte(c, p) if (((p)->fp)) putc((c), (p)->fp); \
-                      else if ((p)->ptr != (p)->end) *(p)->ptr++ = (c); \
-                           else w_more((c), p)
+#define w_byte(c, p) do {                               \
+        if ((p)->ptr != (p)->end || w_reserve((p), 1))  \
+            *(p)->ptr++ = (c);                          \
+    } while(0)
 
 static void
-w_more(char c, WFILE *p)
+w_flush(WFILE *p)
 {
-    Py_ssize_t size, newsize;
-    if (p->str == NULL)
-        return; /* An error already occurred */
-    size = PyBytes_Size(p->str);
-    newsize = size + size + 1024;
-    if (newsize > 32*1024*1024) {
-        newsize = size + (size >> 3);           /* 12.5% overallocation */
+    assert(p->fp != NULL);
+    fwrite(p->buf, 1, p->ptr - p->buf, p->fp);
+    p->ptr = p->buf;
+}
+
+static int
+w_reserve(WFILE *p, Py_ssize_t needed)
+{
+    Py_ssize_t pos, size, delta;
+    if (p->ptr == NULL)
+        return 0; /* An error already occurred */
+    if (p->fp != NULL) {
+        w_flush(p);
+        return needed <= p->end - p->ptr;
     }
-    if (_PyBytes_Resize(&p->str, newsize) != 0) {
-        p->ptr = p->end = NULL;
+    assert(p->str != NULL);
+    pos = p->ptr - p->buf;
+    size = PyBytes_Size(p->str);
+    if (size > 16*1024*1024)
+        delta = (size >> 3);            /* 12.5% overallocation */
+    else
+        delta = size + 1024;
+    delta = Py_MAX(delta, needed);
+    if (delta > PY_SSIZE_T_MAX - size) {
+        p->error = WFERR_NOMEMORY;
+        return 0;
+    }
+    size += delta;
+    if (_PyBytes_Resize(&p->str, size) != 0) {
+        p->ptr = p->buf = p->end = NULL;
+        return 0;
     }
     else {
-        p->ptr = PyBytes_AS_STRING((PyBytesObject *)p->str) + size;
-        p->end =
-            PyBytes_AS_STRING((PyBytesObject *)p->str) + newsize;
-        *p->ptr++ = c;
+        p->buf = PyBytes_AS_STRING(p->str);
+        p->ptr = p->buf + pos;
+        p->end = p->buf + size;
+        return 1;
     }
 }
 
 static void
 w_string(const char *s, Py_ssize_t n, WFILE *p)
 {
+    Py_ssize_t m;
+    if (!n || p->ptr == NULL)
+        return;
+    m = p->end - p->ptr;
     if (p->fp != NULL) {
-        fwrite(s, 1, n, p->fp);
+        if (n <= m) {
+            Py_MEMCPY(p->ptr, s, n);
+            p->ptr += n;
+        }
+        else {
+            w_flush(p);
+            fwrite(s, 1, n, p->fp);
+        }
     }
     else {
-        while (--n >= 0) {
-            w_byte(*s, p);
-            s++;
+        if (n <= m || w_reserve(p, n - m)) {
+            Py_MEMCPY(p->ptr, s, n);
+            p->ptr += n;
         }
     }
 }
@@ -223,46 +253,38 @@
 static int
 w_ref(PyObject *v, char *flag, WFILE *p)
 {
-    PyObject *id;
-    PyObject *idx;
+    _Py_hashtable_entry_t *entry;
+    int w;
 
-    if (p->version < 3 || p->refs == NULL)
+    if (p->version < 3 || p->hashtable == NULL)
         return 0; /* not writing object references */
 
     /* if it has only one reference, it definitely isn't shared */
     if (Py_REFCNT(v) == 1)
         return 0;
 
-    id = PyLong_FromVoidPtr((void*)v);
-    if (id == NULL)
-        goto err;
-    idx = PyDict_GetItem(p->refs, id);
-    if (idx != NULL) {
+    entry = _Py_hashtable_get_entry(p->hashtable, v);
+    if (entry != NULL) {
         /* write the reference index to the stream */
-        long w = PyLong_AsLong(idx);
-        Py_DECREF(id);
-        if (w == -1 && PyErr_Occurred()) {
-            goto err;
-        }
+        _Py_HASHTABLE_ENTRY_READ_DATA(p->hashtable, &w, sizeof(w), entry);
         /* we don't store "long" indices in the dict */
         assert(0 <= w && w <= 0x7fffffff);
         w_byte(TYPE_REF, p);
         w_long(w, p);
         return 1;
     } else {
-        int ok;
-        Py_ssize_t s = PyDict_Size(p->refs);
+        size_t s = p->hashtable->entries;
         /* we don't support long indices */
         if (s >= 0x7fffffff) {
             PyErr_SetString(PyExc_ValueError, "too many objects");
             goto err;
         }
-        idx = PyLong_FromSsize_t(s);
-        ok = idx && PyDict_SetItem(p->refs, id, idx) == 0;
-        Py_DECREF(id);
-        Py_XDECREF(idx);
-        if (!ok)
+        w = (int)s;
+        Py_INCREF(v);
+        if (_Py_HASHTABLE_SET(p->hashtable, v, w) < 0) {
+            Py_DECREF(v);
             goto err;
+        }
         *flag |= FLAG_REF;
         return 0;
     }
@@ -545,37 +567,81 @@
     }
 }
 
+static int
+w_init_refs(WFILE *wf, int version)
+{
+    if (version >= 3) {
+        wf->hashtable = _Py_hashtable_new(sizeof(int), _Py_hashtable_hash_ptr,
+                                          _Py_hashtable_compare_direct);
+        if (wf->hashtable == NULL) {
+            PyErr_NoMemory();
+            return -1;
+        }
+    }
+    return 0;
+}
+
+static int
+w_decref_entry(_Py_hashtable_entry_t *entry, void *Py_UNUSED(data))
+{
+    Py_XDECREF(entry->key);
+    return 0;
+}
+
+static void
+w_clear_refs(WFILE *wf)
+{
+    if (wf->hashtable != NULL) {
+        _Py_hashtable_foreach(wf->hashtable, w_decref_entry, NULL);
+        _Py_hashtable_destroy(wf->hashtable);
+    }
+}
+
 /* version currently has no effect for writing ints. */
 void
 PyMarshal_WriteLongToFile(long x, FILE *fp, int version)
 {
+    char buf[4];
     WFILE wf;
+    memset(&wf, 0, sizeof(wf));
     wf.fp = fp;
+    wf.ptr = wf.buf = buf;
+    wf.end = wf.ptr + sizeof(buf);
     wf.error = WFERR_OK;
-    wf.depth = 0;
-    wf.refs = NULL;
     wf.version = version;
     w_long(x, &wf);
+    w_flush(&wf);
 }
 
 void
 PyMarshal_WriteObjectToFile(PyObject *x, FILE *fp, int version)
 {
+    char buf[BUFSIZ];
     WFILE wf;
+    memset(&wf, 0, sizeof(wf));
     wf.fp = fp;
+    wf.ptr = wf.buf = buf;
+    wf.end = wf.ptr + sizeof(buf);
     wf.error = WFERR_OK;
-    wf.depth = 0;
-    if (version >= 3) {
-        if ((wf.refs = PyDict_New()) == NULL)
-            return; /* caller mush check PyErr_Occurred() */
-    } else
-        wf.refs = NULL;
     wf.version = version;
+    if (w_init_refs(&wf, version))
+        return; /* caller must check PyErr_Occurred() */
     w_object(x, &wf);
-    Py_XDECREF(wf.refs);
+    w_clear_refs(&wf);
+    w_flush(&wf);
 }
 
-typedef WFILE RFILE; /* Same struct with different invariants */
+typedef struct {
+    FILE *fp;
+    int depth;
+    PyObject *readable;  /* Stream-like object being read from */
+    PyObject *current_filename;
+    char *ptr;
+    char *end;
+    char *buf;
+    Py_ssize_t buf_size;
+    PyObject *refs;  /* a list */
+} RFILE;
 
 static char *
 r_string(Py_ssize_t n, RFILE *p)
@@ -1415,16 +1481,20 @@
     return res;
 }
 
-#ifdef HAVE_FSTAT
-/* Return size of file in bytes; < 0 if unknown. */
+#if defined(HAVE_FSTAT) || defined(MS_WINDOWS)
+/* Return size of file in bytes; < 0 if unknown or INT_MAX if too big */
 static off_t
 getfilesize(FILE *fp)
 {
-    struct stat st;
-    if (fstat(fileno(fp), &st) != 0)
+    struct _Py_stat_struct st;
+    if (_Py_fstat(fileno(fp), &st) != 0)
         return -1;
+#if SIZEOF_OFF_T == 4
+    else if (st.st_size >= INT_MAX)
+        return (off_t)INT_MAX;
+#endif
     else
-        return st.st_size;
+        return (off_t)st.st_size;
 }
 #endif
 
@@ -1439,7 +1509,7 @@
 {
 /* REASONABLE_FILE_LIMIT is by defn something big enough for Tkinter.pyc. */
 #define REASONABLE_FILE_LIMIT (1L << 18)
-#ifdef HAVE_FSTAT
+#if defined(HAVE_FSTAT) || defined(MS_WINDOWS)
     off_t filesize;
     filesize = getfilesize(fp);
     if (filesize > 0 && filesize <= REASONABLE_FILE_LIMIT) {
@@ -1509,25 +1579,20 @@
 {
     WFILE wf;
 
-    wf.fp = NULL;
-    wf.readable = NULL;
+    memset(&wf, 0, sizeof(wf));
     wf.str = PyBytes_FromStringAndSize((char *)NULL, 50);
     if (wf.str == NULL)
         return NULL;
-    wf.ptr = PyBytes_AS_STRING((PyBytesObject *)wf.str);
+    wf.ptr = wf.buf = PyBytes_AS_STRING((PyBytesObject *)wf.str);
     wf.end = wf.ptr + PyBytes_Size(wf.str);
     wf.error = WFERR_OK;
-    wf.depth = 0;
     wf.version = version;
-    if (version >= 3) {
-        if ((wf.refs = PyDict_New()) == NULL) {
-            Py_DECREF(wf.str);
-            return NULL;
-        }
-    } else
-        wf.refs = NULL;
+    if (w_init_refs(&wf, version)) {
+        Py_DECREF(wf.str);
+        return NULL;
+    }
     w_object(x, &wf);
-    Py_XDECREF(wf.refs);
+    w_clear_refs(&wf);
     if (wf.str != NULL) {
         char *base = PyBytes_AS_STRING((PyBytesObject *)wf.str);
         if (wf.ptr - base > PY_SSIZE_T_MAX) {
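
The new writer buffers into p->buf and grows the backing object through w_reserve(): small buffers grow by their current size plus 1024 bytes, buffers past 16 MB grow by 12.5%, and the increment is never less than the number of bytes requested. A standalone sketch of that policy (names are illustrative):

#include <stddef.h>

/* Sketch only: the over-allocation policy of w_reserve(), in isolation.
   Returns the new capacity, or 0 if growing would overflow size_t. */
static size_t
grow_policy_sketch(size_t size, size_t needed)
{
    size_t delta;
    if (size > 16 * 1024 * 1024)
        delta = size >> 3;          /* 12.5% growth for large buffers */
    else
        delta = size + 1024;        /* roughly double small buffers */
    if (delta < needed)
        delta = needed;             /* always satisfy the current request */
    if (delta > (size_t)-1 - size)
        return 0;                   /* overflow */
    return size + delta;
}
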
diff --git a/Python/peephole.c b/Python/peephole.c
index 4185462..2f8f0e5 100644
--- a/Python/peephole.c
+++ b/Python/peephole.c
@@ -18,7 +18,11 @@
     || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP)
 #define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP)
 #define GETJUMPTGT(arr, i) (GETARG(arr,i) + (ABSOLUTE_JUMP(arr[i]) ? 0 : i+3))
-#define SETARG(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255
+#define SETARG(arr, i, val) do {                            \
+    assert(0 <= val && val <= 0xffff);                      \
+    arr[i+2] = (unsigned char)(((unsigned int)val)>>8);     \
+    arr[i+1] = (unsigned char)(((unsigned int)val) & 255);  \
+} while(0)
 #define CODESIZE(op)  (HAS_ARG(op) ? 3 : 1)
 #define ISBASICBLOCK(blocks, start, bytes) \
     (blocks[start]==blocks[start+bytes-1])
@@ -290,7 +294,7 @@
 static unsigned int *
 markblocks(unsigned char *code, Py_ssize_t len)
 {
-    unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int));
+    unsigned int *blocks = PyMem_New(unsigned int, len);
     int i,j, opcode, blockcnt = 0;
 
     if (blocks == NULL) {
@@ -355,7 +359,8 @@
     unsigned char *codestr = NULL;
     unsigned char *lineno;
     int *addrmap = NULL;
-    int new_line, cum_orig_line, last_line, tabsiz;
+    int new_line, cum_orig_line, last_line;
+    Py_ssize_t tabsiz;
     PyObject **const_stack = NULL;
     Py_ssize_t *load_const_stack = NULL;
     Py_ssize_t const_stack_top = -1;
@@ -398,7 +403,7 @@
         goto exitUnchanged;
 
     /* Mapping to new jump targets after NOPs are removed */
-    addrmap = (int *)PyMem_Malloc(codelen * sizeof(int));
+    addrmap = PyMem_New(int, codelen);
     if (addrmap == NULL) {
         PyErr_NoMemory();
         goto exitError;
@@ -660,7 +665,8 @@
 
     /* Fixup linenotab */
     for (i=0, nops=0 ; i<codelen ; i += CODESIZE(codestr[i])) {
-        addrmap[i] = i - nops;
+        assert(i - nops <= INT_MAX);
+        addrmap[i] = (int)(i - nops);
         if (codestr[i] == NOP)
             nops++;
     }
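
The do { ... } while(0) wrapper on SETARG fixes an expansion hazard: the old two-statement macro behaved differently under an unbraced if. A short illustration with a hypothetical call site:

/* Old definition, copied from the removed line above. */
#define SETARG_OLD(arr, i, val) arr[i+2] = val>>8; arr[i+1] = val & 255

static void
patch_sketch(unsigned char *codestr, int i, int target, int needs_patch)
{
    if (needs_patch)
        SETARG_OLD(codestr, i, target);  /* BUG: only the first assignment is
                                            guarded; the second always runs */
    /* The do { ... } while(0) form expands to a single statement, so the same
       call site does what it looks like; the assert and casts additionally
       document that the argument must fit in 16 bits. */
}
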
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index 25a4a60..2a36b53 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -1258,7 +1258,7 @@
         PyErr_PrintEx(0);
     }
     else {
-        tstate = _Py_atomic_load_relaxed(&_PyThreadState_Current);
+        tstate = (PyThreadState*)_Py_atomic_load_relaxed(&_PyThreadState_Current);
         if (tstate != NULL) {
             fputc('\n', stderr);
             fflush(stderr);
diff --git a/Python/pystate.c b/Python/pystate.c
index 2ac2fd5..ee1e469 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -22,6 +22,12 @@
 #endif
 #endif
 
+#if defined _MSC_VER && _MSC_VER >= 1900
+/* Issue #23524: Temporary fix to disable termination due to invalid parameters */
+PyAPI_DATA(void*) _Py_silent_invalid_parameter_handler;
+#include <stdlib.h>
+#endif
+
 #ifdef __cplusplus
 extern "C" {
 #endif
@@ -222,6 +228,11 @@
             tstate->next->prev = tstate;
         interp->tstate_head = tstate;
         HEAD_UNLOCK();
+        
+#if defined _MSC_VER && _MSC_VER >= 1900
+        /* Issue #23524: Temporary fix to disable termination due to invalid parameters */
+        _set_thread_local_invalid_parameter_handler((_invalid_parameter_handler)_Py_silent_invalid_parameter_handler);
+#endif
     }
 
     return tstate;
@@ -403,7 +414,7 @@
 void
 PyThreadState_Delete(PyThreadState *tstate)
 {
-    if (tstate == _Py_atomic_load_relaxed(&_PyThreadState_Current))
+    if (tstate == (PyThreadState*)_Py_atomic_load_relaxed(&_PyThreadState_Current))
         Py_FatalError("PyThreadState_Delete: tstate is still current");
 #ifdef WITH_THREAD
     if (autoInterpreterState && PyThread_get_key_value(autoTLSkey) == tstate)
@@ -662,7 +673,7 @@
 {
     /* Must be the tstate for this thread */
     assert(PyGILState_GetThisThreadState()==tstate);
-    return tstate == _Py_atomic_load_relaxed(&_PyThreadState_Current);
+    return tstate == (PyThreadState*)_Py_atomic_load_relaxed(&_PyThreadState_Current);
 }
 
 /* Internal initialization/finalization functions called by
diff --git a/Python/pytime.c b/Python/pytime.c
index a8460c6..cdaa22e 100644
--- a/Python/pytime.c
+++ b/Python/pytime.c
@@ -7,10 +7,6 @@
 #include <mach/mach_time.h>   /* mach_absolute_time(), mach_timebase_info() */
 #endif
 
-#ifdef MS_WINDOWS
-static OSVERSIONINFOEX winver;
-#endif
-
 static int
 pygettimeofday(_PyTime_timeval *tp, _Py_clock_info_t *info, int raise)
 {
@@ -124,41 +120,11 @@
     static _PyTime_timeval last = {0, -1};
 #endif
 #if defined(MS_WINDOWS)
-    static ULONGLONG (*GetTickCount64) (void) = NULL;
-    static ULONGLONG (CALLBACK *Py_GetTickCount64)(void);
-    static int has_gettickcount64 = -1;
     ULONGLONG result;
 
     assert(info == NULL || raise);
 
-    if (has_gettickcount64 == -1) {
-        /* GetTickCount64() was added to Windows Vista */
-        has_gettickcount64 = (winver.dwMajorVersion >= 6);
-        if (has_gettickcount64) {
-            HINSTANCE hKernel32;
-            hKernel32 = GetModuleHandleW(L"KERNEL32");
-            *(FARPROC*)&Py_GetTickCount64 = GetProcAddress(hKernel32,
-                                                           "GetTickCount64");
-            assert(Py_GetTickCount64 != NULL);
-        }
-    }
-
-    if (has_gettickcount64) {
-        result = Py_GetTickCount64();
-    }
-    else {
-        static DWORD last_ticks = 0;
-        static DWORD n_overflow = 0;
-        DWORD ticks;
-
-        ticks = GetTickCount();
-        if (ticks < last_ticks)
-            n_overflow++;
-        last_ticks = ticks;
-
-        result = (ULONGLONG)n_overflow << 32;
-        result += ticks;
-    }
+    result = GetTickCount64();
 
     tp->tv_sec = result / 1000;
     tp->tv_usec = (result % 1000) * 1000;
@@ -166,10 +132,7 @@
     if (info) {
         DWORD timeAdjustment, timeIncrement;
         BOOL isTimeAdjustmentDisabled, ok;
-        if (has_gettickcount64)
-            info->implementation = "GetTickCount64()";
-        else
-            info->implementation = "GetTickCount()";
+        info->implementation = "GetTickCount64()";
         info->monotonic = 1;
         ok = GetSystemTimeAdjustment(&timeAdjustment, &timeIncrement,
                                      &isTimeAdjustmentDisabled);
@@ -409,14 +372,6 @@
 {
     _PyTime_timeval tv;
 
-#ifdef MS_WINDOWS
-    winver.dwOSVersionInfoSize = sizeof(winver);
-    if (!GetVersionEx((OSVERSIONINFO*)&winver)) {
-        PyErr_SetFromWindowsErr(0);
-        return -1;
-    }
-#endif
-
     /* ensure that the system clock works */
     if (_PyTime_gettimeofday_info(&tv, NULL) < 0)
         return -1;
diff --git a/Python/random.c b/Python/random.c
index 93d300d..031d887 100644
--- a/Python/random.c
+++ b/Python/random.c
@@ -139,14 +139,14 @@
 {
     int fd;
     Py_ssize_t n;
-    struct stat st;
+    struct _Py_stat_struct st;
 
     if (size <= 0)
         return 0;
 
     if (urandom_cache.fd >= 0) {
         /* Does the fd point to the same thing as before? (issue #21207) */
-        if (fstat(urandom_cache.fd, &st)
+        if (_Py_fstat(urandom_cache.fd, &st)
             || st.st_dev != urandom_cache.st_dev
             || st.st_ino != urandom_cache.st_ino) {
             /* Something changed: forget the cached fd (but don't close it,
@@ -178,7 +178,7 @@
             fd = urandom_cache.fd;
         }
         else {
-            if (fstat(fd, &st)) {
+            if (_Py_fstat(fd, &st)) {
                 PyErr_SetFromErrno(PyExc_OSError);
                 close(fd);
                 return -1;
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 0639231..471389c 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -772,6 +772,12 @@
                                  via indexing, the rest are name only */
 };
 
+/* Disable deprecation warnings about GetVersionEx as the result is
+   being passed straight through to the caller, who is responsible for
+   using it correctly. */
+#pragma warning(push)
+#pragma warning(disable:4996)
+
 static PyObject *
 sys_getwindowsversion(PyObject *self)
 {
@@ -803,6 +809,8 @@
     return version;
 }
 
+#pragma warning(pop)
+
 #endif /* MS_WINDOWS */
 
 #ifdef HAVE_DLOPEN
@@ -1681,8 +1689,8 @@
     the shell already prevents that. */
 #if !defined(MS_WINDOWS)
     {
-        struct stat sb;
-        if (fstat(fileno(stdin), &sb) == 0 &&
+        struct _Py_stat_struct sb;
+        if (_Py_fstat(fileno(stdin), &sb) == 0 &&
             S_ISDIR(sb.st_mode)) {
             /* There's nothing more we can do. */
             /* Py_FatalError() will core dump, so just exit. */
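
The #pragma warning(push)/(disable:4996)/(pop) pair above silences the MSVC deprecation warning for GetVersionEx() only around sys_getwindowsversion(). The same bracketing pattern, sketched on a generic deprecated call (the function name is made up):

void some_deprecated_api(void);     /* hypothetical deprecated function */

#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4996)       /* C4996: 'X' was declared deprecated */
#endif

static void
call_deprecated_api_sketch(void)
{
    some_deprecated_api();          /* warning suppressed only here */
}

#ifdef _MSC_VER
#pragma warning(pop)                /* previous warning state restored */
#endif
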
diff --git a/README b/README
index c3e41a3..b00827a 100644
--- a/README
+++ b/README
@@ -1,4 +1,4 @@
-This is Python version 3.5.0 alpha 1
+This is Python version 3.5.0 alpha 2
 ====================================
 
 Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
diff --git a/Tools/msi/buildrelease.bat b/Tools/msi/buildrelease.bat
index 7183a84..09c63a6 100644
--- a/Tools/msi/buildrelease.bat
+++ b/Tools/msi/buildrelease.bat
@@ -26,15 +26,21 @@
 
 
 :CheckOpts
+if "%1" EQU "-h" goto Help
 if "%1" EQU "-c" (set CERTNAME=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "--certificate" (set CERTNAME=%~2) && shift && shift && goto CheckOpts
 if "%1" EQU "-o" (set OUTDIR=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "--out" (set OUTDIR=%~2) && shift && shift && goto CheckOpts
 if "%1" EQU "-D" (set SKIPDOC=1) && shift && goto CheckOpts
+if "%1" EQU "--skip-doc" (set SKIPDOC=1) && shift && goto CheckOpts
 if "%1" EQU "-B" (set SKIPBUILD=1) && shift && goto CheckOpts
+if "%1" EQU "--skip-build" (set SKIPBUILD=1) && shift && goto CheckOpts
 if "%1" EQU "--download" (set DOWNLOAD_URL=%~2) && shift && shift && goto CheckOpts
 if "%1" EQU "--test" (set TESTTARGETDIR=%~2) && shift && shift && goto CheckOpts
 if "%1" EQU "-b" (set TARGET=Build) && shift && goto CheckOpts
-if '%1' EQU '-x86' (set BUILDX86=1) && shift && goto CheckOpts
-if '%1' EQU '-x64' (set BUILDX64=1) && shift && goto CheckOpts
+if "%1" EQU "--build" (set TARGET=Build) && shift && goto CheckOpts
+if "%1" EQU "-x86" (set BUILDX86=1) && shift && goto CheckOpts
+if "%1" EQU "-x64" (set BUILDX64=1) && shift && goto CheckOpts
 
 if not defined BUILDX86 if not defined BUILDX64 (set BUILDX86=1) && (set BUILDX64=1)
 
@@ -42,21 +48,21 @@
 if "%SKIPBUILD%" EQU "1" goto skipdoc
 if "%SKIPDOC%" EQU "1" goto skipdoc
 
+if not defined PYTHON where py -q || echo Cannot find py on path and PYTHON is not set. && exit /B 1
+if not defined SPHINXBUILD where sphinx-build -q || echo Cannot find sphinx-build on path and SPHINXBUILD is not set. && exit /B 1
 call "%D%..\..\doc\make.bat" htmlhelp
 if errorlevel 1 goto :eof
 :skipdoc
 
-where hg >nul 2>nul
-if errorlevel 1 echo Cannot find hg on PATH & exit /B 1
+where hg /q || echo Cannot find Mercurial on PATH && exit /B 1
 
-where dlltool 2>nul >"%TEMP%\dlltool.loc"
-if errorlevel 1 dir "%D%..\..\externals\dlltool.exe" /s/b > "%TEMP%\dlltool.loc"
-if errorlevel 1 echo Cannot find binutils on PATH or in externals & exit /B 1
-set /P DLLTOOL= < "%TEMP%\dlltool.loc"
-set PATH=%PATH%;%DLLTOOL:~,-12%
-set DLLTOOL=
-del "%TEMP%\dlltool.loc"
-
+where dlltool /q && goto skipdlltoolsearch
+set _DLLTOOL_PATH=
+where /R "%D%..\..\externals" dlltool > "%TEMP%\dlltool.loc" 2> nul && set /P _DLLTOOL_PATH= < "%TEMP%\dlltool.loc" & del "%TEMP%\dlltool.loc" 
+if not exist "%_DLLTOOL_PATH%" echo Cannot find binutils on PATH or in externals && exit /B 1
+for %%f in (%_DLLTOOL_PATH%) do set PATH=%PATH%;%%~dpf
+set _DLLTOOL_PATH=
+:skipdlltoolsearch
 
 if defined BUILDX86 (
     call :build x86
@@ -94,14 +100,12 @@
     set RELEASE_URI=%RELEASE_URI_X64%
 )
 
-echo on
 if exist "%BUILD%en-us" (
     echo Deleting %BUILD%en-us
     rmdir /q/s "%BUILD%en-us"
     if errorlevel 1 exit /B
 )
 
-echo on
 if exist "%D%obj\Release_%OBJDIR_PLAT%" (
     echo Deleting "%D%obj\Release_%OBJDIR_PLAT%"
     rmdir /q/s "%D%obj\Release_%OBJDIR_PLAT%"
@@ -136,5 +140,25 @@
     copy /Y "%BUILD%en-us\*.cab" "%OUTDIR%\%OUTDIR_PLAT%"
     copy /Y "%BUILD%en-us\*.exe" "%OUTDIR%\%OUTDIR_PLAT%"
     copy /Y "%BUILD%en-us\*.msi" "%OUTDIR%\%OUTDIR_PLAT%"
+    copy /Y "%BUILD%en-us\*.msu" "%OUTDIR%\%OUTDIR_PLAT%"
 )
 
+exit /B 0
+
+:Help
+echo buildrelease.bat [--out DIR] [-x86] [-x64] [--certificate CERTNAME] [--build] [--skip-build]
+echo                  [--skip-doc] [--download DOWNLOAD URL] [--test TARGETDIR] [-h]
+echo.
+echo    --out (-o)          Specify an additional output directory for installers
+echo    -x86                Build x86 installers
+echo    -x64                Build x64 installers
+echo    --build (-b)        Incrementally build Python rather than rebuilding
+echo    --skip-build (-B)   Do not build Python (just do the installers)
+echo    --skip-doc (-D)     Do not build documentation
+echo    --download          Specify the full download URL for MSIs (should include {2})
+echo    --test              Specify the test directory to run the installer tests
+echo    -h                  Display this help information
+echo.
+echo If no architecture is specified, all architectures will be built.
+echo If --test is not specified, the installer tests are not run.
+echo.
\ No newline at end of file
diff --git a/Tools/msi/bundle/Default.wxl b/Tools/msi/bundle/Default.wxl
index 6efd614..b62faf6 100644
--- a/Tools/msi/bundle/Default.wxl
+++ b/Tools/msi/bundle/Default.wxl
@@ -11,6 +11,11 @@
   <String Id="Uninstalling">Removing</String>
   <String Id="Uninstallation">Uninstall</String>
   
+  <String Id="ElevateForCRTInstall">You will be prompted for Administrator privileges to install a C Runtime Library update (KB2999226).
+
+
+Continue?</String>
+  
   <String Id="CancelButton">&amp;Cancel</String>
   <String Id="CloseButton">&amp;Close</String>
   <String Id="InstallHeader">Install [WixBundleName]</String>
diff --git a/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp b/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp
index ebe4da3..d090443 100644
--- a/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp
+++ b/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp
@@ -10,8 +10,6 @@
 
 #include "pch.h"
 
-static const LPCWSTR WIXBUNDLE_VARIABLE_ELEVATED = L"WixBundleElevated";
-
 static const LPCWSTR PYBA_WINDOW_CLASS = L"PythonBA";
 static const LPCWSTR PYBA_VARIABLE_LAUNCH_TARGET_PATH = L"LaunchTarget";
 static const LPCWSTR PYBA_VARIABLE_LAUNCH_TARGET_ELEVATED_ID = L"LaunchTargetElevatedId";
@@ -232,7 +230,7 @@
     void OnCommand(CONTROL_ID id) {
         LPWSTR defaultDir = nullptr;
         LPWSTR targetDir = nullptr;
-        LONGLONG elevated;
+        LONGLONG elevated, crtInstalled;
         BOOL checked;
         WCHAR wzPath[MAX_PATH] = { };
         BROWSEINFOW browseInfo = { };
@@ -320,12 +318,20 @@
             ReleaseStr(targetDir);
             BalExitOnFailure(hr, "Failed to set install target directory");
 
+            if (!QueryElevateForCrtInstall()) {
+                break;
+            }
+
             OnPlan(BOOTSTRAPPER_ACTION_INSTALL);
             break;
 
         case ID_CUSTOM1_BACK_BUTTON:
             SavePageSettings();
-            GoToPage(PAGE_INSTALL);
+            if (_modifying) {
+                GoToPage(PAGE_MODIFY);
+            } else {
+                GoToPage(PAGE_INSTALL);
+            }
             break;
 
         case ID_INSTALL_CUSTOM_BUTTON: __fallthrough;
@@ -348,6 +354,11 @@
                 ReleaseStr(targetDir);
             }
 
+            checked = ThemeIsControlChecked(_theme, ID_CUSTOM_INSTALL_ALL_USERS_CHECKBOX);
+            if (!checked && !QueryElevateForCrtInstall()) {
+                break;
+            }
+
             OnPlan(_command.action);
             break;
 
@@ -412,6 +423,7 @@
             _engine->SetVariableString(L"InstallAllUsersState", L"disable");
             _engine->SetVariableString(L"TargetDirState", L"disable");
             _engine->SetVariableString(L"CustomBrowseButtonState", L"disable");
+            _modifying = TRUE;
             GoToPage(PAGE_CUSTOM1);
             break;
 
@@ -2306,6 +2318,75 @@
         }
     }
 
+    BOOL IsCrtInstalled() {
+        if (_crtInstalledToken > 0) {
+            return TRUE;
+        } else if (_crtInstalledToken == 0) {
+            return FALSE;
+        }
+        
+        // Check whether at least CRT v10.0.9920.0 is available.
+        // It should only be installed as a Windows Update package, which means
+        // we don't need to worry about 32-bit/64-bit.
+        // However, since the WU package does not include vcruntime140.dll, we
+        // still install that ourselves.
+        LPCWSTR crtFile = L"api-ms-win-crt-runtime-l1-1-0.dll";
+
+        DWORD cbVer = GetFileVersionInfoSizeW(crtFile, nullptr);
+        if (!cbVer) {
+            _crtInstalledToken = 0;
+            return FALSE;
+        }
+
+        void *pData = malloc(cbVer);
+        if (!pData) {
+            _crtInstalledToken = 0;
+            return FALSE;
+        }
+
+        if (!GetFileVersionInfoW(crtFile, 0, cbVer, pData)) {
+            free(pData);
+            _crtInstalledToken = 0;
+            return FALSE;
+        }
+
+        VS_FIXEDFILEINFO *ffi;
+        UINT cb;
+        BOOL result = FALSE;
+
+        if (VerQueryValueW(pData, L"\\", (LPVOID*)&ffi, &cb) &&
+            ffi->dwFileVersionMS == 0x000A0000 && ffi->dwFileVersionLS >= 0x26C00000) {
+            result = TRUE;
+        }
+        
+        free(pData);
+        _crtInstalledToken = result ? 1 : 0;
+        return result;
+    }
+
+    BOOL QueryElevateForCrtInstall() {
+        // Called to prompt the user that even though they think they won't need
+        // to elevate, they actually will because of the CRT install.
+        if (IsCrtInstalled()) {
+            // CRT is already installed - no need to prompt
+            return TRUE;
+        }
+        
+        LONGLONG elevated;
+        HRESULT hr = BalGetNumericVariable(L"WixBundleElevated", &elevated);
+        if (SUCCEEDED(hr) && elevated) {
+            // Already elevated - no need to prompt
+            return TRUE;
+        }
+
+        LOC_STRING *locStr;
+        hr = LocGetString(_wixLoc, L"#(loc.ElevateForCRTInstall)", &locStr);
+        if (FAILED(hr)) {
+            BalLogError(hr, "Failed to get ElevateForCRTInstall string");
+            return FALSE;
+        }
+        return ::MessageBoxW(_hWnd, locStr->wzText, _theme->sczCaption, MB_YESNO) != IDNO;
+    }
 
     HRESULT EvaluateConditions() {
         HRESULT hr = S_OK;
@@ -2493,6 +2574,8 @@
             }
         }
 
+        pEngine->SetVariableNumeric(L"CRTInstalled", IsCrtInstalled() ? 1 : 0);
+
         _wixLoc = nullptr;
         memset(&_bundle, 0, sizeof(_bundle));
         memset(&_conditions, 0, sizeof(_conditions));
@@ -2518,6 +2601,9 @@
 
         _suppressDowngradeFailure = FALSE;
         _suppressRepair = FALSE;
+        _modifying = FALSE;
+
+        _crtInstalledToken = -1;
 
         _overridableVariables = nullptr;
         _taskbarList = nullptr;
@@ -2598,6 +2684,9 @@
 
     BOOL _suppressDowngradeFailure;
     BOOL _suppressRepair;
+    BOOL _modifying;
+
+    int _crtInstalledToken;
 
     STRINGDICT_HANDLE _overridableVariables;
 
diff --git a/Tools/msi/bundle/bundle.targets b/Tools/msi/bundle/bundle.targets
index 8e4eb9e..cc7e445 100644
--- a/Tools/msi/bundle/bundle.targets
+++ b/Tools/msi/bundle/bundle.targets
@@ -10,12 +10,14 @@
         <OutputName Condition="!$(BuildForRelease)">$(OutputName)-$(MajorVersionNumber).$(MinorVersionNumber).$(MicroVersionNumber).$(RevisionNumber)</OutputName>
         <OutputName Condition="$(Platform) == 'x64'">$(OutputName)-amd64</OutputName>
         <OutputName Condition="'$(OutputSuffix)' != ''">$(OutputName)-$(OutputSuffix)</OutputName>
+        <OutputName Condition="'$(Configuration)' == 'Debug'">$(OutputName)-d</OutputName>
+        <TargetName>$(OutputName)</TargetName>
 
         <OutputPath>$(OutputPath)en-us\</OutputPath>
         <OutDir>$(OutputPath)</OutDir>
         
-        <DownloadUrl Condition="'$(DownloadUrl)' == '' and '$(DownloadUrlBase)' != ''">$(DownloadUrlBase.TrimEnd(`/`))/$(MajorVersionNumber).$(MinorVersionNumber).$(MicroVersionNumber)/$(ArchName)$(ReleaseLevelName)/{2}</DownloadUrl>
-        <DefineConstants Condition="'$(DownloadUrl)' != ''">$(DefineConstants);DownloadUrl=$(DownloadUrl)</DefineConstants>
+        <DownloadUrl Condition="'$(DownloadUrl)' == '' and '$(DownloadUrlBase)' != ''">$(DownloadUrlBase.TrimEnd(`/`))/$(MajorVersionNumber).$(MinorVersionNumber).$(MicroVersionNumber)/$(ArchName)$(ReleaseLevelName)/</DownloadUrl>
+        <DefineConstants Condition="'$(DownloadUrl)' != ''">$(DefineConstants);DownloadUrl=$(DownloadUrl){2}</DefineConstants>
         <DefineConstants Condition="'$(DownloadUrl)' == ''">$(DefineConstants);DownloadUrl={2}</DefineConstants>
     </PropertyGroup>
     
@@ -43,6 +45,7 @@
         <Content Include="SideBar.png" />
     </ItemGroup>
     <ItemGroup>
+        <EmbeddedResource Include="bundle.wxl" />
         <WxlTemplate Include="*_en-US.wxl_template" />
     </ItemGroup>
     <ItemGroup>
diff --git a/Tools/msi/bundle/bundle.wxl b/Tools/msi/bundle/bundle.wxl
new file mode 100644
index 0000000..684e0da
--- /dev/null
+++ b/Tools/msi/bundle/bundle.wxl
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<WixLocalization Culture="en-us" xmlns="http://schemas.microsoft.com/wix/2006/localization">
+  <String Id="CRTDescription">C Runtime Update (KB2999226)</String>
+  <String Id="CompileAllDescription">Precompiling standard library</String>
+</WixLocalization>
diff --git a/Tools/msi/bundle/packagegroups/crt.wxs b/Tools/msi/bundle/packagegroups/crt.wxs
index e19b4f9..b079700 100644
--- a/Tools/msi/bundle/packagegroups/crt.wxs
+++ b/Tools/msi/bundle/packagegroups/crt.wxs
@@ -2,6 +2,11 @@
 <Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
     <Fragment>
         <PackageGroup Id="crt">
+            <PackageGroupRef Id="crt_14.0_v6.0" />
+            <PackageGroupRef Id="crt_14.0_v6.1" />
+            <PackageGroupRef Id="crt_14.0_v6.2" />
+            <PackageGroupRef Id="crt_14.0_v6.3" />
+
             <MsiPackage Id="crt_AllUsers"
                         SourceFile="crt.msi"
                         Compressed="$(var.CompressMSI)"
@@ -22,4 +27,42 @@
             </MsiPackage>
         </PackageGroup>
     </Fragment>
+
+    <?foreach ver in v6.0;v6.1;v6.2;v6.3 ?>
+    <?if "$(var.ver)" = "v6.0" ?>
+    <?define msuver=6.0 ?>
+    <?elseif "$(var.ver)" = "v6.1" ?>
+    <?define msuver=6.1 ?>
+    <?elseif "$(var.ver)" = "v6.2" ?>
+    <?define msuver=8-RT ?>
+    <?elseif "$(var.ver)" = "v6.3" ?>
+    <?define msuver=8.1 ?>
+    <?else ?>
+    <?error unknown version $(var.ver) ?>
+    <?endif ?>
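+    <!-- $(var.msuver) selects the matching KB2999226 .msu file name:
+         Windows6.0 = Vista/Server 2008, Windows6.1 = Windows 7,
+         Windows8-RT = Windows 8, Windows8.1 = Windows 8.1 -->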
+
+    <Fragment>
+        <PackageGroup Id="crt_14.0_$(var.ver)">
+            <MsuPackage Id="crt_14.0_$(var.ver)_x86"
+                        KB="2999226"
+                        SourceFile="!(bindpath.redist)\Windows$(var.msuver)-KB2999226-x86.msu"
+                        DisplayName="!(loc.CRTDescription)"
+                        Description="!(loc.CRTDescription)"
+                        Compressed="$(var.CompressMSI)"
+                        DownloadUrl="$(var.DownloadUrl)"
+                        InstallCondition="not CRTInstalled and VersionNT = $(var.ver) and not VersionNT64 and (Include_core or Include_exe or Include_launcher or Include_pip)" />
+
+            <MsuPackage Id="crt_14.0_$(var.ver)_x64"
+                        KB="2999226"
+                        SourceFile="!(bindpath.redist)\Windows$(var.msuver)-KB2999226-x64.msu"
+                        DisplayName="!(loc.CRTDescription)"
+                        Description="!(loc.CRTDescription)"
+                        Compressed="$(var.CompressMSI)"
+                        DownloadUrl="$(var.DownloadUrl)"
+                        InstallCondition="not CRTInstalled and VersionNT64 = $(var.ver) and (Include_core or Include_exe or Include_launcher or Include_pip)" />
+        </PackageGroup>
+    </Fragment>
+
+    <?undef msuver ?>
+    <?endforeach ?>
 </Wix>
\ No newline at end of file
diff --git a/Tools/msi/bundle/postinstall_en-US.wxl_template b/Tools/msi/bundle/postinstall_en-US.wxl_template
deleted file mode 100644
index 5f54aef..0000000
--- a/Tools/msi/bundle/postinstall_en-US.wxl_template
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<WixLocalization Culture="en-us" xmlns="http://schemas.microsoft.com/wix/2006/localization">
-    <String Id="CompileAllDescription">Precompiling standard library</String>
-</WixLocalization>
diff --git a/Tools/msi/core/core.props b/Tools/msi/core/core.props
deleted file mode 100644
index 2320607..0000000
--- a/Tools/msi/core/core.props
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-    <Import Project="..\msi.props" />
-    <ItemGroup>
-        <Compile Include="*.wxs" />
-    </ItemGroup>
-    <ItemGroup>
-        <EmbeddedResource Include="*.wxl" />
-    </ItemGroup>
-
-    <Import Project="..\msi.targets" />
-</Project>
\ No newline at end of file
diff --git a/Tools/msi/core/core.wixproj b/Tools/msi/core/core.wixproj
index 7265119..68e8bab 100644
--- a/Tools/msi/core/core.wixproj
+++ b/Tools/msi/core/core.wixproj
@@ -5,7 +5,15 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>core</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDefaultFeature=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="core.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="core.wxs" />
+        <Compile Include="core_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/core/core.wxs b/Tools/msi/core/core.wxs
index d354e37..0d4fbde 100644
--- a/Tools/msi/core/core.wxs
+++ b/Tools/msi/core/core.wxs
@@ -6,20 +6,8 @@
         
         <PropertyRef Id="UpgradeTable" />
         
-        <?ifdef IncludeDefaultFeature ?>
         <Feature Id="DefaultFeature" AllowAdvertise="no" Title="!(loc.Title)" Description="!(loc.Description)">
             <ComponentGroupRef Id="core_dll" />
         </Feature>
-        <?endif ?>
-        <?ifdef IncludeSymbols ?>
-        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
-            <ComponentGroupRef Id="core_symbols" />
-        </Feature>
-        <?endif ?>
-        <?ifdef IncludeDebugBinaries ?>
-        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
-            <ComponentGroupRef Id="core_dll_d" />
-        </Feature>
-        <?endif ?>
     </Product>
 </Wix>
diff --git a/Tools/msi/core/core_d.wixproj b/Tools/msi/core/core_d.wixproj
index f1f60a9..5b296bf 100644
--- a/Tools/msi/core/core_d.wixproj
+++ b/Tools/msi/core/core_d.wixproj
@@ -5,7 +5,15 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>core_d</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDebugBinaries=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="core.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="core_d.wxs" />
+        <Compile Include="core_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/core/core_d.wxs b/Tools/msi/core/core_d.wxs
new file mode 100644
index 0000000..8422117
--- /dev/null
+++ b/Tools/msi/core/core_d.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
+            <ComponentGroupRef Id="core_dll_d" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/core/core_pdb.wixproj b/Tools/msi/core/core_pdb.wixproj
index bbf9379..9c88389 100644
--- a/Tools/msi/core/core_pdb.wixproj
+++ b/Tools/msi/core/core_pdb.wixproj
@@ -5,7 +5,15 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>core_pdb</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeSymbols=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="core.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="core_pdb.wxs" />
+        <Compile Include="core_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/core/core_pdb.wxs b/Tools/msi/core/core_pdb.wxs
new file mode 100644
index 0000000..c9a558d
--- /dev/null
+++ b/Tools/msi/core/core_pdb.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
+            <ComponentGroupRef Id="core_symbols" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/crt/crt.wixproj b/Tools/msi/crt/crt.wixproj
index 8389b3a..75295ba 100644
--- a/Tools/msi/crt/crt.wixproj
+++ b/Tools/msi/crt/crt.wixproj
@@ -10,7 +10,7 @@
     <Import Project="..\msi.props" />
     <ItemGroup>
         <Compile Include="crt.wxs" />
-        <Compile Include="crt_files.$(VisualStudioVersion).wxs" />
+        <Compile Include="crt_files.wxs" />
     </ItemGroup>
     <ItemGroup>
         <EmbeddedResource Include="*.wxl" />
diff --git a/Tools/msi/crt/crt_files.12.0.wxs b/Tools/msi/crt/crt_files.12.0.wxs
deleted file mode 100644
index f62593f..0000000
--- a/Tools/msi/crt/crt_files.12.0.wxs
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
-    <Fragment>
-        <?if $(var.Platform)~=x64 ?>
-        <?define msvcr120Guid={0835C947-D6D2-4E52-AF14-0231D04E88EA}?>
-        <?else ?>
-        <?define msvcr120Guid={E5B92048-5859-4AF1-AEAD-B97EBF00B087} ?>
-        <?endif ?>
-        <ComponentGroup Id="crt_files">
-            <Component Id="msvcr120.dll_LM" Directory="SystemInstallDirectory" Guid="$(var.msvcr120Guid)" Shared="yes" SharedDllRefCount="yes">
-                <Condition>ALLUSERS=1</Condition>
-                <File Id="msvcr120.dll_LM" Source="!(bindpath.crt)\msvcr120.dll" />
-            </Component>
-            <Component Id="msvcr120.dll_CU" Directory="InstallDirectory" Guid="*">
-                <Condition>NOT ALLUSERS=1</Condition>
-                <File Id="msvcr120.dll_CU" Source="!(bindpath.crt)\msvcr120.dll" />
-            </Component>
-        </ComponentGroup>
-    </Fragment>
-</Wix>
diff --git a/Tools/msi/crt/crt_files.14.0.wxs b/Tools/msi/crt/crt_files.14.0.wxs
deleted file mode 100644
index be682c9..0000000
--- a/Tools/msi/crt/crt_files.14.0.wxs
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
-    <Fragment>
-        <?if $(var.Platform)~=x64 ?>
-        <?define appcrt140Guid={CC160FA9-B519-38EC-B358-B4146E8506C8}?>
-        <?define desktopcrt140Guid={4DB78A79-8D7F-35DD-B0E8-736DE44D25F3}?>
-        <?define vcruntime140Guid={B33258FD-750C-3B42-8BE4-535B48E97DB4}?>
-        <?else ?>
-        <?define appcrt140Guid={E3854F9C-4CFB-3B85-90BD-86AA22D82DC8}?>
-        <?define desktopcrt140Guid={46EAB1CD-C362-3139-BD7E-D8782E65253A}?>
-        <?define vcruntime140Guid={E8E39D3B-4F35-36D8-B892-4B28336FE041}?>
-        <?endif ?>
-        <ComponentGroup Id="crt_files">
-            <Component Id="appcrt140.dll_LM" Directory="SystemInstallDirectory" Guid="$(var.appcrt140Guid)" Shared="yes" SharedDllRefCount="yes">
-                <Condition>ALLUSERS=1</Condition>
-                <File Id="appcrt140.dll_LM" Source="!(bindpath.crt)\appcrt140.dll" />
-            </Component>
-            <Component Id="desktopcrt140.dll_LM" Directory="SystemInstallDirectory" Guid="$(var.desktopcrt140Guid)" Shared="yes" SharedDllRefCount="yes">
-                <Condition>ALLUSERS=1</Condition>
-                <File Id="desktopcrt140.dll_LM" Source="!(bindpath.crt)\desktopcrt140.dll" />
-            </Component>
-            <Component Id="vcruntime140.dll_LM" Directory="SystemInstallDirectory" Guid="$(var.vcruntime140Guid)" Shared="yes" SharedDllRefCount="yes">
-                <Condition>ALLUSERS=1</Condition>
-                <File Id="vcruntime140.dll_LM" Source="!(bindpath.crt)\vcruntime140.dll" />
-            </Component>
-            <Component Id="appcrt140.dll_CU" Directory="InstallDirectory" Guid="*">
-                <Condition>NOT ALLUSERS=1</Condition>
-                <File Id="appcrt140.dll_CU" Source="!(bindpath.crt)\appcrt140.dll" />
-            </Component>
-            <Component Id="desktopcrt140.dll_CU" Directory="InstallDirectory" Guid="*">
-                <Condition>NOT ALLUSERS=1</Condition>
-                <File Id="desktopcrt140.dll_CU" Source="!(bindpath.crt)\desktopcrt140.dll" />
-            </Component>
-            <Component Id="vcruntime140.dll_CU" Directory="InstallDirectory" Guid="*">
-                <Condition>NOT ALLUSERS=1</Condition>
-                <File Id="vcruntime140.dll_CU" Source="!(bindpath.crt)\vcruntime140.dll" />
-            </Component>
-        </ComponentGroup>
-    </Fragment>
-</Wix>
diff --git a/Tools/msi/crt/crt_files.wxs b/Tools/msi/crt/crt_files.wxs
new file mode 100644
index 0000000..46f7d6e
--- /dev/null
+++ b/Tools/msi/crt/crt_files.wxs
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Fragment>
+        <?if $(var.Platform)~=x64 ?>
+        <?define vcruntime140Guid={B33258FD-750C-3B42-8BE4-535B48E97DB4}?>
+        <?else ?>
+        <?define vcruntime140Guid={E8E39D3B-4F35-36D8-B892-4B28336FE041}?>
+        <?endif ?>
+        <ComponentGroup Id="crt_files">
+            <Component Id="vcruntime140.dll_LM" Directory="SystemInstallDirectory" Guid="$(var.vcruntime140Guid)" Shared="yes" SharedDllRefCount="yes">
+                <Condition>ALLUSERS=1</Condition>
+                <File Id="vcruntime140.dll_LM" Source="!(bindpath.crt)\vcruntime140.dll" />
+            </Component>
+            <Component Id="vcruntime140.dll_CU" Directory="InstallDirectory" Guid="*">
+                <Condition>NOT ALLUSERS=1</Condition>
+                <File Id="vcruntime140.dll_CU" Source="!(bindpath.crt)\vcruntime140.dll" />
+            </Component>
+        </ComponentGroup>
+    </Fragment>
+</Wix>
diff --git a/Tools/msi/dev/dev.props b/Tools/msi/dev/dev.props
deleted file mode 100644
index ca2549d..0000000
--- a/Tools/msi/dev/dev.props
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-    <Import Project="..\msi.props" />
-    <PropertyGroup>
-        <DefineConstants Condition="$(BuildForRelease)">
-            $(DefineConstants);
-            IncludeMinGWLib=1;
-        </DefineConstants>
-    </PropertyGroup>
-    <ItemGroup>
-        <Compile Include="*.wxs" />
-    </ItemGroup>
-    <ItemGroup>
-        <EmbeddedResource Include="*.wxl" />
-    </ItemGroup>
-    <ItemGroup>
-        <InstallFiles Include="$(PySourcePath)include\*.h">
-            <SourceBase>$(PySourcePath)</SourceBase>
-            <Source>!(bindpath.src)</Source>
-            <TargetBase>$(PySourcePath)</TargetBase>
-            <Target_></Target_>
-            <Group>dev_include</Group>
-        </InstallFiles>
-    </ItemGroup>
-    
-    <Target Name="BuildMinGWLib"
-            Inputs="$(BuildPath)$(PyDllName).dll"
-            Outputs="$(BuildPath)lib$(PyDllName).a"
-            AfterTargets="PrepareForBuild"
-            Condition="$(BuildForRelease)">
-        <!-- Build libpython##.a as part of this project. This requires gendef and dlltool on the path. -->
-        <PropertyGroup>
-            <_GenDefPlatform>i386</_GenDefPlatform>
-            <_GenDefPlatform Condition="$(Platform) == 'x64'">i386:x86-64</_GenDefPlatform>
-        </PropertyGroup>
-
-        <Exec Command='gendef - "$(BuildPath)$(PyDllName).dll" &gt; "$(IntermediateOutputPath)mingwlib.def"' ContinueOnError="false" />
-        <Exec Command='dlltool --dllname $(PyDllName).dll --def "$(IntermediateOutputPath)mingwlib.def" --output-lib "$(BuildPath)lib$(PyDllName).a" -m $(_GenDefPlatform)' />
-    </Target>
-
-    <Import Project="..\msi.targets" />
-</Project>
\ No newline at end of file
diff --git a/Tools/msi/dev/dev.wixproj b/Tools/msi/dev/dev.wixproj
index 8a2293f..e144878 100644
--- a/Tools/msi/dev/dev.wixproj
+++ b/Tools/msi/dev/dev.wixproj
@@ -5,7 +5,45 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName Condition="'$(OutputName)' == ''">dev</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDefaultFeature=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="dev.props" />
+    <Import Project="..\msi.props" />
+    <PropertyGroup>
+        <DefineConstants Condition="$(BuildForRelease)">
+            $(DefineConstants);
+            IncludeMinGWLib=1;
+        </DefineConstants>
+    </PropertyGroup>
+    <ItemGroup>
+        <Compile Include="dev.wxs" />
+        <Compile Include="dev_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+    <ItemGroup>
+        <InstallFiles Include="$(PySourcePath)include\*.h">
+            <SourceBase>$(PySourcePath)</SourceBase>
+            <Source>!(bindpath.src)</Source>
+            <TargetBase>$(PySourcePath)</TargetBase>
+            <Target_></Target_>
+            <Group>dev_include</Group>
+        </InstallFiles>
+    </ItemGroup>
+    
+    <Target Name="BuildMinGWLib"
+            Inputs="$(BuildPath)$(PyDllName).dll"
+            Outputs="$(BuildPath)lib$(PyDllName).a"
+            AfterTargets="PrepareForBuild"
+            Condition="$(BuildForRelease)">
+        <!-- Build libpython##.a as part of this project. This requires gendef and dlltool on the path. -->
+        <PropertyGroup>
+            <_GenDefPlatform>i386</_GenDefPlatform>
+            <_GenDefPlatform Condition="$(Platform) == 'x64'">i386:x86-64</_GenDefPlatform>
+        </PropertyGroup>
+
+        <Exec Command='gendef - "$(BuildPath)$(PyDllName).dll" &gt; "$(IntermediateOutputPath)mingwlib.def"' ContinueOnError="false" />
+        <Exec Command='dlltool --dllname $(PyDllName).dll --def "$(IntermediateOutputPath)mingwlib.def" --output-lib "$(BuildPath)lib$(PyDllName).a" -m $(_GenDefPlatform)' />
+    </Target>
+
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/dev/dev.wxs b/Tools/msi/dev/dev.wxs
index 48eba6b..f8af9aa 100644
--- a/Tools/msi/dev/dev.wxs
+++ b/Tools/msi/dev/dev.wxs
@@ -6,7 +6,6 @@
         
         <PropertyRef Id="UpgradeTable" />
         
-        <?ifdef IncludeDefaultFeature ?>
         <Feature Id="DefaultFeature" AllowAdvertise="no" Title="!(loc.Title)" Description="!(loc.Description)">
             <ComponentGroupRef Id="dev_include" />
             <ComponentGroupRef Id="dev_pyconfig" />
@@ -15,11 +14,5 @@
             <ComponentGroupRef Id="dev_mingw" />
 <?endif ?>
         </Feature>
-        <?endif ?>
-        <?ifdef IncludeDebugBinaries ?>
-        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
-            <ComponentGroupRef Id="dev_libs_d" />
-        </Feature>
-        <?endif ?>
     </Product>
 </Wix>
diff --git a/Tools/msi/dev/dev_d.wixproj b/Tools/msi/dev/dev_d.wixproj
index 2354d97..b3b0532 100644
--- a/Tools/msi/dev/dev_d.wixproj
+++ b/Tools/msi/dev/dev_d.wixproj
@@ -5,7 +5,15 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>dev_d</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDebugBinaries=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="dev.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="dev_d.wxs" />
+        <Compile Include="dev_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/dev/dev_d.wxs b/Tools/msi/dev/dev_d.wxs
new file mode 100644
index 0000000..c3cb2ea
--- /dev/null
+++ b/Tools/msi/dev/dev_d.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
+            <ComponentGroupRef Id="dev_libs_d" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/doc/doc.wxs b/Tools/msi/doc/doc.wxs
index 6becaf6..ddab83b 100644
--- a/Tools/msi/doc/doc.wxs
+++ b/Tools/msi/doc/doc.wxs
@@ -10,6 +10,7 @@
         <Feature Id="DefaultFeature" AllowAdvertise="no" Title="!(loc.Title)" Description="!(loc.Description)">
             <ComponentGroupRef Id="doc" />
             
+            <?ifdef DocFilename ?>
             <Component Id="doc_shortcut" Directory="MenuDir" Guid="*">
                 <RegistryKey Root="HKMU" Key="[REGISTRYKEY]">
                     <RegistryValue Key="Help\Main Python Documentation" Type="string" Value="[#python.chm]" KeyPath="yes" />
@@ -20,6 +21,7 @@
                           Description="!(loc.ShortcutDescription)" />
                 <RemoveFolder Id="Remove_MenuDir" On="uninstall" />
             </Component>
+            <?endif ?>
 
         </Feature>
     </Product>
diff --git a/Tools/msi/exe/exe.props b/Tools/msi/exe/exe.props
deleted file mode 100644
index bb9b7d3..0000000
--- a/Tools/msi/exe/exe.props
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-    <PropertyGroup>
-        <!-- Shortcut validation is not necessary -->
-        <SuppressICEs>ICE43</SuppressICEs>
-    </PropertyGroup>
-    <Import Project="..\msi.props" />
-    <ItemGroup>
-        <Compile Include="*.wxs" />
-    </ItemGroup>
-    <ItemGroup>
-        <EmbeddedResource Include="*.wxl" />
-        <WxlTemplate Include="*.wxl_template" />
-    </ItemGroup>
-    
-    <Target Name="_GenerateLicense" AfterTargets="PrepareForBuild">
-        <ItemGroup>
-            <LicenseFiles Include="$(PySourcePath)LICENSE;
-                                   crtlicense.txt;
-                                   $(bz2Dir)LICENSE;
-                                   $(opensslDir)LICENSE;
-                                   $(tclDir)license.terms;
-                                   $(tkDir)license.terms;
-                                   $(tixDir)license.terms" />
-            <_LicenseFiles Include="@(LicenseFiles)">
-                <Content>$([System.IO.File]::ReadAllText(%(FullPath)))</Content>
-            </_LicenseFiles>
-        </ItemGroup>
-        
-        <WriteLinesToFile File="$(BuildPath)LICENSE"
-                          Overwrite="true"
-                          Lines="@(_LicenseFiles->'%(Content)')" />
-    </Target>
-        
-    <Import Project="..\msi.targets" />
-</Project>
\ No newline at end of file
diff --git a/Tools/msi/exe/exe.wixproj b/Tools/msi/exe/exe.wixproj
index d3ed65a..d26a603 100644
--- a/Tools/msi/exe/exe.wixproj
+++ b/Tools/msi/exe/exe.wixproj
@@ -5,7 +5,39 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>exe</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDefaultFeature=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="exe.props" />
+    <PropertyGroup>
+        <!-- Shortcut validation is not necessary -->
+        <SuppressICEs>ICE43</SuppressICEs>
+    </PropertyGroup>
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="exe.wxs" />
+        <Compile Include="exe_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+        <WxlTemplate Include="*.wxl_template" />
+    </ItemGroup>
+    
+    <Target Name="_GenerateLicense" AfterTargets="PrepareForBuild">
+        <ItemGroup>
+            <LicenseFiles Include="$(PySourcePath)LICENSE;
+                                   crtlicense.txt;
+                                   $(bz2Dir)LICENSE;
+                                   $(opensslDir)LICENSE;
+                                   $(tclDir)license.terms;
+                                   $(tkDir)license.terms;
+                                   $(tixDir)license.terms" />
+            <_LicenseFiles Include="@(LicenseFiles)">
+                <Content>$([System.IO.File]::ReadAllText(%(FullPath)))</Content>
+            </_LicenseFiles>
+        </ItemGroup>
+        
+        <WriteLinesToFile File="$(BuildPath)LICENSE"
+                          Overwrite="true"
+                          Lines="@(_LicenseFiles->'%(Content)')" />
+    </Target>
+        
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/exe/exe.wxs b/Tools/msi/exe/exe.wxs
index 7b35836..9696c89 100644
--- a/Tools/msi/exe/exe.wxs
+++ b/Tools/msi/exe/exe.wxs
@@ -7,7 +7,6 @@
         <PropertyRef Id="UpgradeTable" />
         <PropertyRef Id="REGISTRYKEY" />
         
-        <?ifdef IncludeDefaultFeature ?>
         <Feature Id="DefaultFeature" AllowAdvertise="no" Title="!(loc.Title)" Description="!(loc.Description)">
             <ComponentGroupRef Id="exe_python" />
             <ComponentGroupRef Id="exe_txt" />
@@ -25,16 +24,5 @@
                 </RegistryKey>
             </Component>
         </Feature>
-        <?endif ?>
-        <?ifdef IncludeSymbols ?>
-        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
-            <ComponentGroupRef Id="exe_python_symbols" />
-        </Feature>
-        <?endif ?>
-        <?ifdef IncludeDebugBinaries ?>
-        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
-            <ComponentGroupRef Id="exe_python_d" />
-        </Feature>
-        <?endif ?>
     </Product>
 </Wix>
diff --git a/Tools/msi/exe/exe_d.wixproj b/Tools/msi/exe/exe_d.wixproj
index 9b57db8..27545ca 100644
--- a/Tools/msi/exe/exe_d.wixproj
+++ b/Tools/msi/exe/exe_d.wixproj
@@ -5,7 +5,16 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>exe_d</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDebugBinaries=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="exe.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="exe_d.wxs" />
+        <Compile Include="exe_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+        <WxlTemplate Include="*.wxl_template" />
+    </ItemGroup>
+
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/exe/exe_d.wxs b/Tools/msi/exe/exe_d.wxs
new file mode 100644
index 0000000..abcb012
--- /dev/null
+++ b/Tools/msi/exe/exe_d.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
+            <ComponentGroupRef Id="exe_python_d" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/exe/exe_en-US.wxl_template b/Tools/msi/exe/exe_en-US.wxl_template
index cc60ef6..577fbe5 100644
--- a/Tools/msi/exe/exe_en-US.wxl_template
+++ b/Tools/msi/exe/exe_en-US.wxl_template
@@ -4,6 +4,4 @@
     <String Id="ShortDescriptor">executable</String>
     <String Id="ShortcutName">Python {{ShortVersion}} ({{Bitness}})</String>
     <String Id="ShortcutDescription">Launches the !(loc.ProductName) interpreter.</String>
-    <String Id="PathTitle">Add to PATH</String>
-    <String Id="PathDescription">Adds the install directory to PATH and .py to PATHEXT.</String>
 </WixLocalization>
diff --git a/Tools/msi/exe/exe_pdb.wixproj b/Tools/msi/exe/exe_pdb.wixproj
index dae3d0c..4f4c869 100644
--- a/Tools/msi/exe/exe_pdb.wixproj
+++ b/Tools/msi/exe/exe_pdb.wixproj
@@ -5,7 +5,16 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>exe_pdb</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeSymbols=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="exe.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="exe_pdb.wxs" />
+        <Compile Include="exe_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+        <WxlTemplate Include="*.wxl_template" />
+    </ItemGroup>
+
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/exe/exe_pdb.wxs b/Tools/msi/exe/exe_pdb.wxs
new file mode 100644
index 0000000..5129ec0
--- /dev/null
+++ b/Tools/msi/exe/exe_pdb.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
+            <ComponentGroupRef Id="exe_python_symbols" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/launcher/launcher.props b/Tools/msi/launcher/launcher.props
deleted file mode 100644
index b145efe..0000000
--- a/Tools/msi/launcher/launcher.props
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-    <Import Project="..\msi.props" />
-    <ItemGroup>
-        <Compile Include="*.wxs" />
-    </ItemGroup>
-    <ItemGroup>
-        <EmbeddedResource Include="*.wxl" />
-    </ItemGroup>
-    
-    <Import Project="..\msi.targets" />
-</Project>
\ No newline at end of file
diff --git a/Tools/msi/launcher/launcher.wixproj b/Tools/msi/launcher/launcher.wixproj
index 2d06df0..73f26a8 100644
--- a/Tools/msi/launcher/launcher.wixproj
+++ b/Tools/msi/launcher/launcher.wixproj
@@ -5,7 +5,16 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>launcher</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDefaultFeature=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="launcher.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="launcher.wxs" />
+        <Compile Include="launcher_files.wxs" />
+        <Compile Include="launcher_reg.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/launcher/launcher.wxs b/Tools/msi/launcher/launcher.wxs
index fe88ba5..b20cff8 100644
--- a/Tools/msi/launcher/launcher.wxs
+++ b/Tools/msi/launcher/launcher.wxs
@@ -8,7 +8,6 @@
         <PropertyRef Id="UpgradeTable" />
         <PropertyRef Id="ARPPRODUCTICON" />
 
-        <?ifdef IncludeDefaultFeature ?>
         <Feature Id="DefaultFeature" AllowAdvertise="no" Title="!(loc.Title)" Description="!(loc.Description)">
             <ComponentGroupRef Id="launcher_exe" Primary="yes" />
         </Feature>
@@ -16,12 +15,6 @@
             <ComponentGroupRef Id="launcher_exe" />
             <ComponentGroupRef Id="launcher_reg" />
         </Feature>
-        <?endif ?>
-        <?ifdef IncludeSymbols ?>
-        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
-            <ComponentGroupRef Id="launcher_pdb" />
-        </Feature>
-        <?endif ?>
         
         <Directory Id="TARGETDIR" Name="SourceDir">
             <Directory Id="LauncherInstallDirectory" />
diff --git a/Tools/msi/launcher/launcher_files.wxs b/Tools/msi/launcher/launcher_files.wxs
index 65f1193..9606dc6 100644
--- a/Tools/msi/launcher/launcher_files.wxs
+++ b/Tools/msi/launcher/launcher_files.wxs
@@ -21,15 +21,4 @@
             </Component>
         </ComponentGroup>
     </Fragment>
-    
-    <Fragment>
-        <ComponentGroup Id="launcher_pdb">
-            <Component Id="py.pdb" Directory="LauncherInstallDirectory" Guid="*">
-                <File Id="py.pdb" Name="py.pdb" Source="py.pdb" />
-            </Component>
-            <Component Id="pyw.pdb" Directory="LauncherInstallDirectory" Guid="*">
-                <File Id="pyw.pdb" Name="pyw.pdb" Source="pyw.pdb" />
-            </Component>
-        </ComponentGroup>
-    </Fragment>
 </Wix>
diff --git a/Tools/msi/launcher/launcher_pdb.wixproj b/Tools/msi/launcher/launcher_pdb.wixproj
deleted file mode 100644
index 3efdd7f..0000000
--- a/Tools/msi/launcher/launcher_pdb.wixproj
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-    <PropertyGroup>
-        <ProjectGuid>{A21D4A23-483F-4822-A0B1-FCB14D8CEBA7}</ProjectGuid>
-        <SchemaVersion>2.0</SchemaVersion>
-        <OutputName>launcher_pdb</OutputName>
-        <OutputType>Package</OutputType>
-        <DefineConstants>IncludeSymbols=1;$(DefineConstants)</DefineConstants>
-    </PropertyGroup>
-    <Import Project="launcher.props" />
-</Project>
\ No newline at end of file
diff --git a/Tools/msi/lib/lib.props b/Tools/msi/lib/lib.props
deleted file mode 100644
index 413ba0c..0000000
--- a/Tools/msi/lib/lib.props
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-    <Import Project="..\msi.props" />
-    <ItemGroup>
-        <Compile Include="*.wxs" />
-    </ItemGroup>
-    <ItemGroup>
-        <EmbeddedResource Include="*.wxl" />
-    </ItemGroup>
-    <ItemGroup>
-        <ExcludeFolders Include="Lib\test;Lib\tests;Lib\tkinter;Lib\idlelib;Lib\turtledemo" />
-        <InstallFiles Include="$(PySourcePath)Lib\**\*"
-                      Exclude="$(PySourcePath)Lib\**\*.pyc;
-                               $(PySourcePath)Lib\**\*.pyo;
-                               $(PySourcePath)Lib\site-packages\README;
-                               @(ExcludeFolders->'$(PySourcePath)%(Identity)\*');
-                               @(ExcludeFolders->'$(PySourcePath)%(Identity)\**\*')">
-            <SourceBase>$(PySourcePath)Lib</SourceBase>
-            <Source>!(bindpath.src)Lib\</Source>
-            <TargetBase>$(PySourcePath)Lib</TargetBase>
-            <Target_>Lib\</Target_>
-            <Group>lib_py</Group>
-        </InstallFiles>
-    </ItemGroup>
-    
-    <Import Project="..\msi.targets" />
-</Project>
\ No newline at end of file
diff --git a/Tools/msi/lib/lib.wixproj b/Tools/msi/lib/lib.wixproj
index e9281da..64e5878 100644
--- a/Tools/msi/lib/lib.wixproj
+++ b/Tools/msi/lib/lib.wixproj
@@ -5,7 +5,30 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>lib</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDefaultFeature=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="lib.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="lib.wxs" />
+        <Compile Include="lib_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+    <ItemGroup>
+        <ExcludeFolders Include="Lib\test;Lib\tests;Lib\tkinter;Lib\idlelib;Lib\turtledemo" />
+        <InstallFiles Include="$(PySourcePath)Lib\**\*"
+                      Exclude="$(PySourcePath)Lib\**\*.pyc;
+                               $(PySourcePath)Lib\**\*.pyo;
+                               $(PySourcePath)Lib\site-packages\README;
+                               @(ExcludeFolders->'$(PySourcePath)%(Identity)\*');
+                               @(ExcludeFolders->'$(PySourcePath)%(Identity)\**\*')">
+            <SourceBase>$(PySourcePath)Lib</SourceBase>
+            <Source>!(bindpath.src)Lib\</Source>
+            <TargetBase>$(PySourcePath)Lib</TargetBase>
+            <Target_>Lib\</Target_>
+            <Group>lib_py</Group>
+        </InstallFiles>
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/lib/lib.wxs b/Tools/msi/lib/lib.wxs
index 89bea62..b1aec75 100644
--- a/Tools/msi/lib/lib.wxs
+++ b/Tools/msi/lib/lib.wxs
@@ -7,22 +7,10 @@
         <PropertyRef Id="UpgradeTable" />
         <PropertyRef Id="REGISTRYKEY" />
         
-        <?ifdef IncludeDefaultFeature ?>
         <Feature Id="DefaultFeature" AllowAdvertise="no" Title="!(loc.Title)" Description="!(loc.Description)">
             <ComponentGroupRef Id="lib_py" />
             <ComponentGroupRef Id="lib_files" />
             <ComponentGroupRef Id="lib_extensions" />
         </Feature>
-        <?endif ?>
-        <?ifdef IncludeSymbols ?>
-        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
-            <ComponentGroupRef Id="lib_extensions_symbols" />
-        </Feature>
-        <?endif ?>
-        <?ifdef IncludeDebugBinaries ?>
-        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
-            <ComponentGroupRef Id="lib_extensions_d" />
-        </Feature>
-        <?endif ?>
     </Product>
 </Wix>
diff --git a/Tools/msi/lib/lib_d.wixproj b/Tools/msi/lib/lib_d.wixproj
index e632319..587a82c 100644
--- a/Tools/msi/lib/lib_d.wixproj
+++ b/Tools/msi/lib/lib_d.wixproj
@@ -5,7 +5,15 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>lib_d</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDebugBinaries=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="lib.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="lib_d.wxs" />
+        <Compile Include="lib_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/lib/lib_d.wxs b/Tools/msi/lib/lib_d.wxs
new file mode 100644
index 0000000..5a5cf70
--- /dev/null
+++ b/Tools/msi/lib/lib_d.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
+            <ComponentGroupRef Id="lib_extensions_d" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/lib/lib_files.wxs b/Tools/msi/lib/lib_files.wxs
index 0c901d3..fa79a8d 100644
--- a/Tools/msi/lib/lib_files.wxs
+++ b/Tools/msi/lib/lib_files.wxs
@@ -46,6 +46,13 @@
             </Component>
             
             <?endforeach ?>
+            
+            <Component Id="sqlite3_d.dll" Directory="DLLs" Guid="*">
+                <File Name="sqlite3_d.dll" KeyPath="yes" />
+            </Component>
+            <Component Id="sqlite3_d.pdb" Directory="DLLs" Guid="*">
+                <File Name="sqlite3_d.pdb" KeyPath="yes" />
+            </Component>
         </ComponentGroup>
     </Fragment>
     <Fragment>
diff --git a/Tools/msi/lib/lib_pdb.wixproj b/Tools/msi/lib/lib_pdb.wixproj
index 21b5912..db1b5bb 100644
--- a/Tools/msi/lib/lib_pdb.wixproj
+++ b/Tools/msi/lib/lib_pdb.wixproj
@@ -5,7 +5,15 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>lib_pdb</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeSymbols=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="lib.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="lib_pdb.wxs" />
+        <Compile Include="lib_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/lib/lib_pdb.wxs b/Tools/msi/lib/lib_pdb.wxs
new file mode 100644
index 0000000..a2be0c9
--- /dev/null
+++ b/Tools/msi/lib/lib_pdb.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
+            <ComponentGroupRef Id="lib_extensions_symbols" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/msi.props b/Tools/msi/msi.props
index 5c02fd4..cbaa6b1 100644
--- a/Tools/msi/msi.props
+++ b/Tools/msi/msi.props
@@ -1,6 +1,7 @@
 <?xml version="1.0" encoding="utf-8"?>
 <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" TreatAsLocalProperty="ReleaseUri">
     <PropertyGroup>
+        <TargetName>$(OutputName)</TargetName>
         <DefineSolutionProperties>false</DefineSolutionProperties>
         <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
         <SuppressIces>$(SuppressIces);ICE03;ICE57;ICE61</SuppressIces>
@@ -48,10 +49,8 @@
         <OutputPath Condition="!HasTrailingSlash($(OutputPath))">$(OutputPath)\</OutputPath>
         <OutDir>$(OutputPath)</OutDir>
         <ReuseCabinetCache>true</ReuseCabinetCache>
-        <CRTModule Condition="'$(VisualStudioVersion)' == '12.0'">$(CommonProgramFiles)\Merge Modules\Microsoft_VC120_CRT_$(Platform).msm</CRTModule>
-        <CRTModule Condition="'$(VisualStudioVersion)' == '14.0'">$(CommonProgramFiles)\Merge Modules\Microsoft_VC140_CRT_$(Platform).msm</CRTModule>
-        <CRTRedist Condition="'$(VisualStudioVersion)' == '12.0'">$([System.IO.Path]::GetFullPath(`$(VS120COMNTOOLS)\..\..\VC\redist\$(Platform)\Microsoft.VC120.CRT`))</CRTRedist>
-        <CRTRedist Condition="'$(VisualStudioVersion)' == '14.0'">$([System.IO.Path]::GetFullPath(`$(VS140COMNTOOLS)\..\..\VC\redist\$(Platform)\Microsoft.VC140.CRT`))</CRTRedist>
+        <CRTRedist Condition="">$([System.IO.Path]::GetFullPath(`$(VS140COMNTOOLS)\..\..\VC\redist\$(Platform)\Microsoft.VC140.CRT`))</CRTRedist>
+        <CRTRedist Condition="'$(CRTRedist)' != '' and !Exists($(CRTRedist))">$(MSBuildThisFileDirectory)\redist\$(Platform)</CRTRedist>
         <CRTRedist Condition="'$(CRTRedist)' != '' and !Exists($(CRTRedist))"></CRTRedist>
 
         <RevisionNumber>$(ReleaseLevelNumber)</RevisionNumber>
@@ -69,13 +68,9 @@
             MajorVersionNumber=$(MajorVersionNumber);
             MinorVersionNumber=$(MinorVersionNumber);
             UpgradeMinimumVersion=$(MajorVersionNumber).$(MinorVersionNumber).0.0;
-            UpgradeMaximumVersion=$(MajorVersionNumber).$(MinorVersionNumber).150.0;
             NextMajorVersionNumber=$(MajorVersionNumber).$([msbuild]::Add($(MinorVersionNumber), 1)).0.0;
             PyDebugExt=$(PyDebugExt);
         </DefineConstants>
-        <DefineConstants Condition="'$(CRTModule)' != '' and Exists($(CRTModule))">
-            $(DefineConstants);CRTModule=$(CRTModule);
-        </DefineConstants>
         <DefineConstants Condition="'$(CRTRedist)' != ''">
             $(DefineConstants);CRTRedist=$(CRTRedist);
         </DefineConstants>
@@ -116,6 +111,9 @@
         <LinkerBindInputPaths Include="$(CRTRedist)" Condition="'$(CRTRedist)' != ''">
             <BindName>crt</BindName>
         </LinkerBindInputPaths>
+        <LinkerBindInputPaths Include="$(MSBuildThisFileDirectory)\redist">
+            <BindName>redist</BindName>
+        </LinkerBindInputPaths>
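+        <!-- Expose Tools\msi\redist as !(bindpath.redist) so the CRT .msu packages can be located -->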
     </ItemGroup>
 
     <Target Name="_ValidateMsiProps" BeforeTargets="PrepareForBuild">
diff --git a/Tools/msi/redist/README.txt b/Tools/msi/redist/README.txt
new file mode 100644
index 0000000..48d26e1
--- /dev/null
+++ b/Tools/msi/redist/README.txt
@@ -0,0 +1,15 @@
+This folder is intentionally left empty in the repository.
+
+The following dependencies may be copied here if they cannot be detected
+automatically by the build scripts:
+
+redist\Windows6.0-KB2999226-x64.msu
+redist\Windows6.0-KB2999226-x86.msu
+redist\Windows6.1-KB2999226-x64.msu
+redist\Windows6.1-KB2999226-x86.msu
+redist\Windows8.1-KB2999226-x64.msu
+redist\Windows8.1-KB2999226-x86.msu
+redist\Windows8-RT-KB2999226-x64.msu
+redist\Windows8-RT-KB2999226-x86.msu
+redist\x64\vcruntime140.dll
+redist\x86\vcruntime140.dll
diff --git a/Tools/msi/tcltk/tcltk.props b/Tools/msi/tcltk/tcltk.props
deleted file mode 100644
index 41d67c5..0000000
--- a/Tools/msi/tcltk/tcltk.props
+++ /dev/null
@@ -1,49 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-    <PropertyGroup>
-        <!-- Shortcut validation is not necessary -->
-        <SuppressICEs>ICE43</SuppressICEs>
-    </PropertyGroup>
-    <Import Project="..\msi.props" />
-    <ItemGroup>
-        <Compile Include="*.wxs" />
-    </ItemGroup>
-    <ItemGroup>
-        <WxlTemplate Include="*.wxl_template" />
-    </ItemGroup>
-    <ItemGroup>
-        <InstallFiles Include="$(tcltkDir)bin\*.dll" Exclude="$(tcltkDir)bin\*g.dll">
-            <SourceBase>$(tcltkDir)</SourceBase>
-            <Source>!(bindpath.tcltk)</Source>
-            <TargetBase>$(tcltkDir)bin</TargetBase>
-            <Target_>DLLs\</Target_>
-            <Group>tcltk_dlls</Group>
-        </InstallFiles>
-        <InstallFiles Include="$(tcltkDir)bin\*g.dll">
-            <SourceBase>$(tcltkDir)</SourceBase>
-            <Source>!(bindpath.tcltk)</Source>
-            <TargetBase>$(tcltkDir)bin</TargetBase>
-            <Target_>DLLs\</Target_>
-            <Group>tcltk_dlls_d</Group>
-        </InstallFiles>
-
-        <InstallFiles Include="$(tcltkDir)lib\**\*">
-            <SourceBase>$(tcltkDir)</SourceBase>
-            <Source>!(bindpath.tcltk)</Source>
-            <TargetBase>$(tcltkDir)lib</TargetBase>
-            <Target_>tcl\</Target_>
-            <Group>tcltk_lib</Group>
-        </InstallFiles>
-
-        <InstallFiles Include="$(PySourcePath)Lib\tkinter\**\*;$(PySourcePath)Lib\idlelib\**\*;$(PySourcePath)Lib\turtledemo\**\*"
-                      Exclude="$(PySourcePath)Lib\**\*.pyc;$(PySourcePath)Lib\**\*.pyo">
-            <SourceBase>$(PySourcePath)</SourceBase>
-            <Source>!(bindpath.src)</Source>
-            <TargetBase>$(PySourcePath)</TargetBase>
-            <Target_></Target_>
-            <Group>tkinter_lib</Group>
-        </InstallFiles>
-    </ItemGroup>
-    
-    <Import Project="..\msi.targets" />
-</Project>
\ No newline at end of file
diff --git a/Tools/msi/tcltk/tcltk.wixproj b/Tools/msi/tcltk/tcltk.wixproj
index 1dcdfad..4d1d74c 100644
--- a/Tools/msi/tcltk/tcltk.wixproj
+++ b/Tools/msi/tcltk/tcltk.wixproj
@@ -5,7 +5,45 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>tcltk</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDefaultFeature=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="tcltk.props" />
+    <PropertyGroup>
+        <!-- Shortcut validation is not necessary -->
+        <SuppressICEs>ICE43</SuppressICEs>
+    </PropertyGroup>
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="tcltk.wxs" />
+        <Compile Include="tcltk_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <WxlTemplate Include="*.wxl_template" />
+    </ItemGroup>
+    <ItemGroup>
+        <InstallFiles Include="$(tcltkDir)bin\*.dll" Exclude="$(tcltkDir)bin\*g.dll">
+            <SourceBase>$(tcltkDir)</SourceBase>
+            <Source>!(bindpath.tcltk)</Source>
+            <TargetBase>$(tcltkDir)bin</TargetBase>
+            <Target_>DLLs\</Target_>
+            <Group>tcltk_dlls</Group>
+        </InstallFiles>
+
+        <InstallFiles Include="$(tcltkDir)lib\**\*">
+            <SourceBase>$(tcltkDir)</SourceBase>
+            <Source>!(bindpath.tcltk)</Source>
+            <TargetBase>$(tcltkDir)lib</TargetBase>
+            <Target_>tcl\</Target_>
+            <Group>tcltk_lib</Group>
+        </InstallFiles>
+
+        <InstallFiles Include="$(PySourcePath)Lib\tkinter\**\*;$(PySourcePath)Lib\idlelib\**\*;$(PySourcePath)Lib\turtledemo\**\*"
+                      Exclude="$(PySourcePath)Lib\**\*.pyc;$(PySourcePath)Lib\**\*.pyo">
+            <SourceBase>$(PySourcePath)</SourceBase>
+            <Source>!(bindpath.src)</Source>
+            <TargetBase>$(PySourcePath)</TargetBase>
+            <Target_></Target_>
+            <Group>tkinter_lib</Group>
+        </InstallFiles>
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/tcltk/tcltk.wxs b/Tools/msi/tcltk/tcltk.wxs
index 4d72200..819fccb 100644
--- a/Tools/msi/tcltk/tcltk.wxs
+++ b/Tools/msi/tcltk/tcltk.wxs
@@ -15,7 +15,6 @@
         
         <Condition Message="!(loc.NoPython)">PYTHON_EXE</Condition>
 
-        <?ifdef IncludeDefaultFeature ?>
         <Feature Id="DefaultFeature" AllowAdvertise="no" Title="!(loc.Title)" Description="!(loc.Description)">
             <ComponentGroupRef Id="tkinter_extension" />
             <ComponentGroupRef Id="tcltk_dlls" />
@@ -37,17 +36,5 @@
                 </Shortcut>
             </Component>
         </Feature>
-        <?endif ?>
-        <?ifdef IncludeSymbols ?>
-        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
-            <ComponentGroupRef Id="tkinter_extension_symbols" />
-        </Feature>
-        <?endif ?>
-        <?ifdef IncludeDebugBinaries ?>
-        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
-            <ComponentGroupRef Id="tkinter_extension_d" />
-            <ComponentGroupRef Id="tcltk_dlls_d" />
-        </Feature>
-        <?endif ?>
     </Product>
 </Wix>
diff --git a/Tools/msi/tcltk/tcltk_d.wixproj b/Tools/msi/tcltk/tcltk_d.wixproj
index ee9af33..3266190 100644
--- a/Tools/msi/tcltk/tcltk_d.wixproj
+++ b/Tools/msi/tcltk/tcltk_d.wixproj
@@ -5,7 +5,24 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>tcltk_d</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDebugBinaries=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="tcltk.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="tcltk_d.wxs" />
+        <Compile Include="tcltk_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <WxlTemplate Include="*.wxl_template" />
+    </ItemGroup>
+    <ItemGroup>
+        <InstallFiles Include="$(tcltkDir)bin\*g.dll">
+            <SourceBase>$(tcltkDir)</SourceBase>
+            <Source>!(bindpath.tcltk)</Source>
+            <TargetBase>$(tcltkDir)bin</TargetBase>
+            <Target_>DLLs\</Target_>
+            <Group>tcltk_dlls_d</Group>
+        </InstallFiles>
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/tcltk/tcltk_d.wxs b/Tools/msi/tcltk/tcltk_d.wxs
new file mode 100644
index 0000000..7f5048f
--- /dev/null
+++ b/Tools/msi/tcltk/tcltk_d.wxs
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
+            <ComponentGroupRef Id="tkinter_extension_d" />
+            <ComponentGroupRef Id="tcltk_dlls_d" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/tcltk/tcltk_pdb.wixproj b/Tools/msi/tcltk/tcltk_pdb.wixproj
index ac198d4..3370798 100644
--- a/Tools/msi/tcltk/tcltk_pdb.wixproj
+++ b/Tools/msi/tcltk/tcltk_pdb.wixproj
@@ -5,7 +5,15 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>tcltk_pdb</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeSymbols=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="tcltk.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="tcltk_pdb.wxs" />
+        <Compile Include="tcltk_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <WxlTemplate Include="*.wxl_template" />
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/tcltk/tcltk_pdb.wxs b/Tools/msi/tcltk/tcltk_pdb.wxs
new file mode 100644
index 0000000..75c62bb
--- /dev/null
+++ b/Tools/msi/tcltk/tcltk_pdb.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
+            <ComponentGroupRef Id="tkinter_extension_symbols" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/test/test.props b/Tools/msi/test/test.props
deleted file mode 100644
index d269107..0000000
--- a/Tools/msi/test/test.props
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-    <Import Project="..\msi.props" />
-    <ItemGroup>
-        <Compile Include="*.wxs" />
-    </ItemGroup>
-    <ItemGroup>
-        <EmbeddedResource Include="*.wxl" />
-    </ItemGroup>
-    <ItemGroup>
-        <InstallFiles Include="$(PySourcePath)Lib\test\**\*"
-                      Exclude="$(PySourcePath)Lib\**\*.pyc;$(PySourcePath)Lib\**\*.pyo">
-            <SourceBase>$(PySourcePath)</SourceBase>
-            <Source>!(bindpath.src)</Source>
-            <TargetBase>$(PySourcePath)</TargetBase>
-            <Target_></Target_>
-            <Group>test_py</Group>
-        </InstallFiles>
-    </ItemGroup>
-    
-    <Import Project="..\msi.targets" />
-</Project>
\ No newline at end of file
diff --git a/Tools/msi/test/test.wixproj b/Tools/msi/test/test.wixproj
index d747cc0..8347e3f 100644
--- a/Tools/msi/test/test.wixproj
+++ b/Tools/msi/test/test.wixproj
@@ -5,7 +5,25 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>test</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDefaultFeature=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="test.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="test.wxs" />
+        <Compile Include="test_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+    <ItemGroup>
+        <InstallFiles Include="$(PySourcePath)Lib\test\**\*"
+                      Exclude="$(PySourcePath)Lib\**\*.pyc;$(PySourcePath)Lib\**\*.pyo">
+            <SourceBase>$(PySourcePath)</SourceBase>
+            <Source>!(bindpath.src)</Source>
+            <TargetBase>$(PySourcePath)</TargetBase>
+            <Target_></Target_>
+            <Group>test_py</Group>
+        </InstallFiles>
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/test/test.wxs b/Tools/msi/test/test.wxs
index d4ac33f..de47785 100644
--- a/Tools/msi/test/test.wxs
+++ b/Tools/msi/test/test.wxs
@@ -7,21 +7,9 @@
         <PropertyRef Id="UpgradeTable" />
         <PropertyRef Id="REGISTRYKEY" />
         
-        <?ifdef IncludeDefaultFeature ?>
         <Feature Id="DefaultFeature" AllowAdvertise="no" Title="!(loc.Title)" Description="!(loc.Description)">
             <ComponentGroupRef Id="test_py" />
             <ComponentGroupRef Id="test_extensions" />
         </Feature>
-        <?endif ?>
-        <?ifdef IncludeSymbols ?>
-        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
-            <ComponentGroupRef Id="test_extensions_symbols" />
-        </Feature>
-        <?endif ?>
-        <?ifdef IncludeDebugBinaries ?>
-        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
-            <ComponentGroupRef Id="test_extensions_d" />
-        </Feature>
-        <?endif ?>
     </Product>
 </Wix>
diff --git a/Tools/msi/test/test_d.wixproj b/Tools/msi/test/test_d.wixproj
index 84d4313..33b04be 100644
--- a/Tools/msi/test/test_d.wixproj
+++ b/Tools/msi/test/test_d.wixproj
@@ -5,7 +5,15 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>test_d</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeDebugBinaries=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="test.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="test_d.wxs" />
+        <Compile Include="test_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/test/test_d.wxs b/Tools/msi/test/test_d.wxs
new file mode 100644
index 0000000..a25afdd
--- /dev/null
+++ b/Tools/msi/test/test_d.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="DebugBinaries" AllowAdvertise="no" Title="!(loc.Title_d)" Description="!(loc.Description_d)">
+            <ComponentGroupRef Id="test_extensions_d" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/test/test_pdb.wixproj b/Tools/msi/test/test_pdb.wixproj
index d607a47..965f0ed 100644
--- a/Tools/msi/test/test_pdb.wixproj
+++ b/Tools/msi/test/test_pdb.wixproj
@@ -5,7 +5,15 @@
         <SchemaVersion>2.0</SchemaVersion>
         <OutputName>test_pdb</OutputName>
         <OutputType>Package</OutputType>
-        <DefineConstants>IncludeSymbols=1;$(DefineConstants)</DefineConstants>
     </PropertyGroup>
-    <Import Project="test.props" />
+    <Import Project="..\msi.props" />
+    <ItemGroup>
+        <Compile Include="test_pdb.wxs" />
+        <Compile Include="test_files.wxs" />
+    </ItemGroup>
+    <ItemGroup>
+        <EmbeddedResource Include="*.wxl" />
+    </ItemGroup>
+    
+    <Import Project="..\msi.targets" />
 </Project>
\ No newline at end of file
diff --git a/Tools/msi/test/test_pdb.wxs b/Tools/msi/test/test_pdb.wxs
new file mode 100644
index 0000000..1510a6f
--- /dev/null
+++ b/Tools/msi/test/test_pdb.wxs
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
+    <Product Id="*" Language="!(loc.LCID)" Name="!(loc.Title)" Version="$(var.Version)" Manufacturer="!(loc.Manufacturer)" UpgradeCode="$(var.UpgradeCode)">
+        <Package InstallerVersion="300" Compressed="yes" InstallScope="perUser" Platform="$(var.Platform)" />
+        <MediaTemplate EmbedCab="yes" CompressionLevel="high" />
+        
+        <PropertyRef Id="UpgradeTable" />
+        
+        <Feature Id="Symbols" AllowAdvertise="no" Title="!(loc.TitlePdb)" Description="!(loc.DescriptionPdb)">
+            <ComponentGroupRef Id="test_extensions_symbols" />
+        </Feature>
+    </Product>
+</Wix>
diff --git a/Tools/msi/testrelease.bat b/Tools/msi/testrelease.bat
index bf1ceff..5c9e015 100644
--- a/Tools/msi/testrelease.bat
+++ b/Tools/msi/testrelease.bat
@@ -11,11 +11,15 @@
 set TESTPERUSER=
 
 :CheckOpts
-if '%1' EQU '-x86' (set TESTX86=1) && shift && goto CheckOpts
-if '%1' EQU '-x64' (set TESTX64=1) && shift && goto CheckOpts
-if '%1' EQU '-t' (set TARGETDIR=%~2) && shift && shift && goto CheckOpts
-if '%1' EQU '-a' (set TESTALLUSER=1) && shift && goto CheckOpts
-if '%1' EQU '-p' (set TESTPERUSER=1) && shift && goto CheckOpts
+if "%1" EQU "-h" goto Help
+if "%1" EQU "-x86" (set TESTX86=1) && shift && goto CheckOpts
+if "%1" EQU "-x64" (set TESTX64=1) && shift && goto CheckOpts
+if "%1" EQU "-t" (set TARGETDIR=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "--target" (set TARGETDIR=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "-a" (set TESTALLUSER=1) && shift && goto CheckOpts
+if "%1" EQU "--alluser" (set TESTALLUSER=1) && shift && goto CheckOpts
+if "%1" EQU "-p" (set TESTPERUSER=1) && shift && goto CheckOpts
+if "%1" EQU "--peruser" (set TESTPERUSER=1) && shift && goto CheckOpts
 
 if not defined TESTX86 if not defined TESTX64 (set TESTX86=1) && (set TESTX64=1)
 if not defined TESTALLUSER if not defined TESTPERUSER (set TESTALLUSER=1) && (set TESTPERUSER=1)
@@ -23,16 +27,18 @@
 
 if defined TESTX86 (
     for %%f in ("%PCBUILD%win32\en-us\*.exe") do (
-        if defined TESTALLUSER call :test "%%~ff" "%TARGETDIR%\%%~nf-alluser" InstallAllUsers=1
-        if defined TESTPERUSER call :test "%%~ff" "%TARGETDIR%\%%~nf-peruser" InstallAllUsers=0
+        if defined TESTALLUSER call :test "%%~ff" "%TARGETDIR%\%%~nf-alluser" "InstallAllUsers=1 CompileAll=1"
+        if errorlevel 1 exit /B
+        if defined TESTPERUSER call :test "%%~ff" "%TARGETDIR%\%%~nf-peruser" "InstallAllUsers=0 CompileAll=0"
         if errorlevel 1 exit /B
     )
 )
 
 if defined TESTX64 (
     for %%f in ("%PCBUILD%amd64\en-us\*.exe") do (
-        if defined TESTALLUSER call :test "%%~ff" "%TARGETDIR%\%%~nf-alluser" InstallAllUsers=1
-        if defined TESTPERUSER call :test "%%~ff" "%TARGETDIR%\%%~nf-peruser" InstallAllUsers=0
+        if defined TESTALLUSER call :test "%%~ff" "%TARGETDIR%\%%~nf-alluser" "InstallAllUsers=1 CompileAll=1"
+        if errorlevel 1 exit /B
+        if defined TESTPERUSER call :test "%%~ff" "%TARGETDIR%\%%~nf-peruser" "InstallAllUsers=0 CompileAll=0"
         if errorlevel 1 exit /B
     )
 )
@@ -47,7 +53,7 @@
 
 @set EXITCODE=0
 @echo Installing %1 into %2
-"%~1" /passive /log "%~2\install\log.txt" %~3 TargetDir="%~2\Python" Include_debug=1 Include_symbols=1 CompileAll=1
+"%~1" /passive /log "%~2\install\log.txt" TargetDir="%~2\Python" Include_debug=1 Include_symbols=1 %~3
 
 @if not errorlevel 1 (
     @echo Printing version
@@ -57,7 +63,7 @@
     @echo Installing package
     "%~2\Python\python.exe" -m pip install azure > "%~2\pip.txt" 2>&1
     @if not errorlevel 1 (
-        "%~2\Python\python.exe" -m pip uninstall -y azure python-dateutil six > "%~2\pip.txt" 2>&1
+        "%~2\Python\python.exe" -m pip uninstall -y azure python-dateutil six >> "%~2\pip.txt" 2>&1
     )
 )
 @if not errorlevel 1 (
@@ -78,3 +84,17 @@
 
 @echo off
 exit /B %EXITCODE%
+
+:Help
+echo testrelease.bat [--target TARGET] [-x86] [-x64] [--alluser] [--peruser] [-h]
+echo.
+echo    --target (-t)   Specify the target directory for installs and logs
+echo    -x86            Run tests for x86 installers
+echo    -x64            Run tests for x64 installers
+echo    --alluser (-a)  Run tests for all-user installs (requires Administrator)
+echo    --peruser (-p)  Run tests for per-user installs
+echo    -h              Display this help information
+echo.
+echo If no test architecture is specified, all architectures will be tested.
+echo If no install type is selected, all install types will be tested.
+echo.
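
The CheckOpts loop above accepts a short and a long spelling for each switch and falls back to testing everything when no architecture or install type is given. For reference, the same option handling maps roughly onto this argparse sketch (illustrative only; the release test itself stays a batch file):

    # Rough argparse equivalent of the CheckOpts loop above (sketch only).
    import argparse

    parser = argparse.ArgumentParser(prog="testrelease.bat")
    parser.add_argument("-t", "--target", help="target directory for installs and logs")
    parser.add_argument("-x86", dest="test_x86", action="store_true", help="test x86 installers")
    parser.add_argument("-x64", dest="test_x64", action="store_true", help="test x64 installers")
    parser.add_argument("-a", "--alluser", action="store_true", help="test all-user installs")
    parser.add_argument("-p", "--peruser", action="store_true", help="test per-user installs")
    opts = parser.parse_args()

    # Defaults mirror the batch file: test everything unless narrowed down.
    if not (opts.test_x86 or opts.test_x64):
        opts.test_x86 = opts.test_x64 = True
    if not (opts.alluser or opts.peruser):
        opts.alluser = opts.peruser = True
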
diff --git a/Tools/msi/tools/tools.wixproj b/Tools/msi/tools/tools.wixproj
index 18e6bab..f43cf33 100644
--- a/Tools/msi/tools/tools.wixproj
+++ b/Tools/msi/tools/tools.wixproj
@@ -8,7 +8,8 @@
     </PropertyGroup>
     <Import Project="..\msi.props" />
     <ItemGroup>
-        <Compile Include="*.wxs" />
+        <Compile Include="tools.wxs" />
+        <Compile Include="tools_files.wxs" />
     </ItemGroup>
     <ItemGroup>
         <EmbeddedResource Include="*.wxl" />
diff --git a/Tools/msi/uploadrelease.bat b/Tools/msi/uploadrelease.bat
new file mode 100644
index 0000000..2064a67
--- /dev/null
+++ b/Tools/msi/uploadrelease.bat
@@ -0,0 +1,46 @@
+@setlocal
+@echo off
+
+set D=%~dp0
+set PCBUILD=%D%..\..\PCBuild\
+
+set HOST=
+set USER=
+set TARGET=
+set DRYRUN=false
+
+:CheckOpts
+if "%1" EQU "-h" goto Help
+if "%1" EQU "-o" (set HOST=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "--host" (set HOST=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "-u" (set USER=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "--user" (set USER=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "-t" (set TARGET=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "--target" (set TARGET=%~2) && shift && shift && goto CheckOpts
+if "%1" EQU "--dry-run" (set DRYRUN=true) && shift && goto CheckOpts
+
+if not defined PLINK where plink > "%TEMP%\plink.loc" 2> nul && set /P PLINK= < "%TEMP%\plink.loc" & del "%TEMP%\plink.loc"
+if not defined PLINK where /R "%ProgramFiles(x86)%" plink > "%TEMP%\plink.loc" 2> nul && set /P PLINK= < "%TEMP%\plink.loc" & del "%TEMP%\plink.loc"
+if not defined PLINK echo Cannot locate plink.exe & exit /B 1
+echo Found plink.exe at %PLINK%
+
+if not defined PSCP where pscp > "%TEMP%\pscp.loc" 2> nul && set /P pscp= < "%TEMP%\pscp.loc" & del "%TEMP%\pscp.loc"
+if not defined PSCP where /R "%ProgramFiles(x86)%" pscp > "%TEMP%\pscp.loc" 2> nul && set /P pscp= < "%TEMP%\pscp.loc" & del "%TEMP%\pscp.loc"
+if not defined PSCP echo Cannot locate pscp.exe & exit /B 1
+echo Found pscp.exe at %PSCP%
+
+call "%PCBUILD%env.bat" > nul 2> nul
+msbuild /v:m /nologo uploadrelease.proj /t:Upload /p:Platform=x86
+msbuild /v:m /nologo uploadrelease.proj /t:Upload /p:Platform=x64 /p:IncludeDoc=false
+
+exit /B 0
+
+:Help
+echo uploadrelease.bat --host HOST --user USERNAME [--target TARGET] [--dry-run] [-h]
+echo.
+echo    --host (-o)     Specify the upload host (required)
+echo    --user (-u)     Specify the user on the host (required)
+echo    --target (-t)   Specify the target directory on the host
+echo    --dry-run       Display commands and filenames without executing them
+echo    -h              Display this help information
+echo.
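
The two pairs of "where" lookups above try PATH first and then recurse under "%ProgramFiles(x86)%" for PuTTY's plink and pscp, honouring a pre-set PLINK or PSCP variable. A rough Python rendition of that discovery, under the same assumptions:

    # Sketch of the plink/pscp discovery done with "where" above.
    import os
    import shutil

    def find_tool(name):
        exe = shutil.which(name)                     # like: where name
        if exe:
            return exe
        root = os.environ.get("ProgramFiles(x86)", r"C:\Program Files (x86)")
        for dirpath, _dirs, files in os.walk(root):  # like: where /R root name
            if name + ".exe" in files:
                return os.path.join(dirpath, name + ".exe")
        raise FileNotFoundError("Cannot locate %s.exe" % name)

    plink = os.environ.get("PLINK") or find_tool("plink")
    pscp = os.environ.get("PSCP") or find_tool("pscp")
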
diff --git a/Tools/msi/uploadrelease.proj b/Tools/msi/uploadrelease.proj
new file mode 100644
index 0000000..a3ebe57
--- /dev/null
+++ b/Tools/msi/uploadrelease.proj
@@ -0,0 +1,61 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+    <PropertyGroup>
+        <ProjectGuid>{2D69F2AB-D5D0-4344-84B5-EF6DB34A9BC9}</ProjectGuid>
+        <OutputName>python</OutputName>
+        <OutputSuffix></OutputSuffix>
+        
+        <DownloadUrlBase Condition="'$(DownloadUrlBase)' == ''">$(TARGET)</DownloadUrlBase>
+        <DownloadUrlBase Condition="'$(DownloadUrlBase)' == ''">/srv/www.python.org/ftp/python</DownloadUrlBase>
+        <IncludeDoc Condition="'$(IncludeDoc)' == ''">true</IncludeDoc>
+        <DryRun Condition="'$(DryRun)' == ''">false</DryRun>
+    </PropertyGroup>
+
+    <Import Project="msi.props" />
+    <Import Project="bundle\bundle.targets" />
+    
+    <PropertyGroup>
+        <EXETarget>$(DownloadUrlBase.TrimEnd(`/`))/$(MajorVersionNumber).$(MinorVersionNumber).$(MicroVersionNumber)</EXETarget>
+        <MSITarget>$(DownloadUrl.TrimEnd(`/`))</MSITarget>
+    </PropertyGroup>
+    
+    <ItemGroup>
+        <File Include="$(OutputPath)\*.msi;$(OutputPath)\*.msu">
+            <CopyTo>$(MSITarget)</CopyTo>
+        </File>
+        <File Include="$(OutputPath)\*.exe">
+            <CopyTo>$(EXETarget)</CopyTo>
+        </File>
+        <File Include="$(PySourcePath)Doc\build\htmlhelp\python$(MajorVersionNumber)$(MinorVersionNumber)$(MicroVersionNumber)$(ReleaseLevelName).chm" Condition="$(IncludeDoc)">
+            <CopyTo>$(EXETarget)</CopyTo>
+        </File>
+    </ItemGroup>
+    
+    <Target Name="_ValidateProperties">
+        <Error Text="No value for Host provided" Condition="'$(Host)' == ''" />
+        <Error Text="No value for User provided" Condition="'$(User)' == ''" />
+        <Error Text="No path for PSCP provided" Condition="'$(PSCP)' == ''" />
+        <Error Text="No path for PLINK provided" Condition="'$(PLINK)' == ''" />
+    </Target>
+    
+    <Target Name="_Upload" Condition="!$(DryRun)">
+        <Exec Command="&quot;$(PLINK)&quot; $(User)@$(Host) mkdir %(File.CopyTo) ^&amp;^&amp; chgrp downloads %(File.CopyTo) ^&amp;^&amp; chmod g-w,o+rx %(File.CopyTo)
+&quot;$(PSCP)&quot; @(File,' ') $(User)@$(Host):%(File.CopyTo)
+&quot;$(PLINK)&quot; $(User)@$(Host) chgrp downloads %(File.CopyTo)/* ^&amp;^&amp; chmod g-w,o+r %(File.CopyTo)/*
+" />
+    </Target>
+    
+    <Target Name="_PrintNames" Condition="$(DryRun)">
+        <Exec Command="echo &quot;$(PLINK)&quot; $(User)@$(Host) mkdir %(File.CopyTo) ^&amp;^&amp; chgrp downloads %(File.CopyTo) ^&amp;^&amp; chmod g-w,o+rx %(File.CopyTo)
+echo &quot;$(PSCP)&quot; @(File,' ') $(User)@$(Host):%(File.CopyTo)
+echo &quot;$(PLINK)&quot; $(User)@$(Host) chgrp downloads %(File.CopyTo)/* ^&amp;^&amp; chmod g-w,o+r %(File.CopyTo)/*
+echo.
+echo." />
+    </Target>
+    
+    <Target Name="Upload" DependsOnTargets="_ValidateProperties;_PrintNames;_Upload" />
+    
+    <Target Name="Build">
+        <Error Text="This script should be invoked using uploadrelease.bat." />
+    </Target>
+</Project>
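
For each distinct CopyTo destination, the Upload target above first creates and re-permissions the directory over plink, then copies the batched files with pscp, then fixes group and mode on the uploaded files; with --dry-run the _PrintNames target echoes the same commands instead of running them. A Python sketch of that dry-run output, with hypothetical host, user and file names:

    # Dry-run sketch of the commands the Upload target emits per destination
    # (user, host and file names below are hypothetical).
    user, host = "builder", "downloads.example.org"
    groups = {"/srv/www.python.org/ftp/python/3.5.0": ["python-3.5.0a2.exe"]}
    for dest, files in groups.items():
        print('"plink" %s@%s mkdir %s && chgrp downloads %s && chmod g-w,o+rx %s'
              % (user, host, dest, dest, dest))
        print('"pscp" %s %s@%s:%s' % (" ".join(files), user, host, dest))
        print('"plink" %s@%s chgrp downloads %s/* && chmod g-w,o+r %s/*'
              % (user, host, dest, dest))
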
diff --git a/Tools/scripts/win_add2path.py b/Tools/scripts/win_add2path.py
index c85bea5..1c9aedc 100644
--- a/Tools/scripts/win_add2path.py
+++ b/Tools/scripts/win_add2path.py
@@ -22,7 +22,8 @@
     scripts = os.path.join(pythonpath, "Scripts")
     appdata = os.environ["APPDATA"]
     if hasattr(site, "USER_SITE"):
-        userpath = site.USER_SITE.replace(appdata, "%APPDATA%")
+        usersite = site.USER_SITE.replace(appdata, "%APPDATA%")
+        userpath = os.path.dirname(usersite)
         userscripts = os.path.join(userpath, "Scripts")
     else:
         userscripts = None
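
site.USER_SITE points at the per-user site-packages directory, while the per-user Scripts directory sits one level up, next to site-packages; the fix therefore derives userpath from the parent of USER_SITE instead of from USER_SITE itself. A minimal sketch of the corrected derivation, assuming that layout:

    # Minimal sketch of the corrected path derivation (assumes the usual
    # Windows per-user layout, with Scripts as a sibling of site-packages).
    import os
    import site

    usersite = site.USER_SITE                        # ...\PythonXY\site-packages
    userpath = os.path.dirname(usersite)             # ...\PythonXY
    userscripts = os.path.join(userpath, "Scripts")  # ...\PythonXY\Scripts
    print(userscripts)
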
diff --git a/configure b/configure
index ecefde4..799c6c4 100755
--- a/configure
+++ b/configure
@@ -6338,7 +6338,11 @@
 	    if test "$Py_DEBUG" = 'true' ; then
 		# Optimization messes up debuggers, so turn it off for
 		# debug builds.
-		OPT="-g -O0 -Wall $STRICT_PROTO"
+                if "$CC" -v --help 2>/dev/null |grep -- -Og > /dev/null; then
+                    OPT="-g -Og -Wall $STRICT_PROTO"
+                else
+                    OPT="-g -O0 -Wall $STRICT_PROTO"
+                fi
 	    else
 		OPT="-g $WRAP -O3 -Wall $STRICT_PROTO"
 	    fi
@@ -15794,6 +15798,38 @@
 $as_echo "$ENSUREPIP" >&6; }
 
 
+# check whether the dirent structure has a d_type field and DT_UNKNOWN is defined
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the dirent structure has a d_type field" >&5
+$as_echo_n "checking if the dirent structure has a d_type field... " >&6; }
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+
+    #include <dirent.h>
+
+    int main() {
+      struct dirent entry;
+      return entry.d_type == DT_UNKNOWN;
+    }
+
+
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  have_dirent_d_type=yes
+else
+  have_dirent_d_type=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $have_dirent_d_type" >&5
+$as_echo "$have_dirent_d_type" >&6; }
+
+if test "$have_dirent_d_type" = yes; then
+
+$as_echo "#define HAVE_DIRENT_D_TYPE 1" >>confdefs.h
+
+fi
+
 # generate output files
 ac_config_files="$ac_config_files Makefile.pre Modules/Setup.config Misc/python.pc Misc/python-config.sh"
 
diff --git a/configure.ac b/configure.ac
index c751622..263ae15 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1128,7 +1128,11 @@
 	    if test "$Py_DEBUG" = 'true' ; then
 		# Optimization messes up debuggers, so turn it off for
 		# debug builds.
-		OPT="-g -O0 -Wall $STRICT_PROTO"
+                if "$CC" -v --help 2>/dev/null |grep -- -Og > /dev/null; then
+                    OPT="-g -Og -Wall $STRICT_PROTO"
+                else
+                    OPT="-g -O0 -Wall $STRICT_PROTO"
+                fi
 	    else
 		OPT="-g $WRAP -O3 -Wall $STRICT_PROTO"
 	    fi
@@ -4940,6 +4944,26 @@
 AC_MSG_RESULT($ENSUREPIP)
 AC_SUBST(ENSUREPIP)
 
+# check whether the dirent structure has a d_type field and DT_UNKNOWN is defined
+AC_MSG_CHECKING(if the dirent structure has a d_type field)
+AC_LINK_IFELSE(
+[
+  AC_LANG_SOURCE([[
+    #include <dirent.h>
+
+    int main() {
+      struct dirent entry;
+      return entry.d_type == DT_UNKNOWN;
+    }
+  ]])
+],[have_dirent_d_type=yes],[have_dirent_d_type=no])
+AC_MSG_RESULT($have_dirent_d_type)
+
+if test "$have_dirent_d_type" = yes; then
+    AC_DEFINE(HAVE_DIRENT_D_TYPE, 1,
+              [Define to 1 if the dirent structure has a d_type field])
+fi
+
 # generate output files
 AC_CONFIG_FILES(Makefile.pre Modules/Setup.config Misc/python.pc Misc/python-config.sh)
 AC_CONFIG_FILES([Modules/ld_so_aix], [chmod +x Modules/ld_so_aix])
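
This probe matters to the new os.scandir() support: when the C library fills in d_type, a DirEntry can usually report whether it is a file or a directory straight from the readdir() result, so no extra stat() call is needed per entry. A small illustration at the Python level (behaviour is identical either way; only the number of system calls changes):

    # When HAVE_DIRENT_D_TYPE is set, entry.is_dir() can usually answer from
    # d_type without an extra stat() per entry (os.scandir() is new in 3.5).
    import os

    def list_subdirs(path):
        return [entry.name for entry in os.scandir(path)
                if entry.is_dir(follow_symlinks=False)]

    print(list_subdirs("."))
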
diff --git a/pyconfig.h.in b/pyconfig.h.in
index 507a7ab..10426c7 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -183,6 +183,9 @@
 /* Define to 1 if you have the <direct.h> header file. */
 #undef HAVE_DIRECT_H
 
+/* Define to 1 if the dirent structure has a d_type field */
+#undef HAVE_DIRENT_D_TYPE
+
 /* Define to 1 if you have the <dirent.h> header file, and it defines `DIR'.
    */
 #undef HAVE_DIRENT_H