Branch merge
diff --git a/Doc/includes/sqlite3/shortcut_methods.py b/Doc/includes/sqlite3/shortcut_methods.py
index 596d87c..71600d4 100644
--- a/Doc/includes/sqlite3/shortcut_methods.py
+++ b/Doc/includes/sqlite3/shortcut_methods.py
@@ -17,5 +17,4 @@
 for row in con.execute("select firstname, lastname from person"):
     print(row)
 
-# Using a dummy WHERE clause to not let SQLite take the shortcut table deletes.
-print("I just deleted", con.execute("delete from person where 1=1").rowcount, "rows")
+print("I just deleted", con.execute("delete from person").rowcount, "rows")
diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst
index 2eb4614..edd2a00 100644
--- a/Doc/library/sqlite3.rst
+++ b/Doc/library/sqlite3.rst
@@ -555,18 +555,17 @@
    attribute, the database engine's own support for the determination of "rows
    affected"/"rows selected" is quirky.
 
-   For ``DELETE`` statements, SQLite reports :attr:`rowcount` as 0 if you make a
-   ``DELETE FROM table`` without any condition.
-
    For :meth:`executemany` statements, the number of modifications are summed up
    into :attr:`rowcount`.
 
    As required by the Python DB API Spec, the :attr:`rowcount` attribute "is -1 in
    case no ``executeXX()`` has been performed on the cursor or the rowcount of the
-   last operation is not determinable by the interface".
+   last operation is not determinable by the interface". This includes ``SELECT``
+   statements because we cannot determine the number of rows a query produced
+   until all rows have been fetched.
 
-   This includes ``SELECT`` statements because we cannot determine the number of
-   rows a query produced until all rows were fetched.
+   With SQLite versions before 3.6.5, :attr:`rowcount` is set to 0 if
+   you execute a ``DELETE FROM table`` statement without any condition.
 
 .. attribute:: Cursor.lastrowid
 
diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py
index f0650dd..8943be8 100644
--- a/Lib/importlib/_bootstrap.py
+++ b/Lib/importlib/_bootstrap.py
@@ -19,28 +19,33 @@
 
 # Bootstrap-related code ######################################################
 
-# TODO: when not on any of these platforms, replace _case_ok() w/
-#       ``lambda x,y: True``.
-CASE_OK_PLATFORMS = 'win', 'cygwin', 'darwin'
+CASE_INSENSITIVE_PLATFORMS = 'win', 'cygwin', 'darwin'
 
-def _case_ok(directory, check):
-    """Check if the directory contains something matching 'check'
-    case-sensitively when running on Windows or OS X.
+def _case_insensitive_ok(directory, check):
+    """Check if the directory contains something matching 'check' exists in the
+    directory.
 
-    If running on Window or OS X and PYTHONCASEOK is a defined environment
-    variable then no case-sensitive check is performed. No check is done to see
-    if what is being checked for exists, so if the platform is not Windows or
-    OS X then assume the case is fine.
+    If PYTHONCASEOK is a defined environment variable then skip the
+    case-sensitivity check.
 
     """
-    if (any(map(sys.platform.startswith, CASE_OK_PLATFORMS)) and
-            b'PYTHONCASEOK' not in _os.environ):
+    if b'PYTHONCASEOK' not in _os.environ:
         if not directory:
             directory = '.'
         return check in _os.listdir(directory)
     else:
         return True
 
+def _case_sensitive_ok(directory, check):
+    """Under case-sensitive filesystems always assume the case matches.
+
+    Since other code does the file existence check, that subsumes a
+    case-sensitivity check.
+
+    """
+    return True
+
+_case_ok = None
 
 
 # TODO: Expose from marshal
@@ -137,26 +142,16 @@
 
 
 def _write_atomic(path, data):
-    """Best-effort function to write data to a path atomically.
-    Be prepared to handle a FileExistsError if concurrent writing of the
-    temporary file is attempted."""
-    # Renaming should be atomic on most platforms (including Windows).
-    # Under Windows, the limitation is that we can't rename() to an existing
-    # path, while POSIX will overwrite it. But here we don't really care
-    # if there is a glimpse of time during which the final pyc file doesn't
-    # exist.
+    """Function to write data to a path atomically."""
     # id() is used to generate a pseudo-random filename.
     path_tmp = '{}.{}'.format(path, id(path))
     fd = _os.open(path_tmp, _os.O_EXCL | _os.O_CREAT | _os.O_WRONLY, 0o666)
     try:
+        # We first write data to a temporary file, and then use os.replace() to
+        # perform an atomic rename.
         with _io.FileIO(fd, 'wb') as file:
             file.write(data)
-        try:
-            _os.rename(path_tmp, path)
-        except FileExistsError:
-            # Windows (if we had access to MoveFileEx, we could overwrite)
-            _os.unlink(path)
-            _os.rename(path_tmp, path)
+        _os.replace(path_tmp, path)
     except OSError:
         try:
             _os.unlink(path_tmp)
@@ -602,9 +597,8 @@
                 return
         try:
             _write_atomic(path, data)
-        except (PermissionError, FileExistsError):
-            # Don't worry if you can't write bytecode or someone is writing
-            # it at the same time.
+        except PermissionError:
+            # Don't worry if you can't write bytecode.
             pass
 
 
@@ -713,10 +707,12 @@
         the default hook, for which ImportError is raised.
 
         """
+        if path == '':
+            path = _os.getcwd()
         try:
             finder = sys.path_importer_cache[path]
         except KeyError:
-            finder = cls._path_hooks(path if path != '' else _os.getcwd())
+            finder = cls._path_hooks(path)
             sys.path_importer_cache[path] = finder
         else:
             if finder is None and default:
@@ -861,6 +857,84 @@
         imp.release_lock()
 
 
+def _resolve_name(name, package, level):
+    """Resolve a relative module name to an absolute one."""
+    dot = len(package)
+    for x in range(level, 1, -1):
+        try:
+            dot = package.rindex('.', 0, dot)
+        except ValueError:
+            raise ValueError("attempted relative import beyond "
+                             "top-level package")
+    if name:
+        return "{0}.{1}".format(package[:dot], name)
+    else:
+        return package[:dot]
+
+
+def _find_module(name, path):
+    """Find a module's loader."""
+    meta_path = sys.meta_path + _IMPLICIT_META_PATH
+    for finder in meta_path:
+        loader = finder.find_module(name, path)
+        if loader is not None:
+            # The parent import may have already imported this module.
+            if name not in sys.modules:
+                return loader
+            else:
+                return sys.modules[name].__loader__
+    else:
+        return None
+
+
+def _set___package__(module):
+    """Set __package__ on a module."""
+    # Watch out for what comes out of sys.modules to not be a module,
+    # e.g. an int.
+    try:
+        module.__package__ = module.__name__
+        if not hasattr(module, '__path__'):
+            module.__package__ = module.__package__.rpartition('.')[0]
+    except AttributeError:
+        pass
+
+
+def _sanity_check(name, package, level):
+    """Verify arguments are "sane"."""
+    if package:
+        if not hasattr(package, 'rindex'):
+            raise ValueError("__package__ not set to a string")
+        elif package not in sys.modules:
+            msg = ("Parent module {0!r} not loaded, cannot perform relative "
+                   "import")
+            raise SystemError(msg.format(package))
+    if not name and level == 0:
+        raise ValueError("Empty module name")
+
+
+def _find_search_path(name, import_):
+    """Find the search path for a module.
+
+    import_ is expected to be a callable which takes the name of a module to
+    import. It is required to decouple the function from importlib.
+
+    """
+    path = None
+    parent = name.rpartition('.')[0]
+    if parent:
+        if parent not in sys.modules:
+            import_(parent)
+        # Backwards-compatibility; be nicer to skip the dict lookup.
+        parent_module = sys.modules[parent]
+        try:
+            path = parent_module.__path__
+        except AttributeError:
+            msg = (_ERR_MSG + '; {} is not a package').format(name, parent)
+            raise ImportError(msg)
+    return parent, path
+
+
 _IMPLICIT_META_PATH = [BuiltinImporter, FrozenImporter, _DefaultPathFinder]
 
 _ERR_MSG = 'No module named {!r}'
@@ -874,27 +948,9 @@
     the loader did not.
 
     """
-    if package:
-        if not hasattr(package, 'rindex'):
-            raise ValueError("__package__ not set to a string")
-        elif package not in sys.modules:
-            msg = ("Parent module {0!r} not loaded, cannot perform relative "
-                   "import")
-            raise SystemError(msg.format(package))
-    if not name and level == 0:
-        raise ValueError("Empty module name")
+    _sanity_check(name, package, level)
     if level > 0:
-        dot = len(package)
-        for x in range(level, 1, -1):
-            try:
-                dot = package.rindex('.', 0, dot)
-            except ValueError:
-                raise ValueError("attempted relative import beyond "
-                                 "top-level package")
-        if name:
-            name = "{0}.{1}".format(package[:dot], name)
-        else:
-            name = package[:dot]
+        name = _resolve_name(name, package, level)
     with _ImportLockContext():
         try:
             module = sys.modules[name]
@@ -905,70 +961,33 @@
             return module
         except KeyError:
             pass
-        parent = name.rpartition('.')[0]
-        path = None
-        if parent:
-            if parent not in sys.modules:
-                _gcd_import(parent)
-            # Backwards-compatibility; be nicer to skip the dict lookup.
-            parent_module = sys.modules[parent]
-            try:
-                path = parent_module.__path__
-            except AttributeError:
-                msg = (_ERR_MSG + '; {} is not a package').format(name, parent)
-                raise ImportError(msg)
-        meta_path = sys.meta_path + _IMPLICIT_META_PATH
-        for finder in meta_path:
-            loader = finder.find_module(name, path)
-            if loader is not None:
-                # The parent import may have already imported this module.
-                if name not in sys.modules:
-                    loader.load_module(name)
-                break
-        else:
+        parent, path = _find_search_path(name, _gcd_import)
+        loader = _find_module(name, path)
+        if loader is None:
             raise ImportError(_ERR_MSG.format(name))
+        elif name not in sys.modules:
+            # The parent import may have already imported this module.
+            loader.load_module(name)
         # Backwards-compatibility; be nicer to skip the dict lookup.
         module = sys.modules[name]
         if parent:
             # Set the module as an attribute on its parent.
+            parent_module = sys.modules[parent]
             setattr(parent_module, name.rpartition('.')[2], module)
         # Set __package__ if the loader did not.
         if not hasattr(module, '__package__') or module.__package__ is None:
-            # Watch out for what comes out of sys.modules to not be a module,
-            # e.g. an int.
-            try:
-                module.__package__ = module.__name__
-                if not hasattr(module, '__path__'):
-                    module.__package__ = module.__package__.rpartition('.')[0]
-            except AttributeError:
-                pass
+            _set___package__(module)
         return module
 
 
-def __import__(name, globals={}, locals={}, fromlist=[], level=0):
-    """Import a module.
+def _return_module(module, name, fromlist, level, import_):
+    """Figure out what __import__ should return.
 
-    The 'globals' argument is used to infer where the import is occuring from
-    to handle relative imports. The 'locals' argument is ignored. The
-    'fromlist' argument specifies what should exist as attributes on the module
-    being imported (e.g. ``from module import <fromlist>``).  The 'level'
-    argument represents the package location to import from in a relative
-    import (e.g. ``from ..pkg import mod`` would have a 'level' of 2).
+    The import_ parameter is a callable which takes the name of a module to
+    import. It is required to decouple the function from assuming importlib's
+    import implementation is desired.
 
     """
-    if not hasattr(name, 'rpartition'):
-        raise TypeError("module name must be str, not {}".format(type(name)))
-    if level == 0:
-        module = _gcd_import(name)
-    else:
-        # __package__ is not guaranteed to be defined or could be set to None
-        # to represent that its proper value is unknown
-        package = globals.get('__package__')
-        if package is None:
-            package = globals['__name__']
-            if '__path__' not in globals:
-                package = package.rpartition('.')[0]
-        module = _gcd_import(name, package, level)
     # The hell that is fromlist ...
     if not fromlist:
         # Return up to the first dot in 'name'. This is complicated by the fact
@@ -989,12 +1008,50 @@
                 fromlist.extend(module.__all__)
             for x in (y for y in fromlist if not hasattr(module,y)):
                 try:
-                    _gcd_import('{0}.{1}'.format(module.__name__, x))
+                    import_('{0}.{1}'.format(module.__name__, x))
                 except ImportError:
                     pass
         return module
 
 
+def _calc___package__(globals):
+    """Calculate what __package__ should be.
+
+    __package__ is not guaranteed to be defined or could be set to None
+    to represent that its proper value is unknown.
+
+    """
+    package = globals.get('__package__')
+    if package is None:
+        package = globals['__name__']
+        if '__path__' not in globals:
+            package = package.rpartition('.')[0]
+    return package
+
+
+def __import__(name, globals={}, locals={}, fromlist=[], level=0):
+    """Import a module.
+
+    The 'globals' argument is used to infer where the import is occurring from
+    to handle relative imports. The 'locals' argument is ignored. The
+    'fromlist' argument specifies what should exist as attributes on the module
+    being imported (e.g. ``from module import <fromlist>``).  The 'level'
+    argument represents the package location to import from in a relative
+    import (e.g. ``from ..pkg import mod`` would have a 'level' of 2).
+
+    """
+    if not hasattr(name, 'rpartition'):
+        raise TypeError("module name must be str, not {}".format(type(name)))
+    if level == 0:
+        module = _gcd_import(name)
+    elif level < 0:
+        raise ValueError('level must be >= 0')
+    else:
+        package = _calc___package__(globals)
+        module = _gcd_import(name, package, level)
+    return _return_module(module, name, fromlist, level, _gcd_import)
+
+
 def _setup(sys_module, imp_module):
     """Setup importlib by importing needed built-in modules and injecting them
     into the global namespace.
@@ -1003,7 +1060,7 @@
     modules, those two modules must be explicitly passed in.
 
     """
-    global imp, sys
+    global _case_ok, imp, sys
     imp = imp_module
     sys = sys_module
 
@@ -1037,6 +1094,11 @@
     setattr(self_module, '_os', os_module)
     setattr(self_module, 'path_sep', path_sep)
 
+    if sys_module.platform in CASE_INSENSITIVE_PLATFORMS:
+        _case_ok = _case_insensitive_ok
+    else:
+        _case_ok = _case_sensitive_ok
+
 
 def _install(sys_module, imp_module):
     """Install importlib as the implementation of import.
diff --git a/Lib/importlib/test/__main__.py b/Lib/importlib/test/__main__.py
index a1990b1..92171b2 100644
--- a/Lib/importlib/test/__main__.py
+++ b/Lib/importlib/test/__main__.py
@@ -7,7 +7,6 @@
 from importlib.test.import_ import util
 import os.path
 from test.support import run_unittest
-import sys
 import unittest
 
 
@@ -15,10 +14,17 @@
     start_dir = os.path.dirname(__file__)
     top_dir = os.path.dirname(os.path.dirname(start_dir))
     test_loader = unittest.TestLoader()
-    if '--builtin' in sys.argv:
-        util.using___import__ = True
     run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
 
 
 if __name__ == '__main__':
+    import argparse
+
+    parser = argparse.ArgumentParser(description='Execute the importlib test '
+                                                  'suite')
+    parser.add_argument('-b', '--builtin', action='store_true', default=False,
+                        help='use builtins.__import__() instead of importlib')
+    args = parser.parse_args()
+    if args.builtin:
+        util.using___import__ = True
     test_main()
diff --git a/Lib/importlib/test/import_/test_api.py b/Lib/importlib/test/import_/test_api.py
index 9075d42..2fa1f90 100644
--- a/Lib/importlib/test/import_/test_api.py
+++ b/Lib/importlib/test/import_/test_api.py
@@ -12,6 +12,13 @@
         with self.assertRaises(TypeError):
             util.import_(42)
 
+    def test_negative_level(self):
+        # Raise ValueError when a negative level is specified.
+        # PEP 328 did away with sys.modules None entries and the ambiguity of
+        # absolute/relative imports.
+        with self.assertRaises(ValueError):
+            util.import_('os', globals(), level=-1)
+
 
 def test_main():
     from test.support import run_unittest
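
The new test pins down the stricter contract: importlib now rejects a negative level outright instead of falling back to the pre-PEP 328 "relative, then absolute" interpretation. A rough sketch of the same check through the public importlib.__import__(), assuming this patch is applied:

    import importlib

    try:
        importlib.__import__('os', globals(), level=-1)
    except ValueError as exc:
        print(exc)   # "level must be >= 0"
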
diff --git a/Lib/importlib/test/import_/test_path.py b/Lib/importlib/test/import_/test_path.py
index b28f25d..61fe226 100644
--- a/Lib/importlib/test/import_/test_path.py
+++ b/Lib/importlib/test/import_/test_path.py
@@ -82,7 +82,7 @@
         with util.import_state(path=[path], path_hooks=[hook]):
             loader = machinery.PathFinder.find_module(module)
             self.assertIs(loader, importer)
-            self.assertIn('', sys.path_importer_cache)
+            self.assertIn(os.getcwd(), sys.path_importer_cache)
 
 
 class DefaultPathFinderTests(unittest.TestCase):
diff --git a/Lib/pty.py b/Lib/pty.py
index 810ebd8..3ccf619 100644
--- a/Lib/pty.py
+++ b/Lib/pty.py
@@ -142,15 +142,21 @@
     Copies
             pty master -> standard output   (master_read)
             standard input -> pty master    (stdin_read)"""
-    while 1:
-        rfds, wfds, xfds = select(
-                [master_fd, STDIN_FILENO], [], [])
+    fds = [master_fd, STDIN_FILENO]
+    while True:
+        rfds, wfds, xfds = select(fds, [], [])
         if master_fd in rfds:
             data = master_read(master_fd)
-            os.write(STDOUT_FILENO, data)
+            if not data:  # Reached EOF.
+                fds.remove(master_fd)
+            else:
+                os.write(STDOUT_FILENO, data)
         if STDIN_FILENO in rfds:
             data = stdin_read(STDIN_FILENO)
-            _writen(master_fd, data)
+            if not data:
+                fds.remove(STDIN_FILENO)
+            else:
+                _writen(master_fd, data)
 
 def spawn(argv, master_read=_read, stdin_read=_read):
     """Create a spawned process."""
diff --git a/Lib/test/test_pty.py b/Lib/test/test_pty.py
index c6fc5e7..4f1251c 100644
--- a/Lib/test/test_pty.py
+++ b/Lib/test/test_pty.py
@@ -8,7 +8,9 @@
 import pty
 import os
 import sys
+import select
 import signal
+import socket
 import unittest
 
 TEST_STRING_1 = b"I wish to buy a fish license.\n"
@@ -194,9 +196,96 @@
 
         # pty.fork() passed.
 
+
+class SmallPtyTests(unittest.TestCase):
+    """These tests don't spawn children or hang."""
+
+    def setUp(self):
+        self.orig_stdin_fileno = pty.STDIN_FILENO
+        self.orig_stdout_fileno = pty.STDOUT_FILENO
+        self.orig_pty_select = pty.select
+        self.fds = []  # A list of file descriptors to close.
+        self.select_rfds_lengths = []
+        self.select_rfds_results = []
+
+    def tearDown(self):
+        pty.STDIN_FILENO = self.orig_stdin_fileno
+        pty.STDOUT_FILENO = self.orig_stdout_fileno
+        pty.select = self.orig_pty_select
+        for fd in self.fds:
+            try:
+                os.close(fd)
+            except:
+                pass
+
+    def _pipe(self):
+        pipe_fds = os.pipe()
+        self.fds.extend(pipe_fds)
+        return pipe_fds
+
+    def _mock_select(self, rfds, wfds, xfds):
+        # This will raise IndexError when no more expected calls exist.
+        self.assertEqual(self.select_rfds_lengths.pop(0), len(rfds))
+        return self.select_rfds_results.pop(0), [], []
+
+    def test__copy_to_each(self):
+        """Test the normal data case on both master_fd and stdin."""
+        read_from_stdout_fd, mock_stdout_fd = self._pipe()
+        pty.STDOUT_FILENO = mock_stdout_fd
+        mock_stdin_fd, write_to_stdin_fd = self._pipe()
+        pty.STDIN_FILENO = mock_stdin_fd
+        socketpair = socket.socketpair()
+        masters = [s.fileno() for s in socketpair]
+        self.fds.extend(masters)
+
+        # Feed data.  Smaller than PIPEBUF.  These writes will not block.
+        os.write(masters[1], b'from master')
+        os.write(write_to_stdin_fd, b'from stdin')
+
+        # Expect two select calls, the last one will cause IndexError
+        pty.select = self._mock_select
+        self.select_rfds_lengths.append(2)
+        self.select_rfds_results.append([mock_stdin_fd, masters[0]])
+        self.select_rfds_lengths.append(2)
+
+        with self.assertRaises(IndexError):
+            pty._copy(masters[0])
+
+        # Test that the right data went to the right places.
+        rfds = select.select([read_from_stdout_fd, masters[1]], [], [], 0)[0]
+        self.assertEqual([read_from_stdout_fd, masters[1]], rfds)
+        self.assertEqual(os.read(read_from_stdout_fd, 20), b'from master')
+        self.assertEqual(os.read(masters[1], 20), b'from stdin')
+
+    def test__copy_eof_on_all(self):
+        """Test the empty read EOF case on both master_fd and stdin."""
+        read_from_stdout_fd, mock_stdout_fd = self._pipe()
+        pty.STDOUT_FILENO = mock_stdout_fd
+        mock_stdin_fd, write_to_stdin_fd = self._pipe()
+        pty.STDIN_FILENO = mock_stdin_fd
+        socketpair = socket.socketpair()
+        masters = [s.fileno() for s in socketpair]
+        self.fds.extend(masters)
+
+        os.close(masters[1])
+        socketpair[1].close()
+        os.close(write_to_stdin_fd)
+
+        # Expect two select calls, the last one will cause IndexError
+        pty.select = self._mock_select
+        self.select_rfds_lengths.append(2)
+        self.select_rfds_results.append([mock_stdin_fd, masters[0]])
+        # We expect that both fds were removed from the fds list as they
+        # both encountered an EOF before the second select call.
+        self.select_rfds_lengths.append(0)
+
+        with self.assertRaises(IndexError):
+            pty._copy(masters[0])
+
+
 def test_main(verbose=None):
     try:
-        run_unittest(PtyTest)
+        run_unittest(SmallPtyTests, PtyTest)
     finally:
         reap_children()
 
diff --git a/Lib/test/test_sched.py b/Lib/test/test_sched.py
index ae82f94..50ada52 100644
--- a/Lib/test/test_sched.py
+++ b/Lib/test/test_sched.py
@@ -12,10 +12,10 @@
         l = []
         fun = lambda x: l.append(x)
         scheduler = sched.scheduler(time.time, time.sleep)
-        for x in [0.05, 0.04, 0.03, 0.02, 0.01]:
+        for x in [0.5, 0.4, 0.3, 0.2, 0.1]:
             z = scheduler.enter(x, 1, fun, (x,))
         scheduler.run()
-        self.assertEqual(l, [0.01, 0.02, 0.03, 0.04, 0.05])
+        self.assertEqual(l, [0.1, 0.2, 0.3, 0.4, 0.5])
 
     def test_enterabs(self):
         l = []
@@ -31,7 +31,7 @@
         fun = lambda x: l.append(x)
         scheduler = sched.scheduler(time.time, time.sleep)
         for priority in [1, 2, 3, 4, 5]:
-            z = scheduler.enter(0.01, priority, fun, (priority,))
+            z = scheduler.enterabs(0.01, priority, fun, (priority,))
         scheduler.run()
         self.assertEqual(l, [1, 2, 3, 4, 5])
 
@@ -39,11 +39,12 @@
         l = []
         fun = lambda x: l.append(x)
         scheduler = sched.scheduler(time.time, time.sleep)
-        event1 = scheduler.enter(0.01, 1, fun, (0.01,))
-        event2 = scheduler.enter(0.02, 1, fun, (0.02,))
-        event3 = scheduler.enter(0.03, 1, fun, (0.03,))
-        event4 = scheduler.enter(0.04, 1, fun, (0.04,))
-        event5 = scheduler.enter(0.05, 1, fun, (0.05,))
+        now = time.time()
+        event1 = scheduler.enterabs(now + 0.01, 1, fun, (0.01,))
+        event2 = scheduler.enterabs(now + 0.02, 1, fun, (0.02,))
+        event3 = scheduler.enterabs(now + 0.03, 1, fun, (0.03,))
+        event4 = scheduler.enterabs(now + 0.04, 1, fun, (0.04,))
+        event5 = scheduler.enterabs(now + 0.05, 1, fun, (0.05,))
         scheduler.cancel(event1)
         scheduler.cancel(event5)
         scheduler.run()
@@ -64,11 +65,12 @@
         l = []
         fun = lambda x: l.append(x)
         scheduler = sched.scheduler(time.time, time.sleep)
-        e5 = scheduler.enter(0.05, 1, fun)
-        e1 = scheduler.enter(0.01, 1, fun)
-        e2 = scheduler.enter(0.02, 1, fun)
-        e4 = scheduler.enter(0.04, 1, fun)
-        e3 = scheduler.enter(0.03, 1, fun)
+        now = time.time()
+        e5 = scheduler.enterabs(now + 0.05, 1, fun)
+        e1 = scheduler.enterabs(now + 0.01, 1, fun)
+        e2 = scheduler.enterabs(now + 0.02, 1, fun)
+        e4 = scheduler.enterabs(now + 0.04, 1, fun)
+        e3 = scheduler.enterabs(now + 0.03, 1, fun)
         # queue property is supposed to return an order list of
         # upcoming events
         self.assertEqual(list(scheduler.queue), [e1, e2, e3, e4, e5])
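
The switch from enter() to enterabs() in these tests is not cosmetic: enter() computes the deadline from timefunc() at each call, so events scheduled in quick succession all carry slightly different times, whereas enterabs() pins each event to an explicit timestamp, making priority and queue ordering deterministic. A minimal sketch of the distinction; the 0.1 second deadline is illustrative:

    import sched
    import time

    s = sched.scheduler(time.time, time.sleep)
    now = time.time()

    # Both events share one absolute deadline, so only priority decides the
    # order; enter() would have baked a slightly later clock value into each.
    s.enterabs(now + 0.1, 2, print, ('second',))
    s.enterabs(now + 0.1, 1, print, ('first',))
    s.run()   # prints 'first', then 'second'
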
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index a0ee377..58fdcd4 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -1352,7 +1352,6 @@
     r"""
     Basic inclusion example (XInclude C.1)
 
-    >>> from xml.etree import ElementTree as ET
     >>> from xml.etree import ElementInclude
 
     >>> document = xinclude_loader("C1.xml")
@@ -1882,12 +1881,7 @@
 
     def __enter__(self):
         from xml.etree import ElementPath
-        if hasattr(ET, '_namespace_map'):
-            self._nsmap = ET._namespace_map
-        else:
-            # when testing the cElementTree alias
-            from xml.etree.ElementTree import _namespace_map
-            self._nsmap = _namespace_map
+        self._nsmap = ET.register_namespace._namespace_map
         # Copy the default namespace mapping
         self._nsmap_copy = self._nsmap.copy()
         # Copy the path cache (should be empty)
@@ -1904,12 +1898,20 @@
         self.checkwarnings.__exit__(*args)
 
 
+class TestAcceleratorNotImported(unittest.TestCase):
+    # Test that the C accelerator was not imported for pyET
+    def test_correct_import_pyET(self):
+        self.assertEqual(pyET.Element.__module__, 'xml.etree.ElementTree')
+
+
 def test_main(module=pyET):
     from test import test_xml_etree
 
     # The same doctests are used for both the Python and the C implementations
     test_xml_etree.ET = module
 
+    support.run_unittest(TestAcceleratorNotImported)
+
     # XXX the C module should give the same warnings as the Python module
     with CleanContext(quiet=(module is not pyET)):
         support.run_doctest(test_xml_etree, verbosity=True)
diff --git a/Lib/test/test_xml_etree_c.py b/Lib/test/test_xml_etree_c.py
index 6f62009..a73d0c4 100644
--- a/Lib/test/test_xml_etree_c.py
+++ b/Lib/test/test_xml_etree_c.py
@@ -5,7 +5,7 @@
 import unittest
 
 cET = import_fresh_module('xml.etree.ElementTree', fresh=['_elementtree'])
-cET_alias = import_fresh_module('xml.etree.cElementTree', fresh=['_elementtree'])
+cET_alias = import_fresh_module('xml.etree.cElementTree', fresh=['_elementtree', 'xml.etree'])
 
 
 # cElementTree specific tests
@@ -46,6 +46,15 @@
         finally:
             data = None
 
+@unittest.skipUnless(cET, 'requires _elementtree')
+class TestAcceleratorImported(unittest.TestCase):
+    # Test that the C accelerator was imported, as expected
+    def test_correct_import_cET(self):
+        self.assertEqual(cET.Element.__module__, '_elementtree')
+
+    def test_correct_import_cET_alias(self):
+        self.assertEqual(cET_alias.Element.__module__, '_elementtree')
+
 
 def test_main():
     from test import test_xml_etree, test_xml_etree_c
@@ -53,7 +62,7 @@
     # Run the tests specific to the C implementation
     support.run_doctest(test_xml_etree_c, verbosity=True)
 
-    support.run_unittest(MiscTests)
+    support.run_unittest(MiscTests, TestAcceleratorImported)
 
     # Run the same test suite as the Python module
     test_xml_etree.test_main(module=cET)
diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py
index 93147eb..defef0d 100644
--- a/Lib/xml/etree/ElementTree.py
+++ b/Lib/xml/etree/ElementTree.py
@@ -1086,6 +1086,8 @@
     # dublin core
     "http://purl.org/dc/elements/1.1/": "dc",
 }
+# For tests and troubleshooting
+register_namespace._namespace_map = _namespace_map
 
 def _raise_serialization_error(text):
     raise TypeError(
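
The register_namespace._namespace_map attribute added above gives tests and interactive troubleshooting a single spelling that reaches the internal URI-to-prefix map, replacing the hasattr() fallback removed from test_xml_etree.py. A short sketch of what it enables; the example URI is made up and the attribute exists only once this patch is applied:

    from xml.etree import ElementTree as ET

    ET.register_namespace('ex', 'http://example.com/ns')

    # The map is keyed by URI and stores the preferred serialization prefix;
    # before this change it was only reachable as the private _namespace_map.
    nsmap = ET.register_namespace._namespace_map
    print(nsmap['http://example.com/ns'])   # 'ex'
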
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 07aa5bf..46f461a 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -858,7 +858,7 @@
 	done
 	$(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(LDVERSION)$(EXE)
 	-if test "$(VERSION)" != "$(LDVERSION)"; then \
-		if test -f $(DESTDIR)$(BINDIR)/$(PYTHON)$(VERSION)$(EXE) -o -h $(DESTDIR)$(BINDIR)/$(PYTHON)$(VERSION)$(EXE); \
+		if test -f $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE) -o -h $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE); \
 		then rm -f $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE); \
 		fi; \
 		(cd $(DESTDIR)$(BINDIR); $(LN) python$(LDVERSION)$(EXE) python$(VERSION)$(EXE)); \
@@ -879,11 +879,11 @@
 	fi
 
 bininstall: altbininstall
-	-if test -f $(DESTDIR)$(BINDIR)/$(PYTHON)3$(EXE) -o -h $(DESTDIR)$(BINDIR)/$(PYTHON)3$(EXE); \
-	then rm -f $(DESTDIR)$(BINDIR)/$(PYTHON)3$(EXE); \
+	-if test -f $(DESTDIR)$(BINDIR)/python3$(EXE) -o -h $(DESTDIR)$(BINDIR)/python3$(EXE); \
+	then rm -f $(DESTDIR)$(BINDIR)/python3$(EXE); \
 	else true; \
 	fi
-	(cd $(DESTDIR)$(BINDIR); $(LN) python$(VERSION)$(EXE) $(PYTHON)3$(EXE))
+	(cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(EXE) python3$(EXE))
 	-if test "$(VERSION)" != "$(LDVERSION)"; then \
 		rm -f $(DESTDIR)$(BINDIR)/python$(VERSION)-config; \
 		(cd $(DESTDIR)$(BINDIR); $(LN) -s python$(LDVERSION)-config python$(VERSION)-config); \
diff --git a/Misc/NEWS b/Misc/NEWS
index 49d1851..07ecfc1 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -466,6 +466,13 @@
 Library
 -------
 
+- Issue #13961: Move importlib over to using os.replace() for atomic renaming.
+
+- Do away with ambiguous level values (as suggested by PEP 328) in
+  importlib.__import__() by raising ValueError when level < 0.
+
+- Issue #2489: pty.spawn could consume 100% CPU when it encountered an EOF.
+
 - Issue #13014: Fix a possible reference leak in SSLSocket.getpeercert().
 
 - Issue #13777: Add PF_SYSTEM sockets on OS X.
@@ -2256,8 +2263,8 @@
 Documentation
 -------------
 
-- Issue #13491: Fix many errors in sqlite3 documentation. Initial
-  patch by Johannes Vogel.
+- Issues #13491 and #13995: Fix many errors in sqlite3 documentation.
+  Initial patch for #13491 by Johannes Vogel.
 
 - Issue #13402: Document absoluteness of sys.executable.
 
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index 97fc07f..e25f354 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -2547,7 +2547,9 @@
     PyModule_AddIntConstant(m, "OP_CIPHER_SERVER_PREFERENCE",
                             SSL_OP_CIPHER_SERVER_PREFERENCE);
     PyModule_AddIntConstant(m, "OP_SINGLE_DH_USE", SSL_OP_SINGLE_DH_USE);
+#ifdef SSL_OP_SINGLE_ECDH_USE
     PyModule_AddIntConstant(m, "OP_SINGLE_ECDH_USE", SSL_OP_SINGLE_ECDH_USE);
+#endif
 #ifdef SSL_OP_NO_COMPRESSION
     PyModule_AddIntConstant(m, "OP_NO_COMPRESSION",
                             SSL_OP_NO_COMPRESSION);
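
Guarding OP_SINGLE_ECDH_USE with #ifdef means the constant simply does not exist in the ssl module when the underlying OpenSSL build lacks it, the same treatment OP_NO_COMPRESSION already receives. Python code that wants to set such options therefore has to probe for them. A hedged sketch using only the standard ssl module:

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    # Options compiled out of _ssl are missing as module attributes entirely,
    # so look them up defensively instead of assuming they exist.
    for name in ('OP_SINGLE_ECDH_USE', 'OP_NO_COMPRESSION'):
        opt = getattr(ssl, name, None)
        if opt is not None:
            ctx.options |= opt
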