Snap for 7156330 from aa3187c0e7f6b4619c6daa829188789daf6eef34 to s-keystone-qcom-release

Change-Id: Id4617a9f31654d6b9f8308124d7129d68779f5fd
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..a0099a3
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,9 @@
+third_party {
+  # would be NOTICE save for GPL in:
+  #   common/py3-stdlib/urllib/robotparser.py
+  #   common/bison/... see:
+  #     common/bison/README.md
+  #     common/bison/NOTICE
+  #     plus copyright headers
+  license_type: RESTRICTED
+}
diff --git a/common/framework/javac_extractor.jar b/common/framework/javac_extractor.jar
index 25c6b7f..3aa9424 100644
--- a/common/framework/javac_extractor.jar
+++ b/common/framework/javac_extractor.jar
Binary files differ
diff --git a/common/framework/turbine.jar b/common/framework/turbine.jar
index b80a8d5..60aa3de 100644
--- a/common/framework/turbine.jar
+++ b/common/framework/turbine.jar
Binary files differ
diff --git a/common/py3-stdlib/LICENSE b/common/py3-stdlib/LICENSE
index 66a3ac8..f42f8ad 100644
--- a/common/py3-stdlib/LICENSE
+++ b/common/py3-stdlib/LICENSE
@@ -59,6 +59,17 @@
 B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
 ===============================================================
 
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
 PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
 --------------------------------------------
 
@@ -252,3 +263,17 @@
 WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
 OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/common/py3-stdlib/__future__.py b/common/py3-stdlib/__future__.py
index d7cb8ac..0e7b555 100644
--- a/common/py3-stdlib/__future__.py
+++ b/common/py3-stdlib/__future__.py
@@ -66,18 +66,20 @@
 # code.h and used by compile.h, so that an editor search will find them here.
 # However, they're not exported in __all__, because they don't really belong to
 # this module.
-CO_NESTED            = 0x0010   # nested_scopes
-CO_GENERATOR_ALLOWED = 0        # generators (obsolete, was 0x1000)
-CO_FUTURE_DIVISION   = 0x20000   # division
-CO_FUTURE_ABSOLUTE_IMPORT = 0x40000 # perform absolute imports by default
-CO_FUTURE_WITH_STATEMENT  = 0x80000   # with statement
-CO_FUTURE_PRINT_FUNCTION  = 0x100000   # print function
-CO_FUTURE_UNICODE_LITERALS = 0x200000 # unicode string literals
+CO_NESTED = 0x0010                      # nested_scopes
+CO_GENERATOR_ALLOWED = 0                # generators (obsolete, was 0x1000)
+CO_FUTURE_DIVISION = 0x20000            # division
+CO_FUTURE_ABSOLUTE_IMPORT = 0x40000     # perform absolute imports by default
+CO_FUTURE_WITH_STATEMENT = 0x80000      # with statement
+CO_FUTURE_PRINT_FUNCTION = 0x100000     # print function
+CO_FUTURE_UNICODE_LITERALS = 0x200000   # unicode string literals
 CO_FUTURE_BARRY_AS_BDFL = 0x400000
-CO_FUTURE_GENERATOR_STOP  = 0x800000 # StopIteration becomes RuntimeError in generators
-CO_FUTURE_ANNOTATIONS     = 0x1000000  # annotations become strings at runtime
+CO_FUTURE_GENERATOR_STOP = 0x800000     # StopIteration becomes RuntimeError in generators
+CO_FUTURE_ANNOTATIONS = 0x1000000       # annotations become strings at runtime
+
 
 class _Feature:
+
     def __init__(self, optionalRelease, mandatoryRelease, compiler_flag):
         self.optional = optionalRelease
         self.mandatory = mandatoryRelease
@@ -88,7 +90,6 @@
 
         This is a 5-tuple, of the same form as sys.version_info.
         """
-
         return self.optional
 
     def getMandatoryRelease(self):
@@ -97,7 +98,6 @@
         This is a 5-tuple, of the same form as sys.version_info, or, if
         the feature was dropped, is None.
         """
-
         return self.mandatory
 
     def __repr__(self):
@@ -105,6 +105,7 @@
                                   self.mandatory,
                                   self.compiler_flag))
 
+
 nested_scopes = _Feature((2, 1, 0, "beta",  1),
                          (2, 2, 0, "alpha", 0),
                          CO_NESTED)
@@ -142,5 +143,5 @@
                           CO_FUTURE_GENERATOR_STOP)
 
 annotations = _Feature((3, 7, 0, "beta", 1),
-                       (4, 0, 0, "alpha", 0),
+                       (3, 10, 0, "alpha", 0),
                        CO_FUTURE_ANNOTATIONS)
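
For reference, the change above retargets the mandatory release of PEP 563 annotations from 4.0 to 3.10 in this snapshot. The schedule is inspectable through the _Feature accessors defined in this file (a minimal sketch; the printed values assume this version of the module):

    import __future__

    feat = __future__.annotations
    print(feat.getOptionalRelease())   # (3, 7, 0, 'beta', 1)
    print(feat.getMandatoryRelease())  # (3, 10, 0, 'alpha', 0) with this change
    print(hex(feat.compiler_flag))     # CO_FUTURE_ANNOTATIONS, 0x1000000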
diff --git a/common/py3-stdlib/_aix_support.py b/common/py3-stdlib/_aix_support.py
new file mode 100644
index 0000000..4550493
--- /dev/null
+++ b/common/py3-stdlib/_aix_support.py
@@ -0,0 +1,89 @@
+"""Shared AIX support functions."""
+
+import sys
+import sysconfig
+
+try:
+    import subprocess
+except ImportError:  # pragma: no cover
+    # _aix_support is used in distutils by setup.py to build C extensions,
+    # before subprocess dependencies like _posixsubprocess are available.
+    import _bootsubprocess as subprocess
+
+
+def _aix_tag(vrtl, bd):
+    # type: (List[int], int) -> str
+    # Infer the ABI bitwidth from maxsize (assuming 64 bit as the default)
+    _sz = 32 if sys.maxsize == (2**31-1) else 64
+    # vrtl[version, release, technology_level]
+    return "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(vrtl[0], vrtl[1], vrtl[2], bd, _sz)
+
+
+# extract version, release and technology level from a VRMF string
+def _aix_vrtl(vrmf):
+    # type: (str) -> List[int]
+    v, r, tl = vrmf.split(".")[:3]
+    return [int(v[-1]), int(r), int(tl)]
+
+
+def _aix_bosmp64():
+    # type: () -> Tuple[str, int]
+    """
+    Return a Tuple[str, int] e.g., ('7.1.4.34', 1806)
+    The fileset bos.mp64 is the AIX kernel. Its VRMF and builddate
+    reflect the current ABI levels of the runtime environment.
+    """
+    # We expect all AIX systems to have lslpp installed in this location
+    out = subprocess.check_output(["/usr/bin/lslpp", "-Lqc", "bos.mp64"])
+    out = out.decode("utf-8")
+    out = out.strip().split(":")  # type: ignore
+    # Use str() and int() to help mypy see types
+    return (str(out[2]), int(out[-1]))
+
+
+def aix_platform():
+    # type: () -> str
+    """
+    AIX filesets are identified by four decimal values: V.R.M.F.
+    V (version) and R (release) can be retrieved using ``uname``.
+    Since 2007, starting with AIX 5.3 TL7, the M value has been
+    included with the fileset bos.mp64 and represents the Technology
+    Level (TL) of AIX. The F (Fix) value also increases, but is not
+    relevant for comparing releases and binary compatibility.
+    For binary compatibility the so-called builddate is needed.
+    Again, the builddate of an AIX release is associated with bos.mp64.
+    AIX ABI compatibility is described as guaranteed at: https://www.ibm.com/\
+    support/knowledgecenter/en/ssw_aix_72/install/binary_compatability.html
+
+    For pep425 purposes the AIX platform tag becomes:
+    "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(v, r, tl, builddate, bitsize)
+    e.g., "aix-6107-1415-32" for AIX 6.1 TL7 bd 1415, 32-bit
+    and, "aix-6107-1415-64" for AIX 6.1 TL7 bd 1415, 64-bit
+    """
+    vrmf, bd = _aix_bosmp64()
+    return _aix_tag(_aix_vrtl(vrmf), bd)
+
+
+# extract vrtl from the BUILD_GNU_TYPE as an int
+def _aix_bgt():
+    # type: () -> List[int]
+    gnu_type = sysconfig.get_config_var("BUILD_GNU_TYPE")
+    if not gnu_type:
+        raise ValueError("BUILD_GNU_TYPE is not defined")
+    return _aix_vrtl(vrmf=gnu_type)
+
+
+def aix_buildtag():
+    # type: () -> str
+    """
+    Return the platform_tag of the system Python was built on.
+    """
+    # AIX_BUILDDATE is defined by configure with:
+    # lslpp -Lcq bos.mp64 | awk -F:  '{ print $NF }'
+    build_date = sysconfig.get_config_var("AIX_BUILDDATE")
+    try:
+        build_date = int(build_date)
+    except (ValueError, TypeError):
+        raise ValueError(f"AIX_BUILDDATE is not defined or invalid: "
+                         f"{build_date!r}")
+    return _aix_tag(_aix_bgt(), build_date)
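
A small illustration of the tag helpers above, using made-up fileset values (AIX 7.2 TL3, builddate 1845); the import assumes this module is on the path, and the result shown assumes a 64-bit interpreter:

    from _aix_support import _aix_tag, _aix_vrtl  # private helpers shown above

    print(_aix_vrtl("7.2.3.35"))      # [7, 2, 3]
    print(_aix_tag([7, 2, 3], 1845))  # 'aix-7203-1845-64' on a 64-bit build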
diff --git a/common/py3-stdlib/_bootsubprocess.py b/common/py3-stdlib/_bootsubprocess.py
new file mode 100644
index 0000000..014782f
--- /dev/null
+++ b/common/py3-stdlib/_bootsubprocess.py
@@ -0,0 +1,97 @@
+"""
+Basic subprocess implementation for POSIX which only uses os functions. It
+only implements the features required by setup.py to build C extension
+modules when subprocess is unavailable. setup.py is not used on Windows.
+"""
+import os
+
+
+# distutils.spawn, used by distutils.command.build_ext,
+# calls subprocess.Popen().wait()
+class Popen:
+    def __init__(self, cmd, env=None):
+        self._cmd = cmd
+        self._env = env
+        self.returncode = None
+
+    def wait(self):
+        pid = os.fork()
+        if pid == 0:
+            # Child process
+            try:
+                if self._env is not None:
+                    os.execve(self._cmd[0], self._cmd, self._env)
+                else:
+                    os.execv(self._cmd[0], self._cmd)
+            finally:
+                os._exit(1)
+        else:
+            # Parent process
+            _, status = os.waitpid(pid, 0)
+            self.returncode = os.waitstatus_to_exitcode(status)
+
+        return self.returncode
+
+
+def _check_cmd(cmd):
+    # Use regex [a-zA-Z0-9./-]+: reject empty string, space, etc.
+    safe_chars = []
+    for first, last in (("a", "z"), ("A", "Z"), ("0", "9")):
+        for ch in range(ord(first), ord(last) + 1):
+            safe_chars.append(chr(ch))
+    safe_chars.append("./-")
+    safe_chars = ''.join(safe_chars)
+
+    if isinstance(cmd, (tuple, list)):
+        check_strs = cmd
+    elif isinstance(cmd, str):
+        check_strs = [cmd]
+    else:
+        return False
+
+    for arg in check_strs:
+        if not isinstance(arg, str):
+            return False
+        if not arg:
+            # reject empty string
+            return False
+        for ch in arg:
+            if ch not in safe_chars:
+                return False
+
+    return True
+
+
+# _aix_support, used by distutils.util, calls subprocess.check_output()
+def check_output(cmd, **kwargs):
+    if kwargs:
+        raise NotImplementedError(repr(kwargs))
+
+    if not _check_cmd(cmd):
+        raise ValueError(f"unsupported command: {cmd!r}")
+
+    tmp_filename = "check_output.tmp"
+    if not isinstance(cmd, str):
+        cmd = " ".join(cmd)
+    cmd = f"{cmd} >{tmp_filename}"
+
+    try:
+        # system() spawns a shell
+        status = os.system(cmd)
+        exitcode = os.waitstatus_to_exitcode(status)
+        if exitcode:
+            raise ValueError(f"Command {cmd!r} returned non-zero "
+                             f"exit status {exitcode!r}")
+
+        try:
+            with open(tmp_filename, "rb") as fp:
+                stdout = fp.read()
+        except FileNotFoundError:
+            stdout = b''
+    finally:
+        try:
+            os.unlink(tmp_filename)
+        except OSError:
+            pass
+
+    return stdout
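
Because check_output() above hands the command line to os.system() (i.e., a shell), the _check_cmd() allow-list is what keeps shell metacharacters out. A standalone sketch of the same validation using a regex in place of the character loop (names here are illustrative, not part of the module):

    import re

    _SAFE = re.compile(r"[a-zA-Z0-9./-]+")

    def is_safe_cmd(cmd):
        # True only when every argument is a non-empty string of safe characters
        args = cmd if isinstance(cmd, (tuple, list)) else [cmd]
        return all(isinstance(a, str) and _SAFE.fullmatch(a) for a in args)

    print(is_safe_cmd(["/usr/bin/lslpp", "-Lqc", "bos.mp64"]))  # True
    print(is_safe_cmd(["sh", "-c", "echo $HOME"]))              # False: '$', ' '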
diff --git a/common/py3-stdlib/_collections_abc.py b/common/py3-stdlib/_collections_abc.py
index 2b2ddba..36cd993 100644
--- a/common/py3-stdlib/_collections_abc.py
+++ b/common/py3-stdlib/_collections_abc.py
@@ -9,6 +9,8 @@
 from abc import ABCMeta, abstractmethod
 import sys
 
+GenericAlias = type(list[int])
+
 __all__ = ["Awaitable", "Coroutine",
            "AsyncIterable", "AsyncIterator", "AsyncGenerator",
            "Hashable", "Iterable", "Iterator", "Generator", "Reversible",
@@ -110,6 +112,8 @@
             return _check_methods(C, "__await__")
         return NotImplemented
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 class Coroutine(Awaitable):
 
@@ -169,6 +173,8 @@
             return _check_methods(C, "__aiter__")
         return NotImplemented
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 class AsyncIterator(AsyncIterable):
 
@@ -255,6 +261,8 @@
             return _check_methods(C, "__iter__")
         return NotImplemented
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 class Iterator(Iterable):
 
@@ -274,6 +282,7 @@
             return _check_methods(C, '__iter__', '__next__')
         return NotImplemented
 
+
 Iterator.register(bytes_iterator)
 Iterator.register(bytearray_iterator)
 #Iterator.register(callable_iterator)
@@ -353,6 +362,7 @@
                                   'send', 'throw', 'close')
         return NotImplemented
 
+
 Generator.register(generator)
 
 
@@ -385,6 +395,9 @@
             return _check_methods(C, "__contains__")
         return NotImplemented
 
+    __class_getitem__ = classmethod(GenericAlias)
+
+
 class Collection(Sized, Iterable, Container):
 
     __slots__ = ()
@@ -395,6 +408,7 @@
             return _check_methods(C,  "__len__", "__iter__", "__contains__")
         return NotImplemented
 
+
 class Callable(metaclass=ABCMeta):
 
     __slots__ = ()
@@ -409,6 +423,8 @@
             return _check_methods(C, "__call__")
         return NotImplemented
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 ### SETS ###
 
@@ -550,6 +566,7 @@
             h = 590923713
         return h
 
+
 Set.register(frozenset)
 
 
@@ -632,6 +649,7 @@
                 self.discard(value)
         return self
 
+
 MutableSet.register(set)
 
 
@@ -688,6 +706,7 @@
 
     __reversed__ = None
 
+
 Mapping.register(mappingproxy)
 
 
@@ -704,6 +723,8 @@
     def __repr__(self):
         return '{0.__class__.__name__}({0._mapping!r})'.format(self)
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 class KeysView(MappingView, Set):
 
@@ -719,6 +740,7 @@
     def __iter__(self):
         yield from self._mapping
 
+
 KeysView.register(dict_keys)
 
 
@@ -743,6 +765,7 @@
         for key in self._mapping:
             yield (key, self._mapping[key])
 
+
 ItemsView.register(dict_items)
 
 
@@ -761,6 +784,7 @@
         for key in self._mapping:
             yield self._mapping[key]
 
+
 ValuesView.register(dict_values)
 
 
@@ -847,6 +871,7 @@
             self[key] = default
         return default
 
+
 MutableMapping.register(dict)
 
 
@@ -914,6 +939,7 @@
         'S.count(value) -> integer -- return number of occurrences of value'
         return sum(1 for v in self if v is value or v == value)
 
+
 Sequence.register(tuple)
 Sequence.register(str)
 Sequence.register(range)
@@ -1000,5 +1026,6 @@
         self.extend(values)
         return self
 
+
 MutableSequence.register(list)
 MutableSequence.register(bytearray)  # Multiply inheriting, see ByteString
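
The `__class_getitem__ = classmethod(GenericAlias)` additions throughout this file are what make the ABCs directly subscriptable for typing in Python 3.9 (a quick demonstration; the function name is illustrative):

    from collections.abc import Iterable

    def first(items: Iterable[int]) -> int:
        return next(iter(items))

    print(type(Iterable[int]))  # <class 'types.GenericAlias'>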
diff --git a/common/py3-stdlib/_dummy_thread.py b/common/py3-stdlib/_dummy_thread.py
deleted file mode 100644
index 2e46a07..0000000
--- a/common/py3-stdlib/_dummy_thread.py
+++ /dev/null
@@ -1,193 +0,0 @@
-"""Drop-in replacement for the thread module.
-
-Meant to be used as a brain-dead substitute so that threaded code does
-not need to be rewritten for when the thread module is not present.
-
-Suggested usage is::
-
-    try:
-        import _thread
-    except ImportError:
-        import _dummy_thread as _thread
-
-"""
-# Exports only things specified by thread documentation;
-# skipping obsolete synonyms allocate(), start_new(), exit_thread().
-__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
-           'interrupt_main', 'LockType', 'RLock']
-
-# A dummy value
-TIMEOUT_MAX = 2**31
-
-# NOTE: this module can be imported early in the extension building process,
-# and so top level imports of other modules should be avoided.  Instead, all
-# imports are done when needed on a function-by-function basis.  Since threads
-# are disabled, the import lock should not be an issue anyway (??).
-
-error = RuntimeError
-
-def start_new_thread(function, args, kwargs={}):
-    """Dummy implementation of _thread.start_new_thread().
-
-    Compatibility is maintained by making sure that ``args`` is a
-    tuple and ``kwargs`` is a dictionary.  If an exception is raised
-    and it is SystemExit (which can be done by _thread.exit()) it is
-    caught and nothing is done; all other exceptions are printed out
-    by using traceback.print_exc().
-
-    If the executed function calls interrupt_main the KeyboardInterrupt will be
-    raised when the function returns.
-
-    """
-    if type(args) != type(tuple()):
-        raise TypeError("2nd arg must be a tuple")
-    if type(kwargs) != type(dict()):
-        raise TypeError("3rd arg must be a dict")
-    global _main
-    _main = False
-    try:
-        function(*args, **kwargs)
-    except SystemExit:
-        pass
-    except:
-        import traceback
-        traceback.print_exc()
-    _main = True
-    global _interrupt
-    if _interrupt:
-        _interrupt = False
-        raise KeyboardInterrupt
-
-def exit():
-    """Dummy implementation of _thread.exit()."""
-    raise SystemExit
-
-def get_ident():
-    """Dummy implementation of _thread.get_ident().
-
-    Since this module should only be used when _threadmodule is not
-    available, it is safe to assume that the current process is the
-    only thread.  Thus a constant can be safely returned.
-    """
-    return 1
-
-def allocate_lock():
-    """Dummy implementation of _thread.allocate_lock()."""
-    return LockType()
-
-def stack_size(size=None):
-    """Dummy implementation of _thread.stack_size()."""
-    if size is not None:
-        raise error("setting thread stack size not supported")
-    return 0
-
-def _set_sentinel():
-    """Dummy implementation of _thread._set_sentinel()."""
-    return LockType()
-
-class LockType(object):
-    """Class implementing dummy implementation of _thread.LockType.
-
-    Compatibility is maintained by maintaining self.locked_status
-    which is a boolean that stores the state of the lock.  Pickling of
-    the lock, though, should not be done since if the _thread module is
-    then used with an unpickled ``lock()`` from here problems could
-    occur from this class not having atomic methods.
-
-    """
-
-    def __init__(self):
-        self.locked_status = False
-
-    def acquire(self, waitflag=None, timeout=-1):
-        """Dummy implementation of acquire().
-
-        For blocking calls, self.locked_status is automatically set to
-        True and returned appropriately based on value of
-        ``waitflag``.  If it is non-blocking, then the value is
-        actually checked and not set if it is already acquired.  This
-        is all done so that threading.Condition's assert statements
-        aren't triggered and throw a little fit.
-
-        """
-        if waitflag is None or waitflag:
-            self.locked_status = True
-            return True
-        else:
-            if not self.locked_status:
-                self.locked_status = True
-                return True
-            else:
-                if timeout > 0:
-                    import time
-                    time.sleep(timeout)
-                return False
-
-    __enter__ = acquire
-
-    def __exit__(self, typ, val, tb):
-        self.release()
-
-    def release(self):
-        """Release the dummy lock."""
-        # XXX Perhaps shouldn't actually bother to test?  Could lead
-        #     to problems for complex, threaded code.
-        if not self.locked_status:
-            raise error
-        self.locked_status = False
-        return True
-
-    def locked(self):
-        return self.locked_status
-
-    def __repr__(self):
-        return "<%s %s.%s object at %s>" % (
-            "locked" if self.locked_status else "unlocked",
-            self.__class__.__module__,
-            self.__class__.__qualname__,
-            hex(id(self))
-        )
-
-
-class RLock(LockType):
-    """Dummy implementation of threading._RLock.
-
-    Re-entrant lock can be aquired multiple times and needs to be released
-    just as many times. This dummy implemention does not check wheter the
-    current thread actually owns the lock, but does accounting on the call
-    counts.
-    """
-    def __init__(self):
-        super().__init__()
-        self._levels = 0
-
-    def acquire(self, waitflag=None, timeout=-1):
-        """Aquire the lock, can be called multiple times in succession.
-        """
-        locked = super().acquire(waitflag, timeout)
-        if locked:
-            self._levels += 1
-        return locked
-
-    def release(self):
-        """Release needs to be called once for every call to acquire().
-        """
-        if self._levels == 0:
-            raise error
-        if self._levels == 1:
-            super().release()
-        self._levels -= 1
-
-# Used to signal that interrupt_main was called in a "thread"
-_interrupt = False
-# True when not executing in a "thread"
-_main = True
-
-def interrupt_main():
-    """Set _interrupt flag to True to have start_new_thread raise
-    KeyboardInterrupt upon exiting."""
-    if _main:
-        raise KeyboardInterrupt
-    else:
-        global _interrupt
-        _interrupt = True
diff --git a/common/py3-stdlib/_osx_support.py b/common/py3-stdlib/_osx_support.py
index e9efce7..37975fe 100644
--- a/common/py3-stdlib/_osx_support.py
+++ b/common/py3-stdlib/_osx_support.py
@@ -52,7 +52,7 @@
         return executable
 
 
-def _read_output(commandstring):
+def _read_output(commandstring, capture_stderr=False):
     """Output from successful command execution or None"""
     # Similar to os.popen(commandstring, "r").read(),
     # but without actually using os.popen because that
@@ -67,7 +67,10 @@
             os.getpid(),), "w+b")
 
     with contextlib.closing(fp) as fp:
-        cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
+        if capture_stderr:
+            cmd = "%s >'%s' 2>&1" % (commandstring, fp.name)
+        else:
+            cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
         return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
 
 
@@ -110,6 +113,26 @@
 
     return _SYSTEM_VERSION
 
+_SYSTEM_VERSION_TUPLE = None
+def _get_system_version_tuple():
+    """
+    Return the macOS system version as a tuple
+
+    The return value is safe to use to compare
+    two version numbers.
+    """
+    global _SYSTEM_VERSION_TUPLE
+    if _SYSTEM_VERSION_TUPLE is None:
+        osx_version = _get_system_version()
+        if osx_version:
+            try:
+                _SYSTEM_VERSION_TUPLE = tuple(int(i) for i in osx_version.split('.'))
+            except ValueError:
+                _SYSTEM_VERSION_TUPLE = ()
+
+    return _SYSTEM_VERSION_TUPLE
+
+
 def _remove_original_values(_config_vars):
     """Remove original unmodified values for testing"""
     # This is needed for higher-level cross-platform tests of get_platform.
@@ -125,6 +148,33 @@
         _config_vars[_INITPRE + cv] = oldvalue
     _config_vars[cv] = newvalue
 
+
+_cache_default_sysroot = None
+def _default_sysroot(cc):
+    """ Returns the root of the default SDK for this system, or '/' """
+    global _cache_default_sysroot
+
+    if _cache_default_sysroot is not None:
+        return _cache_default_sysroot
+
+    contents = _read_output('%s -c -E -v - </dev/null' % (cc,), True)
+    in_incdirs = False
+    for line in contents.splitlines():
+        if line.startswith("#include <...>"):
+            in_incdirs = True
+        elif line.startswith("End of search list"):
+            in_incdirs = False
+        elif in_incdirs:
+            line = line.strip()
+            if line == '/usr/include':
+                _cache_default_sysroot = '/'
+            elif line.endswith(".sdk/usr/include"):
+                _cache_default_sysroot = line[:-12]
+    if _cache_default_sysroot is None:
+        _cache_default_sysroot = '/'
+
+    return _cache_default_sysroot
+
 def _supports_universal_builds():
     """Returns True if universal builds are supported on this system"""
     # As an approximation, we assume that if we are running on 10.4 or above,
@@ -132,14 +182,18 @@
     # builds, in particular -isysroot and -arch arguments to the compiler. This
     # is in support of allowing 10.4 universal builds to run on 10.3.x systems.
 
-    osx_version = _get_system_version()
-    if osx_version:
-        try:
-            osx_version = tuple(int(i) for i in osx_version.split('.'))
-        except ValueError:
-            osx_version = ''
+    osx_version = _get_system_version_tuple()
     return bool(osx_version >= (10, 4)) if osx_version else False
 
+def _supports_arm64_builds():
+    """Returns True if arm64 builds are supported on this system"""
+    # There are two sets of systems supporting macOS/arm64 builds:
+    # 1. macOS 11 and later, unconditionally
+    # 2. macOS 10.15 with Xcode 12.2 or later
+    # For now the second category is ignored.
+    osx_version = _get_system_version_tuple()
+    return osx_version >= (11, 0) if osx_version else False
+
 
 def _find_appropriate_compiler(_config_vars):
     """Find appropriate C compiler for extension module builds"""
@@ -331,6 +385,12 @@
             except ValueError:
                 break
 
+    elif not _supports_arm64_builds():
+        # Look for "-arch arm64" and drop that
+        for idx in reversed(range(len(compiler_so))):
+            if compiler_so[idx] == '-arch' and compiler_so[idx+1] == "arm64":
+                del compiler_so[idx:idx+2]
+
     if 'ARCHFLAGS' in os.environ and not stripArch:
         # User specified different -arch flags in the environ,
         # see also distutils.sysconfig
@@ -481,6 +541,8 @@
 
             if len(archs) == 1:
                 machine = archs[0]
+            elif archs == ('arm64', 'x86_64'):
+                machine = 'universal2'
             elif archs == ('i386', 'ppc'):
                 machine = 'fat'
             elif archs == ('i386', 'x86_64'):
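
The new _get_system_version_tuple() caches the macOS version as a tuple of ints because comparing the raw string misorders releases once minor versions reach two digits:

    print("10.4" < "10.15")    # False - lexicographic string comparison
    print((10, 4) < (10, 15))  # True  - the tuple comparison the helpers rely on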
diff --git a/common/py3-stdlib/_pyio.py b/common/py3-stdlib/_pyio.py
index fd31b8c..4804ed2 100644
--- a/common/py3-stdlib/_pyio.py
+++ b/common/py3-stdlib/_pyio.py
@@ -36,6 +36,8 @@
 # Does io.IOBase finalizer log the exception if the close() method fails?
 # The exception is ignored silently by default in release build.
 _IOBASE_EMITS_UNRAISABLE = (hasattr(sys, "gettotalrefcount") or sys.flags.dev_mode)
+# Does open() check its 'errors' argument?
+_CHECK_ERRORS = _IOBASE_EMITS_UNRAISABLE
 
 
 def open(file, mode="r", buffering=-1, encoding=None, errors=None,
@@ -802,6 +804,9 @@
         return pos
 
     def truncate(self, pos=None):
+        self._checkClosed()
+        self._checkWritable()
+
         # Flush the stream.  We're mixing buffered I/O with lower-level I/O,
         # and a flush may be necessary to synch both views of the current
         # file state.
@@ -1571,7 +1576,7 @@
                     raise IsADirectoryError(errno.EISDIR,
                                             os.strerror(errno.EISDIR), file)
             except AttributeError:
-                # Ignore the AttribueError if stat.S_ISDIR or errno.EISDIR
+                # Ignore the AttributeError if stat.S_ISDIR or errno.EISDIR
                 # don't exist.
                 pass
             self._blksize = getattr(fdfstat, 'st_blksize', 0)
@@ -2026,6 +2031,8 @@
         else:
             if not isinstance(errors, str):
                 raise ValueError("invalid errors: %r" % errors)
+            if _CHECK_ERRORS:
+                codecs.lookup_error(errors)
 
         self._buffer = buffer
         self._decoded_chars = ''  # buffer for text returned from decoder
@@ -2295,7 +2302,7 @@
         return not eof
 
     def _pack_cookie(self, position, dec_flags=0,
-                           bytes_to_feed=0, need_eof=0, chars_to_skip=0):
+                           bytes_to_feed=0, need_eof=False, chars_to_skip=0):
         # The meaning of a tell() cookie is: seek to position, set the
         # decoder flags to dec_flags, read bytes_to_feed bytes, feed them
         # into the decoder with need_eof as the EOF flag, then skip
@@ -2309,7 +2316,7 @@
         rest, dec_flags = divmod(rest, 1<<64)
         rest, bytes_to_feed = divmod(rest, 1<<64)
         need_eof, chars_to_skip = divmod(rest, 1<<64)
-        return position, dec_flags, bytes_to_feed, need_eof, chars_to_skip
+        return position, dec_flags, bytes_to_feed, bool(need_eof), chars_to_skip
 
     def tell(self):
         if not self._seekable:
@@ -2383,7 +2390,7 @@
             # (a point where the decoder has nothing buffered, so seek()
             # can safely start from there and advance to this location).
             bytes_fed = 0
-            need_eof = 0
+            need_eof = False
             # Chars decoded since `start_pos`
             chars_decoded = 0
             for i in range(skip_bytes, len(next_input)):
@@ -2400,7 +2407,7 @@
             else:
                 # We didn't get enough decoded data; signal EOF to get more.
                 chars_decoded += len(decoder.decode(b'', final=True))
-                need_eof = 1
+                need_eof = True
                 if chars_decoded < chars_to_skip:
                     raise OSError("can't reconstruct logical file position")
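
The two checks added to truncate() above match the order of checks the C implementation of io performs: reject closed streams, then read-only ones, before flushing. A sketch of the observable behavior through the public io module:

    import io

    reader = io.BufferedReader(io.BytesIO(b"0123456789"))
    try:
        reader.truncate(4)   # read-only stream
    except io.UnsupportedOperation as exc:
        print("rejected:", exc)

    reader.close()
    try:
        reader.truncate(4)   # closed stream
    except ValueError as exc:
        print("rejected:", exc)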
 
diff --git a/common/py3-stdlib/_strptime.py b/common/py3-stdlib/_strptime.py
index f4f3c0b..5df37f5 100644
--- a/common/py3-stdlib/_strptime.py
+++ b/common/py3-stdlib/_strptime.py
@@ -182,7 +182,7 @@
             self.locale_time = LocaleTime()
         base = super()
         base.__init__({
-            # The " \d" part of the regex is to make %c from ANSI C work
+            # The " [1-9]" part of the regex is to make %c from ANSI C work
             'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
             'f': r"(?P<f>[0-9]{1,6})",
             'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
diff --git a/common/py3-stdlib/_weakrefset.py b/common/py3-stdlib/_weakrefset.py
index 7a84823..b267780 100644
--- a/common/py3-stdlib/_weakrefset.py
+++ b/common/py3-stdlib/_weakrefset.py
@@ -3,6 +3,7 @@
 # by abc.py to load everything else at startup.
 
 from _weakref import ref
+from types import GenericAlias
 
 __all__ = ['WeakSet']
 
@@ -197,3 +198,5 @@
 
     def __repr__(self):
         return repr(self.data)
+
+    __class_getitem__ = classmethod(GenericAlias)
diff --git a/common/py3-stdlib/aifc.py b/common/py3-stdlib/aifc.py
index 1916e7e..ed5da7d 100644
--- a/common/py3-stdlib/aifc.py
+++ b/common/py3-stdlib/aifc.py
@@ -138,7 +138,7 @@
 import builtins
 import warnings
 
-__all__ = ["Error", "open", "openfp"]
+__all__ = ["Error", "open"]
 
 class Error(Exception):
     pass
@@ -920,10 +920,6 @@
     else:
         raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
 
-def openfp(f, mode=None):
-    warnings.warn("aifc.openfp is deprecated since Python 3.7. "
-                  "Use aifc.open instead.", DeprecationWarning, stacklevel=2)
-    return open(f, mode=mode)
 
 if __name__ == '__main__':
     import sys
diff --git a/common/py3-stdlib/antigravity.py b/common/py3-stdlib/antigravity.py
index c6f174c..6dc5207 100644
--- a/common/py3-stdlib/antigravity.py
+++ b/common/py3-stdlib/antigravity.py
@@ -12,6 +12,6 @@
 
     '''
     # https://xkcd.com/426/
-    h = hashlib.md5(datedow).hexdigest()
+    h = hashlib.md5(datedow, usedforsecurity=False).hexdigest()
     p, q = [('%f' % float.fromhex('0.' + x)) for x in (h[:16], h[16:32])]
     print('%d%s %d%s' % (latitude, p[1:], longitude, q[1:]))
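
The usedforsecurity=False keyword (new to hashlib in 3.9) marks this MD5 use as non-cryptographic so it keeps working on FIPS-restricted OpenSSL builds; the geohash only needs a stable digest. A minimal sketch (the input string is a hypothetical date-Dow value in the format xkcd 426 expects):

    import hashlib

    h = hashlib.md5(b"2005-05-26-10458.68", usedforsecurity=False)
    print(h.hexdigest())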
diff --git a/common/py3-stdlib/argparse.py b/common/py3-stdlib/argparse.py
index 2dad5f1..2fb1da5 100644
--- a/common/py3-stdlib/argparse.py
+++ b/common/py3-stdlib/argparse.py
@@ -67,6 +67,7 @@
     'ArgumentParser',
     'ArgumentError',
     'ArgumentTypeError',
+    'BooleanOptionalAction',
     'FileType',
     'HelpFormatter',
     'ArgumentDefaultsHelpFormatter',
@@ -86,7 +87,6 @@
 
 import os as _os
 import re as _re
-import shutil as _shutil
 import sys as _sys
 
 from gettext import gettext as _, ngettext
@@ -129,7 +129,7 @@
         return '%s(%s)' % (type_name, ', '.join(arg_strings))
 
     def _get_kwargs(self):
-        return sorted(self.__dict__.items())
+        return list(self.__dict__.items())
 
     def _get_args(self):
         return []
@@ -166,7 +166,8 @@
 
         # default setting for width
         if width is None:
-            width = _shutil.get_terminal_size().columns
+            import shutil
+            width = shutil.get_terminal_size().columns
             width -= 2
 
         self._prog = prog
@@ -263,7 +264,7 @@
                 invocations.append(get_invocation(subaction))
 
             # update the maximum item length
-            invocation_length = max([len(s) for s in invocations])
+            invocation_length = max(map(len, invocations))
             action_length = invocation_length + self._current_indent
             self._action_max_length = max(self._action_max_length,
                                           action_length)
@@ -454,7 +455,7 @@
                 # if the Optional doesn't take a value, format is:
                 #    -s or --long
                 if action.nargs == 0:
-                    part = '%s' % option_string
+                    part = action.format_usage()
 
                 # if the Optional takes a value, format is:
                 #    -s ARGS or --long ARGS
@@ -590,7 +591,11 @@
         elif action.nargs == OPTIONAL:
             result = '[%s]' % get_metavar(1)
         elif action.nargs == ZERO_OR_MORE:
-            result = '[%s [%s ...]]' % get_metavar(2)
+            metavar = get_metavar(1)
+            if len(metavar) == 2:
+                result = '[%s [%s ...]]' % metavar
+            else:
+                result = '[%s ...]' % metavar
         elif action.nargs == ONE_OR_MORE:
             result = '%s [%s ...]' % get_metavar(2)
         elif action.nargs == REMAINDER:
@@ -842,9 +847,52 @@
         ]
         return [(name, getattr(self, name)) for name in names]
 
+    def format_usage(self):
+        return self.option_strings[0]
+
     def __call__(self, parser, namespace, values, option_string=None):
         raise NotImplementedError(_('.__call__() not defined'))
 
+class BooleanOptionalAction(Action):
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 default=None,
+                 type=None,
+                 choices=None,
+                 required=False,
+                 help=None,
+                 metavar=None):
+
+        _option_strings = []
+        for option_string in option_strings:
+            _option_strings.append(option_string)
+
+            if option_string.startswith('--'):
+                option_string = '--no-' + option_string[2:]
+                _option_strings.append(option_string)
+
+        if help is not None and default is not None:
+            help += f" (default: {default})"
+
+        super().__init__(
+            option_strings=_option_strings,
+            dest=dest,
+            nargs=0,
+            default=default,
+            type=type,
+            choices=choices,
+            required=required,
+            help=help,
+            metavar=metavar)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        if option_string in self.option_strings:
+            setattr(namespace, self.dest, not option_string.startswith('--no-'))
+
+    def format_usage(self):
+        return ' | '.join(self.option_strings)
+
 
 class _StoreAction(Action):
 
@@ -1490,10 +1538,8 @@
 
             # strings starting with two prefix characters are long options
             option_strings.append(option_string)
-            if option_string[0] in self.prefix_chars:
-                if len(option_string) > 1:
-                    if option_string[1] in self.prefix_chars:
-                        long_option_strings.append(option_string)
+            if len(option_string) > 1 and option_string[1] in self.prefix_chars:
+                long_option_strings.append(option_string)
 
         # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
         dest = kwargs.pop('dest', None)
@@ -1633,6 +1679,8 @@
         - conflict_handler -- String indicating how to handle conflicts
         - add_help -- Add a -h/--help option
         - allow_abbrev -- Allow long options to be abbreviated unambiguously
+        - exit_on_error -- Determines whether or not ArgumentParser exits with
+            error info when an error occurs
     """
 
     def __init__(self,
@@ -1647,7 +1695,8 @@
                  argument_default=None,
                  conflict_handler='error',
                  add_help=True,
-                 allow_abbrev=True):
+                 allow_abbrev=True,
+                 exit_on_error=True):
 
         superinit = super(ArgumentParser, self).__init__
         superinit(description=description,
@@ -1666,6 +1715,7 @@
         self.fromfile_prefix_chars = fromfile_prefix_chars
         self.add_help = add_help
         self.allow_abbrev = allow_abbrev
+        self.exit_on_error = exit_on_error
 
         add_group = self.add_argument_group
         self._positionals = add_group(_('positional arguments'))
@@ -1796,15 +1846,19 @@
                 setattr(namespace, dest, self._defaults[dest])
 
         # parse the arguments and exit if there are any errors
-        try:
+        if self.exit_on_error:
+            try:
+                namespace, args = self._parse_known_args(args, namespace)
+            except ArgumentError:
+                err = _sys.exc_info()[1]
+                self.error(str(err))
+        else:
             namespace, args = self._parse_known_args(args, namespace)
-            if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
-                args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
-                delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
-            return namespace, args
-        except ArgumentError:
-            err = _sys.exc_info()[1]
-            self.error(str(err))
+
+        if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
+            args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
+            delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
+        return namespace, args
 
     def _parse_known_args(self, arg_strings, namespace):
         # replace arg strings that are file references
diff --git a/common/py3-stdlib/ast.py b/common/py3-stdlib/ast.py
index 99a1148..ecd4895 100644
--- a/common/py3-stdlib/ast.py
+++ b/common/py3-stdlib/ast.py
@@ -24,7 +24,10 @@
     :copyright: Copyright 2008 by Armin Ronacher.
     :license: Python License.
 """
+import sys
 from _ast import *
+from contextlib import contextmanager, nullcontext
+from enum import IntEnum, auto
 
 
 def parse(source, filename='<unknown>', mode='exec', *,
@@ -82,6 +85,9 @@
             return list(map(_convert, node.elts))
         elif isinstance(node, Set):
             return set(map(_convert, node.elts))
+        elif (isinstance(node, Call) and isinstance(node.func, Name) and
+              node.func.id == 'set' and node.args == node.keywords == []):
+            return set()
         elif isinstance(node, Dict):
             if len(node.keys) != len(node.values):
                 _raise_malformed_node(node)
@@ -99,7 +105,7 @@
     return _convert(node_or_string)
 
 
-def dump(node, annotate_fields=True, include_attributes=False):
+def dump(node, annotate_fields=True, include_attributes=False, *, indent=None):
     """
     Return a formatted dump of the tree in node.  This is mainly useful for
     debugging purposes.  If annotate_fields is true (by default),
@@ -107,35 +113,63 @@
     If annotate_fields is false, the result string will be more compact by
     omitting unambiguous field names.  Attributes such as line
     numbers and column offsets are not dumped by default.  If this is wanted,
-    include_attributes can be set to true.
+    include_attributes can be set to true.  If indent is a non-negative
+    integer or string, then the tree will be pretty-printed with that indent
+    level. None (the default) selects the single line representation.
     """
-    def _format(node):
+    def _format(node, level=0):
+        if indent is not None:
+            level += 1
+            prefix = '\n' + indent * level
+            sep = ',\n' + indent * level
+        else:
+            prefix = ''
+            sep = ', '
         if isinstance(node, AST):
+            cls = type(node)
             args = []
+            allsimple = True
             keywords = annotate_fields
-            for field in node._fields:
+            for name in node._fields:
                 try:
-                    value = getattr(node, field)
+                    value = getattr(node, name)
                 except AttributeError:
                     keywords = True
+                    continue
+                if value is None and getattr(cls, name, ...) is None:
+                    keywords = True
+                    continue
+                value, simple = _format(value, level)
+                allsimple = allsimple and simple
+                if keywords:
+                    args.append('%s=%s' % (name, value))
                 else:
-                    if keywords:
-                        args.append('%s=%s' % (field, _format(value)))
-                    else:
-                        args.append(_format(value))
+                    args.append(value)
             if include_attributes and node._attributes:
-                for a in node._attributes:
+                for name in node._attributes:
                     try:
-                        args.append('%s=%s' % (a, _format(getattr(node, a))))
+                        value = getattr(node, name)
                     except AttributeError:
-                        pass
-            return '%s(%s)' % (node.__class__.__name__, ', '.join(args))
+                        continue
+                    if value is None and getattr(cls, name, ...) is None:
+                        continue
+                    value, simple = _format(value, level)
+                    allsimple = allsimple and simple
+                    args.append('%s=%s' % (name, value))
+            if allsimple and len(args) <= 3:
+                return '%s(%s)' % (node.__class__.__name__, ', '.join(args)), not args
+            return '%s(%s%s)' % (node.__class__.__name__, prefix, sep.join(args)), False
         elif isinstance(node, list):
-            return '[%s]' % ', '.join(_format(x) for x in node)
-        return repr(node)
+            if not node:
+                return '[]', True
+            return '[%s%s]' % (prefix, sep.join(_format(x, level)[0] for x in node)), False
+        return repr(node), True
+
     if not isinstance(node, AST):
         raise TypeError('expected AST, got %r' % node.__class__.__name__)
-    return _format(node)
+    if indent is not None and not isinstance(indent, str):
+        indent = ' ' * indent
+    return _format(node)[0]
 
 
 def copy_location(new_node, old_node):
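
The reworked dump() above gains the indent parameter; None (the default) keeps the old single-line form:

    import ast

    tree = ast.parse("x = 1")
    print(ast.dump(tree))            # single line
    print(ast.dump(tree, indent=4))  # pretty-printed with a 4-space indent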
@@ -144,9 +178,14 @@
     attributes) from *old_node* to *new_node* if possible, and return *new_node*.
     """
     for attr in 'lineno', 'col_offset', 'end_lineno', 'end_col_offset':
-        if attr in old_node._attributes and attr in new_node._attributes \
-           and hasattr(old_node, attr):
-            setattr(new_node, attr, getattr(old_node, attr))
+        if attr in old_node._attributes and attr in new_node._attributes:
+            value = getattr(old_node, attr, None)
+            # end_lineno and end_col_offset are optional attributes, and they
+            # should be copied whether the value is None or not.
+            if value is not None or (
+                hasattr(old_node, attr) and attr.startswith("end_")
+            ):
+                setattr(new_node, attr, value)
     return new_node
 
 
@@ -165,7 +204,7 @@
             else:
                 lineno = node.lineno
         if 'end_lineno' in node._attributes:
-            if not hasattr(node, 'end_lineno'):
+            if getattr(node, 'end_lineno', None) is None:
                 node.end_lineno = end_lineno
             else:
                 end_lineno = node.end_lineno
@@ -175,7 +214,7 @@
             else:
                 col_offset = node.col_offset
         if 'end_col_offset' in node._attributes:
-            if not hasattr(node, 'end_col_offset'):
+            if getattr(node, 'end_col_offset', None) is None:
                 node.end_col_offset = end_col_offset
             else:
                 end_col_offset = node.end_col_offset
@@ -194,8 +233,11 @@
     for child in walk(node):
         if 'lineno' in child._attributes:
             child.lineno = getattr(child, 'lineno', 0) + n
-        if 'end_lineno' in child._attributes:
-            child.end_lineno = getattr(child, 'end_lineno', 0) + n
+        if (
+            "end_lineno" in child._attributes
+            and (end_lineno := getattr(child, "end_lineno", 0)) is not None
+        ):
+            child.end_lineno = end_lineno + n
     return node
 
 
@@ -277,7 +319,7 @@
 
 
 def _pad_whitespace(source):
-    """Replace all chars except '\f\t' in a line with spaces."""
+    r"""Replace all chars except '\f\t' in a line with spaces."""
     result = ''
     for c in source:
         if c in '\f\t':
@@ -297,6 +339,8 @@
     be padded with spaces to match its original position.
     """
     try:
+        if node.end_lineno is None or node.end_col_offset is None:
+            return None
         lineno = node.lineno - 1
         end_lineno = node.end_lineno - 1
         col_offset = node.col_offset
@@ -389,7 +433,7 @@
             else:
                 import warnings
                 warnings.warn(f"{method} is deprecated; add visit_Constant",
-                              PendingDeprecationWarning, 2)
+                              DeprecationWarning, 2)
                 return visitor(node)
         return self.generic_visit(node)
 
@@ -413,7 +457,7 @@
            def visit_Name(self, node):
                return Subscript(
                    value=Name(id='data', ctx=Load()),
-                   slice=Index(value=Str(s=node.id)),
+                   slice=Constant(value=node.id),
                    ctx=node.ctx
                )
 
@@ -453,20 +497,26 @@
         return node
 
 
-# The following code is for backward compatibility.
-# It will be removed in future.
+# If the ast module is loaded more than once, only add deprecated methods once
+if not hasattr(Constant, 'n'):
+    # The following code is for backward compatibility.
+    # It will be removed in future.
 
-def _getter(self):
-    return self.value
+    def _getter(self):
+        """Deprecated. Use value instead."""
+        return self.value
 
-def _setter(self, value):
-    self.value = value
+    def _setter(self, value):
+        self.value = value
 
-Constant.n = property(_getter, _setter)
-Constant.s = property(_getter, _setter)
+    Constant.n = property(_getter, _setter)
+    Constant.s = property(_getter, _setter)
 
 class _ABC(type):
 
+    def __init__(cls, *args):
+        cls.__doc__ = """Deprecated AST node class. Use ast.Constant instead"""
+
     def __instancecheck__(cls, inst):
         if not isinstance(inst, Constant):
             return False
@@ -527,6 +577,7 @@
 _const_types_not = {
     Num: (bool,),
 }
+
 _const_node_type_names = {
     bool: 'NameConstant',  # should be before int
     type(None): 'NameConstant',
@@ -537,3 +588,1005 @@
     bytes: 'Bytes',
     type(...): 'Ellipsis',
 }
+
+class slice(AST):
+    """Deprecated AST node class."""
+
+class Index(slice):
+    """Deprecated AST node class. Use the index value directly instead."""
+    def __new__(cls, value, **kwargs):
+        return value
+
+class ExtSlice(slice):
+    """Deprecated AST node class. Use ast.Tuple instead."""
+    def __new__(cls, dims=(), **kwargs):
+        return Tuple(list(dims), Load(), **kwargs)
+
+# If the ast module is loaded more than once, only add deprecated methods once
+if not hasattr(Tuple, 'dims'):
+    # The following code is for backward compatibility.
+    # It will be removed in future.
+
+    def _dims_getter(self):
+        """Deprecated. Use elts instead."""
+        return self.elts
+
+    def _dims_setter(self, value):
+        self.elts = value
+
+    Tuple.dims = property(_dims_getter, _dims_setter)
+
+class Suite(mod):
+    """Deprecated AST node class.  Unused in Python 3."""
+
+class AugLoad(expr_context):
+    """Deprecated AST node class.  Unused in Python 3."""
+
+class AugStore(expr_context):
+    """Deprecated AST node class.  Unused in Python 3."""
+
+class Param(expr_context):
+    """Deprecated AST node class.  Unused in Python 3."""
+
+
+# Large float and imaginary literals get turned into infinities in the AST.
+# We unparse those infinities to INFSTR.
+_INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
+
+class _Precedence(IntEnum):
+    """Precedence table that originated from python grammar."""
+
+    TUPLE = auto()
+    YIELD = auto()           # 'yield', 'yield from'
+    TEST = auto()            # 'if'-'else', 'lambda'
+    OR = auto()              # 'or'
+    AND = auto()             # 'and'
+    NOT = auto()             # 'not'
+    CMP = auto()             # '<', '>', '==', '>=', '<=', '!=',
+                             # 'in', 'not in', 'is', 'is not'
+    EXPR = auto()
+    BOR = EXPR               # '|'
+    BXOR = auto()            # '^'
+    BAND = auto()            # '&'
+    SHIFT = auto()           # '<<', '>>'
+    ARITH = auto()           # '+', '-'
+    TERM = auto()            # '*', '@', '/', '%', '//'
+    FACTOR = auto()          # unary '+', '-', '~'
+    POWER = auto()           # '**'
+    AWAIT = auto()           # 'await'
+    ATOM = auto()
+
+    def next(self):
+        try:
+            return self.__class__(self + 1)
+        except ValueError:
+            return self
+
+
+_SINGLE_QUOTES = ("'", '"')
+_MULTI_QUOTES = ('"""', "'''")
+_ALL_QUOTES = (*_SINGLE_QUOTES, *_MULTI_QUOTES)
+
+class _Unparser(NodeVisitor):
+    """Methods in this class recursively traverse an AST and
+    output source code for the abstract syntax; original formatting
+    is disregarded."""
+
+    def __init__(self, *, _avoid_backslashes=False):
+        self._source = []
+        self._buffer = []
+        self._precedences = {}
+        self._type_ignores = {}
+        self._indent = 0
+        self._avoid_backslashes = _avoid_backslashes
+
+    def interleave(self, inter, f, seq):
+        """Call f on each item in seq, calling inter() in between."""
+        seq = iter(seq)
+        try:
+            f(next(seq))
+        except StopIteration:
+            pass
+        else:
+            for x in seq:
+                inter()
+                f(x)
+
+    def items_view(self, traverser, items):
+        """Traverse and separate the given *items* with a comma and append it to
+        the buffer. If *items* is a single item sequence, a trailing comma
+        will be added."""
+        if len(items) == 1:
+            traverser(items[0])
+            self.write(",")
+        else:
+            self.interleave(lambda: self.write(", "), traverser, items)
+
+    def maybe_newline(self):
+        """Adds a newline if it isn't the start of generated source"""
+        if self._source:
+            self.write("\n")
+
+    def fill(self, text=""):
+        """Indent a piece of text and append it, according to the current
+        indentation level"""
+        self.maybe_newline()
+        self.write("    " * self._indent + text)
+
+    def write(self, text):
+        """Append a piece of text"""
+        self._source.append(text)
+
+    def buffer_writer(self, text):
+        self._buffer.append(text)
+
+    @property
+    def buffer(self):
+        value = "".join(self._buffer)
+        self._buffer.clear()
+        return value
+
+    @contextmanager
+    def block(self, *, extra = None):
+        """A context manager for preparing the source for blocks. It adds
+        the character ':', increases the indentation on enter and decreases
+        the indentation on exit. If *extra* is given, it will be directly
+        appended after the colon character.
+        """
+        self.write(":")
+        if extra:
+            self.write(extra)
+        self._indent += 1
+        yield
+        self._indent -= 1
+
+    @contextmanager
+    def delimit(self, start, end):
+        """A context manager for preparing the source for expressions. It adds
+        *start* to the buffer and enters, after exit it adds *end*."""
+
+        self.write(start)
+        yield
+        self.write(end)
+
+    def delimit_if(self, start, end, condition):
+        if condition:
+            return self.delimit(start, end)
+        else:
+            return nullcontext()
+
+    def require_parens(self, precedence, node):
+        """Shortcut to adding precedence related parens"""
+        return self.delimit_if("(", ")", self.get_precedence(node) > precedence)
+
+    def get_precedence(self, node):
+        return self._precedences.get(node, _Precedence.TEST)
+
+    def set_precedence(self, precedence, *nodes):
+        for node in nodes:
+            self._precedences[node] = precedence
+
+    def get_raw_docstring(self, node):
+        """If a docstring node is found in the body of the *node* parameter,
+        return that docstring node, None otherwise.
+
+        Logic mirrored from ``_PyAST_GetDocString``."""
+        if not isinstance(
+            node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)
+        ) or len(node.body) < 1:
+            return None
+        node = node.body[0]
+        if not isinstance(node, Expr):
+            return None
+        node = node.value
+        if isinstance(node, Constant) and isinstance(node.value, str):
+            return node
+
+    def get_type_comment(self, node):
+        comment = self._type_ignores.get(node.lineno) or node.type_comment
+        if comment is not None:
+            return f" # type: {comment}"
+
+    def traverse(self, node):
+        if isinstance(node, list):
+            for item in node:
+                self.traverse(item)
+        else:
+            super().visit(node)
+
+    def visit(self, node):
+        """Outputs a source code string that, if converted back to an ast
+        (using ast.parse) will generate an AST equivalent to *node*"""
+        self._source = []
+        self.traverse(node)
+        return "".join(self._source)
+
+    def _write_docstring_and_traverse_body(self, node):
+        if (docstring := self.get_raw_docstring(node)):
+            self._write_docstring(docstring)
+            self.traverse(node.body[1:])
+        else:
+            self.traverse(node.body)
+
+    def visit_Module(self, node):
+        self._type_ignores = {
+            ignore.lineno: f"ignore{ignore.tag}"
+            for ignore in node.type_ignores
+        }
+        self._write_docstring_and_traverse_body(node)
+        self._type_ignores.clear()
+
+    def visit_FunctionType(self, node):
+        with self.delimit("(", ")"):
+            self.interleave(
+                lambda: self.write(", "), self.traverse, node.argtypes
+            )
+
+        self.write(" -> ")
+        self.traverse(node.returns)
+
+    def visit_Expr(self, node):
+        self.fill()
+        self.set_precedence(_Precedence.YIELD, node.value)
+        self.traverse(node.value)
+
+    def visit_NamedExpr(self, node):
+        with self.require_parens(_Precedence.TUPLE, node):
+            self.set_precedence(_Precedence.ATOM, node.target, node.value)
+            self.traverse(node.target)
+            self.write(" := ")
+            self.traverse(node.value)
+
+    def visit_Import(self, node):
+        self.fill("import ")
+        self.interleave(lambda: self.write(", "), self.traverse, node.names)
+
+    def visit_ImportFrom(self, node):
+        self.fill("from ")
+        self.write("." * node.level)
+        if node.module:
+            self.write(node.module)
+        self.write(" import ")
+        self.interleave(lambda: self.write(", "), self.traverse, node.names)
+
+    def visit_Assign(self, node):
+        self.fill()
+        for target in node.targets:
+            self.traverse(target)
+            self.write(" = ")
+        self.traverse(node.value)
+        if type_comment := self.get_type_comment(node):
+            self.write(type_comment)
+
+    def visit_AugAssign(self, node):
+        self.fill()
+        self.traverse(node.target)
+        self.write(" " + self.binop[node.op.__class__.__name__] + "= ")
+        self.traverse(node.value)
+
+    def visit_AnnAssign(self, node):
+        self.fill()
+        with self.delimit_if("(", ")", not node.simple and isinstance(node.target, Name)):
+            self.traverse(node.target)
+        self.write(": ")
+        self.traverse(node.annotation)
+        if node.value:
+            self.write(" = ")
+            self.traverse(node.value)
+
+    def visit_Return(self, node):
+        self.fill("return")
+        if node.value:
+            self.write(" ")
+            self.traverse(node.value)
+
+    def visit_Pass(self, node):
+        self.fill("pass")
+
+    def visit_Break(self, node):
+        self.fill("break")
+
+    def visit_Continue(self, node):
+        self.fill("continue")
+
+    def visit_Delete(self, node):
+        self.fill("del ")
+        self.interleave(lambda: self.write(", "), self.traverse, node.targets)
+
+    def visit_Assert(self, node):
+        self.fill("assert ")
+        self.traverse(node.test)
+        if node.msg:
+            self.write(", ")
+            self.traverse(node.msg)
+
+    def visit_Global(self, node):
+        self.fill("global ")
+        self.interleave(lambda: self.write(", "), self.write, node.names)
+
+    def visit_Nonlocal(self, node):
+        self.fill("nonlocal ")
+        self.interleave(lambda: self.write(", "), self.write, node.names)
+
+    def visit_Await(self, node):
+        with self.require_parens(_Precedence.AWAIT, node):
+            self.write("await")
+            if node.value:
+                self.write(" ")
+                self.set_precedence(_Precedence.ATOM, node.value)
+                self.traverse(node.value)
+
+    def visit_Yield(self, node):
+        with self.require_parens(_Precedence.YIELD, node):
+            self.write("yield")
+            if node.value:
+                self.write(" ")
+                self.set_precedence(_Precedence.ATOM, node.value)
+                self.traverse(node.value)
+
+    def visit_YieldFrom(self, node):
+        with self.require_parens(_Precedence.YIELD, node):
+            self.write("yield from ")
+            if not node.value:
+                raise ValueError("Node can't be used without a value attribute.")
+            self.set_precedence(_Precedence.ATOM, node.value)
+            self.traverse(node.value)
+
+    def visit_Raise(self, node):
+        self.fill("raise")
+        if not node.exc:
+            if node.cause:
+                raise ValueError(f"Node can't use cause without an exception.")
+            return
+        self.write(" ")
+        self.traverse(node.exc)
+        if node.cause:
+            self.write(" from ")
+            self.traverse(node.cause)
+
+    def visit_Try(self, node):
+        self.fill("try")
+        with self.block():
+            self.traverse(node.body)
+        for ex in node.handlers:
+            self.traverse(ex)
+        if node.orelse:
+            self.fill("else")
+            with self.block():
+                self.traverse(node.orelse)
+        if node.finalbody:
+            self.fill("finally")
+            with self.block():
+                self.traverse(node.finalbody)
+
+    def visit_ExceptHandler(self, node):
+        self.fill("except")
+        if node.type:
+            self.write(" ")
+            self.traverse(node.type)
+        if node.name:
+            self.write(" as ")
+            self.write(node.name)
+        with self.block():
+            self.traverse(node.body)
+
+    def visit_ClassDef(self, node):
+        self.maybe_newline()
+        for deco in node.decorator_list:
+            self.fill("@")
+            self.traverse(deco)
+        self.fill("class " + node.name)
+        with self.delimit_if("(", ")", condition = node.bases or node.keywords):
+            comma = False
+            for e in node.bases:
+                if comma:
+                    self.write(", ")
+                else:
+                    comma = True
+                self.traverse(e)
+            for e in node.keywords:
+                if comma:
+                    self.write(", ")
+                else:
+                    comma = True
+                self.traverse(e)
+
+        with self.block():
+            self._write_docstring_and_traverse_body(node)
+
+    def visit_FunctionDef(self, node):
+        self._function_helper(node, "def")
+
+    def visit_AsyncFunctionDef(self, node):
+        self._function_helper(node, "async def")
+
+    def _function_helper(self, node, fill_suffix):
+        self.maybe_newline()
+        for deco in node.decorator_list:
+            self.fill("@")
+            self.traverse(deco)
+        def_str = fill_suffix + " " + node.name
+        self.fill(def_str)
+        with self.delimit("(", ")"):
+            self.traverse(node.args)
+        if node.returns:
+            self.write(" -> ")
+            self.traverse(node.returns)
+        with self.block(extra=self.get_type_comment(node)):
+            self._write_docstring_and_traverse_body(node)
+
+    def visit_For(self, node):
+        self._for_helper("for ", node)
+
+    def visit_AsyncFor(self, node):
+        self._for_helper("async for ", node)
+
+    def _for_helper(self, fill, node):
+        self.fill(fill)
+        self.traverse(node.target)
+        self.write(" in ")
+        self.traverse(node.iter)
+        with self.block(extra=self.get_type_comment(node)):
+            self.traverse(node.body)
+        if node.orelse:
+            self.fill("else")
+            with self.block():
+                self.traverse(node.orelse)
+
+    def visit_If(self, node):
+        self.fill("if ")
+        self.traverse(node.test)
+        with self.block():
+            self.traverse(node.body)
+        # collapse nested ifs into equivalent elifs.
+        while node.orelse and len(node.orelse) == 1 and isinstance(node.orelse[0], If):
+            node = node.orelse[0]
+            self.fill("elif ")
+            self.traverse(node.test)
+            with self.block():
+                self.traverse(node.body)
+        # final else
+        if node.orelse:
+            self.fill("else")
+            with self.block():
+                self.traverse(node.orelse)
+
+    def visit_While(self, node):
+        self.fill("while ")
+        self.traverse(node.test)
+        with self.block():
+            self.traverse(node.body)
+        if node.orelse:
+            self.fill("else")
+            with self.block():
+                self.traverse(node.orelse)
+
+    def visit_With(self, node):
+        self.fill("with ")
+        self.interleave(lambda: self.write(", "), self.traverse, node.items)
+        with self.block(extra=self.get_type_comment(node)):
+            self.traverse(node.body)
+
+    def visit_AsyncWith(self, node):
+        self.fill("async with ")
+        self.interleave(lambda: self.write(", "), self.traverse, node.items)
+        with self.block(extra=self.get_type_comment(node)):
+            self.traverse(node.body)
+
+    def _str_literal_helper(
+        self, string, *, quote_types=_ALL_QUOTES, escape_special_whitespace=False
+    ):
+        """Helper for writing string literals, minimizing escapes.
+        Returns the tuple (string literal to write, possible quote types).
+        """
+        def escape_char(c):
+            # \n and \t are non-printable, but we only escape them if
+            # escape_special_whitespace is True
+            if not escape_special_whitespace and c in "\n\t":
+                return c
+            # Always escape backslashes and other non-printable characters
+            if c == "\\" or not c.isprintable():
+                return c.encode("unicode_escape").decode("ascii")
+            return c
+
+        escaped_string = "".join(map(escape_char, string))
+        possible_quotes = quote_types
+        if "\n" in escaped_string:
+            possible_quotes = [q for q in possible_quotes if q in _MULTI_QUOTES]
+        possible_quotes = [q for q in possible_quotes if q not in escaped_string]
+        if not possible_quotes:
+            # If there aren't any possible_quotes, fall back to using repr
+            # on the original string. Try to use a quote from quote_types,
+            # e.g., so that we use triple quotes for docstrings.
+            string = repr(string)
+            quote = next((q for q in quote_types if string[0] in q), string[0])
+            return string[1:-1], [quote]
+        if escaped_string:
+            # Sort so that we prefer '''"''' over """\""""
+            possible_quotes.sort(key=lambda q: q[0] == escaped_string[-1])
+            # If we're using triple quotes and we'd need to escape a final
+            # quote, escape it
+            if possible_quotes[0][0] == escaped_string[-1]:
+                assert len(possible_quotes[0]) == 3
+                escaped_string = escaped_string[:-1] + "\\" + escaped_string[-1]
+        return escaped_string, possible_quotes
+
+    def _write_str_avoiding_backslashes(self, string, *, quote_types=_ALL_QUOTES):
+        """Write string literal value with a best effort attempt to avoid backslashes."""
+        string, quote_types = self._str_literal_helper(string, quote_types=quote_types)
+        quote_type = quote_types[0]
+        self.write(f"{quote_type}{string}{quote_type}")
+
+    def visit_JoinedStr(self, node):
+        self.write("f")
+        if self._avoid_backslashes:
+            self._fstring_JoinedStr(node, self.buffer_writer)
+            self._write_str_avoiding_backslashes(self.buffer)
+            return
+
+        # If we don't need to avoid backslashes globally (i.e., we only need
+        # to avoid them inside FormattedValues), it's cosmetically preferred
+        # to use escaped whitespace. That is, it's preferred to use backslashes
+        # for cases like: f"{x}\n". To accomplish this, we keep track of what
+        # in our buffer corresponds to FormattedValues and what corresponds to
+        # Constant parts of the f-string, and allow escapes accordingly.
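+        # Illustrative: in f"{x}\n" the Constant part "\n" is escaped
+        # (escape_special_whitespace=True), while the FormattedValue part
+        # "{x}" is not, so the result stays f"{x}\n" instead of becoming a
+        # multi-line literal.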
+        buffer = []
+        for value in node.values:
+            meth = getattr(self, "_fstring_" + type(value).__name__)
+            meth(value, self.buffer_writer)
+            buffer.append((self.buffer, isinstance(value, Constant)))
+        new_buffer = []
+        quote_types = _ALL_QUOTES
+        for value, is_constant in buffer:
+            # Repeatedly narrow down the list of possible quote_types
+            value, quote_types = self._str_literal_helper(
+                value, quote_types=quote_types,
+                escape_special_whitespace=is_constant
+            )
+            new_buffer.append(value)
+        value = "".join(new_buffer)
+        quote_type = quote_types[0]
+        self.write(f"{quote_type}{value}{quote_type}")
+
+    def visit_FormattedValue(self, node):
+        self.write("f")
+        self._fstring_FormattedValue(node, self.buffer_writer)
+        self._write_str_avoiding_backslashes(self.buffer)
+
+    def _fstring_JoinedStr(self, node, write):
+        for value in node.values:
+            meth = getattr(self, "_fstring_" + type(value).__name__)
+            meth(value, write)
+
+    def _fstring_Constant(self, node, write):
+        if not isinstance(node.value, str):
+            raise ValueError("Constants inside JoinedStr should be a string.")
+        value = node.value.replace("{", "{{").replace("}", "}}")
+        write(value)
+
+    def _fstring_FormattedValue(self, node, write):
+        write("{")
+        unparser = type(self)(_avoid_backslashes=True)
+        unparser.set_precedence(_Precedence.TEST.next(), node.value)
+        expr = unparser.visit(node.value)
+        if expr.startswith("{"):
+            write(" ")  # Separate pair of opening brackets as "{ {"
+        if "\\" in expr:
+            raise ValueError("Unable to avoid backslash in f-string expression part")
+        write(expr)
+        if node.conversion != -1:
+            conversion = chr(node.conversion)
+            if conversion not in "sra":
+                raise ValueError("Unknown f-string conversion.")
+            write(f"!{conversion}")
+        if node.format_spec:
+            write(":")
+            meth = getattr(self, "_fstring_" + type(node.format_spec).__name__)
+            meth(node.format_spec, write)
+        write("}")
+
+    def visit_Name(self, node):
+        self.write(node.id)
+
+    def _write_docstring(self, node):
+        self.fill()
+        if node.kind == "u":
+            self.write("u")
+        self._write_str_avoiding_backslashes(node.value, quote_types=_MULTI_QUOTES)
+
+    def _write_constant(self, value):
+        if isinstance(value, (float, complex)):
+            # Substitute overflowing decimal literal for AST infinities.
+            self.write(repr(value).replace("inf", _INFSTR))
+        elif self._avoid_backslashes and isinstance(value, str):
+            self._write_str_avoiding_backslashes(value)
+        else:
+            self.write(repr(value))
+
+    def visit_Constant(self, node):
+        value = node.value
+        if isinstance(value, tuple):
+            with self.delimit("(", ")"):
+                self.items_view(self._write_constant, value)
+        elif value is ...:
+            self.write("...")
+        else:
+            if node.kind == "u":
+                self.write("u")
+            self._write_constant(node.value)
+
+    def visit_List(self, node):
+        with self.delimit("[", "]"):
+            self.interleave(lambda: self.write(", "), self.traverse, node.elts)
+
+    def visit_ListComp(self, node):
+        with self.delimit("[", "]"):
+            self.traverse(node.elt)
+            for gen in node.generators:
+                self.traverse(gen)
+
+    def visit_GeneratorExp(self, node):
+        with self.delimit("(", ")"):
+            self.traverse(node.elt)
+            for gen in node.generators:
+                self.traverse(gen)
+
+    def visit_SetComp(self, node):
+        with self.delimit("{", "}"):
+            self.traverse(node.elt)
+            for gen in node.generators:
+                self.traverse(gen)
+
+    def visit_DictComp(self, node):
+        with self.delimit("{", "}"):
+            self.traverse(node.key)
+            self.write(": ")
+            self.traverse(node.value)
+            for gen in node.generators:
+                self.traverse(gen)
+
+    def visit_comprehension(self, node):
+        if node.is_async:
+            self.write(" async for ")
+        else:
+            self.write(" for ")
+        self.set_precedence(_Precedence.TUPLE, node.target)
+        self.traverse(node.target)
+        self.write(" in ")
+        self.set_precedence(_Precedence.TEST.next(), node.iter, *node.ifs)
+        self.traverse(node.iter)
+        for if_clause in node.ifs:
+            self.write(" if ")
+            self.traverse(if_clause)
+
+    def visit_IfExp(self, node):
+        with self.require_parens(_Precedence.TEST, node):
+            self.set_precedence(_Precedence.TEST.next(), node.body, node.test)
+            self.traverse(node.body)
+            self.write(" if ")
+            self.traverse(node.test)
+            self.write(" else ")
+            self.set_precedence(_Precedence.TEST, node.orelse)
+            self.traverse(node.orelse)
+
+    def visit_Set(self, node):
+        if not node.elts:
+            raise ValueError("Set node should have at least one item")
+        with self.delimit("{", "}"):
+            self.interleave(lambda: self.write(", "), self.traverse, node.elts)
+
+    def visit_Dict(self, node):
+        def write_key_value_pair(k, v):
+            self.traverse(k)
+            self.write(": ")
+            self.traverse(v)
+
+        def write_item(item):
+            k, v = item
+            if k is None:
+                # for the dictionary unpacking operator in dicts {**{'y': 2}};
+                # see PEP 448 for details
+                self.write("**")
+                self.set_precedence(_Precedence.EXPR, v)
+                self.traverse(v)
+            else:
+                write_key_value_pair(k, v)
+
+        with self.delimit("{", "}"):
+            self.interleave(
+                lambda: self.write(", "), write_item, zip(node.keys, node.values)
+            )
+
+    def visit_Tuple(self, node):
+        with self.delimit("(", ")"):
+            self.items_view(self.traverse, node.elts)
+
+    unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"}
+    unop_precedence = {
+        "not": _Precedence.NOT,
+        "~": _Precedence.FACTOR,
+        "+": _Precedence.FACTOR,
+        "-": _Precedence.FACTOR,
+    }
+
+    def visit_UnaryOp(self, node):
+        operator = self.unop[node.op.__class__.__name__]
+        operator_precedence = self.unop_precedence[operator]
+        with self.require_parens(operator_precedence, node):
+            self.write(operator)
+            # factor prefixes (+, -, ~) shouldn't be separated
+            # from the value they belong to (e.g. +1 instead of + 1)
+            if operator_precedence is not _Precedence.FACTOR:
+                self.write(" ")
+            self.set_precedence(operator_precedence, node.operand)
+            self.traverse(node.operand)
+
+    binop = {
+        "Add": "+",
+        "Sub": "-",
+        "Mult": "*",
+        "MatMult": "@",
+        "Div": "/",
+        "Mod": "%",
+        "LShift": "<<",
+        "RShift": ">>",
+        "BitOr": "|",
+        "BitXor": "^",
+        "BitAnd": "&",
+        "FloorDiv": "//",
+        "Pow": "**",
+    }
+
+    binop_precedence = {
+        "+": _Precedence.ARITH,
+        "-": _Precedence.ARITH,
+        "*": _Precedence.TERM,
+        "@": _Precedence.TERM,
+        "/": _Precedence.TERM,
+        "%": _Precedence.TERM,
+        "<<": _Precedence.SHIFT,
+        ">>": _Precedence.SHIFT,
+        "|": _Precedence.BOR,
+        "^": _Precedence.BXOR,
+        "&": _Precedence.BAND,
+        "//": _Precedence.TERM,
+        "**": _Precedence.POWER,
+    }
+
+    binop_rassoc = frozenset(("**",))
+    def visit_BinOp(self, node):
+        operator = self.binop[node.op.__class__.__name__]
+        operator_precedence = self.binop_precedence[operator]
+        with self.require_parens(operator_precedence, node):
+            if operator in self.binop_rassoc:
+                left_precedence = operator_precedence.next()
+                right_precedence = operator_precedence
+            else:
+                left_precedence = operator_precedence
+                right_precedence = operator_precedence.next()
+
+            self.set_precedence(left_precedence, node.left)
+            self.traverse(node.left)
+            self.write(f" {operator} ")
+            self.set_precedence(right_precedence, node.right)
+            self.traverse(node.right)
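+    # Illustrative consequence of binop_rassoc: "**" is right-associative,
+    # so 2 ** 3 ** 4 round-trips without parentheses, while (2 ** 3) ** 4
+    # keeps the parentheses around its left operand.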
+
+    cmpops = {
+        "Eq": "==",
+        "NotEq": "!=",
+        "Lt": "<",
+        "LtE": "<=",
+        "Gt": ">",
+        "GtE": ">=",
+        "Is": "is",
+        "IsNot": "is not",
+        "In": "in",
+        "NotIn": "not in",
+    }
+
+    def visit_Compare(self, node):
+        with self.require_parens(_Precedence.CMP, node):
+            self.set_precedence(_Precedence.CMP.next(), node.left, *node.comparators)
+            self.traverse(node.left)
+            for o, e in zip(node.ops, node.comparators):
+                self.write(" " + self.cmpops[o.__class__.__name__] + " ")
+                self.traverse(e)
+
+    boolops = {"And": "and", "Or": "or"}
+    boolop_precedence = {"and": _Precedence.AND, "or": _Precedence.OR}
+
+    def visit_BoolOp(self, node):
+        operator = self.boolops[node.op.__class__.__name__]
+        operator_precedence = self.boolop_precedence[operator]
+
+        def increasing_level_traverse(node):
+            nonlocal operator_precedence
+            operator_precedence = operator_precedence.next()
+            self.set_precedence(operator_precedence, node)
+            self.traverse(node)
+
+        with self.require_parens(operator_precedence, node):
+            s = f" {operator} "
+            self.interleave(lambda: self.write(s), increasing_level_traverse, node.values)
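+    # Illustrative: each operand is set to a strictly higher precedence
+    # level, so a nested BoolOp such as "a or (b or c)" keeps its
+    # parentheses and the AST shape survives the round trip.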
+
+    def visit_Attribute(self, node):
+        self.set_precedence(_Precedence.ATOM, node.value)
+        self.traverse(node.value)
+        # Special case: 3.__abs__() is a syntax error, so if node.value
+        # is an integer literal then we need to either parenthesize
+        # it or add an extra space to get 3 .__abs__().
+        if isinstance(node.value, Constant) and isinstance(node.value.value, int):
+            self.write(" ")
+        self.write(".")
+        self.write(node.attr)
+
+    def visit_Call(self, node):
+        self.set_precedence(_Precedence.ATOM, node.func)
+        self.traverse(node.func)
+        with self.delimit("(", ")"):
+            comma = False
+            for e in node.args:
+                if comma:
+                    self.write(", ")
+                else:
+                    comma = True
+                self.traverse(e)
+            for e in node.keywords:
+                if comma:
+                    self.write(", ")
+                else:
+                    comma = True
+                self.traverse(e)
+
+    def visit_Subscript(self, node):
+        def is_simple_tuple(slice_value):
+            # when unparsing a non-empty tuple, the parentheses can be safely
+            # omitted if there aren't any elements that explicitly require
+            # parentheses (such as starred expressions).
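+            # Illustrative: obj[a, b] round-trips without parentheses,
+            # while obj[(*x,)] keeps them because of the starred element.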
+            return (
+                isinstance(slice_value, Tuple)
+                and slice_value.elts
+                and not any(isinstance(elt, Starred) for elt in slice_value.elts)
+            )
+
+        self.set_precedence(_Precedence.ATOM, node.value)
+        self.traverse(node.value)
+        with self.delimit("[", "]"):
+            if is_simple_tuple(node.slice):
+                self.items_view(self.traverse, node.slice.elts)
+            else:
+                self.traverse(node.slice)
+
+    def visit_Starred(self, node):
+        self.write("*")
+        self.set_precedence(_Precedence.EXPR, node.value)
+        self.traverse(node.value)
+
+    def visit_Ellipsis(self, node):
+        self.write("...")
+
+    def visit_Slice(self, node):
+        if node.lower:
+            self.traverse(node.lower)
+        self.write(":")
+        if node.upper:
+            self.traverse(node.upper)
+        if node.step:
+            self.write(":")
+            self.traverse(node.step)
+
+    def visit_arg(self, node):
+        self.write(node.arg)
+        if node.annotation:
+            self.write(": ")
+            self.traverse(node.annotation)
+
+    def visit_arguments(self, node):
+        first = True
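+        # Illustrative overview (not in the original): for
+        #     def f(a, /, b=1, *args, c=2, **kw): ...
+        # the blocks below emit "a, /, b=1, *args, c=2, **kw" in order.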
+        # normal arguments
+        all_args = node.posonlyargs + node.args
+        defaults = [None] * (len(all_args) - len(node.defaults)) + node.defaults
+        for index, elements in enumerate(zip(all_args, defaults), 1):
+            a, d = elements
+            if first:
+                first = False
+            else:
+                self.write(", ")
+            self.traverse(a)
+            if d:
+                self.write("=")
+                self.traverse(d)
+            if index == len(node.posonlyargs):
+                self.write(", /")
+
+        # varargs, or bare '*' if no varargs but keyword-only arguments present
+        if node.vararg or node.kwonlyargs:
+            if first:
+                first = False
+            else:
+                self.write(", ")
+            self.write("*")
+            if node.vararg:
+                self.write(node.vararg.arg)
+                if node.vararg.annotation:
+                    self.write(": ")
+                    self.traverse(node.vararg.annotation)
+
+        # keyword-only arguments
+        if node.kwonlyargs:
+            for a, d in zip(node.kwonlyargs, node.kw_defaults):
+                self.write(", ")
+                self.traverse(a)
+                if d:
+                    self.write("=")
+                    self.traverse(d)
+
+        # kwargs
+        if node.kwarg:
+            if first:
+                first = False
+            else:
+                self.write(", ")
+            self.write("**" + node.kwarg.arg)
+            if node.kwarg.annotation:
+                self.write(": ")
+                self.traverse(node.kwarg.annotation)
+
+    def visit_keyword(self, node):
+        if node.arg is None:
+            self.write("**")
+        else:
+            self.write(node.arg)
+            self.write("=")
+        self.traverse(node.value)
+
+    def visit_Lambda(self, node):
+        with self.require_parens(_Precedence.TEST, node):
+            self.write("lambda ")
+            self.traverse(node.args)
+            self.write(": ")
+            self.set_precedence(_Precedence.TEST, node.body)
+            self.traverse(node.body)
+
+    def visit_alias(self, node):
+        self.write(node.name)
+        if node.asname:
+            self.write(" as " + node.asname)
+
+    def visit_withitem(self, node):
+        self.traverse(node.context_expr)
+        if node.optional_vars:
+            self.write(" as ")
+            self.traverse(node.optional_vars)
+
+def unparse(ast_obj):
+    unparser = _Unparser()
+    return unparser.visit(ast_obj)
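+# Illustrative usage (a sketch, assuming the module is imported as ast):
+#
+#     tree = ast.parse("x = 1 if y else 2")
+#     ast.unparse(tree)  # -> 'x = 1 if y else 2'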
+
+
+def main():
+    import argparse
+
+    parser = argparse.ArgumentParser(prog='python -m ast')
+    parser.add_argument('infile', type=argparse.FileType(mode='rb'), nargs='?',
+                        default='-',
+                        help='the file to parse; defaults to stdin')
+    parser.add_argument('-m', '--mode', default='exec',
+                        choices=('exec', 'single', 'eval', 'func_type'),
+                        help='specify what kind of code must be parsed')
+    parser.add_argument('--no-type-comments', default=True, action='store_false',
+                        help="don't add information about type comments")
+    parser.add_argument('-a', '--include-attributes', action='store_true',
+                        help='include attributes such as line numbers and '
+                             'column offsets')
+    parser.add_argument('-i', '--indent', type=int, default=3,
+                        help='indentation of nodes (number of spaces)')
+    args = parser.parse_args()
+
+    with args.infile as infile:
+        source = infile.read()
+    tree = parse(source, args.infile.name, args.mode, type_comments=args.no_type_comments)
+    print(dump(tree, include_attributes=args.include_attributes, indent=args.indent))
+
+if __name__ == '__main__':
+    main()
diff --git a/common/py3-stdlib/asynchat.py b/common/py3-stdlib/asynchat.py
index fc1146a..f4ba361 100644
--- a/common/py3-stdlib/asynchat.py
+++ b/common/py3-stdlib/asynchat.py
@@ -117,7 +117,7 @@
             data = self.recv(self.ac_in_buffer_size)
         except BlockingIOError:
             return
-        except OSError as why:
+        except OSError:
             self.handle_error()
             return
 
diff --git a/common/py3-stdlib/asyncio/__init__.py b/common/py3-stdlib/asyncio/__init__.py
index 28c2e2c..eb84bfb 100644
--- a/common/py3-stdlib/asyncio/__init__.py
+++ b/common/py3-stdlib/asyncio/__init__.py
@@ -17,6 +17,7 @@
 from .streams import *
 from .subprocess import *
 from .tasks import *
+from .threads import *
 from .transports import *
 
 # Exposed for _asynciomodule.c to implement now deprecated
@@ -35,6 +36,7 @@
            streams.__all__ +
            subprocess.__all__ +
            tasks.__all__ +
+           threads.__all__ +
            transports.__all__)
 
 if sys.platform == 'win32':  # pragma: no cover
diff --git a/common/py3-stdlib/asyncio/base_events.py b/common/py3-stdlib/asyncio/base_events.py
index 799013d..b2d446a 100644
--- a/common/py3-stdlib/asyncio/base_events.py
+++ b/common/py3-stdlib/asyncio/base_events.py
@@ -410,6 +410,8 @@
         self._asyncgens = weakref.WeakSet()
         # Set to True when `loop.shutdown_asyncgens` is called.
         self._asyncgens_shutdown_called = False
+        # Set to True when `loop.shutdown_default_executor` is called.
+        self._executor_shutdown_called = False
 
     def __repr__(self):
         return (
@@ -507,6 +509,10 @@
         if self._closed:
             raise RuntimeError('Event loop is closed')
 
+    def _check_default_executor(self):
+        if self._executor_shutdown_called:
+            raise RuntimeError('Executor shutdown has been called')
+
     def _asyncgen_finalizer_hook(self, agen):
         self._asyncgens.discard(agen)
         if not self.is_closed():
@@ -547,6 +553,26 @@
                     'asyncgen': agen
                 })
 
+    async def shutdown_default_executor(self):
+        """Schedule the shutdown of the default executor."""
+        self._executor_shutdown_called = True
+        if self._default_executor is None:
+            return
+        future = self.create_future()
+        thread = threading.Thread(target=self._do_shutdown, args=(future,))
+        thread.start()
+        try:
+            await future
+        finally:
+            thread.join()
+
+    def _do_shutdown(self, future):
+        try:
+            self._default_executor.shutdown(wait=True)
+            self.call_soon_threadsafe(future.set_result, None)
+        except Exception as ex:
+            self.call_soon_threadsafe(future.set_exception, ex)
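+    # Illustrative flow (an editorial note): the blocking
+    # ThreadPoolExecutor.shutdown(wait=True) call runs in a helper thread,
+    # and call_soon_threadsafe() resumes the awaiting coroutine once the
+    # worker threads have drained.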
+
     def _check_running(self):
         if self.is_running():
             raise RuntimeError('This event loop is already running')
@@ -640,6 +666,7 @@
         self._closed = True
         self._ready.clear()
         self._scheduled.clear()
+        self._executor_shutdown_called = True
         executor = self._default_executor
         if executor is not None:
             self._default_executor = None
@@ -776,8 +803,12 @@
             self._check_callback(func, 'run_in_executor')
         if executor is None:
             executor = self._default_executor
+            # Only check when the default executor is being used
+            self._check_default_executor()
             if executor is None:
-                executor = concurrent.futures.ThreadPoolExecutor()
+                executor = concurrent.futures.ThreadPoolExecutor(
+                    thread_name_prefix='asyncio'
+                )
                 self._default_executor = executor
         return futures.wrap_future(
             executor.submit(func, *args), loop=self)
diff --git a/common/py3-stdlib/asyncio/base_futures.py b/common/py3-stdlib/asyncio/base_futures.py
index 22f2980..2c01ac9 100644
--- a/common/py3-stdlib/asyncio/base_futures.py
+++ b/common/py3-stdlib/asyncio/base_futures.py
@@ -1,6 +1,7 @@
 __all__ = ()
 
 import reprlib
+from _thread import get_ident
 
 from . import format_helpers
 
@@ -41,6 +42,16 @@
     return f'cb=[{cb}]'
 
 
+# bpo-42183: _repr_running is needed for repr protection
+# when a Future or Task result contains itself directly or indirectly.
+# The logic is borrowed from the @reprlib.recursive_repr decorator.
+# Unfortunately, using the decorator directly is impossible because it
+# fails with "AttributeError: '_asyncio.Task' object has no attribute
+# '__module__'".
+#
+# Once that is fixed, we can return to the decorator-based approach.
+_repr_running = set()
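+# Illustrative scenario: if a future's result refers back to the future
+# itself, e.g.
+#
+#     fut.set_result([fut])
+#
+# repr(fut) would otherwise recurse; with the (id, thread id) key tracked
+# here, the inner occurrence is rendered as '...'.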
+
+
 def _future_repr_info(future):
     # (Future) -> str
     """helper function for Future.__repr__"""
@@ -49,9 +60,17 @@
         if future._exception is not None:
             info.append(f'exception={future._exception!r}')
         else:
-            # use reprlib to limit the length of the output, especially
-            # for very long strings
-            result = reprlib.repr(future._result)
+            key = id(future), get_ident()
+            if key in _repr_running:
+                result = '...'
+            else:
+                _repr_running.add(key)
+                try:
+                    # use reprlib to limit the length of the output, especially
+                    # for very long strings
+                    result = reprlib.repr(future._result)
+                finally:
+                    _repr_running.discard(key)
             info.append(f'result={result}')
     if future._callbacks:
         info.append(_format_callbacks(future._callbacks))
diff --git a/common/py3-stdlib/asyncio/events.py b/common/py3-stdlib/asyncio/events.py
index ca08663..0dce87b 100644
--- a/common/py3-stdlib/asyncio/events.py
+++ b/common/py3-stdlib/asyncio/events.py
@@ -19,7 +19,6 @@
 import threading
 
 from . import format_helpers
-from . import exceptions
 
 
 class Handle:
@@ -119,20 +118,24 @@
         return hash(self._when)
 
     def __lt__(self, other):
-        return self._when < other._when
+        if isinstance(other, TimerHandle):
+            return self._when < other._when
+        return NotImplemented
 
     def __le__(self, other):
-        if self._when < other._when:
-            return True
-        return self.__eq__(other)
+        if isinstance(other, TimerHandle):
+            return self._when < other._when or self.__eq__(other)
+        return NotImplemented
 
     def __gt__(self, other):
-        return self._when > other._when
+        if isinstance(other, TimerHandle):
+            return self._when > other._when
+        return NotImplemented
 
     def __ge__(self, other):
-        if self._when > other._when:
-            return True
-        return self.__eq__(other)
+        if isinstance(other, TimerHandle):
+            return self._when > other._when or self.__eq__(other)
+        return NotImplemented
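+    # Editorial note: returning NotImplemented lets Python try the
+    # reflected comparison (or raise TypeError), rather than failing with
+    # AttributeError when `other` has no _when attribute.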
 
     def __eq__(self, other):
         if isinstance(other, TimerHandle):
@@ -142,10 +145,6 @@
                     self._cancelled == other._cancelled)
         return NotImplemented
 
-    def __ne__(self, other):
-        equal = self.__eq__(other)
-        return NotImplemented if equal is NotImplemented else not equal
-
     def cancel(self):
         if not self._cancelled:
             self._loop._timer_handle_cancelled(self)
@@ -249,6 +248,10 @@
         """Shutdown all active asynchronous generators."""
         raise NotImplementedError
 
+    async def shutdown_default_executor(self):
+        """Schedule the shutdown of the default executor."""
+        raise NotImplementedError
+
     # Methods scheduling callbacks.  All these return Handles.
 
     def _timer_handle_cancelled(self, handle):
@@ -280,7 +283,7 @@
     def call_soon_threadsafe(self, callback, *args):
         raise NotImplementedError
 
-    async def run_in_executor(self, executor, func, *args):
+    def run_in_executor(self, executor, func, *args):
         raise NotImplementedError
 
     def set_default_executor(self, executor):
@@ -393,7 +396,7 @@
         The return value is a Server object, which can be used to stop
         the service.
 
-        path is a str, representing a file systsem path to bind the
+        path is a str, representing a file system path to bind the
         server socket to.
 
         sock can optionally be specified in order to use a preexisting
@@ -632,7 +635,7 @@
         """
         if (self._local._loop is None and
                 not self._local._set_called and
-                isinstance(threading.current_thread(), threading._MainThread)):
+                threading.current_thread() is threading.main_thread()):
             self.set_event_loop(self.new_event_loop())
 
         if self._local._loop is None:
diff --git a/common/py3-stdlib/asyncio/exceptions.py b/common/py3-stdlib/asyncio/exceptions.py
index e03602e..f07e448 100644
--- a/common/py3-stdlib/asyncio/exceptions.py
+++ b/common/py3-stdlib/asyncio/exceptions.py
@@ -34,8 +34,9 @@
     - expected: total number of expected bytes (or None if unknown)
     """
     def __init__(self, partial, expected):
+        r_expected = 'undefined' if expected is None else repr(expected)
         super().__init__(f'{len(partial)} bytes read on a total of '
-                         f'{expected!r} expected bytes')
+                         f'{r_expected} expected bytes')
         self.partial = partial
         self.expected = expected
 
diff --git a/common/py3-stdlib/asyncio/futures.py b/common/py3-stdlib/asyncio/futures.py
index 9afda22..bed4da5 100644
--- a/common/py3-stdlib/asyncio/futures.py
+++ b/common/py3-stdlib/asyncio/futures.py
@@ -51,6 +51,9 @@
     _exception = None
     _loop = None
     _source_traceback = None
+    _cancel_message = None
+    # A saved CancelledError for later chaining as an exception context.
+    _cancelled_exc = None
 
     # This field is used for a dual purpose:
     # - Its presence is a marker to declare that a class implements
@@ -103,6 +106,9 @@
             context['source_traceback'] = self._source_traceback
         self._loop.call_exception_handler(context)
 
+    def __class_getitem__(cls, type):
+        return cls
+
     @property
     def _log_traceback(self):
         return self.__log_traceback
@@ -120,7 +126,22 @@
             raise RuntimeError("Future object is not initialized.")
         return loop
 
-    def cancel(self):
+    def _make_cancelled_error(self):
+        """Create the CancelledError to raise if the Future is cancelled.
+
+        This should only be called once when handling a cancellation since
+        it erases the saved context exception value.
+        """
+        if self._cancel_message is None:
+            exc = exceptions.CancelledError()
+        else:
+            exc = exceptions.CancelledError(self._cancel_message)
+        exc.__context__ = self._cancelled_exc
+        # Remove the reference since we don't need this anymore.
+        self._cancelled_exc = None
+        return exc
+
+    def cancel(self, msg=None):
         """Cancel the future and schedule callbacks.
 
         If the future is already done or cancelled, return False.  Otherwise,
@@ -131,6 +152,7 @@
         if self._state != _PENDING:
             return False
         self._state = _CANCELLED
+        self._cancel_message = msg
         self.__schedule_callbacks()
         return True
 
@@ -170,7 +192,8 @@
         the future is done and has an exception set, this exception is raised.
         """
         if self._state == _CANCELLED:
-            raise exceptions.CancelledError
+            exc = self._make_cancelled_error()
+            raise exc
         if self._state != _FINISHED:
             raise exceptions.InvalidStateError('Result is not ready.')
         self.__log_traceback = False
@@ -187,7 +210,8 @@
         InvalidStateError.
         """
         if self._state == _CANCELLED:
-            raise exceptions.CancelledError
+            exc = self._make_cancelled_error()
+            raise exc
         if self._state != _FINISHED:
             raise exceptions.InvalidStateError('Exception is not set.')
         self.__log_traceback = False
diff --git a/common/py3-stdlib/asyncio/locks.py b/common/py3-stdlib/asyncio/locks.py
index d94daeb..f1ce732 100644
--- a/common/py3-stdlib/asyncio/locks.py
+++ b/common/py3-stdlib/asyncio/locks.py
@@ -3,96 +3,13 @@
 __all__ = ('Lock', 'Event', 'Condition', 'Semaphore', 'BoundedSemaphore')
 
 import collections
-import types
 import warnings
 
 from . import events
-from . import futures
 from . import exceptions
-from .import coroutines
-
-
-class _ContextManager:
-    """Context manager.
-
-    This enables the following idiom for acquiring and releasing a
-    lock around a block:
-
-        with (yield from lock):
-            <block>
-
-    while failing loudly when accidentally using:
-
-        with lock:
-            <block>
-
-    Deprecated, use 'async with' statement:
-        async with lock:
-            <block>
-    """
-
-    def __init__(self, lock):
-        self._lock = lock
-
-    def __enter__(self):
-        # We have no use for the "as ..."  clause in the with
-        # statement for locks.
-        return None
-
-    def __exit__(self, *args):
-        try:
-            self._lock.release()
-        finally:
-            self._lock = None  # Crudely prevent reuse.
 
 
 class _ContextManagerMixin:
-    def __enter__(self):
-        raise RuntimeError(
-            '"yield from" should be used as context manager expression')
-
-    def __exit__(self, *args):
-        # This must exist because __enter__ exists, even though that
-        # always raises; that's how the with-statement works.
-        pass
-
-    @types.coroutine
-    def __iter__(self):
-        # This is not a coroutine.  It is meant to enable the idiom:
-        #
-        #     with (yield from lock):
-        #         <block>
-        #
-        # as an alternative to:
-        #
-        #     yield from lock.acquire()
-        #     try:
-        #         <block>
-        #     finally:
-        #         lock.release()
-        # Deprecated, use 'async with' statement:
-        #     async with lock:
-        #         <block>
-        warnings.warn("'with (yield from lock)' is deprecated "
-                      "use 'async with lock' instead",
-                      DeprecationWarning, stacklevel=2)
-        yield from self.acquire()
-        return _ContextManager(self)
-
-    # The flag is needed for legacy asyncio.iscoroutine()
-    __iter__._is_coroutine = coroutines._is_coroutine
-
-    async def __acquire_ctx(self):
-        await self.acquire()
-        return _ContextManager(self)
-
-    def __await__(self):
-        warnings.warn("'with await lock' is deprecated "
-                      "use 'async with lock' instead",
-                      DeprecationWarning, stacklevel=2)
-        # To make "with await lock" work.
-        return self.__acquire_ctx().__await__()
-
     async def __aenter__(self):
         await self.acquire()
         # We have no use for the "as ..."  clause in the with
diff --git a/common/py3-stdlib/asyncio/proactor_events.py b/common/py3-stdlib/asyncio/proactor_events.py
index 830d8ed..b4cd414 100644
--- a/common/py3-stdlib/asyncio/proactor_events.py
+++ b/common/py3-stdlib/asyncio/proactor_events.py
@@ -711,7 +711,7 @@
             raise exceptions.SendfileNotAvailableError("not a regular file")
         try:
             fsize = os.fstat(fileno).st_size
-        except OSError as err:
+        except OSError:
             raise exceptions.SendfileNotAvailableError("not a regular file")
         blocksize = count if count else fsize
         if not blocksize:
@@ -766,6 +766,14 @@
         try:
             if f is not None:
                 f.result()  # may raise
+            if self._self_reading_future is not f:
+                # When we scheduled this Future, we assigned it to
+                # _self_reading_future. If it's not there now, something has
+                # tried to cancel the loop while this callback was still in the
+                # queue (see windows_events.ProactorEventLoop.run_forever). In
+                # that case stop here instead of continuing to schedule a new
+                # iteration.
+                return
             f = self._proactor.recv(self._ssock, 4096)
         except exceptions.CancelledError:
             # _close_self_pipe() has been called, stop waiting for data
@@ -783,8 +791,17 @@
             f.add_done_callback(self._loop_self_reading)
 
     def _write_to_self(self):
+        # This may be called from a different thread, possibly after
+        # _close_self_pipe() has been called or even while it is
+        # running.  Guard for self._csock being None or closed.  When
+        # a socket is closed, send() raises OSError (with errno set to
+        # EBADF, but let's not rely on the exact error code).
+        csock = self._csock
+        if csock is None:
+            return
+
         try:
-            self._csock.send(b'\0')
+            csock.send(b'\0')
         except OSError:
             if self._debug:
                 logger.debug("Fail to write a null byte into the "
diff --git a/common/py3-stdlib/asyncio/queues.py b/common/py3-stdlib/asyncio/queues.py
index 390ae9a..cd3f7c6 100644
--- a/common/py3-stdlib/asyncio/queues.py
+++ b/common/py3-stdlib/asyncio/queues.py
@@ -76,6 +76,9 @@
     def __str__(self):
         return f'<{type(self).__name__} {self._format()}>'
 
+    def __class_getitem__(cls, type):
+        return cls
+
     def _format(self):
         result = f'maxsize={self._maxsize!r}'
         if getattr(self, '_queue', None):
diff --git a/common/py3-stdlib/asyncio/runners.py b/common/py3-stdlib/asyncio/runners.py
index 2e37e18..268635d 100644
--- a/common/py3-stdlib/asyncio/runners.py
+++ b/common/py3-stdlib/asyncio/runners.py
@@ -5,7 +5,7 @@
 from . import tasks
 
 
-def run(main, *, debug=False):
+def run(main, *, debug=None):
     """Execute the coroutine and return the result.
 
     This function runs the passed coroutine, taking care of
@@ -39,12 +39,14 @@
     loop = events.new_event_loop()
     try:
         events.set_event_loop(loop)
-        loop.set_debug(debug)
+        if debug is not None:
+            loop.set_debug(debug)
         return loop.run_until_complete(main)
     finally:
         try:
             _cancel_all_tasks(loop)
             loop.run_until_complete(loop.shutdown_asyncgens())
+            loop.run_until_complete(loop.shutdown_default_executor())
         finally:
             events.set_event_loop(None)
             loop.close()
diff --git a/common/py3-stdlib/asyncio/selector_events.py b/common/py3-stdlib/asyncio/selector_events.py
index a05cbb6..59cb6b1 100644
--- a/common/py3-stdlib/asyncio/selector_events.py
+++ b/common/py3-stdlib/asyncio/selector_events.py
@@ -133,14 +133,16 @@
         # a socket is closed, send() raises OSError (with errno set to
         # EBADF, but let's not rely on the exact error code).
         csock = self._csock
-        if csock is not None:
-            try:
-                csock.send(b'\0')
-            except OSError:
-                if self._debug:
-                    logger.debug("Fail to write a null byte into the "
-                                 "self-pipe socket",
-                                 exc_info=True)
+        if csock is None:
+            return
+
+        try:
+            csock.send(b'\0')
+        except OSError:
+            if self._debug:
+                logger.debug("Fail to write a null byte into the "
+                             "self-pipe socket",
+                             exc_info=True)
 
     def _start_serving(self, protocol_factory, sock,
                        sslcontext=None, server=None, backlog=100,
@@ -266,6 +268,7 @@
                                   (handle, writer))
             if reader is not None:
                 reader.cancel()
+        return handle
 
     def _remove_reader(self, fd):
         if self.is_closed():
@@ -302,6 +305,7 @@
                                   (reader, handle))
             if writer is not None:
                 writer.cancel()
+        return handle
 
     def _remove_writer(self, fd):
         """Remove a writer callback."""
@@ -329,7 +333,7 @@
     def add_reader(self, fd, callback, *args):
         """Add a reader callback."""
         self._ensure_fd_no_transport(fd)
-        return self._add_reader(fd, callback, *args)
+        self._add_reader(fd, callback, *args)
 
     def remove_reader(self, fd):
         """Remove a reader callback."""
@@ -339,7 +343,7 @@
     def add_writer(self, fd, callback, *args):
         """Add a writer callback.."""
         self._ensure_fd_no_transport(fd)
-        return self._add_writer(fd, callback, *args)
+        self._add_writer(fd, callback, *args)
 
     def remove_writer(self, fd):
         """Remove a writer callback."""
@@ -362,13 +366,15 @@
             pass
         fut = self.create_future()
         fd = sock.fileno()
-        self.add_reader(fd, self._sock_recv, fut, sock, n)
+        self._ensure_fd_no_transport(fd)
+        handle = self._add_reader(fd, self._sock_recv, fut, sock, n)
         fut.add_done_callback(
-            functools.partial(self._sock_read_done, fd))
+            functools.partial(self._sock_read_done, fd, handle=handle))
         return await fut
 
-    def _sock_read_done(self, fd, fut):
-        self.remove_reader(fd)
+    def _sock_read_done(self, fd, fut, handle=None):
+        if handle is None or not handle.cancelled():
+            self.remove_reader(fd)
 
     def _sock_recv(self, fut, sock, n):
         # _sock_recv() can add itself as an I/O callback if the operation can't
@@ -401,9 +407,10 @@
             pass
         fut = self.create_future()
         fd = sock.fileno()
-        self.add_reader(fd, self._sock_recv_into, fut, sock, buf)
+        self._ensure_fd_no_transport(fd)
+        handle = self._add_reader(fd, self._sock_recv_into, fut, sock, buf)
         fut.add_done_callback(
-            functools.partial(self._sock_read_done, fd))
+            functools.partial(self._sock_read_done, fd, handle=handle))
         return await fut
 
     def _sock_recv_into(self, fut, sock, buf):
@@ -446,11 +453,12 @@
 
         fut = self.create_future()
         fd = sock.fileno()
-        fut.add_done_callback(
-            functools.partial(self._sock_write_done, fd))
+        self._ensure_fd_no_transport(fd)
         # use a trick with a list in closure to store a mutable state
-        self.add_writer(fd, self._sock_sendall, fut, sock,
-                        memoryview(data), [n])
+        handle = self._add_writer(fd, self._sock_sendall, fut, sock,
+                                  memoryview(data), [n])
+        fut.add_done_callback(
+            functools.partial(self._sock_write_done, fd, handle=handle))
         return await fut
 
     def _sock_sendall(self, fut, sock, view, pos):
@@ -502,9 +510,11 @@
             # connection runs in background. We have to wait until the socket
             # becomes writable to be notified when the connection succeed or
             # fails.
+            self._ensure_fd_no_transport(fd)
+            handle = self._add_writer(
+                fd, self._sock_connect_cb, fut, sock, address)
             fut.add_done_callback(
-                functools.partial(self._sock_write_done, fd))
-            self.add_writer(fd, self._sock_connect_cb, fut, sock, address)
+                functools.partial(self._sock_write_done, fd, handle=handle))
         except (SystemExit, KeyboardInterrupt):
             raise
         except BaseException as exc:
@@ -512,8 +522,9 @@
         else:
             fut.set_result(None)
 
-    def _sock_write_done(self, fd, fut):
-        self.remove_writer(fd)
+    def _sock_write_done(self, fd, fut, handle=None):
+        if handle is None or not handle.cancelled():
+            self.remove_writer(fd)
 
     def _sock_connect_cb(self, fut, sock, address):
         if fut.done():
@@ -546,20 +557,19 @@
         if self._debug and sock.gettimeout() != 0:
             raise ValueError("the socket must be non-blocking")
         fut = self.create_future()
-        self._sock_accept(fut, False, sock)
+        self._sock_accept(fut, sock)
         return await fut
 
-    def _sock_accept(self, fut, registered, sock):
+    def _sock_accept(self, fut, sock):
         fd = sock.fileno()
-        if registered:
-            self.remove_reader(fd)
-        if fut.done():
-            return
         try:
             conn, address = sock.accept()
             conn.setblocking(False)
         except (BlockingIOError, InterruptedError):
-            self.add_reader(fd, self._sock_accept, fut, True, sock)
+            self._ensure_fd_no_transport(fd)
+            handle = self._add_reader(fd, self._sock_accept, fut, sock)
+            fut.add_done_callback(
+                functools.partial(self._sock_read_done, fd, handle=handle))
         except (SystemExit, KeyboardInterrupt):
             raise
         except BaseException as exc:
diff --git a/common/py3-stdlib/asyncio/sslproto.py b/common/py3-stdlib/asyncio/sslproto.py
index 3eca6b4..cad25b2 100644
--- a/common/py3-stdlib/asyncio/sslproto.py
+++ b/common/py3-stdlib/asyncio/sslproto.py
@@ -5,7 +5,6 @@
 except ImportError:  # pragma: no cover
     ssl = None
 
-from . import base_events
 from . import constants
 from . import protocols
 from . import transports
diff --git a/common/py3-stdlib/asyncio/tasks.py b/common/py3-stdlib/asyncio/tasks.py
index 66e81f9..f486b67 100644
--- a/common/py3-stdlib/asyncio/tasks.py
+++ b/common/py3-stdlib/asyncio/tasks.py
@@ -113,34 +113,6 @@
     # status is still pending
     _log_destroy_pending = True
 
-    @classmethod
-    def current_task(cls, loop=None):
-        """Return the currently running task in an event loop or None.
-
-        By default the current task for the current event loop is returned.
-
-        None is returned when called not in the context of a Task.
-        """
-        warnings.warn("Task.current_task() is deprecated since Python 3.7, "
-                      "use asyncio.current_task() instead",
-                      DeprecationWarning,
-                      stacklevel=2)
-        if loop is None:
-            loop = events.get_event_loop()
-        return current_task(loop)
-
-    @classmethod
-    def all_tasks(cls, loop=None):
-        """Return a set of all tasks for an event loop.
-
-        By default all tasks for the current event loop are returned.
-        """
-        warnings.warn("Task.all_tasks() is deprecated since Python 3.7, "
-                      "use asyncio.all_tasks() instead",
-                      DeprecationWarning,
-                      stacklevel=2)
-        return _all_tasks_compat(loop)
-
     def __init__(self, coro, *, loop=None, name=None):
         super().__init__(loop=loop)
         if self._source_traceback:
@@ -175,6 +147,9 @@
             self._loop.call_exception_handler(context)
         super().__del__()
 
+    def __class_getitem__(cls, type):
+        return cls
+
     def _repr_info(self):
         return base_tasks._task_repr_info(self)
 
@@ -227,7 +202,7 @@
         """
         return base_tasks._task_print_stack(self, limit, file)
 
-    def cancel(self):
+    def cancel(self, msg=None):
         """Request that this task cancel itself.
 
         This arranges for a CancelledError to be thrown into the
@@ -251,13 +226,14 @@
         if self.done():
             return False
         if self._fut_waiter is not None:
-            if self._fut_waiter.cancel():
+            if self._fut_waiter.cancel(msg=msg):
                 # Leave self._fut_waiter; it may be a Task that
                 # catches and ignores the cancellation so we may have
                 # to cancel it again later.
                 return True
         # It must be the case that self.__step is already scheduled.
         self._must_cancel = True
+        self._cancel_message = msg
         return True
 
     def __step(self, exc=None):
@@ -266,7 +242,7 @@
                 f'_step(): already done: {self!r}, {exc!r}')
         if self._must_cancel:
             if not isinstance(exc, exceptions.CancelledError):
-                exc = exceptions.CancelledError()
+                exc = self._make_cancelled_error()
             self._must_cancel = False
         coro = self._coro
         self._fut_waiter = None
@@ -284,10 +260,12 @@
             if self._must_cancel:
                 # Task is cancelled right before coro stops.
                 self._must_cancel = False
-                super().cancel()
+                super().cancel(msg=self._cancel_message)
             else:
                 super().set_result(exc.value)
-        except exceptions.CancelledError:
+        except exceptions.CancelledError as exc:
+            # Save the original exception so we can chain it later.
+            self._cancelled_exc = exc
             super().cancel()  # I.e., Future.cancel(self).
         except (KeyboardInterrupt, SystemExit) as exc:
             super().set_exception(exc)
@@ -316,7 +294,8 @@
                             self.__wakeup, context=self._context)
                         self._fut_waiter = result
                         if self._must_cancel:
-                            if self._fut_waiter.cancel():
+                            if self._fut_waiter.cancel(
+                                    msg=self._cancel_message):
                                 self._must_cancel = False
                 else:
                     new_exc = RuntimeError(
@@ -394,7 +373,7 @@
 async def wait(fs, *, loop=None, timeout=None, return_when=ALL_COMPLETED):
     """Wait for the Futures and coroutines given by fs to complete.
 
-    The sequence futures must not be empty.
+    The fs iterable must not be empty.
 
     Coroutines will be wrapped in Tasks.
 
@@ -421,7 +400,15 @@
                       "and scheduled for removal in Python 3.10.",
                       DeprecationWarning, stacklevel=2)
 
-    fs = {ensure_future(f, loop=loop) for f in set(fs)}
+    fs = set(fs)
+
+    if any(coroutines.iscoroutine(f) for f in fs):
+        warnings.warn("The explicit passing of coroutine objects to "
+                      "asyncio.wait() is deprecated since Python 3.8, and "
+                      "scheduled for removal in Python 3.11.",
+                      DeprecationWarning, stacklevel=2)
+
+    fs = {ensure_future(f, loop=loop) for f in fs}
 
     return await _wait(fs, timeout, return_when, loop)
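
A short sketch of the migration this deprecation points at: wrap coroutines
in tasks explicitly, so the caller keeps references to them.

    import asyncio

    async def job(n):
        await asyncio.sleep(0.01 * n)
        return n

    async def main():
        # Instead of the deprecated: await asyncio.wait([job(1), job(2)])
        tasks = {asyncio.create_task(job(n)) for n in (1, 2)}
        done, pending = await asyncio.wait(tasks)
        print(sorted(t.result() for t in done))  # [1, 2]

    asyncio.run(main())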
 
@@ -460,8 +447,13 @@
         if fut.done():
             return fut.result()
 
-        fut.cancel()
-        raise exceptions.TimeoutError()
+        await _cancel_and_wait(fut, loop=loop)
+        try:
+            fut.result()
+        except exceptions.CancelledError as exc:
+            raise exceptions.TimeoutError() from exc
+        else:
+            raise exceptions.TimeoutError()
 
     waiter = loop.create_future()
     timeout_handle = loop.call_later(timeout, _release_waiter, waiter)
@@ -475,9 +467,12 @@
         try:
             await waiter
         except exceptions.CancelledError:
-            fut.remove_done_callback(cb)
-            fut.cancel()
-            raise
+            if fut.done():
+                return fut.result()
+            else:
+                fut.remove_done_callback(cb)
+                fut.cancel()
+                raise
 
         if fut.done():
             return fut.result()
@@ -487,7 +482,15 @@
             # after wait_for() returns.
             # See https://bugs.python.org/issue32751
             await _cancel_and_wait(fut, loop=loop)
-            raise exceptions.TimeoutError()
+            # In case task cancellation failed with some
+            # exception, we should re-raise it.
+            # See https://bugs.python.org/issue40607
+            try:
+                fut.result()
+            except exceptions.CancelledError as exc:
+                raise exceptions.TimeoutError() from exc
+            else:
+                raise exceptions.TimeoutError()
     finally:
         timeout_handle.cancel()
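
A minimal illustration of the behavior after this change (assuming Python
3.9): when the inner future is cancelled on timeout, the TimeoutError is
chained to the underlying CancelledError via __cause__.

    import asyncio

    async def main():
        try:
            await asyncio.wait_for(asyncio.sleep(10), timeout=0.01)
        except asyncio.TimeoutError as exc:
            print(type(exc.__cause__).__name__)  # CancelledError

    asyncio.run(main())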
 
@@ -572,7 +575,7 @@
     Note: The futures 'f' are not necessarily members of fs.
     """
     if futures.isfuture(fs) or coroutines.iscoroutine(fs):
-        raise TypeError(f"expect a list of futures, not {type(fs).__name__}")
+        raise TypeError(f"expect an iterable of futures, not {type(fs).__name__}")
 
     from .queues import Queue  # Import here to avoid circular import problem.
     done = Queue(loop=loop)
@@ -699,12 +702,12 @@
         self._children = children
         self._cancel_requested = False
 
-    def cancel(self):
+    def cancel(self, msg=None):
         if self.done():
             return False
         ret = False
         for child in self._children:
-            if child.cancel():
+            if child.cancel(msg=msg):
                 ret = True
         if ret:
             # If any child tasks were actually cancelled, we should
@@ -770,7 +773,7 @@
                 # Check if 'fut' is cancelled first, as
                 # 'fut.exception()' will *raise* a CancelledError
                 # instead of returning it.
-                exc = exceptions.CancelledError()
+                exc = fut._make_cancelled_error()
                 outer.set_exception(exc)
                 return
             else:
@@ -786,10 +789,15 @@
 
             for fut in children:
                 if fut.cancelled():
-                    # Check if 'fut' is cancelled first, as
-                    # 'fut.exception()' will *raise* a CancelledError
-                    # instead of returning it.
-                    res = exceptions.CancelledError()
+                    # Check if 'fut' is cancelled first, as 'fut.exception()'
+                    # will *raise* a CancelledError instead of returning it.
+                    # Also, since we're adding the exception return value
+                    # to 'results' instead of raising it, don't bother
+                    # setting __context__.  This also ensures that
+                    # '_make_cancelled_error()' is called at most once.
+                    res = exceptions.CancelledError(
+                        '' if fut._cancel_message is None else
+                        fut._cancel_message)
                 else:
                     res = fut.exception()
                     if res is None:
@@ -800,7 +808,8 @@
                 # If gather is being cancelled we must propagate the
                 # cancellation regardless of *return_exceptions* argument.
                 # See issue 32684.
-                outer.set_exception(exceptions.CancelledError())
+                exc = fut._make_cancelled_error()
+                outer.set_exception(exc)
             else:
                 outer.set_result(results)
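
A sketch of the return_exceptions path these hunks adjust (assuming Python
3.9 behavior): a cancelled child shows up in the results list as a
CancelledError carrying the cancellation message.

    import asyncio

    async def main():
        t = asyncio.create_task(asyncio.sleep(10))
        g = asyncio.gather(t, return_exceptions=True)
        await asyncio.sleep(0)            # let the child start waiting
        t.cancel(msg="too slow")
        results = await g
        print(type(results[0]).__name__, results[0].args)
        # CancelledError ('too slow',)

    asyncio.run(main())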
 
diff --git a/common/py3-stdlib/asyncio/threads.py b/common/py3-stdlib/asyncio/threads.py
new file mode 100644
index 0000000..34b7513
--- /dev/null
+++ b/common/py3-stdlib/asyncio/threads.py
@@ -0,0 +1,25 @@
+"""High-level support for working with threads in asyncio"""
+
+import functools
+import contextvars
+
+from . import events
+
+
+__all__ = "to_thread",
+
+
+async def to_thread(func, /, *args, **kwargs):
+    """Asynchronously run function *func* in a separate thread.
+
+    Any *args and **kwargs supplied for this function are directly passed
+    to *func*. Also, the current :class:`contextvars.Context` is propagated,
+    allowing context variables from the main thread to be accessed in the
+    separate thread.
+
+    Return a coroutine that can be awaited to get the eventual result of *func*.
+    """
+    loop = events.get_running_loop()
+    ctx = contextvars.copy_context()
+    func_call = functools.partial(ctx.run, func, *args, **kwargs)
+    return await loop.run_in_executor(None, func_call)
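
A usage sketch for the new helper (assuming it is re-exported as
asyncio.to_thread, as it is in Python 3.9):

    import asyncio
    import time

    def blocking_io(seconds):
        time.sleep(seconds)               # stands in for any blocking call
        return f"slept {seconds}s"

    async def main():
        # Runs in the default executor without blocking the event loop.
        print(await asyncio.to_thread(blocking_io, 0.1))

    asyncio.run(main())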
diff --git a/common/py3-stdlib/asyncio/transports.py b/common/py3-stdlib/asyncio/transports.py
index 513b1c0..45e155c 100644
--- a/common/py3-stdlib/asyncio/transports.py
+++ b/common/py3-stdlib/asyncio/transports.py
@@ -29,8 +29,8 @@
 
         Buffered data will be flushed asynchronously.  No more data
         will be received.  After all buffered data is flushed, the
-        protocol's connection_lost() method will (eventually) called
-        with None as its argument.
+        protocol's connection_lost() method will (eventually) be
+        called with None as its argument.
         """
         raise NotImplementedError
 
diff --git a/common/py3-stdlib/asyncio/unix_events.py b/common/py3-stdlib/asyncio/unix_events.py
index 1ff8c42..f34a5b4 100644
--- a/common/py3-stdlib/asyncio/unix_events.py
+++ b/common/py3-stdlib/asyncio/unix_events.py
@@ -29,7 +29,7 @@
 __all__ = (
     'SelectorEventLoop',
     'AbstractChildWatcher', 'SafeChildWatcher',
-    'FastChildWatcher',
+    'FastChildWatcher', 'PidfdChildWatcher',
     'MultiLoopChildWatcher', 'ThreadedChildWatcher',
     'DefaultEventLoopPolicy',
 )
@@ -330,7 +330,7 @@
     async def _sock_sendfile_native(self, sock, file, offset, count):
         try:
             os.sendfile
-        except AttributeError as exc:
+        except AttributeError:
             raise exceptions.SendfileNotAvailableError(
                 "os.sendfile() is not available")
         try:
@@ -339,7 +339,7 @@
             raise exceptions.SendfileNotAvailableError("not a regular file")
         try:
             fsize = os.fstat(fileno).st_size
-        except OSError as err:
+        except OSError:
             raise exceptions.SendfileNotAvailableError("not a regular file")
         blocksize = count if count else fsize
         if not blocksize:
@@ -878,6 +878,84 @@
         raise NotImplementedError()
 
 
+class PidfdChildWatcher(AbstractChildWatcher):
+    """Child watcher implementation using Linux's pid file descriptors.
+
+    This child watcher polls process file descriptors (pidfds) to await child
+    process termination. In some respects, PidfdChildWatcher is a "Goldilocks"
+    child watcher implementation. It doesn't require signals or threads, doesn't
+    interfere with any processes launched outside the event loop, and scales
+    linearly with the number of subprocesses launched by the event loop. The
+    main disadvantage is that pidfds are specific to Linux, and only work on
+    recent (5.3+) kernels.
+    """
+
+    def __init__(self):
+        self._loop = None
+        self._callbacks = {}
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_traceback):
+        pass
+
+    def is_active(self):
+        return self._loop is not None and self._loop.is_running()
+
+    def close(self):
+        self.attach_loop(None)
+
+    def attach_loop(self, loop):
+        if self._loop is not None and loop is None and self._callbacks:
+            warnings.warn(
+                'A loop is being detached '
+                'from a child watcher with pending handlers',
+                RuntimeWarning)
+        for pidfd, _, _ in self._callbacks.values():
+            self._loop._remove_reader(pidfd)
+            os.close(pidfd)
+        self._callbacks.clear()
+        self._loop = loop
+
+    def add_child_handler(self, pid, callback, *args):
+        existing = self._callbacks.get(pid)
+        if existing is not None:
+            self._callbacks[pid] = existing[0], callback, args
+        else:
+            pidfd = os.pidfd_open(pid)
+            self._loop._add_reader(pidfd, self._do_wait, pid)
+            self._callbacks[pid] = pidfd, callback, args
+
+    def _do_wait(self, pid):
+        pidfd, callback, args = self._callbacks.pop(pid)
+        self._loop._remove_reader(pidfd)
+        try:
+            _, status = os.waitpid(pid, 0)
+        except ChildProcessError:
+            # The child process is already reaped
+            # (may happen if waitpid() is called elsewhere).
+            returncode = 255
+            logger.warning(
+                "child process pid %d exit status already read: "
+                " will report returncode 255",
+                pid)
+        else:
+            returncode = _compute_returncode(status)
+
+        os.close(pidfd)
+        callback(pid, returncode, *args)
+
+    def remove_child_handler(self, pid):
+        try:
+            pidfd, _, _ = self._callbacks.pop(pid)
+        except KeyError:
+            return False
+        self._loop._remove_reader(pidfd)
+        os.close(pidfd)
+        return True
+
+
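A usage sketch (assumptions: a Linux 5.3+ kernel where os.pidfd_open exists,
and the watcher installed through the policy hook as in Python 3.9):

    import asyncio
    import os

    async def main():
        proc = await asyncio.create_subprocess_exec("true")
        print("exit status:", await proc.wait())

    if hasattr(os, "pidfd_open"):         # pidfds are Linux-only
        asyncio.set_child_watcher(asyncio.PidfdChildWatcher())
    asyncio.run(main())
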
 def _compute_returncode(status):
     if os.WIFSIGNALED(status):
         # The child process died because of a signal.
@@ -1346,8 +1424,7 @@
         with events._lock:
             if self._watcher is None:  # pragma: no branch
                 self._watcher = ThreadedChildWatcher()
-                if isinstance(threading.current_thread(),
-                              threading._MainThread):
+                if threading.current_thread() is threading.main_thread():
                     self._watcher.attach_loop(self._local._loop)
 
     def set_event_loop(self, loop):
@@ -1361,7 +1438,7 @@
         super().set_event_loop(loop)
 
         if (self._watcher is not None and
-                isinstance(threading.current_thread(), threading._MainThread)):
+                threading.current_thread() is threading.main_thread()):
             self._watcher.attach_loop(loop)
 
     def get_child_watcher(self):
diff --git a/common/py3-stdlib/asyncio/windows_events.py b/common/py3-stdlib/asyncio/windows_events.py
index ac51109..5e7cd79 100644
--- a/common/py3-stdlib/asyncio/windows_events.py
+++ b/common/py3-stdlib/asyncio/windows_events.py
@@ -75,9 +75,9 @@
             self._loop.call_exception_handler(context)
         self._ov = None
 
-    def cancel(self):
+    def cancel(self, msg=None):
         self._cancel_overlapped()
-        return super().cancel()
+        return super().cancel(msg=msg)
 
     def set_exception(self, exception):
         super().set_exception(exception)
@@ -149,9 +149,9 @@
 
         self._unregister_wait_cb(None)
 
-    def cancel(self):
+    def cancel(self, msg=None):
         self._unregister_wait()
-        return super().cancel()
+        return super().cancel(msg=msg)
 
     def set_exception(self, exception):
         self._unregister_wait()
@@ -318,8 +318,12 @@
             if self._self_reading_future is not None:
                 ov = self._self_reading_future._ov
                 self._self_reading_future.cancel()
-                # self_reading_future was just cancelled so it will never be signalled
-                # Unregister it otherwise IocpProactor.close will wait for it forever
+                # self_reading_future was just cancelled so if it hasn't been
+                # finished yet, it never will be (though it may have already
+                # finished, in which case its callback is waiting in the queue
+                # and could still run if the event loop is restarted).
+                # Unregister it otherwise IocpProactor.close will wait for it
+                # forever
                 if ov is not None:
                     self._proactor._unregister(ov)
                 self._self_reading_future = None
@@ -469,7 +473,7 @@
             else:
                 ov.ReadFileInto(conn.fileno(), buf)
         except BrokenPipeError:
-            return self._result(b'')
+            return self._result(0)
 
         def finish_recv(trans, key, ov):
             try:
diff --git a/common/py3-stdlib/asyncore.py b/common/py3-stdlib/asyncore.py
index 0e92be3..ce16f11 100644
--- a/common/py3-stdlib/asyncore.py
+++ b/common/py3-stdlib/asyncore.py
@@ -228,7 +228,7 @@
         if sock:
             # Set to nonblocking just to make sure for cases where we
             # get a socket from a blocking source.
-            sock.setblocking(0)
+            sock.setblocking(False)
             self.set_socket(sock, map)
             self.connected = True
             # The constructor no longer requires that the socket
@@ -280,7 +280,7 @@
     def create_socket(self, family=socket.AF_INET, type=socket.SOCK_STREAM):
         self.family_and_type = family, type
         sock = socket.socket(family, type)
-        sock.setblocking(0)
+        sock.setblocking(False)
         self.set_socket(sock)
 
     def set_socket(self, sock, map=None):
diff --git a/common/py3-stdlib/base64.py b/common/py3-stdlib/base64.py
index 2e70223..a28109f 100755
--- a/common/py3-stdlib/base64.py
+++ b/common/py3-stdlib/base64.py
@@ -531,28 +531,12 @@
         pieces.append(binascii.b2a_base64(chunk))
     return b"".join(pieces)
 
-def encodestring(s):
-    """Legacy alias of encodebytes()."""
-    import warnings
-    warnings.warn("encodestring() is a deprecated alias since 3.1, "
-                  "use encodebytes()",
-                  DeprecationWarning, 2)
-    return encodebytes(s)
-
 
 def decodebytes(s):
     """Decode a bytestring of base-64 data into a bytes object."""
     _input_type_check(s)
     return binascii.a2b_base64(s)
 
-def decodestring(s):
-    """Legacy alias of decodebytes()."""
-    import warnings
-    warnings.warn("decodestring() is a deprecated alias since Python 3.1, "
-                  "use decodebytes()",
-                  DeprecationWarning, 2)
-    return decodebytes(s)
-
 
 # Usable as a script...
 def main():
diff --git a/common/py3-stdlib/bdb.py b/common/py3-stdlib/bdb.py
index 18491da..b18a061 100644
--- a/common/py3-stdlib/bdb.py
+++ b/common/py3-stdlib/bdb.py
@@ -611,26 +611,11 @@
 
     # This method is more useful to debug a single function call.
 
-    def runcall(*args, **kwds):
+    def runcall(self, func, /, *args, **kwds):
         """Debug a single function call.
 
         Return the result of the function call.
         """
-        if len(args) >= 2:
-            self, func, *args = args
-        elif not args:
-            raise TypeError("descriptor 'runcall' of 'Bdb' object "
-                            "needs an argument")
-        elif 'func' in kwds:
-            func = kwds.pop('func')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'func' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('runcall expected at least 1 positional argument, '
-                            'got %d' % (len(args)-1))
-
         self.reset()
         sys.settrace(self.trace_dispatch)
         res = None
@@ -642,7 +627,6 @@
             self.quitting = True
             sys.settrace(None)
         return res
-    runcall.__text_signature__ = '($self, func, /, *args, **kwds)'
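
A small sketch of what the positional-only marker buys here: 'func' can no
longer collide with a keyword argument destined for the traced callable.

    import bdb

    def target(*, func="payload"):
        return func

    dbg = bdb.Bdb()
    # 'func' in **kwds now reaches target() instead of being captured by
    # runcall's own first parameter, which is positional-only.
    print(dbg.runcall(target, func="payload"))  # 'payload'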
 
 
 def set_trace():
diff --git a/common/py3-stdlib/binhex.py b/common/py3-stdlib/binhex.py
index 56b5f85..ace5217 100644
--- a/common/py3-stdlib/binhex.py
+++ b/common/py3-stdlib/binhex.py
@@ -21,10 +21,16 @@
 # input. The resulting code (xx 90 90) would appear to be interpreted as an
 # escaped *value* of 0x90. All coders I've seen appear to ignore this nicety...
 #
+import binascii
+import contextlib
 import io
 import os
 import struct
-import binascii
+import warnings
+
+warnings.warn('the binhex module is deprecated', DeprecationWarning,
+              stacklevel=2)
+
 
 __all__ = ["binhex","hexbin","Error"]
 
@@ -76,6 +82,16 @@
     def close(self):
         pass
 
+
+# DeprecationWarning is already emitted on "import binhex". There is no need
+# to repeat the warning at each call to deprecated binascii functions.
+@contextlib.contextmanager
+def _ignore_deprecation_warning():
+    with warnings.catch_warnings():
+        warnings.filterwarnings('ignore', '', DeprecationWarning)
+        yield
+
+
 class _Hqxcoderengine:
     """Write data to the coder in 3-byte chunks"""
 
@@ -93,23 +109,25 @@
         self.data = self.data[todo:]
         if not data:
             return
-        self.hqxdata = self.hqxdata + binascii.b2a_hqx(data)
+        with _ignore_deprecation_warning():
+            self.hqxdata = self.hqxdata + binascii.b2a_hqx(data)
         self._flush(0)
 
     def _flush(self, force):
         first = 0
         while first <= len(self.hqxdata) - self.linelen:
             last = first + self.linelen
-            self.ofp.write(self.hqxdata[first:last] + b'\n')
+            self.ofp.write(self.hqxdata[first:last] + b'\r')
             self.linelen = LINELEN
             first = last
         self.hqxdata = self.hqxdata[first:]
         if force:
-            self.ofp.write(self.hqxdata + b':\n')
+            self.ofp.write(self.hqxdata + b':\r')
 
     def close(self):
         if self.data:
-            self.hqxdata = self.hqxdata + binascii.b2a_hqx(self.data)
+            with _ignore_deprecation_warning():
+                self.hqxdata = self.hqxdata + binascii.b2a_hqx(self.data)
         self._flush(1)
         self.ofp.close()
         del self.ofp
@@ -125,13 +143,15 @@
         self.data = self.data + data
         if len(self.data) < REASONABLY_LARGE:
             return
-        rledata = binascii.rlecode_hqx(self.data)
+        with _ignore_deprecation_warning():
+            rledata = binascii.rlecode_hqx(self.data)
         self.ofp.write(rledata)
         self.data = b''
 
     def close(self):
         if self.data:
-            rledata = binascii.rlecode_hqx(self.data)
+            with _ignore_deprecation_warning():
+                rledata = binascii.rlecode_hqx(self.data)
             self.ofp.write(rledata)
         self.ofp.close()
         del self.ofp
@@ -276,7 +296,8 @@
             #
             while True:
                 try:
-                    decdatacur, self.eof = binascii.a2b_hqx(data)
+                    with _ignore_deprecation_warning():
+                        decdatacur, self.eof = binascii.a2b_hqx(data)
                     break
                 except binascii.Incomplete:
                     pass
@@ -312,8 +333,9 @@
     def _fill(self, wtd):
         self.pre_buffer = self.pre_buffer + self.ifp.read(wtd + 4)
         if self.ifp.eof:
-            self.post_buffer = self.post_buffer + \
-                binascii.rledecode_hqx(self.pre_buffer)
+            with _ignore_deprecation_warning():
+                self.post_buffer = self.post_buffer + \
+                    binascii.rledecode_hqx(self.pre_buffer)
             self.pre_buffer = b''
             return
 
@@ -340,8 +362,9 @@
         else:
             mark = mark - 1
 
-        self.post_buffer = self.post_buffer + \
-            binascii.rledecode_hqx(self.pre_buffer[:mark])
+        with _ignore_deprecation_warning():
+            self.post_buffer = self.post_buffer + \
+                binascii.rledecode_hqx(self.pre_buffer[:mark])
         self.pre_buffer = self.pre_buffer[mark:]
 
     def close(self):
diff --git a/common/py3-stdlib/bisect.py b/common/py3-stdlib/bisect.py
index 9786fc9..8f3f6a3 100644
--- a/common/py3-stdlib/bisect.py
+++ b/common/py3-stdlib/bisect.py
@@ -29,6 +29,7 @@
         hi = len(a)
     while lo < hi:
         mid = (lo+hi)//2
+        # Use __lt__ to match the logic in list.sort() and in heapq
         if x < a[mid]: hi = mid
         else: lo = mid+1
     return lo
@@ -63,6 +64,7 @@
         hi = len(a)
     while lo < hi:
         mid = (lo+hi)//2
+        # Use __lt__ to match the logic in list.sort() and in heapq
         if a[mid] < x: lo = mid+1
         else: hi = mid
     return lo
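
The comment points at a useful property: items only need to define __lt__
for bisect, list.sort(), and heapq to agree. A tiny sketch:

    import bisect

    class Key:
        def __init__(self, k):
            self.k = k
        def __lt__(self, other):
            return self.k < other.k

    xs = sorted([Key(3), Key(1), Key(2)])
    print(bisect.bisect_left(xs, Key(2)))  # 1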
diff --git a/common/py3-stdlib/bz2.py b/common/py3-stdlib/bz2.py
index 21e8ff4..ce07ebe 100644
--- a/common/py3-stdlib/bz2.py
+++ b/common/py3-stdlib/bz2.py
@@ -12,7 +12,6 @@
 from builtins import open as _builtin_open
 import io
 import os
-import warnings
 import _compression
 from threading import RLock
 
@@ -24,8 +23,6 @@
 # Value 2 no longer used
 _MODE_WRITE    = 3
 
-_sentinel = object()
-
 
 class BZ2File(_compression.BaseStream):
 
@@ -38,7 +35,7 @@
     returned as bytes, and data to be written should be given as bytes.
     """
 
-    def __init__(self, filename, mode="r", buffering=_sentinel, compresslevel=9):
+    def __init__(self, filename, mode="r", *, compresslevel=9):
         """Open a bzip2-compressed file.
 
         If filename is a str, bytes, or PathLike object, it gives the
@@ -49,8 +46,6 @@
         'x' for creating exclusively, or 'a' for appending. These can
         equivalently be given as 'rb', 'wb', 'xb', and 'ab'.
 
-        buffering is ignored since Python 3.0. Its use is deprecated.
-
         If mode is 'w', 'x' or 'a', compresslevel can be a number between 1
         and 9 specifying the level of compression: 1 produces the least
         compression, and 9 (default) produces the most compression.
@@ -65,12 +60,6 @@
         self._closefp = False
         self._mode = _MODE_CLOSED
 
-        if buffering is not _sentinel:
-            warnings.warn("Use of 'buffering' argument is deprecated and ignored "
-                          "since Python 3.0.",
-                          DeprecationWarning,
-                          stacklevel=2)
-
         if not (1 <= compresslevel <= 9):
             raise ValueError("compresslevel must be between 1 and 9")
 
diff --git a/common/py3-stdlib/cProfile.py b/common/py3-stdlib/cProfile.py
index 369d02e..59b4699 100755
--- a/common/py3-stdlib/cProfile.py
+++ b/common/py3-stdlib/cProfile.py
@@ -103,28 +103,12 @@
         return self
 
     # This method is more useful to profile a single function call.
-    def runcall(*args, **kw):
-        if len(args) >= 2:
-            self, func, *args = args
-        elif not args:
-            raise TypeError("descriptor 'runcall' of 'Profile' object "
-                            "needs an argument")
-        elif 'func' in kw:
-            func = kw.pop('func')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'func' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('runcall expected at least 1 positional argument, '
-                            'got %d' % (len(args)-1))
-
+    def runcall(self, func, /, *args, **kw):
         self.enable()
         try:
             return func(*args, **kw)
         finally:
             self.disable()
-    runcall.__text_signature__ = '($self, func, /, *args, **kw)'
 
     def __enter__(self):
         self.enable()
@@ -168,6 +152,11 @@
     (options, args) = parser.parse_args()
     sys.argv[:] = args
 
+    # The script that we're profiling may chdir, so capture the absolute path
+    # to the output file at startup.
+    if options.outfile is not None:
+        options.outfile = os.path.abspath(options.outfile)
+
     if len(args) > 0:
         if options.module:
             code = "run_module(modname, run_name='__main__')"
diff --git a/common/py3-stdlib/codeop.py b/common/py3-stdlib/codeop.py
index 3c2bb60..4c10470 100644
--- a/common/py3-stdlib/codeop.py
+++ b/common/py3-stdlib/codeop.py
@@ -81,12 +81,14 @@
 
     try:
         code = compiler(source, filename, symbol)
-    except SyntaxError as err:
+    except SyntaxError:
         pass
 
-    # Suppress warnings after the first compile to avoid duplication.
+    # Catch syntax warnings after the first compile
+    # to emit warnings (SyntaxWarning, DeprecationWarning) at most once.
     with warnings.catch_warnings():
-        warnings.simplefilter("ignore")
+        warnings.simplefilter("error")
+
         try:
             code1 = compiler(source + "\n", filename, symbol)
         except SyntaxError as e:
@@ -138,7 +140,7 @@
         self.flags = PyCF_DONT_IMPLY_DEDENT
 
     def __call__(self, source, filename, symbol):
-        codeob = compile(source, filename, symbol, self.flags, 1)
+        codeob = compile(source, filename, symbol, self.flags, True)
         for feature in _features:
             if codeob.co_flags & feature.compiler_flag:
                 self.flags |= feature.compiler_flag
diff --git a/common/py3-stdlib/collections/__init__.py b/common/py3-stdlib/collections/__init__.py
index a78a47c..bc69a67 100644
--- a/common/py3-stdlib/collections/__init__.py
+++ b/common/py3-stdlib/collections/__init__.py
@@ -14,17 +14,30 @@
 
 '''
 
-__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList',
-            'UserString', 'Counter', 'OrderedDict', 'ChainMap']
+__all__ = [
+    'ChainMap',
+    'Counter',
+    'OrderedDict',
+    'UserDict',
+    'UserList',
+    'UserString',
+    'defaultdict',
+    'deque',
+    'namedtuple',
+]
 
 import _collections_abc
-from operator import itemgetter as _itemgetter, eq as _eq
-from keyword import iskeyword as _iskeyword
-import sys as _sys
 import heapq as _heapq
-from _weakref import proxy as _proxy
-from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
+import sys as _sys
+
+from itertools import chain as _chain
+from itertools import repeat as _repeat
+from itertools import starmap as _starmap
+from keyword import iskeyword as _iskeyword
+from operator import eq as _eq
+from operator import itemgetter as _itemgetter
 from reprlib import recursive_repr as _recursive_repr
+from _weakref import proxy as _proxy
 
 try:
     from _collections import deque
@@ -48,12 +61,13 @@
         import warnings
         warnings.warn("Using or importing the ABCs from 'collections' instead "
                       "of from 'collections.abc' is deprecated since Python 3.3, "
-                      "and in 3.9 it will stop working",
+                      "and in 3.10 it will stop working",
                       DeprecationWarning, stacklevel=2)
         globals()[name] = obj
         return obj
     raise AttributeError(f'module {__name__!r} has no attribute {name!r}')
 
+
 ################################################################################
 ### OrderedDict
 ################################################################################
@@ -293,6 +307,24 @@
             return dict.__eq__(self, other) and all(map(_eq, self, other))
         return dict.__eq__(self, other)
 
+    def __ior__(self, other):
+        self.update(other)
+        return self
+
+    def __or__(self, other):
+        if not isinstance(other, dict):
+            return NotImplemented
+        new = self.__class__(self)
+        new.update(other)
+        return new
+
+    def __ror__(self, other):
+        if not isinstance(other, dict):
+            return NotImplemented
+        new = self.__class__(other)
+        new.update(self)
+        return new
+
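A quick sketch of the new PEP 584 union operators (assuming Python 3.9
semantics: insertion order is preserved and the right operand wins on
conflicting keys):

    from collections import OrderedDict

    a = OrderedDict(x=1, y=2)
    b = {"y": 20, "z": 30}
    print(a | b)    # OrderedDict([('x', 1), ('y', 20), ('z', 30)])
    a |= b          # in-place, equivalent to a.update(b)
    print(list(a))  # ['x', 'y', 'z']
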
 
 try:
     from _collections import OrderedDict
@@ -381,18 +413,23 @@
     # Variables used in the methods and docstrings
     field_names = tuple(map(_sys.intern, field_names))
     num_fields = len(field_names)
-    arg_list = repr(field_names).replace("'", "")[1:-1]
+    arg_list = ', '.join(field_names)
+    if num_fields == 1:
+        arg_list += ','
     repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')'
     tuple_new = tuple.__new__
     _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip
 
     # Create all the named tuple methods to be added to the class namespace
 
-    s = f'def __new__(_cls, {arg_list}): return _tuple_new(_cls, ({arg_list}))'
-    namespace = {'_tuple_new': tuple_new, '__name__': f'namedtuple_{typename}'}
-    # Note: exec() has the side-effect of interning the field names
-    exec(s, namespace)
-    __new__ = namespace['__new__']
+    namespace = {
+        '_tuple_new': tuple_new,
+        '__builtins__': None,
+        '__name__': f'namedtuple_{typename}',
+    }
+    code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
+    __new__ = eval(code, namespace)
+    __new__.__name__ = '__new__'
     __new__.__doc__ = f'Create new instance of {typename}({arg_list})'
     if defaults is not None:
         __new__.__defaults__ = defaults
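
A condensed sketch of the construction this hunk switches to (hypothetical
field names; eval() of a lambda replaces exec() of a def statement, and
__builtins__ is emptied so the generated code can only reach _tuple_new):

    tuple_new = tuple.__new__
    namespace = {'_tuple_new': tuple_new, '__builtins__': None,
                 '__name__': 'namedtuple_Point'}
    __new__ = eval('lambda _cls, x, y: _tuple_new(_cls, (x, y))', namespace)
    __new__.__name__ = '__new__'
    print(__new__(tuple, 1, 2))  # (1, 2)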
@@ -429,8 +466,14 @@
         return _tuple(self)
 
     # Modify function metadata to help with introspection and debugging
-    for method in (__new__, _make.__func__, _replace,
-                   __repr__, _asdict, __getnewargs__):
+    for method in (
+        __new__,
+        _make.__func__,
+        _replace,
+        __repr__,
+        _asdict,
+        __getnewargs__,
+    ):
         method.__qualname__ = f'{typename}.{method.__name__}'
 
     # Build-up the class namespace dictionary
@@ -440,8 +483,6 @@
         '__slots__': (),
         '_fields': field_names,
         '_field_defaults': field_defaults,
-        # alternate spelling for backward compatibility
-        '_fields_defaults': field_defaults,
         '__new__': __new__,
         '_make': _make,
         '_replace': _replace,
@@ -548,7 +589,7 @@
         >>> c = Counter(a=4, b=2)                   # a new counter from keyword args
 
         '''
-        super(Counter, self).__init__()
+        super().__init__()
         self.update(iterable, **kwds)
 
     def __missing__(self, key):
@@ -632,7 +673,8 @@
                     for elem, count in iterable.items():
                         self[elem] = count + self_get(elem, 0)
                 else:
-                    super(Counter, self).update(iterable) # fast path when counter is empty
+                    # fast path when counter is empty
+                    super().update(iterable)
             else:
                 _count_elements(self, iterable)
         if kwds:
@@ -679,13 +721,14 @@
 
     def __repr__(self):
         if not self:
-            return '%s()' % self.__class__.__name__
+            return f'{self.__class__.__name__}()'
         try:
-            items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
-            return '%s({%s})' % (self.__class__.__name__, items)
+            # dict() preserves the ordering returned by most_common()
+            d = dict(self.most_common())
         except TypeError:
             # handle case where values are not orderable
-            return '{0}({1!r})'.format(self.__class__.__name__, dict(self))
+            d = dict(self)
+        return f'{self.__class__.__name__}({d!r})'
 
     # Multiset-style mathematical operations discussed in:
     #       Knuth TAOCP Volume II section 4.6.3 exercise 19
@@ -906,7 +949,7 @@
     def __iter__(self):
         d = {}
         for mapping in reversed(self.maps):
-            d.update(mapping)                   # reuses stored hash values if possible
+            d.update(dict.fromkeys(mapping))    # reuses stored hash values if possible
         return iter(d)
 
     def __contains__(self, key):
@@ -950,7 +993,7 @@
         try:
             del self.maps[0][key]
         except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+            raise KeyError(f'Key not found in the first mapping: {key!r}')
 
     def popitem(self):
         'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
@@ -964,12 +1007,31 @@
         try:
             return self.maps[0].pop(key, *args)
         except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+            raise KeyError(f'Key not found in the first mapping: {key!r}')
 
     def clear(self):
         'Clear maps[0], leaving maps[1:] intact.'
         self.maps[0].clear()
 
+    def __ior__(self, other):
+        self.maps[0].update(other)
+        return self
+
+    def __or__(self, other):
+        if not isinstance(other, _collections_abc.Mapping):
+            return NotImplemented
+        m = self.copy()
+        m.maps[0].update(other)
+        return m
+
+    def __ror__(self, other):
+        if not isinstance(other, _collections_abc.Mapping):
+            return NotImplemented
+        m = dict(other)
+        for child in reversed(self.maps):
+            m.update(child)
+        return self.__class__(m)
+
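The corresponding sketch for ChainMap (assumed Python 3.9 behavior: '|=' and
'|' touch only the first mapping, mirroring __setitem__ and update()):

    from collections import ChainMap

    cm = ChainMap({"a": 1}, {"b": 2})
    cm |= {"c": 3}
    print(cm.maps[0])  # {'a': 1, 'c': 3}; maps[1] is untouched
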
 
 ################################################################################
 ### UserDict
@@ -978,38 +1040,29 @@
 class UserDict(_collections_abc.MutableMapping):
 
     # Start by filling-out the abstract methods
-    def __init__(*args, **kwargs):
-        if not args:
-            raise TypeError("descriptor '__init__' of 'UserDict' object "
-                            "needs an argument")
-        self, *args = args
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        if args:
-            dict = args[0]
-        elif 'dict' in kwargs:
-            dict = kwargs.pop('dict')
-            import warnings
-            warnings.warn("Passing 'dict' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            dict = None
+    def __init__(self, dict=None, /, **kwargs):
         self.data = {}
         if dict is not None:
             self.update(dict)
         if kwargs:
             self.update(kwargs)
-    __init__.__text_signature__ = '($self, dict=None, /, **kwargs)'
 
-    def __len__(self): return len(self.data)
+    def __len__(self):
+        return len(self.data)
+
     def __getitem__(self, key):
         if key in self.data:
             return self.data[key]
         if hasattr(self.__class__, "__missing__"):
             return self.__class__.__missing__(self, key)
         raise KeyError(key)
-    def __setitem__(self, key, item): self.data[key] = item
-    def __delitem__(self, key): del self.data[key]
+
+    def __setitem__(self, key, item):
+        self.data[key] = item
+
+    def __delitem__(self, key):
+        del self.data[key]
+
     def __iter__(self):
         return iter(self.data)
 
@@ -1018,7 +1071,30 @@
         return key in self.data
 
     # Now, add the methods in dicts but not in MutableMapping
-    def __repr__(self): return repr(self.data)
+    def __repr__(self):
+        return repr(self.data)
+
+    def __or__(self, other):
+        if isinstance(other, UserDict):
+            return self.__class__(self.data | other.data)
+        if isinstance(other, dict):
+            return self.__class__(self.data | other)
+        return NotImplemented
+
+    def __ror__(self, other):
+        if isinstance(other, UserDict):
+            return self.__class__(other.data | self.data)
+        if isinstance(other, dict):
+            return self.__class__(other | self.data)
+        return NotImplemented
+
+    def __ior__(self, other):
+        if isinstance(other, UserDict):
+            self.data |= other.data
+        else:
+            self.data |= other
+        return self
+
     def __copy__(self):
         inst = self.__class__.__new__(self.__class__)
         inst.__dict__.update(self.__dict__)
@@ -1047,13 +1123,13 @@
         return d
 
 
-
 ################################################################################
 ### UserList
 ################################################################################
 
 class UserList(_collections_abc.MutableSequence):
     """A more or less complete user-defined wrapper around list objects."""
+
     def __init__(self, initlist=None):
         self.data = []
         if initlist is not None:
@@ -1064,35 +1140,60 @@
                 self.data[:] = initlist.data[:]
             else:
                 self.data = list(initlist)
-    def __repr__(self): return repr(self.data)
-    def __lt__(self, other): return self.data <  self.__cast(other)
-    def __le__(self, other): return self.data <= self.__cast(other)
-    def __eq__(self, other): return self.data == self.__cast(other)
-    def __gt__(self, other): return self.data >  self.__cast(other)
-    def __ge__(self, other): return self.data >= self.__cast(other)
+
+    def __repr__(self):
+        return repr(self.data)
+
+    def __lt__(self, other):
+        return self.data < self.__cast(other)
+
+    def __le__(self, other):
+        return self.data <= self.__cast(other)
+
+    def __eq__(self, other):
+        return self.data == self.__cast(other)
+
+    def __gt__(self, other):
+        return self.data > self.__cast(other)
+
+    def __ge__(self, other):
+        return self.data >= self.__cast(other)
+
     def __cast(self, other):
         return other.data if isinstance(other, UserList) else other
-    def __contains__(self, item): return item in self.data
-    def __len__(self): return len(self.data)
+
+    def __contains__(self, item):
+        return item in self.data
+
+    def __len__(self):
+        return len(self.data)
+
     def __getitem__(self, i):
         if isinstance(i, slice):
             return self.__class__(self.data[i])
         else:
             return self.data[i]
-    def __setitem__(self, i, item): self.data[i] = item
-    def __delitem__(self, i): del self.data[i]
+
+    def __setitem__(self, i, item):
+        self.data[i] = item
+
+    def __delitem__(self, i):
+        del self.data[i]
+
     def __add__(self, other):
         if isinstance(other, UserList):
             return self.__class__(self.data + other.data)
         elif isinstance(other, type(self.data)):
             return self.__class__(self.data + other)
         return self.__class__(self.data + list(other))
+
     def __radd__(self, other):
         if isinstance(other, UserList):
             return self.__class__(other.data + self.data)
         elif isinstance(other, type(self.data)):
             return self.__class__(other + self.data)
         return self.__class__(list(other) + self.data)
+
     def __iadd__(self, other):
         if isinstance(other, UserList):
             self.data += other.data
@@ -1101,28 +1202,53 @@
         else:
             self.data += list(other)
         return self
+
     def __mul__(self, n):
-        return self.__class__(self.data*n)
+        return self.__class__(self.data * n)
+
     __rmul__ = __mul__
+
     def __imul__(self, n):
         self.data *= n
         return self
+
     def __copy__(self):
         inst = self.__class__.__new__(self.__class__)
         inst.__dict__.update(self.__dict__)
         # Create a copy and avoid triggering descriptors
         inst.__dict__["data"] = self.__dict__["data"][:]
         return inst
-    def append(self, item): self.data.append(item)
-    def insert(self, i, item): self.data.insert(i, item)
-    def pop(self, i=-1): return self.data.pop(i)
-    def remove(self, item): self.data.remove(item)
-    def clear(self): self.data.clear()
-    def copy(self): return self.__class__(self)
-    def count(self, item): return self.data.count(item)
-    def index(self, item, *args): return self.data.index(item, *args)
-    def reverse(self): self.data.reverse()
-    def sort(self, /, *args, **kwds): self.data.sort(*args, **kwds)
+
+    def append(self, item):
+        self.data.append(item)
+
+    def insert(self, i, item):
+        self.data.insert(i, item)
+
+    def pop(self, i=-1):
+        return self.data.pop(i)
+
+    def remove(self, item):
+        self.data.remove(item)
+
+    def clear(self):
+        self.data.clear()
+
+    def copy(self):
+        return self.__class__(self)
+
+    def count(self, item):
+        return self.data.count(item)
+
+    def index(self, item, *args):
+        return self.data.index(item, *args)
+
+    def reverse(self):
+        self.data.reverse()
+
+    def sort(self, /, *args, **kwds):
+        self.data.sort(*args, **kwds)
+
     def extend(self, other):
         if isinstance(other, UserList):
             self.data.extend(other.data)
@@ -1130,12 +1256,12 @@
             self.data.extend(other)
 
 
-
 ################################################################################
 ### UserString
 ################################################################################
 
 class UserString(_collections_abc.Sequence):
+
     def __init__(self, seq):
         if isinstance(seq, str):
             self.data = seq
@@ -1143,12 +1269,25 @@
             self.data = seq.data[:]
         else:
             self.data = str(seq)
-    def __str__(self): return str(self.data)
-    def __repr__(self): return repr(self.data)
-    def __int__(self): return int(self.data)
-    def __float__(self): return float(self.data)
-    def __complex__(self): return complex(self.data)
-    def __hash__(self): return hash(self.data)
+
+    def __str__(self):
+        return str(self.data)
+
+    def __repr__(self):
+        return repr(self.data)
+
+    def __int__(self):
+        return int(self.data)
+
+    def __float__(self):
+        return float(self.data)
+
+    def __complex__(self):
+        return complex(self.data)
+
+    def __hash__(self):
+        return hash(self.data)
+
     def __getnewargs__(self):
         return (self.data[:],)
 
@@ -1156,18 +1295,22 @@
         if isinstance(string, UserString):
             return self.data == string.data
         return self.data == string
+
     def __lt__(self, string):
         if isinstance(string, UserString):
             return self.data < string.data
         return self.data < string
+
     def __le__(self, string):
         if isinstance(string, UserString):
             return self.data <= string.data
         return self.data <= string
+
     def __gt__(self, string):
         if isinstance(string, UserString):
             return self.data > string.data
         return self.data > string
+
     def __ge__(self, string):
         if isinstance(string, UserString):
             return self.data >= string.data
@@ -1178,102 +1321,188 @@
             char = char.data
         return char in self.data
 
-    def __len__(self): return len(self.data)
-    def __getitem__(self, index): return self.__class__(self.data[index])
+    def __len__(self):
+        return len(self.data)
+
+    def __getitem__(self, index):
+        return self.__class__(self.data[index])
+
     def __add__(self, other):
         if isinstance(other, UserString):
             return self.__class__(self.data + other.data)
         elif isinstance(other, str):
             return self.__class__(self.data + other)
         return self.__class__(self.data + str(other))
+
     def __radd__(self, other):
         if isinstance(other, str):
             return self.__class__(other + self.data)
         return self.__class__(str(other) + self.data)
+
     def __mul__(self, n):
-        return self.__class__(self.data*n)
+        return self.__class__(self.data * n)
+
     __rmul__ = __mul__
+
     def __mod__(self, args):
         return self.__class__(self.data % args)
+
     def __rmod__(self, template):
         return self.__class__(str(template) % self)
+
     # the following methods are defined in alphabetical order:
-    def capitalize(self): return self.__class__(self.data.capitalize())
+    def capitalize(self):
+        return self.__class__(self.data.capitalize())
+
     def casefold(self):
         return self.__class__(self.data.casefold())
+
     def center(self, width, *args):
         return self.__class__(self.data.center(width, *args))
+
     def count(self, sub, start=0, end=_sys.maxsize):
         if isinstance(sub, UserString):
             sub = sub.data
         return self.data.count(sub, start, end)
+
+    def removeprefix(self, prefix, /):
+        if isinstance(prefix, UserString):
+            prefix = prefix.data
+        return self.__class__(self.data.removeprefix(prefix))
+
+    def removesuffix(self, suffix, /):
+        if isinstance(suffix, UserString):
+            suffix = suffix.data
+        return self.__class__(self.data.removesuffix(suffix))
+
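These delegate to the new str.removeprefix()/str.removesuffix() from PEP 616
(assuming Python 3.9, where those str methods exist). A tiny sketch:

    from collections import UserString

    s = UserString("test_copy")
    print(s.removeprefix("test_"))  # 'copy' (still a UserString)
    print(s.removesuffix("_copy"))  # 'test'
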
     def encode(self, encoding='utf-8', errors='strict'):
         encoding = 'utf-8' if encoding is None else encoding
         errors = 'strict' if errors is None else errors
         return self.data.encode(encoding, errors)
+
     def endswith(self, suffix, start=0, end=_sys.maxsize):
         return self.data.endswith(suffix, start, end)
+
     def expandtabs(self, tabsize=8):
         return self.__class__(self.data.expandtabs(tabsize))
+
     def find(self, sub, start=0, end=_sys.maxsize):
         if isinstance(sub, UserString):
             sub = sub.data
         return self.data.find(sub, start, end)
+
     def format(self, /, *args, **kwds):
         return self.data.format(*args, **kwds)
+
     def format_map(self, mapping):
         return self.data.format_map(mapping)
+
     def index(self, sub, start=0, end=_sys.maxsize):
         return self.data.index(sub, start, end)
-    def isalpha(self): return self.data.isalpha()
-    def isalnum(self): return self.data.isalnum()
-    def isascii(self): return self.data.isascii()
-    def isdecimal(self): return self.data.isdecimal()
-    def isdigit(self): return self.data.isdigit()
-    def isidentifier(self): return self.data.isidentifier()
-    def islower(self): return self.data.islower()
-    def isnumeric(self): return self.data.isnumeric()
-    def isprintable(self): return self.data.isprintable()
-    def isspace(self): return self.data.isspace()
-    def istitle(self): return self.data.istitle()
-    def isupper(self): return self.data.isupper()
-    def join(self, seq): return self.data.join(seq)
+
+    def isalpha(self):
+        return self.data.isalpha()
+
+    def isalnum(self):
+        return self.data.isalnum()
+
+    def isascii(self):
+        return self.data.isascii()
+
+    def isdecimal(self):
+        return self.data.isdecimal()
+
+    def isdigit(self):
+        return self.data.isdigit()
+
+    def isidentifier(self):
+        return self.data.isidentifier()
+
+    def islower(self):
+        return self.data.islower()
+
+    def isnumeric(self):
+        return self.data.isnumeric()
+
+    def isprintable(self):
+        return self.data.isprintable()
+
+    def isspace(self):
+        return self.data.isspace()
+
+    def istitle(self):
+        return self.data.istitle()
+
+    def isupper(self):
+        return self.data.isupper()
+
+    def join(self, seq):
+        return self.data.join(seq)
+
     def ljust(self, width, *args):
         return self.__class__(self.data.ljust(width, *args))
-    def lower(self): return self.__class__(self.data.lower())
-    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
+
+    def lower(self):
+        return self.__class__(self.data.lower())
+
+    def lstrip(self, chars=None):
+        return self.__class__(self.data.lstrip(chars))
+
     maketrans = str.maketrans
+
     def partition(self, sep):
         return self.data.partition(sep)
+
     def replace(self, old, new, maxsplit=-1):
         if isinstance(old, UserString):
             old = old.data
         if isinstance(new, UserString):
             new = new.data
         return self.__class__(self.data.replace(old, new, maxsplit))
+
     def rfind(self, sub, start=0, end=_sys.maxsize):
         if isinstance(sub, UserString):
             sub = sub.data
         return self.data.rfind(sub, start, end)
+
     def rindex(self, sub, start=0, end=_sys.maxsize):
         return self.data.rindex(sub, start, end)
+
     def rjust(self, width, *args):
         return self.__class__(self.data.rjust(width, *args))
+
     def rpartition(self, sep):
         return self.data.rpartition(sep)
+
     def rstrip(self, chars=None):
         return self.__class__(self.data.rstrip(chars))
+
     def split(self, sep=None, maxsplit=-1):
         return self.data.split(sep, maxsplit)
+
     def rsplit(self, sep=None, maxsplit=-1):
         return self.data.rsplit(sep, maxsplit)
-    def splitlines(self, keepends=False): return self.data.splitlines(keepends)
+
+    def splitlines(self, keepends=False):
+        return self.data.splitlines(keepends)
+
     def startswith(self, prefix, start=0, end=_sys.maxsize):
         return self.data.startswith(prefix, start, end)
-    def strip(self, chars=None): return self.__class__(self.data.strip(chars))
-    def swapcase(self): return self.__class__(self.data.swapcase())
-    def title(self): return self.__class__(self.data.title())
+
+    def strip(self, chars=None):
+        return self.__class__(self.data.strip(chars))
+
+    def swapcase(self):
+        return self.__class__(self.data.swapcase())
+
+    def title(self):
+        return self.__class__(self.data.title())
+
     def translate(self, *args):
         return self.__class__(self.data.translate(*args))
-    def upper(self): return self.__class__(self.data.upper())
-    def zfill(self, width): return self.__class__(self.data.zfill(width))
+
+    def upper(self):
+        return self.__class__(self.data.upper())
+
+    def zfill(self, width):
+        return self.__class__(self.data.zfill(width))
diff --git a/common/py3-stdlib/compileall.py b/common/py3-stdlib/compileall.py
index bfac8ef..fe7f450 100644
--- a/common/py3-stdlib/compileall.py
+++ b/common/py3-stdlib/compileall.py
@@ -15,12 +15,14 @@
 import importlib.util
 import py_compile
 import struct
+import filecmp
 
 from functools import partial
+from pathlib import Path
 
 __all__ = ["compile_dir","compile_file","compile_path"]
 
-def _walk_dir(dir, ddir=None, maxlevels=10, quiet=0):
+def _walk_dir(dir, maxlevels, quiet=0):
     if quiet < 2 and isinstance(dir, os.PathLike):
         dir = os.fspath(dir)
     if not quiet:
@@ -36,37 +38,49 @@
         if name == '__pycache__':
             continue
         fullname = os.path.join(dir, name)
-        if ddir is not None:
-            dfile = os.path.join(ddir, name)
-        else:
-            dfile = None
         if not os.path.isdir(fullname):
-            yield fullname, ddir
+            yield fullname
         elif (maxlevels > 0 and name != os.curdir and name != os.pardir and
               os.path.isdir(fullname) and not os.path.islink(fullname)):
-            yield from _walk_dir(fullname, ddir=dfile,
-                                 maxlevels=maxlevels - 1, quiet=quiet)
+            yield from _walk_dir(fullname, maxlevels=maxlevels - 1,
+                                 quiet=quiet)
 
-def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None,
-                quiet=0, legacy=False, optimize=-1, workers=1,
-                invalidation_mode=None):
+def compile_dir(dir, maxlevels=None, ddir=None, force=False,
+                rx=None, quiet=0, legacy=False, optimize=-1, workers=1,
+                invalidation_mode=None, *, stripdir=None,
+                prependdir=None, limit_sl_dest=None, hardlink_dupes=False):
     """Byte-compile all modules in the given directory tree.
 
     Arguments (only dir is required):
 
     dir:       the directory to byte-compile
-    maxlevels: maximum recursion level (default 10)
+    maxlevels: maximum recursion level (default `sys.getrecursionlimit()`)
     ddir:      the directory that will be prepended to the path to the
                file as it is compiled into each byte-code file.
     force:     if True, force compilation, even if timestamps are up-to-date
     quiet:     full output with False or 0, errors only with 1,
                no output with 2
     legacy:    if True, produce legacy pyc paths instead of PEP 3147 paths
-    optimize:  optimization level or -1 for level of the interpreter
+    optimize:  int or list of optimization levels or -1 for level of
+               the interpreter. Multiple levels lead to multiple compiled
+               files, each with one optimization level.
     workers:   maximum number of parallel workers
     invalidation_mode: how the up-to-dateness of the pyc will be checked
+    stripdir:  part of path to left-strip from source file path
+    prependdir: path to prepend to beginning of original file path, applied
+               after stripdir
+    limit_sl_dest: ignore symlinks if they point outside of
+                   the defined path
+    hardlink_dupes: hardlink duplicated pyc files
     """
     ProcessPoolExecutor = None
+    if ddir is not None and (stripdir is not None or prependdir is not None):
+        raise ValueError(("Destination dir (ddir) cannot be used "
+                          "in combination with stripdir or prependdir"))
+    if ddir is not None:
+        stripdir = dir
+        prependdir = ddir
+        ddir = None
     if workers < 0:
         raise ValueError('workers must be greater or equal to 0')
     if workers != 1:
@@ -76,36 +90,40 @@
             from concurrent.futures import ProcessPoolExecutor
         except ImportError:
             workers = 1
-    files_and_ddirs = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels,
-                                ddir=ddir)
+    if maxlevels is None:
+        maxlevels = sys.getrecursionlimit()
+    files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels)
     success = True
     if workers != 1 and ProcessPoolExecutor is not None:
         # If workers == 0, let ProcessPoolExecutor choose
         workers = workers or None
         with ProcessPoolExecutor(max_workers=workers) as executor:
-            results = executor.map(
-                    partial(_compile_file_tuple,
-                            force=force, rx=rx, quiet=quiet,
-                            legacy=legacy, optimize=optimize,
-                            invalidation_mode=invalidation_mode,
-                        ),
-                    files_and_ddirs)
+            results = executor.map(partial(compile_file,
+                                           ddir=ddir, force=force,
+                                           rx=rx, quiet=quiet,
+                                           legacy=legacy,
+                                           optimize=optimize,
+                                           invalidation_mode=invalidation_mode,
+                                           stripdir=stripdir,
+                                           prependdir=prependdir,
+                                           limit_sl_dest=limit_sl_dest,
+                                           hardlink_dupes=hardlink_dupes),
+                                   files)
             success = min(results, default=True)
     else:
-        for file, dfile in files_and_ddirs:
-            if not compile_file(file, dfile, force, rx, quiet,
-                                legacy, optimize, invalidation_mode):
+        for file in files:
+            if not compile_file(file, ddir, force, rx, quiet,
+                                legacy, optimize, invalidation_mode,
+                                stripdir=stripdir, prependdir=prependdir,
+                                limit_sl_dest=limit_sl_dest,
+                                hardlink_dupes=hardlink_dupes):
                 success = False
     return success
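
A minimal usage sketch of the compile_dir() parameters added above (the
build tree and install prefix here are hypothetical):

    import compileall

    # One pyc per optimization level; stripdir/prependdir rewrite the
    # source path recorded inside each pyc to match the install layout.
    compileall.compile_dir(
        'build/src',
        stripdir='build/src',        # strip the build root from recorded paths
        prependdir='/usr/lib/app',   # then prepend the install prefix
        optimize=[-1, 1, 2],         # compile once per listed level
        hardlink_dupes=True,         # needs at least two optimization levels
        quiet=1,
    )
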
 
-def _compile_file_tuple(file_and_dfile, **kwargs):
-    """Needs to be toplevel for ProcessPoolExecutor."""
-    file, dfile = file_and_dfile
-    return compile_file(file, dfile, **kwargs)
-
 def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
                  legacy=False, optimize=-1,
-                 invalidation_mode=None):
+                 invalidation_mode=None, *, stripdir=None, prependdir=None,
+                 limit_sl_dest=None, hardlink_dupes=False):
     """Byte-compile one file.
 
     Arguments (only fullname is required):
@@ -117,32 +135,85 @@
     quiet:     full output with False or 0, errors only with 1,
                no output with 2
     legacy:    if True, produce legacy pyc paths instead of PEP 3147 paths
-    optimize:  optimization level or -1 for level of the interpreter
+    optimize:  int or list of optimization levels, or -1 for the
+               interpreter's own level. Multiple levels lead to multiple
+               compiled files, each with one optimization level.
     invalidation_mode: how the up-to-dateness of the pyc will be checked
+    stripdir:  prefix to strip from the left of each source file path
+    prependdir: path to prepend to the original file path, applied
+               after stripdir
+    limit_sl_dest: ignore symlinks that point outside of
+                   the given path.
+    hardlink_dupes: hardlink duplicated pyc files
     """
+
+    if ddir is not None and (stripdir is not None or prependdir is not None):
+        raise ValueError(("Destination dir (ddir) cannot be used "
+                          "in combination with stripdir or prependdir"))
+
     success = True
     if quiet < 2 and isinstance(fullname, os.PathLike):
         fullname = os.fspath(fullname)
     name = os.path.basename(fullname)
+
+    dfile = None
+
     if ddir is not None:
         dfile = os.path.join(ddir, name)
-    else:
-        dfile = None
+
+    if stripdir is not None:
+        fullname_parts = fullname.split(os.path.sep)
+        stripdir_parts = stripdir.split(os.path.sep)
+        ddir_parts = list(fullname_parts)
+
+        for spart, opart in zip(stripdir_parts, fullname_parts):
+            if spart == opart:
+                ddir_parts.remove(spart)
+
+        dfile = os.path.join(*ddir_parts)
+
+    if prependdir is not None:
+        if dfile is None:
+            dfile = os.path.join(prependdir, fullname)
+        else:
+            dfile = os.path.join(prependdir, dfile)
+
+    if isinstance(optimize, int):
+        optimize = [optimize]
+
+    # Use set() to remove duplicates.
+    # Use sorted() to create pyc files in a deterministic order.
+    optimize = sorted(set(optimize))
+
+    if hardlink_dupes and len(optimize) < 2:
+        raise ValueError("Hardlinking of duplicated bytecode makes sense "
+                          "only for more than one optimization level")
+
     if rx is not None:
         mo = rx.search(fullname)
         if mo:
             return success
+
+    if limit_sl_dest is not None and os.path.islink(fullname):
+        if Path(limit_sl_dest).resolve() not in Path(fullname).resolve().parents:
+            return success
+
+    opt_cfiles = {}
+
     if os.path.isfile(fullname):
-        if legacy:
-            cfile = fullname + 'c'
-        else:
-            if optimize >= 0:
-                opt = optimize if optimize >= 1 else ''
-                cfile = importlib.util.cache_from_source(
-                                fullname, optimization=opt)
+        for opt_level in optimize:
+            if legacy:
+                opt_cfiles[opt_level] = fullname + 'c'
             else:
-                cfile = importlib.util.cache_from_source(fullname)
-            cache_dir = os.path.dirname(cfile)
+                if opt_level >= 0:
+                    opt = opt_level if opt_level >= 1 else ''
+                    cfile = (importlib.util.cache_from_source(
+                             fullname, optimization=opt))
+                    opt_cfiles[opt_level] = cfile
+                else:
+                    cfile = importlib.util.cache_from_source(fullname)
+                    opt_cfiles[opt_level] = cfile
+
         head, tail = name[:-3], name[-3:]
         if tail == '.py':
             if not force:
@@ -150,18 +221,28 @@
                     mtime = int(os.stat(fullname).st_mtime)
                     expect = struct.pack('<4sll', importlib.util.MAGIC_NUMBER,
                                          0, mtime)
-                    with open(cfile, 'rb') as chandle:
-                        actual = chandle.read(12)
-                    if expect == actual:
+                    for cfile in opt_cfiles.values():
+                        with open(cfile, 'rb') as chandle:
+                            actual = chandle.read(12)
+                        if expect != actual:
+                            break
+                    else:
                         return success
                 except OSError:
                     pass
             if not quiet:
                 print('Compiling {!r}...'.format(fullname))
             try:
-                ok = py_compile.compile(fullname, cfile, dfile, True,
-                                        optimize=optimize,
-                                        invalidation_mode=invalidation_mode)
+                for index, opt_level in enumerate(optimize):
+                    cfile = opt_cfiles[opt_level]
+                    ok = py_compile.compile(fullname, cfile, dfile, True,
+                                            optimize=opt_level,
+                                            invalidation_mode=invalidation_mode)
+                    if index > 0 and hardlink_dupes:
+                        previous_cfile = opt_cfiles[optimize[index - 1]]
+                        if filecmp.cmp(cfile, previous_cfile, shallow=False):
+                            os.unlink(cfile)
+                            os.link(previous_cfile, cfile)
             except py_compile.PyCompileError as err:
                 success = False
                 if quiet >= 2:
@@ -230,7 +311,7 @@
     parser = argparse.ArgumentParser(
         description='Utilities to support installing Python libraries.')
     parser.add_argument('-l', action='store_const', const=0,
-                        default=10, dest='maxlevels',
+                        default=None, dest='maxlevels',
                         help="don't recurse into subdirectories")
     parser.add_argument('-r', type=int, dest='recursion',
                         help=('control the maximum recursion level. '
@@ -248,6 +329,20 @@
                               'compile-time tracebacks and in runtime '
                               'tracebacks in cases where the source file is '
                               'unavailable'))
+    parser.add_argument('-s', metavar='STRIPDIR', dest='stripdir',
+                        default=None,
+                        help=('part of path to left-strip from the path '
+                              'to each source file - for example the '
+                              'buildroot. The `-d` and `-s` options cannot '
+                              'be specified together.'))
+    parser.add_argument('-p', metavar='PREPENDDIR', dest='prependdir',
+                        default=None,
+                        help=('path to add as a prefix to the path '
+                              'to each source file - for example / to '
+                              'make it absolute when part of it is '
+                              'removed by the `-s` option. The `-d` and '
+                              '`-p` options cannot be specified together.'))
     parser.add_argument('-x', metavar='REGEXP', dest='rx', default=None,
                         help=('skip files matching the regular expression; '
                               'the regexp is searched for in the full path '
@@ -270,6 +365,15 @@
                               '"checked-hash" if the SOURCE_DATE_EPOCH '
                               'environment variable is set, and '
                               '"timestamp" otherwise.'))
+    parser.add_argument('-o', action='append', type=int, dest='opt_levels',
+                        help=('Optimization levels to run compilation with. '
+                              'Default is -1 which uses the optimization '
+                              'level of the Python interpreter itself '
+                              '(as specified by -O).'))
+    parser.add_argument('-e', metavar='DIR', dest='limit_sl_dest',
+                        help='Ignore symlinks pointing outside of DIR')
+    parser.add_argument('--hardlink-dupes', action='store_true',
+                        dest='hardlink_dupes',
+                        help='Hardlink duplicated pyc files')
 
     args = parser.parse_args()
     compile_dests = args.compile_dest
@@ -278,12 +382,26 @@
         import re
         args.rx = re.compile(args.rx)
 
+    if args.limit_sl_dest == "":
+        args.limit_sl_dest = None
 
     if args.recursion is not None:
         maxlevels = args.recursion
     else:
         maxlevels = args.maxlevels
 
+    if args.opt_levels is None:
+        args.opt_levels = [-1]
+
+    if len(args.opt_levels) == 1 and args.hardlink_dupes:
+        parser.error(("Hardlinking of duplicated bytecode makes sense "
+                      "only for more than one optimization level."))
+
+    if args.ddir is not None and (
+        args.stripdir is not None or args.prependdir is not None
+    ):
+        parser.error("-d cannot be used in combination with -s or -p")
+
     # if flist is provided then load it
     if args.flist:
         try:
@@ -308,13 +426,23 @@
                 if os.path.isfile(dest):
                     if not compile_file(dest, args.ddir, args.force, args.rx,
                                         args.quiet, args.legacy,
-                                        invalidation_mode=invalidation_mode):
+                                        invalidation_mode=invalidation_mode,
+                                        stripdir=args.stripdir,
+                                        prependdir=args.prependdir,
+                                        optimize=args.opt_levels,
+                                        limit_sl_dest=args.limit_sl_dest,
+                                        hardlink_dupes=args.hardlink_dupes):
                         success = False
                 else:
                     if not compile_dir(dest, maxlevels, args.ddir,
                                        args.force, args.rx, args.quiet,
                                        args.legacy, workers=args.workers,
-                                       invalidation_mode=invalidation_mode):
+                                       invalidation_mode=invalidation_mode,
+                                       stripdir=args.stripdir,
+                                       prependdir=args.prependdir,
+                                       optimize=args.opt_levels,
+                                       limit_sl_dest=args.limit_sl_dest,
+                                       hardlink_dupes=args.hardlink_dupes):
                         success = False
             return success
         else:
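
Taken together, the new flags map onto an invocation like the following
(paths are illustrative): compile at levels 1 and 2, hardlink byte-identical
pycs, strip the buildroot, re-prefix with /, and ignore symlinks escaping
/usr/lib:

    python -m compileall -o 1 -o 2 --hardlink-dupes -s /builddir -p / -e /usr/lib /builddir/usr/lib/python3.9
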
diff --git a/common/py3-stdlib/concurrent/futures/_base.py b/common/py3-stdlib/concurrent/futures/_base.py
index 6001e3b..00eb548 100644
--- a/common/py3-stdlib/concurrent/futures/_base.py
+++ b/common/py3-stdlib/concurrent/futures/_base.py
@@ -7,6 +7,7 @@
 import logging
 import threading
 import time
+import types
 
 FIRST_COMPLETED = 'FIRST_COMPLETED'
 FIRST_EXCEPTION = 'FIRST_EXCEPTION'
@@ -544,10 +545,12 @@
             self._condition.notify_all()
         self._invoke_callbacks()
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
 class Executor(object):
     """This is an abstract base class for concrete asynchronous executors."""
 
-    def submit(*args, **kwargs):
+    def submit(self, fn, /, *args, **kwargs):
         """Submits a callable to be executed with the given arguments.
 
         Schedules the callable to be executed as fn(*args, **kwargs) and returns
@@ -556,21 +559,7 @@
         Returns:
             A Future representing the given call.
         """
-        if len(args) >= 2:
-            pass
-        elif not args:
-            raise TypeError("descriptor 'submit' of 'Executor' object "
-                            "needs an argument")
-        elif 'fn' in kwargs:
-            import warnings
-            warnings.warn("Passing 'fn' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('submit expected at least 1 positional argument, '
-                            'got %d' % (len(args)-1))
-
         raise NotImplementedError()
-    submit.__text_signature__ = '($self, fn, /, *args, **kwargs)'
 
     def map(self, fn, *iterables, timeout=None, chunksize=1):
         """Returns an iterator equivalent to map(fn, iter).
@@ -616,7 +605,7 @@
                     future.cancel()
         return result_iterator()
 
-    def shutdown(self, wait=True):
+    def shutdown(self, wait=True, *, cancel_futures=False):
         """Clean-up the resources associated with the Executor.
 
         It is safe to call this method several times. Otherwise, no other
@@ -626,6 +615,9 @@
             wait: If True then shutdown will not return until all running
                 futures have finished executing and the resources used by the
                 executor have been reclaimed.
+            cancel_futures: If True then shutdown will cancel all pending
+                futures. Futures that are completed or running will not be
+                cancelled.
         """
         pass
 
diff --git a/common/py3-stdlib/concurrent/futures/process.py b/common/py3-stdlib/concurrent/futures/process.py
index 2b2b78e..90bc98b 100644
--- a/common/py3-stdlib/concurrent/futures/process.py
+++ b/common/py3-stdlib/concurrent/futures/process.py
@@ -45,11 +45,9 @@
 
 __author__ = 'Brian Quinlan (brian@sweetapp.com)'
 
-import atexit
 import os
 from concurrent.futures import _base
 import queue
-from queue import Full
 import multiprocessing as mp
 import multiprocessing.connection
 from multiprocessing.queues import Queue
@@ -60,19 +58,6 @@
 import sys
 import traceback
 
-# Workers are created as daemon threads and processes. This is done to allow the
-# interpreter to exit when there are still idle processes in a
-# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
-# allowing workers to die with the interpreter has two undesirable properties:
-#   - The workers would still be running during interpreter shutdown,
-#     meaning that they would fail in unpredictable ways.
-#   - The workers could be killed while evaluating a work item, which could
-#     be bad if the callable being evaluated has external side-effects e.g.
-#     writing to a file.
-#
-# To work around this problem, an exit handler is installed which tells the
-# workers to exit when their work queues are empty and then waits until the
-# threads/processes finish.
 
 _threads_wakeups = weakref.WeakKeyDictionary()
 _global_shutdown = False
@@ -80,18 +65,23 @@
 
 class _ThreadWakeup:
     def __init__(self):
+        self._closed = False
         self._reader, self._writer = mp.Pipe(duplex=False)
 
     def close(self):
-        self._writer.close()
-        self._reader.close()
+        if not self._closed:
+            self._closed = True
+            self._writer.close()
+            self._reader.close()
 
     def wakeup(self):
-        self._writer.send_bytes(b"")
+        if not self._closed:
+            self._writer.send_bytes(b"")
 
     def clear(self):
-        while self._reader.poll():
-            self._reader.recv_bytes()
+        if not self._closed:
+            while self._reader.poll():
+                self._reader.recv_bytes()
 
 
 def _python_exit():
@@ -99,10 +89,17 @@
     _global_shutdown = True
     items = list(_threads_wakeups.items())
     for _, thread_wakeup in items:
+        # call not protected by ProcessPoolExecutor._shutdown_lock
         thread_wakeup.wakeup()
     for t, _ in items:
         t.join()
 
+# Register for `_python_exit()` to be called just before joining all
+# non-daemon threads. This is used instead of `atexit.register()` for
+# compatibility with subinterpreters, which no longer support daemon threads.
+# See bpo-39812 for context.
+threading._register_atexit(_python_exit)
+
 # Controls how many more calls than processes will be queued in the call queue.
 # A smaller number will mean that processes spend more time idle waiting for
 # work while a larger number will make Future.cancel() succeed less frequently
@@ -160,8 +157,11 @@
 
 class _SafeQueue(Queue):
     """Safe Queue set exception to the future object linked to a job"""
-    def __init__(self, max_size=0, *, ctx, pending_work_items):
+    def __init__(self, max_size=0, *, ctx, pending_work_items, shutdown_lock,
+                 thread_wakeup):
         self.pending_work_items = pending_work_items
+        self.shutdown_lock = shutdown_lock
+        self.thread_wakeup = thread_wakeup
         super().__init__(max_size, ctx=ctx)
 
     def _on_queue_feeder_error(self, e, obj):
@@ -169,8 +169,11 @@
             tb = traceback.format_exception(type(e), e, e.__traceback__)
             e.__cause__ = _RemoteTraceback('\n"""\n{}"""'.format(''.join(tb)))
             work_item = self.pending_work_items.pop(obj.work_id, None)
-            # work_item can be None if another process terminated. In this case,
-            # the queue_manager_thread fails all work_items with BrokenProcessPool
+            with self.shutdown_lock:
+                self.thread_wakeup.wakeup()
+            # work_item can be None if another process terminated. In this
+            # case, the executor_manager_thread fails all work_items
+            # with BrokenProcessPool
             if work_item is not None:
                 work_item.future.set_exception(e)
         else:
@@ -186,6 +189,7 @@
             return
         yield chunk
 
+
 def _process_chunk(fn, chunk):
     """ Processes a chunk of an iterable passed to map.
 
@@ -249,120 +253,132 @@
         del call_item
 
 
-def _add_call_item_to_queue(pending_work_items,
-                            work_ids,
-                            call_queue):
-    """Fills call_queue with _WorkItems from pending_work_items.
-
-    This function never blocks.
-
-    Args:
-        pending_work_items: A dict mapping work ids to _WorkItems e.g.
-            {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
-        work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
-            are consumed and the corresponding _WorkItems from
-            pending_work_items are transformed into _CallItems and put in
-            call_queue.
-        call_queue: A multiprocessing.Queue that will be filled with _CallItems
-            derived from _WorkItems.
-    """
-    while True:
-        if call_queue.full():
-            return
-        try:
-            work_id = work_ids.get(block=False)
-        except queue.Empty:
-            return
-        else:
-            work_item = pending_work_items[work_id]
-
-            if work_item.future.set_running_or_notify_cancel():
-                call_queue.put(_CallItem(work_id,
-                                         work_item.fn,
-                                         work_item.args,
-                                         work_item.kwargs),
-                               block=True)
-            else:
-                del pending_work_items[work_id]
-                continue
-
-
-def _queue_management_worker(executor_reference,
-                             processes,
-                             pending_work_items,
-                             work_ids_queue,
-                             call_queue,
-                             result_queue,
-                             thread_wakeup):
+class _ExecutorManagerThread(threading.Thread):
     """Manages the communication between this process and the worker processes.
 
-    This function is run in a local thread.
+    The manager is run in a local thread.
 
     Args:
-        executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
-            this thread. Used to determine if the ProcessPoolExecutor has been
-            garbage collected and that this function can exit.
-        process: A list of the ctx.Process instances used as
-            workers.
-        pending_work_items: A dict mapping work ids to _WorkItems e.g.
-            {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
-        work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
-        call_queue: A ctx.Queue that will be filled with _CallItems
-            derived from _WorkItems for processing by the process workers.
-        result_queue: A ctx.SimpleQueue of _ResultItems generated by the
-            process workers.
-        thread_wakeup: A _ThreadWakeup to allow waking up the
-            queue_manager_thread from the main Thread and avoid deadlocks
-            caused by permanently locked queues.
+        executor: A reference to the ProcessPoolExecutor that owns
+            this thread. A weakref is owned by the manager, as well as
+            references to internal objects used to introspect the state of
+            the executor.
     """
-    executor = None
 
-    def shutting_down():
-        return (_global_shutdown or executor is None
-                or executor._shutdown_thread)
+    def __init__(self, executor):
+        # Store references to necessary internals of the executor.
 
-    def shutdown_worker():
-        # This is an upper bound on the number of children alive.
-        n_children_alive = sum(p.is_alive() for p in processes.values())
-        n_children_to_stop = n_children_alive
-        n_sentinels_sent = 0
-        # Send the right number of sentinels, to make sure all children are
-        # properly terminated.
-        while n_sentinels_sent < n_children_to_stop and n_children_alive > 0:
-            for i in range(n_children_to_stop - n_sentinels_sent):
-                try:
-                    call_queue.put_nowait(None)
-                    n_sentinels_sent += 1
-                except Full:
-                    break
-            n_children_alive = sum(p.is_alive() for p in processes.values())
+        # A _ThreadWakeup to allow waking up the queue_manager_thread from the
+        # main Thread and avoid deadlocks caused by permanently locked queues.
+        self.thread_wakeup = executor._executor_manager_thread_wakeup
+        self.shutdown_lock = executor._shutdown_lock
 
-        # Release the queue's resources as soon as possible.
-        call_queue.close()
-        # If .join() is not called on the created processes then
-        # some ctx.Queue methods may deadlock on Mac OS X.
-        for p in processes.values():
-            p.join()
+        # A weakref.ref to the ProcessPoolExecutor that owns this thread. Used
+        # to determine if the ProcessPoolExecutor has been garbage collected
+        # and that the manager can exit.
+        # When the executor gets garbage collected, the weakref callback
+        # will wake up the queue management thread so that it can terminate
+        # if there is no pending work item.
+        def weakref_cb(_,
+                       thread_wakeup=self.thread_wakeup,
+                       shutdown_lock=self.shutdown_lock):
+            mp.util.debug('Executor collected: triggering callback for'
+                          ' QueueManager wakeup')
+            with shutdown_lock:
+                thread_wakeup.wakeup()
 
-    result_reader = result_queue._reader
-    wakeup_reader = thread_wakeup._reader
-    readers = [result_reader, wakeup_reader]
+        self.executor_reference = weakref.ref(executor, weakref_cb)
 
-    while True:
-        _add_call_item_to_queue(pending_work_items,
-                                work_ids_queue,
-                                call_queue)
+        # A list of the ctx.Process instances used as workers.
+        self.processes = executor._processes
 
+        # A ctx.Queue that will be filled with _CallItems derived from
+        # _WorkItems for processing by the process workers.
+        self.call_queue = executor._call_queue
+
+        # A ctx.SimpleQueue of _ResultItems generated by the process workers.
+        self.result_queue = executor._result_queue
+
+        # A queue.Queue of work ids e.g. Queue([5, 6, ...]).
+        self.work_ids_queue = executor._work_ids
+
+        # A dict mapping work ids to _WorkItems e.g.
+        #     {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+        self.pending_work_items = executor._pending_work_items
+
+        super().__init__()
+
+    def run(self):
+        # Main loop for the executor manager thread.
+
+        while True:
+            self.add_call_item_to_queue()
+
+            result_item, is_broken, cause = self.wait_result_broken_or_wakeup()
+
+            if is_broken:
+                self.terminate_broken(cause)
+                return
+            if result_item is not None:
+                self.process_result_item(result_item)
+                # Delete reference to result_item to avoid keeping references
+                # while waiting on new results.
+                del result_item
+
+                # attempt to increment idle process count
+                executor = self.executor_reference()
+                if executor is not None:
+                    executor._idle_worker_semaphore.release()
+                del executor
+
+            if self.is_shutting_down():
+                self.flag_executor_shutting_down()
+
+                # Since no new work items can be added, it is safe to shutdown
+                # this thread if there are no pending work items.
+                if not self.pending_work_items:
+                    self.join_executor_internals()
+                    return
+
+    def add_call_item_to_queue(self):
+        # Fills call_queue with _WorkItems from pending_work_items.
+        # This function never blocks.
+        while True:
+            if self.call_queue.full():
+                return
+            try:
+                work_id = self.work_ids_queue.get(block=False)
+            except queue.Empty:
+                return
+            else:
+                work_item = self.pending_work_items[work_id]
+
+                if work_item.future.set_running_or_notify_cancel():
+                    self.call_queue.put(_CallItem(work_id,
+                                                  work_item.fn,
+                                                  work_item.args,
+                                                  work_item.kwargs),
+                                        block=True)
+                else:
+                    del self.pending_work_items[work_id]
+                    continue
+
+    def wait_result_broken_or_wakeup(self):
         # Wait for a result to be ready in the result_queue while checking
         # that all worker processes are still running, or for a wake-up
         # signal to be sent. Wake-up signals come either from new tasks
         # being submitted, from the executor being shut down or gc-ed, or
         # from the shutdown of the Python interpreter.
-        worker_sentinels = [p.sentinel for p in processes.values()]
+        result_reader = self.result_queue._reader
+        assert not self.thread_wakeup._closed
+        wakeup_reader = self.thread_wakeup._reader
+        readers = [result_reader, wakeup_reader]
+        worker_sentinels = [p.sentinel for p in self.processes.values()]
         ready = mp.connection.wait(readers + worker_sentinels)
 
         cause = None
         is_broken = True
+        result_item = None
         if result_reader in ready:
             try:
                 result_item = result_reader.recv()
@@ -372,79 +388,138 @@
 
         elif wakeup_reader in ready:
             is_broken = False
-            result_item = None
-        thread_wakeup.clear()
-        if is_broken:
-            # Mark the process pool broken so that submits fail right now.
-            executor = executor_reference()
-            if executor is not None:
-                executor._broken = ('A child process terminated '
-                                    'abruptly, the process pool is not '
-                                    'usable anymore')
-                executor._shutdown_thread = True
-                executor = None
-            bpe = BrokenProcessPool("A process in the process pool was "
-                                    "terminated abruptly while the future was "
-                                    "running or pending.")
-            if cause is not None:
-                bpe.__cause__ = _RemoteTraceback(
-                    f"\n'''\n{''.join(cause)}'''")
-            # All futures in flight must be marked failed
-            for work_id, work_item in pending_work_items.items():
-                work_item.future.set_exception(bpe)
-                # Delete references to object. See issue16284
-                del work_item
-            pending_work_items.clear()
-            # Terminate remaining workers forcibly: the queues or their
-            # locks may be in a dirty state and block forever.
-            for p in processes.values():
-                p.terminate()
-            shutdown_worker()
-            return
+
+        with self.shutdown_lock:
+            self.thread_wakeup.clear()
+
+        return result_item, is_broken, cause
+
+    def process_result_item(self, result_item):
+        # Process the received result_item. This can be either the PID of a
+        # worker that exited gracefully or a _ResultItem.
+
         if isinstance(result_item, int):
             # Clean shutdown of a worker using its PID
             # (avoids marking the executor broken)
-            assert shutting_down()
-            p = processes.pop(result_item)
+            assert self.is_shutting_down()
+            p = self.processes.pop(result_item)
             p.join()
-            if not processes:
-                shutdown_worker()
+            if not self.processes:
+                self.join_executor_internals()
                 return
-        elif result_item is not None:
-            work_item = pending_work_items.pop(result_item.work_id, None)
+        else:
+            # Received a _ResultItem so mark the future as completed.
+            work_item = self.pending_work_items.pop(result_item.work_id, None)
             # work_item can be None if another process terminated (see above)
             if work_item is not None:
                 if result_item.exception:
                     work_item.future.set_exception(result_item.exception)
                 else:
                     work_item.future.set_result(result_item.result)
-                # Delete references to object. See issue16284
-                del work_item
-            # Delete reference to result_item
-            del result_item
 
-        # Check whether we should start shutting down.
-        executor = executor_reference()
+    def is_shutting_down(self):
+        # Check whether we should start shutting down the executor.
+        executor = self.executor_reference()
         # No more work items can be added if:
         #   - The interpreter is shutting down OR
         #   - The executor that owns this worker has been collected OR
         #   - The executor that owns this worker has been shutdown.
-        if shutting_down():
-            try:
-                # Flag the executor as shutting down as early as possible if it
-                # is not gc-ed yet.
-                if executor is not None:
-                    executor._shutdown_thread = True
-                # Since no new work items can be added, it is safe to shutdown
-                # this thread if there are no pending work items.
-                if not pending_work_items:
-                    shutdown_worker()
-                    return
-            except Full:
-                # This is not a problem: we will eventually be woken up (in
-                # result_queue.get()) and be able to send a sentinel again.
-                pass
-        executor = None
+        return (_global_shutdown or executor is None
+                or executor._shutdown_thread)
+
+    def terminate_broken(self, cause):
+        # Terminate the executor because it is in a broken state. The cause
+        # argument can be used to display more information on the error that
+        # led the executor to become broken.
+
+        # Mark the process pool broken so that submits fail right now.
+        executor = self.executor_reference()
+        if executor is not None:
+            executor._broken = ('A child process terminated '
+                                'abruptly, the process pool is not '
+                                'usable anymore')
+            executor._shutdown_thread = True
+            executor = None
+
+        # All pending tasks are to be marked failed with the following
+        # BrokenProcessPool error
+        bpe = BrokenProcessPool("A process in the process pool was "
+                                "terminated abruptly while the future was "
+                                "running or pending.")
+        if cause is not None:
+            bpe.__cause__ = _RemoteTraceback(
+                f"\n'''\n{''.join(cause)}'''")
+
+        # Mark pending tasks as failed.
+        for work_id, work_item in self.pending_work_items.items():
+            work_item.future.set_exception(bpe)
+            # Delete references to object. See issue16284
+            del work_item
+        self.pending_work_items.clear()
+
+        # Terminate remaining workers forcibly: the queues or their
+        # locks may be in a dirty state and block forever.
+        for p in self.processes.values():
+            p.terminate()
+
+        # clean up resources
+        self.join_executor_internals()
+
+    def flag_executor_shutting_down(self):
+        # Flag the executor as shutting down and cancel remaining tasks if
+        # requested as early as possible if it is not gc-ed yet.
+        executor = self.executor_reference()
+        if executor is not None:
+            executor._shutdown_thread = True
+            # Cancel pending work items if requested.
+            if executor._cancel_pending_futures:
+                # Cancel all pending futures and update pending_work_items
+                # to only have futures that are currently running.
+                new_pending_work_items = {}
+                for work_id, work_item in self.pending_work_items.items():
+                    if not work_item.future.cancel():
+                        new_pending_work_items[work_id] = work_item
+                self.pending_work_items = new_pending_work_items
+                # Drain work_ids_queue since we no longer need to
+                # add items to the call queue.
+                while True:
+                    try:
+                        self.work_ids_queue.get_nowait()
+                    except queue.Empty:
+                        break
+                # Make sure we do this only once to not waste time looping
+                # on running processes over and over.
+                executor._cancel_pending_futures = False
+
+    def shutdown_workers(self):
+        n_children_to_stop = self.get_n_children_alive()
+        n_sentinels_sent = 0
+        # Send the right number of sentinels, to make sure all children are
+        # properly terminated.
+        while (n_sentinels_sent < n_children_to_stop
+                and self.get_n_children_alive() > 0):
+            for i in range(n_children_to_stop - n_sentinels_sent):
+                try:
+                    self.call_queue.put_nowait(None)
+                    n_sentinels_sent += 1
+                except queue.Full:
+                    break
+
+    def join_executor_internals(self):
+        self.shutdown_workers()
+        # Release the queue's resources as soon as possible.
+        self.call_queue.close()
+        self.call_queue.join_thread()
+        with self.shutdown_lock:
+            self.thread_wakeup.close()
+        # If .join() is not called on the created processes then
+        # some ctx.Queue methods may deadlock on Mac OS X.
+        for p in self.processes.values():
+            p.join()
+
+    def get_n_children_alive(self):
+        # This is an upper bound on the number of children alive.
+        return sum(p.is_alive() for p in self.processes.values())
 
 
 _system_limits_checked = False
@@ -535,7 +610,7 @@
         self._initargs = initargs
 
         # Management thread
-        self._queue_management_thread = None
+        self._executor_manager_thread = None
 
         # Map of pids to processes
         self._processes = {}
@@ -543,9 +618,21 @@
         # Shutdown is a two-step process.
         self._shutdown_thread = False
         self._shutdown_lock = threading.Lock()
+        self._idle_worker_semaphore = threading.Semaphore(0)
         self._broken = False
         self._queue_count = 0
         self._pending_work_items = {}
+        self._cancel_pending_futures = False
+
+        # _ThreadWakeup is a communication channel used to interrupt the wait
+        # of the main loop of executor_manager_thread from another thread (e.g.
+        # when calling executor.submit or executor.shutdown). We do not use the
+        # _result_queue to send wakeup signals to the executor_manager_thread
+        # as it could result in a deadlock if a worker process dies with the
+        # _result_queue write lock still acquired.
+        #
+        # _shutdown_lock must be locked to access _ThreadWakeup.
+        self._executor_manager_thread_wakeup = _ThreadWakeup()
 
         # Create communication channels for the executor
         # Make the call queue slightly larger than the number of processes to
@@ -554,7 +641,9 @@
         queue_size = self._max_workers + EXTRA_QUEUED_CALLS
         self._call_queue = _SafeQueue(
             max_size=queue_size, ctx=self._mp_context,
-            pending_work_items=self._pending_work_items)
+            pending_work_items=self._pending_work_items,
+            shutdown_lock=self._shutdown_lock,
+            thread_wakeup=self._executor_manager_thread_wakeup)
         # Killed worker processes can produce spurious "broken pipe"
         # tracebacks in the queue's own worker thread. But we detect killed
         # processes anyway, so silence the tracebacks.
@@ -562,43 +651,21 @@
         self._result_queue = mp_context.SimpleQueue()
         self._work_ids = queue.Queue()
 
-        # _ThreadWakeup is a communication channel used to interrupt the wait
-        # of the main loop of queue_manager_thread from another thread (e.g.
-        # when calling executor.submit or executor.shutdown). We do not use the
-        # _result_queue to send the wakeup signal to the queue_manager_thread
-        # as it could result in a deadlock if a worker process dies with the
-        # _result_queue write lock still acquired.
-        self._queue_management_thread_wakeup = _ThreadWakeup()
-
-    def _start_queue_management_thread(self):
-        if self._queue_management_thread is None:
-            # When the executor gets garbarge collected, the weakref callback
-            # will wake up the queue management thread so that it can terminate
-            # if there is no pending work item.
-            def weakref_cb(_,
-                           thread_wakeup=self._queue_management_thread_wakeup):
-                mp.util.debug('Executor collected: triggering callback for'
-                              ' QueueManager wakeup')
-                thread_wakeup.wakeup()
+    def _start_executor_manager_thread(self):
+        if self._executor_manager_thread is None:
             # Start the processes so that their sentinels are known.
-            self._adjust_process_count()
-            self._queue_management_thread = threading.Thread(
-                target=_queue_management_worker,
-                args=(weakref.ref(self, weakref_cb),
-                      self._processes,
-                      self._pending_work_items,
-                      self._work_ids,
-                      self._call_queue,
-                      self._result_queue,
-                      self._queue_management_thread_wakeup),
-                name="QueueManagerThread")
-            self._queue_management_thread.daemon = True
-            self._queue_management_thread.start()
-            _threads_wakeups[self._queue_management_thread] = \
-                self._queue_management_thread_wakeup
+            self._executor_manager_thread = _ExecutorManagerThread(self)
+            self._executor_manager_thread.start()
+            _threads_wakeups[self._executor_manager_thread] = \
+                self._executor_manager_thread_wakeup
 
     def _adjust_process_count(self):
-        for _ in range(len(self._processes), self._max_workers):
+        # if there's an idle process, we don't need to spawn a new one.
+        if self._idle_worker_semaphore.acquire(blocking=False):
+            return
+
+        process_count = len(self._processes)
+        if process_count < self._max_workers:
             p = self._mp_context.Process(
                 target=_process_worker,
                 args=(self._call_queue,
@@ -608,22 +675,7 @@
             p.start()
             self._processes[p.pid] = p
 
-    def submit(*args, **kwargs):
-        if len(args) >= 2:
-            self, fn, *args = args
-        elif not args:
-            raise TypeError("descriptor 'submit' of 'ProcessPoolExecutor' object "
-                            "needs an argument")
-        elif 'fn' in kwargs:
-            fn = kwargs.pop('fn')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'fn' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('submit expected at least 1 positional argument, '
-                            'got %d' % (len(args)-1))
-
+    def submit(self, fn, /, *args, **kwargs):
         with self._shutdown_lock:
             if self._broken:
                 raise BrokenProcessPool(self._broken)
@@ -640,11 +692,11 @@
             self._work_ids.put(self._queue_count)
             self._queue_count += 1
             # Wake up queue management thread
-            self._queue_management_thread_wakeup.wakeup()
+            self._executor_manager_thread_wakeup.wakeup()
 
-            self._start_queue_management_thread()
+            self._adjust_process_count()
+            self._start_executor_manager_thread()
             return f
-    submit.__text_signature__ = _base.Executor.submit.__text_signature__
     submit.__doc__ = _base.Executor.submit.__doc__
 
     def map(self, fn, *iterables, timeout=None, chunksize=1):
@@ -676,29 +728,24 @@
                               timeout=timeout)
         return _chain_from_iterable_of_lists(results)
 
-    def shutdown(self, wait=True):
+    def shutdown(self, wait=True, *, cancel_futures=False):
         with self._shutdown_lock:
+            self._cancel_pending_futures = cancel_futures
             self._shutdown_thread = True
-        if self._queue_management_thread:
-            # Wake up queue management thread
-            self._queue_management_thread_wakeup.wakeup()
-            if wait:
-                self._queue_management_thread.join()
+            if self._executor_manager_thread_wakeup is not None:
+                # Wake up queue management thread
+                self._executor_manager_thread_wakeup.wakeup()
+
+        if self._executor_manager_thread is not None and wait:
+            self._executor_manager_thread.join()
         # To reduce the risk of opening too many files, remove references to
         # objects that use file descriptors.
-        self._queue_management_thread = None
-        if self._call_queue is not None:
-            self._call_queue.close()
-            if wait:
-                self._call_queue.join_thread()
-            self._call_queue = None
+        self._executor_manager_thread = None
+        self._call_queue = None
+        if self._result_queue is not None and wait:
+            self._result_queue.close()
         self._result_queue = None
         self._processes = None
-
-        if self._queue_management_thread_wakeup:
-            self._queue_management_thread_wakeup.close()
-            self._queue_management_thread_wakeup = None
+        self._executor_manager_thread_wakeup = None
 
     shutdown.__doc__ = _base.Executor.shutdown.__doc__
-
-atexit.register(_python_exit)
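
The manager-thread rewrite is internal, but the shutdown(cancel_futures=True)
path it implements is observable. A minimal sketch (the workload is
illustrative):

    import time
    from concurrent.futures import ProcessPoolExecutor

    def slow(x):
        time.sleep(1)
        return x

    if __name__ == '__main__':  # required under the spawn start method
        pool = ProcessPoolExecutor(max_workers=2)
        futures = [pool.submit(slow, i) for i in range(10)]
        # Pending futures are cancelled; already-running calls finish.
        pool.shutdown(wait=True, cancel_futures=True)
        print(sum(f.cancelled() for f in futures), 'futures cancelled')
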
diff --git a/common/py3-stdlib/concurrent/futures/thread.py b/common/py3-stdlib/concurrent/futures/thread.py
index 9e669b2..b7a2cac 100644
--- a/common/py3-stdlib/concurrent/futures/thread.py
+++ b/common/py3-stdlib/concurrent/futures/thread.py
@@ -5,41 +5,36 @@
 
 __author__ = 'Brian Quinlan (brian@sweetapp.com)'
 
-import atexit
 from concurrent.futures import _base
 import itertools
 import queue
 import threading
+import types
 import weakref
 import os
 
-# Workers are created as daemon threads. This is done to allow the interpreter
-# to exit when there are still idle threads in a ThreadPoolExecutor's thread
-# pool (i.e. shutdown() was not called). However, allowing workers to die with
-# the interpreter has two undesirable properties:
-#   - The workers would still be running during interpreter shutdown,
-#     meaning that they would fail in unpredictable ways.
-#   - The workers could be killed while evaluating a work item, which could
-#     be bad if the callable being evaluated has external side-effects e.g.
-#     writing to a file.
-#
-# To work around this problem, an exit handler is installed which tells the
-# workers to exit when their work queues are empty and then waits until the
-# threads finish.
 
 _threads_queues = weakref.WeakKeyDictionary()
 _shutdown = False
+# Lock that ensures that new workers are not created while the interpreter is
+# shutting down. Must be held while mutating _threads_queues and _shutdown.
+_global_shutdown_lock = threading.Lock()
 
 def _python_exit():
     global _shutdown
-    _shutdown = True
+    with _global_shutdown_lock:
+        _shutdown = True
     items = list(_threads_queues.items())
     for t, q in items:
         q.put(None)
     for t, q in items:
         t.join()
 
-atexit.register(_python_exit)
+# Register for `_python_exit()` to be called just before joining all
+# non-daemon threads. This is used instead of `atexit.register()` for
+# compatibility with subinterpreters, which no longer support daemon threads.
+# See bpo-39812 for context.
+threading._register_atexit(_python_exit)
 
 
 class _WorkItem(object):
@@ -62,6 +57,8 @@
         else:
             self.future.set_result(result)
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
 
 def _worker(executor_reference, work_queue, initializer, initargs):
     if initializer is not None:
@@ -155,23 +152,8 @@
         self._initializer = initializer
         self._initargs = initargs
 
-    def submit(*args, **kwargs):
-        if len(args) >= 2:
-            self, fn, *args = args
-        elif not args:
-            raise TypeError("descriptor 'submit' of 'ThreadPoolExecutor' object "
-                            "needs an argument")
-        elif 'fn' in kwargs:
-            fn = kwargs.pop('fn')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'fn' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('submit expected at least 1 positional argument, '
-                            'got %d' % (len(args)-1))
-
-        with self._shutdown_lock:
+    def submit(self, fn, /, *args, **kwargs):
+        with self._shutdown_lock, _global_shutdown_lock:
             if self._broken:
                 raise BrokenThreadPool(self._broken)
 
@@ -187,7 +169,6 @@
             self._work_queue.put(w)
             self._adjust_thread_count()
             return f
-    submit.__text_signature__ = _base.Executor.submit.__text_signature__
     submit.__doc__ = _base.Executor.submit.__doc__
 
     def _adjust_thread_count(self):
@@ -209,7 +190,6 @@
                                        self._work_queue,
                                        self._initializer,
                                        self._initargs))
-            t.daemon = True
             t.start()
             self._threads.add(t)
             _threads_queues[t] = self._work_queue
@@ -227,9 +207,22 @@
                 if work_item is not None:
                     work_item.future.set_exception(BrokenThreadPool(self._broken))
 
-    def shutdown(self, wait=True):
+    def shutdown(self, wait=True, *, cancel_futures=False):
         with self._shutdown_lock:
             self._shutdown = True
+            if cancel_futures:
+                # Drain all work items from the queue, and then cancel their
+                # associated futures.
+                while True:
+                    try:
+                        work_item = self._work_queue.get_nowait()
+                    except queue.Empty:
+                        break
+                    if work_item is not None:
+                        work_item.future.cancel()
+
+            # Send a wake-up to prevent threads calling
+            # _work_queue.get(block=True) from permanently blocking.
             self._work_queue.put(None)
         if wait:
             for t in self._threads:
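
Because the workers are no longer daemon threads, an executor that is never
explicitly shut down still drains cleanly: the threading._register_atexit()
hook above runs _python_exit() before the non-daemon workers are joined. A
minimal sketch:

    from concurrent.futures import ThreadPoolExecutor

    pool = ThreadPoolExecutor(max_workers=1)
    pool.submit(print, 'still runs to completion at interpreter exit')
    # No shutdown() here: _python_exit() wakes and joins the worker.
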
diff --git a/common/py3-stdlib/contextlib.py b/common/py3-stdlib/contextlib.py
index 94dc2bf..ff92d9f 100644
--- a/common/py3-stdlib/contextlib.py
+++ b/common/py3-stdlib/contextlib.py
@@ -4,7 +4,7 @@
 import _collections_abc
 from collections import deque
 from functools import wraps
-from types import MethodType
+from types import MethodType, GenericAlias
 
 __all__ = ["asynccontextmanager", "contextmanager", "closing", "nullcontext",
            "AbstractContextManager", "AbstractAsyncContextManager",
@@ -16,6 +16,8 @@
 
     """An abstract base class for context managers."""
 
+    __class_getitem__ = classmethod(GenericAlias)
+
     def __enter__(self):
         """Return `self` upon entering the runtime context."""
         return self
@@ -36,6 +38,8 @@
 
     """An abstract base class for asynchronous context managers."""
 
+    __class_getitem__ = classmethod(GenericAlias)
+
     async def __aenter__(self):
         """Return `self` upon entering the runtime context."""
         return self
@@ -426,26 +430,11 @@
         self._push_cm_exit(cm, _exit)
         return result
 
-    def callback(*args, **kwds):
+    def callback(self, callback, /, *args, **kwds):
         """Registers an arbitrary callback and arguments.
 
         Cannot suppress exceptions.
         """
-        if len(args) >= 2:
-            self, callback, *args = args
-        elif not args:
-            raise TypeError("descriptor 'callback' of '_BaseExitStack' object "
-                            "needs an argument")
-        elif 'callback' in kwds:
-            callback = kwds.pop('callback')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'callback' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('callback expected at least 1 positional argument, '
-                            'got %d' % (len(args)-1))
-
         _exit_wrapper = self._create_cb_wrapper(callback, *args, **kwds)
 
         # We changed the signature, so using @wraps is not appropriate, but
@@ -453,7 +442,6 @@
         _exit_wrapper.__wrapped__ = callback
         self._push_exit_callback(_exit_wrapper)
         return callback  # Allow use as a decorator
-    callback.__text_signature__ = '($self, callback, /, *args, **kwds)'
 
     def _push_cm_exit(self, cm, cm_exit):
         """Helper to correctly register callbacks to __exit__ methods."""
@@ -587,26 +575,11 @@
             self._push_async_cm_exit(exit, exit_method)
         return exit  # Allow use as a decorator
 
-    def push_async_callback(*args, **kwds):
+    def push_async_callback(self, callback, /, *args, **kwds):
         """Registers an arbitrary coroutine function and arguments.
 
         Cannot suppress exceptions.
         """
-        if len(args) >= 2:
-            self, callback, *args = args
-        elif not args:
-            raise TypeError("descriptor 'push_async_callback' of "
-                            "'AsyncExitStack' object needs an argument")
-        elif 'callback' in kwds:
-            callback = kwds.pop('callback')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'callback' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('push_async_callback expected at least 1 '
-                            'positional argument, got %d' % (len(args)-1))
-
         _exit_wrapper = self._create_async_cb_wrapper(callback, *args, **kwds)
 
         # We changed the signature, so using @wraps is not appropriate, but
@@ -614,7 +587,6 @@
         _exit_wrapper.__wrapped__ = callback
         self._push_exit_callback(_exit_wrapper, False)
         return callback  # Allow use as a decorator
-    push_async_callback.__text_signature__ = '($self, callback, /, *args, **kwds)'
 
     async def aclose(self):
         """Immediately unwind the context stack."""
diff --git a/common/py3-stdlib/copyreg.py b/common/py3-stdlib/copyreg.py
index dfc463c..7ab8c12 100644
--- a/common/py3-stdlib/copyreg.py
+++ b/common/py3-stdlib/copyreg.py
@@ -48,6 +48,7 @@
     return obj
 
 _HEAPTYPE = 1<<9
+_new_type = type(int.__new__)
 
 # Python code for object.__reduce_ex__ for protocols 0 and 1
 
@@ -57,6 +58,9 @@
     for base in cls.__mro__:
         if hasattr(base, '__flags__') and not base.__flags__ & _HEAPTYPE:
             break
+        new = base.__new__
+        if isinstance(new, _new_type) and new.__self__ is base:
+            break
     else:
         base = object # not really reachable
     if base is object:
diff --git a/common/py3-stdlib/crypt.py b/common/py3-stdlib/crypt.py
index 8846602..33dbc46 100644
--- a/common/py3-stdlib/crypt.py
+++ b/common/py3-stdlib/crypt.py
@@ -10,6 +10,7 @@
     else:
         raise ImportError("The required _crypt module was not built as part of CPython")
 
+import errno
 import string as _string
 from random import SystemRandom as _SystemRandom
 from collections import namedtuple as _namedtuple
@@ -88,7 +89,14 @@
     method = _Method(name, *args)
     globals()['METHOD_' + name] = method
     salt = mksalt(method, rounds=rounds)
-    result = crypt('', salt)
+    result = None
+    try:
+        result = crypt('', salt)
+    except OSError as e:
+        # Not all libc libraries support all encryption methods.
+        if e.errno == errno.EINVAL:
+            return False
+        raise
     if result and len(result) == method.total_size:
         methods.append(method)
         return True
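
With the EINVAL guard above, crypt.methods only advertises hashes that the
running libc actually accepts, so callers can simply iterate it. A minimal
sketch (Unix-only; output truncated for readability):

    import crypt  # raises ImportError on Windows

    for method in crypt.methods:  # strongest supported methods first
        salt = crypt.mksalt(method)
        print(method.name, crypt.crypt('secret', salt)[:24])
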
diff --git a/common/py3-stdlib/ctypes/__init__.py b/common/py3-stdlib/ctypes/__init__.py
index 8f09911..4afa4eb 100644
--- a/common/py3-stdlib/ctypes/__init__.py
+++ b/common/py3-stdlib/ctypes/__init__.py
@@ -1,6 +1,7 @@
 """create and manipulate C data types in Python"""
 
 import os as _os, sys as _sys
+import types as _types
 
 __version__ = "1.1.0"
 
@@ -450,6 +451,8 @@
     def LoadLibrary(self, name):
         return self._dlltype(name)
 
+    __class_getitem__ = classmethod(_types.GenericAlias)
+
 cdll = LibraryLoader(CDLL)
 pydll = LibraryLoader(PyDLL)
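
LibraryLoader joins the classes supporting PEP 585 subscription, which helps
annotate wrappers around cdll/pydll. A minimal sketch:

    import ctypes
    from ctypes import CDLL, LibraryLoader

    loader: LibraryLoader[CDLL] = ctypes.cdll  # valid at runtime from 3.9
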
 
diff --git a/common/py3-stdlib/ctypes/macholib/dyld.py b/common/py3-stdlib/ctypes/macholib/dyld.py
index 9d86b05..1c3f8fd 100644
--- a/common/py3-stdlib/ctypes/macholib/dyld.py
+++ b/common/py3-stdlib/ctypes/macholib/dyld.py
@@ -6,6 +6,11 @@
 from ctypes.macholib.framework import framework_info
 from ctypes.macholib.dylib import dylib_info
 from itertools import *
+try:
+    from _ctypes import _dyld_shared_cache_contains_path
+except ImportError:
+    def _dyld_shared_cache_contains_path(*args):
+        raise NotImplementedError
 
 __all__ = [
     'dyld_find', 'framework_find',
@@ -122,8 +127,15 @@
                 dyld_executable_path_search(name, executable_path),
                 dyld_default_search(name, env),
             ), env):
+
         if os.path.isfile(path):
             return path
+        try:
+            if _dyld_shared_cache_contains_path(path):
+                return path
+        except NotImplementedError:
+            pass
+
     raise ValueError("dylib %s could not be found" % (name,))
 
 def framework_find(fn, executable_path=None, env=None):
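
The shared-cache fallback above matters on macOS 11+, where system dylibs
live only in the dyld shared cache and no longer exist as plain files. A
sketch of the check (the path is illustrative; the _ctypes helper exists
only in newer CPython builds):

    import os.path

    path = "/usr/lib/libSystem.B.dylib"
    print(os.path.isfile(path))   # often False on macOS 11+

    try:
        from _ctypes import _dyld_shared_cache_contains_path
        print(_dyld_shared_cache_contains_path(path))   # True when cached
    except ImportError:
        pass  # older builds must rely on the plain file test
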
diff --git a/common/py3-stdlib/ctypes/test/test_find.py b/common/py3-stdlib/ctypes/test/test_find.py
index b99fdcb..92ac184 100644
--- a/common/py3-stdlib/ctypes/test/test_find.py
+++ b/common/py3-stdlib/ctypes/test/test_find.py
@@ -1,4 +1,5 @@
 import unittest
+import unittest.mock
 import os.path
 import sys
 import test.support
@@ -72,7 +73,7 @@
 
 @unittest.skipUnless(sys.platform.startswith('linux'),
                      'Test only valid for Linux')
-class LibPathFindTest(unittest.TestCase):
+class FindLibraryLinux(unittest.TestCase):
     def test_find_on_libpath(self):
         import subprocess
         import tempfile
@@ -111,6 +112,15 @@
                 # LD_LIBRARY_PATH)
                 self.assertEqual(find_library(libname), 'lib%s.so' % libname)
 
+    def test_find_library_with_gcc(self):
+        with unittest.mock.patch("ctypes.util._findSoname_ldconfig", lambda *args: None):
+            self.assertNotEqual(find_library('c'), None)
+
+    def test_find_library_with_ld(self):
+        with unittest.mock.patch("ctypes.util._findSoname_ldconfig", lambda *args: None), \
+             unittest.mock.patch("ctypes.util._findLib_gcc", lambda *args: None):
+            self.assertNotEqual(find_library('c'), None)
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/common/py3-stdlib/ctypes/test/test_loading.py b/common/py3-stdlib/ctypes/test/test_loading.py
index 5c48b0d..ba655bc 100644
--- a/common/py3-stdlib/ctypes/test/test_loading.py
+++ b/common/py3-stdlib/ctypes/test/test_loading.py
@@ -3,7 +3,6 @@
 import shutil
 import subprocess
 import sys
-import sysconfig
 import unittest
 import test.support
 from ctypes.util import find_library
diff --git a/common/py3-stdlib/ctypes/test/test_macholib.py b/common/py3-stdlib/ctypes/test/test_macholib.py
index 6b35269..a1bac26 100644
--- a/common/py3-stdlib/ctypes/test/test_macholib.py
+++ b/common/py3-stdlib/ctypes/test/test_macholib.py
@@ -45,19 +45,22 @@
 class MachOTest(unittest.TestCase):
     @unittest.skipUnless(sys.platform == "darwin", 'OSX-specific test')
     def test_find(self):
-
-        self.assertEqual(find_lib('pthread'),
-                             '/usr/lib/libSystem.B.dylib')
+        # On Mac OS 11, system dylibs are only present in the shared cache,
+        # so symlinks like libpthread.dylib -> libSystem.B.dylib will not
+        # be resolved by dyld_find
+        self.assertIn(find_lib('pthread'),
+                              ('/usr/lib/libSystem.B.dylib', '/usr/lib/libpthread.dylib'))
 
         result = find_lib('z')
         # Issue #21093: dyld default search path includes $HOME/lib and
         # /usr/local/lib before /usr/lib, which caused test failures if
         # a local copy of libz exists in one of them. Now ignore the head
         # of the path.
-        self.assertRegex(result, r".*/lib/libz\..*.*\.dylib")
+        self.assertRegex(result, r".*/lib/libz.*\.dylib")
 
-        self.assertEqual(find_lib('IOKit'),
-                             '/System/Library/Frameworks/IOKit.framework/Versions/A/IOKit')
+        self.assertIn(find_lib('IOKit'),
+                              ('/System/Library/Frameworks/IOKit.framework/Versions/A/IOKit',
+                              '/System/Library/Frameworks/IOKit.framework/IOKit'))
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/common/py3-stdlib/ctypes/test/test_stringptr.py b/common/py3-stdlib/ctypes/test/test_stringptr.py
index 95cd161..c20951f 100644
--- a/common/py3-stdlib/ctypes/test/test_stringptr.py
+++ b/common/py3-stdlib/ctypes/test/test_stringptr.py
@@ -70,8 +70,8 @@
         x = r[0], r[1], r[2], r[3], r[4]
         self.assertEqual(x, (b"c", b"d", b"e", b"f", b"\000"))
         del buf
-        # x1 will NOT be the same as x, usually:
-        x1 = r[0], r[1], r[2], r[3], r[4]
+        # Because r is a pointer to memory that is freed after deleting buf,
+        # the pointer is dangling and dereferencing it would read freed memory.
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/common/py3-stdlib/ctypes/test/test_structures.py b/common/py3-stdlib/ctypes/test/test_structures.py
index cdbaa7f..245cd94 100644
--- a/common/py3-stdlib/ctypes/test/test_structures.py
+++ b/common/py3-stdlib/ctypes/test/test_structures.py
@@ -7,6 +7,11 @@
 import _ctypes_test
 from test import support
 
+# The following definition is meant to be used from time to time to assist
+# in temporarily disabling tests on specific architectures while
+# investigations are in progress, to keep buildbots happy.
+MACHINE = platform.machine()
+
 class SubclassesTest(unittest.TestCase):
     def test_subclass(self):
         class X(Structure):
diff --git a/common/py3-stdlib/ctypes/test/test_wintypes.py b/common/py3-stdlib/ctypes/test/test_wintypes.py
index 71442df..243d596 100644
--- a/common/py3-stdlib/ctypes/test/test_wintypes.py
+++ b/common/py3-stdlib/ctypes/test/test_wintypes.py
@@ -1,12 +1,13 @@
-import sys
 import unittest
 
-from ctypes import *
+# These tests (and the wintypes module itself) also work on POSIX.
 
-@unittest.skipUnless(sys.platform.startswith('win'), 'Windows-only test')
+from ctypes import *
+from ctypes import wintypes
+
+
 class WinTypesTest(unittest.TestCase):
     def test_variant_bool(self):
-        from ctypes import wintypes
         # reads 16-bits from memory, anything non-zero is True
         for true_value in (1, 32767, 32768, 65535, 65537):
             true = POINTER(c_int16)(c_int16(true_value))
@@ -37,5 +38,6 @@
         vb.value = []
         self.assertIs(vb.value, False)
 
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/common/py3-stdlib/ctypes/util.py b/common/py3-stdlib/ctypes/util.py
index 97973bc..0c2510e 100644
--- a/common/py3-stdlib/ctypes/util.py
+++ b/common/py3-stdlib/ctypes/util.py
@@ -93,6 +93,12 @@
     # Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump
     import re, tempfile
 
+    def _is_elf(filename):
+        "Return True if the given file is an ELF file"
+        elf_header = b'\x7fELF'
+        with open(filename, 'br') as thefile:
+            return thefile.read(4) == elf_header
+
     def _findLib_gcc(name):
         # Run GCC's linker with the -t (aka --trace) option and examine the
         # library name it prints out. The GCC command will fail because we
@@ -130,10 +136,17 @@
                 # Raised if the file was already removed, which is the normal
                 # behaviour of GCC if linking fails
                 pass
-        res = re.search(expr, trace)
+        res = re.findall(expr, trace)
         if not res:
             return None
-        return os.fsdecode(res.group(0))
+
+        for file in res:
+            # Check if the given file is an ELF file: gcc can report
+            # some files that are linker scripts and not actual
+            # shared objects. See bpo-41976 for more details
+            if not _is_elf(file):
+                continue
+            return os.fsdecode(file)
 
 
     if sys.platform == "sunos5":
@@ -299,17 +312,22 @@
                                      stderr=subprocess.PIPE,
                                      universal_newlines=True)
                 out, _ = p.communicate()
-                res = re.search(expr, os.fsdecode(out))
-                if res:
-                    result = res.group(0)
-            except Exception as e:
+                res = re.findall(expr, os.fsdecode(out))
+                for file in res:
+                    # Check if the given file is an ELF file: ld can report
+                    # some files that are linker scripts and not actual
+                    # shared objects. See bpo-41976 for more details
+                    if not _is_elf(file):
+                        continue
+                    return os.fsdecode(file)
+            except Exception:
                 pass  # result will be None
             return result
 
         def find_library(name):
             # See issue #9998
             return _findSoname_ldconfig(name) or \
-                   _get_soname(_findLib_gcc(name) or _findLib_ld(name))
+                   _get_soname(_findLib_gcc(name)) or _get_soname(_findLib_ld(name))
 
 ################################################################
 # test code
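
The ELF-magic filter above addresses bpo-41976: on glibc systems the linker
trace can name a GNU ld script (for example a plain-text libc.so) rather
than the real shared object. A standalone sketch of the check:

    def is_elf(filename):
        # every ELF binary starts with the 4-byte magic 0x7f 'E' 'L' 'F'
        with open(filename, 'rb') as f:
            return f.read(4) == b'\x7fELF'

    print(is_elf('/bin/ls'))   # True on a typical Linux system
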
diff --git a/common/py3-stdlib/dataclasses.py b/common/py3-stdlib/dataclasses.py
index 10bb33e..530d3e9 100644
--- a/common/py3-stdlib/dataclasses.py
+++ b/common/py3-stdlib/dataclasses.py
@@ -7,6 +7,7 @@
 import builtins
 import functools
 import _thread
+from types import GenericAlias
 
 
 __all__ = ['dataclass',
@@ -199,11 +200,7 @@
 # https://bugs.python.org/issue33453 for details.
 _MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)')
 
-class _InitVarMeta(type):
-    def __getitem__(self, params):
-        return InitVar(params)
-
-class InitVar(metaclass=_InitVarMeta):
+class InitVar:
     __slots__ = ('type', )
 
     def __init__(self, type):
@@ -217,6 +214,9 @@
             type_name = repr(self.type)
         return f'dataclasses.InitVar[{type_name}]'
 
+    def __class_getitem__(cls, type):
+        return InitVar(type)
+
 
 # Instances of Field are only ever created from within this module,
 # and only from the field() function, although Field instances are
@@ -285,6 +285,8 @@
             # it.
             func(self.default, owner, name)
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 class _DataclassParams:
     __slots__ = ('init',
@@ -1231,7 +1233,7 @@
                      unsafe_hash=unsafe_hash, frozen=frozen)
 
 
-def replace(*args, **changes):
+def replace(obj, /, **changes):
     """Return a new object replacing specified fields with new values.
 
     This is especially useful for frozen classes.  Example usage:
@@ -1245,17 +1247,6 @@
       c1 = replace(c, x=3)
       assert c1.x == 3 and c1.y == 2
       """
-    if len(args) > 1:
-        raise TypeError(f'replace() takes 1 positional argument but {len(args)} were given')
-    if args:
-        obj, = args
-    elif 'obj' in changes:
-        obj = changes.pop('obj')
-        import warnings
-        warnings.warn("Passing 'obj' as keyword argument is deprecated",
-                      DeprecationWarning, stacklevel=2)
-    else:
-        raise TypeError("replace() missing 1 required positional argument: 'obj'")
 
     # We're going to mutate 'changes', but that's okay because it's a
     # new dict, even if called with 'replace(obj, **my_changes)'.
@@ -1291,4 +1282,3 @@
     # changes that aren't fields, this will correctly raise a
     # TypeError.
     return obj.__class__(**changes)
-replace.__text_signature__ = '(obj, /, **kwargs)'
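
Declaring replace(obj, /, **changes) makes the positional-only behaviour
native instead of emulated through *args, so the deprecation shim and the
manual __text_signature__ both go away; a field literally named 'obj' still
flows cleanly into **changes. A short sketch (the dataclass is hypothetical):

    from dataclasses import dataclass, replace

    @dataclass
    class Wrapper:
        obj: str
        count: int

    w = Wrapper(obj="payload", count=1)
    # the keyword 'obj' lands in **changes, not on the positional parameter
    print(replace(w, obj="other"))   # Wrapper(obj='other', count=1)
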
diff --git a/common/py3-stdlib/datetime.py b/common/py3-stdlib/datetime.py
index 0adf1dd..e508d99 100644
--- a/common/py3-stdlib/datetime.py
+++ b/common/py3-stdlib/datetime.py
@@ -4,6 +4,10 @@
 time zone and DST data sources.
 """
 
+__all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo",
+           "MINYEAR", "MAXYEAR")
+
+
 import time as _time
 import math as _math
 import sys
@@ -1091,7 +1095,7 @@
         return self.toordinal() % 7 or 7
 
     def isocalendar(self):
-        """Return a 3-tuple containing ISO year, week number, and weekday.
+        """Return a named tuple containing ISO year, week number, and weekday.
 
         The first ISO week of the year is the (Mon-Sun) week
         containing the year's first Thursday; everything else derives
@@ -1116,7 +1120,7 @@
             if today >= _isoweek1monday(year+1):
                 year += 1
                 week = 0
-        return year, week+1, day+1
+        return _IsoCalendarDate(year, week+1, day+1)
 
     # Pickle support.
 
@@ -1206,6 +1210,36 @@
         else:
             return (self.__class__, args, state)
 
+
+class IsoCalendarDate(tuple):
+
+    def __new__(cls, year, week, weekday, /):
+        return super().__new__(cls, (year, week, weekday))
+
+    @property
+    def year(self):
+        return self[0]
+
+    @property
+    def week(self):
+        return self[1]
+
+    @property
+    def weekday(self):
+        return self[2]
+
+    def __reduce__(self):
+        # This code is intended to pickle the object without making the
+        # class public. See https://bugs.python.org/msg352381
+        return (tuple, (tuple(self),))
+
+    def __repr__(self):
+        return (f'{self.__class__.__name__}'
+                f'(year={self[0]}, week={self[1]}, weekday={self[2]})')
+
+
+_IsoCalendarDate = IsoCalendarDate
+del IsoCalendarDate
 _tzinfo_class = tzinfo
 
 class time:
@@ -1418,7 +1452,8 @@
         part is omitted if self.microsecond == 0.
 
         The optional argument timespec specifies the number of additional
-        terms of the time to include.
+        terms of the time to include. Valid options are 'auto', 'hours',
+        'minutes', 'seconds', 'milliseconds' and 'microseconds'.
         """
         s = _format_time(self._hour, self._minute, self._second,
                           self._microsecond, timespec)
@@ -1544,7 +1579,7 @@
         self._tzinfo = tzinfo
 
     def __reduce_ex__(self, protocol):
-        return (time, self._getstate(protocol))
+        return (self.__class__, self._getstate(protocol))
 
     def __reduce__(self):
         return self.__reduce_ex__(2)
@@ -1555,6 +1590,7 @@
 time.max = time(23, 59, 59, 999999)
 time.resolution = timedelta(microseconds=1)
 
+
 class datetime(date):
     """datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]])
 
@@ -1902,7 +1938,8 @@
         time, default 'T'.
 
         The optional argument timespec specifies the number of additional
-        terms of the time to include.
+        terms of the time to include. Valid options are 'auto', 'hours',
+        'minutes', 'seconds', 'milliseconds' and 'microseconds'.
         """
         s = ("%04d-%02d-%02d%c" % (self._year, self._month, self._day, sep) +
              _format_time(self._hour, self._minute, self._second,
@@ -2510,7 +2547,7 @@
          _format_time, _format_offset, _is_leap, _isoweek1monday, _math,
          _ord2ymd, _time, _time_class, _tzinfo_class, _wrap_strftime, _ymd2ord,
          _divide_and_round, _parse_isoformat_date, _parse_isoformat_time,
-         _parse_hh_mm_ss_ff)
+         _parse_hh_mm_ss_ff, _IsoCalendarDate)
     # XXX Since import * above excludes names that start with _,
     # docstring does not get overwritten. In the future, it may be
     # appropriate to maintain a single module level docstring and
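
The new IsoCalendarDate returned above behaves like the old 3-tuple but adds
named access, and its __reduce__ deliberately pickles it as a plain tuple so
the class itself can stay private. For example:

    from datetime import date

    ic = date(2020, 1, 1).isocalendar()
    print(ic)                     # IsoCalendarDate(year=2020, week=1, weekday=3)
    print(ic.year, ic.week, ic.weekday)
    year, week, weekday = ic      # still unpacks like the old tuple
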
diff --git a/common/py3-stdlib/difflib.py b/common/py3-stdlib/difflib.py
index 5d75643..0dda80d 100644
--- a/common/py3-stdlib/difflib.py
+++ b/common/py3-stdlib/difflib.py
@@ -32,6 +32,7 @@
 
 from heapq import nlargest as _nlargest
 from collections import namedtuple as _namedtuple
+from types import GenericAlias
 
 Match = _namedtuple('Match', 'a b size')
 
@@ -129,7 +130,7 @@
     set_seq2(b)
         Set the second sequence to be compared.
 
-    find_longest_match(alo, ahi, blo, bhi)
+    find_longest_match(alo=0, ahi=None, blo=0, bhi=None)
         Find longest matching block in a[alo:ahi] and b[blo:bhi].
 
     get_matching_blocks()
@@ -333,9 +334,11 @@
             for elt in popular: # ditto; as fast for 1% deletion
                 del b2j[elt]
 
-    def find_longest_match(self, alo, ahi, blo, bhi):
+    def find_longest_match(self, alo=0, ahi=None, blo=0, bhi=None):
         """Find longest matching block in a[alo:ahi] and b[blo:bhi].
 
+        By default it will find the longest match in the entirety of a and b.
+
         If isjunk is not defined:
 
         Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
@@ -390,6 +393,10 @@
         # the unique 'b's and then matching the first two 'a's.
 
         a, b, b2j, isbjunk = self.a, self.b, self.b2j, self.bjunk.__contains__
+        if ahi is None:
+            ahi = len(a)
+        if bhi is None:
+            bhi = len(b)
         besti, bestj, bestsize = alo, blo, 0
         # find longest junk-free match
         # during an iteration of the loop, j2len[j] = length of longest
@@ -685,6 +692,9 @@
         # shorter sequence
         return _calculate_ratio(min(la, lb), la + lb)
 
+    __class_getitem__ = classmethod(GenericAlias)
+
+
 def get_close_matches(word, possibilities, n=3, cutoff=0.6):
     """Use SequenceMatcher to return list of the best "good enough" matches.
 
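
With the defaults added to find_longest_match above, finding the longest
common block of two whole sequences no longer requires spelling out the
index bounds. A sketch using the docstring's own example strings:

    from difflib import SequenceMatcher

    sm = SequenceMatcher(None, " abcd", "abcd abcd")
    print(sm.find_longest_match())            # Match(a=0, b=4, size=5)
    print(sm.find_longest_match(1, 5, 0, 4))  # explicit bounds still work
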
diff --git a/common/py3-stdlib/dis.py b/common/py3-stdlib/dis.py
index 10e5f7f..e289e17 100644
--- a/common/py3-stdlib/dis.py
+++ b/common/py3-stdlib/dis.py
@@ -542,7 +542,7 @@
     import argparse
 
     parser = argparse.ArgumentParser()
-    parser.add_argument('infile', type=argparse.FileType(), nargs='?', default='-')
+    parser.add_argument('infile', type=argparse.FileType('rb'), nargs='?', default='-')
     args = parser.parse_args()
     with args.infile as infile:
         source = infile.read()
diff --git a/common/py3-stdlib/distutils/_msvccompiler.py b/common/py3-stdlib/distutils/_msvccompiler.py
index 03a5986..af8099a 100644
--- a/common/py3-stdlib/distutils/_msvccompiler.py
+++ b/common/py3-stdlib/distutils/_msvccompiler.py
@@ -14,8 +14,6 @@
 # ported to VS 2015 by Steve Dower
 
 import os
-import shutil
-import stat
 import subprocess
 import winreg
 
@@ -65,8 +63,6 @@
     If vswhere.exe is not available, by definition, VS 2017 is not
     installed.
     """
-    import json
-
     root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
     if not root:
         return None, None
diff --git a/common/py3-stdlib/distutils/bcppcompiler.py b/common/py3-stdlib/distutils/bcppcompiler.py
index 9f4c432..071fea5 100644
--- a/common/py3-stdlib/distutils/bcppcompiler.py
+++ b/common/py3-stdlib/distutils/bcppcompiler.py
@@ -14,10 +14,10 @@
 
 import os
 from distutils.errors import \
-     DistutilsExecError, DistutilsPlatformError, \
+     DistutilsExecError, \
      CompileError, LibError, LinkError, UnknownFileError
 from distutils.ccompiler import \
-     CCompiler, gen_preprocess_options, gen_lib_options
+     CCompiler, gen_preprocess_options
 from distutils.file_util import write_file
 from distutils.dep_util import newer
 from distutils import log
diff --git a/common/py3-stdlib/distutils/ccompiler.py b/common/py3-stdlib/distutils/ccompiler.py
index 4cfc6c7..b5ef143 100644
--- a/common/py3-stdlib/distutils/ccompiler.py
+++ b/common/py3-stdlib/distutils/ccompiler.py
@@ -8,7 +8,7 @@
 from distutils.spawn import spawn
 from distutils.file_util import move_file
 from distutils.dir_util import mkpath
-from distutils.dep_util import newer_pairwise, newer_group
+from distutils.dep_util import newer_group
 from distutils.util import split_quoted, execute
 from distutils import log
 
diff --git a/common/py3-stdlib/distutils/command/bdist_msi.py b/common/py3-stdlib/distutils/command/bdist_msi.py
index f335a34..0863a18 100644
--- a/common/py3-stdlib/distutils/command/bdist_msi.py
+++ b/common/py3-stdlib/distutils/command/bdist_msi.py
@@ -6,7 +6,9 @@
 Implements the bdist_msi command.
 """
 
-import sys, os
+import os
+import sys
+import warnings
 from distutils.core import Command
 from distutils.dir_util import remove_tree
 from distutils.sysconfig import get_python_version
@@ -122,6 +124,12 @@
                     '3.5', '3.6', '3.7', '3.8', '3.9']
     other_version = 'X'
 
+    def __init__(self, *args, **kw):
+        super().__init__(*args, **kw)
+        warnings.warn("bdist_msi command is deprecated since Python 3.9, "
+                      "use bdist_wheel (wheel packages) instead",
+                      DeprecationWarning, 2)
+
     def initialize_options(self):
         self.bdist_dir = None
         self.plat_name = None
diff --git a/common/py3-stdlib/distutils/command/bdist_rpm.py b/common/py3-stdlib/distutils/command/bdist_rpm.py
index 74381cc..550cbfa 100644
--- a/common/py3-stdlib/distutils/command/bdist_rpm.py
+++ b/common/py3-stdlib/distutils/command/bdist_rpm.py
@@ -6,7 +6,6 @@
 import subprocess, sys, os
 from distutils.core import Command
 from distutils.debug import DEBUG
-from distutils.util import get_platform
 from distutils.file_util import write_file
 from distutils.errors import *
 from distutils.sysconfig import get_python_version
diff --git a/common/py3-stdlib/distutils/command/bdist_wininst.py b/common/py3-stdlib/distutils/command/bdist_wininst.py
index b5ed6f0..0e9ddaa 100644
--- a/common/py3-stdlib/distutils/command/bdist_wininst.py
+++ b/common/py3-stdlib/distutils/command/bdist_wininst.py
@@ -8,7 +8,7 @@
 import warnings
 from distutils.core import Command
 from distutils.util import get_platform
-from distutils.dir_util import create_tree, remove_tree
+from distutils.dir_util import remove_tree
 from distutils.errors import *
 from distutils.sysconfig import get_python_version
 from distutils import log
diff --git a/common/py3-stdlib/distutils/command/build_ext.py b/common/py3-stdlib/distutils/command/build_ext.py
index dbcd9d1..1a9bd12 100644
--- a/common/py3-stdlib/distutils/command/build_ext.py
+++ b/common/py3-stdlib/distutils/command/build_ext.py
@@ -490,7 +490,8 @@
                   "in 'ext_modules' option (extension '%s'), "
                   "'sources' must be present and must be "
                   "a list of source filenames" % ext.name)
-        sources = list(sources)
+        # sort to make the resulting .so file build reproducible
+        sources = sorted(sources)
 
         ext_path = self.get_ext_fullpath(ext.name)
         depends = sources + ext.depends
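
Sorting the source list makes extension builds reproducible: setup scripts
often collect sources with a glob, whose order depends on the filesystem,
and that order ends up baked into the linked .so. A sketch of the
setup-script side (the layout is hypothetical):

    import glob

    # filesystem enumeration order varies between machines; sorting pins it
    sources = sorted(glob.glob("src/*.c"))
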
diff --git a/common/py3-stdlib/distutils/command/check.py b/common/py3-stdlib/distutils/command/check.py
index 04c2f96..ada2500 100644
--- a/common/py3-stdlib/distutils/command/check.py
+++ b/common/py3-stdlib/distutils/command/check.py
@@ -11,7 +11,6 @@
     from docutils.parsers.rst import Parser
     from docutils import frontend
     from docutils import nodes
-    from io import StringIO
 
     class SilentReporter(Reporter):
 
@@ -80,8 +79,11 @@
     def check_metadata(self):
         """Ensures that all required elements of meta-data are supplied.
 
-        name, version, URL, (author and author_email) or
-        (maintainer and maintainer_email)).
+        Required fields:
+            name, version, URL
+
+        Recommended fields:
+            (author and author_email) or (maintainer and maintainer_email)
 
         Warns if any are missing.
         """
@@ -97,15 +99,15 @@
         if metadata.author:
             if not metadata.author_email:
                 self.warn("missing meta-data: if 'author' supplied, " +
-                          "'author_email' must be supplied too")
+                          "'author_email' should be supplied too")
         elif metadata.maintainer:
             if not metadata.maintainer_email:
                 self.warn("missing meta-data: if 'maintainer' supplied, " +
-                          "'maintainer_email' must be supplied too")
+                          "'maintainer_email' should be supplied too")
         else:
             self.warn("missing meta-data: either (author and author_email) " +
                       "or (maintainer and maintainer_email) " +
-                      "must be supplied")
+                      "should be supplied")
 
     def check_restructuredtext(self):
         """Checks if the long string fields are reST-compliant."""
diff --git a/common/py3-stdlib/distutils/command/install.py b/common/py3-stdlib/distutils/command/install.py
index c625c95..aaa300e 100644
--- a/common/py3-stdlib/distutils/command/install.py
+++ b/common/py3-stdlib/distutils/command/install.py
@@ -30,14 +30,14 @@
 INSTALL_SCHEMES = {
     'unix_prefix': {
         'purelib': '$base/lib/python$py_version_short/site-packages',
-        'platlib': '$platbase/lib/python$py_version_short/site-packages',
+        'platlib': '$platbase/$platlibdir/python$py_version_short/site-packages',
         'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
         'scripts': '$base/bin',
         'data'   : '$base',
         },
     'unix_home': {
         'purelib': '$base/lib/python',
-        'platlib': '$base/lib/python',
+        'platlib': '$base/$platlibdir/python',
         'headers': '$base/include/python/$dist_name',
         'scripts': '$base/bin',
         'data'   : '$base',
@@ -298,6 +298,7 @@
                             'sys_exec_prefix': exec_prefix,
                             'exec_prefix': exec_prefix,
                             'abiflags': abiflags,
+                            'platlibdir': sys.platlibdir,
                            }
 
         if HAS_USER_SITE:
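
The $platlibdir substitution above tracks the new sys.platlibdir attribute
(bpo-1294959): it is "lib" on most platforms but, for example, "lib64" when
CPython is configured with --with-platlibdir=lib64, as some 64-bit Linux
distributions do. For example:

    import sys

    print(sys.platlibdir)   # 'lib' on most builds, 'lib64' on some distros
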
diff --git a/common/py3-stdlib/distutils/command/upload.py b/common/py3-stdlib/distutils/command/upload.py
index 11afa24..95e9fda 100644
--- a/common/py3-stdlib/distutils/command/upload.py
+++ b/common/py3-stdlib/distutils/command/upload.py
@@ -7,7 +7,6 @@
 
 import os
 import io
-import platform
 import hashlib
 from base64 import standard_b64encode
 from urllib.request import urlopen, Request, HTTPError
@@ -17,6 +16,16 @@
 from distutils.spawn import spawn
 from distutils import log
 
+
+# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
+# https://bugs.python.org/issue40698
+_FILE_CONTENT_DIGESTS = {
+    "md5_digest": getattr(hashlib, "md5", None),
+    "sha256_digest": getattr(hashlib, "sha256", None),
+    "blake2_256_digest": getattr(hashlib, "blake2b", None),
+}
+
+
 class upload(PyPIRCCommand):
 
     description = "upload binary package to PyPI"
@@ -88,6 +97,7 @@
             content = f.read()
         finally:
             f.close()
+
         meta = self.distribution.metadata
         data = {
             # action
@@ -102,7 +112,6 @@
             'content': (os.path.basename(filename),content),
             'filetype': command,
             'pyversion': pyversion,
-            'md5_digest': hashlib.md5(content).hexdigest(),
 
             # additional meta-data
             'metadata_version': '1.0',
@@ -124,6 +133,16 @@
 
         data['comment'] = ''
 
+        # file content digests
+        for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
+            if digest_cons is None:
+                continue
+            try:
+                data[digest_name] = digest_cons(content).hexdigest()
+            except ValueError:
+                # hash digest not available or blocked by security policy
+                pass
+
         if self.sign:
             with open(filename + ".asc", "rb") as f:
                 data['gpg_signature'] = (os.path.basename(filename) + ".asc",
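
Dropping the unconditional md5 call keeps upload working on FIPS-restricted
builds, where hashlib.md5 can be missing or raise ValueError when invoked.
A standalone sketch of the tolerant pattern:

    import hashlib

    content = b"example payload"
    digests = {}
    for name in ("md5", "sha256", "blake2b"):
        cons = getattr(hashlib, name, None)
        if cons is None:
            continue                 # not compiled into this build
        try:
            digests[name] = cons(content).hexdigest()
        except ValueError:
            pass                     # blocked by the security policy
    print(sorted(digests))
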
diff --git a/common/py3-stdlib/distutils/cygwinccompiler.py b/common/py3-stdlib/distutils/cygwinccompiler.py
index 6c5d777..66c12dd 100644
--- a/common/py3-stdlib/distutils/cygwinccompiler.py
+++ b/common/py3-stdlib/distutils/cygwinccompiler.py
@@ -51,12 +51,10 @@
 from subprocess import Popen, PIPE, check_output
 import re
 
-from distutils.ccompiler import gen_preprocess_options, gen_lib_options
 from distutils.unixccompiler import UnixCCompiler
 from distutils.file_util import write_file
 from distutils.errors import (DistutilsExecError, CCompilerError,
         CompileError, UnknownFileError)
-from distutils import log
 from distutils.version import LooseVersion
 from distutils.spawn import find_executable
 
diff --git a/common/py3-stdlib/distutils/msvc9compiler.py b/common/py3-stdlib/distutils/msvc9compiler.py
index 4c0036a..6934e96 100644
--- a/common/py3-stdlib/distutils/msvc9compiler.py
+++ b/common/py3-stdlib/distutils/msvc9compiler.py
@@ -19,8 +19,7 @@
 
 from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
                              CompileError, LibError, LinkError
-from distutils.ccompiler import CCompiler, gen_preprocess_options, \
-                                gen_lib_options
+from distutils.ccompiler import CCompiler, gen_lib_options
 from distutils import log
 from distutils.util import get_platform
 
diff --git a/common/py3-stdlib/distutils/msvccompiler.py b/common/py3-stdlib/distutils/msvccompiler.py
index d1de2fb..d5857cb 100644
--- a/common/py3-stdlib/distutils/msvccompiler.py
+++ b/common/py3-stdlib/distutils/msvccompiler.py
@@ -13,7 +13,7 @@
      DistutilsExecError, DistutilsPlatformError, \
      CompileError, LibError, LinkError
 from distutils.ccompiler import \
-     CCompiler, gen_preprocess_options, gen_lib_options
+     CCompiler, gen_lib_options
 from distutils import log
 
 _can_read_reg = False
diff --git a/common/py3-stdlib/distutils/spawn.py b/common/py3-stdlib/distutils/spawn.py
index ceb9494..f50edd2 100644
--- a/common/py3-stdlib/distutils/spawn.py
+++ b/common/py3-stdlib/distutils/spawn.py
@@ -8,11 +8,18 @@
 
 import sys
 import os
+import subprocess
 
 from distutils.errors import DistutilsPlatformError, DistutilsExecError
 from distutils.debug import DEBUG
 from distutils import log
 
+
+if sys.platform == 'darwin':
+    _cfg_target = None
+    _cfg_target_split = None
+
+
 def spawn(cmd, search_path=1, verbose=0, dry_run=0):
     """Run another program, specified as a command list 'cmd', in a new process.
 
@@ -32,71 +39,23 @@
     # cmd is documented as a list, but just in case some code passes a tuple
     # in, protect our %-formatting code against horrible death
     cmd = list(cmd)
-    if os.name == 'posix':
-        _spawn_posix(cmd, search_path, dry_run=dry_run)
-    elif os.name == 'nt':
-        _spawn_nt(cmd, search_path, dry_run=dry_run)
-    else:
-        raise DistutilsPlatformError(
-              "don't know how to spawn programs on platform '%s'" % os.name)
 
-def _nt_quote_args(args):
-    """Quote command-line arguments for DOS/Windows conventions.
-
-    Just wraps every argument which contains blanks in double quotes, and
-    returns a new argument list.
-    """
-    # XXX this doesn't seem very robust to me -- but if the Windows guys
-    # say it'll work, I guess I'll have to accept it.  (What if an arg
-    # contains quotes?  What other magic characters, other than spaces,
-    # have to be escaped?  Is there an escaping mechanism other than
-    # quoting?)
-    for i, arg in enumerate(args):
-        if ' ' in arg:
-            args[i] = '"%s"' % arg
-    return args
-
-def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
-    executable = cmd[0]
-    cmd = _nt_quote_args(cmd)
-    if search_path:
-        # either we find one or it stays the same
-        executable = find_executable(executable) or executable
-    log.info(' '.join([executable] + cmd[1:]))
-    if not dry_run:
-        # spawn for NT requires a full path to the .exe
-        try:
-            rc = os.spawnv(os.P_WAIT, executable, cmd)
-        except OSError as exc:
-            # this seems to happen when the command isn't found
-            if not DEBUG:
-                cmd = executable
-            raise DistutilsExecError(
-                  "command %r failed: %s" % (cmd, exc.args[-1]))
-        if rc != 0:
-            # and this reflects the command running but failing
-            if not DEBUG:
-                cmd = executable
-            raise DistutilsExecError(
-                  "command %r failed with exit status %d" % (cmd, rc))
-
-if sys.platform == 'darwin':
-    _cfg_target = None
-    _cfg_target_split = None
-
-def _spawn_posix(cmd, search_path=1, verbose=0, dry_run=0):
     log.info(' '.join(cmd))
     if dry_run:
         return
-    executable = cmd[0]
-    exec_fn = search_path and os.execvp or os.execv
+
+    if search_path:
+        executable = find_executable(cmd[0])
+        if executable is not None:
+            cmd[0] = executable
+
     env = None
     if sys.platform == 'darwin':
         global _cfg_target, _cfg_target_split
         if _cfg_target is None:
             from distutils import sysconfig
-            _cfg_target = sysconfig.get_config_var(
-                                  'MACOSX_DEPLOYMENT_TARGET') or ''
+            _cfg_target = str(sysconfig.get_config_var(
+                                  'MACOSX_DEPLOYMENT_TARGET') or '')
             if _cfg_target:
                 _cfg_target_split = [int(x) for x in _cfg_target.split('.')]
         if _cfg_target:
@@ -111,60 +70,23 @@
                 raise DistutilsPlatformError(my_msg)
             env = dict(os.environ,
                        MACOSX_DEPLOYMENT_TARGET=cur_target)
-            exec_fn = search_path and os.execvpe or os.execve
-    pid = os.fork()
-    if pid == 0: # in the child
-        try:
-            if env is None:
-                exec_fn(executable, cmd)
-            else:
-                exec_fn(executable, cmd, env)
-        except OSError as e:
-            if not DEBUG:
-                cmd = executable
-            sys.stderr.write("unable to execute %r: %s\n"
-                             % (cmd, e.strerror))
-            os._exit(1)
 
+    try:
+        proc = subprocess.Popen(cmd, env=env)
+        proc.wait()
+        exitcode = proc.returncode
+    except OSError as exc:
         if not DEBUG:
-            cmd = executable
-        sys.stderr.write("unable to execute %r for unknown reasons" % cmd)
-        os._exit(1)
-    else: # in the parent
-        # Loop until the child either exits or is terminated by a signal
-        # (ie. keep waiting if it's merely stopped)
-        while True:
-            try:
-                pid, status = os.waitpid(pid, 0)
-            except OSError as exc:
-                if not DEBUG:
-                    cmd = executable
-                raise DistutilsExecError(
-                      "command %r failed: %s" % (cmd, exc.args[-1]))
-            if os.WIFSIGNALED(status):
-                if not DEBUG:
-                    cmd = executable
-                raise DistutilsExecError(
-                      "command %r terminated by signal %d"
-                      % (cmd, os.WTERMSIG(status)))
-            elif os.WIFEXITED(status):
-                exit_status = os.WEXITSTATUS(status)
-                if exit_status == 0:
-                    return   # hey, it succeeded!
-                else:
-                    if not DEBUG:
-                        cmd = executable
-                    raise DistutilsExecError(
-                          "command %r failed with exit status %d"
-                          % (cmd, exit_status))
-            elif os.WIFSTOPPED(status):
-                continue
-            else:
-                if not DEBUG:
-                    cmd = executable
-                raise DistutilsExecError(
-                      "unknown error executing %r: termination status %d"
-                      % (cmd, status))
+            cmd = cmd[0]
+        raise DistutilsExecError(
+            "command %r failed: %s" % (cmd, exc.args[-1])) from exc
+
+    if exitcode:
+        if not DEBUG:
+            cmd = cmd[0]
+        raise DistutilsExecError(
+              "command %r failed with exit code %s" % (cmd, exitcode))
+
 
 def find_executable(executable, path=None):
     """Tries to find 'executable' in the directories listed in 'path'.
diff --git a/common/py3-stdlib/distutils/sysconfig.py b/common/py3-stdlib/distutils/sysconfig.py
index b51629e..37feae5 100644
--- a/common/py3-stdlib/distutils/sysconfig.py
+++ b/common/py3-stdlib/distutils/sysconfig.py
@@ -15,7 +15,6 @@
 import sys
 
 from .errors import DistutilsPlatformError
-from .util import get_platform, get_host_platform
 
 # These are needed in a couple of spots, so just compute them once.
 PREFIX = os.path.normpath(sys.prefix)
@@ -146,8 +145,15 @@
             prefix = plat_specific and EXEC_PREFIX or PREFIX
 
     if os.name == "posix":
-        libpython = os.path.join(prefix,
-                                 "lib", "python" + get_python_version())
+        if plat_specific or standard_lib:
+            # Platform-specific modules (any module from a non-pure-Python
+            # module distribution) or standard Python library modules.
+            libdir = sys.platlibdir
+        else:
+            # Pure Python
+            libdir = "lib"
+        libpython = os.path.join(prefix, libdir,
+                                 "python" + get_python_version())
         if standard_lib:
             return libpython
         else:
diff --git a/common/py3-stdlib/distutils/tests/support.py b/common/py3-stdlib/distutils/tests/support.py
index 0413098..259af88 100644
--- a/common/py3-stdlib/distutils/tests/support.py
+++ b/common/py3-stdlib/distutils/tests/support.py
@@ -39,8 +39,6 @@
         self.logs.append((level, msg, args))
 
     def get_logs(self, *levels):
-        def _format(msg, args):
-            return msg % args
         return [msg % args for level, msg, args
                 in self.logs if level in levels]
 
diff --git a/common/py3-stdlib/distutils/tests/test_bdist_msi.py b/common/py3-stdlib/distutils/tests/test_bdist_msi.py
index 15d8bdf..418e60e 100644
--- a/common/py3-stdlib/distutils/tests/test_bdist_msi.py
+++ b/common/py3-stdlib/distutils/tests/test_bdist_msi.py
@@ -1,7 +1,7 @@
 """Tests for distutils.command.bdist_msi."""
 import sys
 import unittest
-from test.support import run_unittest
+from test.support import run_unittest, check_warnings
 from distutils.tests import support
 
 
@@ -14,7 +14,8 @@
         # minimal test XXX need more tests
         from distutils.command.bdist_msi import bdist_msi
         project_dir, dist = self.create_dist()
-        cmd = bdist_msi(dist)
+        with check_warnings(("", DeprecationWarning)):
+            cmd = bdist_msi(dist)
         cmd.ensure_finalized()
 
 
diff --git a/common/py3-stdlib/distutils/tests/test_build_clib.py b/common/py3-stdlib/distutils/tests/test_build_clib.py
index 85d0990..abd8313 100644
--- a/common/py3-stdlib/distutils/tests/test_build_clib.py
+++ b/common/py3-stdlib/distutils/tests/test_build_clib.py
@@ -8,7 +8,6 @@
 from distutils.command.build_clib import build_clib
 from distutils.errors import DistutilsSetupError
 from distutils.tests import support
-from distutils.spawn import find_executable
 
 class BuildCLibTestCase(support.TempdirManager,
                         support.LoggingSilencer,
diff --git a/common/py3-stdlib/distutils/tests/test_build_ext.py b/common/py3-stdlib/distutils/tests/test_build_ext.py
index 5e47e07..1b034c9 100644
--- a/common/py3-stdlib/distutils/tests/test_build_ext.py
+++ b/common/py3-stdlib/distutils/tests/test_build_ext.py
@@ -455,7 +455,7 @@
         deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
         if deptarget:
             # increment the minor version number (i.e. 10.6 -> 10.7)
-            deptarget = [int(x) for x in deptarget.split('.')]
+            deptarget = [int(x) for x in str(deptarget).split('.')]
             deptarget[-1] += 1
             deptarget = '.'.join(str(i) for i in deptarget)
             self._try_compile_deployment_target('<', deptarget)
@@ -488,16 +488,20 @@
 
         # get the deployment target that the interpreter was built with
         target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-        target = tuple(map(int, target.split('.')[0:2]))
+        target = tuple(map(int, str(target).split('.')[0:2]))
         # format the target value as defined in the Apple
         # Availability Macros.  We can't use the macro names since
         # at least one value we test with will not exist yet.
-        if target[1] < 10:
+        if target[:2] < (10, 10):
             # for 10.1 through 10.9.x -> "10n0"
             target = '%02d%01d0' % target
         else:
             # for 10.10 and beyond -> "10nn00"
-            target = '%02d%02d00' % target
+            if len(target) >= 2:
+                target = '%02d%02d00' % target
+            else:
+                # 11 and later can have no minor version (11 instead of 11.0)
+                target = '%02d0000' % target
         deptarget_ext = Extension(
             'deptarget',
             [deptarget_c],
diff --git a/common/py3-stdlib/distutils/tests/test_config_cmd.py b/common/py3-stdlib/distutils/tests/test_config_cmd.py
index 8bd2c94..9aeab07 100644
--- a/common/py3-stdlib/distutils/tests/test_config_cmd.py
+++ b/common/py3-stdlib/distutils/tests/test_config_cmd.py
@@ -39,7 +39,6 @@
 
     @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
     def test_search_cpp(self):
-        import shutil
         cmd = missing_compiler_executable(['preprocessor'])
         if cmd is not None:
             self.skipTest('The %r command is not found' % cmd)
diff --git a/common/py3-stdlib/distutils/tests/test_dist.py b/common/py3-stdlib/distutils/tests/test_dist.py
index cc34725..60956da 100644
--- a/common/py3-stdlib/distutils/tests/test_dist.py
+++ b/common/py3-stdlib/distutils/tests/test_dist.py
@@ -8,7 +8,7 @@
 
 from unittest import mock
 
-from distutils.dist import Distribution, fix_help_options, DistributionMetadata
+from distutils.dist import Distribution, fix_help_options
 from distutils.cmd import Command
 
 from test.support import (
diff --git a/common/py3-stdlib/distutils/tests/test_install.py b/common/py3-stdlib/distutils/tests/test_install.py
index 287ab19..51c80e0 100644
--- a/common/py3-stdlib/distutils/tests/test_install.py
+++ b/common/py3-stdlib/distutils/tests/test_install.py
@@ -58,7 +58,8 @@
 
         libdir = os.path.join(destination, "lib", "python")
         check_path(cmd.install_lib, libdir)
-        check_path(cmd.install_platlib, libdir)
+        platlibdir = os.path.join(destination, sys.platlibdir, "python")
+        check_path(cmd.install_platlib, platlibdir)
         check_path(cmd.install_purelib, libdir)
         check_path(cmd.install_headers,
                    os.path.join(destination, "include", "python", "foopkg"))
diff --git a/common/py3-stdlib/distutils/tests/test_spawn.py b/common/py3-stdlib/distutils/tests/test_spawn.py
index f9ae69e..ad50381 100644
--- a/common/py3-stdlib/distutils/tests/test_spawn.py
+++ b/common/py3-stdlib/distutils/tests/test_spawn.py
@@ -2,13 +2,11 @@
 import os
 import stat
 import sys
-import unittest
-from unittest import mock
+import unittest.mock
 from test.support import run_unittest, unix_shell
 from test import support as test_support
 
 from distutils.spawn import find_executable
-from distutils.spawn import _nt_quote_args
 from distutils.spawn import spawn
 from distutils.errors import DistutilsExecError
 from distutils.tests import support
@@ -17,16 +15,6 @@
                     support.LoggingSilencer,
                     unittest.TestCase):
 
-    def test_nt_quote_args(self):
-
-        for (args, wanted) in ((['with space', 'nospace'],
-                                ['"with space"', 'nospace']),
-                               (['nochange', 'nospace'],
-                                ['nochange', 'nospace'])):
-            res = _nt_quote_args(args)
-            self.assertEqual(res, wanted)
-
-
     @unittest.skipUnless(os.name in ('nt', 'posix'),
                          'Runs only under posix or nt')
     def test_spawn(self):
@@ -136,6 +124,11 @@
                     rv = find_executable(program)
                     self.assertEqual(rv, filename)
 
+    def test_spawn_missing_exe(self):
+        with self.assertRaises(DistutilsExecError) as ctx:
+            spawn(['does-not-exist'])
+        self.assertIn("command 'does-not-exist' failed", str(ctx.exception))
+
 
 def test_suite():
     return unittest.makeSuite(SpawnTestCase)
diff --git a/common/py3-stdlib/distutils/tests/test_upload.py b/common/py3-stdlib/distutils/tests/test_upload.py
index c17d8e7..bca5516 100644
--- a/common/py3-stdlib/distutils/tests/test_upload.py
+++ b/common/py3-stdlib/distutils/tests/test_upload.py
@@ -130,14 +130,30 @@
 
         # what did we send ?
         headers = dict(self.last_open.req.headers)
-        self.assertEqual(headers['Content-length'], '2162')
+        self.assertGreaterEqual(int(headers['Content-length']), 2162)
         content_type = headers['Content-type']
         self.assertTrue(content_type.startswith('multipart/form-data'))
         self.assertEqual(self.last_open.req.get_method(), 'POST')
         expected_url = 'https://upload.pypi.org/legacy/'
         self.assertEqual(self.last_open.req.get_full_url(), expected_url)
-        self.assertTrue(b'xxx' in self.last_open.req.data)
-        self.assertIn(b'protocol_version', self.last_open.req.data)
+        data = self.last_open.req.data
+        self.assertIn(b'xxx', data)
+        self.assertIn(b'protocol_version', data)
+        self.assertIn(b'sha256_digest', data)
+        self.assertIn(
+            b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf'
+            b'6860',
+            data
+        )
+        if b'md5_digest' in data:
+            self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data)
+        if b'blake2_256_digest' in data:
+            self.assertIn(
+                b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be'
+                b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc'
+                b'ce443f1534330a',
+                data
+            )
 
         # The PyPI response body was echoed
         results = self.get_logs(INFO)
@@ -166,7 +182,7 @@
         cmd.run()
 
         headers = dict(self.last_open.req.headers)
-        self.assertEqual(headers['Content-length'], '2172')
+        self.assertGreaterEqual(int(headers['Content-length']), 2172)
         self.assertIn(b'long description\r', self.last_open.req.data)
 
     def test_upload_fails(self):
diff --git a/common/py3-stdlib/distutils/tests/test_version.py b/common/py3-stdlib/distutils/tests/test_version.py
index 15f14c7..8671cd2 100644
--- a/common/py3-stdlib/distutils/tests/test_version.py
+++ b/common/py3-stdlib/distutils/tests/test_version.py
@@ -45,6 +45,14 @@
             self.assertEqual(res, wanted,
                              'cmp(%s, %s) should be %s, got %s' %
                              (v1, v2, wanted, res))
+            res = StrictVersion(v1)._cmp(v2)
+            self.assertEqual(res, wanted,
+                             'cmp(%s, %s) should be %s, got %s' %
+                             (v1, v2, wanted, res))
+            res = StrictVersion(v1)._cmp(object())
+            self.assertIs(res, NotImplemented,
+                          'cmp(%s, %s) should be NotImplemented, got %s' %
+                          (v1, v2, res))
 
 
     def test_cmp(self):
@@ -63,6 +71,14 @@
             self.assertEqual(res, wanted,
                              'cmp(%s, %s) should be %s, got %s' %
                              (v1, v2, wanted, res))
+            res = LooseVersion(v1)._cmp(v2)
+            self.assertEqual(res, wanted,
+                             'cmp(%s, %s) should be %s, got %s' %
+                             (v1, v2, wanted, res))
+            res = LooseVersion(v1)._cmp(object())
+            self.assertIs(res, NotImplemented,
+                          'cmp(%s, %s) should be NotImplemented, got %s' %
+                          (v1, v2, res))
 
 def test_suite():
     return unittest.makeSuite(VersionTestCase)
diff --git a/common/py3-stdlib/distutils/unixccompiler.py b/common/py3-stdlib/distutils/unixccompiler.py
index 4d7a6de..f0792de 100644
--- a/common/py3-stdlib/distutils/unixccompiler.py
+++ b/common/py3-stdlib/distutils/unixccompiler.py
@@ -290,7 +290,7 @@
             cflags = sysconfig.get_config_var('CFLAGS')
             m = re.search(r'-isysroot\s*(\S+)', cflags)
             if m is None:
-                sysroot = '/'
+                sysroot = _osx_support._default_sysroot(sysconfig.get_config_var('CC'))
             else:
                 sysroot = m.group(1)
 
diff --git a/common/py3-stdlib/distutils/util.py b/common/py3-stdlib/distutils/util.py
index 17a94bc..4b002ec 100644
--- a/common/py3-stdlib/distutils/util.py
+++ b/common/py3-stdlib/distutils/util.py
@@ -79,7 +79,8 @@
             machine += ".%s" % bitness[sys.maxsize]
         # fall through to standard osname-release-machine representation
     elif osname[:3] == "aix":
-        return "%s-%s.%s" % (osname, version, release)
+        from _aix_support import aix_platform
+        return aix_platform()
     elif osname[:6] == "cygwin":
         osname = "cygwin"
         rel_re = re.compile (r'[\d.]+', re.ASCII)
diff --git a/common/py3-stdlib/distutils/version.py b/common/py3-stdlib/distutils/version.py
index af14cc1..c33beba 100644
--- a/common/py3-stdlib/distutils/version.py
+++ b/common/py3-stdlib/distutils/version.py
@@ -166,6 +166,8 @@
     def _cmp (self, other):
         if isinstance(other, str):
             other = StrictVersion(other)
+        elif not isinstance(other, StrictVersion):
+            return NotImplemented
 
         if self.version != other.version:
             # numeric versions don't match
@@ -331,6 +333,8 @@
     def _cmp (self, other):
         if isinstance(other, str):
             other = LooseVersion(other)
+        elif not isinstance(other, LooseVersion):
+            return NotImplemented
 
         if self.version == other.version:
             return 0
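
Returning NotImplemented from _cmp lets the rich-comparison machinery fall
back gracefully when a version is compared against an unrelated type,
instead of raising from inside the helper. For example:

    from distutils.version import LooseVersion

    print(LooseVersion("1.2") == "1.2")      # True: the str is promoted
    print(LooseVersion("1.2") == object())   # False instead of a TypeError
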
diff --git a/common/py3-stdlib/doctest.py b/common/py3-stdlib/doctest.py
index ee71984..baa503c 100644
--- a/common/py3-stdlib/doctest.py
+++ b/common/py3-stdlib/doctest.py
@@ -1334,7 +1334,7 @@
             try:
                 # Don't blink!  This is where the user's code gets run.
                 exec(compile(example.source, filename, "single",
-                             compileflags, 1), test.globs)
+                             compileflags, True), test.globs)
                 self.debugger.set_continue() # ==== Example Finished ====
                 exception = None
             except KeyboardInterrupt:
diff --git a/common/py3-stdlib/dummy_threading.py b/common/py3-stdlib/dummy_threading.py
deleted file mode 100644
index 1bb7eee..0000000
--- a/common/py3-stdlib/dummy_threading.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""Faux ``threading`` version using ``dummy_thread`` instead of ``thread``.
-
-The module ``_dummy_threading`` is added to ``sys.modules`` in order
-to not have ``threading`` considered imported.  Had ``threading`` been
-directly imported it would have made all subsequent imports succeed
-regardless of whether ``_thread`` was available which is not desired.
-
-"""
-from sys import modules as sys_modules
-
-import _dummy_thread
-
-# Declaring now so as to not have to nest ``try``s to get proper clean-up.
-holding_thread = False
-holding_threading = False
-holding__threading_local = False
-
-try:
-    # Could have checked if ``_thread`` was not in sys.modules and gone
-    # a different route, but decided to mirror technique used with
-    # ``threading`` below.
-    if '_thread' in sys_modules:
-        held_thread = sys_modules['_thread']
-        holding_thread = True
-    # Must have some module named ``_thread`` that implements its API
-    # in order to initially import ``threading``.
-    sys_modules['_thread'] = sys_modules['_dummy_thread']
-
-    if 'threading' in sys_modules:
-        # If ``threading`` is already imported, might as well prevent
-        # trying to import it more than needed by saving it if it is
-        # already imported before deleting it.
-        held_threading = sys_modules['threading']
-        holding_threading = True
-        del sys_modules['threading']
-
-    if '_threading_local' in sys_modules:
-        # If ``_threading_local`` is already imported, might as well prevent
-        # trying to import it more than needed by saving it if it is
-        # already imported before deleting it.
-        held__threading_local = sys_modules['_threading_local']
-        holding__threading_local = True
-        del sys_modules['_threading_local']
-
-    import threading
-    # Need a copy of the code kept somewhere...
-    sys_modules['_dummy_threading'] = sys_modules['threading']
-    del sys_modules['threading']
-    sys_modules['_dummy__threading_local'] = sys_modules['_threading_local']
-    del sys_modules['_threading_local']
-    from _dummy_threading import *
-    from _dummy_threading import __all__
-
-finally:
-    # Put back ``threading`` if we overwrote earlier
-
-    if holding_threading:
-        sys_modules['threading'] = held_threading
-        del held_threading
-    del holding_threading
-
-    # Put back ``_threading_local`` if we overwrote earlier
-
-    if holding__threading_local:
-        sys_modules['_threading_local'] = held__threading_local
-        del held__threading_local
-    del holding__threading_local
-
-    # Put back ``thread`` if we overwrote, else del the entry we made
-    if holding_thread:
-        sys_modules['_thread'] = held_thread
-        del held_thread
-    else:
-        del sys_modules['_thread']
-    del holding_thread
-
-    del _dummy_thread
-    del sys_modules
diff --git a/common/py3-stdlib/email/generator.py b/common/py3-stdlib/email/generator.py
index ae670c2..c9b1216 100644
--- a/common/py3-stdlib/email/generator.py
+++ b/common/py3-stdlib/email/generator.py
@@ -186,7 +186,11 @@
         # If we munged the cte, copy the message again and re-fix the CTE.
         if munge_cte:
             msg = deepcopy(msg)
-            msg.replace_header('content-transfer-encoding', munge_cte[0])
+            # Preserve the header order if the CTE header already exists.
+            if msg.get('content-transfer-encoding') is None:
+                msg['Content-Transfer-Encoding'] = munge_cte[0]
+            else:
+                msg.replace_header('content-transfer-encoding', munge_cte[0])
             msg.replace_header('content-type', munge_cte[1])
         # Write the headers.  First we see if the message object wants to
         # handle that itself.  If not, we'll do it generically.
diff --git a/common/py3-stdlib/email/headerregistry.py b/common/py3-stdlib/email/headerregistry.py
index d0914fd..5d84fc0 100644
--- a/common/py3-stdlib/email/headerregistry.py
+++ b/common/py3-stdlib/email/headerregistry.py
@@ -74,11 +74,9 @@
         """The addr_spec (username@domain) portion of the address, quoted
         according to RFC 5322 rules, but with no Content Transfer Encoding.
         """
-        nameset = set(self.username)
-        if len(nameset) > len(nameset-parser.DOT_ATOM_ENDS):
-            lp = parser.quote_string(self.username)
-        else:
-            lp = self.username
+        lp = self.username
+        if not parser.DOT_ATOM_ENDS.isdisjoint(lp):
+            lp = parser.quote_string(lp)
         if self.domain:
             return lp + '@' + self.domain
         if not lp:
@@ -91,19 +89,17 @@
                         self.display_name, self.username, self.domain)
 
     def __str__(self):
-        nameset = set(self.display_name)
-        if len(nameset) > len(nameset-parser.SPECIALS):
-            disp = parser.quote_string(self.display_name)
-        else:
-            disp = self.display_name
+        disp = self.display_name
+        if not parser.SPECIALS.isdisjoint(disp):
+            disp = parser.quote_string(disp)
         if disp:
             addr_spec = '' if self.addr_spec=='<>' else self.addr_spec
             return "{} <{}>".format(disp, addr_spec)
         return self.addr_spec
 
     def __eq__(self, other):
-        if type(other) != type(self):
-            return False
+        if not isinstance(other, Address):
+            return NotImplemented
         return (self.display_name == other.display_name and
                 self.username == other.username and
                 self.domain == other.domain)
@@ -146,17 +142,15 @@
         if self.display_name is None and len(self.addresses)==1:
             return str(self.addresses[0])
         disp = self.display_name
-        if disp is not None:
-            nameset = set(disp)
-            if len(nameset) > len(nameset-parser.SPECIALS):
-                disp = parser.quote_string(disp)
+        if disp is not None and not parser.SPECIALS.isdisjoint(disp):
+            disp = parser.quote_string(disp)
         adrstr = ", ".join(str(x) for x in self.addresses)
         adrstr = ' ' + adrstr if adrstr else adrstr
         return "{}:{};".format(disp, adrstr)
 
     def __eq__(self, other):
-        if type(other) != type(self):
-            return False
+        if not isinstance(other, Group):
+            return NotImplemented
         return (self.display_name == other.display_name and
                 self.addresses == other.addresses)
 
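
The isdisjoint rewrites above replace the old build-a-set-and-compare-sizes
dance with a direct overlap test, and the reworked __eq__ methods return
NotImplemented so comparisons against foreign types defer to the other
operand. A sketch of the quoting test (the SPECIALS value here is an
illustrative stand-in, not the parser's exact set):

    SPECIALS = set('()<>@,:;.\\"[]')

    def needs_quoting(display_name):
        # quote only when the name shares a character with the specials
        return not SPECIALS.isdisjoint(display_name)

    print(needs_quoting("Ada Lovelace"))    # False
    print(needs_quoting("Lovelace, Ada"))   # True: ',' is special
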
diff --git a/common/py3-stdlib/email/message.py b/common/py3-stdlib/email/message.py
index 1262602..3701b30 100644
--- a/common/py3-stdlib/email/message.py
+++ b/common/py3-stdlib/email/message.py
@@ -141,7 +141,7 @@
         header.  For backward compatibility reasons, if maxheaderlen is
         not specified it defaults to 0, so you must override it explicitly
         if you want a different maxheaderlen.  'policy' is passed to the
-        Generator instance used to serialize the mesasge; if it is not
+        Generator instance used to serialize the message; if it is not
         specified the policy associated with the message instance is used.
 
         If the message object contains binary data that is not encoded
@@ -958,7 +958,7 @@
         header.  maxheaderlen is retained for backward compatibility with the
         base Message class, but defaults to None, meaning that the policy value
         for max_line_length controls the header maximum length.  'policy' is
-        passed to the Generator instance used to serialize the mesasge; if it
+        passed to the Generator instance used to serialize the message; if it
         is not specified the policy associated with the message instance is
         used.
         """
diff --git a/common/py3-stdlib/email/utils.py b/common/py3-stdlib/email/utils.py
index 07dd029..1a7719d 100644
--- a/common/py3-stdlib/email/utils.py
+++ b/common/py3-stdlib/email/utils.py
@@ -259,21 +259,13 @@
 
     params is a sequence of 2-tuples containing (param name, string value).
     """
-    # Copy params so we don't mess with the original
-    params = params[:]
-    new_params = []
+    new_params = [params[0]]
     # Map parameter's name to a list of continuations.  The values are a
     # 3-tuple of the continuation number, the string value, and a flag
     # specifying whether a particular segment is %-encoded.
     rfc2231_params = {}
-    name, value = params.pop(0)
-    new_params.append((name, value))
-    while params:
-        name, value = params.pop(0)
-        if name.endswith('*'):
-            encoded = True
-        else:
-            encoded = False
+    for name, value in params[1:]:
+        encoded = name.endswith('*')
         value = unquote(value)
         mo = rfc2231_continuation.match(name)
         if mo:
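The rewrite above only changes the loop's shape; a small sketch with a
hypothetical params list showing the simplified iteration:

    # Hypothetical input; the first tuple is the content type itself.
    params = [('form-data', ''), ('name', '"field"'),
              ("filename*", "utf-8''caf%C3%A9")]
    new_params = [params[0]]
    for name, value in params[1:]:
        encoded = name.endswith('*')   # replaces the old four-line if/else
        new_params.append((name, value, encoded))
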
diff --git a/common/py3-stdlib/encodings/aliases.py b/common/py3-stdlib/encodings/aliases.py
index 2444f9f..d85afd6 100644
--- a/common/py3-stdlib/encodings/aliases.py
+++ b/common/py3-stdlib/encodings/aliases.py
@@ -450,6 +450,7 @@
 
     # mac_latin2 codec
     'maccentraleurope'   : 'mac_latin2',
+    'mac_centeuro'       : 'mac_latin2',
     'maclatin2'          : 'mac_latin2',
 
     # mac_roman codec
@@ -493,9 +494,6 @@
     'sjisx0213'          : 'shift_jisx0213',
     's_jisx0213'         : 'shift_jisx0213',
 
-    # tactis codec
-    'tis260'             : 'tactis',
-
     # tis_620 codec
     'tis620'             : 'tis_620',
     'tis_620_0'          : 'tis_620',
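With the alias added above, the legacy codec name keeps resolving even
though encodings/mac_centeuro.py itself is deleted just below (mac_latin2
provides an equivalent table); the removed 'tis260' alias pointed at a
'tactis' codec that does not exist. A quick check on Python 3.9+:

    import codecs

    print(codecs.lookup('mac_centeuro').name)   # 'mac-latin2'
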
diff --git a/common/py3-stdlib/encodings/mac_centeuro.py b/common/py3-stdlib/encodings/mac_centeuro.py
deleted file mode 100644
index 5785a0e..0000000
--- a/common/py3-stdlib/encodings/mac_centeuro.py
+++ /dev/null
@@ -1,307 +0,0 @@
-""" Python Character Mapping Codec mac_centeuro generated from 'MAPPINGS/VENDORS/APPLE/CENTEURO.TXT' with gencodec.py.
-
-"""#"
-
-import codecs
-
-### Codec APIs
-
-class Codec(codecs.Codec):
-
-    def encode(self,input,errors='strict'):
-        return codecs.charmap_encode(input,errors,encoding_table)
-
-    def decode(self,input,errors='strict'):
-        return codecs.charmap_decode(input,errors,decoding_table)
-
-class IncrementalEncoder(codecs.IncrementalEncoder):
-    def encode(self, input, final=False):
-        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
-
-class IncrementalDecoder(codecs.IncrementalDecoder):
-    def decode(self, input, final=False):
-        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
-
-class StreamWriter(Codec,codecs.StreamWriter):
-    pass
-
-class StreamReader(Codec,codecs.StreamReader):
-    pass
-
-### encodings module API
-
-def getregentry():
-    return codecs.CodecInfo(
-        name='mac-centeuro',
-        encode=Codec().encode,
-        decode=Codec().decode,
-        incrementalencoder=IncrementalEncoder,
-        incrementaldecoder=IncrementalDecoder,
-        streamreader=StreamReader,
-        streamwriter=StreamWriter,
-    )
-
-
-### Decoding Table
-
-decoding_table = (
-    '\x00'     #  0x00 -> CONTROL CHARACTER
-    '\x01'     #  0x01 -> CONTROL CHARACTER
-    '\x02'     #  0x02 -> CONTROL CHARACTER
-    '\x03'     #  0x03 -> CONTROL CHARACTER
-    '\x04'     #  0x04 -> CONTROL CHARACTER
-    '\x05'     #  0x05 -> CONTROL CHARACTER
-    '\x06'     #  0x06 -> CONTROL CHARACTER
-    '\x07'     #  0x07 -> CONTROL CHARACTER
-    '\x08'     #  0x08 -> CONTROL CHARACTER
-    '\t'       #  0x09 -> CONTROL CHARACTER
-    '\n'       #  0x0A -> CONTROL CHARACTER
-    '\x0b'     #  0x0B -> CONTROL CHARACTER
-    '\x0c'     #  0x0C -> CONTROL CHARACTER
-    '\r'       #  0x0D -> CONTROL CHARACTER
-    '\x0e'     #  0x0E -> CONTROL CHARACTER
-    '\x0f'     #  0x0F -> CONTROL CHARACTER
-    '\x10'     #  0x10 -> CONTROL CHARACTER
-    '\x11'     #  0x11 -> CONTROL CHARACTER
-    '\x12'     #  0x12 -> CONTROL CHARACTER
-    '\x13'     #  0x13 -> CONTROL CHARACTER
-    '\x14'     #  0x14 -> CONTROL CHARACTER
-    '\x15'     #  0x15 -> CONTROL CHARACTER
-    '\x16'     #  0x16 -> CONTROL CHARACTER
-    '\x17'     #  0x17 -> CONTROL CHARACTER
-    '\x18'     #  0x18 -> CONTROL CHARACTER
-    '\x19'     #  0x19 -> CONTROL CHARACTER
-    '\x1a'     #  0x1A -> CONTROL CHARACTER
-    '\x1b'     #  0x1B -> CONTROL CHARACTER
-    '\x1c'     #  0x1C -> CONTROL CHARACTER
-    '\x1d'     #  0x1D -> CONTROL CHARACTER
-    '\x1e'     #  0x1E -> CONTROL CHARACTER
-    '\x1f'     #  0x1F -> CONTROL CHARACTER
-    ' '        #  0x20 -> SPACE
-    '!'        #  0x21 -> EXCLAMATION MARK
-    '"'        #  0x22 -> QUOTATION MARK
-    '#'        #  0x23 -> NUMBER SIGN
-    '$'        #  0x24 -> DOLLAR SIGN
-    '%'        #  0x25 -> PERCENT SIGN
-    '&'        #  0x26 -> AMPERSAND
-    "'"        #  0x27 -> APOSTROPHE
-    '('        #  0x28 -> LEFT PARENTHESIS
-    ')'        #  0x29 -> RIGHT PARENTHESIS
-    '*'        #  0x2A -> ASTERISK
-    '+'        #  0x2B -> PLUS SIGN
-    ','        #  0x2C -> COMMA
-    '-'        #  0x2D -> HYPHEN-MINUS
-    '.'        #  0x2E -> FULL STOP
-    '/'        #  0x2F -> SOLIDUS
-    '0'        #  0x30 -> DIGIT ZERO
-    '1'        #  0x31 -> DIGIT ONE
-    '2'        #  0x32 -> DIGIT TWO
-    '3'        #  0x33 -> DIGIT THREE
-    '4'        #  0x34 -> DIGIT FOUR
-    '5'        #  0x35 -> DIGIT FIVE
-    '6'        #  0x36 -> DIGIT SIX
-    '7'        #  0x37 -> DIGIT SEVEN
-    '8'        #  0x38 -> DIGIT EIGHT
-    '9'        #  0x39 -> DIGIT NINE
-    ':'        #  0x3A -> COLON
-    ';'        #  0x3B -> SEMICOLON
-    '<'        #  0x3C -> LESS-THAN SIGN
-    '='        #  0x3D -> EQUALS SIGN
-    '>'        #  0x3E -> GREATER-THAN SIGN
-    '?'        #  0x3F -> QUESTION MARK
-    '@'        #  0x40 -> COMMERCIAL AT
-    'A'        #  0x41 -> LATIN CAPITAL LETTER A
-    'B'        #  0x42 -> LATIN CAPITAL LETTER B
-    'C'        #  0x43 -> LATIN CAPITAL LETTER C
-    'D'        #  0x44 -> LATIN CAPITAL LETTER D
-    'E'        #  0x45 -> LATIN CAPITAL LETTER E
-    'F'        #  0x46 -> LATIN CAPITAL LETTER F
-    'G'        #  0x47 -> LATIN CAPITAL LETTER G
-    'H'        #  0x48 -> LATIN CAPITAL LETTER H
-    'I'        #  0x49 -> LATIN CAPITAL LETTER I
-    'J'        #  0x4A -> LATIN CAPITAL LETTER J
-    'K'        #  0x4B -> LATIN CAPITAL LETTER K
-    'L'        #  0x4C -> LATIN CAPITAL LETTER L
-    'M'        #  0x4D -> LATIN CAPITAL LETTER M
-    'N'        #  0x4E -> LATIN CAPITAL LETTER N
-    'O'        #  0x4F -> LATIN CAPITAL LETTER O
-    'P'        #  0x50 -> LATIN CAPITAL LETTER P
-    'Q'        #  0x51 -> LATIN CAPITAL LETTER Q
-    'R'        #  0x52 -> LATIN CAPITAL LETTER R
-    'S'        #  0x53 -> LATIN CAPITAL LETTER S
-    'T'        #  0x54 -> LATIN CAPITAL LETTER T
-    'U'        #  0x55 -> LATIN CAPITAL LETTER U
-    'V'        #  0x56 -> LATIN CAPITAL LETTER V
-    'W'        #  0x57 -> LATIN CAPITAL LETTER W
-    'X'        #  0x58 -> LATIN CAPITAL LETTER X
-    'Y'        #  0x59 -> LATIN CAPITAL LETTER Y
-    'Z'        #  0x5A -> LATIN CAPITAL LETTER Z
-    '['        #  0x5B -> LEFT SQUARE BRACKET
-    '\\'       #  0x5C -> REVERSE SOLIDUS
-    ']'        #  0x5D -> RIGHT SQUARE BRACKET
-    '^'        #  0x5E -> CIRCUMFLEX ACCENT
-    '_'        #  0x5F -> LOW LINE
-    '`'        #  0x60 -> GRAVE ACCENT
-    'a'        #  0x61 -> LATIN SMALL LETTER A
-    'b'        #  0x62 -> LATIN SMALL LETTER B
-    'c'        #  0x63 -> LATIN SMALL LETTER C
-    'd'        #  0x64 -> LATIN SMALL LETTER D
-    'e'        #  0x65 -> LATIN SMALL LETTER E
-    'f'        #  0x66 -> LATIN SMALL LETTER F
-    'g'        #  0x67 -> LATIN SMALL LETTER G
-    'h'        #  0x68 -> LATIN SMALL LETTER H
-    'i'        #  0x69 -> LATIN SMALL LETTER I
-    'j'        #  0x6A -> LATIN SMALL LETTER J
-    'k'        #  0x6B -> LATIN SMALL LETTER K
-    'l'        #  0x6C -> LATIN SMALL LETTER L
-    'm'        #  0x6D -> LATIN SMALL LETTER M
-    'n'        #  0x6E -> LATIN SMALL LETTER N
-    'o'        #  0x6F -> LATIN SMALL LETTER O
-    'p'        #  0x70 -> LATIN SMALL LETTER P
-    'q'        #  0x71 -> LATIN SMALL LETTER Q
-    'r'        #  0x72 -> LATIN SMALL LETTER R
-    's'        #  0x73 -> LATIN SMALL LETTER S
-    't'        #  0x74 -> LATIN SMALL LETTER T
-    'u'        #  0x75 -> LATIN SMALL LETTER U
-    'v'        #  0x76 -> LATIN SMALL LETTER V
-    'w'        #  0x77 -> LATIN SMALL LETTER W
-    'x'        #  0x78 -> LATIN SMALL LETTER X
-    'y'        #  0x79 -> LATIN SMALL LETTER Y
-    'z'        #  0x7A -> LATIN SMALL LETTER Z
-    '{'        #  0x7B -> LEFT CURLY BRACKET
-    '|'        #  0x7C -> VERTICAL LINE
-    '}'        #  0x7D -> RIGHT CURLY BRACKET
-    '~'        #  0x7E -> TILDE
-    '\x7f'     #  0x7F -> CONTROL CHARACTER
-    '\xc4'     #  0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
-    '\u0100'   #  0x81 -> LATIN CAPITAL LETTER A WITH MACRON
-    '\u0101'   #  0x82 -> LATIN SMALL LETTER A WITH MACRON
-    '\xc9'     #  0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
-    '\u0104'   #  0x84 -> LATIN CAPITAL LETTER A WITH OGONEK
-    '\xd6'     #  0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
-    '\xdc'     #  0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
-    '\xe1'     #  0x87 -> LATIN SMALL LETTER A WITH ACUTE
-    '\u0105'   #  0x88 -> LATIN SMALL LETTER A WITH OGONEK
-    '\u010c'   #  0x89 -> LATIN CAPITAL LETTER C WITH CARON
-    '\xe4'     #  0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
-    '\u010d'   #  0x8B -> LATIN SMALL LETTER C WITH CARON
-    '\u0106'   #  0x8C -> LATIN CAPITAL LETTER C WITH ACUTE
-    '\u0107'   #  0x8D -> LATIN SMALL LETTER C WITH ACUTE
-    '\xe9'     #  0x8E -> LATIN SMALL LETTER E WITH ACUTE
-    '\u0179'   #  0x8F -> LATIN CAPITAL LETTER Z WITH ACUTE
-    '\u017a'   #  0x90 -> LATIN SMALL LETTER Z WITH ACUTE
-    '\u010e'   #  0x91 -> LATIN CAPITAL LETTER D WITH CARON
-    '\xed'     #  0x92 -> LATIN SMALL LETTER I WITH ACUTE
-    '\u010f'   #  0x93 -> LATIN SMALL LETTER D WITH CARON
-    '\u0112'   #  0x94 -> LATIN CAPITAL LETTER E WITH MACRON
-    '\u0113'   #  0x95 -> LATIN SMALL LETTER E WITH MACRON
-    '\u0116'   #  0x96 -> LATIN CAPITAL LETTER E WITH DOT ABOVE
-    '\xf3'     #  0x97 -> LATIN SMALL LETTER O WITH ACUTE
-    '\u0117'   #  0x98 -> LATIN SMALL LETTER E WITH DOT ABOVE
-    '\xf4'     #  0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
-    '\xf6'     #  0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
-    '\xf5'     #  0x9B -> LATIN SMALL LETTER O WITH TILDE
-    '\xfa'     #  0x9C -> LATIN SMALL LETTER U WITH ACUTE
-    '\u011a'   #  0x9D -> LATIN CAPITAL LETTER E WITH CARON
-    '\u011b'   #  0x9E -> LATIN SMALL LETTER E WITH CARON
-    '\xfc'     #  0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
-    '\u2020'   #  0xA0 -> DAGGER
-    '\xb0'     #  0xA1 -> DEGREE SIGN
-    '\u0118'   #  0xA2 -> LATIN CAPITAL LETTER E WITH OGONEK
-    '\xa3'     #  0xA3 -> POUND SIGN
-    '\xa7'     #  0xA4 -> SECTION SIGN
-    '\u2022'   #  0xA5 -> BULLET
-    '\xb6'     #  0xA6 -> PILCROW SIGN
-    '\xdf'     #  0xA7 -> LATIN SMALL LETTER SHARP S
-    '\xae'     #  0xA8 -> REGISTERED SIGN
-    '\xa9'     #  0xA9 -> COPYRIGHT SIGN
-    '\u2122'   #  0xAA -> TRADE MARK SIGN
-    '\u0119'   #  0xAB -> LATIN SMALL LETTER E WITH OGONEK
-    '\xa8'     #  0xAC -> DIAERESIS
-    '\u2260'   #  0xAD -> NOT EQUAL TO
-    '\u0123'   #  0xAE -> LATIN SMALL LETTER G WITH CEDILLA
-    '\u012e'   #  0xAF -> LATIN CAPITAL LETTER I WITH OGONEK
-    '\u012f'   #  0xB0 -> LATIN SMALL LETTER I WITH OGONEK
-    '\u012a'   #  0xB1 -> LATIN CAPITAL LETTER I WITH MACRON
-    '\u2264'   #  0xB2 -> LESS-THAN OR EQUAL TO
-    '\u2265'   #  0xB3 -> GREATER-THAN OR EQUAL TO
-    '\u012b'   #  0xB4 -> LATIN SMALL LETTER I WITH MACRON
-    '\u0136'   #  0xB5 -> LATIN CAPITAL LETTER K WITH CEDILLA
-    '\u2202'   #  0xB6 -> PARTIAL DIFFERENTIAL
-    '\u2211'   #  0xB7 -> N-ARY SUMMATION
-    '\u0142'   #  0xB8 -> LATIN SMALL LETTER L WITH STROKE
-    '\u013b'   #  0xB9 -> LATIN CAPITAL LETTER L WITH CEDILLA
-    '\u013c'   #  0xBA -> LATIN SMALL LETTER L WITH CEDILLA
-    '\u013d'   #  0xBB -> LATIN CAPITAL LETTER L WITH CARON
-    '\u013e'   #  0xBC -> LATIN SMALL LETTER L WITH CARON
-    '\u0139'   #  0xBD -> LATIN CAPITAL LETTER L WITH ACUTE
-    '\u013a'   #  0xBE -> LATIN SMALL LETTER L WITH ACUTE
-    '\u0145'   #  0xBF -> LATIN CAPITAL LETTER N WITH CEDILLA
-    '\u0146'   #  0xC0 -> LATIN SMALL LETTER N WITH CEDILLA
-    '\u0143'   #  0xC1 -> LATIN CAPITAL LETTER N WITH ACUTE
-    '\xac'     #  0xC2 -> NOT SIGN
-    '\u221a'   #  0xC3 -> SQUARE ROOT
-    '\u0144'   #  0xC4 -> LATIN SMALL LETTER N WITH ACUTE
-    '\u0147'   #  0xC5 -> LATIN CAPITAL LETTER N WITH CARON
-    '\u2206'   #  0xC6 -> INCREMENT
-    '\xab'     #  0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
-    '\xbb'     #  0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
-    '\u2026'   #  0xC9 -> HORIZONTAL ELLIPSIS
-    '\xa0'     #  0xCA -> NO-BREAK SPACE
-    '\u0148'   #  0xCB -> LATIN SMALL LETTER N WITH CARON
-    '\u0150'   #  0xCC -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
-    '\xd5'     #  0xCD -> LATIN CAPITAL LETTER O WITH TILDE
-    '\u0151'   #  0xCE -> LATIN SMALL LETTER O WITH DOUBLE ACUTE
-    '\u014c'   #  0xCF -> LATIN CAPITAL LETTER O WITH MACRON
-    '\u2013'   #  0xD0 -> EN DASH
-    '\u2014'   #  0xD1 -> EM DASH
-    '\u201c'   #  0xD2 -> LEFT DOUBLE QUOTATION MARK
-    '\u201d'   #  0xD3 -> RIGHT DOUBLE QUOTATION MARK
-    '\u2018'   #  0xD4 -> LEFT SINGLE QUOTATION MARK
-    '\u2019'   #  0xD5 -> RIGHT SINGLE QUOTATION MARK
-    '\xf7'     #  0xD6 -> DIVISION SIGN
-    '\u25ca'   #  0xD7 -> LOZENGE
-    '\u014d'   #  0xD8 -> LATIN SMALL LETTER O WITH MACRON
-    '\u0154'   #  0xD9 -> LATIN CAPITAL LETTER R WITH ACUTE
-    '\u0155'   #  0xDA -> LATIN SMALL LETTER R WITH ACUTE
-    '\u0158'   #  0xDB -> LATIN CAPITAL LETTER R WITH CARON
-    '\u2039'   #  0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
-    '\u203a'   #  0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
-    '\u0159'   #  0xDE -> LATIN SMALL LETTER R WITH CARON
-    '\u0156'   #  0xDF -> LATIN CAPITAL LETTER R WITH CEDILLA
-    '\u0157'   #  0xE0 -> LATIN SMALL LETTER R WITH CEDILLA
-    '\u0160'   #  0xE1 -> LATIN CAPITAL LETTER S WITH CARON
-    '\u201a'   #  0xE2 -> SINGLE LOW-9 QUOTATION MARK
-    '\u201e'   #  0xE3 -> DOUBLE LOW-9 QUOTATION MARK
-    '\u0161'   #  0xE4 -> LATIN SMALL LETTER S WITH CARON
-    '\u015a'   #  0xE5 -> LATIN CAPITAL LETTER S WITH ACUTE
-    '\u015b'   #  0xE6 -> LATIN SMALL LETTER S WITH ACUTE
-    '\xc1'     #  0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
-    '\u0164'   #  0xE8 -> LATIN CAPITAL LETTER T WITH CARON
-    '\u0165'   #  0xE9 -> LATIN SMALL LETTER T WITH CARON
-    '\xcd'     #  0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
-    '\u017d'   #  0xEB -> LATIN CAPITAL LETTER Z WITH CARON
-    '\u017e'   #  0xEC -> LATIN SMALL LETTER Z WITH CARON
-    '\u016a'   #  0xED -> LATIN CAPITAL LETTER U WITH MACRON
-    '\xd3'     #  0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
-    '\xd4'     #  0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
-    '\u016b'   #  0xF0 -> LATIN SMALL LETTER U WITH MACRON
-    '\u016e'   #  0xF1 -> LATIN CAPITAL LETTER U WITH RING ABOVE
-    '\xda'     #  0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
-    '\u016f'   #  0xF3 -> LATIN SMALL LETTER U WITH RING ABOVE
-    '\u0170'   #  0xF4 -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
-    '\u0171'   #  0xF5 -> LATIN SMALL LETTER U WITH DOUBLE ACUTE
-    '\u0172'   #  0xF6 -> LATIN CAPITAL LETTER U WITH OGONEK
-    '\u0173'   #  0xF7 -> LATIN SMALL LETTER U WITH OGONEK
-    '\xdd'     #  0xF8 -> LATIN CAPITAL LETTER Y WITH ACUTE
-    '\xfd'     #  0xF9 -> LATIN SMALL LETTER Y WITH ACUTE
-    '\u0137'   #  0xFA -> LATIN SMALL LETTER K WITH CEDILLA
-    '\u017b'   #  0xFB -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
-    '\u0141'   #  0xFC -> LATIN CAPITAL LETTER L WITH STROKE
-    '\u017c'   #  0xFD -> LATIN SMALL LETTER Z WITH DOT ABOVE
-    '\u0122'   #  0xFE -> LATIN CAPITAL LETTER G WITH CEDILLA
-    '\u02c7'   #  0xFF -> CARON
-)
-
-### Encoding table
-encoding_table=codecs.charmap_build(decoding_table)
diff --git a/common/py3-stdlib/enum.py b/common/py3-stdlib/enum.py
index 14cc00e..ebadd9f 100644
--- a/common/py3-stdlib/enum.py
+++ b/common/py3-stdlib/enum.py
@@ -104,9 +104,9 @@
                 # enum overwriting a descriptor?
                 raise TypeError('%r already defined as: %r' % (key, self[key]))
             if isinstance(value, auto):
-                self._auto_called = True
                 if value.value == _auto_null:
                     value.value = self._generate_next_value(key, 1, len(self._member_names), self._last_values[:])
+                    self._auto_called = True
                 value = value.value
             self._member_names.append(key)
             self._last_values.append(value)
@@ -123,10 +123,12 @@
     """Metaclass for Enum"""
     @classmethod
     def __prepare__(metacls, cls, bases):
+        # check that previous enum members do not exist
+        metacls._check_for_existing_members(cls, bases)
         # create the namespace dict
         enum_dict = _EnumDict()
         # inherit previous flags and _generate_next_value_ function
-        member_type, first_enum = metacls._get_mixins_(bases)
+        member_type, first_enum = metacls._get_mixins_(cls, bases)
         if first_enum is not None:
             enum_dict['_generate_next_value_'] = getattr(first_enum, '_generate_next_value_', None)
         return enum_dict
@@ -142,7 +144,7 @@
         ignore = classdict['_ignore_']
         for key in ignore:
             classdict.pop(key, None)
-        member_type, first_enum = metacls._get_mixins_(bases)
+        member_type, first_enum = metacls._get_mixins_(cls, bases)
         __new__, save_new, use_args = metacls._find_new_(classdict, member_type,
                                                         first_enum)
 
@@ -249,7 +251,11 @@
 
         # double check that repr and friends are not the mixin's or various
         # things break (such as pickle)
+        # however, if the method is defined in the Enum itself, don't replace
+        # it
         for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
+            if name in classdict:
+                continue
             class_method = getattr(enum_class, name)
             obj_method = getattr(member_type, name, None)
             enum_method = getattr(first_enum, name, None)
@@ -397,7 +403,7 @@
         """
         metacls = cls.__class__
         bases = (cls, ) if type is None else (type, cls)
-        _, first_enum = cls._get_mixins_(bases)
+        _, first_enum = cls._get_mixins_(cls, bases)
         classdict = metacls.__prepare__(class_name, bases)
 
         # special processing needed for names?
@@ -425,7 +431,7 @@
         if module is None:
             try:
                 module = sys._getframe(2).f_globals['__name__']
-            except (AttributeError, ValueError, KeyError) as exc:
+            except (AttributeError, ValueError, KeyError):
                 pass
         if module is None:
             _make_class_unpicklable(enum_class)
@@ -469,14 +475,15 @@
         module_globals[name] = cls
         return cls
 
-    def _convert(cls, *args, **kwargs):
-        import warnings
-        warnings.warn("_convert is deprecated and will be removed in 3.9, use "
-                      "_convert_ instead.", DeprecationWarning, stacklevel=2)
-        return cls._convert_(*args, **kwargs)
+    @staticmethod
+    def _check_for_existing_members(class_name, bases):
+        for chain in bases:
+            for base in chain.__mro__:
+                if issubclass(base, Enum) and base._member_names_:
+                    raise TypeError("%s: cannot extend enumeration %r" % (class_name, base.__name__))
 
     @staticmethod
-    def _get_mixins_(bases):
+    def _get_mixins_(class_name, bases):
         """Returns the type for creating enum members, and the first inherited
         enum class.
 
@@ -487,14 +494,25 @@
             return object, Enum
 
         def _find_data_type(bases):
+            data_types = []
             for chain in bases:
+                candidate = None
                 for base in chain.__mro__:
                     if base is object:
                         continue
                     elif '__new__' in base.__dict__:
                         if issubclass(base, Enum):
                             continue
-                        return base
+                        data_types.append(candidate or base)
+                        break
+                    elif not issubclass(base, Enum):
+                        candidate = base
+            if len(data_types) > 1:
+                raise TypeError('%r: too many data types: %r' % (class_name, data_types))
+            elif data_types:
+                return data_types[0]
+            else:
+                return None
 
         # ensure final parent class is an Enum derivative, find any concrete
         # data type, and check that Enum has no members
@@ -588,7 +606,7 @@
         if isinstance(result, cls):
             return result
         else:
-            ve_exc = ValueError("%r is not a valid %s" % (value, cls.__name__))
+            ve_exc = ValueError("%r is not a valid %s" % (value, cls.__qualname__))
             if result is None and exc is None:
                 raise ve_exc
             elif exc is None:
@@ -610,7 +628,7 @@
 
     @classmethod
     def _missing_(cls, value):
-        raise ValueError("%r is not a valid %s" % (value, cls.__name__))
+        return None
 
     def __repr__(self):
         return "<%s.%s: %r>" % (
@@ -633,8 +651,9 @@
         # we can get strange results with the Enum name showing up instead of
         # the value
 
-        # pure Enum branch
-        if self._member_type_ is object:
+        # pure Enum branch, or branch with __str__ explicitly overridden
+        str_overridden = type(self).__str__ != Enum.__str__
+        if self._member_type_ is object or str_overridden:
             cls = str
             val = str(self)
         # mix-in branch
@@ -716,7 +735,7 @@
             # verify all bits are accounted for
             _, extra_flags = _decompose(cls, value)
             if extra_flags:
-                raise ValueError("%r is not a valid %s" % (value, cls.__name__))
+                raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
             # construct a singleton enum pseudo-member
             pseudo_member = object.__new__(cls)
             pseudo_member._name_ = None
@@ -790,7 +809,7 @@
     @classmethod
     def _missing_(cls, value):
         if not isinstance(value, int):
-            raise ValueError("%r is not a valid %s" % (value, cls.__name__))
+            raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
         new_member = cls._create_pseudo_member_(value)
         return new_member
 
@@ -871,28 +890,20 @@
     # _decompose is only called if the value is not named
     not_covered = value
     negative = value < 0
-    # issue29167: wrap accesses to _value2member_map_ in a list to avoid race
-    #             conditions between iterating over it and having more pseudo-
-    #             members added to it
-    if negative:
-        # only check for named flags
-        flags_to_check = [
-                (m, v)
-                for v, m in list(flag._value2member_map_.items())
-                if m.name is not None
-                ]
-    else:
-        # check for named flags and powers-of-two flags
-        flags_to_check = [
-                (m, v)
-                for v, m in list(flag._value2member_map_.items())
-                if m.name is not None or _power_of_two(v)
-                ]
     members = []
-    for member, member_value in flags_to_check:
+    for member in flag:
+        member_value = member.value
         if member_value and member_value & value == member_value:
             members.append(member)
             not_covered &= ~member_value
+    if not negative:
+        tmp = not_covered
+        while tmp:
+            flag_value = 2 ** _high_bit(tmp)
+            if flag_value in flag._value2member_map_:
+                members.append(flag._value2member_map_[flag_value])
+                not_covered &= ~flag_value
+            tmp &= ~flag_value
     if not members and value in flag._value2member_map_:
         members.append(flag._value2member_map_[value])
     members.sort(key=lambda m: m._value_, reverse=True)
@@ -900,8 +911,3 @@
         # we have the breakdown, don't need the value member itself
         members.pop(0)
     return members, not_covered
-
-def _power_of_two(value):
-    if value < 1:
-        return False
-    return value == 2 ** _high_bit(value)
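A short sketch (not from the patch) of the new class-creation checks:
_check_for_existing_members rejects subclassing an enum that already has
members, and _find_data_type rejects more than one concrete mix-in type
(Python 3.9 behavior):

    from enum import Enum

    class Color(Enum):
        RED = 1

    try:
        class MoreColor(Color):        # caught in EnumMeta.__prepare__
            BLUE = 2
    except TypeError as exc:
        print(exc)    # MoreColor: cannot extend enumeration 'Color'

    try:
        class Bad(int, str, Enum):     # two data types defining __new__
            A = 1
    except TypeError as exc:
        print(exc)    # 'Bad': too many data types: [<class 'int'>, <class 'str'>]
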
diff --git a/common/py3-stdlib/filecmp.py b/common/py3-stdlib/filecmp.py
index e5ad839..7a4da6b 100644
--- a/common/py3-stdlib/filecmp.py
+++ b/common/py3-stdlib/filecmp.py
@@ -13,6 +13,7 @@
 import os
 import stat
 from itertools import filterfalse
+from types import GenericAlias
 
 __all__ = ['clear_cache', 'cmp', 'dircmp', 'cmpfiles', 'DEFAULT_IGNORES']
 
@@ -156,12 +157,12 @@
             ok = 1
             try:
                 a_stat = os.stat(a_path)
-            except OSError as why:
+            except OSError:
                 # print('Can\'t stat', a_path, ':', why.args[1])
                 ok = 0
             try:
                 b_stat = os.stat(b_path)
-            except OSError as why:
+            except OSError:
                 # print('Can\'t stat', b_path, ':', why.args[1])
                 ok = 0
 
@@ -247,6 +248,9 @@
         self.methodmap[attr](self)
         return getattr(self, attr)
 
+    __class_getitem__ = classmethod(GenericAlias)
+
+
 def cmpfiles(a, b, common, shallow=True):
     """Compare common files in two directories.
 
diff --git a/common/py3-stdlib/fileinput.py b/common/py3-stdlib/fileinput.py
index c1b0ec9..0c31f93 100644
--- a/common/py3-stdlib/fileinput.py
+++ b/common/py3-stdlib/fileinput.py
@@ -73,6 +73,7 @@
 """
 
 import sys, os
+from types import GenericAlias
 
 __all__ = ["input", "close", "nextfile", "filename", "lineno", "filelineno",
            "fileno", "isfirstline", "isstdin", "FileInput", "hook_compressed",
@@ -391,6 +392,8 @@
     def isstdin(self):
         return self._isstdin
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 def hook_compressed(filename, mode):
     ext = os.path.splitext(filename)[1]
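Both __class_getitem__ additions wire the classes into PEP 585 generics:
subscripting returns a types.GenericAlias for use in annotations, without
changing runtime behavior (Python 3.9+):

    import filecmp
    import fileinput

    print(filecmp.dircmp[str])         # filecmp.dircmp[str]
    print(fileinput.FileInput[str])    # fileinput.FileInput[str]
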
diff --git a/common/py3-stdlib/fnmatch.py b/common/py3-stdlib/fnmatch.py
index b98e641..0eb1802 100644
--- a/common/py3-stdlib/fnmatch.py
+++ b/common/py3-stdlib/fnmatch.py
@@ -16,6 +16,12 @@
 
 __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"]
 
+# Build a thread-safe incrementing counter to help create unique regexp group
+# names across calls.
+from itertools import count
+_nextgroupnum = count().__next__
+del count
+
 def fnmatch(name, pat):
     """Test whether FILENAME matches PATTERN.
 
@@ -77,15 +83,19 @@
     There is no way to quote meta-characters.
     """
 
+    STAR = object()
+    res = []
+    add = res.append
     i, n = 0, len(pat)
-    res = ''
     while i < n:
         c = pat[i]
         i = i+1
         if c == '*':
-            res = res + '.*'
+            # compress consecutive `*` into one
+            if (not res) or res[-1] is not STAR:
+                add(STAR)
         elif c == '?':
-            res = res + '.'
+            add('.')
         elif c == '[':
             j = i
             if j < n and pat[j] == '!':
@@ -95,7 +105,7 @@
             while j < n and pat[j] != ']':
                 j = j+1
             if j >= n:
-                res = res + '\\['
+                add('\\[')
             else:
                 stuff = pat[i:j]
                 if '--' not in stuff:
@@ -122,7 +132,52 @@
                     stuff = '^' + stuff[1:]
                 elif stuff[0] in ('^', '['):
                     stuff = '\\' + stuff
-                res = '%s[%s]' % (res, stuff)
+                add(f'[{stuff}]')
         else:
-            res = res + re.escape(c)
-    return r'(?s:%s)\Z' % res
+            add(re.escape(c))
+    assert i == n
+
+    # Deal with STARs.
+    inp = res
+    res = []
+    add = res.append
+    i, n = 0, len(inp)
+    # Fixed pieces at the start?
+    while i < n and inp[i] is not STAR:
+        add(inp[i])
+        i += 1
+    # Now deal with STAR fixed STAR fixed ...
+    # For an interior `STAR fixed` pairing, we want to do a minimal
+    # .*? match followed by `fixed`, with no possibility of backtracking.
+    # We can't spell that directly, but can trick it into working by matching
+    #    .*?fixed
+    # in a lookahead assertion, save the matched part in a group, then
+    # consume that group via a backreference. If the overall match fails,
+    # the lookahead assertion won't try alternatives. So the translation is:
+    #     (?=(?P<name>.*?fixed))(?P=name)
+    # Group names are created as needed: g0, g1, g2, ...
+    # The numbers are obtained from _nextgroupnum() to ensure they're unique
+    # across calls and across threads. This is because people rely on the
+    # undocumented ability to join multiple translate() results together via
+    # "|" to build large regexps matching "one of many" shell patterns.
+    while i < n:
+        assert inp[i] is STAR
+        i += 1
+        if i == n:
+            add(".*")
+            break
+        assert inp[i] is not STAR
+        fixed = []
+        while i < n and inp[i] is not STAR:
+            fixed.append(inp[i])
+            i += 1
+        fixed = "".join(fixed)
+        if i == n:
+            add(".*")
+            add(fixed)
+        else:
+            groupnum = _nextgroupnum()
+            add(f"(?=(?P<g{groupnum}>.*?{fixed}))(?P=g{groupnum})")
+    assert i == n
+    res = "".join(res)
+    return fr'(?s:{res})\Z'
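The effect of the STAR handling above, as a quick check; the g0/g1 group
numbers vary across calls because the counter is shared:

    import fnmatch

    print(fnmatch.translate('a*b*.txt'))
    # e.g. (?s:a(?=(?P<g0>.*?b))(?P=g0).*\.txt)\Z   (Python 3.9+)
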
diff --git a/common/py3-stdlib/fractions.py b/common/py3-stdlib/fractions.py
index e4fcc89..de3e23b 100644
--- a/common/py3-stdlib/fractions.py
+++ b/common/py3-stdlib/fractions.py
@@ -10,31 +10,9 @@
 import re
 import sys
 
-__all__ = ['Fraction', 'gcd']
+__all__ = ['Fraction']
 
 
-
-def gcd(a, b):
-    """Calculate the Greatest Common Divisor of a and b.
-
-    Unless b==0, the result will have the same sign as b (so that when
-    b is divided by it, the result comes out positive).
-    """
-    import warnings
-    warnings.warn('fractions.gcd() is deprecated. Use math.gcd() instead.',
-                  DeprecationWarning, 2)
-    if type(a) is int is type(b):
-        if (b or a) < 0:
-            return -math.gcd(a, b)
-        return math.gcd(a, b)
-    return _gcd(a, b)
-
-def _gcd(a, b):
-    # Supports non-integers for backward compatibility.
-    while b:
-        a, b = b, a%b
-    return a
-
 # Constants related to the hash implementation;  hash(x) is based
 # on the reduction of x modulo the prime _PyHASH_MODULUS.
 _PyHASH_MODULUS = sys.hash_info.modulus
@@ -177,13 +155,9 @@
         if denominator == 0:
             raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
         if _normalize:
-            if type(numerator) is int is type(denominator):
-                # *very* normal case
-                g = math.gcd(numerator, denominator)
-                if denominator < 0:
-                    g = -g
-            else:
-                g = _gcd(numerator, denominator)
+            g = math.gcd(numerator, denominator)
+            if denominator < 0:
+                g = -g
             numerator //= g
             denominator //= g
         self._numerator = numerator
@@ -556,23 +530,34 @@
     def __hash__(self):
         """hash(self)"""
 
-        # XXX since this method is expensive, consider caching the result
+        # To make sure that the hash of a Fraction agrees with the hash
+        # of a numerically equal integer, float or Decimal instance, we
+        # follow the rules for numeric hashes outlined in the
+        # documentation.  (See library docs, 'Built-in Types').
 
-        # In order to make sure that the hash of a Fraction agrees
-        # with the hash of a numerically equal integer, float or
-        # Decimal instance, we follow the rules for numeric hashes
-        # outlined in the documentation.  (See library docs, 'Built-in
-        # Types').
-
-        # dinv is the inverse of self._denominator modulo the prime
-        # _PyHASH_MODULUS, or 0 if self._denominator is divisible by
-        # _PyHASH_MODULUS.
-        dinv = pow(self._denominator, _PyHASH_MODULUS - 2, _PyHASH_MODULUS)
-        if not dinv:
+        try:
+            dinv = pow(self._denominator, -1, _PyHASH_MODULUS)
+        except ValueError:
+            # ValueError means there is no modular inverse.
             hash_ = _PyHASH_INF
         else:
-            hash_ = abs(self._numerator) * dinv % _PyHASH_MODULUS
-        result = hash_ if self >= 0 else -hash_
+            # The general algorithm now specifies that the absolute value of
+            # the hash is
+            #    (|N| * dinv) % P
+            # where N is self._numerator and P is _PyHASH_MODULUS.  That's
+            # optimized here in two ways:  first, for a non-negative int i,
+            # hash(i) == i % P, but the int hash implementation doesn't need
+            # to divide, and is faster than doing % P explicitly.  So we do
+            #    hash(|N| * dinv)
+            # instead.  Second, N is unbounded, so its product with dinv may
+            # be arbitrarily expensive to compute.  The final answer is the
+            # same if we use the bounded |N| % P instead, which can again
+            # be done with an int hash() call.  If 0 <= i < P, hash(i) == i,
+            # so this nested hash() call wastes a bit of time making a
+            # redundant copy when |N| < P, but can save an arbitrarily large
+            # amount of computation for large |N|.
+            hash_ = hash(hash(abs(self._numerator)) * dinv)
+        result = hash_ if self._numerator >= 0 else -hash_
         return -2 if result == -1 else result
 
     def __eq__(a, b):
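A small check (not part of the patch) that the rewritten __hash__ keeps the
documented numeric-hash invariants; pow(d, -1, P) needs Python 3.8+:

    import sys
    from fractions import Fraction

    assert hash(Fraction(1, 2)) == hash(0.5)
    assert hash(Fraction(7, 1)) == hash(7)

    # No modular inverse exists when the denominator is a multiple of the
    # hash modulus, so __hash__ falls back to sys.hash_info.inf.
    P = sys.hash_info.modulus
    assert hash(Fraction(1, P)) == sys.hash_info.inf
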
diff --git a/common/py3-stdlib/ftplib.py b/common/py3-stdlib/ftplib.py
index 58a46bc..1f760ed 100644
--- a/common/py3-stdlib/ftplib.py
+++ b/common/py3-stdlib/ftplib.py
@@ -72,17 +72,17 @@
 
 # The class itself
 class FTP:
-
     '''An FTP client class.
 
     To create a connection, call the class using these arguments:
-            host, user, passwd, acct, timeout
+            host, user, passwd, acct, timeout, source_address, encoding
 
     The first four arguments are all strings, and have default value ''.
-    timeout must be numeric and defaults to None if not passed,
-    meaning that no timeout will be set on any ftp socket(s)
+    The parameter 'timeout' must be numeric and defaults to None if not
+    passed, meaning that no timeout will be set on any ftp socket(s).
     If a timeout is passed, then this is now the default timeout for all ftp
     socket operations for this instance.
+    The last parameter is the encoding of filenames, which defaults to utf-8.
 
     Then use self.connect() with optional host and port argument.
 
@@ -103,14 +103,16 @@
     file = None
     welcome = None
     passiveserver = 1
-    encoding = "latin-1"
 
-    # Initialization method (called by class instantiation).
-    # Initialize host to localhost, port to standard ftp port
-    # Optional arguments are host (for connect()),
-    # and user, passwd, acct (for login())
     def __init__(self, host='', user='', passwd='', acct='',
-                 timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
+                 timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *,
+                 encoding='utf-8'):
+        """Initialization method (called by class instantiation).
+        Initialize host to localhost, port to standard ftp port.
+        Optional arguments are host (for connect()),
+        and user, passwd, acct (for login()).
+        """
+        self.encoding = encoding
         self.source_address = source_address
         self.timeout = timeout
         if host:
@@ -146,6 +148,8 @@
             self.port = port
         if timeout != -999:
             self.timeout = timeout
+        if self.timeout is not None and not self.timeout:
+            raise ValueError('Non-blocking socket (timeout=0) is not supported')
         if source_address is not None:
             self.source_address = source_address
         sys.audit("ftplib.connect", self, self.host, self.port)
@@ -704,9 +708,10 @@
         '''
         ssl_version = ssl.PROTOCOL_TLS_CLIENT
 
-        def __init__(self, host='', user='', passwd='', acct='', keyfile=None,
-                     certfile=None, context=None,
-                     timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
+        def __init__(self, host='', user='', passwd='', acct='',
+                     keyfile=None, certfile=None, context=None,
+                     timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *,
+                     encoding='utf-8'):
             if context is not None and keyfile is not None:
                 raise ValueError("context and keyfile arguments are mutually "
                                  "exclusive")
@@ -725,12 +730,13 @@
                                                      keyfile=keyfile)
             self.context = context
             self._prot_p = False
-            FTP.__init__(self, host, user, passwd, acct, timeout, source_address)
+            super().__init__(host, user, passwd, acct,
+                             timeout, source_address, encoding=encoding)
 
         def login(self, user='', passwd='', acct='', secure=True):
             if secure and not isinstance(self.sock, ssl.SSLSocket):
                 self.auth()
-            return FTP.login(self, user, passwd, acct)
+            return super().login(user, passwd, acct)
 
         def auth(self):
             '''Set up secure control connection by using TLS/SSL.'''
@@ -740,8 +746,7 @@
                 resp = self.voidcmd('AUTH TLS')
             else:
                 resp = self.voidcmd('AUTH SSL')
-            self.sock = self.context.wrap_socket(self.sock,
-                                                 server_hostname=self.host)
+            self.sock = self.context.wrap_socket(self.sock, server_hostname=self.host)
             self.file = self.sock.makefile(mode='r', encoding=self.encoding)
             return resp
 
@@ -778,7 +783,7 @@
         # --- Overridden FTP methods
 
         def ntransfercmd(self, cmd, rest=None):
-            conn, size = FTP.ntransfercmd(self, cmd, rest)
+            conn, size = super().ntransfercmd(cmd, rest)
             if self._prot_p:
                 conn = self.context.wrap_socket(conn,
                                                 server_hostname=self.host)
@@ -823,7 +828,6 @@
     '''Parse the '227' response for a PASV request.
     Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)'
     Return ('host.addr.as.numbers', port#) tuple.'''
-
     if resp[:3] != '227':
         raise error_reply(resp)
     global _227_re
@@ -843,7 +847,6 @@
     '''Parse the '229' response for an EPSV request.
     Raises error_proto if it does not contain '(|||port|)'
     Return ('host.addr.as.numbers', port#) tuple.'''
-
     if resp[:3] != '229':
         raise error_reply(resp)
     left = resp.find('(')
@@ -865,7 +868,6 @@
     '''Parse the '257' response for a MKD or PWD request.
     This is a response to a MKD or PWD request: a directory name.
     Returns the directoryname in the 257 reply.'''
-
     if resp[:3] != '257':
         raise error_reply(resp)
     if resp[3:5] != ' "':
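A sketch of the ftplib API changes above; 'ftp.example.com' is a
placeholder host, and no traffic is sent because the timeout check in
connect() fires before the socket is created:

    from ftplib import FTP

    ftp = FTP(encoding='utf-8')        # encoding is keyword-only now
    try:
        ftp.connect('ftp.example.com', 21, timeout=0)
    except ValueError as exc:
        print(exc)   # Non-blocking socket (timeout=0) is not supported
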
diff --git a/common/py3-stdlib/functools.py b/common/py3-stdlib/functools.py
index 4cde5f5..5cab497 100644
--- a/common/py3-stdlib/functools.py
+++ b/common/py3-stdlib/functools.py
@@ -10,15 +10,16 @@
 # See C source code for _functools credits/copyright
 
 __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
-           'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial',
-           'partialmethod', 'singledispatch', 'singledispatchmethod',
-           "cached_property"]
+           'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce',
+           'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod',
+           'cached_property']
 
 from abc import get_cache_token
 from collections import namedtuple
 # import types, weakref  # Deferred to single_dispatch()
 from reprlib import recursive_repr
 from _thread import RLock
+from types import GenericAlias
 
 
 ################################################################################
@@ -95,6 +96,8 @@
 def _le_from_lt(self, other, NotImplemented=NotImplemented):
     'Return a <= b.  Computed by @total_ordering from (a < b) or (a == b).'
     op_result = self.__lt__(other)
+    if op_result is NotImplemented:
+        return op_result
     return op_result or self == other
 
 def _ge_from_lt(self, other, NotImplemented=NotImplemented):
@@ -135,6 +138,8 @@
 def _ge_from_gt(self, other, NotImplemented=NotImplemented):
     'Return a >= b.  Computed by @total_ordering from (a > b) or (a == b).'
     op_result = self.__gt__(other)
+    if op_result is NotImplemented:
+        return op_result
     return op_result or self == other
 
 def _le_from_gt(self, other, NotImplemented=NotImplemented):
@@ -346,23 +351,7 @@
     callables as instance methods.
     """
 
-    def __init__(*args, **keywords):
-        if len(args) >= 2:
-            self, func, *args = args
-        elif not args:
-            raise TypeError("descriptor '__init__' of partialmethod "
-                            "needs an argument")
-        elif 'func' in keywords:
-            func = keywords.pop('func')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'func' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError("type 'partialmethod' takes at least one argument, "
-                            "got %d" % (len(args)-1))
-        args = tuple(args)
-
+    def __init__(self, func, /, *args, **keywords):
         if not callable(func) and not hasattr(func, "__get__"):
             raise TypeError("{!r} is not callable or a descriptor"
                                  .format(func))
@@ -380,7 +369,6 @@
             self.func = func
             self.args = args
             self.keywords = keywords
-    __init__.__text_signature__ = '($self, func, /, *args, **keywords)'
 
     def __repr__(self):
         args = ", ".join(map(repr, self.args))
@@ -424,6 +412,9 @@
     def __isabstractmethod__(self):
         return getattr(self.func, "__isabstractmethod__", False)
 
+    __class_getitem__ = classmethod(GenericAlias)
+
+
 # Helper functions
 
 def _unwrap_partial(func):
@@ -517,6 +508,7 @@
         # The user_function was passed in directly via the maxsize argument
         user_function, maxsize = maxsize, 128
         wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
+        wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
         return update_wrapper(wrapper, user_function)
     elif maxsize is not None:
         raise TypeError(
@@ -524,6 +516,7 @@
 
     def decorating_function(user_function):
         wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
+        wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
         return update_wrapper(wrapper, user_function)
 
     return decorating_function
@@ -651,6 +644,15 @@
 
 
 ################################################################################
+### cache -- simplified access to the infinity cache
+################################################################################
+
+def cache(user_function, /):
+    'Simple lightweight unbounded cache.  Sometimes called "memoize".'
+    return lru_cache(maxsize=None)(user_function)
+
+
+################################################################################
 ### singledispatch() - single-dispatch generic function decorator
 ################################################################################
 
@@ -974,3 +976,5 @@
                         )
                         raise TypeError(msg) from None
         return val
+
+    __class_getitem__ = classmethod(GenericAlias)
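The new cache() helper and the cache_parameters() hook added above, in use:

    from functools import cache

    @cache                     # equivalent to lru_cache(maxsize=None)
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    print(fib(30))                   # 832040
    print(fib.cache_parameters())    # {'maxsize': None, 'typed': False}
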
diff --git a/common/py3-stdlib/getpass.py b/common/py3-stdlib/getpass.py
index 36e17e4..6911f41 100644
--- a/common/py3-stdlib/getpass.py
+++ b/common/py3-stdlib/getpass.py
@@ -52,7 +52,7 @@
             stack.enter_context(input)
             if not stream:
                 stream = input
-        except OSError as e:
+        except OSError:
             # If that fails, see if stdin can be controlled.
             stack.close()
             try:
diff --git a/common/py3-stdlib/gettext.py b/common/py3-stdlib/gettext.py
index b98f501..77b67ae 100644
--- a/common/py3-stdlib/gettext.py
+++ b/common/py3-stdlib/gettext.py
@@ -46,7 +46,6 @@
 #   find this format documented anywhere.
 
 
-import locale
 import os
 import re
 import sys
@@ -210,6 +209,7 @@
 
 
 def _expand_lang(loc):
+    import locale
     loc = locale.normalize(loc)
     COMPONENT_CODESET   = 1 << 0
     COMPONENT_TERRITORY = 1 << 1
@@ -278,6 +278,7 @@
         import warnings
         warnings.warn('lgettext() is deprecated, use gettext() instead',
                       DeprecationWarning, 2)
+        import locale
         if self._fallback:
             with warnings.catch_warnings():
                 warnings.filterwarnings('ignore', r'.*\blgettext\b.*',
@@ -299,6 +300,7 @@
         import warnings
         warnings.warn('lngettext() is deprecated, use ngettext() instead',
                       DeprecationWarning, 2)
+        import locale
         if self._fallback:
             with warnings.catch_warnings():
                 warnings.filterwarnings('ignore', r'.*\blngettext\b.*',
@@ -462,6 +464,7 @@
         import warnings
         warnings.warn('lgettext() is deprecated, use gettext() instead',
                       DeprecationWarning, 2)
+        import locale
         missing = object()
         tmsg = self._catalog.get(message, missing)
         if tmsg is missing:
@@ -476,6 +479,7 @@
         import warnings
         warnings.warn('lngettext() is deprecated, use ngettext() instead',
                       DeprecationWarning, 2)
+        import locale
         try:
             tmsg = self._catalog[(msgid1, self.plural(n))]
         except KeyError:
@@ -668,6 +672,7 @@
     import warnings
     warnings.warn('ldgettext() is deprecated, use dgettext() instead',
                   DeprecationWarning, 2)
+    import locale
     codeset = _localecodesets.get(domain)
     try:
         with warnings.catch_warnings():
@@ -695,6 +700,7 @@
     import warnings
     warnings.warn('ldngettext() is deprecated, use dngettext() instead',
                   DeprecationWarning, 2)
+    import locale
     codeset = _localecodesets.get(domain)
     try:
         with warnings.catch_warnings():
diff --git a/common/py3-stdlib/graphlib.py b/common/py3-stdlib/graphlib.py
new file mode 100644
index 0000000..d0e7a48
--- /dev/null
+++ b/common/py3-stdlib/graphlib.py
@@ -0,0 +1,246 @@
+__all__ = ["TopologicalSorter", "CycleError"]
+
+_NODE_OUT = -1
+_NODE_DONE = -2
+
+
+class _NodeInfo:
+    __slots__ = "node", "npredecessors", "successors"
+
+    def __init__(self, node):
+        # The node this class is augmenting.
+        self.node = node
+
+        # Number of predecessors, generally >= 0. When this value falls to 0
+        # and the node is returned by get_ready(), it is set to _NODE_OUT;
+        # when the node is marked done by a call to done(), set to _NODE_DONE.
+        self.npredecessors = 0
+
+        # List of successor nodes. The list can contain duplicated elements as
+        # long as they're all reflected in the successor's npredecessors attribute.
+        self.successors = []
+
+
+class CycleError(ValueError):
+    """Subclass of ValueError raised by TopologicalSorter.prepare if cycles
+    exist in the working graph.
+
+    If multiple cycles exist, only one of them (an arbitrary choice) will be
+    reported and included in the exception. The detected cycle can be accessed
+    via the second element in the *args* attribute of the exception instance
+    and consists of a list of nodes, such that each node is, in the graph, an
+    immediate predecessor of the next node in the list. In the reported list,
+    the first and the last node will be the same, to make it clear it is cyclic.
+    """
+
+    pass
+
+
+class TopologicalSorter:
+    """Provides functionality to topologically sort a graph of hashable nodes"""
+
+    def __init__(self, graph=None):
+        self._node2info = {}
+        self._ready_nodes = None
+        self._npassedout = 0
+        self._nfinished = 0
+
+        if graph is not None:
+            for node, predecessors in graph.items():
+                self.add(node, *predecessors)
+
+    def _get_nodeinfo(self, node):
+        if (result := self._node2info.get(node)) is None:
+            self._node2info[node] = result = _NodeInfo(node)
+        return result
+
+    def add(self, node, *predecessors):
+        """Add a new node and its predecessors to the graph.
+
+        Both the *node* and all elements in *predecessors* must be hashable.
+
+        If called multiple times with the same node argument, the set of dependencies
+        will be the union of all dependencies passed in.
+
+        It is possible to add a node with no dependencies (*predecessors* is not provided)
+        as well as provide a dependency twice. If a node that has not been provided before
+        is included among *predecessors* it will be automatically added to the graph with
+        no predecessors of its own.
+
+        Raises ValueError if called after "prepare".
+        """
+        if self._ready_nodes is not None:
+            raise ValueError("Nodes cannot be added after a call to prepare()")
+
+        # Create the node -> predecessor edges
+        nodeinfo = self._get_nodeinfo(node)
+        nodeinfo.npredecessors += len(predecessors)
+
+        # Create the predecessor -> node edges
+        for pred in predecessors:
+            pred_info = self._get_nodeinfo(pred)
+            pred_info.successors.append(node)
+
+    def prepare(self):
+        """Mark the graph as finished and check for cycles in the graph.
+
+        If any cycle is detected, "CycleError" will be raised, but "get_ready" can
+        still be used to obtain as many nodes as possible until cycles block more
+        progress. After a call to this function, the graph cannot be modified and
+        therefore no more nodes can be added using "add".
+        """
+        if self._ready_nodes is not None:
+            raise ValueError("cannot prepare() more than once")
+
+        self._ready_nodes = [
+            i.node for i in self._node2info.values() if i.npredecessors == 0
+        ]
+        # ready_nodes is set before we look for cycles on purpose:
+        # if the user wants to catch the CycleError, that's fine,
+        # they can continue using the instance to grab as many
+        # nodes as possible before cycles block more progress
+        cycle = self._find_cycle()
+        if cycle:
+            raise CycleError("nodes are in a cycle", cycle)
+
+    def get_ready(self):
+        """Return a tuple of all the nodes that are ready.
+
+        Initially it returns all nodes with no predecessors; once those are marked
+        as processed by calling "done", further calls will return all new nodes that
+        have all their predecessors already processed. Once no more progress can be made,
+        empty tuples are returned.
+
+        Raises ValueError if called without calling "prepare" previously.
+        """
+        if self._ready_nodes is None:
+            raise ValueError("prepare() must be called first")
+
+        # Get the nodes that are ready and mark them
+        result = tuple(self._ready_nodes)
+        n2i = self._node2info
+        for node in result:
+            n2i[node].npredecessors = _NODE_OUT
+
+        # Clean the list of nodes that are ready and update
+        # the counter of nodes that we have returned.
+        self._ready_nodes.clear()
+        self._npassedout += len(result)
+
+        return result
+
+    def is_active(self):
+        """Return ``True`` if more progress can be made and ``False`` otherwise.
+
+        Progress can be made if cycles do not block the resolution and either there
+        are still nodes ready that haven't yet been returned by "get_ready" or the
+        number of nodes marked "done" is less than the number that have been returned
+        by "get_ready".
+
+        Raises ValueError if called without calling "prepare" previously.
+        """
+        if self._ready_nodes is None:
+            raise ValueError("prepare() must be called first")
+        return self._nfinished < self._npassedout or bool(self._ready_nodes)
+
+    def __bool__(self):
+        return self.is_active()
+
+    def done(self, *nodes):
+        """Marks a set of nodes returned by "get_ready" as processed.
+
+        This method unblocks any successor of each node in *nodes* for being returned
+        in the future by a call to "get_ready".
+
+        Raises :exc:`ValueError` if any node in *nodes* has already been marked as
+        processed by a previous call to this method, if a node was not added to the
+        graph by using "add" or if called without calling "prepare" previously or if
+        node has not yet been returned by "get_ready".
+        """
+
+        if self._ready_nodes is None:
+            raise ValueError("prepare() must be called first")
+
+        n2i = self._node2info
+
+        for node in nodes:
+
+            # Check if we know about this node (it was added previously using add())
+            if (nodeinfo := n2i.get(node)) is None:
+                raise ValueError(f"node {node!r} was not added using add()")
+
+            # If the node has not been returned (marked as ready) previously, inform the user.
+            stat = nodeinfo.npredecessors
+            if stat != _NODE_OUT:
+                if stat >= 0:
+                    raise ValueError(
+                        f"node {node!r} was not passed out (still not ready)"
+                    )
+                elif stat == _NODE_DONE:
+                    raise ValueError(f"node {node!r} was already marked done")
+                else:
+                    assert False, f"node {node!r}: unknown status {stat}"
+
+            # Mark the node as processed
+            nodeinfo.npredecessors = _NODE_DONE
+
+            # Go to all the successors and reduce the number of predecessors, collecting all the ones
+            # that are ready to be returned in the next get_ready() call.
+            for successor in nodeinfo.successors:
+                successor_info = n2i[successor]
+                successor_info.npredecessors -= 1
+                if successor_info.npredecessors == 0:
+                    self._ready_nodes.append(successor)
+            self._nfinished += 1
+
+    def _find_cycle(self):
+        n2i = self._node2info
+        stack = []
+        itstack = []
+        seen = set()
+        node2stacki = {}
+
+        for node in n2i:
+            if node in seen:
+                continue
+
+            while True:
+                if node in seen:
+                    # If we have already seen the node and it is in the
+                    # current stack, we have found a cycle.
+                    if node in node2stacki:
+                        return stack[node2stacki[node] :] + [node]
+                    # else go on to get next successor
+                else:
+                    seen.add(node)
+                    itstack.append(iter(n2i[node].successors).__next__)
+                    node2stacki[node] = len(stack)
+                    stack.append(node)
+
+                # Backtrack to the topmost stack entry with
+                # at least one remaining successor.
+                while stack:
+                    try:
+                        node = itstack[-1]()
+                        break
+                    except StopIteration:
+                        del node2stacki[stack.pop()]
+                        itstack.pop()
+                else:
+                    break
+        return None
+
+    def static_order(self):
+        """Returns an iterable of nodes in a topological order.
+
+        The particular order that is returned may depend on the specific
+        order in which the items were inserted in the graph.
+
+        Using this method does not require calling "prepare" or "done". If any
+        cycle is detected, :exc:`CycleError` will be raised.
+        """
+        self.prepare()
+        while self.is_active():
+            node_group = self.get_ready()
+            yield from node_group
+            self.done(*node_group)
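
A minimal sketch of the prepare()/get_ready()/done() protocol implemented above, assuming this class is graphlib.TopologicalSorter (new in Python 3.9):

    from graphlib import TopologicalSorter

    # node -> set of predecessors
    graph = {"b": {"a"}, "c": {"a"}, "d": {"b", "c"}}
    ts = TopologicalSorter(graph)
    ts.prepare()
    while ts.is_active():
        ready = ts.get_ready()   # nodes whose predecessors are all done
        for node in ready:
            print(node)          # process, possibly in parallel
        ts.done(*ready)          # unblock the successors of each node

    # static_order() wraps the same loop into a single generator:
    print(list(TopologicalSorter(graph).static_order()))
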
diff --git a/common/py3-stdlib/gzip.py b/common/py3-stdlib/gzip.py
index 87b553d..e422773 100644
--- a/common/py3-stdlib/gzip.py
+++ b/common/py3-stdlib/gzip.py
@@ -177,6 +177,7 @@
                 filename = ''
         else:
             filename = os.fspath(filename)
+        origmode = mode
         if mode is None:
             mode = getattr(fileobj, 'mode', 'rb')
 
@@ -187,6 +188,13 @@
             self.name = filename
 
         elif mode.startswith(('w', 'a', 'x')):
+            if origmode is None:
+                import warnings
+                warnings.warn(
+                    "GzipFile was opened for writing, but this will "
+                    "change in future Python releases.  "
+                    "Specify the mode argument for opening it for writing.",
+                    FutureWarning, 2)
             self.mode = WRITE
             self._init_write(filename)
             self.compress = zlib.compressobj(compresslevel,
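
A short sketch of when the new FutureWarning fires, assuming Python 3.9 behavior: the warning is emitted only when GzipFile infers a write mode from the fileobj; an explicit mode argument avoids it.

    import gzip
    import io

    buf = io.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode="wb") as gz:  # explicit mode: no warning
        gz.write(b"payload")

    # gzip.GzipFile(fileobj=open("out.gz", "wb")) would emit the FutureWarning,
    # because the write mode is taken from the file object, not the arguments.
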
diff --git a/common/py3-stdlib/hashlib.py b/common/py3-stdlib/hashlib.py
index 56873b7..58c340d 100644
--- a/common/py3-stdlib/hashlib.py
+++ b/common/py3-stdlib/hashlib.py
@@ -70,9 +70,12 @@
 
 __builtin_constructor_cache = {}
 
+# Prefer our blake2 implementation
+# OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. The OpenSSL
+# implementations neither support keyed blake2 (blake2 MAC) nor advanced
+# features like salt, personalization, or tree hashing. OpenSSL hash-only
+# variants are available as 'blake2b512' and 'blake2s256', though.
 __block_openssl_constructor = {
-    'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
-    'shake_128', 'shake_256',
     'blake2b', 'blake2s',
 }
 
@@ -122,13 +125,16 @@
 
 def __get_openssl_constructor(name):
     if name in __block_openssl_constructor:
-        # Prefer our blake2 and sha3 implementation.
+        # Prefer our builtin blake2 implementation.
         return __get_builtin_constructor(name)
     try:
+        # MD5, SHA1, and SHA2 are in all supported OpenSSL versions
+        # SHA3/shake are available in OpenSSL 1.1.1+
         f = getattr(_hashlib, 'openssl_' + name)
         # Allow the C module to raise ValueError.  The function will be
-        # defined but the hash not actually available thanks to OpenSSL.
-        f()
+        # defined but the hash not actually available.  Don't fall back to
+        # builtin if the current security policy blocks a digest, bpo#40695.
+        f(usedforsecurity=False)
         # Use the C function directly (very fast)
         return f
     except (AttributeError, ValueError):
@@ -148,13 +154,10 @@
     optionally initialized with data (which must be a bytes-like object).
     """
     if name in __block_openssl_constructor:
-        # Prefer our blake2 and sha3 implementation
-        # OpenSSL 1.1.0 comes with a limited implementation of blake2b/s.
-        # It does neither support keyed blake2 nor advanced features like
-        # salt, personal, tree hashing or SSE.
+        # Prefer our builtin blake2 implementation.
         return __get_builtin_constructor(name)(data, **kwargs)
     try:
-        return _hashlib.new(name, data)
+        return _hashlib.new(name, data, **kwargs)
     except ValueError:
         # If the _hashlib module (OpenSSL) doesn't support the named
         # hash, try using our builtin implementations.
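
A sketch of the two behavior changes above, assuming Python 3.9: keyword arguments now reach both constructors, and a digest can be probed with usedforsecurity=False so a restrictive security policy (e.g. a FIPS build) does not block non-security uses.

    import hashlib

    # blake2 resolves to the builtin implementation, so its kwargs now
    # work through hashlib.new():
    h = hashlib.new("blake2b", b"data", digest_size=16)
    print(h.hexdigest())

    # Non-security use of a digest that a policy might otherwise block:
    md5 = hashlib.new("md5", b"cache-key", usedforsecurity=False)
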
diff --git a/common/py3-stdlib/hmac.py b/common/py3-stdlib/hmac.py
index b769876..180bc37 100644
--- a/common/py3-stdlib/hmac.py
+++ b/common/py3-stdlib/hmac.py
@@ -4,14 +4,15 @@
 """
 
 import warnings as _warnings
-from _operator import _compare_digest as compare_digest
 try:
     import _hashlib as _hashopenssl
 except ImportError:
     _hashopenssl = None
     _openssl_md_meths = None
+    from _operator import _compare_digest as compare_digest
 else:
     _openssl_md_meths = frozenset(_hashopenssl.openssl_md_meth_names)
+    compare_digest = _hashopenssl.compare_digest
 import hashlib as _hashlib
 
 trans_5C = bytes((x ^ 0x5C) for x in range(256))
@@ -30,6 +31,10 @@
     """
     blocksize = 64  # 512-bit HMAC; can be changed in subclasses.
 
+    __slots__ = (
+        "_digest_cons", "_inner", "_outer", "block_size", "digest_size"
+    )
+
     def __init__(self, key, msg=None, digestmod=''):
         """Create a new HMAC object.
 
@@ -51,18 +56,18 @@
             raise TypeError("Missing required parameter 'digestmod'.")
 
         if callable(digestmod):
-            self.digest_cons = digestmod
+            self._digest_cons = digestmod
         elif isinstance(digestmod, str):
-            self.digest_cons = lambda d=b'': _hashlib.new(digestmod, d)
+            self._digest_cons = lambda d=b'': _hashlib.new(digestmod, d)
         else:
-            self.digest_cons = lambda d=b'': digestmod.new(d)
+            self._digest_cons = lambda d=b'': digestmod.new(d)
 
-        self.outer = self.digest_cons()
-        self.inner = self.digest_cons()
-        self.digest_size = self.inner.digest_size
+        self._outer = self._digest_cons()
+        self._inner = self._digest_cons()
+        self.digest_size = self._inner.digest_size
 
-        if hasattr(self.inner, 'block_size'):
-            blocksize = self.inner.block_size
+        if hasattr(self._inner, 'block_size'):
+            blocksize = self._inner.block_size
             if blocksize < 16:
                 _warnings.warn('block_size of %d seems too small; using our '
                                'default of %d.' % (blocksize, self.blocksize),
@@ -79,21 +84,33 @@
         self.block_size = blocksize
 
         if len(key) > blocksize:
-            key = self.digest_cons(key).digest()
+            key = self._digest_cons(key).digest()
 
         key = key.ljust(blocksize, b'\0')
-        self.outer.update(key.translate(trans_5C))
-        self.inner.update(key.translate(trans_36))
+        self._outer.update(key.translate(trans_5C))
+        self._inner.update(key.translate(trans_36))
         if msg is not None:
             self.update(msg)
 
     @property
     def name(self):
-        return "hmac-" + self.inner.name
+        return "hmac-" + self._inner.name
+
+    @property
+    def digest_cons(self):
+        return self._digest_cons
+
+    @property
+    def inner(self):
+        return self._inner
+
+    @property
+    def outer(self):
+        return self._outer
 
     def update(self, msg):
         """Feed data from msg into this hashing object."""
-        self.inner.update(msg)
+        self._inner.update(msg)
 
     def copy(self):
         """Return a separate copy of this hashing object.
@@ -102,10 +119,10 @@
         """
         # Call __new__ directly to avoid the expensive __init__.
         other = self.__class__.__new__(self.__class__)
-        other.digest_cons = self.digest_cons
+        other._digest_cons = self._digest_cons
         other.digest_size = self.digest_size
-        other.inner = self.inner.copy()
-        other.outer = self.outer.copy()
+        other._inner = self._inner.copy()
+        other._outer = self._outer.copy()
         return other
 
     def _current(self):
@@ -113,8 +130,8 @@
 
         To be used only internally with digest() and hexdigest().
         """
-        h = self.outer.copy()
-        h.update(self.inner.digest())
+        h = self._outer.copy()
+        h.update(self._inner.digest())
         return h
 
     def digest(self):
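
The public surface is unchanged by the rename above: the old attribute names survive as read-only properties over the new slotted _digest_cons/_inner/_outer. A brief sketch:

    import hmac

    mac = hmac.new(b"secret-key", b"message", digestmod="sha256")
    print(mac.hexdigest())
    print(mac.inner.name, mac.outer.name)  # still readable via the properties

    # compare_digest now comes from _hashlib (OpenSSL) when it is available:
    assert hmac.compare_digest(mac.digest(), mac.digest())
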
diff --git a/common/py3-stdlib/html/parser.py b/common/py3-stdlib/html/parser.py
index de81879..6083077 100644
--- a/common/py3-stdlib/html/parser.py
+++ b/common/py3-stdlib/html/parser.py
@@ -9,7 +9,6 @@
 
 
 import re
-import warnings
 import _markupbase
 
 from html import unescape
@@ -461,10 +460,3 @@
 
     def unknown_decl(self, data):
         pass
-
-    # Internal -- helper to remove special character quoting
-    def unescape(self, s):
-        warnings.warn('The unescape method is deprecated and will be removed '
-                      'in 3.5, use html.unescape() instead.',
-                      DeprecationWarning, stacklevel=2)
-        return unescape(s)
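
Callers of the removed HTMLParser.unescape() method can switch to the module-level function, as in this sketch:

    from html import unescape

    print(unescape("&lt;spam &amp; eggs&gt;"))  # -> <spam & eggs>
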
diff --git a/common/py3-stdlib/http/__init__.py b/common/py3-stdlib/http/__init__.py
index 350afe7..37be765 100644
--- a/common/py3-stdlib/http/__init__.py
+++ b/common/py3-stdlib/http/__init__.py
@@ -17,6 +17,9 @@
         * RFC 2774: An HTTP Extension Framework
         * RFC 7725: An HTTP Status Code to Report Legal Obstacles
         * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
+        * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
+        * RFC 8297: An HTTP Status Code for Indicating Hints
+        * RFC 8470: Using Early Data in HTTP
     """
     def __new__(cls, value, phrase, description=''):
         obj = int.__new__(cls, value)
@@ -31,6 +34,7 @@
     SWITCHING_PROTOCOLS = (101, 'Switching Protocols',
             'Switching to new protocol; obey Upgrade header')
     PROCESSING = 102, 'Processing'
+    EARLY_HINTS = 103, 'Early Hints'
 
     # success
     OK = 200, 'OK', 'Request fulfilled, document follows'
@@ -100,11 +104,14 @@
         'Cannot satisfy request range')
     EXPECTATION_FAILED = (417, 'Expectation Failed',
         'Expect condition could not be satisfied')
+    IM_A_TEAPOT = (418, 'I\'m a Teapot',
+        'Server refuses to brew coffee because it is a teapot.')
     MISDIRECTED_REQUEST = (421, 'Misdirected Request',
         'Server is not able to produce a response')
     UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity'
     LOCKED = 423, 'Locked'
     FAILED_DEPENDENCY = 424, 'Failed Dependency'
+    TOO_EARLY = 425, 'Too Early'
     UPGRADE_REQUIRED = 426, 'Upgrade Required'
     PRECONDITION_REQUIRED = (428, 'Precondition Required',
         'The origin server requires the request to be conditional')
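
The added codes are ordinary HTTPStatus members; a quick sketch:

    from http import HTTPStatus

    for status in (HTTPStatus.EARLY_HINTS, HTTPStatus.IM_A_TEAPOT,
                   HTTPStatus.TOO_EARLY):
        print(int(status), status.phrase)
    # 103 Early Hints / 418 I'm a Teapot / 425 Too Early
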
diff --git a/common/py3-stdlib/http/cookies.py b/common/py3-stdlib/http/cookies.py
index 6694f54..35ac2dc 100644
--- a/common/py3-stdlib/http/cookies.py
+++ b/common/py3-stdlib/http/cookies.py
@@ -131,6 +131,7 @@
 #
 import re
 import string
+import types
 
 __all__ = ["CookieError", "BaseCookie", "SimpleCookie"]
 
@@ -419,6 +420,8 @@
         # Return the result
         return _semispacejoin(result)
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
 
 #
 # Pattern for finding cookie
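
With __class_getitem__ in place, Morsel becomes subscriptable for type annotations, as in this sketch:

    from http.cookies import Morsel, SimpleCookie

    def session_morsel(cookie: SimpleCookie) -> "Morsel[str]":
        return cookie["session"]

    print(Morsel[str])  # a types.GenericAlias, usable in annotations
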
diff --git a/common/py3-stdlib/http/server.py b/common/py3-stdlib/http/server.py
index 38f7acc..def05f4 100644
--- a/common/py3-stdlib/http/server.py
+++ b/common/py3-stdlib/http/server.py
@@ -639,11 +639,17 @@
     """
 
     server_version = "SimpleHTTP/" + __version__
+    extensions_map = _encodings_map_default = {
+        '.gz': 'application/gzip',
+        '.Z': 'application/octet-stream',
+        '.bz2': 'application/x-bzip2',
+        '.xz': 'application/x-xz',
+    }
 
     def __init__(self, *args, directory=None, **kwargs):
         if directory is None:
             directory = os.getcwd()
-        self.directory = directory
+        self.directory = os.fspath(directory)
         super().__init__(*args, **kwargs)
 
     def do_GET(self):
@@ -866,25 +872,16 @@
         slow) to look inside the data to make a better guess.
 
         """
-
         base, ext = posixpath.splitext(path)
         if ext in self.extensions_map:
             return self.extensions_map[ext]
         ext = ext.lower()
         if ext in self.extensions_map:
             return self.extensions_map[ext]
-        else:
-            return self.extensions_map['']
-
-    if not mimetypes.inited:
-        mimetypes.init() # try to read system mime.types
-    extensions_map = mimetypes.types_map.copy()
-    extensions_map.update({
-        '': 'application/octet-stream', # Default
-        '.py': 'text/plain',
-        '.c': 'text/plain',
-        '.h': 'text/plain',
-        })
+        guess, _ = mimetypes.guess_type(path)
+        if guess:
+            return guess
+        return 'application/octet-stream'
 
 
 # Utilities for CGIHTTPRequestHandler
@@ -1015,8 +1012,10 @@
         """
         collapsed_path = _url_collapse_path(self.path)
         dir_sep = collapsed_path.find('/', 1)
-        head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
-        if head in self.cgi_directories:
+        while dir_sep > 0 and collapsed_path[:dir_sep] not in self.cgi_directories:
+            dir_sep = collapsed_path.find('/', dir_sep+1)
+        if dir_sep > 0:
+            head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
             self.cgi_info = head, tail
             return True
         return False
@@ -1124,12 +1123,7 @@
         referer = self.headers.get('referer')
         if referer:
             env['HTTP_REFERER'] = referer
-        accept = []
-        for line in self.headers.getallmatchingheaders('accept'):
-            if line[:1] in "\t\n\r ":
-                accept.append(line.strip())
-            else:
-                accept = accept + line[7:].split(',')
+        accept = self.headers.get_all('accept', ())
         env['HTTP_ACCEPT'] = ','.join(accept)
         ua = self.headers.get('user-agent')
         if ua:
@@ -1165,8 +1159,9 @@
                 while select.select([self.rfile], [], [], 0)[0]:
                     if not self.rfile.read(1):
                         break
-                if sts:
-                    self.log_error("CGI script exit status %#x", sts)
+                exitcode = os.waitstatus_to_exitcode(sts)
+                if exitcode:
+                    self.log_error(f"CGI script exit code {exitcode}")
                 return
             # Child
             try:
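
A sketch of the revised mime handling: extensions_map now holds only the compressed-type overrides, and everything else defers to mimetypes.guess_type() with a binary fallback. The directory argument is also passed through os.fspath(), so path-like objects work.

    import mimetypes
    import pathlib
    from functools import partial
    from http.server import HTTPServer, SimpleHTTPRequestHandler

    print(SimpleHTTPRequestHandler.extensions_map[".bz2"])  # application/x-bzip2
    print(mimetypes.guess_type("page.html")[0])             # the new fallback path

    # directory may now be a pathlib.Path (hypothetical local server):
    handler = partial(SimpleHTTPRequestHandler, directory=pathlib.Path("."))
    # HTTPServer(("", 8000), handler).serve_forever()
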
diff --git a/common/py3-stdlib/imaplib.py b/common/py3-stdlib/imaplib.py
index 822d9d6..d9720f2 100644
--- a/common/py3-stdlib/imaplib.py
+++ b/common/py3-stdlib/imaplib.py
@@ -98,6 +98,7 @@
         'THREAD':       ('SELECTED',),
         'UID':          ('SELECTED',),
         'UNSUBSCRIBE':  ('AUTH', 'SELECTED'),
+        'UNSELECT':     ('SELECTED',),
         }
 
 #       Patterns to match server responses
@@ -135,10 +136,13 @@
 
     r"""IMAP4 client class.
 
-    Instantiate with: IMAP4([host[, port]])
+    Instantiate with: IMAP4([host[, port[, timeout=None]]])
 
             host - host's name (default: localhost);
             port - port number (default: standard IMAP4 port).
+            timeout - socket timeout (default: None)
+                      If timeout is not given or is None,
+                      the global default socket timeout is used
 
     All IMAP4rev1 commands are supported by methods of the same
     name (in lower-case).
@@ -181,7 +185,7 @@
     class abort(error): pass        # Service errors - close and retry
     class readonly(abort): pass     # Mailbox status changed to READ-ONLY
 
-    def __init__(self, host='', port=IMAP4_PORT):
+    def __init__(self, host='', port=IMAP4_PORT, timeout=None):
         self.debug = Debug
         self.state = 'LOGOUT'
         self.literal = None             # A literal argument to a command
@@ -195,7 +199,7 @@
 
         # Open socket to server.
 
-        self.open(host, port)
+        self.open(host, port, timeout)
 
         try:
             self._connect()
@@ -284,15 +288,20 @@
     #       Overridable methods
 
 
-    def _create_socket(self):
+    def _create_socket(self, timeout):
         # Default value of IMAP4.host is '', but socket.getaddrinfo()
         # (which is used by socket.create_connection()) expects None
         # as a default value for host.
+        if timeout is not None and not timeout:
+            raise ValueError('Non-blocking socket (timeout=0) is not supported')
         host = None if not self.host else self.host
         sys.audit("imaplib.open", self, self.host, self.port)
-        return socket.create_connection((host, self.port))
+        address = (host, self.port)
+        if timeout is not None:
+            return socket.create_connection(address, timeout)
+        return socket.create_connection(address)
 
-    def open(self, host = '', port = IMAP4_PORT):
+    def open(self, host='', port=IMAP4_PORT, timeout=None):
         """Setup connection to remote server on "host:port"
             (default: localhost:standard IMAP4 port).
         This connection will be used by the routines:
@@ -300,7 +309,7 @@
         """
         self.host = host
         self.port = port
-        self.sock = self._create_socket()
+        self.sock = self._create_socket(timeout)
         self.file = self.sock.makefile('rb')
 
 
@@ -502,7 +511,7 @@
     def enable(self, capability):
         """Send an RFC5161 enable string to the server.
 
-        (typ, [data]) = <intance>.enable(capability)
+        (typ, [data]) = <instance>.enable(capability)
         """
         if 'ENABLE' not in self.capabilities:
             raise IMAP4.error("Server does not support ENABLE")
@@ -894,6 +903,22 @@
         return self._simple_command('UNSUBSCRIBE', mailbox)
 
 
+    def unselect(self):
+        """Free server's resources associated with the selected mailbox
+        and returns the server to the authenticated state.
+        This command performs the same actions as CLOSE, except
+        that no messages are permanently removed from the currently
+        selected mailbox.
+
+        (typ, [data]) = <instance>.unselect()
+        """
+        try:
+            typ, data = self._simple_command('UNSELECT')
+        finally:
+            self.state = 'AUTH'
+        return typ, data
+
+
     def xatom(self, name, *args):
         """Allow simple extension commands
                 notified by server in CAPABILITY response.
@@ -1261,7 +1286,7 @@
 
         """IMAP4 client class over SSL connection
 
-        Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile[, ssl_context]]]]])
+        Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile[, ssl_context[, timeout=None]]]]]])
 
                 host - host's name (default: localhost);
                 port - port number (default: standard IMAP4 SSL port);
@@ -1271,13 +1296,15 @@
                               and private key (default: None)
                 Note: if ssl_context is provided, then parameters keyfile or
                 certfile should not be set otherwise ValueError is raised.
+                timeout - socket timeout (default: None); if timeout is not given
+                          or is None, the global default socket timeout is used
 
         for more documentation see the docstring of the parent class IMAP4.
         """
 
 
         def __init__(self, host='', port=IMAP4_SSL_PORT, keyfile=None,
-                     certfile=None, ssl_context=None):
+                     certfile=None, ssl_context=None, timeout=None):
             if ssl_context is not None and keyfile is not None:
                 raise ValueError("ssl_context and keyfile arguments are mutually "
                                  "exclusive")
@@ -1294,20 +1321,20 @@
                 ssl_context = ssl._create_stdlib_context(certfile=certfile,
                                                          keyfile=keyfile)
             self.ssl_context = ssl_context
-            IMAP4.__init__(self, host, port)
+            IMAP4.__init__(self, host, port, timeout)
 
-        def _create_socket(self):
-            sock = IMAP4._create_socket(self)
+        def _create_socket(self, timeout):
+            sock = IMAP4._create_socket(self, timeout)
             return self.ssl_context.wrap_socket(sock,
                                                 server_hostname=self.host)
 
-        def open(self, host='', port=IMAP4_SSL_PORT):
+        def open(self, host='', port=IMAP4_SSL_PORT, timeout=None):
             """Setup connection to remote server on "host:port".
                 (default: localhost:standard IMAP4 SSL port).
             This connection will be used by the routines:
                 read, readline, send, shutdown.
             """
-            IMAP4.open(self, host, port)
+            IMAP4.open(self, host, port, timeout)
 
     __all__.append("IMAP4_SSL")
 
@@ -1329,7 +1356,7 @@
         IMAP4.__init__(self)
 
 
-    def open(self, host = None, port = None):
+    def open(self, host=None, port=None, timeout=None):
         """Setup a stream connection.
         This connection will be used by the routines:
             read, readline, send, shutdown.
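
A sketch of the new timeout parameter and the UNSELECT command, with a hypothetical server name and credentials:

    import imaplib

    client = imaplib.IMAP4_SSL("imap.example.com", timeout=10)
    client.login("user", "password")
    client.select("INBOX")
    typ, data = client.unselect()  # like CLOSE, but expunges no messages
    client.logout()
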
diff --git a/common/py3-stdlib/importlib/_bootstrap.py b/common/py3-stdlib/importlib/_bootstrap.py
index 32deef1..e00b27e 100644
--- a/common/py3-stdlib/importlib/_bootstrap.py
+++ b/common/py3-stdlib/importlib/_bootstrap.py
@@ -67,6 +67,7 @@
         # Deadlock avoidance for concurrent circular imports.
         me = _thread.get_ident()
         tid = self.owner
+        seen = set()
         while True:
             lock = _blocking_on.get(tid)
             if lock is None:
@@ -74,6 +75,14 @@
             tid = lock.owner
             if tid == me:
                 return True
+            if tid in seen:
+                # bpo 38091: the chain of tid's we encounter here
+                # eventually leads to a fixpoint or a cycle, but
+                # does not reach 'me'.  This means we would not
+                # actually deadlock.  This can happen if other
+                # threads are at the beginning of acquire() below.
+                return False
+            seen.add(tid)
 
     def acquire(self):
         """
@@ -371,7 +380,7 @@
                     self.cached == other.cached and
                     self.has_location == other.has_location)
         except AttributeError:
-            return False
+            return NotImplemented
 
     @property
     def cached(self):
@@ -713,6 +722,8 @@
 
     """
 
+    _ORIGIN = "built-in"
+
     @staticmethod
     def module_repr(module):
         """Return repr for the module.
@@ -720,14 +731,14 @@
         The method is deprecated.  The import machinery does the job itself.
 
         """
-        return '<module {!r} (built-in)>'.format(module.__name__)
+        return f'<module {module.__name__!r} ({BuiltinImporter._ORIGIN})>'
 
     @classmethod
     def find_spec(cls, fullname, path=None, target=None):
         if path is not None:
             return None
         if _imp.is_builtin(fullname):
-            return spec_from_loader(fullname, cls, origin='built-in')
+            return spec_from_loader(fullname, cls, origin=cls._ORIGIN)
         else:
             return None
 
@@ -873,7 +884,7 @@
     """Resolve a relative module name to an absolute one."""
     bits = package.rsplit('.', level - 1)
     if len(bits) < level:
-        raise ValueError('attempted relative import beyond top-level package')
+        raise ImportError('attempted relative import beyond top-level package')
     base = bits[0]
     return '{}.{}'.format(base, name) if name else base
 
@@ -976,7 +987,12 @@
     if parent:
         # Set the module as an attribute on its parent.
         parent_module = sys.modules[parent]
-        setattr(parent_module, name.rpartition('.')[2], module)
+        child = name.rpartition('.')[2]
+        try:
+            setattr(parent_module, child, module)
+        except AttributeError:
+            msg = f"Cannot set an attribute on {parent!r} for child module {child!r}"
+            _warnings.warn(msg, ImportWarning)
     return module
 
 
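One observable effect of the _resolve_name() change: resolving a relative import beyond the top-level package now raises ImportError where it previously raised ValueError. A small sketch:

    import importlib

    try:
        importlib.import_module("..os", package="json")  # two levels above 'json'
    except ImportError as exc:
        print(exc)  # attempted relative import beyond top-level package
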
diff --git a/common/py3-stdlib/importlib/_bootstrap_external.py b/common/py3-stdlib/importlib/_bootstrap_external.py
index b8ac482..25a3f8c 100644
--- a/common/py3-stdlib/importlib/_bootstrap_external.py
+++ b/common/py3-stdlib/importlib/_bootstrap_external.py
@@ -34,8 +34,8 @@
             key = b'PYTHONCASEOK'
 
         def _relax_case():
-            """True if filenames must be checked case-insensitively."""
-            return key in _os.environ
+            """True if filenames must be checked case-insensitively and ignore environment flags are not set."""
+            return not sys.flags.ignore_environment and key in _os.environ
     else:
         def _relax_case():
             """True if filenames must be checked case-insensitively."""
@@ -271,6 +271,13 @@
 #     Python 3.8b2  3412 (Swap the position of positional args and positional
 #                         only args in ast.arguments #37593)
 #     Python 3.8b4  3413 (Fix "break" and "continue" in "finally" #37830)
+#     Python 3.9a0  3420 (add LOAD_ASSERTION_ERROR #34880)
+#     Python 3.9a0  3421 (simplified bytecode for with blocks #32949)
+#     Python 3.9a0  3422 (remove BEGIN_FINALLY, END_FINALLY, CALL_FINALLY, POP_FINALLY bytecodes #33387)
+#     Python 3.9a2  3423 (add IS_OP, CONTAINS_OP and JUMP_IF_NOT_EXC_MATCH bytecodes #39156)
+#     Python 3.9a2  3424 (simplify bytecodes for *value unpacking)
+#     Python 3.9a2  3425 (simplify bytecodes for **value unpacking)
+
 #
 # MAGIC must change whenever the bytecode emitted by the compiler may no
 # longer be understood by older implementations of the eval loop (usually
@@ -279,7 +286,7 @@
 # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
 # in PC/launcher.c must also be updated.
 
-MAGIC_NUMBER = (3413).to_bytes(2, 'little') + b'\r\n'
+MAGIC_NUMBER = (3425).to_bytes(2, 'little') + b'\r\n'
 _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little')  # For import.c
 
 _PYCACHE = '__pycache__'
@@ -709,9 +716,9 @@
     @classmethod
     def _open_registry(cls, key):
         try:
-            return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, key)
+            return winreg.OpenKey(winreg.HKEY_CURRENT_USER, key)
         except OSError:
-            return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key)
+            return winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key)
 
     @classmethod
     def _search_registry(cls, fullname):
@@ -723,7 +730,7 @@
                                   sys_version='%d.%d' % sys.version_info[:2])
         try:
             with cls._open_registry(key) as hkey:
-                filepath = _winreg.QueryValue(hkey, '')
+                filepath = winreg.QueryValue(hkey, '')
         except OSError:
             return None
         return filepath
@@ -1577,14 +1584,7 @@
     sys = _bootstrap.sys
     _imp = _bootstrap._imp
 
-    # Directly load built-in modules needed during bootstrap.
     self_module = sys.modules[__name__]
-    for builtin_name in ('_io', '_warnings', 'builtins', 'marshal'):
-        if builtin_name not in sys.modules:
-            builtin_module = _bootstrap._builtin_from_name(builtin_name)
-        else:
-            builtin_module = sys.modules[builtin_name]
-        setattr(self_module, builtin_name, builtin_module)
 
     # Directly load the os module (needed during bootstrap).
     os_details = ('posix', ['/']), ('nt', ['\\', '/'])
@@ -1603,23 +1603,22 @@
                 continue
     else:
         raise ImportError('importlib requires posix or nt')
+
     setattr(self_module, '_os', os_module)
     setattr(self_module, 'path_sep', path_sep)
     setattr(self_module, 'path_separators', ''.join(path_separators))
     setattr(self_module, '_pathseps_with_colon', {f':{s}' for s in path_separators})
 
-    # Directly load the _thread module (needed during bootstrap).
-    thread_module = _bootstrap._builtin_from_name('_thread')
-    setattr(self_module, '_thread', thread_module)
-
-    # Directly load the _weakref module (needed during bootstrap).
-    weakref_module = _bootstrap._builtin_from_name('_weakref')
-    setattr(self_module, '_weakref', weakref_module)
-
-    # Directly load the winreg module (needed during bootstrap).
+    # Directly load built-in modules needed during bootstrap.
+    builtin_names = ['_io', '_warnings', 'marshal']
     if builtin_os == 'nt':
-        winreg_module = _bootstrap._builtin_from_name('winreg')
-        setattr(self_module, '_winreg', winreg_module)
+        builtin_names.append('winreg')
+    for builtin_name in builtin_names:
+        if builtin_name not in sys.modules:
+            builtin_module = _bootstrap._builtin_from_name(builtin_name)
+        else:
+            builtin_module = sys.modules[builtin_name]
+        setattr(self_module, builtin_name, builtin_module)
 
     # Constants
     setattr(self_module, '_relax_case', _make_relax_case())
diff --git a/common/py3-stdlib/importlib/_common.py b/common/py3-stdlib/importlib/_common.py
new file mode 100644
index 0000000..c1204f0
--- /dev/null
+++ b/common/py3-stdlib/importlib/_common.py
@@ -0,0 +1,62 @@
+import os
+import pathlib
+import zipfile
+import tempfile
+import functools
+import contextlib
+
+
+def from_package(package):
+    """
+    Return a Traversable object for the given package.
+
+    """
+    return fallback_resources(package.__spec__)
+
+
+def fallback_resources(spec):
+    package_directory = pathlib.Path(spec.origin).parent
+    try:
+        archive_path = spec.loader.archive
+        rel_path = package_directory.relative_to(archive_path)
+        return zipfile.Path(archive_path, str(rel_path) + '/')
+    except Exception:
+        pass
+    return package_directory
+
+
+@contextlib.contextmanager
+def _tempfile(reader, suffix=''):
+    # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+    # blocks due to the need to close the temporary file to work on Windows
+    # properly.
+    fd, raw_path = tempfile.mkstemp(suffix=suffix)
+    try:
+        os.write(fd, reader())
+        os.close(fd)
+        yield pathlib.Path(raw_path)
+    finally:
+        try:
+            os.remove(raw_path)
+        except FileNotFoundError:
+            pass
+
+
+@functools.singledispatch
+@contextlib.contextmanager
+def as_file(path):
+    """
+    Given a Traversable object, return that object as a
+    path on the local file system in a context manager.
+    """
+    with _tempfile(path.read_bytes, suffix=path.name) as local:
+        yield local
+
+
+@as_file.register(pathlib.Path)
+@contextlib.contextmanager
+def _(path):
+    """
+    Degenerate behavior for pathlib.Path objects.
+    """
+    yield path
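
The as_file() helper added here materializes a Traversable as a concrete filesystem path, extracting to a temporary file only when the resource is not already a real path (e.g. inside a zip). A sketch via its public entry point:

    from importlib import resources

    traversable = resources.files("importlib") / "abc.py"
    with resources.as_file(traversable) as path:
        print(path)  # a pathlib.Path, valid inside the with block
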
diff --git a/common/py3-stdlib/importlib/abc.py b/common/py3-stdlib/importlib/abc.py
index 4b2d3de..b8a9bb1 100644
--- a/common/py3-stdlib/importlib/abc.py
+++ b/common/py3-stdlib/importlib/abc.py
@@ -10,10 +10,11 @@
     _frozen_importlib = None
 try:
     import _frozen_importlib_external
-except ImportError as exc:
+except ImportError:
     _frozen_importlib_external = _bootstrap_external
 import abc
 import warnings
+from typing import Protocol, runtime_checkable
 
 
 def _register(abstract_cls, *classes):
@@ -386,3 +387,88 @@
 
 
 _register(ResourceReader, machinery.SourceFileLoader)
+
+
+@runtime_checkable
+class Traversable(Protocol):
+    """
+    An object with a subset of pathlib.Path methods suitable for
+    traversing directories and opening files.
+    """
+
+    @abc.abstractmethod
+    def iterdir(self):
+        """
+        Yield Traversable objects in self
+        """
+
+    @abc.abstractmethod
+    def read_bytes(self):
+        """
+        Read contents of self as bytes
+        """
+
+    @abc.abstractmethod
+    def read_text(self, encoding=None):
+        """
+        Read contents of self as text
+        """
+
+    @abc.abstractmethod
+    def is_dir(self):
+        """
+        Return True if self is a dir
+        """
+
+    @abc.abstractmethod
+    def is_file(self):
+        """
+        Return True if self is a file
+        """
+
+    @abc.abstractmethod
+    def joinpath(self, child):
+        """
+        Return Traversable child in self
+        """
+
+    @abc.abstractmethod
+    def __truediv__(self, child):
+        """
+        Return Traversable child in self
+        """
+
+    @abc.abstractmethod
+    def open(self, mode='r', *args, **kwargs):
+        """
+        mode may be 'r' or 'rb' to open as text or binary. Return a handle
+        suitable for reading (same as pathlib.Path.open).
+
+        When opening as text, accepts encoding parameters such as those
+        accepted by io.TextIOWrapper.
+        """
+
+    @abc.abstractproperty
+    def name(self):
+        # type: () -> str
+        """
+        The base name of this object without any parent references.
+        """
+
+
+class TraversableResources(ResourceReader):
+    @abc.abstractmethod
+    def files(self):
+        """Return a Traversable object for the loaded package."""
+
+    def open_resource(self, resource):
+        return self.files().joinpath(resource).open('rb')
+
+    def resource_path(self, resource):
+        raise FileNotFoundError(resource)
+
+    def is_resource(self, path):
+        return self.files().joinpath(path).is_file()
+
+    def contents(self):
+        return (item.name for item in self.files().iterdir())
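
Because Traversable is a runtime-checkable Protocol, isinstance() tests for the required methods rather than for inheritance; pathlib.Path satisfies it structurally:

    import pathlib
    from importlib.abc import Traversable

    print(isinstance(pathlib.Path("."), Traversable))  # True
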
diff --git a/common/py3-stdlib/importlib/metadata.py b/common/py3-stdlib/importlib/metadata.py
index 831f593..ffa0cba 100644
--- a/common/py3-stdlib/importlib/metadata.py
+++ b/common/py3-stdlib/importlib/metadata.py
@@ -79,6 +79,16 @@
         return functools.reduce(getattr, attrs, module)
 
     @property
+    def module(self):
+        match = self.pattern.match(self.value)
+        return match.group('module')
+
+    @property
+    def attr(self):
+        match = self.pattern.match(self.value)
+        return match.group('attr')
+
+    @property
     def extras(self):
         match = self.pattern.match(self.value)
         return list(re.finditer(r'\w+', match.group('extras') or ''))
@@ -170,7 +180,7 @@
         """
         for resolver in cls._discover_resolvers():
             dists = resolver(DistributionFinder.Context(name=name))
-            dist = next(dists, None)
+            dist = next(iter(dists), None)
             if dist is not None:
                 return dist
         else:
@@ -213,6 +223,17 @@
             )
         return filter(None, declared)
 
+    @classmethod
+    def _local(cls, root='.'):
+        from pep517 import build, meta
+        system = build.compat_system(root)
+        builder = functools.partial(
+            meta.build,
+            source_dir=root,
+            system=system,
+            )
+        return PathDistribution(zipfile.Path(meta.build_as_zip(builder)))
+
     @property
     def metadata(self):
         """Return the parsed metadata for this Distribution.
@@ -391,7 +412,7 @@
 
     def __init__(self, root):
         self.root = root
-        self.base = os.path.basename(root).lower()
+        self.base = os.path.basename(self.root).lower()
 
     def joinpath(self, child):
         return pathlib.Path(self.root, child)
@@ -408,8 +429,8 @@
         names = zip_path.root.namelist()
         self.joinpath = zip_path.joinpath
 
-        return (
-            posixpath.split(child)[0]
+        return dict.fromkeys(
+            child.split(posixpath.sep, 1)[0]
             for child in names
             )
 
@@ -475,7 +496,6 @@
             )
 
 
-
 class PathDistribution(Distribution):
     def __init__(self, path):
         """Construct a distribution from a path to the metadata directory.
diff --git a/common/py3-stdlib/importlib/resources.py b/common/py3-stdlib/importlib/resources.py
index fc3a1c9..b803a01 100644
--- a/common/py3-stdlib/importlib/resources.py
+++ b/common/py3-stdlib/importlib/resources.py
@@ -1,23 +1,25 @@
 import os
-import tempfile
 
 from . import abc as resources_abc
+from . import _common
+from ._common import as_file
 from contextlib import contextmanager, suppress
 from importlib import import_module
 from importlib.abc import ResourceLoader
 from io import BytesIO, TextIOWrapper
 from pathlib import Path
 from types import ModuleType
-from typing import Iterable, Iterator, Optional, Set, Union   # noqa: F401
+from typing import ContextManager, Iterable, Optional, Union
 from typing import cast
 from typing.io import BinaryIO, TextIO
-from zipimport import ZipImportError
 
 
 __all__ = [
     'Package',
     'Resource',
+    'as_file',
     'contents',
+    'files',
     'is_resource',
     'open_binary',
     'open_text',
@@ -31,24 +33,23 @@
 Resource = Union[str, os.PathLike]
 
 
+def _resolve(name) -> ModuleType:
+    """If name is a string, resolve to a module."""
+    if hasattr(name, '__spec__'):
+        return name
+    return import_module(name)
+
+
 def _get_package(package) -> ModuleType:
     """Take a package name or module object and return the module.
 
-    If a name, the module is imported.  If the passed or imported module
+    If a name, the module is imported.  If the resolved module
     object is not a package, raise an exception.
     """
-    if hasattr(package, '__spec__'):
-        if package.__spec__.submodule_search_locations is None:
-            raise TypeError('{!r} is not a package'.format(
-                package.__spec__.name))
-        else:
-            return package
-    else:
-        module = import_module(package)
-        if module.__spec__.submodule_search_locations is None:
-            raise TypeError('{!r} is not a package'.format(package))
-        else:
-            return module
+    module = _resolve(package)
+    if module.__spec__.submodule_search_locations is None:
+        raise TypeError('{!r} is not a package'.format(package))
+    return module
 
 
 def _normalize_path(path) -> str:
@@ -59,8 +60,7 @@
     parent, file_name = os.path.split(path)
     if parent:
         raise ValueError('{!r} must be only a file name'.format(path))
-    else:
-        return file_name
+    return file_name
 
 
 def _get_resource_reader(
@@ -89,8 +89,8 @@
     reader = _get_resource_reader(package)
     if reader is not None:
         return reader.open_resource(resource)
-    _check_location(package)
-    absolute_package_path = os.path.abspath(package.__spec__.origin)
+    absolute_package_path = os.path.abspath(
+        package.__spec__.origin or 'non-existent file')
     package_path = os.path.dirname(absolute_package_path)
     full_path = os.path.join(package_path, resource)
     try:
@@ -109,8 +109,7 @@
             message = '{!r} resource not found in {!r}'.format(
                 resource, package_name)
             raise FileNotFoundError(message)
-        else:
-            return BytesIO(data)
+        return BytesIO(data)
 
 
 def open_text(package: Package,
@@ -118,39 +117,12 @@
               encoding: str = 'utf-8',
               errors: str = 'strict') -> TextIO:
     """Return a file-like object opened for text reading of the resource."""
-    resource = _normalize_path(resource)
-    package = _get_package(package)
-    reader = _get_resource_reader(package)
-    if reader is not None:
-        return TextIOWrapper(reader.open_resource(resource), encoding, errors)
-    _check_location(package)
-    absolute_package_path = os.path.abspath(package.__spec__.origin)
-    package_path = os.path.dirname(absolute_package_path)
-    full_path = os.path.join(package_path, resource)
-    try:
-        return open(full_path, mode='r', encoding=encoding, errors=errors)
-    except OSError:
-        # Just assume the loader is a resource loader; all the relevant
-        # importlib.machinery loaders are and an AttributeError for
-        # get_data() will make it clear what is needed from the loader.
-        loader = cast(ResourceLoader, package.__spec__.loader)
-        data = None
-        if hasattr(package.__spec__.loader, 'get_data'):
-            with suppress(OSError):
-                data = loader.get_data(full_path)
-        if data is None:
-            package_name = package.__spec__.name
-            message = '{!r} resource not found in {!r}'.format(
-                resource, package_name)
-            raise FileNotFoundError(message)
-        else:
-            return TextIOWrapper(BytesIO(data), encoding, errors)
+    return TextIOWrapper(
+        open_binary(package, resource), encoding=encoding, errors=errors)
 
 
 def read_binary(package: Package, resource: Resource) -> bytes:
     """Return the binary contents of the resource."""
-    resource = _normalize_path(resource)
-    package = _get_package(package)
     with open_binary(package, resource) as fp:
         return fp.read()
 
@@ -164,14 +136,20 @@
     The decoding-related arguments have the same semantics as those of
     bytes.decode().
     """
-    resource = _normalize_path(resource)
-    package = _get_package(package)
     with open_text(package, resource, encoding, errors) as fp:
         return fp.read()
 
 
-@contextmanager
-def path(package: Package, resource: Resource) -> Iterator[Path]:
+def files(package: Package) -> resources_abc.Traversable:
+    """
+    Get a Traversable resource from a package
+    """
+    return _common.from_package(_get_package(package))
+
+
+def path(
+        package: Package, resource: Resource,
+        ) -> 'ContextManager[Path]':
     """A context manager providing a file path object to the resource.
 
     If the resource does not already exist on its own on the file system,
@@ -180,39 +158,23 @@
     raised if the file was deleted prior to the context manager
     exiting).
     """
-    resource = _normalize_path(resource)
-    package = _get_package(package)
-    reader = _get_resource_reader(package)
-    if reader is not None:
-        try:
-            yield Path(reader.resource_path(resource))
-            return
-        except FileNotFoundError:
-            pass
-    else:
-        _check_location(package)
-    # Fall-through for both the lack of resource_path() *and* if
-    # resource_path() raises FileNotFoundError.
-    package_directory = Path(package.__spec__.origin).parent
-    file_path = package_directory / resource
-    if file_path.exists():
-        yield file_path
-    else:
-        with open_binary(package, resource) as fp:
-            data = fp.read()
-        # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
-        # blocks due to the need to close the temporary file to work on
-        # Windows properly.
-        fd, raw_path = tempfile.mkstemp()
-        try:
-            os.write(fd, data)
-            os.close(fd)
-            yield Path(raw_path)
-        finally:
-            try:
-                os.remove(raw_path)
-            except FileNotFoundError:
-                pass
+    reader = _get_resource_reader(_get_package(package))
+    return (
+        _path_from_reader(reader, resource)
+        if reader else
+        _common.as_file(files(package).joinpath(_normalize_path(resource)))
+        )
+
+
+@contextmanager
+def _path_from_reader(reader, resource):
+    norm_resource = _normalize_path(resource)
+    with suppress(FileNotFoundError):
+        yield Path(reader.resource_path(norm_resource))
+        return
+    opener_reader = reader.open_resource(norm_resource)
+    with _common._tempfile(opener_reader.read, suffix=norm_resource) as res:
+        yield res
 
 
 def is_resource(package: Package, name: str) -> bool:
@@ -225,17 +187,10 @@
     reader = _get_resource_reader(package)
     if reader is not None:
         return reader.is_resource(name)
-    try:
-        package_contents = set(contents(package))
-    except (NotADirectoryError, FileNotFoundError):
-        return False
+    package_contents = set(contents(package))
     if name not in package_contents:
         return False
-    # Just because the given file_name lives as an entry in the package's
-    # contents doesn't necessarily mean it's a resource.  Directories are not
-    # resources, so let's try to find out if it's a directory or not.
-    path = Path(package.__spec__.origin).parent / name
-    return path.is_file()
+    return (_common.from_package(package) / name).is_file()
 
 
 def contents(package: Package) -> Iterable[str]:
@@ -250,10 +205,11 @@
     if reader is not None:
         return reader.contents()
     # Is the package a namespace package?  By definition, namespace packages
-    # cannot have resources.  We could use _check_location() and catch the
-    # exception, but that's extra work, so just inline the check.
-    elif package.__spec__.origin is None or not package.__spec__.has_location:
+    # cannot have resources.
+    namespace = (
+        package.__spec__.origin is None or
+        package.__spec__.origin == 'namespace'
+        )
+    if namespace or not package.__spec__.has_location:
         return ()
-    else:
-        package_directory = Path(package.__spec__.origin).parent
-        return os.listdir(package_directory)
+    return list(item.name for item in _common.from_package(package).iterdir())
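
After this refactor, files() is the primary entry point and the legacy helpers are thin wrappers over it (read_text -> open_text -> open_binary). A sketch showing both spellings:

    from importlib import resources

    new_style = (resources.files("importlib") / "abc.py").read_text()
    old_style = resources.read_text("importlib", "abc.py")
    assert new_style == old_style
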
diff --git a/common/py3-stdlib/importlib/util.py b/common/py3-stdlib/importlib/util.py
index 201e0f4..269a6fa 100644
--- a/common/py3-stdlib/importlib/util.py
+++ b/common/py3-stdlib/importlib/util.py
@@ -29,8 +29,8 @@
     if not name.startswith('.'):
         return name
     elif not package:
-        raise ValueError(f'no package specified for {repr(name)} '
-                         '(required for relative module names)')
+        raise ImportError(f'no package specified for {repr(name)} '
+                          '(required for relative module names)')
     level = 0
     for character in name:
         if character != '.':
diff --git a/common/py3-stdlib/inspect.py b/common/py3-stdlib/inspect.py
index e8ea8c2..18bed90 100644
--- a/common/py3-stdlib/inspect.py
+++ b/common/py3-stdlib/inspect.py
@@ -32,6 +32,7 @@
               'Yury Selivanov <yselivanov@sprymix.com>')
 
 import abc
+import ast
 import dis
 import collections.abc
 import enum
@@ -769,6 +770,42 @@
         if builtinobject is object:
             return builtin
 
+
+class ClassFoundException(Exception):
+    pass
+
+
+class _ClassFinder(ast.NodeVisitor):
+
+    def __init__(self, qualname):
+        self.stack = []
+        self.qualname = qualname
+
+    def visit_FunctionDef(self, node):
+        self.stack.append(node.name)
+        self.stack.append('<locals>')
+        self.generic_visit(node)
+        self.stack.pop()
+        self.stack.pop()
+
+    visit_AsyncFunctionDef = visit_FunctionDef
+
+    def visit_ClassDef(self, node):
+        self.stack.append(node.name)
+        if self.qualname == '.'.join(self.stack):
+            # Return the decorator for the class if present
+            if node.decorator_list:
+                line_number = node.decorator_list[0].lineno
+            else:
+                line_number = node.lineno
+
+            # decrement by one since line numbers are 1-based but list indexing is 0-based
+            line_number -= 1
+            raise ClassFoundException(line_number)
+        self.generic_visit(node)
+        self.stack.pop()
+
+
 def findsource(object):
     """Return the entire source file and starting line number for an object.
 
@@ -801,25 +838,15 @@
         return lines, 0
 
     if isclass(object):
-        name = object.__name__
-        pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
-        # make some effort to find the best matching class definition:
-        # use the one with the least indentation, which is the one
-        # that's most probably not inside a function definition.
-        candidates = []
-        for i in range(len(lines)):
-            match = pat.match(lines[i])
-            if match:
-                # if it's at toplevel, it's already the best one
-                if lines[i][0] == 'c':
-                    return lines, i
-                # else add whitespace to candidate list
-                candidates.append((match.group(1), i))
-        if candidates:
-            # this will sort by whitespace, and by line number,
-            # less whitespace first
-            candidates.sort()
-            return lines, candidates[0][1]
+        qualname = object.__qualname__
+        source = ''.join(lines)
+        tree = ast.parse(source)
+        class_finder = _ClassFinder(qualname)
+        try:
+            class_finder.visit(tree)
+        except ClassFoundException as e:
+            line_number = e.args[0]
+            return lines, line_number
         else:
             raise OSError('could not find class definition')
 
@@ -837,7 +864,12 @@
         lnum = object.co_firstlineno - 1
         pat = re.compile(r'^(\s*def\s)|(\s*async\s+def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
         while lnum > 0:
-            if pat.match(lines[lnum]): break
+            try:
+                line = lines[lnum]
+            except IndexError:
+                raise OSError('lineno is out of bounds')
+            if pat.match(line):
+                break
             lnum = lnum - 1
         return lines, lnum
     raise OSError('could not find code object')
@@ -899,6 +931,7 @@
         self.indecorator = False
         self.decoratorhasargs = False
         self.last = 1
+        self.body_col0 = None
 
     def tokeneater(self, type, token, srowcol, erowcol, line):
         if not self.started and not self.indecorator:
@@ -930,6 +963,8 @@
         elif self.passline:
             pass
         elif type == tokenize.INDENT:
+            if self.body_col0 is None and self.started:
+                self.body_col0 = erowcol[1]
             self.indent = self.indent + 1
             self.passline = True
         elif type == tokenize.DEDENT:
@@ -939,6 +974,10 @@
             #  not e.g. for "if: else:" or "try: finally:" blocks)
             if self.indent <= 0:
                 raise EndOfBlock
+        elif type == tokenize.COMMENT:
+            if self.body_col0 is not None and srowcol[1] >= self.body_col0:
+                # Include comments if indented at least as much as the block
+                self.last = srowcol[0]
         elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL):
             # any other token on the same indentation level end the previous
             # block as well, except the pseudo-tokens COMMENT and NL.
@@ -1136,7 +1175,6 @@
     varkw = None
     posonlyargs = []
     kwonlyargs = []
-    defaults = ()
     annotations = {}
     defaults = ()
     kwdefaults = {}
@@ -2603,7 +2641,7 @@
 
     Has the following public attributes:
 
-    * arguments : OrderedDict
+    * arguments : dict
         An ordered mutable mapping of parameters' names to arguments' values.
         Does not contain arguments' default values.
     * signature : Signature
@@ -2703,7 +2741,7 @@
                     # Signature.bind_partial().
                     continue
                 new_arguments.append((name, val))
-        self.arguments = OrderedDict(new_arguments)
+        self.arguments = dict(new_arguments)
 
     def __eq__(self, other):
         if self is other:
@@ -2771,7 +2809,7 @@
                 top_kind = _POSITIONAL_ONLY
                 kind_defaults = False
 
-                for idx, param in enumerate(parameters):
+                for param in parameters:
                     kind = param.kind
                     name = param.name
 
@@ -2806,8 +2844,7 @@
 
                     params[name] = param
             else:
-                params = OrderedDict(((param.name, param)
-                                                for param in parameters))
+                params = OrderedDict((param.name, param) for param in parameters)
 
         self._parameters = types.MappingProxyType(params)
         self._return_annotation = return_annotation
@@ -2889,7 +2926,7 @@
     def _bind(self, args, kwargs, *, partial=False):
         """Private method. Don't use directly."""
 
-        arguments = OrderedDict()
+        arguments = {}
 
         parameters = iter(self.parameters.values())
         parameters_ex = ()
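
A sketch of the visible effect of dropping OrderedDict here: BoundArguments.arguments is now a plain dict (insertion-ordered since Python 3.7):

    import inspect

    def f(a, b, *, c=0):
        return a + b + c

    bound = inspect.signature(f).bind(1, 2, c=3)
    print(type(bound.arguments))  # <class 'dict'>
    print(bound.arguments)        # {'a': 1, 'b': 2, 'c': 3}
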
diff --git a/common/py3-stdlib/ipaddress.py b/common/py3-stdlib/ipaddress.py
index a3a04f7..bc662c4 100644
--- a/common/py3-stdlib/ipaddress.py
+++ b/common/py3-stdlib/ipaddress.py
@@ -560,6 +560,8 @@
         return self.__class__, (str(self),)
 
 
+_address_fmt_re = None
+
 @functools.total_ordering
 class _BaseAddress(_IPAddressBase):
 
@@ -618,6 +620,55 @@
     def __reduce__(self):
         return self.__class__, (self._ip,)
 
+    def __format__(self, fmt):
+        """Returns an IP address as a formatted string.
+
+        Supported presentation types are:
+        's': returns the IP address as a string (default)
+        'b': converts to binary and returns a zero-padded string
+        'X' or 'x': converts to upper- or lower-case hex and returns a zero-padded string
+        'n': the same as 'b' for IPv4 and 'x' for IPv6
+
+        For binary and hex presentation types, the alternate form specifier
+        '#' and the grouping option '_' are supported.
+        """
+
+        # Support string formatting
+        if not fmt or fmt[-1] == 's':
+            return format(str(self), fmt)
+
+        # From here on down, support for 'bnXx'
+        global _address_fmt_re
+        if _address_fmt_re is None:
+            import re
+            _address_fmt_re = re.compile('(#?)(_?)([xbnX])')
+
+        m = _address_fmt_re.fullmatch(fmt)
+        if not m:
+            return super().__format__(fmt)
+
+        alternate, grouping, fmt_base = m.groups()
+
+        # Set some defaults
+        if fmt_base == 'n':
+            if self._version == 4:
+                fmt_base = 'b'  # Binary is default for ipv4
+            else:
+                fmt_base = 'x'  # Hex is default for ipv6
+
+        if fmt_base == 'b':
+            padlen = self._max_prefixlen
+        else:
+            padlen = self._max_prefixlen // 4
+
+        if grouping:
+            padlen += padlen // 4 - 1
+
+        if alternate:
+            padlen += 2  # 0b or 0x
+
+        return format(int(self), f'{alternate}0{padlen}{grouping}{fmt_base}')
+
 
 @functools.total_ordering
 class _BaseNetwork(_IPAddressBase):
@@ -1073,7 +1124,6 @@
         return (self.network_address.is_loopback and
                 self.broadcast_address.is_loopback)
 
-
 class _BaseV4:
 
     """Base IPv4 object.
@@ -1347,7 +1397,7 @@
 
     def __eq__(self, other):
         address_equal = IPv4Address.__eq__(self, other)
-        if not address_equal or address_equal is NotImplemented:
+        if address_equal is NotImplemented or not address_equal:
             return address_equal
         try:
             return self.network == other.network
@@ -1416,7 +1466,7 @@
             address: A string or integer representing the IP [& network].
               '192.0.2.0/24'
               '192.0.2.0/255.255.255.0'
-              '192.0.0.2/0.0.0.255'
+              '192.0.2.0/0.0.0.255'
               are all functionally the same in IPv4. Similarly,
               '192.0.2.1'
               '192.0.2.1/255.255.255.255'
@@ -1458,6 +1508,8 @@
 
         if self._prefixlen == (self._max_prefixlen - 1):
             self.hosts = self.__iter__
+        elif self._prefixlen == (self._max_prefixlen):
+            self.hosts = lambda: [IPv4Address(addr)]
 
     @property
     @functools.lru_cache()
@@ -1785,6 +1837,26 @@
         reverse_chars = self.exploded[::-1].replace(':', '')
         return '.'.join(reverse_chars) + '.ip6.arpa'
 
+    @staticmethod
+    def _split_scope_id(ip_str):
+        """Helper function to parse IPv6 string address with scope id.
+
+        See RFC 4007 for details.
+
+        Args:
+            ip_str: A string, the IPv6 address.
+
+        Returns:
+            (addr, scope_id) tuple.
+
+        """
+        addr, sep, scope_id = ip_str.partition('%')
+        if not sep:
+            scope_id = None
+        elif not scope_id or '%' in scope_id:
+            raise AddressValueError('Invalid IPv6 address: "%r"' % ip_str)
+        return addr, scope_id
+
     @property
     def max_prefixlen(self):
         return self._max_prefixlen
@@ -1798,7 +1870,7 @@
 
     """Represent and manipulate single IPv6 Addresses."""
 
-    __slots__ = ('_ip', '__weakref__')
+    __slots__ = ('_ip', '_scope_id', '__weakref__')
 
     def __init__(self, address):
         """Instantiate a new IPv6 address object.
@@ -1821,12 +1893,14 @@
         if isinstance(address, int):
             self._check_int_address(address)
             self._ip = address
+            self._scope_id = None
             return
 
         # Constructing from a packed address
         if isinstance(address, bytes):
             self._check_packed_address(address, 16)
             self._ip = int.from_bytes(address, 'big')
+            self._scope_id = None
             return
 
         # Assume input argument to be string or any object representation
@@ -1834,8 +1908,37 @@
         addr_str = str(address)
         if '/' in addr_str:
             raise AddressValueError("Unexpected '/' in %r" % address)
+        addr_str, self._scope_id = self._split_scope_id(addr_str)
+
         self._ip = self._ip_int_from_string(addr_str)
 
+    def __str__(self):
+        ip_str = super().__str__()
+        return ip_str + '%' + self._scope_id if self._scope_id else ip_str
+
+    def __hash__(self):
+        return hash((self._ip, self._scope_id))
+
+    def __eq__(self, other):
+        address_equal = super().__eq__(other)
+        if address_equal is NotImplemented:
+            return NotImplemented
+        if not address_equal:
+            return False
+        return self._scope_id == getattr(other, '_scope_id', None)
+
+    @property
+    def scope_id(self):
+        """Identifier of a particular zone of the address's scope.
+
+        See RFC 4007 for details.
+
+        Returns:
+            A string identifying the zone of the address if specified, else None.
+
+        """
+        return self._scope_id
+
     @property
     def packed(self):
         """The binary representation of this address."""
@@ -1989,12 +2092,12 @@
         return self.network.hostmask
 
     def __str__(self):
-        return '%s/%d' % (self._string_from_ip_int(self._ip),
+        return '%s/%d' % (super().__str__(),
                           self._prefixlen)
 
     def __eq__(self, other):
         address_equal = IPv6Address.__eq__(self, other)
-        if not address_equal or address_equal is NotImplemented:
+        if address_equal is NotImplemented or not address_equal:
             return address_equal
         try:
             return self.network == other.network
@@ -2007,7 +2110,7 @@
     def __lt__(self, other):
         address_less = IPv6Address.__lt__(self, other)
         if address_less is NotImplemented:
-            return NotImplemented
+            return address_less
         try:
             return (self.network < other.network or
                     self.network == other.network and address_less)
@@ -2110,6 +2213,8 @@
 
         if self._prefixlen == (self._max_prefixlen - 1):
             self.hosts = self.__iter__
+        elif self._prefixlen == self._max_prefixlen:
+            self.hosts = lambda: [IPv6Address(addr)]
 
     def hosts(self):
         """Generate Iterator over usable hosts in a network.
diff --git a/common/py3-stdlib/json/__init__.py b/common/py3-stdlib/json/__init__.py
index 1ba8b48..2c52bde 100644
--- a/common/py3-stdlib/json/__init__.py
+++ b/common/py3-stdlib/json/__init__.py
@@ -329,8 +329,6 @@
 
     To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
     kwarg; otherwise ``JSONDecoder`` is used.
-
-    The ``encoding`` argument is ignored and deprecated since Python 3.1.
     """
     if isinstance(s, str):
         if s.startswith('\ufeff'):
@@ -342,15 +340,6 @@
                             f'not {s.__class__.__name__}')
         s = s.decode(detect_encoding(s), 'surrogatepass')
 
-    if "encoding" in kw:
-        import warnings
-        warnings.warn(
-            "'encoding' is ignored and deprecated. It will be removed in Python 3.9",
-            DeprecationWarning,
-            stacklevel=2
-        )
-        del kw['encoding']
-
     if (cls is None and object_hook is None and
             parse_int is None and parse_float is None and
             parse_constant is None and object_pairs_hook is None and not kw):
diff --git a/common/py3-stdlib/json/tool.py b/common/py3-stdlib/json/tool.py
index c42138a..5dee0a7 100644
--- a/common/py3-stdlib/json/tool.py
+++ b/common/py3-stdlib/json/tool.py
@@ -30,22 +30,42 @@
                         default=sys.stdout)
     parser.add_argument('--sort-keys', action='store_true', default=False,
                         help='sort the output of dictionaries alphabetically by key')
+    parser.add_argument('--no-ensure-ascii', dest='ensure_ascii', action='store_false',
+                        help='disable escaping of non-ASCII characters')
     parser.add_argument('--json-lines', action='store_true', default=False,
-                        help='parse input using the jsonlines format')
+                        help='parse input using the JSON Lines format. '
+                        'Use with --no-indent or --compact to produce valid JSON Lines output.')
+    group = parser.add_mutually_exclusive_group()
+    group.add_argument('--indent', default=4, type=int,
+                       help='separate items with newlines and use this number '
+                       'of spaces for indentation')
+    group.add_argument('--tab', action='store_const', dest='indent',
+                       const='\t', help='separate items with newlines and use '
+                       'tabs for indentation')
+    group.add_argument('--no-indent', action='store_const', dest='indent',
+                       const=None,
+                       help='separate items with spaces rather than newlines')
+    group.add_argument('--compact', action='store_true',
+                       help='suppress all whitespace separation (most compact)')
     options = parser.parse_args()
 
-    infile = options.infile
-    outfile = options.outfile
-    sort_keys = options.sort_keys
-    json_lines = options.json_lines
-    with infile, outfile:
+    dump_args = {
+        'sort_keys': options.sort_keys,
+        'indent': options.indent,
+        'ensure_ascii': options.ensure_ascii,
+    }
+    if options.compact:
+        dump_args['indent'] = None
+        dump_args['separators'] = ',', ':'
+
+    with options.infile as infile, options.outfile as outfile:
         try:
-            if json_lines:
+            if options.json_lines:
                 objs = (json.loads(line) for line in infile)
             else:
                 objs = (json.load(infile), )
             for obj in objs:
-                json.dump(obj, outfile, sort_keys=sort_keys, indent=4)
+                json.dump(obj, outfile, **dump_args)
                 outfile.write('\n')
         except ValueError as e:
             raise SystemExit(e)
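
A sketch of the new json.tool flags, driven through subprocess so the round trip is visible (assumes a Python with this change on PATH as sys.executable):

    import json, subprocess, sys

    out = subprocess.run(
        [sys.executable, '-m', 'json.tool', '--sort-keys', '--compact'],
        input='{"b": 1, "a": 2}', capture_output=True, text=True,
    ).stdout
    print(out)                                 # {"a":2,"b":1}
    assert json.loads(out) == {'a': 2, 'b': 1}
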
diff --git a/common/py3-stdlib/keyword.py b/common/py3-stdlib/keyword.py
index ddcbb25..59fcfb0 100644
--- a/common/py3-stdlib/keyword.py
+++ b/common/py3-stdlib/keyword.py
@@ -1,23 +1,25 @@
-"""Keywords (from "Grammar/Grammar")
+"""Keywords (from "Grammar/python.gram")
 
 This file is automatically generated; please don't muck it up!
 
 To update the symbols in this file, 'cd' to the top directory of
 the python source tree and run:
 
-    python3 -m Parser.pgen.keywordgen Grammar/Grammar \
-                                      Grammar/Tokens \
-                                      Lib/keyword.py
+    PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \
+        Grammar/Grammar \
+        Grammar/Tokens \
+        Lib/keyword.py
 
 Alternatively, you can run 'make regen-keyword'.
 """
 
-__all__ = ["iskeyword", "kwlist"]
+__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"]
 
 kwlist = [
     'False',
     'None',
     'True',
+    '__peg_parser__',
     'and',
     'as',
     'assert',
@@ -52,4 +54,9 @@
     'yield'
 ]
 
+softkwlist = [
+
+]
+
 iskeyword = frozenset(kwlist).__contains__
+issoftkeyword = frozenset(softkwlist).__contains__
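
Sketch of the resulting API in this 3.9 snapshot (softkwlist is still empty here; soft keywords such as 'match' only arrive in later releases):

    import keyword

    print(keyword.iskeyword('__peg_parser__'))   # True on 3.9's PEG parser
    print(keyword.issoftkeyword('match'))        # False here: softkwlist == []
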
diff --git a/common/py3-stdlib/linecache.py b/common/py3-stdlib/linecache.py
index c87e180..fa5dbd0 100644
--- a/common/py3-stdlib/linecache.py
+++ b/common/py3-stdlib/linecache.py
@@ -10,17 +10,8 @@
 import os
 import tokenize
 
-__all__ = ["getline", "clearcache", "checkcache"]
+__all__ = ["getline", "clearcache", "checkcache", "lazycache"]
 
-def getline(filename, lineno, module_globals=None):
-    lines = getlines(filename, module_globals)
-    if 1 <= lineno <= len(lines):
-        return lines[lineno-1]
-    else:
-        return ''
-
-
-# The cache
 
 # The cache. Maps filenames to either a thunk which will provide source code,
 # or a tuple (size, mtime, lines, fullname) once loaded.
@@ -29,9 +20,17 @@
 
 def clearcache():
     """Clear the cache entirely."""
+    cache.clear()
 
-    global cache
-    cache = {}
+
+def getline(filename, lineno, module_globals=None):
+    """Get a line for a Python source file from the cache.
+    Update the cache if it doesn't contain an entry for this file already."""
+
+    lines = getlines(filename, module_globals)
+    if 1 <= lineno <= len(lines):
+        return lines[lineno - 1]
+    return ''
 
 
 def getlines(filename, module_globals=None):
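
The reordered getline() keeps its contract: a cached, 1-based lookup that returns '' for out-of-range line numbers. For instance:

    import linecache

    print(repr(linecache.getline(linecache.__file__, 1)))      # first source line
    print(repr(linecache.getline(linecache.__file__, 10**9)))  # '' (out of range)
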
@@ -56,11 +55,10 @@
 
     if filename is None:
         filenames = list(cache.keys())
+    elif filename in cache:
+        filenames = [filename]
     else:
-        if filename in cache:
-            filenames = [filename]
-        else:
-            return
+        return
 
     for filename in filenames:
         entry = cache[filename]
@@ -109,8 +107,10 @@
                     # for this module.
                     return []
                 cache[filename] = (
-                    len(data), None,
-                    [line+'\n' for line in data.splitlines()], fullname
+                    len(data),
+                    None,
+                    [line + '\n' for line in data.splitlines()],
+                    fullname
                 )
                 return cache[filename][2]
 
diff --git a/common/py3-stdlib/locale.py b/common/py3-stdlib/locale.py
index dd8a085..1a4e9f6 100644
--- a/common/py3-stdlib/locale.py
+++ b/common/py3-stdlib/locale.py
@@ -279,6 +279,8 @@
         if precedes:
             s = smb + (separated and ' ' or '') + s
         else:
+            if international and smb[-1] == ' ':
+                smb = smb[:-1]
             s = s + (separated and ' ' or '') + smb
 
     sign_pos = conv[val<0 and 'n_sign_posn' or 'p_sign_posn']
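
The fix trims the trailing space that int_curr_symbol conventionally carries (e.g. 'EUR ') when the symbol is appended after the value, so the result no longer ends in a stray space. A sketch, assuming the de_DE.UTF-8 locale is installed:

    import locale

    locale.setlocale(locale.LC_MONETARY, 'de_DE.UTF-8')
    # Previously ended with a stray space: '1.234,56 EUR '
    print(locale.currency(1234.56, international=True))   # '1.234,56 EUR'
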
diff --git a/common/py3-stdlib/logging/__init__.py b/common/py3-stdlib/logging/__init__.py
index 0cfaec8..7b169a1 100644
--- a/common/py3-stdlib/logging/__init__.py
+++ b/common/py3-stdlib/logging/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2017 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2019 by Vinay Sajip. All Rights Reserved.
 #
 # Permission to use, copy, modify, and distribute this software and its
 # documentation for any purpose and without fee is hereby granted,
@@ -18,7 +18,7 @@
 Logging package for Python. Based on PEP 282 and comments thereto in
 comp.lang.python.
 
-Copyright (C) 2001-2017 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2019 Vinay Sajip. All Rights Reserved.
 
 To use, simply 'import logging' and log away!
 """
@@ -234,11 +234,9 @@
     def _register_at_fork_reinit_lock(instance):
         pass  # no-op when os.register_at_fork does not exist.
 else:
-    # A collection of instances with a createLock method (logging.Handler)
+    # A collection of instances with a _at_fork_reinit method (logging.Handler)
     # to be called in the child after forking.  The weakref avoids us keeping
-    # discarded Handler instances alive.  A set is used to avoid accumulating
-    # duplicate registrations as createLock() is responsible for registering
-    # a new Handler instance with this set in the first place.
+    # discarded Handler instances alive.
     _at_fork_reinit_lock_weakset = weakref.WeakSet()
 
     def _register_at_fork_reinit_lock(instance):
@@ -249,16 +247,12 @@
             _releaseLock()
 
     def _after_at_fork_child_reinit_locks():
-        # _acquireLock() was called in the parent before forking.
         for handler in _at_fork_reinit_lock_weakset:
-            try:
-                handler.createLock()
-            except Exception as err:
-                # Similar to what PyErr_WriteUnraisable does.
-                print("Ignoring exception from logging atfork", instance,
-                      "._reinit_lock() method:", err, file=sys.stderr)
-        _releaseLock()  # Acquired by os.register_at_fork(before=.
+            handler._at_fork_reinit()
 
+        # _acquireLock() was called in the parent before forking.
+        # The lock is reinitialized to unlocked state.
+        _lock._at_fork_reinit()
 
     os.register_at_fork(before=_acquireLock,
                         after_in_child=_after_at_fork_child_reinit_locks,
@@ -515,7 +509,7 @@
     responsible for converting a LogRecord to (usually) a string which can
     be interpreted by either a human or an external system. The base Formatter
     allows a formatting string to be specified. If none is supplied, the
-    the style-dependent default value, "%(message)s", "{message}", or
+    style-dependent default value, "%(message)s", "{message}", or
     "${message}", is used.
 
     The Formatter can be initialized with a format string which makes use of
@@ -603,8 +597,9 @@
         if datefmt:
             s = time.strftime(datefmt, ct)
         else:
-            t = time.strftime(self.default_time_format, ct)
-            s = self.default_msec_format % (t, record.msecs)
+            s = time.strftime(self.default_time_format, ct)
+            if self.default_msec_format:
+                s = self.default_msec_format % (s, record.msecs)
         return s
 
     def formatException(self, ei):
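
With the guard above, default_msec_format may now be set to None to suppress the millisecond suffix entirely; a minimal sketch:

    import logging

    fmt = logging.Formatter('%(asctime)s %(message)s')
    fmt.default_msec_format = None    # new: no ',123' appended
    rec = logging.LogRecord('demo', logging.INFO, __file__, 1, 'hi', None, None)
    print(fmt.format(rec))            # e.g. '2021-01-01 12:00:00 hi'
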
@@ -753,8 +748,8 @@
         """
         Determine if the specified record is to be logged.
 
-        Is the specified record to be logged? Returns 0 for no, nonzero for
-        yes. If deemed appropriate, the record may be modified in-place.
+        Returns True if the record should be logged, or False otherwise.
+        If deemed appropriate, the record may be modified in-place.
         """
         if self.nlen == 0:
             return True
@@ -891,6 +886,9 @@
         self.lock = threading.RLock()
         _register_at_fork_reinit_lock(self)
 
+    def _at_fork_reinit(self):
+        self.lock._at_fork_reinit()
+
     def acquire(self):
         """
         Acquire the I/O thread lock.
@@ -1122,7 +1120,7 @@
     """
     A handler class which writes formatted logging records to disk files.
     """
-    def __init__(self, filename, mode='a', encoding=None, delay=False):
+    def __init__(self, filename, mode='a', encoding=None, delay=False, errors=None):
         """
         Open the specified file and use it as the stream for logging.
         """
@@ -1133,6 +1131,7 @@
         self.baseFilename = os.path.abspath(filename)
         self.mode = mode
         self.encoding = encoding
+        self.errors = errors
         self.delay = delay
         if delay:
             #We don't open the stream, but we still need to call the
@@ -1169,7 +1168,8 @@
         Open the current base file with the (original) mode and encoding.
         Return the resulting stream.
         """
-        return open(self.baseFilename, self.mode, encoding=self.encoding)
+        return open(self.baseFilename, self.mode, encoding=self.encoding,
+                    errors=self.errors)
 
     def emit(self, record):
         """
@@ -1931,15 +1931,20 @@
               attached to the root logger are removed and closed, before
               carrying out the configuration as specified by the other
               arguments.
+    encoding  If specified together with a filename, this encoding is passed to
+              the created FileHandler, causing it to be used when the file is
+              opened.
+    errors    If specified together with a filename, this value is passed to the
+              created FileHandler, causing it to be used when the file is
+              opened in text mode. If not specified, the default value is
+              `backslashreplace`.
+
     Note that you could specify a stream created using open(filename, mode)
     rather than passing the filename and mode in. However, it should be
     remembered that StreamHandler does not close its stream (since it may be
     using sys.stdout or sys.stderr), whereas FileHandler closes its stream
     when the handler is closed.
 
-    .. versionchanged:: 3.8
-       Added the ``force`` parameter.
-
     .. versionchanged:: 3.2
        Added the ``style`` parameter.
 
@@ -1949,12 +1954,20 @@
        ``filename``/``filemode``, or ``filename``/``filemode`` specified
        together with ``stream``, or ``handlers`` specified together with
        ``stream``.
+
+    .. versionchanged:: 3.8
+       Added the ``force`` parameter.
+
+    .. versionchanged:: 3.9
+       Added the ``encoding`` and ``errors`` parameters.
     """
     # Add thread safety in case someone mistakenly calls
     # basicConfig() from multiple threads
     _acquireLock()
     try:
         force = kwargs.pop('force', False)
+        encoding = kwargs.pop('encoding', None)
+        errors = kwargs.pop('errors', 'backslashreplace')
         if force:
             for h in root.handlers[:]:
                 root.removeHandler(h)
@@ -1973,7 +1986,10 @@
                 filename = kwargs.pop("filename", None)
                 mode = kwargs.pop("filemode", 'a')
                 if filename:
-                    h = FileHandler(filename, mode)
+                    if 'b' in mode:
+                        errors = None
+                    h = FileHandler(filename, mode,
+                                    encoding=encoding, errors=errors)
                 else:
                     stream = kwargs.pop("stream", None)
                     h = StreamHandler(stream)
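
End to end, the new keyword arguments thread through basicConfig to the FileHandler it creates; sketch:

    import logging

    logging.basicConfig(filename='app.log', level=logging.INFO,
                        encoding='utf-8', errors='replace')
    logging.info('caf\u00e9')   # UTF-8 on disk; unencodable characters replaced
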
@@ -2009,10 +2025,9 @@
 
     If no name is specified, return the root logger.
     """
-    if name:
-        return Logger.manager.getLogger(name)
-    else:
+    if not name or isinstance(name, str) and name == root.name:
         return root
+    return Logger.manager.getLogger(name)
 
 def critical(msg, *args, **kwargs):
     """
@@ -2151,6 +2166,9 @@
     def createLock(self):
         self.lock = None
 
+    def _at_fork_reinit(self):
+        pass
+
 # Warnings integration
 
 _warnings_showwarning = None
diff --git a/common/py3-stdlib/logging/config.py b/common/py3-stdlib/logging/config.py
index 3cd5fea..fd3aded 100644
--- a/common/py3-stdlib/logging/config.py
+++ b/common/py3-stdlib/logging/config.py
@@ -143,6 +143,7 @@
         kwargs = section.get("kwargs", '{}')
         kwargs = eval(kwargs, vars(logging))
         h = klass(*args, **kwargs)
+        h.name = hand
         if "level" in section:
             level = section["level"]
             h.setLevel(level)
diff --git a/common/py3-stdlib/logging/handlers.py b/common/py3-stdlib/logging/handlers.py
index 34ff7a0..867ef4e 100644
--- a/common/py3-stdlib/logging/handlers.py
+++ b/common/py3-stdlib/logging/handlers.py
@@ -48,15 +48,19 @@
     Not meant to be instantiated directly.  Instead, use RotatingFileHandler
     or TimedRotatingFileHandler.
     """
-    def __init__(self, filename, mode, encoding=None, delay=False):
+    namer = None
+    rotator = None
+
+    def __init__(self, filename, mode, encoding=None, delay=False, errors=None):
         """
         Use the specified filename for streamed logging
         """
-        logging.FileHandler.__init__(self, filename, mode, encoding, delay)
+        logging.FileHandler.__init__(self, filename, mode=mode,
+                                     encoding=encoding, delay=delay,
+                                     errors=errors)
         self.mode = mode
         self.encoding = encoding
-        self.namer = None
-        self.rotator = None
+        self.errors = errors
 
     def emit(self, record):
         """
@@ -117,7 +121,8 @@
     Handler for logging to a set of files, which switches from one file
     to the next when the current file reaches a certain size.
     """
-    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False):
+    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0,
+                 encoding=None, delay=False, errors=None):
         """
         Open the specified file and use it as the stream for logging.
 
@@ -145,7 +150,8 @@
         # on each run.
         if maxBytes > 0:
             mode = 'a'
-        BaseRotatingHandler.__init__(self, filename, mode, encoding, delay)
+        BaseRotatingHandler.__init__(self, filename, mode, encoding=encoding,
+                                     delay=delay, errors=errors)
         self.maxBytes = maxBytes
         self.backupCount = backupCount
 
@@ -196,8 +202,11 @@
     If backupCount is > 0, when rollover is done, no more than backupCount
     files are kept - the oldest ones are deleted.
     """
-    def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False, atTime=None):
-        BaseRotatingHandler.__init__(self, filename, 'a', encoding, delay)
+    def __init__(self, filename, when='h', interval=1, backupCount=0,
+                 encoding=None, delay=False, utc=False, atTime=None,
+                 errors=None):
+        BaseRotatingHandler.__init__(self, filename, 'a', encoding=encoding,
+                                     delay=delay, errors=errors)
         self.when = when.upper()
         self.backupCount = backupCount
         self.utc = utc
@@ -431,8 +440,11 @@
     This handler is based on a suggestion and patch by Chad J.
     Schroeder.
     """
-    def __init__(self, filename, mode='a', encoding=None, delay=False):
-        logging.FileHandler.__init__(self, filename, mode, encoding, delay)
+    def __init__(self, filename, mode='a', encoding=None, delay=False,
+                 errors=None):
+        logging.FileHandler.__init__(self, filename, mode=mode,
+                                     encoding=encoding, delay=delay,
+                                     errors=errors)
         self.dev, self.ino = -1, -1
         self._statstream()
 
@@ -730,6 +742,10 @@
     LOG_CRON      = 9       #  clock daemon
     LOG_AUTHPRIV  = 10      #  security/authorization messages (private)
     LOG_FTP       = 11      #  FTP daemon
+    LOG_NTP       = 12      #  NTP subsystem
+    LOG_SECURITY  = 13      #  Log audit
+    LOG_CONSOLE   = 14      #  Log alert
+    LOG_SOLCRON   = 15      #  Scheduling daemon (Solaris)
 
     #  other codes through 15 reserved for system use
     LOG_LOCAL0    = 16      #  reserved for local use
@@ -757,27 +773,30 @@
         }
 
     facility_names = {
-        "auth":     LOG_AUTH,
-        "authpriv": LOG_AUTHPRIV,
-        "cron":     LOG_CRON,
-        "daemon":   LOG_DAEMON,
-        "ftp":      LOG_FTP,
-        "kern":     LOG_KERN,
-        "lpr":      LOG_LPR,
-        "mail":     LOG_MAIL,
-        "news":     LOG_NEWS,
-        "security": LOG_AUTH,       #  DEPRECATED
-        "syslog":   LOG_SYSLOG,
-        "user":     LOG_USER,
-        "uucp":     LOG_UUCP,
-        "local0":   LOG_LOCAL0,
-        "local1":   LOG_LOCAL1,
-        "local2":   LOG_LOCAL2,
-        "local3":   LOG_LOCAL3,
-        "local4":   LOG_LOCAL4,
-        "local5":   LOG_LOCAL5,
-        "local6":   LOG_LOCAL6,
-        "local7":   LOG_LOCAL7,
+        "auth":         LOG_AUTH,
+        "authpriv":     LOG_AUTHPRIV,
+        "console":      LOG_CONSOLE,
+        "cron":         LOG_CRON,
+        "daemon":       LOG_DAEMON,
+        "ftp":          LOG_FTP,
+        "kern":         LOG_KERN,
+        "lpr":          LOG_LPR,
+        "mail":         LOG_MAIL,
+        "news":         LOG_NEWS,
+        "ntp":          LOG_NTP,
+        "security":     LOG_SECURITY,
+        "solaris-cron": LOG_SOLCRON,
+        "syslog":       LOG_SYSLOG,
+        "user":         LOG_USER,
+        "uucp":         LOG_UUCP,
+        "local0":       LOG_LOCAL0,
+        "local1":       LOG_LOCAL1,
+        "local2":       LOG_LOCAL2,
+        "local3":       LOG_LOCAL3,
+        "local4":       LOG_LOCAL4,
+        "local5":       LOG_LOCAL5,
+        "local6":       LOG_LOCAL6,
+        "local7":       LOG_LOCAL7,
         }
 
     #The map below appears to be trivially lowercasing the key. However,
@@ -1154,6 +1173,20 @@
         """
         return record.__dict__
 
+    def getConnection(self, host, secure):
+        """
+        get a HTTP[S]Connection.
+
+        Override when a custom connection is required, for example if
+        there is a proxy.
+        """
+        import http.client
+        if secure:
+            connection = http.client.HTTPSConnection(host, context=self.context)
+        else:
+            connection = http.client.HTTPConnection(host)
+        return connection
+
     def emit(self, record):
         """
         Emit a record.
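
The extracted hook makes proxying straightforward; a sketch of an override, where proxy.example:3128 is an illustrative proxy address:

    import http.client
    import logging.handlers

    class ProxyHTTPHandler(logging.handlers.HTTPHandler):
        def getConnection(self, host, secure):
            conn = http.client.HTTPConnection('proxy.example', 3128)
            conn.set_tunnel(host)   # tunnel through the proxy to the real host
            return conn

    handler = ProxyHTTPHandler('app.example.com', '/log', method='POST')
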
@@ -1161,12 +1194,9 @@
         Send the record to the Web server as a percent-encoded dictionary
         """
         try:
-            import http.client, urllib.parse
+            import urllib.parse
             host = self.host
-            if self.secure:
-                h = http.client.HTTPSConnection(host, context=self.context)
-            else:
-                h = http.client.HTTPConnection(host)
+            h = self.getConnection(host, self.secure)
             url = self.url
             data = urllib.parse.urlencode(self.mapLogRecord(record))
             if self.method == "GET":
@@ -1242,7 +1272,7 @@
         """
         self.acquire()
         try:
-            self.buffer = []
+            self.buffer.clear()
         finally:
             self.release()
 
@@ -1294,7 +1324,11 @@
         """
         Set the target handler for this handler.
         """
-        self.target = target
+        self.acquire()
+        try:
+            self.target = target
+        finally:
+            self.release()
 
     def flush(self):
         """
@@ -1309,7 +1343,7 @@
             if self.target:
                 for record in self.buffer:
                     self.target.handle(record)
-                self.buffer = []
+                self.buffer.clear()
         finally:
             self.release()
 
diff --git a/common/py3-stdlib/mailbox.py b/common/py3-stdlib/mailbox.py
index 5b4e864..70da07e 100644
--- a/common/py3-stdlib/mailbox.py
+++ b/common/py3-stdlib/mailbox.py
@@ -18,6 +18,7 @@
 import email.generator
 import io
 import contextlib
+from types import GenericAlias
 try:
     import fcntl
 except ImportError:
@@ -260,6 +261,8 @@
         else:
             raise TypeError('Invalid message type: %s' % type(message))
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 class Maildir(Mailbox):
     """A qmail-style Maildir mailbox."""
@@ -2015,6 +2018,8 @@
             return False
         return self._file.closed
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 class _PartialFile(_ProxyFile):
     """A read-only wrapper of part of a file."""
diff --git a/common/py3-stdlib/mailcap.py b/common/py3-stdlib/mailcap.py
index bd0fc09..ae416a8 100644
--- a/common/py3-stdlib/mailcap.py
+++ b/common/py3-stdlib/mailcap.py
@@ -251,6 +251,7 @@
         else:
             print("Executing:", command)
             sts = os.system(command)
+            sts = os.waitstatus_to_exitcode(sts)
             if sts:
                 print("Exit status:", sts)
 
diff --git a/common/py3-stdlib/mimetypes.py b/common/py3-stdlib/mimetypes.py
index 954bb0a..92c2a47 100644
--- a/common/py3-stdlib/mimetypes.py
+++ b/common/py3-stdlib/mimetypes.py
@@ -401,6 +401,7 @@
         '.Z': 'compress',
         '.bz2': 'bzip2',
         '.xz': 'xz',
+        '.br': 'br',
         }
 
     # Before adding new types, make sure they are either registered with IANA,
@@ -447,7 +448,6 @@
         '.dvi'    : 'application/x-dvi',
         '.gtar'   : 'application/x-gtar',
         '.hdf'    : 'application/x-hdf',
-        '.h5'     : 'application/x-hdf5',
         '.latex'  : 'application/x-latex',
         '.mif'    : 'application/x-mif',
         '.cdf'    : 'application/x-netcdf',
diff --git a/common/py3-stdlib/modulefinder.py b/common/py3-stdlib/modulefinder.py
index aadcd23..cb455f4 100644
--- a/common/py3-stdlib/modulefinder.py
+++ b/common/py3-stdlib/modulefinder.py
@@ -7,8 +7,6 @@
 import os
 import io
 import sys
-import types
-import warnings
 
 
 LOAD_CONST = dis.opmap['LOAD_CONST']
diff --git a/common/py3-stdlib/multiprocessing/connection.py b/common/py3-stdlib/multiprocessing/connection.py
index 8e2facf..510e4b5 100644
--- a/common/py3-stdlib/multiprocessing/connection.py
+++ b/common/py3-stdlib/multiprocessing/connection.py
@@ -73,6 +73,11 @@
     if family == 'AF_INET':
         return ('localhost', 0)
     elif family == 'AF_UNIX':
+        # Prefer abstract sockets if possible to avoid problems with the address
+        # size.  When coding portable applications, some implementations have
+        # sun_path as short as 92 bytes in the sockaddr_un struct.
+        if util.abstract_sockets_supported:
+            return f"\0listener-{os.getpid()}-{next(_mmap_counter)}"
         return tempfile.mktemp(prefix='listener-', dir=util.get_temp_dir())
     elif family == 'AF_PIPE':
         return tempfile.mktemp(prefix=r'\\.\pipe\pyc-%d-%d-' %
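
For reference, an abstract-namespace address is simply a name starting with a NUL byte; it never touches the filesystem, which sidesteps the sun_path length limit mentioned above. A Linux-only sketch:

    from multiprocessing.connection import Listener

    with Listener('\0demo-abstract') as listener:   # leading \0 = abstract socket
        print(listener.address)
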
diff --git a/common/py3-stdlib/multiprocessing/forkserver.py b/common/py3-stdlib/multiprocessing/forkserver.py
index 215ac39..22a911a 100644
--- a/common/py3-stdlib/multiprocessing/forkserver.py
+++ b/common/py3-stdlib/multiprocessing/forkserver.py
@@ -237,14 +237,8 @@
                             break
                         child_w = pid_to_fd.pop(pid, None)
                         if child_w is not None:
-                            if os.WIFSIGNALED(sts):
-                                returncode = -os.WTERMSIG(sts)
-                            else:
-                                if not os.WIFEXITED(sts):
-                                    raise AssertionError(
-                                        "Child {0:n} status is {1:n}".format(
-                                            pid,sts))
-                                returncode = os.WEXITSTATUS(sts)
+                            returncode = os.waitstatus_to_exitcode(sts)
+
                             # Send exit code to client process
                             try:
                                 write_signed(child_w, returncode)
diff --git a/common/py3-stdlib/multiprocessing/managers.py b/common/py3-stdlib/multiprocessing/managers.py
index 85e0d88..0eb16c6 100644
--- a/common/py3-stdlib/multiprocessing/managers.py
+++ b/common/py3-stdlib/multiprocessing/managers.py
@@ -21,6 +21,7 @@
 import array
 import queue
 import time
+import types
 import os
 from os import getpid
 
@@ -248,7 +249,7 @@
                     try:
                         obj, exposed, gettypeid = \
                             self.id_to_local_proxy_obj[ident]
-                    except KeyError as second_ke:
+                    except KeyError:
                         raise ke
 
                 if methodname not in exposed:
@@ -296,7 +297,7 @@
             try:
                 try:
                     send(msg)
-                except Exception as e:
+                except Exception:
                     send(('#UNSERIALIZABLE', format_exc()))
             except Exception as e:
                 util.info('exception in thread serving %r',
@@ -360,36 +361,10 @@
         finally:
             self.stop_event.set()
 
-    def create(*args, **kwds):
+    def create(self, c, typeid, /, *args, **kwds):
         '''
         Create a new shared object and return its id
         '''
-        if len(args) >= 3:
-            self, c, typeid, *args = args
-        elif not args:
-            raise TypeError("descriptor 'create' of 'Server' object "
-                            "needs an argument")
-        else:
-            if 'typeid' not in kwds:
-                raise TypeError('create expected at least 2 positional '
-                                'arguments, got %d' % (len(args)-1))
-            typeid = kwds.pop('typeid')
-            if len(args) >= 2:
-                self, c, *args = args
-                import warnings
-                warnings.warn("Passing 'typeid' as keyword argument is deprecated",
-                              DeprecationWarning, stacklevel=2)
-            else:
-                if 'c' not in kwds:
-                    raise TypeError('create expected at least 2 positional '
-                                    'arguments, got %d' % (len(args)-1))
-                c = kwds.pop('c')
-                self, *args = args
-                import warnings
-                warnings.warn("Passing 'c' as keyword argument is deprecated",
-                              DeprecationWarning, stacklevel=2)
-        args = tuple(args)
-
         with self.mutex:
             callable, exposed, method_to_typeid, proxytype = \
                       self.registry[typeid]
@@ -421,7 +396,6 @@
 
         self.incref(c, ident)
         return ident, tuple(exposed)
-    create.__text_signature__ = '($self, c, typeid, /, *args, **kwds)'
 
     def get_methods(self, c, token):
         '''
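
The '/' marker makes the leading parameters positional-only, which is exactly what the removed hand-rolled argument unpacking and __text_signature__ shim emulated; a standalone sketch:

    def create(self, c, typeid, /, *args, **kwds):
        return typeid, args, kwds

    print(create(object(), object(), 'Queue', 10))   # ('Queue', (10,), {})
    try:
        create(object(), object(), typeid='Queue')   # positional-only: rejected
    except TypeError as exc:
        print(exc)
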
@@ -1156,6 +1130,8 @@
         return self._callmethod('set', (value,))
     value = property(get, set)
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
 
 BaseListProxy = MakeProxyType('BaseListProxy', (
     '__add__', '__contains__', '__delitem__', '__getitem__', '__len__',
@@ -1297,26 +1273,15 @@
                 _SharedMemoryTracker(f"shm_{address}_{getpid()}")
             util.debug(f"SharedMemoryServer started by pid {getpid()}")
 
-        def create(*args, **kwargs):
+        def create(self, c, typeid, /, *args, **kwargs):
             """Create a new distributed-shared object (not backed by a shared
             memory block) and return its id to be used in a Proxy Object."""
             # Unless set up as a shared proxy, don't make shared_memory_context
             # a standard part of kwargs.  This makes things easier for supplying
             # simple functions.
-            if len(args) >= 3:
-                typeod = args[2]
-            elif 'typeid' in kwargs:
-                typeid = kwargs['typeid']
-            elif not args:
-                raise TypeError("descriptor 'create' of 'SharedMemoryServer' "
-                                "object needs an argument")
-            else:
-                raise TypeError('create expected at least 2 positional '
-                                'arguments, got %d' % (len(args)-1))
             if hasattr(self.registry[typeid][-1], "_shared_memory_proxy"):
                 kwargs['shared_memory_context'] = self.shared_memory_context
-            return Server.create(*args, **kwargs)
-        create.__text_signature__ = '($self, c, typeid, /, *args, **kwargs)'
+            return Server.create(self, c, typeid, *args, **kwargs)
 
         def shutdown(self, c):
             "Call unlink() on all tracked shared memory, terminate the Server."
diff --git a/common/py3-stdlib/multiprocessing/pool.py b/common/py3-stdlib/multiprocessing/pool.py
index 41dd923..bbe05a5 100644
--- a/common/py3-stdlib/multiprocessing/pool.py
+++ b/common/py3-stdlib/multiprocessing/pool.py
@@ -20,8 +20,8 @@
 import threading
 import time
 import traceback
+import types
 import warnings
-from queue import Empty
 
 # If threading is available then ThreadPool should be provided.  Therefore
 # we avoid top-level imports which are liable to fail on some systems.
@@ -780,6 +780,8 @@
         del self._cache[self._job]
         self._pool = None
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
 AsyncResult = ApplyResult       # create alias -- see #17805
 
 #
diff --git a/common/py3-stdlib/multiprocessing/popen_fork.py b/common/py3-stdlib/multiprocessing/popen_fork.py
index 11e2160..625981c 100644
--- a/common/py3-stdlib/multiprocessing/popen_fork.py
+++ b/common/py3-stdlib/multiprocessing/popen_fork.py
@@ -25,16 +25,12 @@
         if self.returncode is None:
             try:
                 pid, sts = os.waitpid(self.pid, flag)
-            except OSError as e:
+            except OSError:
                 # Child process not yet created. See #1731717
                 # e.errno == errno.ECHILD == 10
                 return None
             if pid == self.pid:
-                if os.WIFSIGNALED(sts):
-                    self.returncode = -os.WTERMSIG(sts)
-                else:
-                    assert os.WIFEXITED(sts), "Status is {:n}".format(sts)
-                    self.returncode = os.WEXITSTATUS(sts)
+                self.returncode = os.waitstatus_to_exitcode(sts)
         return self.returncode
 
     def wait(self, timeout=None):
diff --git a/common/py3-stdlib/multiprocessing/process.py b/common/py3-stdlib/multiprocessing/process.py
index be13c07..0b2e0b4 100644
--- a/common/py3-stdlib/multiprocessing/process.py
+++ b/common/py3-stdlib/multiprocessing/process.py
@@ -317,12 +317,12 @@
             finally:
                 util._exit_function()
         except SystemExit as e:
-            if not e.args:
-                exitcode = 1
-            elif isinstance(e.args[0], int):
-                exitcode = e.args[0]
+            if e.code is None:
+                exitcode = 0
+            elif isinstance(e.code, int):
+                exitcode = e.code
             else:
-                sys.stderr.write(str(e.args[0]) + '\n')
+                sys.stderr.write(str(e.code) + '\n')
                 exitcode = 1
         except:
             exitcode = 1
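
e.code is the right attribute here because sys.exit() with no argument means success; sketch:

    try:
        raise SystemExit()        # what a bare sys.exit() raises
    except SystemExit as e:
        print(e.args, e.code)     # () None, which maps to exit code 0 above
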
diff --git a/common/py3-stdlib/multiprocessing/queues.py b/common/py3-stdlib/multiprocessing/queues.py
index d112db2..a290181 100644
--- a/common/py3-stdlib/multiprocessing/queues.py
+++ b/common/py3-stdlib/multiprocessing/queues.py
@@ -14,6 +14,7 @@
 import threading
 import collections
 import time
+import types
 import weakref
 import errno
 
@@ -48,8 +49,7 @@
         self._sem = ctx.BoundedSemaphore(maxsize)
         # For use by concurrent.futures
         self._ignore_epipe = False
-
-        self._after_fork()
+        self._reset()
 
         if sys.platform != 'win32':
             register_after_fork(self, Queue._after_fork)
@@ -62,11 +62,17 @@
     def __setstate__(self, state):
         (self._ignore_epipe, self._maxsize, self._reader, self._writer,
          self._rlock, self._wlock, self._sem, self._opid) = state
-        self._after_fork()
+        self._reset()
 
     def _after_fork(self):
         debug('Queue._after_fork()')
-        self._notempty = threading.Condition(threading.Lock())
+        self._reset(after_fork=True)
+
+    def _reset(self, after_fork=False):
+        if after_fork:
+            self._notempty._at_fork_reinit()
+        else:
+            self._notempty = threading.Condition(threading.Lock())
         self._buffer = collections.deque()
         self._thread = None
         self._jointhread = None
@@ -340,6 +346,10 @@
         else:
             self._wlock = ctx.Lock()
 
+    def close(self):
+        self._reader.close()
+        self._writer.close()
+
     def empty(self):
         return not self._poll()
 
@@ -366,3 +376,5 @@
         else:
             with self._wlock:
                 self._writer.send_bytes(obj)
+
+    __class_getitem__ = classmethod(types.GenericAlias)
diff --git a/common/py3-stdlib/multiprocessing/resource_sharer.py b/common/py3-stdlib/multiprocessing/resource_sharer.py
index 8d5c990..6607650 100644
--- a/common/py3-stdlib/multiprocessing/resource_sharer.py
+++ b/common/py3-stdlib/multiprocessing/resource_sharer.py
@@ -63,7 +63,6 @@
     def __init__(self):
         self._key = 0
         self._cache = {}
-        self._old_locks = []
         self._lock = threading.Lock()
         self._listener = None
         self._address = None
@@ -113,10 +112,7 @@
         for key, (send, close) in self._cache.items():
             close()
         self._cache.clear()
-        # If self._lock was locked at the time of the fork, it may be broken
-        # -- see issue 6721.  Replace it without letting it be gc'ed.
-        self._old_locks.append(self._lock)
-        self._lock = threading.Lock()
+        self._lock._at_fork_reinit()
         if self._listener is not None:
             self._listener.close()
         self._listener = None
diff --git a/common/py3-stdlib/multiprocessing/shared_memory.py b/common/py3-stdlib/multiprocessing/shared_memory.py
index f92eb01..122b3fc 100644
--- a/common/py3-stdlib/multiprocessing/shared_memory.py
+++ b/common/py3-stdlib/multiprocessing/shared_memory.py
@@ -14,6 +14,7 @@
 import errno
 import struct
 import secrets
+import types
 
 if os.name == "nt":
     import _winapi
@@ -75,6 +76,8 @@
             raise ValueError("'size' must be a positive integer")
         if create:
             self._flags = _O_CREX | os.O_RDWR
+            if size == 0:
+                raise ValueError("'size' must be a positive number different from zero")
         if name is None and not self._flags & os.O_EXCL:
             raise ValueError("'name' can only be None if create=True")
 
@@ -251,6 +254,15 @@
     packing format for any storable value must require no more than 8
     characters to describe its format."""
 
+    # The shared memory area is organized as follows:
+    # - 8 bytes: number of items (N) as a 64-bit integer
+    # - (N + 1) * 8 bytes: offsets of each element from the start of the
+    #                      data area
+    # - K bytes: the data area storing item values (with encoding and size
+    #            depending on their respective types)
+    # - N * 8 bytes: `struct` format string for each element
+    # - N bytes: index into _back_transforms_mapping for each element
+    #            (for reconstructing the corresponding Python value)
     _types_mapping = {
         int: "q",
         float: "d",
@@ -282,7 +294,8 @@
             return 3  # NoneType
 
     def __init__(self, sequence=None, *, name=None):
-        if sequence is not None:
+        if name is None or sequence is not None:
+            sequence = sequence or ()
             _formats = [
                 self._types_mapping[type(item)]
                     if not isinstance(item, (str, bytes))
@@ -293,10 +306,14 @@
             ]
             self._list_len = len(_formats)
             assert sum(len(fmt) <= 8 for fmt in _formats) == self._list_len
-            self._allocated_bytes = tuple(
-                    self._alignment if fmt[-1] != "s" else int(fmt[:-1])
-                    for fmt in _formats
-            )
+            offset = 0
+            # The offsets of each list element into the shared memory's
+            # data area (0 meaning the start of the data area, not the start
+            # of the shared memory area).
+            self._allocated_offsets = [0]
+            for fmt in _formats:
+                offset += self._alignment if fmt[-1] != "s" else int(fmt[:-1])
+                self._allocated_offsets.append(offset)
             _recreation_codes = [
                 self._extract_recreation_code(item) for item in sequence
             ]
@@ -307,13 +324,9 @@
                 self._format_back_transform_codes
             )
 
-        else:
-            requested_size = 8  # Some platforms require > 0.
-
-        if name is not None and sequence is None:
-            self.shm = SharedMemory(name)
-        else:
             self.shm = SharedMemory(name, create=True, size=requested_size)
+        else:
+            self.shm = SharedMemory(name)
 
         if sequence is not None:
             _enc = _encoding
@@ -322,7 +335,7 @@
                 self.shm.buf,
                 0,
                 self._list_len,
-                *(self._allocated_bytes)
+                *(self._allocated_offsets)
             )
             struct.pack_into(
                 "".join(_formats),
@@ -345,10 +358,12 @@
 
         else:
             self._list_len = len(self)  # Obtains size from offset 0 in buffer.
-            self._allocated_bytes = struct.unpack_from(
-                self._format_size_metainfo,
-                self.shm.buf,
-                1 * 8
+            self._allocated_offsets = list(
+                struct.unpack_from(
+                    self._format_size_metainfo,
+                    self.shm.buf,
+                    1 * 8
+                )
             )
 
     def _get_packing_format(self, position):
@@ -370,7 +385,6 @@
     def _get_back_transform(self, position):
         "Gets the back transformation function for a single value."
 
-        position = position if position >= 0 else position + self._list_len
         if (position >= self._list_len) or (self._list_len < 0):
             raise IndexError("Requested position out of range.")
 
@@ -387,7 +401,6 @@
         """Sets the packing format and back transformation code for a
         single value in the list at the specified position."""
 
-        position = position if position >= 0 else position + self._list_len
         if (position >= self._list_len) or (self._list_len < 0):
             raise IndexError("Requested position out of range.")
 
@@ -407,9 +420,9 @@
         )
 
     def __getitem__(self, position):
+        position = position if position >= 0 else position + self._list_len
         try:
-            offset = self._offset_data_start \
-                     + sum(self._allocated_bytes[:position])
+            offset = self._offset_data_start + self._allocated_offsets[position]
             (v,) = struct.unpack_from(
                 self._get_packing_format(position),
                 self.shm.buf,
@@ -424,9 +437,10 @@
         return v
 
     def __setitem__(self, position, value):
+        position = position if position >= 0 else position + self._list_len
         try:
-            offset = self._offset_data_start \
-                     + sum(self._allocated_bytes[:position])
+            item_offset = self._allocated_offsets[position]
+            offset = self._offset_data_start + item_offset
             current_format = self._get_packing_format(position)
         except IndexError:
             raise IndexError("assignment index out of range")
@@ -435,15 +449,17 @@
             new_format = self._types_mapping[type(value)]
             encoded_value = value
         else:
+            allocated_length = self._allocated_offsets[position + 1] - item_offset
+
             encoded_value = (value.encode(_encoding)
                              if isinstance(value, str) else value)
-            if len(encoded_value) > self._allocated_bytes[position]:
+            if len(encoded_value) > allocated_length:
                 raise ValueError("bytes/str item exceeds available storage")
             if current_format[-1] == "s":
                 new_format = current_format
             else:
                 new_format = self._types_mapping[str] % (
-                    self._allocated_bytes[position],
+                    allocated_length,
                 )
 
         self._set_packing_format_and_transform(
@@ -464,33 +480,35 @@
 
     @property
     def format(self):
-        "The struct packing format used by all currently stored values."
+        "The struct packing format used by all currently stored items."
         return "".join(
             self._get_packing_format(i) for i in range(self._list_len)
         )
 
     @property
     def _format_size_metainfo(self):
-        "The struct packing format used for metainfo on storage sizes."
-        return f"{self._list_len}q"
+        "The struct packing format used for the items' storage offsets."
+        return "q" * (self._list_len + 1)
 
     @property
     def _format_packing_metainfo(self):
-        "The struct packing format used for the values' packing formats."
+        "The struct packing format used for the items' packing formats."
         return "8s" * self._list_len
 
     @property
     def _format_back_transform_codes(self):
-        "The struct packing format used for the values' back transforms."
+        "The struct packing format used for the items' back transforms."
         return "b" * self._list_len
 
     @property
     def _offset_data_start(self):
-        return (self._list_len + 1) * 8  # 8 bytes per "q"
+        # - 8 bytes for the list length
+        # - (N + 1) * 8 bytes for the element offsets
+        return (self._list_len + 2) * 8
 
     @property
     def _offset_packing_formats(self):
-        return self._offset_data_start + sum(self._allocated_bytes)
+        return self._offset_data_start + self._allocated_offsets[-1]
 
     @property
     def _offset_back_transform_codes(self):
@@ -510,3 +528,5 @@
                 return position
         else:
             raise ValueError(f"{value!r} not in this container")
+
+    __class_getitem__ = classmethod(types.GenericAlias)
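
Seen from the outside, the rewritten ShareableList bookkeeping stores per-item offsets rather than per-item sizes, so str/bytes slots are addressed directly and negative indexes are normalized up front; sketch:

    from multiprocessing import shared_memory

    sl = shared_memory.ShareableList(['hello', 42, 3.14, None])
    print(sl[0], sl[-1])   # hello None
    sl[0] = 'hi'           # fits within the slot allocated for 'hello'
    sl.shm.close()
    sl.shm.unlink()
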
diff --git a/common/py3-stdlib/multiprocessing/synchronize.py b/common/py3-stdlib/multiprocessing/synchronize.py
index 4fcbefc..d0be48f 100644
--- a/common/py3-stdlib/multiprocessing/synchronize.py
+++ b/common/py3-stdlib/multiprocessing/synchronize.py
@@ -270,7 +270,7 @@
     def notify(self, n=1):
         assert self._lock._semlock._is_mine(), 'lock is not owned'
         assert not self._wait_semaphore.acquire(
-            False), ('notify: Should not have been able to acquire'
+            False), ('notify: Should not have been able to acquire '
                      + '_wait_semaphore')
 
         # to take account of timeouts since last notify*() we subtract
diff --git a/common/py3-stdlib/multiprocessing/util.py b/common/py3-stdlib/multiprocessing/util.py
index 44abfe5..21f2a7e 100644
--- a/common/py3-stdlib/multiprocessing/util.py
+++ b/common/py3-stdlib/multiprocessing/util.py
@@ -367,13 +367,13 @@
 
 class ForkAwareThreadLock(object):
     def __init__(self):
-        self._reset()
-        register_after_fork(self, ForkAwareThreadLock._reset)
-
-    def _reset(self):
         self._lock = threading.Lock()
         self.acquire = self._lock.acquire
         self.release = self._lock.release
+        register_after_fork(self, ForkAwareThreadLock._at_fork_reinit)
+
+    def _at_fork_reinit(self):
+        self._lock._at_fork_reinit()
 
     def __enter__(self):
         return self._lock.__enter__()
@@ -452,7 +452,7 @@
         return _posixsubprocess.fork_exec(
             args, [os.fsencode(path)], True, passfds, None, None,
             -1, -1, -1, -1, -1, -1, errpipe_read, errpipe_write,
-            False, False, None)
+            False, False, None, None, None, -1, None)
     finally:
         os.close(errpipe_read)
         os.close(errpipe_write)
diff --git a/common/py3-stdlib/nntplib.py b/common/py3-stdlib/nntplib.py
index 9036f36..f6e746e 100644
--- a/common/py3-stdlib/nntplib.py
+++ b/common/py3-stdlib/nntplib.py
@@ -67,7 +67,6 @@
 import socket
 import collections
 import datetime
-import warnings
 import sys
 
 try:
@@ -294,7 +293,7 @@
 
 
 # The classes themselves
-class _NNTPBase:
+class NNTP:
     # UTF-8 is the character set for all NNTP commands and responses: they
     # are automatically encoded (when sending) and decoded (and receiving)
     # by this class.
@@ -310,13 +309,18 @@
     encoding = 'utf-8'
     errors = 'surrogateescape'
 
-    def __init__(self, file, host,
-                 readermode=None, timeout=_GLOBAL_DEFAULT_TIMEOUT):
+    def __init__(self, host, port=NNTP_PORT, user=None, password=None,
+                 readermode=None, usenetrc=False,
+                 timeout=_GLOBAL_DEFAULT_TIMEOUT):
         """Initialize an instance.  Arguments:
-        - file: file-like object (open for read/write in binary mode)
-        - host: hostname of the server
+        - host: hostname to connect to
+        - port: port to connect to (default the standard NNTP port)
+        - user: username to authenticate with
+        - password: password to use with username
         - readermode: if true, send 'mode reader' command after
                       connecting.
+        - usenetrc: allow loading username and password from ~/.netrc file
+                    if not specified explicitly
         - timeout: timeout (in seconds) used for socket connections
 
         readermode is sometimes necessary if you are connecting to an
@@ -326,7 +330,24 @@
         readermode.
         """
         self.host = host
-        self.file = file
+        self.port = port
+        self.sock = self._create_socket(timeout)
+        self.file = None
+        try:
+            self.file = self.sock.makefile("rwb")
+            self._base_init(readermode)
+            if user or usenetrc:
+                self.login(user, password, usenetrc)
+        except:
+            if self.file:
+                self.file.close()
+            self.sock.close()
+            raise
+
+    def _base_init(self, readermode):
+        """Partial initialization for the NNTP protocol.
+        This instance method is extracted for supporting the test code.
+        """
         self.debugging = 0
         self.welcome = self._getresp()
 
@@ -371,6 +392,12 @@
                 if is_connected():
                     self._close()
 
+    def _create_socket(self, timeout):
+        if timeout is not None and not timeout:
+            raise ValueError('Non-blocking socket (timeout=0) is not supported')
+        sys.audit("nntplib.connect", self, self.host, self.port)
+        return socket.create_connection((self.host, self.port), timeout)
+
     def getwelcome(self):
         """Get the welcome message from the server
         (this is read and squirreled away by __init__()).
@@ -834,44 +861,6 @@
         fmt = self._getoverviewfmt()
         return resp, _parse_overview(lines, fmt)
 
-    def xgtitle(self, group, *, file=None):
-        """Process an XGTITLE command (optional server extension) Arguments:
-        - group: group name wildcard (i.e. news.*)
-        Returns:
-        - resp: server response if successful
-        - list: list of (name,title) strings"""
-        warnings.warn("The XGTITLE extension is not actively used, "
-                      "use descriptions() instead",
-                      DeprecationWarning, 2)
-        line_pat = re.compile('^([^ \t]+)[ \t]+(.*)$')
-        resp, raw_lines = self._longcmdstring('XGTITLE ' + group, file)
-        lines = []
-        for raw_line in raw_lines:
-            match = line_pat.search(raw_line.strip())
-            if match:
-                lines.append(match.group(1, 2))
-        return resp, lines
-
-    def xpath(self, id):
-        """Process an XPATH command (optional server extension) Arguments:
-        - id: Message id of article
-        Returns:
-        resp: server response if successful
-        path: directory path to article
-        """
-        warnings.warn("The XPATH extension is not actively used",
-                      DeprecationWarning, 2)
-
-        resp = self._shortcmd('XPATH {0}'.format(id))
-        if not resp.startswith('223'):
-            raise NNTPReplyError(resp)
-        try:
-            [resp_num, path] = resp.split()
-        except ValueError:
-            raise NNTPReplyError(resp) from None
-        else:
-            return resp, path
-
     def date(self):
         """Process the DATE command.
         Returns:
@@ -927,8 +916,12 @@
         return self._post('IHAVE {0}'.format(message_id), data)
 
     def _close(self):
-        self.file.close()
-        del self.file
+        try:
+            if self.file:
+                self.file.close()
+                del self.file
+        finally:
+            self.sock.close()
 
     def quit(self):
         """Process a QUIT command and close the socket.  Returns:
@@ -1018,54 +1011,8 @@
                 raise NNTPError("TLS failed to start.")
 
 
-class NNTP(_NNTPBase):
-
-    def __init__(self, host, port=NNTP_PORT, user=None, password=None,
-                 readermode=None, usenetrc=False,
-                 timeout=_GLOBAL_DEFAULT_TIMEOUT):
-        """Initialize an instance.  Arguments:
-        - host: hostname to connect to
-        - port: port to connect to (default the standard NNTP port)
-        - user: username to authenticate with
-        - password: password to use with username
-        - readermode: if true, send 'mode reader' command after
-                      connecting.
-        - usenetrc: allow loading username and password from ~/.netrc file
-                    if not specified explicitly
-        - timeout: timeout (in seconds) used for socket connections
-
-        readermode is sometimes necessary if you are connecting to an
-        NNTP server on the local machine and intend to call
-        reader-specific commands, such as `group'.  If you get
-        unexpected NNTPPermanentErrors, you might need to set
-        readermode.
-        """
-        self.host = host
-        self.port = port
-        sys.audit("nntplib.connect", self, host, port)
-        self.sock = socket.create_connection((host, port), timeout)
-        file = None
-        try:
-            file = self.sock.makefile("rwb")
-            _NNTPBase.__init__(self, file, host,
-                               readermode, timeout)
-            if user or usenetrc:
-                self.login(user, password, usenetrc)
-        except:
-            if file:
-                file.close()
-            self.sock.close()
-            raise
-
-    def _close(self):
-        try:
-            _NNTPBase._close(self)
-        finally:
-            self.sock.close()
-
-
 if _have_ssl:
-    class NNTP_SSL(_NNTPBase):
+    class NNTP_SSL(NNTP):
 
         def __init__(self, host, port=NNTP_SSL_PORT,
                     user=None, password=None, ssl_context=None,
@@ -1074,27 +1021,19 @@
             """This works identically to NNTP.__init__, except for the change
             in default port and the `ssl_context` argument for SSL connections.
             """
-            sys.audit("nntplib.connect", self, host, port)
-            self.sock = socket.create_connection((host, port), timeout)
-            file = None
-            try:
-                self.sock = _encrypt_on(self.sock, ssl_context, host)
-                file = self.sock.makefile("rwb")
-                _NNTPBase.__init__(self, file, host,
-                                   readermode=readermode, timeout=timeout)
-                if user or usenetrc:
-                    self.login(user, password, usenetrc)
-            except:
-                if file:
-                    file.close()
-                self.sock.close()
-                raise
+            self.ssl_context = ssl_context
+            super().__init__(host, port, user, password, readermode,
+                             usenetrc, timeout)
 
-        def _close(self):
+        def _create_socket(self, timeout):
+            sock = super()._create_socket(timeout)
             try:
-                _NNTPBase._close(self)
-            finally:
-                self.sock.close()
+                sock = _encrypt_on(sock, self.ssl_context, self.host)
+            except:
+                sock.close()
+                raise
+            else:
+                return sock
 
     __all__.append("NNTP_SSL")
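
The refactor above routes all connection setup through the _create_socket() hook, which NNTP_SSL overrides to wrap the plain socket in TLS. The same seam works for other customizations; a hypothetical sketch that pins a source address (all names illustrative):

    import socket
    import nntplib

    class BoundNNTP(nntplib.NNTP):
        # Hypothetical subclass: bind outgoing connections to a local address.
        def __init__(self, host, source_address, **kwargs):
            self._source_address = source_address  # set before super().__init__
            super().__init__(host, **kwargs)

        def _create_socket(self, timeout):
            return socket.create_connection(
                (self.host, self.port), timeout,
                source_address=self._source_address)
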
 
diff --git a/common/py3-stdlib/opcode.py b/common/py3-stdlib/opcode.py
index 3fb716b..ac1aa53 100644
--- a/common/py3-stdlib/opcode.py
+++ b/common/py3-stdlib/opcode.py
@@ -21,8 +21,7 @@
 except ImportError:
     pass
 
-cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is',
-        'is not', 'exception match', 'BAD')
+cmp_op = ('<', '<=', '==', '!=', '>', '>=')
 
 hasconst = []
 hasname = []
@@ -84,10 +83,12 @@
 def_op('INPLACE_FLOOR_DIVIDE', 28)
 def_op('INPLACE_TRUE_DIVIDE', 29)
 
+def_op('RERAISE', 48)
+def_op('WITH_EXCEPT_START', 49)
 def_op('GET_AITER', 50)
 def_op('GET_ANEXT', 51)
 def_op('BEFORE_ASYNC_WITH', 52)
-def_op('BEGIN_FINALLY', 53)
+
 def_op('END_ASYNC_FOR', 54)
 def_op('INPLACE_ADD', 55)
 def_op('INPLACE_SUBTRACT', 56)
@@ -109,20 +110,20 @@
 def_op('LOAD_BUILD_CLASS', 71)
 def_op('YIELD_FROM', 72)
 def_op('GET_AWAITABLE', 73)
-
+def_op('LOAD_ASSERTION_ERROR', 74)
 def_op('INPLACE_LSHIFT', 75)
 def_op('INPLACE_RSHIFT', 76)
 def_op('INPLACE_AND', 77)
 def_op('INPLACE_XOR', 78)
 def_op('INPLACE_OR', 79)
-def_op('WITH_CLEANUP_START', 81)
-def_op('WITH_CLEANUP_FINISH', 82)
+
+def_op('LIST_TO_TUPLE', 82)
 def_op('RETURN_VALUE', 83)
 def_op('IMPORT_STAR', 84)
 def_op('SETUP_ANNOTATIONS', 85)
 def_op('YIELD_VALUE', 86)
 def_op('POP_BLOCK', 87)
-def_op('END_FINALLY', 88)
+
 def_op('POP_EXCEPT', 89)
 
 HAVE_ARGUMENT = 90              # Opcodes from here have an argument:
@@ -158,6 +159,10 @@
 
 name_op('LOAD_GLOBAL', 116)     # Index in name list
 
+def_op('IS_OP', 117)
+def_op('CONTAINS_OP', 118)
+
+jabs_op('JUMP_IF_NOT_EXC_MATCH', 121)
 jrel_op('SETUP_FINALLY', 122)   # Distance to target address
 
 def_op('LOAD_FAST', 124)        # Local variable number
@@ -195,22 +200,18 @@
 def_op('EXTENDED_ARG', 144)
 EXTENDED_ARG = 144
 
-def_op('BUILD_LIST_UNPACK', 149)
-def_op('BUILD_MAP_UNPACK', 150)
-def_op('BUILD_MAP_UNPACK_WITH_CALL', 151)
-def_op('BUILD_TUPLE_UNPACK', 152)
-def_op('BUILD_SET_UNPACK', 153)
-
 jrel_op('SETUP_ASYNC_WITH', 154)
 
 def_op('FORMAT_VALUE', 155)
 def_op('BUILD_CONST_KEY_MAP', 156)
 def_op('BUILD_STRING', 157)
-def_op('BUILD_TUPLE_UNPACK_WITH_CALL', 158)
 
 name_op('LOAD_METHOD', 160)
 def_op('CALL_METHOD', 161)
-jrel_op('CALL_FINALLY', 162)
-def_op('POP_FINALLY', 163)
+
+def_op('LIST_EXTEND', 162)
+def_op('SET_UPDATE', 163)
+def_op('DICT_MERGE', 164)
+def_op('DICT_UPDATE', 165)
 
 del def_op, name_op, jrel_op, jabs_op
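
The effect of these opcode changes is easy to observe with dis on a 3.9 interpreter; identity and containment tests get dedicated opcodes instead of COMPARE_OP, and unpacking builds go through LIST_EXTEND:

    import dis

    dis.dis("x in y")      # CONTAINS_OP 0 instead of COMPARE_OP 'in'
    dis.dis("x is not y")  # IS_OP 1 instead of COMPARE_OP 'is not'
    dis.dis("[*a, *b]")    # BUILD_LIST + LIST_EXTEND replaces BUILD_LIST_UNPACK
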
diff --git a/common/py3-stdlib/os.py b/common/py3-stdlib/os.py
index 253cad1..b794159 100644
--- a/common/py3-stdlib/os.py
+++ b/common/py3-stdlib/os.py
@@ -28,6 +28,8 @@
 
 from _collections_abc import _check_methods
 
+GenericAlias = type(list[int])
+
 _names = sys.builtin_module_names
 
 # Note:  more names are added to __all__ later.
@@ -336,7 +338,10 @@
             dirs.remove('CVS')  # don't visit CVS directories
 
     """
-    top = fspath(top)
+    sys.audit("os.walk", top, topdown, onerror, followlinks)
+    return _walk(fspath(top), topdown, onerror, followlinks)
+
+def _walk(top, topdown, onerror, followlinks):
     dirs = []
     nondirs = []
     walk_dirs = []
@@ -410,11 +415,11 @@
             # the caller can replace the directory entry during the "yield"
             # above.
             if followlinks or not islink(new_path):
-                yield from walk(new_path, topdown, onerror, followlinks)
+                yield from _walk(new_path, topdown, onerror, followlinks)
     else:
         # Recurse into sub-directories
         for new_path in walk_dirs:
-            yield from walk(new_path, topdown, onerror, followlinks)
+            yield from _walk(new_path, topdown, onerror, followlinks)
         # Yield after recursion if going bottom up
         yield top, dirs, nondirs
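
The sys.audit() call above fires one "os.walk" event per call to the public wrapper; recursion goes through the private _walk() and stays silent. A minimal observer (note that audit hooks cannot be removed once installed):

    import sys, os

    def audit_hook(event, args):
        if event == "os.walk":
            print("os.walk:", args[0])  # args = (top, topdown, onerror, followlinks)

    sys.addaudithook(audit_hook)
    next(os.walk("."), None)  # prints exactly once
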
 
@@ -455,6 +460,7 @@
             if 'CVS' in dirs:
                 dirs.remove('CVS')  # don't visit CVS directories
         """
+        sys.audit("os.fwalk", top, topdown, onerror, follow_symlinks, dir_fd)
         if not isinstance(top, int) or not hasattr(top, '__index__'):
             top = fspath(top)
         # Note: To guard against symlink races, we use the standard
@@ -654,17 +660,15 @@
     return path_list.split(pathsep)
 
 
-# Change environ to automatically call putenv(), unsetenv if they exist.
-from _collections_abc import MutableMapping
+# Change environ to automatically call putenv() and unsetenv()
+from _collections_abc import MutableMapping, Mapping
 
 class _Environ(MutableMapping):
-    def __init__(self, data, encodekey, decodekey, encodevalue, decodevalue, putenv, unsetenv):
+    def __init__(self, data, encodekey, decodekey, encodevalue, decodevalue):
         self.encodekey = encodekey
         self.decodekey = decodekey
         self.encodevalue = encodevalue
         self.decodevalue = decodevalue
-        self.putenv = putenv
-        self.unsetenv = unsetenv
         self._data = data
 
     def __getitem__(self, key):
@@ -678,12 +682,12 @@
     def __setitem__(self, key, value):
         key = self.encodekey(key)
         value = self.encodevalue(value)
-        self.putenv(key, value)
+        putenv(key, value)
         self._data[key] = value
 
     def __delitem__(self, key):
         encodedkey = self.encodekey(key)
-        self.unsetenv(encodedkey)
+        unsetenv(encodedkey)
         try:
             del self._data[encodedkey]
         except KeyError:
@@ -712,21 +716,23 @@
             self[key] = value
         return self[key]
 
-try:
-    _putenv = putenv
-except NameError:
-    _putenv = lambda key, value: None
-else:
-    if "putenv" not in __all__:
-        __all__.append("putenv")
+    def __ior__(self, other):
+        self.update(other)
+        return self
 
-try:
-    _unsetenv = unsetenv
-except NameError:
-    _unsetenv = lambda key: _putenv(key, "")
-else:
-    if "unsetenv" not in __all__:
-        __all__.append("unsetenv")
+    def __or__(self, other):
+        if not isinstance(other, Mapping):
+            return NotImplemented
+        new = dict(self)
+        new.update(other)
+        return new
+
+    def __ror__(self, other):
+        if not isinstance(other, Mapping):
+            return NotImplemented
+        new = dict(other)
+        new.update(self)
+        return new
 
 def _createenviron():
     if name == 'nt':
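
With __ior__/__or__/__ror__ defined, os.environ participates in PEP 584's dict union operators: |= mutates the real environment (calling putenv() via __setitem__), while | returns a detached plain dict. Variable names here are hypothetical:

    import os

    os.environ |= {"MY_FLAG": "1"}        # updates the process environment
    merged = os.environ | {"EXTRA": "x"}  # plain dict; environment unchanged
    print(os.environ["MY_FLAG"], type(merged) is dict)  # 1 True
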
@@ -755,8 +761,7 @@
         data = environ
     return _Environ(data,
         encodekey, decode,
-        encode, decode,
-        _putenv, _unsetenv)
+        encode, decode)
 
 # unicode environ
 environ = _createenviron()
@@ -781,8 +786,7 @@
     # bytes environ
     environb = _Environ(environ._data,
         _check_bytes, bytes,
-        _check_bytes, bytes,
-        _putenv, _unsetenv)
+        _check_bytes, bytes)
     del _check_bytes
 
     def getenvb(key, default=None):
@@ -862,12 +866,8 @@
                 wpid, sts = waitpid(pid, 0)
                 if WIFSTOPPED(sts):
                     continue
-                elif WIFSIGNALED(sts):
-                    return -WTERMSIG(sts)
-                elif WIFEXITED(sts):
-                    return WEXITSTATUS(sts)
-                else:
-                    raise OSError("Not stopped, signaled or exited???")
+
+                return waitstatus_to_exitcode(sts)
 
     def spawnv(mode, file, args):
         """spawnv(mode, file, args) -> integer
@@ -1076,6 +1076,8 @@
             return _check_methods(subclass, '__fspath__')
         return NotImplemented
 
+    __class_getitem__ = classmethod(GenericAlias)
+
 
 if name == 'nt':
     class _AddedDllDirectory:
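
The GenericAlias hookup above makes os.PathLike subscriptable at runtime, so 3.9 code can parameterize it in annotations without quoting:

    import os

    def to_str(path: os.PathLike[str]) -> str:
        return os.fspath(path)

    print(to_str.__annotations__["path"])  # os.PathLike[str]
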
diff --git a/common/py3-stdlib/pathlib.py b/common/py3-stdlib/pathlib.py
index ff8bac9..147be2f 100644
--- a/common/py3-stdlib/pathlib.py
+++ b/common/py3-stdlib/pathlib.py
@@ -329,7 +329,10 @@
                     # parent dir
                     path, _, _ = path.rpartition(sep)
                     continue
-                newpath = path + sep + name
+                if path.endswith(sep):
+                    newpath = path + name
+                else:
+                    newpath = path + sep + name
                 if newpath in seen:
                     # Already seen this path
                     path = seen[newpath]
@@ -447,6 +450,20 @@
     def readlink(self, path):
         return os.readlink(path)
 
+    def owner(self, path):
+        try:
+            import pwd
+            return pwd.getpwuid(self.stat(path).st_uid).pw_name
+        except ImportError:
+            raise NotImplementedError("Path.owner() is unsupported on this system")
+
+    def group(self, path):
+        try:
+            import grp
+            return grp.getgrgid(self.stat(path).st_gid).gr_name
+        except ImportError:
+            raise NotImplementedError("Path.group() is unsupported on this system")
+
 
 _normal_accessor = _NormalAccessor()
 
@@ -785,6 +802,9 @@
             return NotImplemented
         return self._cparts >= other._cparts
 
+    def __class_getitem__(cls, type):
+        return cls
+
     drive = property(attrgetter('_drv'),
                      doc="""The drive prefix (letter or UNC path), if any.""")
 
@@ -853,6 +873,10 @@
         return self._from_parsed_parts(self._drv, self._root,
                                        self._parts[:-1] + [name])
 
+    def with_stem(self, stem):
+        """Return a new path with the stem changed."""
+        return self.with_name(stem + self.suffix)
+
     def with_suffix(self, suffix):
         """Return a new path with the file suffix changed.  If the path
         has no suffix, add given suffix.  If the given suffix is an empty
@@ -901,11 +925,21 @@
         cf = self._flavour.casefold_parts
         if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts):
             formatted = self._format_parsed_parts(to_drv, to_root, to_parts)
-            raise ValueError("{!r} does not start with {!r}"
+            raise ValueError("{!r} is not in the subpath of {!r}"
+                    " OR one path is relative and the other is absolute."
                              .format(str(self), str(formatted)))
         return self._from_parsed_parts('', root if n == 1 else '',
                                        abs_parts[n:])
 
+    def is_relative_to(self, *other):
+        """Return True if the path is relative to another path or False.
+        """
+        try:
+            self.relative_to(*other)
+            return True
+        except ValueError:
+            return False
+
     @property
     def parts(self):
         """An object providing sequence-like access to the
@@ -1029,7 +1063,6 @@
     """
     __slots__ = (
         '_accessor',
-        '_closed',
     )
 
     def __new__(cls, *args, **kwargs):
@@ -1046,7 +1079,6 @@
               # Private non-constructor arguments
               template=None,
               ):
-        self._closed = False
         if template is not None:
             self._accessor = template._accessor
         else:
@@ -1059,15 +1091,18 @@
         return self._from_parsed_parts(self._drv, self._root, parts)
 
     def __enter__(self):
-        if self._closed:
-            self._raise_closed()
         return self
 
     def __exit__(self, t, v, tb):
-        self._closed = True
-
-    def _raise_closed(self):
-        raise ValueError("I/O operation on closed path")
+        # https://bugs.python.org/issue39682
+        # In previous versions of pathlib, this method marked this path as
+        # closed; subsequent attempts to perform I/O would raise an IOError.
+        # This functionality was never documented, and had the effect of
+        # making Path objects mutable, contrary to PEP 428. In Python 3.9 the
+        # _closed attribute was removed, and this method made a no-op.
+        # This method and __enter__()/__exit__() should be deprecated and
+        # removed in the future.
+        pass
 
     def _opener(self, name, flags, mode=0o666):
         # A stub for the opener argument to built-in open()
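
As the bpo-39682 note above says, the context-manager protocol on paths is now inert; a path stays fully usable after its with-block:

    from pathlib import Path

    p = Path(".")
    with p:
        pass
    list(p.iterdir())  # fine on 3.9; previously raised 'I/O operation on closed path'
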
@@ -1078,8 +1113,6 @@
         Open the file pointed by this path and return a file descriptor,
         as os.open() does.
         """
-        if self._closed:
-            self._raise_closed()
         return self._accessor.open(self, flags, mode)
 
     # Public API
@@ -1106,27 +1139,24 @@
         try:
             other_st = other_path.stat()
         except AttributeError:
-            other_st = os.stat(other_path)
+            other_st = self._accessor.stat(other_path)
         return os.path.samestat(st, other_st)
 
     def iterdir(self):
         """Iterate over the files in this directory.  Does not yield any
         result for the special paths '.' and '..'.
         """
-        if self._closed:
-            self._raise_closed()
         for name in self._accessor.listdir(self):
             if name in {'.', '..'}:
                 # Yielding a path object for these makes little sense
                 continue
             yield self._make_child_relpath(name)
-            if self._closed:
-                self._raise_closed()
 
     def glob(self, pattern):
         """Iterate over this subtree and yield all existing files (of any
         kind, including directories) matching the given relative pattern.
         """
+        sys.audit("pathlib.Path.glob", self, pattern)
         if not pattern:
             raise ValueError("Unacceptable pattern: {!r}".format(pattern))
         drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
@@ -1141,6 +1171,7 @@
         directories) matching the given relative pattern, anywhere in
         this subtree.
         """
+        sys.audit("pathlib.Path.rglob", self, pattern)
         drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
         if drv or root:
             raise NotImplementedError("Non-relative patterns are unsupported")
@@ -1156,8 +1187,6 @@
         Use resolve() to get the canonical path to a file.
         """
         # XXX untested yet!
-        if self._closed:
-            self._raise_closed()
         if self.is_absolute():
             return self
         # FIXME this must defer to the specific flavour (and, under Windows,
@@ -1172,8 +1201,6 @@
         normalizing it (for example turning slashes into backslashes under
         Windows).
         """
-        if self._closed:
-            self._raise_closed()
         s = self._flavour.resolve(self, strict=strict)
         if s is None:
             # No symlink resolution => for consistency, raise an error if
@@ -1197,15 +1224,13 @@
         """
         Return the login name of the file owner.
         """
-        import pwd
-        return pwd.getpwuid(self.stat().st_uid).pw_name
+        return self._accessor.owner(self)
 
     def group(self):
         """
         Return the group name of the file gid.
         """
-        import grp
-        return grp.getgrgid(self.stat().st_gid).gr_name
+        return self._accessor.group(self)
 
     def open(self, mode='r', buffering=-1, encoding=None,
              errors=None, newline=None):
@@ -1213,8 +1238,6 @@
         Open the file pointed by this path and return a file object, as
         the built-in open() function does.
         """
-        if self._closed:
-            self._raise_closed()
         return io.open(self, mode, buffering, encoding, errors, newline,
                        opener=self._opener)
 
@@ -1251,12 +1274,19 @@
         with self.open(mode='w', encoding=encoding, errors=errors) as f:
             return f.write(data)
 
+    def readlink(self):
+        """
+        Return the path to which the symbolic link points.
+        """
+        path = self._accessor.readlink(self)
+        obj = self._from_parts((path,), init=False)
+        obj._init(template=self)
+        return obj
+
     def touch(self, mode=0o666, exist_ok=True):
         """
         Create this file with the given access mode, if it doesn't exist.
         """
-        if self._closed:
-            self._raise_closed()
         if exist_ok:
             # First try to bump modification time
             # Implementation note: GNU touch uses the UTIME_NOW option of
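
A small POSIX-flavoured sketch of the new Path.readlink() (temp names are illustrative):

    import tempfile
    from pathlib import Path

    d = Path(tempfile.mkdtemp())
    (d / "target").write_text("data")
    (d / "current").symlink_to("target")
    print((d / "current").readlink())  # target: the raw link value, not a resolved path
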
@@ -1278,8 +1308,6 @@
         """
         Create a new directory at this given path.
         """
-        if self._closed:
-            self._raise_closed()
         try:
             self._accessor.mkdir(self, mode)
         except FileNotFoundError:
@@ -1297,8 +1325,6 @@
         """
         Change the permissions of the path, like os.chmod().
         """
-        if self._closed:
-            self._raise_closed()
         self._accessor.chmod(self, mode)
 
     def lchmod(self, mode):
@@ -1306,8 +1332,6 @@
         Like chmod(), except if the path points to a symlink, the symlink's
         permissions are changed, rather than its target's.
         """
-        if self._closed:
-            self._raise_closed()
         self._accessor.lchmod(self, mode)
 
     def unlink(self, missing_ok=False):
@@ -1315,8 +1339,6 @@
         Remove this file or link.
         If the path is a directory, use rmdir() instead.
         """
-        if self._closed:
-            self._raise_closed()
         try:
             self._accessor.unlink(self)
         except FileNotFoundError:
@@ -1327,8 +1349,6 @@
         """
         Remove this directory.  The directory must be empty.
         """
-        if self._closed:
-            self._raise_closed()
         self._accessor.rmdir(self)
 
     def lstat(self):
@@ -1336,36 +1356,37 @@
         Like stat(), except if the path points to a symlink, the symlink's
         status information is returned, rather than its target's.
         """
-        if self._closed:
-            self._raise_closed()
         return self._accessor.lstat(self)
 
     def link_to(self, target):
         """
         Create a hard link pointing to a path named target.
         """
-        if self._closed:
-            self._raise_closed()
         self._accessor.link_to(self, target)
 
     def rename(self, target):
         """
-        Rename this path to the given path,
-        and return a new Path instance pointing to the given path.
+        Rename this path to the target path.
+
+        The target path may be absolute or relative. Relative paths are
+        interpreted relative to the current working directory, *not* the
+        directory of the Path object.
+
+        Returns the new Path instance pointing to the target path.
         """
-        if self._closed:
-            self._raise_closed()
         self._accessor.rename(self, target)
         return self.__class__(target)
 
     def replace(self, target):
         """
-        Rename this path to the given path, clobbering the existing
-        destination if it exists, and return a new Path instance
-        pointing to the given path.
+        Rename this path to the target path, overwriting if that path exists.
+
+        The target path may be absolute or relative. Relative paths are
+        interpreted relative to the current working directory, *not* the
+        directory of the Path object.
+
+        Returns the new Path instance pointing to the target path.
         """
-        if self._closed:
-            self._raise_closed()
         self._accessor.replace(self, target)
         return self.__class__(target)
 
@@ -1374,8 +1395,6 @@
         Make this path a symlink pointing to the given path.
         Note the order of arguments (self, target) is the reverse of os.symlink's.
         """
-        if self._closed:
-            self._raise_closed()
         self._accessor.symlink(target, self, target_is_directory)
 
     # Convenience functions for querying the stat results
@@ -1436,9 +1455,8 @@
         if not self.exists() or not self.is_dir():
             return False
 
-        parent = Path(self.parent)
         try:
-            parent_dev = parent.stat().st_dev
+            parent_dev = self.parent.stat().st_dev
         except OSError:
             return False
 
@@ -1446,7 +1464,7 @@
         if dev != parent_dev:
             return True
         ino = self.stat().st_ino
-        parent_ino = parent.stat().st_ino
+        parent_ino = self.parent.stat().st_ino
         return ino == parent_ino
 
     def is_symlink(self):
@@ -1554,11 +1572,5 @@
     """
     __slots__ = ()
 
-    def owner(self):
-        raise NotImplementedError("Path.owner() is unsupported on this system")
-
-    def group(self):
-        raise NotImplementedError("Path.group() is unsupported on this system")
-
     def is_mount(self):
         raise NotImplementedError("Path.is_mount() is unsupported on this system")
diff --git a/common/py3-stdlib/pdb.py b/common/py3-stdlib/pdb.py
index 0810235..d7d9571 100755
--- a/common/py3-stdlib/pdb.py
+++ b/common/py3-stdlib/pdb.py
@@ -1312,14 +1312,6 @@
             # _getval() already printed the error
             return
         code = None
-        # Is it a function?
-        try:
-            code = value.__code__
-        except Exception:
-            pass
-        if code:
-            self.message('Function %s' % code.co_name)
-            return
         # Is it an instance method?
         try:
             code = value.__func__.__code__
@@ -1328,6 +1320,14 @@
         if code:
             self.message('Method %s' % code.co_name)
             return
+        # Is it a function?
+        try:
+            code = value.__code__
+        except Exception:
+            pass
+        if code:
+            self.message('Function %s' % code.co_name)
+            return
         # Is it a class?
         if value.__class__ is type:
             self.message('Class %s.%s' % (value.__module__, value.__qualname__))
diff --git a/common/py3-stdlib/pickle.py b/common/py3-stdlib/pickle.py
index af50a9b..e63a8b6 100644
--- a/common/py3-stdlib/pickle.py
+++ b/common/py3-stdlib/pickle.py
@@ -13,7 +13,7 @@
     dump(object, file)
     dumps(object) -> string
     load(file) -> object
-    loads(string) -> object
+    loads(bytes) -> object
 
 Misc variables:
 
@@ -340,7 +340,9 @@
     # Protect the iteration by using a list copy of sys.modules against dynamic
     # modules that trigger imports of other modules upon calls to getattr.
     for module_name, module in sys.modules.copy().items():
-        if module_name == '__main__' or module is None:
+        if (module_name == '__main__'
+            or module_name == '__mp_main__'  # bpo-42406
+            or module is None):
             continue
         try:
             if _getattribute(module, name)[0] is obj:
@@ -1604,17 +1606,29 @@
 
     def load_get(self):
         i = int(self.readline()[:-1])
-        self.append(self.memo[i])
+        try:
+            self.append(self.memo[i])
+        except KeyError:
+            msg = f'Memo value not found at index {i}'
+            raise UnpicklingError(msg) from None
     dispatch[GET[0]] = load_get
 
     def load_binget(self):
         i = self.read(1)[0]
-        self.append(self.memo[i])
+        try:
+            self.append(self.memo[i])
+        except KeyError:
+            msg = f'Memo value not found at index {i}'
+            raise UnpicklingError(msg) from None
     dispatch[BINGET[0]] = load_binget
 
     def load_long_binget(self):
         i, = unpack('<I', self.read(4))
-        self.append(self.memo[i])
+        try:
+            self.append(self.memo[i])
+        except KeyError:
+            msg = f'Memo value not found at index {i}'
+            raise UnpicklingError(msg) from None
     dispatch[LONG_BINGET[0]] = load_long_binget
 
     def load_put(self):
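
The guarded lookups above live in the pure-Python unpickler, so the sketch below drives pickle._Unpickler directly (pickle.loads normally dispatches to the C accelerator, which may report the error differently):

    import io, pickle

    corrupt = b"h\x00."  # BINGET 0 then STOP, but the memo is empty
    try:
        pickle._Unpickler(io.BytesIO(corrupt)).load()
    except pickle.UnpicklingError as exc:
        print(exc)  # Memo value not found at index 0
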
@@ -1749,7 +1763,7 @@
     return _Unpickler(file, fix_imports=fix_imports, buffers=buffers,
                      encoding=encoding, errors=errors).load()
 
-def _loads(s, *, fix_imports=True, encoding="ASCII", errors="strict",
+def _loads(s, /, *, fix_imports=True, encoding="ASCII", errors="strict",
            buffers=None):
     if isinstance(s, str):
         raise TypeError("Can't load pickle from unicode string")
diff --git a/common/py3-stdlib/pkgutil.py b/common/py3-stdlib/pkgutil.py
index 8474a77..4c18467 100644
--- a/common/py3-stdlib/pkgutil.py
+++ b/common/py3-stdlib/pkgutil.py
@@ -7,6 +7,7 @@
 import importlib.machinery
 import os
 import os.path
+import re
 import sys
 from types import ModuleType
 import warnings
@@ -635,3 +636,72 @@
     parts.insert(0, os.path.dirname(mod.__file__))
     resource_name = os.path.join(*parts)
     return loader.get_data(resource_name)
+
+
+_DOTTED_WORDS = r'(?!\d)(\w+)(\.(?!\d)(\w+))*'
+_NAME_PATTERN = re.compile(f'^(?P<pkg>{_DOTTED_WORDS})'
+                           f'(?P<cln>:(?P<obj>{_DOTTED_WORDS})?)?$', re.UNICODE)
+del _DOTTED_WORDS
+
+def resolve_name(name):
+    """
+    Resolve a name to an object.
+
+    It is expected that `name` will be a string in one of the following
+    formats, where W is shorthand for a valid Python identifier and dot stands
+    for a literal period in these pseudo-regexes:
+
+    W(.W)*
+    W(.W)*:(W(.W)*)?
+
+    The first form is intended for backward compatibility only. It assumes that
+    some part of the dotted name is a package, and the rest is an object
+    somewhere within that package, possibly nested inside other objects.
+    Because the place where the package stops and the object hierarchy starts
+    can't be inferred by inspection, repeated attempts to import must be done
+    with this form.
+
+    In the second form, the caller makes the division point clear through the
+    provision of a single colon: the dotted name to the left of the colon is a
+    package to be imported, and the dotted name to the right is the object
+    hierarchy within that package. Only one import is needed in this form. If
+    it ends with the colon, then a module object is returned.
+
+    The function will return an object (which might be a module), or raise one
+    of the following exceptions:
+
+    ValueError - if `name` isn't in a recognised format
+    ImportError - if an import failed when it shouldn't have
+    AttributeError - if a failure occurred when traversing the object hierarchy
+                     within the imported package to get to the desired object
+    """
+    m = _NAME_PATTERN.match(name)
+    if not m:
+        raise ValueError(f'invalid format: {name!r}')
+    gd = m.groupdict()
+    if gd.get('cln'):
+        # there is a colon - a one-step import is all that's needed
+        mod = importlib.import_module(gd['pkg'])
+        parts = gd.get('obj')
+        parts = parts.split('.') if parts else []
+    else:
+        # no colon - have to iterate to find the package boundary
+        parts = name.split('.')
+        modname = parts.pop(0)
+        # first part *must* be a module/package.
+        mod = importlib.import_module(modname)
+        while parts:
+            p = parts[0]
+            s = f'{modname}.{p}'
+            try:
+                mod = importlib.import_module(s)
+                parts.pop(0)
+                modname = s
+            except ImportError:
+                break
+    # if we reach this point, mod is the module, already imported, and
+    # parts is the list of parts in the object hierarchy to be traversed, or
+    # an empty list if just the module is wanted.
+    result = mod
+    for p in parts:
+        result = getattr(result, p)
+    return result
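
The two accepted forms, exercised with stdlib names:

    import pkgutil

    print(pkgutil.resolve_name("os.path:dirname"))  # colon form: one import + getattr
    print(pkgutil.resolve_name("os.path.dirname"))  # legacy form: iterative imports
    print(pkgutil.resolve_name("os.path:"))         # trailing colon yields the module
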
diff --git a/common/py3-stdlib/platform.py b/common/py3-stdlib/platform.py
index 994d892..e9f50ab 100755
--- a/common/py3-stdlib/platform.py
+++ b/common/py3-stdlib/platform.py
@@ -116,6 +116,9 @@
 import os
 import re
 import sys
+import subprocess
+import functools
+import itertools
 
 ### Globals & Constants
 
@@ -600,22 +603,6 @@
             os.path.join(os.path.dirname(filepath), os.readlink(filepath)))
     return filepath
 
-def _syscmd_uname(option, default=''):
-
-    """ Interface to the system's uname command.
-    """
-    if sys.platform in ('dos', 'win32', 'win16'):
-        # XXX Others too ?
-        return default
-
-    import subprocess
-    try:
-        output = subprocess.check_output(('uname', option),
-                                         stderr=subprocess.DEVNULL,
-                                         text=True)
-    except (OSError, subprocess.CalledProcessError):
-        return default
-    return (output.strip() or default)
 
 def _syscmd_file(target, default=''):
 
@@ -736,13 +723,90 @@
 
     return bits, linkage
 
+
+def _get_machine_win32():
+    # Try to use the PROCESSOR_* environment variables
+    # available on Win XP and later; see
+    # http://support.microsoft.com/kb/888731 and
+    # http://www.geocities.com/rick_lively/MANUALS/ENV/MSWIN/PROCESSI.HTM
+
+    # WOW64 processes mask the native architecture
+    return (
+        os.environ.get('PROCESSOR_ARCHITEW6432', '') or
+        os.environ.get('PROCESSOR_ARCHITECTURE', '')
+    )
+
+
+class _Processor:
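+    # Note: the get_* variants below intentionally take no 'self'; get() looks
+    # them up on the class and calls them as plain functions.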
+    @classmethod
+    def get(cls):
+        func = getattr(cls, f'get_{sys.platform}', cls.from_subprocess)
+        return func() or ''
+
+    def get_win32():
+        return os.environ.get('PROCESSOR_IDENTIFIER', _get_machine_win32())
+
+    def get_OpenVMS():
+        try:
+            import vms_lib
+        except ImportError:
+            pass
+        else:
+            csid, cpu_number = vms_lib.getsyi('SYI$_CPU', 0)
+            return 'Alpha' if cpu_number >= 128 else 'VAX'
+
+    def from_subprocess():
+        """
+        Fall back to `uname -p`
+        """
+        try:
+            return subprocess.check_output(
+                ['uname', '-p'],
+                stderr=subprocess.DEVNULL,
+                text=True,
+            ).strip()
+        except (OSError, subprocess.CalledProcessError):
+            pass
+
+
+def _unknown_as_blank(val):
+    return '' if val == 'unknown' else val
+
+
 ### Portable uname() interface
 
-uname_result = collections.namedtuple("uname_result",
-                    "system node release version machine processor")
+class uname_result(
+    collections.namedtuple(
+        "uname_result_base",
+        "system node release version machine")
+        ):
+    """
+    A uname_result that's largely compatible with a
+    simple namedtuple except that 'platform' is
+    resolved late and cached to avoid calling "uname"
+    except when needed.
+    """
+
+    @functools.cached_property
+    def processor(self):
+        return _unknown_as_blank(_Processor.get())
+
+    def __iter__(self):
+        return itertools.chain(
+            super().__iter__(),
+            (self.processor,)
+        )
+
+    def __getitem__(self, key):
+        return tuple(iter(self))[key]
+
+    def __len__(self):
+        return len(tuple(iter(self)))
+
 
 _uname_cache = None
 
+
 def uname():
 
     """ Fairly portable uname interface. Returns a tuple
@@ -756,52 +820,30 @@
 
     """
     global _uname_cache
-    no_os_uname = 0
 
     if _uname_cache is not None:
         return _uname_cache
 
-    processor = ''
-
     # Get some infos from the builtin os.uname API...
     try:
-        system, node, release, version, machine = os.uname()
+        system, node, release, version, machine = infos = os.uname()
     except AttributeError:
-        no_os_uname = 1
+        system = sys.platform
+        node = _node()
+        release = version = machine = ''
+        infos = ()
 
-    if no_os_uname or not list(filter(None, (system, node, release, version, machine))):
-        # Hmm, no there is either no uname or uname has returned
-        #'unknowns'... we'll have to poke around the system then.
-        if no_os_uname:
-            system = sys.platform
-            release = ''
-            version = ''
-            node = _node()
-            machine = ''
-
-        use_syscmd_ver = 1
+    if not any(infos):
+        # uname is not available
 
         # Try win32_ver() on win32 platforms
         if system == 'win32':
             release, version, csd, ptype = win32_ver()
-            if release and version:
-                use_syscmd_ver = 0
-            # Try to use the PROCESSOR_* environment variables
-            # available on Win XP and later; see
-            # http://support.microsoft.com/kb/888731 and
-            # http://www.geocities.com/rick_lively/MANUALS/ENV/MSWIN/PROCESSI.HTM
-            if not machine:
-                # WOW64 processes mask the native architecture
-                if "PROCESSOR_ARCHITEW6432" in os.environ:
-                    machine = os.environ.get("PROCESSOR_ARCHITEW6432", '')
-                else:
-                    machine = os.environ.get('PROCESSOR_ARCHITECTURE', '')
-            if not processor:
-                processor = os.environ.get('PROCESSOR_IDENTIFIER', machine)
+            machine = machine or _get_machine_win32()
 
         # Try the 'ver' system command available on some
         # platforms
-        if use_syscmd_ver:
+        if not (release and version):
             system, release, version = _syscmd_ver(system)
             # Normalize system to what win32_ver() normally returns
             # (_syscmd_ver() tends to return the vendor name as well)
@@ -841,42 +883,15 @@
         if not release or release == '0':
             release = version
             version = ''
-        # Get processor information
-        try:
-            import vms_lib
-        except ImportError:
-            pass
-        else:
-            csid, cpu_number = vms_lib.getsyi('SYI$_CPU', 0)
-            if (cpu_number >= 128):
-                processor = 'Alpha'
-            else:
-                processor = 'VAX'
-    if not processor:
-        # Get processor information from the uname system command
-        processor = _syscmd_uname('-p', '')
-
-    #If any unknowns still exist, replace them with ''s, which are more portable
-    if system == 'unknown':
-        system = ''
-    if node == 'unknown':
-        node = ''
-    if release == 'unknown':
-        release = ''
-    if version == 'unknown':
-        version = ''
-    if machine == 'unknown':
-        machine = ''
-    if processor == 'unknown':
-        processor = ''
 
     #  normalize name
     if system == 'Microsoft' and release == 'Windows':
         system = 'Windows'
         release = 'Vista'
 
-    _uname_cache = uname_result(system, node, release, version,
-                                machine, processor)
+    vals = system, node, release, version, machine
+    # Replace 'unknown' values with the more portable ''
+    _uname_cache = uname_result(*map(_unknown_as_blank, vals))
     return _uname_cache
 
 ### Direct interfaces to some of the uname() return values
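
uname_result still quacks like the old 6-tuple, but the processor field is now computed on first attribute access (falling back to `uname -p`) rather than eagerly inside uname():

    import platform

    u = platform.uname()
    print(u.system, u.machine)           # no subprocess spawned yet
    print(u.processor)                   # resolved lazily here, then cached
    print(len(u), u[-1] == u.processor)  # 6 True: tuple access still works
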
@@ -1202,7 +1217,7 @@
 
     elif system in ('Linux',):
         # check for libc vs. glibc
-        libcname, libcversion = libc_ver(sys.executable)
+        libcname, libcversion = libc_ver()
         platform = _platform(system, release, machine, processor,
                              'with',
                              libcname+libcversion)
diff --git a/common/py3-stdlib/plistlib.py b/common/py3-stdlib/plistlib.py
index 04f8a87..2eeebe4 100644
--- a/common/py3-stdlib/plistlib.py
+++ b/common/py3-stdlib/plistlib.py
@@ -46,14 +46,11 @@
     print(pl["aKey"])
 """
 __all__ = [
-    "readPlist", "writePlist", "readPlistFromBytes", "writePlistToBytes",
-    "Data", "InvalidFileException", "FMT_XML", "FMT_BINARY",
-    "load", "dump", "loads", "dumps", "UID"
+    "InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"
 ]
 
 import binascii
 import codecs
-import contextlib
 import datetime
 import enum
 from io import BytesIO
@@ -61,7 +58,6 @@
 import os
 import re
 import struct
-from warnings import warn
 from xml.parsers.expat import ParserCreate
 
 
@@ -69,112 +65,6 @@
 globals().update(PlistFormat.__members__)
 
 
-#
-#
-# Deprecated functionality
-#
-#
-
-
-@contextlib.contextmanager
-def _maybe_open(pathOrFile, mode):
-    if isinstance(pathOrFile, str):
-        with open(pathOrFile, mode) as fp:
-            yield fp
-
-    else:
-        yield pathOrFile
-
-
-def readPlist(pathOrFile):
-    """
-    Read a .plist from a path or file. pathOrFile should either
-    be a file name, or a readable binary file object.
-
-    This function is deprecated, use load instead.
-    """
-    warn("The readPlist function is deprecated, use load() instead",
-        DeprecationWarning, 2)
-
-    with _maybe_open(pathOrFile, 'rb') as fp:
-        return load(fp, fmt=None, use_builtin_types=False)
-
-def writePlist(value, pathOrFile):
-    """
-    Write 'value' to a .plist file. 'pathOrFile' may either be a
-    file name or a (writable) file object.
-
-    This function is deprecated, use dump instead.
-    """
-    warn("The writePlist function is deprecated, use dump() instead",
-        DeprecationWarning, 2)
-    with _maybe_open(pathOrFile, 'wb') as fp:
-        dump(value, fp, fmt=FMT_XML, sort_keys=True, skipkeys=False)
-
-
-def readPlistFromBytes(data):
-    """
-    Read a plist data from a bytes object. Return the root object.
-
-    This function is deprecated, use loads instead.
-    """
-    warn("The readPlistFromBytes function is deprecated, use loads() instead",
-        DeprecationWarning, 2)
-    return load(BytesIO(data), fmt=None, use_builtin_types=False)
-
-
-def writePlistToBytes(value):
-    """
-    Return 'value' as a plist-formatted bytes object.
-
-    This function is deprecated, use dumps instead.
-    """
-    warn("The writePlistToBytes function is deprecated, use dumps() instead",
-        DeprecationWarning, 2)
-    f = BytesIO()
-    dump(value, f, fmt=FMT_XML, sort_keys=True, skipkeys=False)
-    return f.getvalue()
-
-
-class Data:
-    """
-    Wrapper for binary data.
-
-    This class is deprecated, use a bytes object instead.
-    """
-
-    def __init__(self, data):
-        if not isinstance(data, bytes):
-            raise TypeError("data must be as bytes")
-        self.data = data
-
-    @classmethod
-    def fromBase64(cls, data):
-        # base64.decodebytes just calls binascii.a2b_base64;
-        # it seems overkill to use both base64 and binascii.
-        return cls(_decode_base64(data))
-
-    def asBase64(self, maxlinelength=76):
-        return _encode_base64(self.data, maxlinelength)
-
-    def __eq__(self, other):
-        if isinstance(other, self.__class__):
-            return self.data == other.data
-        elif isinstance(other, bytes):
-            return self.data == other
-        else:
-            return NotImplemented
-
-    def __repr__(self):
-        return "%s(%s)" % (self.__class__.__name__, repr(self.data))
-
-#
-#
-# End of deprecated functionality
-#
-#
-
-
 class UID:
     def __init__(self, data):
         if not isinstance(data, int):
@@ -202,7 +92,6 @@
     def __hash__(self):
         return hash(self.data)
 
-
 #
 # XML support
 #
@@ -273,11 +162,10 @@
     return text
 
 class _PlistParser:
-    def __init__(self, use_builtin_types, dict_type):
+    def __init__(self, dict_type):
         self.stack = []
         self.current_key = None
         self.root = None
-        self._use_builtin_types = use_builtin_types
         self._dict_type = dict_type
 
     def parse(self, fileobj):
@@ -285,9 +173,16 @@
         self.parser.StartElementHandler = self.handle_begin_element
         self.parser.EndElementHandler = self.handle_end_element
         self.parser.CharacterDataHandler = self.handle_data
+        self.parser.EntityDeclHandler = self.handle_entity_decl
         self.parser.ParseFile(fileobj)
         return self.root
 
+    def handle_entity_decl(self, entity_name, is_parameter_entity, value, base, system_id, public_id, notation_name):
+        # Reject plist files with entity declarations to avoid XML vulnerabilities in expat.
+        # Regular plist files don't contain those declarations, and Apple's plutil tool does not
+        # accept them either.
+        raise InvalidFileException("XML entity declarations are not supported in plist files")
+
     def handle_begin_element(self, element, attrs):
         self.data = []
         handler = getattr(self, "begin_" + element, None)
@@ -357,7 +252,11 @@
         self.add_object(False)
 
     def end_integer(self):
-        self.add_object(int(self.get_data()))
+        raw = self.get_data()
+        if raw.startswith('0x') or raw.startswith('0X'):
+            self.add_object(int(raw, 16))
+        else:
+            self.add_object(int(raw))
 
     def end_real(self):
         self.add_object(float(self.get_data()))
@@ -366,11 +265,7 @@
         self.add_object(self.get_data())
 
     def end_data(self):
-        if self._use_builtin_types:
-            self.add_object(_decode_base64(self.get_data()))
-
-        else:
-            self.add_object(Data.fromBase64(self.get_data()))
+        self.add_object(_decode_base64(self.get_data()))
 
     def end_date(self):
         self.add_object(_date_from_string(self.get_data()))
@@ -452,9 +347,6 @@
         elif isinstance(value, dict):
             self.write_dict(value)
 
-        elif isinstance(value, Data):
-            self.write_data(value)
-
         elif isinstance(value, (bytes, bytearray)):
             self.write_bytes(value)
 
@@ -467,9 +359,6 @@
         else:
             raise TypeError("unsupported type: %s" % type(value))
 
-    def write_data(self, data):
-        self.write_bytes(data.data)
-
     def write_bytes(self, data):
         self.begin_element("data")
         self._indent_level -= 1
@@ -563,8 +452,7 @@
 
     see also: http://opensource.apple.com/source/CF/CF-744.18/CFBinaryPList.c
     """
-    def __init__(self, use_builtin_types, dict_type):
-        self._use_builtin_types = use_builtin_types
+    def __init__(self, dict_type):
         self._dict_type = dict_type
 
     def parse(self, fp):
@@ -589,7 +477,7 @@
             return self._read_object(top_object)
 
         except (OSError, IndexError, struct.error, OverflowError,
-                UnicodeDecodeError):
+                ValueError):
             raise InvalidFileException()
 
     def _get_size(self, tokenL):
@@ -605,7 +493,7 @@
     def _read_ints(self, n, size):
         data = self._fp.read(size * n)
         if size in _BINARY_FORMAT:
-            return struct.unpack('>' + _BINARY_FORMAT[size] * n, data)
+            return struct.unpack(f'>{n}{_BINARY_FORMAT[size]}', data)
         else:
             if not size or len(data) != size * n:
                 raise InvalidFileException()
@@ -664,18 +552,23 @@
 
         elif tokenH == 0x40:  # data
             s = self._get_size(tokenL)
-            if self._use_builtin_types:
-                result = self._fp.read(s)
-            else:
-                result = Data(self._fp.read(s))
+            result = self._fp.read(s)
+            if len(result) != s:
+                raise InvalidFileException()
 
         elif tokenH == 0x50:  # ascii string
             s = self._get_size(tokenL)
-            result =  self._fp.read(s).decode('ascii')
+            data = self._fp.read(s)
+            if len(data) != s:
+                raise InvalidFileException()
+            result = data.decode('ascii')
 
         elif tokenH == 0x60:  # unicode string
-            s = self._get_size(tokenL)
-            result = self._fp.read(s * 2).decode('utf-16be')
+            s = self._get_size(tokenL) * 2
+            data = self._fp.read(s)
+            if len(data) != s:
+                raise InvalidFileException()
+            result = data.decode('utf-16be')
 
         elif tokenH == 0x80:  # UID
             # used by Key-Archiver plist files
@@ -700,9 +593,11 @@
             obj_refs = self._read_refs(s)
             result = self._dict_type()
             self._objects[ref] = result
-            for k, o in zip(key_refs, obj_refs):
-                result[self._read_object(k)] = self._read_object(o)
-
+            try:
+                for k, o in zip(key_refs, obj_refs):
+                    result[self._read_object(k)] = self._read_object(o)
+            except TypeError:
+                raise InvalidFileException()
         else:
             raise InvalidFileException()
 
@@ -716,7 +611,7 @@
     elif count < 1 << 16:
         return 2
 
-    elif count << 1 << 32:
+    elif count < 1 << 32:
         return 4
 
     else:
@@ -783,10 +678,6 @@
             if (type(value), value) in self._objtable:
                 return
 
-        elif isinstance(value, Data):
-            if (type(value.data), value.data) in self._objtable:
-                return
-
         elif id(value) in self._objidtable:
             return
 
@@ -795,8 +686,6 @@
         self._objlist.append(value)
         if isinstance(value, _scalars):
             self._objtable[(type(value), value)] = refnum
-        elif isinstance(value, Data):
-            self._objtable[(type(value.data), value.data)] = refnum
         else:
             self._objidtable[id(value)] = refnum
 
@@ -826,8 +715,6 @@
     def _getrefnum(self, value):
         if isinstance(value, _scalars):
             return self._objtable[(type(value), value)]
-        elif isinstance(value, Data):
-            return self._objtable[(type(value.data), value.data)]
         else:
             return self._objidtable[id(value)]
 
@@ -885,10 +772,6 @@
             f = (value - datetime.datetime(2001, 1, 1)).total_seconds()
             self._fp.write(struct.pack('>Bd', 0x33, f))
 
-        elif isinstance(value, Data):
-            self._write_size(0x40, len(value.data))
-            self._fp.write(value.data)
-
         elif isinstance(value, (bytes, bytearray)):
             self._write_size(0x40, len(value))
             self._fp.write(value)
@@ -970,7 +853,7 @@
 }
 
 
-def load(fp, *, fmt=None, use_builtin_types=True, dict_type=dict):
+def load(fp, *, fmt=None, dict_type=dict):
     """Read a .plist file. 'fp' should be a readable and binary file object.
     Return the unpacked root object (which usually is a dictionary).
     """
@@ -988,17 +871,16 @@
     else:
         P = _FORMATS[fmt]['parser']
 
-    p = P(use_builtin_types=use_builtin_types, dict_type=dict_type)
+    p = P(dict_type=dict_type)
     return p.parse(fp)
 
 
-def loads(value, *, fmt=None, use_builtin_types=True, dict_type=dict):
+def loads(value, *, fmt=None, dict_type=dict):
     """Read a .plist file from a bytes object.
     Return the unpacked root object (which usually is a dictionary).
     """
     fp = BytesIO(value)
-    return load(
-        fp, fmt=fmt, use_builtin_types=use_builtin_types, dict_type=dict_type)
+    return load(fp, fmt=fmt, dict_type=dict_type)
 
 
 def dump(value, fp, *, fmt=FMT_XML, sort_keys=True, skipkeys=False):
diff --git a/common/py3-stdlib/poplib.py b/common/py3-stdlib/poplib.py
index e3bd2ab..0f85873 100644
--- a/common/py3-stdlib/poplib.py
+++ b/common/py3-stdlib/poplib.py
@@ -107,6 +107,8 @@
         self.welcome = self._getresp()
 
     def _create_socket(self, timeout):
+        if timeout is not None and not timeout:
+            raise ValueError('Non-blocking socket (timeout=0) is not supported')
         return socket.create_connection((self.host, self.port), timeout)
 
     def _putline(self, line):
@@ -385,7 +387,7 @@
             for capline in rawcaps:
                 capnm, capargs = _parsecap(capline)
                 caps[capnm] = capargs
-        except error_proto as _err:
+        except error_proto:
             raise error_proto('-ERR CAPA not supported by server')
         return caps
 
diff --git a/common/py3-stdlib/pprint.py b/common/py3-stdlib/pprint.py
index 4bfcc31..7c1118a 100644
--- a/common/py3-stdlib/pprint.py
+++ b/common/py3-stdlib/pprint.py
@@ -342,6 +342,33 @@
 
     _dispatch[_types.MappingProxyType.__repr__] = _pprint_mappingproxy
 
+    def _pprint_simplenamespace(self, object, stream, indent, allowance, context, level):
+        if type(object) is _types.SimpleNamespace:
+            # The SimpleNamespace repr is "namespace" instead of the class
+            # name, so we do the same here. For subclasses, use the class name.
+            cls_name = 'namespace'
+        else:
+            cls_name = object.__class__.__name__
+        indent += len(cls_name) + 1
+        delimnl = ',\n' + ' ' * indent
+        items = object.__dict__.items()
+        last_index = len(items) - 1
+
+        stream.write(cls_name + '(')
+        for i, (key, ent) in enumerate(items):
+            stream.write(key)
+            stream.write('=')
+
+            last = i == last_index
+            self._format(ent, stream, indent + len(key) + 1,
+                         allowance if last else 1,
+                         context, level)
+            if not last:
+                stream.write(delimnl)
+        stream.write(')')
+
+    _dispatch[_types.SimpleNamespace.__repr__] = _pprint_simplenamespace
+
     def _format_dict_items(self, items, stream, indent, allowance, context,
                            level):
         write = stream.write
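
With the dispatch entry above, pprint lays out SimpleNamespace objects (and subclasses) field by field:

    import pprint, types

    ns = types.SimpleNamespace(alpha=list(range(8)), beta="b" * 30)
    pprint.pprint(ns, width=40)  # wraps as namespace(alpha=..., beta=...) over lines
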
diff --git a/common/py3-stdlib/profile.py b/common/py3-stdlib/profile.py
index 1346297..5cb017e 100755
--- a/common/py3-stdlib/profile.py
+++ b/common/py3-stdlib/profile.py
@@ -425,29 +425,13 @@
         return self
 
     # This method is more useful to profile a single function call.
-    def runcall(*args, **kw):
-        if len(args) >= 2:
-            self, func, *args = args
-        elif not args:
-            raise TypeError("descriptor 'runcall' of 'Profile' object "
-                            "needs an argument")
-        elif 'func' in kw:
-            func = kw.pop('func')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'func' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('runcall expected at least 1 positional argument, '
-                            'got %d' % (len(args)-1))
-
+    def runcall(self, func, /, *args, **kw):
         self.set_cmd(repr(func))
         sys.setprofile(self.dispatcher)
         try:
             return func(*args, **kw)
         finally:
             sys.setprofile(None)
-    runcall.__text_signature__ = '($self, func, /, *args, **kw)'
 
 
     #******************************************************************
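
After the cleanup, func is genuinely positional-only rather than emulated through *args:

    import profile

    p = profile.Profile()
    print(p.runcall(max, [3, 1, 2]))  # 3; only this call is profiled
    try:
        p.runcall(func=max)
    except TypeError as exc:
        print(exc)  # func can no longer be passed as a keyword
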
@@ -587,6 +571,11 @@
     (options, args) = parser.parse_args()
     sys.argv[:] = args
 
+    # The script that we're profiling may chdir, so capture the absolute path
+    # to the output file at startup.
+    if options.outfile is not None:
+        options.outfile = os.path.abspath(options.outfile)
+
     if len(args) > 0:
         if options.module:
             import runpy
diff --git a/common/py3-stdlib/pstats.py b/common/py3-stdlib/pstats.py
index 4b419a8..0f93ae0 100644
--- a/common/py3-stdlib/pstats.py
+++ b/common/py3-stdlib/pstats.py
@@ -25,11 +25,13 @@
 import time
 import marshal
 import re
+
 from enum import Enum
 from functools import cmp_to_key
+from dataclasses import dataclass
+from typing import Dict
 
-__all__ = ["Stats", "SortKey"]
-
+__all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"]
 
 class SortKey(str, Enum):
     CALLS = 'calls', 'ncalls'
@@ -43,15 +45,31 @@
     TIME = 'time', 'tottime'
 
     def __new__(cls, *values):
-        obj = str.__new__(cls)
-
-        obj._value_ = values[0]
+        value = values[0]
+        obj = str.__new__(cls, value)
+        obj._value_ = value
         for other_value in values[1:]:
             cls._value2member_map_[other_value] = obj
         obj._all_values = values
         return obj
 
 
+@dataclass(unsafe_hash=True)
+class FunctionProfile:
+    ncalls: str  # e.g. "3" or "5/3" (total/primitive calls)
+    tottime: float
+    percall_tottime: float
+    cumtime: float
+    percall_cumtime: float
+    file_name: str
+    line_number: int
+
+@dataclass(unsafe_hash=True)
+class StatsProfile:
+    '''Class for keeping track of the overall profile and its per-function profiles.'''
+    total_tt: float
+    func_profiles: Dict[str, FunctionProfile]
+
 class Stats:
     """This class is used for creating reports from data generated by the
     Profile class.  It is a "friend" of that class, and imports data either
@@ -333,6 +351,41 @@
 
         return new_list, msg
 
+    def get_stats_profile(self):
+        """This method returns an instance of StatsProfile, which contains a mapping
+        of function names to instances of FunctionProfile. Each FunctionProfile
+        instance holds information related to the function's profile such as how
+        long the function took to run, how many times it was called, etc.
+        """
+        func_list = self.fcn_list[:] if self.fcn_list else list(self.stats.keys())
+        if not func_list:
+            return StatsProfile(0, {})
+
+        total_tt = float(f8(self.total_tt))
+        func_profiles = {}
+        stats_profile = StatsProfile(total_tt, func_profiles)
+
+        for func in func_list:
+            cc, nc, tt, ct, callers = self.stats[func]
+            file_name, line_number, func_name = func
+            ncalls = str(nc) if nc == cc else (str(nc) + '/' + str(cc))
+            tottime = float(f8(tt))
+            percall_tottime = -1 if nc == 0 else float(f8(tt/nc))
+            cumtime = float(f8(ct))
+            percall_cumtime = -1 if cc == 0 else float(f8(ct/cc))
+            func_profile = FunctionProfile(
+                ncalls,
+                tottime, # time spent in this function alone
+                percall_tottime,
+                cumtime, # time spent in this function plus all the functions it called
+                percall_cumtime,
+                file_name,
+                line_number
+            )
+            func_profiles[func_name] = func_profile
+
+        return stats_profile
+
     def get_print_list(self, sel_list):
         width = self.max_name_len
         if self.fcn_list:
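
End-to-end, with cProfile supplying the raw stats:

    import cProfile, pstats

    pr = cProfile.Profile()
    pr.enable()
    sorted(range(1000))
    pr.disable()

    sp = pstats.Stats(pr).get_stats_profile()
    print(sp.total_tt)
    for name, fp in sp.func_profiles.items():
        print(name, fp.ncalls, fp.cumtime)
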
diff --git a/common/py3-stdlib/py_compile.py b/common/py3-stdlib/py_compile.py
index 2173689..a81f493 100644
--- a/common/py3-stdlib/py_compile.py
+++ b/common/py3-stdlib/py_compile.py
@@ -197,12 +197,10 @@
                 compile(filename, doraise=True)
             except PyCompileError as error:
                 rv = 1
-                if quiet < 2:
-                    sys.stderr.write("%s\n" % error.msg)
+                sys.stderr.write("%s\n" % error.msg)
             except OSError as error:
                 rv = 1
-                if quiet < 2:
-                    sys.stderr.write("%s\n" % error)
+                sys.stderr.write("%s\n" % error)
     else:
         for filename in args:
             try:
@@ -210,8 +208,7 @@
             except PyCompileError as error:
                 # return value to indicate at least one failure
                 rv = 1
-                if quiet < 2:
-                    sys.stderr.write("%s\n" % error.msg)
+                sys.stderr.write("%s\n" % error.msg)
     return rv
 
 if __name__ == "__main__":
diff --git a/common/py3-stdlib/pydoc.py b/common/py3-stdlib/pydoc.py
old mode 100644
new mode 100755
index dc3377d..35ef3eb
--- a/common/py3-stdlib/pydoc.py
+++ b/common/py3-stdlib/pydoc.py
@@ -90,9 +90,101 @@
             normdirs.append(normdir)
     return dirs
 
+def _findclass(func):
+    cls = sys.modules.get(func.__module__)
+    if cls is None:
+        return None
+    for name in func.__qualname__.split('.')[:-1]:
+        cls = getattr(cls, name)
+    if not inspect.isclass(cls):
+        return None
+    return cls
+
+def _finddoc(obj):
+    if inspect.ismethod(obj):
+        name = obj.__func__.__name__
+        self = obj.__self__
+        if (inspect.isclass(self) and
+            getattr(getattr(self, name, None), '__func__') is obj.__func__):
+            # classmethod
+            cls = self
+        else:
+            cls = self.__class__
+    elif inspect.isfunction(obj):
+        name = obj.__name__
+        cls = _findclass(obj)
+        if cls is None or getattr(cls, name) is not obj:
+            return None
+    elif inspect.isbuiltin(obj):
+        name = obj.__name__
+        self = obj.__self__
+        if (inspect.isclass(self) and
+            self.__qualname__ + '.' + name == obj.__qualname__):
+            # classmethod
+            cls = self
+        else:
+            cls = self.__class__
+    # Should be tested before isdatadescriptor().
+    elif isinstance(obj, property):
+        func = obj.fget
+        name = func.__name__
+        cls = _findclass(func)
+        if cls is None or getattr(cls, name) is not obj:
+            return None
+    elif inspect.ismethoddescriptor(obj) or inspect.isdatadescriptor(obj):
+        name = obj.__name__
+        cls = obj.__objclass__
+        if getattr(cls, name) is not obj:
+            return None
+        if inspect.ismemberdescriptor(obj):
+            slots = getattr(cls, '__slots__', None)
+            if isinstance(slots, dict) and name in slots:
+                return slots[name]
+    else:
+        return None
+    for base in cls.__mro__:
+        try:
+            doc = _getowndoc(getattr(base, name))
+        except AttributeError:
+            continue
+        if doc is not None:
+            return doc
+    return None
+
+def _getowndoc(obj):
+    """Get the documentation string for an object if it is not
+    inherited from its class."""
+    try:
+        doc = object.__getattribute__(obj, '__doc__')
+        if doc is None:
+            return None
+        if obj is not type:
+            typedoc = type(obj).__doc__
+            if isinstance(typedoc, str) and typedoc == doc:
+                return None
+        return doc
+    except AttributeError:
+        return None
+
+def _getdoc(object):
+    """Get the documentation string for an object.
+
+    All tabs are expanded to spaces.  To clean up docstrings that are
+    indented to line up with blocks of code, any whitespace that can be
+    uniformly removed from the second line onwards is removed."""
+    doc = _getowndoc(object)
+    if doc is None:
+        try:
+            doc = _finddoc(object)
+        except (AttributeError, TypeError):
+            return None
+    if not isinstance(doc, str):
+        return None
+    return inspect.cleandoc(doc)
+
 def getdoc(object):
     """Get the doc string or comments for an object."""
-    result = inspect.getdoc(object) or inspect.getcomments(object)
+    result = _getdoc(object) or inspect.getcomments(object)
     return result and re.sub('^ *\n', '', result.rstrip()) or ''
 
 def splitdoc(doc):
@@ -584,7 +676,7 @@
         escape = escape or self.escape
         results = []
         here = 0
-        pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
+        pattern = re.compile(r'\b((http|https|ftp)://\S+[\w/]|'
                                 r'RFC[- ]?(\d+)|'
                                 r'PEP[- ]?(\d+)|'
                                 r'(self\.)?(\w+))')
@@ -825,11 +917,8 @@
                 push(msg)
                 for name, kind, homecls, value in ok:
                     base = self.docother(getattr(object, name), name, mod)
-                    if callable(value) or inspect.isdatadescriptor(value):
-                        doc = getattr(value, "__doc__", None)
-                    else:
-                        doc = None
-                    if doc is None:
+                    doc = getdoc(value)
+                    if not doc:
                         push('<dl><dt>%s</dl>\n' % base)
                     else:
                         doc = self.markup(getdoc(value), self.preformat,
@@ -1309,10 +1398,7 @@
                 hr.maybe()
                 push(msg)
                 for name, kind, homecls, value in ok:
-                    if callable(value) or inspect.isdatadescriptor(value):
-                        doc = getdoc(value)
-                    else:
-                        doc = None
+                    doc = getdoc(value)
                     try:
                         obj = getattr(object, name)
                     except AttributeError:
@@ -1448,8 +1534,10 @@
             chop = maxlen - len(line)
             if chop < 0: repr = repr[:chop] + '...'
         line = (name and self.bold(name) + ' = ' or '') + repr
-        if doc is not None:
-            line += '\n' + self.indent(str(doc))
+        if not doc:
+            doc = getdoc(object)
+        if doc:
+            line += '\n' + self.indent(str(doc)) + '\n'
         return line
 
 class _PlainTextDoc(TextDoc):
@@ -1672,11 +1760,15 @@
     if not (inspect.ismodule(object) or
               inspect.isclass(object) or
               inspect.isroutine(object) or
-              inspect.isdatadescriptor(object)):
+              inspect.isdatadescriptor(object) or
+              _getdoc(object)):
         # If the passed object is a piece of data or an instance,
         # document its available methods instead of its value.
-        object = type(object)
-        desc += ' object'
+        if hasattr(object, '__origin__'):
+            object = object.__origin__
+        else:
+            object = type(object)
+            desc += ' object'
     return title % desc + '\n\n' + renderer.document(object, name)
 
 def doc(thing, title='Python Library Documentation: %s', forceload=0,
@@ -1725,6 +1817,7 @@
         'False': '',
         'None': '',
         'True': '',
+        '__peg_parser__': '',
         'and': 'BOOLEAN',
         'as': 'with',
         'assert': ('assert', ''),
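The __origin__ branch added above is what lets help() on a parameterized
generic fall through to its originating class; a quick sketch of the
observable behavior (the printed title is indicative, not verbatim):

    import typing
    import pydoc

    # typing.List[int].__origin__ is the built-in list, so the rendered
    # page documents list itself rather than a bare "... object" fallback.
    text = pydoc.render_doc(typing.List[int], renderer=pydoc.plaintext)
    print(text.splitlines()[0])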
diff --git a/common/py3-stdlib/pydoc_data/topics.py b/common/py3-stdlib/pydoc_data/topics.py
index 6834657..d8dd8c5 100644
--- a/common/py3-stdlib/pydoc_data/topics.py
+++ b/common/py3-stdlib/pydoc_data/topics.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Mon Jul 20 14:14:54 2020
+# Autogenerated by Sphinx on Mon Dec  7 15:00:07 2020
 topics = {'assert': 'The "assert" statement\n'
            '**********************\n'
            '\n'
@@ -99,27 +99,26 @@
                'assigned,\n'
                '  from left to right, to the corresponding targets.\n'
                '\n'
-               '  * If the target list contains one target prefixed with an\n'
-               '    asterisk, called a “starred” target: The object must be '
-               'an\n'
-               '    iterable with at least as many items as there are targets '
-               'in the\n'
-               '    target list, minus one.  The first items of the iterable '
-               'are\n'
-               '    assigned, from left to right, to the targets before the '
+               '  * If the target list contains one target prefixed with an '
+               'asterisk,\n'
+               '    called a “starred” target: The object must be an iterable '
+               'with at\n'
+               '    least as many items as there are targets in the target '
+               'list, minus\n'
+               '    one.  The first items of the iterable are assigned, from '
+               'left to\n'
+               '    right, to the targets before the starred target.  The '
+               'final items\n'
+               '    of the iterable are assigned to the targets after the '
                'starred\n'
-               '    target.  The final items of the iterable are assigned to '
-               'the\n'
-               '    targets after the starred target.  A list of the remaining '
-               'items\n'
-               '    in the iterable is then assigned to the starred target '
-               '(the list\n'
-               '    can be empty).\n'
+               '    target.  A list of the remaining items in the iterable is '
+               'then\n'
+               '    assigned to the starred target (the list can be empty).\n'
                '\n'
                '  * Else: The object must be an iterable with the same number '
-               'of\n'
-               '    items as there are targets in the target list, and the '
-               'items are\n'
+               'of items\n'
+               '    as there are targets in the target list, and the items '
+               'are\n'
                '    assigned, from left to right, to the corresponding '
                'targets.\n'
                '\n'
@@ -135,10 +134,10 @@
                'in the\n'
                '    current local namespace.\n'
                '\n'
-               '  * Otherwise: the name is bound to the object in the global\n'
-               '    namespace or the outer namespace determined by '
-               '"nonlocal",\n'
-               '    respectively.\n'
+               '  * Otherwise: the name is bound to the object in the global '
+               'namespace\n'
+               '    or the outer namespace determined by "nonlocal", '
+               'respectively.\n'
                '\n'
                '  The name is rebound if it was already bound.  This may cause '
                'the\n'
@@ -225,26 +224,27 @@
                'called with\n'
                '  appropriate arguments.\n'
                '\n'
-               '* If the target is a slicing: The primary expression in the\n'
-               '  reference is evaluated.  It should yield a mutable sequence '
-               'object\n'
-               '  (such as a list).  The assigned object should be a sequence '
-               'object\n'
-               '  of the same type.  Next, the lower and upper bound '
-               'expressions are\n'
-               '  evaluated, insofar they are present; defaults are zero and '
-               'the\n'
-               '  sequence’s length.  The bounds should evaluate to integers. '
-               'If\n'
-               '  either bound is negative, the sequence’s length is added to '
-               'it.  The\n'
-               '  resulting bounds are clipped to lie between zero and the '
+               '* If the target is a slicing: The primary expression in the '
+               'reference\n'
+               '  is evaluated.  It should yield a mutable sequence object '
+               '(such as a\n'
+               '  list).  The assigned object should be a sequence object of '
+               'the same\n'
+               '  type.  Next, the lower and upper bound expressions are '
+               'evaluated,\n'
+               '  insofar they are present; defaults are zero and the '
                'sequence’s\n'
-               '  length, inclusive.  Finally, the sequence object is asked to '
-               'replace\n'
-               '  the slice with the items of the assigned sequence.  The '
-               'length of\n'
-               '  the slice may be different from the length of the assigned '
+               '  length.  The bounds should evaluate to integers. If either '
+               'bound is\n'
+               '  negative, the sequence’s length is added to it.  The '
+               'resulting\n'
+               '  bounds are clipped to lie between zero and the sequence’s '
+               'length,\n'
+               '  inclusive.  Finally, the sequence object is asked to replace '
+               'the\n'
+               '  slice with the items of the assigned sequence.  The length '
+               'of the\n'
+               '  slice may be different from the length of the assigned '
                'sequence,\n'
                '  thus changing the length of the target sequence, if the '
                'target\n'
@@ -514,8 +514,8 @@
           'is semantically equivalent to:\n'
           '\n'
           '   manager = (EXPRESSION)\n'
-          '   aexit = type(manager).__aexit__\n'
           '   aenter = type(manager).__aenter__\n'
+          '   aexit = type(manager).__aexit__\n'
           '   value = await aenter(manager)\n'
           '   hit_except = False\n'
           '\n'
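The hunk above reorders the documented expansion so that __aenter__ is
looked up before __aexit__; a runnable sketch of the protocol it
describes:

    import asyncio

    class AsyncManaged:
        async def __aenter__(self):
            return self                 # awaited and bound by "async with ... as"
        async def __aexit__(self, exc_type, exc, tb):
            return False                # do not suppress exceptions

    async def main():
        async with AsyncManaged() as m:
            print(m)

    asyncio.run(main())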
@@ -544,13 +544,17 @@
           '\n'
           '-[ Footnotes ]-\n'
           '\n'
-          '[1] The exception is propagated to the invocation stack unless\n'
-          '    there is a "finally" clause which happens to raise another\n'
-          '    exception. That new exception causes the old one to be lost.\n'
+          '[1] The exception is propagated to the invocation stack unless '
+          'there\n'
+          '    is a "finally" clause which happens to raise another '
+          'exception.\n'
+          '    That new exception causes the old one to be lost.\n'
           '\n'
-          '[2] A string literal appearing as the first statement in the\n'
-          '    function body is transformed into the function’s "__doc__"\n'
-          '    attribute and therefore the function’s *docstring*.\n'
+          '[2] A string literal appearing as the first statement in the '
+          'function\n'
+          '    body is transformed into the function’s "__doc__" attribute '
+          'and\n'
+          '    therefore the function’s *docstring*.\n'
           '\n'
           '[3] A string literal appearing as the first statement in the class\n'
           '    body is transformed into the namespace’s "__doc__" item and\n'
@@ -688,11 +692,18 @@
                      'needs, for\n'
                      '   example, "object.__getattribute__(self, name)".\n'
                      '\n'
-                     '   Note: This method may still be bypassed when looking '
-                     'up special\n'
-                     '     methods as the result of implicit invocation via '
-                     'language syntax\n'
-                     '     or built-in functions. See Special method lookup.\n'
+                     '   Note:\n'
+                     '\n'
+                     '     This method may still be bypassed when looking up '
+                     'special methods\n'
+                     '     as the result of implicit invocation via language '
+                     'syntax or\n'
+                     '     built-in functions. See Special method lookup.\n'
+                     '\n'
+                     '   For certain sensitive attribute accesses, raises an '
+                     'auditing event\n'
+                     '   "object.__getattr__" with arguments "obj" and '
+                     '"name".\n'
                      '\n'
                      'object.__setattr__(self, name, value)\n'
                      '\n'
@@ -710,6 +721,11 @@
                      'for example,\n'
                      '   "object.__setattr__(self, name, value)".\n'
                      '\n'
+                     '   For certain sensitive attribute assignments, raises '
+                     'an auditing\n'
+                     '   event "object.__setattr__" with arguments "obj", '
+                     '"name", "value".\n'
+                     '\n'
                      'object.__delattr__(self, name)\n'
                      '\n'
                      '   Like "__setattr__()" but for attribute deletion '
@@ -718,6 +734,11 @@
                      'obj.name" is\n'
                      '   meaningful for the object.\n'
                      '\n'
+                     '   For certain sensitive attribute deletions, raises an '
+                     'auditing event\n'
+                     '   "object.__delattr__" with arguments "obj" and '
+                     '"name".\n'
+                     '\n'
                      'object.__dir__(self)\n'
                      '\n'
                      '   Called when "dir()" is called on the object. A '
@@ -776,15 +797,16 @@
                      '\n'
                      '   sys.modules[__name__].__class__ = VerboseModule\n'
                      '\n'
-                     'Note: Defining module "__getattr__" and setting module '
-                     '"__class__"\n'
-                     '  only affect lookups made using the attribute access '
-                     'syntax –\n'
-                     '  directly accessing the module globals (whether by code '
-                     'within the\n'
-                     '  module, or via a reference to the module’s globals '
-                     'dictionary) is\n'
-                     '  unaffected.\n'
+                     'Note:\n'
+                     '\n'
+                     '  Defining module "__getattr__" and setting module '
+                     '"__class__" only\n'
+                     '  affect lookups made using the attribute access syntax '
+                     '– directly\n'
+                     '  accessing the module globals (whether by code within '
+                     'the module, or\n'
+                     '  via a reference to the module’s globals dictionary) is '
+                     'unaffected.\n'
                      '\n'
                      'Changed in version 3.5: "__class__" module attribute is '
                      'now writable.\n'
@@ -867,12 +889,14 @@
                      'created. The\n'
                      '   descriptor has been assigned to *name*.\n'
                      '\n'
-                     '   Note: "__set_name__()" is only called implicitly as '
-                     'part of the\n'
-                     '     "type" constructor, so it will need to be called '
-                     'explicitly with\n'
-                     '     the appropriate parameters when a descriptor is '
-                     'added to a class\n'
+                     '   Note:\n'
+                     '\n'
+                     '     "__set_name__()" is only called implicitly as part '
+                     'of the "type"\n'
+                     '     constructor, so it will need to be called '
+                     'explicitly with the\n'
+                     '     appropriate parameters when a descriptor is added '
+                     'to a class\n'
                      '     after initial creation:\n'
                      '\n'
                      '        class A:\n'
@@ -979,12 +1003,13 @@
                      'define both\n'
                      '"__get__()" and "__set__()", while non-data descriptors '
                      'have just the\n'
-                     '"__get__()" method.  Data descriptors with "__set__()" '
-                     'and "__get__()"\n'
-                     'defined always override a redefinition in an instance '
-                     'dictionary.  In\n'
-                     'contrast, non-data descriptors can be overridden by '
-                     'instances.\n'
+                     '"__get__()" method.  Data descriptors with "__get__()" '
+                     'and "__set__()"\n'
+                     '(and/or "__delete__()") defined always override a '
+                     'redefinition in an\n'
+                     'instance dictionary.  In contrast, non-data descriptors '
+                     'can be\n'
+                     'overridden by instances.\n'
                      '\n'
                      'Python methods (including "staticmethod()" and '
                      '"classmethod()") are\n'
@@ -1032,10 +1057,9 @@
                      '--------------------------\n'
                      '\n'
                      '* When inheriting from a class without *__slots__*, the '
-                     '*__dict__*\n'
-                     '  and *__weakref__* attribute of the instances will '
-                     'always be\n'
-                     '  accessible.\n'
+                     '*__dict__* and\n'
+                     '  *__weakref__* attribute of the instances will always '
+                     'be accessible.\n'
                      '\n'
                      '* Without a *__dict__* variable, instances cannot be '
                      'assigned new\n'
@@ -1050,14 +1074,12 @@
                      '  declaration.\n'
                      '\n'
                      '* Without a *__weakref__* variable for each instance, '
-                     'classes\n'
-                     '  defining *__slots__* do not support weak references to '
-                     'its\n'
-                     '  instances. If weak reference support is needed, then '
-                     'add\n'
-                     '  "\'__weakref__\'" to the sequence of strings in the '
-                     '*__slots__*\n'
-                     '  declaration.\n'
+                     'classes defining\n'
+                     '  *__slots__* do not support weak references to its '
+                     'instances. If weak\n'
+                     '  reference support is needed, then add '
+                     '"\'__weakref__\'" to the\n'
+                     '  sequence of strings in the *__slots__* declaration.\n'
                      '\n'
                      '* *__slots__* are implemented at the class level by '
                      'creating\n'
@@ -1070,24 +1092,23 @@
                      '  attribute would overwrite the descriptor assignment.\n'
                      '\n'
                      '* The action of a *__slots__* declaration is not limited '
-                     'to the\n'
-                     '  class where it is defined.  *__slots__* declared in '
-                     'parents are\n'
-                     '  available in child classes. However, child subclasses '
-                     'will get a\n'
-                     '  *__dict__* and *__weakref__* unless they also define '
-                     '*__slots__*\n'
-                     '  (which should only contain names of any *additional* '
-                     'slots).\n'
+                     'to the class\n'
+                     '  where it is defined.  *__slots__* declared in parents '
+                     'are available\n'
+                     '  in child classes. However, child subclasses will get a '
+                     '*__dict__*\n'
+                     '  and *__weakref__* unless they also define *__slots__* '
+                     '(which should\n'
+                     '  only contain names of any *additional* slots).\n'
                      '\n'
                      '* If a class defines a slot also defined in a base '
-                     'class, the\n'
-                     '  instance variable defined by the base class slot is '
-                     'inaccessible\n'
-                     '  (except by retrieving its descriptor directly from the '
-                     'base class).\n'
-                     '  This renders the meaning of the program undefined.  In '
-                     'the future, a\n'
+                     'class, the instance\n'
+                     '  variable defined by the base class slot is '
+                     'inaccessible (except by\n'
+                     '  retrieving its descriptor directly from the base '
+                     'class). This\n'
+                     '  renders the meaning of the program undefined.  In the '
+                     'future, a\n'
                      '  check may be added to prevent this.\n'
                      '\n'
                      '* Nonempty *__slots__* does not work for classes derived '
@@ -1096,9 +1117,9 @@
                      '"bytes" and "tuple".\n'
                      '\n'
                      '* Any non-string iterable may be assigned to '
-                     '*__slots__*. Mappings\n'
-                     '  may also be used; however, in the future, special '
-                     'meaning may be\n'
+                     '*__slots__*. Mappings may\n'
+                     '  also be used; however, in the future, special meaning '
+                     'may be\n'
                      '  assigned to the values corresponding to each key.\n'
                      '\n'
                      '* *__class__* assignment works only if both classes have '
@@ -1114,9 +1135,9 @@
                      '  raise "TypeError".\n'
                      '\n'
                      '* If an iterator is used for *__slots__* then a '
-                     'descriptor is\n'
-                     '  created for each of the iterator’s values. However, '
-                     'the *__slots__*\n'
+                     'descriptor is created\n'
+                     '  for each of the iterator’s values. However, the '
+                     '*__slots__*\n'
                      '  attribute will be an empty iterator.\n',
  'attribute-references': 'Attribute references\n'
                          '********************\n'
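As a concrete reminder of the __slots__ semantics re-wrapped above
(example not from the diff):

    class Point:
        __slots__ = ('x', 'y')    # instances get no __dict__ or __weakref__

    p = Point()
    p.x = 1.0
    try:
        p.z = 2.0                 # not declared in __slots__
    except AttributeError as exc:
        print(exc)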
@@ -1458,8 +1479,8 @@
                    '\n'
                    '   Called when the instance is “called” as a function; if '
                    'this method\n'
-                   '   is defined, "x(arg1, arg2, ...)" is a shorthand for\n'
-                   '   "x.__call__(arg1, arg2, ...)".\n',
+                   '   is defined, "x(arg1, arg2, ...)" roughly translates to\n'
+                   '   "type(x).__call__(x, arg1, ...)".\n',
  'calls': 'Calls\n'
           '*****\n'
           '\n'
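The re-worded __call__ entry ("roughly translates to type(x).__call__(x,
arg1, ...)") reflects that special methods are looked up on the type, not
the instance; a two-line check:

    class Greeter:
        def __call__(self, name):
            return 'hello, ' + name

    g = Greeter()
    assert g('world') == type(g).__call__(g, 'world')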
@@ -1716,6 +1737,10 @@
           'for\n'
           'function decorators.  The result is then bound to the class name.\n'
           '\n'
+          'Changed in version 3.9: Classes may be decorated with any valid\n'
+          '"assignment_expression". Previously, the grammar was much more\n'
+          'restrictive; see **PEP 614** for details.\n'
+          '\n'
           '**Programmer’s note:** Variables defined in the class definition '
           'are\n'
           'class attributes; they are shared by instances.  Instance '
@@ -1877,10 +1902,10 @@
                 '  != x" is true.  This behavior is compliant with IEEE 754.\n'
                 '\n'
                 '* "None" and "NotImplemented" are singletons.  **PEP 8** '
-                'advises\n'
-                '  that comparisons for singletons should always be done with '
-                '"is" or\n'
-                '  "is not", never the equality operators.\n'
+                'advises that\n'
+                '  comparisons for singletons should always be done with "is" '
+                'or "is\n'
+                '  not", never the equality operators.\n'
                 '\n'
                 '* Binary sequences (instances of "bytes" or "bytearray") can '
                 'be\n'
@@ -1896,15 +1921,15 @@
                 '\n'
                 '  Strings and binary sequences cannot be directly compared.\n'
                 '\n'
-                '* Sequences (instances of "tuple", "list", or "range") can '
-                'be\n'
-                '  compared only within each of their types, with the '
-                'restriction that\n'
-                '  ranges do not support order comparison.  Equality '
-                'comparison across\n'
-                '  these types results in inequality, and ordering comparison '
-                'across\n'
-                '  these types raises "TypeError".\n'
+                '* Sequences (instances of "tuple", "list", or "range") can be '
+                'compared\n'
+                '  only within each of their types, with the restriction that '
+                'ranges do\n'
+                '  not support order comparison.  Equality comparison across '
+                'these\n'
+                '  types results in inequality, and ordering comparison across '
+                'these\n'
+                '  types raises "TypeError".\n'
                 '\n'
                 '  Sequences compare lexicographically using comparison of\n'
                 '  corresponding elements.  The built-in containers typically '
@@ -1928,8 +1953,8 @@
                 '    false because the type is not the same).\n'
                 '\n'
                 '  * Collections that support order comparison are ordered the '
-                'same\n'
-                '    as their first unequal elements (for example, "[1,2,x] <= '
+                'same as\n'
+                '    their first unequal elements (for example, "[1,2,x] <= '
                 '[1,2,y]"\n'
                 '    has the same value as "x <= y").  If a corresponding '
                 'element does\n'
@@ -1947,8 +1972,8 @@
                 '"TypeError".\n'
                 '\n'
                 '* Sets (instances of "set" or "frozenset") can be compared '
-                'within\n'
-                '  and across their types.\n'
+                'within and\n'
+                '  across their types.\n'
                 '\n'
                 '  They define order comparison operators to mean subset and '
                 'superset\n'
@@ -1967,8 +1992,8 @@
                 '  Comparison of sets enforces reflexivity of its elements.\n'
                 '\n'
                 '* Most other built-in types have no comparison methods '
-                'implemented,\n'
-                '  so they inherit the default comparison behavior.\n'
+                'implemented, so\n'
+                '  they inherit the default comparison behavior.\n'
                 '\n'
                 'User-defined classes that customize their comparison behavior '
                 'should\n'
@@ -2017,10 +2042,10 @@
                 '  "total_ordering()" decorator.\n'
                 '\n'
                 '* The "hash()" result should be consistent with equality. '
-                'Objects\n'
-                '  that are equal should either have the same hash value, or '
-                'be marked\n'
-                '  as unhashable.\n'
+                'Objects that\n'
+                '  are equal should either have the same hash value, or be '
+                'marked as\n'
+                '  unhashable.\n'
                 '\n'
                 'Python does not enforce these consistency rules. In fact, '
                 'the\n'
@@ -2294,10 +2319,11 @@
              ':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, '
              '2]".\n'
              '\n'
-             'Note: There is a subtlety when the sequence is being modified by '
-             'the\n'
-             '  loop (this can only occur for mutable sequences, e.g. lists).  '
-             'An\n'
+             'Note:\n'
+             '\n'
+             '  There is a subtlety when the sequence is being modified by the '
+             'loop\n'
+             '  (this can only occur for mutable sequences, e.g. lists).  An\n'
              '  internal counter is used to keep track of which item is used '
              'next,\n'
              '  and this is incremented on each iteration.  When this counter '
@@ -2520,8 +2546,8 @@
              'follows:\n'
              '\n'
              '1. The context expression (the expression given in the '
-             '"with_item")\n'
-             '   is evaluated to obtain a context manager.\n'
+             '"with_item") is\n'
+             '   evaluated to obtain a context manager.\n'
              '\n'
              '2. The context manager’s "__enter__()" is loaded for later use.\n'
              '\n'
@@ -2529,13 +2555,15 @@
              '\n'
              '4. The context manager’s "__enter__()" method is invoked.\n'
              '\n'
-             '5. If a target was included in the "with" statement, the return\n'
-             '   value from "__enter__()" is assigned to it.\n'
+             '5. If a target was included in the "with" statement, the return '
+             'value\n'
+             '   from "__enter__()" is assigned to it.\n'
              '\n'
-             '   Note: The "with" statement guarantees that if the '
-             '"__enter__()"\n'
-             '     method returns without an error, then "__exit__()" will '
-             'always be\n'
+             '   Note:\n'
+             '\n'
+             '     The "with" statement guarantees that if the "__enter__()" '
+             'method\n'
+             '     returns without an error, then "__exit__()" will always be\n'
              '     called. Thus, if an error occurs during the assignment to '
              'the\n'
              '     target list, it will be treated the same as an error '
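Steps 1-5 above map directly onto a user-defined context manager; a
minimal sketch:

    class Managed:
        def __enter__(self):                    # step 4
            return self                         # bound to the "as" target (step 5)
        def __exit__(self, exc_type, exc, tb):
            return False                        # do not suppress exceptions

    with Managed() as m:
        print(m)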
@@ -2625,8 +2653,8 @@
              '[parameter_list] ")"\n'
              '               ["->" expression] ":" suite\n'
              '   decorators                ::= decorator+\n'
-             '   decorator                 ::= "@" dotted_name ["(" '
-             '[argument_list [","]] ")"] NEWLINE\n'
+             '   decorator                 ::= "@" assignment_expression '
+             'NEWLINE\n'
              '   dotted_name               ::= identifier ("." identifier)*\n'
              '   parameter_list            ::= defparameter ("," '
              'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n'
@@ -2680,6 +2708,11 @@
              'the name\n'
              '"func".\n'
              '\n'
+             'Changed in version 3.9: Functions may be decorated with any '
+             'valid\n'
+             '"assignment_expression". Previously, the grammar was much more\n'
+             'restrictive; see **PEP 614** for details.\n'
+             '\n'
              'When one or more *parameters* have the form *parameter* "="\n'
              '*expression*, the function is said to have “default parameter '
              'values.”\n'
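The new "Changed in version 3.9" note refers to PEP 614, which allows any
assignment_expression after "@"; a brief sketch:

    registry = {'trace': lambda func: func}

    @registry['trace']    # a subscription after "@" was a SyntaxError before 3.9
    def handler():
        pass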
@@ -2881,6 +2914,10 @@
              'function decorators.  The result is then bound to the class '
              'name.\n'
              '\n'
+             'Changed in version 3.9: Classes may be decorated with any valid\n'
+             '"assignment_expression". Previously, the grammar was much more\n'
+             'restrictive; see **PEP 614** for details.\n'
+             '\n'
              '**Programmer’s note:** Variables defined in the class definition '
              'are\n'
              'class attributes; they are shared by instances.  Instance '
@@ -3009,8 +3046,8 @@
              'is semantically equivalent to:\n'
              '\n'
              '   manager = (EXPRESSION)\n'
-             '   aexit = type(manager).__aexit__\n'
              '   aenter = type(manager).__aenter__\n'
+             '   aexit = type(manager).__aexit__\n'
              '   value = await aenter(manager)\n'
              '   hit_except = False\n'
              '\n'
@@ -3040,14 +3077,17 @@
              '\n'
              '-[ Footnotes ]-\n'
              '\n'
-             '[1] The exception is propagated to the invocation stack unless\n'
-             '    there is a "finally" clause which happens to raise another\n'
-             '    exception. That new exception causes the old one to be '
-             'lost.\n'
+             '[1] The exception is propagated to the invocation stack unless '
+             'there\n'
+             '    is a "finally" clause which happens to raise another '
+             'exception.\n'
+             '    That new exception causes the old one to be lost.\n'
              '\n'
-             '[2] A string literal appearing as the first statement in the\n'
-             '    function body is transformed into the function’s "__doc__"\n'
-             '    attribute and therefore the function’s *docstring*.\n'
+             '[2] A string literal appearing as the first statement in the '
+             'function\n'
+             '    body is transformed into the function’s "__doc__" attribute '
+             'and\n'
+             '    therefore the function’s *docstring*.\n'
              '\n'
              '[3] A string literal appearing as the first statement in the '
              'class\n'
@@ -3146,8 +3186,8 @@
                 '  complex;\n'
                 '\n'
                 '* otherwise, if either argument is a floating point number, '
-                'the\n'
-                '  other is converted to floating point;\n'
+                'the other\n'
+                '  is converted to floating point;\n'
                 '\n'
                 '* otherwise, both must be integers and no conversion is '
                 'necessary.\n'
@@ -3257,7 +3297,9 @@
                   'for\n'
                   '   objects that still exist when the interpreter exits.\n'
                   '\n'
-                  '   Note: "del x" doesn’t directly call "x.__del__()" — the '
+                  '   Note:\n'
+                  '\n'
+                  '     "del x" doesn’t directly call "x.__del__()" — the '
                   'former\n'
                   '     decrements the reference count for "x" by one, and the '
                   'latter is\n'
@@ -3281,13 +3323,15 @@
                   '\n'
                   '   See also: Documentation for the "gc" module.\n'
                   '\n'
-                  '   Warning: Due to the precarious circumstances under '
-                  'which\n'
-                  '     "__del__()" methods are invoked, exceptions that occur '
-                  'during\n'
-                  '     their execution are ignored, and a warning is printed '
-                  'to\n'
-                  '     "sys.stderr" instead. In particular:\n'
+                  '   Warning:\n'
+                  '\n'
+                  '     Due to the precarious circumstances under which '
+                  '"__del__()"\n'
+                  '     methods are invoked, exceptions that occur during '
+                  'their execution\n'
+                  '     are ignored, and a warning is printed to "sys.stderr" '
+                  'instead.\n'
+                  '     In particular:\n'
                   '\n'
                   '     * "__del__()" can be invoked when arbitrary code is '
                   'being\n'
@@ -3300,22 +3344,20 @@
                   '       that gets interrupted to execute "__del__()".\n'
                   '\n'
                   '     * "__del__()" can be executed during interpreter '
-                  'shutdown.  As\n'
-                  '       a consequence, the global variables it needs to '
-                  'access\n'
-                  '       (including other modules) may already have been '
-                  'deleted or set\n'
-                  '       to "None". Python guarantees that globals whose name '
-                  'begins\n'
-                  '       with a single underscore are deleted from their '
-                  'module before\n'
-                  '       other globals are deleted; if no other references to '
-                  'such\n'
-                  '       globals exist, this may help in assuring that '
-                  'imported modules\n'
-                  '       are still available at the time when the "__del__()" '
-                  'method is\n'
-                  '       called.\n'
+                  'shutdown.  As a\n'
+                  '       consequence, the global variables it needs to access '
+                  '(including\n'
+                  '       other modules) may already have been deleted or set '
+                  'to "None".\n'
+                  '       Python guarantees that globals whose name begins '
+                  'with a single\n'
+                  '       underscore are deleted from their module before '
+                  'other globals\n'
+                  '       are deleted; if no other references to such globals '
+                  'exist, this\n'
+                  '       may help in assuring that imported modules are still '
+                  'available\n'
+                  '       at the time when the "__del__()" method is called.\n'
                   '\n'
                   'object.__repr__(self)\n'
                   '\n'
@@ -3400,7 +3442,7 @@
                   '\n'
                   '   Changed in version 3.7: "object.__format__(x, \'\')" is '
                   'now\n'
-                  '   equivalent to "str(x)" rather than "format(str(self), '
+                  '   equivalent to "str(x)" rather than "format(str(x), '
                   '\'\')".\n'
                   '\n'
                   'object.__lt__(self, other)\n'
@@ -3434,16 +3476,21 @@
                   '   on the value to determine if the result is true or '
                   'false.\n'
                   '\n'
-                  '   By default, "__ne__()" delegates to "__eq__()" and '
-                  'inverts the\n'
-                  '   result unless it is "NotImplemented".  There are no '
-                  'other implied\n'
-                  '   relationships among the comparison operators, for '
-                  'example, the\n'
-                  '   truth of "(x<y or x==y)" does not imply "x<=y". To '
-                  'automatically\n'
-                  '   generate ordering operations from a single root '
-                  'operation, see\n'
+                  '   By default, "object" implements "__eq__()" by using '
+                  '"is", returning\n'
+                  '   "NotImplemented" in the case of a false comparison: '
+                  '"True if x is y\n'
+                  '   else NotImplemented". For "__ne__()", by default it '
+                  'delegates to\n'
+                  '   "__eq__()" and inverts the result unless it is '
+                  '"NotImplemented".\n'
+                  '   There are no other implied relationships among the '
+                  'comparison\n'
+                  '   operators or default implementations; for example, the '
+                  'truth of\n'
+                  '   "(x<y or x==y)" does not imply "x<=y". To automatically '
+                  'generate\n'
+                  '   ordering operations from a single root operation, see\n'
                   '   "functools.total_ordering()".\n'
                   '\n'
                   '   See the paragraph on "__hash__()" for some important '
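The paragraph above points to functools.total_ordering() for deriving the
remaining comparison operators from one root operation; a compact sketch:

    from functools import total_ordering

    @total_ordering
    class Version:
        def __init__(self, n):
            self.n = n
        def __eq__(self, other):
            return self.n == other.n
        def __lt__(self, other):
            return self.n < other.n

    assert Version(1) <= Version(2)   # __le__ synthesized from __eq__ and __lt__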
@@ -3491,19 +3538,21 @@
                   '      def __hash__(self):\n'
                   '          return hash((self.name, self.nick, self.color))\n'
                   '\n'
-                  '   Note: "hash()" truncates the value returned from an '
-                  'object’s\n'
-                  '     custom "__hash__()" method to the size of a '
-                  '"Py_ssize_t".  This\n'
-                  '     is typically 8 bytes on 64-bit builds and 4 bytes on '
-                  '32-bit\n'
-                  '     builds. If an object’s   "__hash__()" must '
-                  'interoperate on builds\n'
-                  '     of different bit sizes, be sure to check the width on '
-                  'all\n'
-                  '     supported builds.  An easy way to do this is with '
-                  '"python -c\n'
-                  '     "import sys; print(sys.hash_info.width)"".\n'
+                  '   Note:\n'
+                  '\n'
+                  '     "hash()" truncates the value returned from an object’s '
+                  'custom\n'
+                  '     "__hash__()" method to the size of a "Py_ssize_t".  '
+                  'This is\n'
+                  '     typically 8 bytes on 64-bit builds and 4 bytes on '
+                  '32-bit builds.\n'
+                  '     If an object’s "__hash__()" must interoperate on '
+                  'builds of\n'
+                  '     different bit sizes, be sure to check the width on all '
+                  'supported\n'
+                  '     builds.  An easy way to do this is with "python -c '
+                  '"import sys;\n'
+                  '     print(sys.hash_info.width)"".\n'
                   '\n'
                   '   If a class does not define an "__eq__()" method it '
                   'should not\n'
@@ -3561,22 +3610,24 @@
                   '   hashable by an "isinstance(obj, '
                   'collections.abc.Hashable)" call.\n'
                   '\n'
-                  '   Note: By default, the "__hash__()" values of str and '
-                  'bytes\n'
-                  '     objects are “salted” with an unpredictable random '
-                  'value.\n'
-                  '     Although they remain constant within an individual '
-                  'Python\n'
-                  '     process, they are not predictable between repeated '
-                  'invocations of\n'
-                  '     Python.This is intended to provide protection against '
-                  'a denial-\n'
-                  '     of-service caused by carefully-chosen inputs that '
-                  'exploit the\n'
-                  '     worst case performance of a dict insertion, O(n^2) '
-                  'complexity.\n'
-                  '     See '
-                  'http://www.ocert.org/advisories/ocert-2011-003.html for\n'
+                  '   Note:\n'
+                  '\n'
+                  '     By default, the "__hash__()" values of str and bytes '
+                  'objects are\n'
+                  '     “salted” with an unpredictable random value.  Although '
+                  'they\n'
+                  '     remain constant within an individual Python process, '
+                  'they are not\n'
+                  '     predictable between repeated invocations of '
+                  'Python. This is\n'
+                  '     intended to provide protection against a '
+                  'denial-of-service caused\n'
+                  '     by carefully-chosen inputs that exploit the worst '
+                  'case\n'
+                  '     performance of a dict insertion, O(n^2) complexity.  '
+                  'See\n'
+                  '     http://www.ocert.org/advisories/ocert-2011-003.html '
+                  'for\n'
                   '     details. Changing hash values affects the iteration '
                   'order of sets.\n'
                   '     Python has never made guarantees about this ordering '
@@ -3966,7 +4017,7 @@
              'is\n'
              '   first hit. The arguments are the same as for "break".\n'
              '\n'
-             'cl(ear) [filename:lineno | bpnumber [bpnumber ...]]\n'
+             'cl(ear) [filename:lineno | bpnumber ...]\n'
              '\n'
              '   With a *filename:lineno* argument, clear all the breakpoints '
              'at\n'
@@ -3976,7 +4027,7 @@
              'first\n'
              '   ask confirmation).\n'
              '\n'
-             'disable [bpnumber [bpnumber ...]]\n'
+             'disable [bpnumber ...]\n'
              '\n'
              '   Disable the breakpoints given as a space separated list of\n'
              '   breakpoint numbers.  Disabling a breakpoint means it cannot '
@@ -3985,7 +4036,7 @@
              'breakpoint, it\n'
              '   remains in the list of breakpoints and can be (re-)enabled.\n'
              '\n'
-             'enable [bpnumber [bpnumber ...]]\n'
+             'enable [bpnumber ...]\n'
              '\n'
              '   Enable the breakpoints specified.\n'
              '\n'
@@ -4156,9 +4207,11 @@
              'its\n'
              '   value.\n'
              '\n'
-             '   Note: "print()" can also be used, but is not a debugger '
-             'command —\n'
-             '     this executes the Python "print()" function.\n'
+             '   Note:\n'
+             '\n'
+             '     "print()" can also be used, but is not a debugger command — '
+             'this\n'
+             '     executes the Python "print()" function.\n'
              '\n'
              'pp expression\n'
              '\n'
@@ -4290,8 +4343,8 @@
              '-[ Footnotes ]-\n'
              '\n'
              '[1] Whether a frame is considered to originate in a certain '
-             'module\n'
-             '    is determined by the "__name__" in the frame globals.\n',
+             'module is\n'
+             '    determined by the "__name__" in the frame globals.\n',
  'del': 'The "del" statement\n'
         '*******************\n'
         '\n'
@@ -4471,13 +4524,15 @@
                'about the\n'
                'exceptional condition.\n'
                '\n'
-               'Note: Exception messages are not part of the Python API.  '
-               'Their\n'
-               '  contents may change from one version of Python to the next '
-               'without\n'
-               '  warning and should not be relied on by code which will run '
-               'under\n'
-               '  multiple versions of the interpreter.\n'
+               'Note:\n'
+               '\n'
+               '  Exception messages are not part of the Python API.  Their '
+               'contents\n'
+               '  may change from one version of Python to the next without '
+               'warning\n'
+               '  and should not be relied on by code which will run under '
+               'multiple\n'
+               '  versions of the interpreter.\n'
                '\n'
                'See also the description of the "try" statement in section The '
                'try\n'
@@ -4487,10 +4542,9 @@
                '-[ Footnotes ]-\n'
                '\n'
                '[1] This limitation occurs because the code that is executed '
-               'by\n'
-               '    these operations is not available at the time the module '
-               'is\n'
-               '    compiled.\n',
+               'by these\n'
+               '    operations is not available at the time the module is '
+               'compiled.\n',
  'execmodel': 'Execution model\n'
               '***************\n'
               '\n'
@@ -4512,9 +4566,13 @@
               '(a\n'
               'command specified on the interpreter command line with the '
               '"-c"\n'
-              'option) is a code block.  The string argument passed to the '
-              'built-in\n'
-              'functions "eval()" and "exec()" is a code block.\n'
+              'option) is a code block. A module run as a top level script (as '
+              'module\n'
+              '"__main__") from the command line using a "-m" argument is also '
+              'a code\n'
+              'block. The string argument passed to the built-in functions '
+              '"eval()"\n'
+              'and "exec()" is a code block.\n'
               '\n'
               'A code block is executed in an *execution frame*.  A frame '
               'contains\n'
@@ -4792,13 +4850,15 @@
               'about the\n'
               'exceptional condition.\n'
               '\n'
-              'Note: Exception messages are not part of the Python API.  '
-              'Their\n'
-              '  contents may change from one version of Python to the next '
-              'without\n'
-              '  warning and should not be relied on by code which will run '
-              'under\n'
-              '  multiple versions of the interpreter.\n'
+              'Note:\n'
+              '\n'
+              '  Exception messages are not part of the Python API.  Their '
+              'contents\n'
+              '  may change from one version of Python to the next without '
+              'warning\n'
+              '  and should not be relied on by code which will run under '
+              'multiple\n'
+              '  versions of the interpreter.\n'
               '\n'
               'See also the description of the "try" statement in section The '
               'try\n'
@@ -4807,11 +4867,10 @@
               '\n'
               '-[ Footnotes ]-\n'
               '\n'
-              '[1] This limitation occurs because the code that is executed '
-              'by\n'
-              '    these operations is not available at the time the module '
-              'is\n'
-              '    compiled.\n',
+              '[1] This limitation occurs because the code that is executed by '
+              'these\n'
+              '    operations is not available at the time the module is '
+              'compiled.\n',
  'exprlists': 'Expression lists\n'
               '****************\n'
               '\n'
@@ -4930,8 +4989,11 @@
         'i\n'
         ':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, 2]".\n'
         '\n'
-        'Note: There is a subtlety when the sequence is being modified by the\n'
-        '  loop (this can only occur for mutable sequences, e.g. lists).  An\n'
+        'Note:\n'
+        '\n'
+        '  There is a subtlety when the sequence is being modified by the '
+        'loop\n'
+        '  (this can only occur for mutable sequences, e.g. lists).  An\n'
         '  internal counter is used to keep track of which item is used next,\n'
         '  and this is incremented on each iteration.  When this counter has\n'
         '  reached the length of the sequence the loop terminates.  This '
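The note above about mutating a sequence mid-loop is conventionally
handled by iterating over a copy (example not from the diff):

    words = ['cat', 'window', 'defenestrate']
    for w in words[:]:           # iterate over a shallow copy...
        if len(w) > 6:
            words.remove(w)      # ...so mutating the original is safe
    print(words)                 # ['cat', 'window']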
@@ -5245,24 +5307,23 @@
                   'for the\n'
                   'conversion.  The alternate form is defined differently for '
                   'different\n'
-                  'types.  This option is only valid for integer, float, '
-                  'complex and\n'
-                  'Decimal types. For integers, when binary, octal, or '
-                  'hexadecimal output\n'
-                  'is used, this option adds the prefix respective "\'0b\'", '
-                  '"\'0o\'", or\n'
-                  '"\'0x\'" to the output value. For floats, complex and '
-                  'Decimal the\n'
-                  'alternate form causes the result of the conversion to '
-                  'always contain a\n'
-                  'decimal-point character, even if no digits follow it. '
-                  'Normally, a\n'
-                  'decimal-point character appears in the result of these '
-                  'conversions\n'
-                  'only if a digit follows it. In addition, for "\'g\'" and '
-                  '"\'G\'"\n'
-                  'conversions, trailing zeros are not removed from the '
-                  'result.\n'
+                  'types.  This option is only valid for integer, float and '
+                  'complex\n'
+                  'types. For integers, when binary, octal, or hexadecimal '
+                  'output is\n'
+                  'used, this option adds the prefix respective "\'0b\'", '
+                  '"\'0o\'", or "\'0x\'"\n'
+                  'to the output value. For float and complex the alternate '
+                  'form causes\n'
+                  'the result of the conversion to always contain a '
+                  'decimal-point\n'
+                  'character, even if no digits follow it. Normally, a '
+                  'decimal-point\n'
+                  'character appears in the result of these conversions only '
+                  'if a digit\n'
+                  'follows it. In addition, for "\'g\'" and "\'G\'" '
+                  'conversions, trailing\n'
+                  'zeros are not removed from the result.\n'
                   '\n'
                   'The "\',\'" option signals the use of a comma for a '
                   'thousands separator.\n'
@@ -5400,9 +5461,8 @@
                   'the integer\n'
                   'to a floating point number before formatting.\n'
                   '\n'
-                  'The available presentation types for floating point and '
-                  'decimal values\n'
-                  'are:\n'
+                  'The available presentation types for "float" and "Decimal" '
+                  'values are:\n'
                   '\n'
                   '   '
                   '+-----------+------------------------------------------------------------+\n'
@@ -5411,24 +5471,50 @@
                   '|\n'
                   '   '
                   '|===========|============================================================|\n'
-                  '   | "\'e\'"     | Exponent notation. Prints the number in '
-                  'scientific         |\n'
-                  '   |           | notation using the letter ‘e’ to indicate '
-                  'the exponent.    |\n'
-                  '   |           | The default precision is '
-                  '"6".                              |\n'
+                  '   | "\'e\'"     | Scientific notation. For a given '
+                  'precision "p", formats    |\n'
+                  '   |           | the number in scientific notation with the '
+                  'letter ‘e’      |\n'
+                  '   |           | separating the coefficient from the '
+                  'exponent. The          |\n'
+                  '   |           | coefficient has one digit before and "p" '
+                  'digits after the  |\n'
+                  '   |           | decimal point, for a total of "p + 1" '
+                  'significant digits.  |\n'
+                  '   |           | With no precision given, uses a precision '
+                  'of "6" digits    |\n'
+                  '   |           | after the decimal point for "float", and '
+                  'shows all         |\n'
+                  '   |           | coefficient digits for "Decimal". If no '
+                  'digits follow the  |\n'
+                  '   |           | decimal point, the decimal point is also '
+                  'removed unless    |\n'
+                  '   |           | the "#" option is '
+                  'used.                                    |\n'
                   '   '
                   '+-----------+------------------------------------------------------------+\n'
-                  '   | "\'E\'"     | Exponent notation. Same as "\'e\'" '
-                  'except it uses an upper   |\n'
+                  '   | "\'E\'"     | Scientific notation. Same as "\'e\'" '
+                  'except it uses an upper |\n'
                   '   |           | case ‘E’ as the separator '
                   'character.                       |\n'
                   '   '
                   '+-----------+------------------------------------------------------------+\n'
-                  '   | "\'f\'"     | Fixed-point notation. Displays the '
-                  'number as a fixed-point |\n'
-                  '   |           | number. The default precision is '
-                  '"6".                      |\n'
+                  '   | "\'f\'"     | Fixed-point notation. For a given '
+                  'precision "p", formats   |\n'
+                  '   |           | the number as a decimal number with '
+                  'exactly "p" digits     |\n'
+                  '   |           | following the decimal point. With no '
+                  'precision given, uses |\n'
+                  '   |           | a precision of "6" digits after the '
+                  'decimal point for      |\n'
+                  '   |           | "float", and uses a precision large enough '
+                  'to show all     |\n'
+                  '   |           | coefficient digits for "Decimal". If no '
+                  'digits follow the  |\n'
+                  '   |           | decimal point, the decimal point is also '
+                  'removed unless    |\n'
+                  '   |           | the "#" option is '
+                  'used.                                    |\n'
                   '   '
                   '+-----------+------------------------------------------------------------+\n'
                   '   | "\'F\'"     | Fixed-point notation. Same as "\'f\'", '
@@ -5474,9 +5560,14 @@
                   '   |           | regardless of the precision.  A precision '
                   'of "0" is        |\n'
                   '   |           | treated as equivalent to a precision of '
-                  '"1". The default   |\n'
-                  '   |           | precision is '
-                  '"6".                                          |\n'
+                  '"1". With no       |\n'
+                  '   |           | precision given, uses a precision of "6" '
+                  'significant       |\n'
+                  '   |           | digits for "float", and shows all '
+                  'coefficient digits for   |\n'
+                  '   |           | '
+                  '"Decimal".                                                 '
+                  '|\n'
                   '   '
                   '+-----------+------------------------------------------------------------+\n'
                   '   | "\'G\'"     | General format. Same as "\'g\'" except '
@@ -5689,8 +5780,8 @@
              '[parameter_list] ")"\n'
              '               ["->" expression] ":" suite\n'
              '   decorators                ::= decorator+\n'
-             '   decorator                 ::= "@" dotted_name ["(" '
-             '[argument_list [","]] ")"] NEWLINE\n'
+             '   decorator                 ::= "@" assignment_expression '
+             'NEWLINE\n'
              '   dotted_name               ::= identifier ("." identifier)*\n'
              '   parameter_list            ::= defparameter ("," '
              'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n'
@@ -5744,6 +5835,11 @@
              'the name\n'
              '"func".\n'
              '\n'
+             'Changed in version 3.9: Functions may be decorated with any '
+             'valid\n'
+             '"assignment_expression". Previously, the grammar was much more\n'
+             'restrictive; see **PEP 614** for details.\n'
+             '\n'
              'When one or more *parameters* have the form *parameter* "="\n'
              '*expression*, the function is said to have “default parameter '
              'values.”\n'
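A minimal sketch of the relaxed **PEP 614** grammar from the versionchanged note above; any assignment_expression that evaluates to a callable may now follow "@" (the names here are illustrative):

    buttons = [lambda f: f]   # hypothetical list of decorator callables

    @buttons[0]               # accepted in 3.9+; the pre-3.9 grammar rejected it
    def handler():
        pass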
@@ -5934,7 +6030,9 @@
                'defined.\n'
                '   See section The import statement.\n'
                '\n'
-               '   Note: The name "_" is often used in conjunction with\n'
+               '   Note:\n'
+               '\n'
+               '     The name "_" is often used in conjunction with\n'
                '     internationalization; refer to the documentation for the\n'
                '     "gettext" module for more information on this '
                'convention.\n'
@@ -6080,7 +6178,9 @@
                 'defined.\n'
                 '   See section The import statement.\n'
                 '\n'
-                '   Note: The name "_" is often used in conjunction with\n'
+                '   Note:\n'
+                '\n'
+                '     The name "_" is often used in conjunction with\n'
                 '     internationalization; refer to the documentation for '
                 'the\n'
                 '     "gettext" module for more information on this '
@@ -6165,8 +6265,9 @@
            '\n'
            '1. find a module, loading and initializing it if necessary\n'
            '\n'
-           '2. define a name or names in the local namespace for the scope\n'
-           '   where the "import" statement occurs.\n'
+           '2. define a name or names in the local namespace for the scope '
+           'where\n'
+           '   the "import" statement occurs.\n'
            '\n'
            'When the statement contains multiple clauses (separated by commas) '
            'the\n'
@@ -6192,8 +6293,9 @@
            'made\n'
            'available in the local namespace in one of three ways:\n'
            '\n'
-           '* If the module name is followed by "as", then the name following\n'
-           '  "as" is bound directly to the imported module.\n'
+           '* If the module name is followed by "as", then the name following '
+           '"as"\n'
+           '  is bound directly to the imported module.\n'
            '\n'
            '* If no other name is specified, and the module being imported is '
            'a\n'
@@ -6342,8 +6444,8 @@
            '\n'
            '* other future statements.\n'
            '\n'
-           'The only feature in Python 3.7 that requires using the future\n'
-           'statement is "annotations".\n'
+           'The only feature that requires using the future statement is\n'
+           '"annotations" (see **PEP 563**).\n'
            '\n'
            'All historical features enabled by the future statement are still\n'
            'recognized by Python 3.  The list includes "absolute_import",\n'
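For example, under **PEP 563** the future import makes annotations lazy, so names used in them need not exist at definition time (a sketch; UndefinedName is deliberately unbound):

    from __future__ import annotations   # store annotations as strings

    def f(x: UndefinedName) -> "not evaluated either":   # no NameError here
        return x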
@@ -6871,15 +6973,18 @@
                   '"__rpow__()" (the\n'
                   '   coercion rules would become too complicated).\n'
                   '\n'
-                  '   Note: If the right operand’s type is a subclass of the '
-                  'left\n'
-                  '     operand’s type and that subclass provides the '
-                  'reflected method\n'
-                  '     for the operation, this method will be called before '
-                  'the left\n'
-                  '     operand’s non-reflected method.  This behavior allows '
-                  'subclasses\n'
-                  '     to override their ancestors’ operations.\n'
+                  '   Note:\n'
+                  '\n'
+                  '     If the right operand’s type is a subclass of the left '
+                  'operand’s\n'
+                  '     type and that subclass provides a different '
+                  'implementation of the\n'
+                  '     reflected method for the operation, this method will '
+                  'be called\n'
+                  '     before the left operand’s non-reflected method. This '
+                  'behavior\n'
+                  '     allows subclasses to override their ancestors’ '
+                  'operations.\n'
                   '\n'
                   'object.__iadd__(self, other)\n'
                   'object.__isub__(self, other)\n'
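The subclass-precedence note above in action, as a minimal sketch:

    class Base:
        def __add__(self, other):
            return 'Base.__add__'

    class Derived(Base):
        def __radd__(self, other):       # differs from the base's reflected method
            return 'Derived.__radd__'

    print(Base() + Derived())            # prints Derived.__radd__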
@@ -6923,6 +7028,16 @@
                   'the data\n'
                   '   model.\n'
                   '\n'
+                  '   Note:\n'
+                  '\n'
+                  '     Due to a bug in the dispatching mechanism for "**=", a '
+                  'class that\n'
+                  '     defines "__ipow__()" but returns "NotImplemented" '
+                  'would fail to\n'
+                  '     fall back to "x.__pow__(y)" and "y.__rpow__(x)". This '
+                  'bug is\n'
+                  '     fixed in Python 3.10.\n'
+                  '\n'
                   'object.__neg__(self)\n'
                   'object.__pos__(self)\n'
                   'object.__abs__(self)\n'
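A sketch of the "**=" dispatching bug described in the note above, assuming the pre-3.10 behaviour:

    class P:
        def __ipow__(self, other):
            return NotImplemented        # should trigger fallback to __pow__
        def __pow__(self, other):
            return 'pow'

    p = P()
    p **= 2   # TypeError on 3.9 instead of falling back; fixed in Python 3.10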
@@ -7199,8 +7314,8 @@
                      '-[ Footnotes ]-\n'
                      '\n'
                      '[1] While "abs(x%y) < abs(y)" is true mathematically, '
-                     'for floats\n'
-                     '    it may not be true numerically due to roundoff.  For '
+                     'for floats it\n'
+                     '    may not be true numerically due to roundoff.  For '
                      'example, and\n'
                      '    assuming a platform on which a Python float is an '
                      'IEEE 754 double-\n'
@@ -7265,22 +7380,22 @@
                      '"unicodedata.normalize()".\n'
                      '\n'
                      '[4] Due to automatic garbage-collection, free lists, and '
-                     'the\n'
-                     '    dynamic nature of descriptors, you may notice '
-                     'seemingly unusual\n'
-                     '    behaviour in certain uses of the "is" operator, like '
-                     'those\n'
-                     '    involving comparisons between instance methods, or '
-                     'constants.\n'
-                     '    Check their documentation for more info.\n'
+                     'the dynamic\n'
+                     '    nature of descriptors, you may notice seemingly '
+                     'unusual behaviour\n'
+                     '    in certain uses of the "is" operator, like those '
+                     'involving\n'
+                     '    comparisons between instance methods, or constants.  '
+                     'Check their\n'
+                     '    documentation for more info.\n'
                      '\n'
                      '[5] The "%" operator is also used for string formatting; '
                      'the same\n'
                      '    precedence applies.\n'
                      '\n'
                      '[6] The power operator "**" binds less tightly than an '
-                     'arithmetic\n'
-                     '    or bitwise unary operator on its right, that is, '
+                     'arithmetic or\n'
+                     '    bitwise unary operator on its right, that is, '
                      '"2**-1" is "0.5".\n',
  'pass': 'The "pass" statement\n'
          '********************\n'
@@ -7570,9 +7685,11 @@
                    '\n'
                    '   New in version 3.4.\n'
                    '\n'
-                   'Note: Slicing is done exclusively with the following three '
-                   'methods.\n'
-                   '  A call like\n'
+                   'Note:\n'
+                   '\n'
+                   '  Slicing is done exclusively with the following three '
+                   'methods.  A\n'
+                   '  call like\n'
                    '\n'
                    '     a[1:2] = b\n'
                    '\n'
@@ -7603,7 +7720,9 @@
                    'the\n'
                    '   container), "KeyError" should be raised.\n'
                    '\n'
-                   '   Note: "for" loops expect that an "IndexError" will be '
+                   '   Note:\n'
+                   '\n'
+                   '     "for" loops expect that an "IndexError" will be '
                    'raised for\n'
                    '     illegal indexes to allow proper detection of the end '
                    'of the\n'
@@ -7839,26 +7958,26 @@
                  '-[ Footnotes ]-\n'
                  '\n'
                  '[1] Additional information on these special methods may be '
-                 'found\n'
-                 '    in the Python Reference Manual (Basic customization).\n'
+                 'found in\n'
+                 '    the Python Reference Manual (Basic customization).\n'
                  '\n'
                  '[2] As a consequence, the list "[1, 2]" is considered equal '
-                 'to\n'
-                 '    "[1.0, 2.0]", and similarly for tuples.\n'
+                 'to "[1.0,\n'
+                 '    2.0]", and similarly for tuples.\n'
                  '\n'
                  '[3] They must have since the parser can’t tell the type of '
                  'the\n'
                  '    operands.\n'
                  '\n'
                  '[4] Cased characters are those with general category '
-                 'property\n'
-                 '    being one of “Lu” (Letter, uppercase), “Ll” (Letter, '
-                 'lowercase),\n'
-                 '    or “Lt” (Letter, titlecase).\n'
+                 'property being\n'
+                 '    one of “Lu” (Letter, uppercase), “Ll” (Letter, '
+                 'lowercase), or “Lt”\n'
+                 '    (Letter, titlecase).\n'
                  '\n'
-                 '[5] To format only a tuple you should therefore provide a\n'
-                 '    singleton tuple whose only element is the tuple to be '
-                 'formatted.\n',
+                 '[5] To format only a tuple you should therefore provide a '
+                 'singleton\n'
+                 '    tuple whose only element is the tuple to be formatted.\n',
  'specialnames': 'Special method names\n'
                  '********************\n'
                  '\n'
@@ -8003,7 +8122,9 @@
                  'for\n'
                  '   objects that still exist when the interpreter exits.\n'
                  '\n'
-                 '   Note: "del x" doesn’t directly call "x.__del__()" — the '
+                 '   Note:\n'
+                 '\n'
+                 '     "del x" doesn’t directly call "x.__del__()" — the '
                  'former\n'
                  '     decrements the reference count for "x" by one, and the '
                  'latter is\n'
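The distinction between "del x" and "x.__del__()" shows up directly in CPython's reference counting (a sketch):

    class C:
        def __del__(self):
            print('__del__ called')

    x = C()
    y = x      # reference count is now 2
    del x      # only decrements the count; nothing is printed yet
    del y      # count reaches zero: CPython prints '__del__ called'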
@@ -8027,12 +8148,15 @@
                  '\n'
                  '   See also: Documentation for the "gc" module.\n'
                  '\n'
-                 '   Warning: Due to the precarious circumstances under which\n'
-                 '     "__del__()" methods are invoked, exceptions that occur '
-                 'during\n'
-                 '     their execution are ignored, and a warning is printed '
-                 'to\n'
-                 '     "sys.stderr" instead. In particular:\n'
+                 '   Warning:\n'
+                 '\n'
+                 '     Due to the precarious circumstances under which '
+                 '"__del__()"\n'
+                 '     methods are invoked, exceptions that occur during their '
+                 'execution\n'
+                 '     are ignored, and a warning is printed to "sys.stderr" '
+                 'instead.\n'
+                 '     In particular:\n'
                  '\n'
                  '     * "__del__()" can be invoked when arbitrary code is '
                  'being\n'
@@ -8045,22 +8169,20 @@
                  '       that gets interrupted to execute "__del__()".\n'
                  '\n'
                  '     * "__del__()" can be executed during interpreter '
-                 'shutdown.  As\n'
-                 '       a consequence, the global variables it needs to '
-                 'access\n'
-                 '       (including other modules) may already have been '
-                 'deleted or set\n'
-                 '       to "None". Python guarantees that globals whose name '
-                 'begins\n'
-                 '       with a single underscore are deleted from their '
-                 'module before\n'
-                 '       other globals are deleted; if no other references to '
-                 'such\n'
-                 '       globals exist, this may help in assuring that '
-                 'imported modules\n'
-                 '       are still available at the time when the "__del__()" '
-                 'method is\n'
-                 '       called.\n'
+                 'shutdown.  As a\n'
+                 '       consequence, the global variables it needs to access '
+                 '(including\n'
+                 '       other modules) may already have been deleted or set '
+                 'to "None".\n'
+                 '       Python guarantees that globals whose name begins with '
+                 'a single\n'
+                 '       underscore are deleted from their module before other '
+                 'globals\n'
+                 '       are deleted; if no other references to such globals '
+                 'exist, this\n'
+                 '       may help in assuring that imported modules are still '
+                 'available\n'
+                 '       at the time when the "__del__()" method is called.\n'
                  '\n'
                  'object.__repr__(self)\n'
                  '\n'
@@ -8145,7 +8267,7 @@
                  '\n'
                  '   Changed in version 3.7: "object.__format__(x, \'\')" is '
                  'now\n'
-                 '   equivalent to "str(x)" rather than "format(str(self), '
+                 '   equivalent to "str(x)" rather than "format(str(x), '
                  '\'\')".\n'
                  '\n'
                  'object.__lt__(self, other)\n'
@@ -8179,16 +8301,21 @@
                  '   on the value to determine if the result is true or '
                  'false.\n'
                  '\n'
-                 '   By default, "__ne__()" delegates to "__eq__()" and '
-                 'inverts the\n'
-                 '   result unless it is "NotImplemented".  There are no other '
-                 'implied\n'
-                 '   relationships among the comparison operators, for '
-                 'example, the\n'
-                 '   truth of "(x<y or x==y)" does not imply "x<=y". To '
-                 'automatically\n'
-                 '   generate ordering operations from a single root '
-                 'operation, see\n'
+                 '   By default, "object" implements "__eq__()" by using "is", '
+                 'returning\n'
+                 '   "NotImplemented" in the case of a false comparison: "True '
+                 'if x is y\n'
+                 '   else NotImplemented". For "__ne__()", by default it '
+                 'delegates to\n'
+                 '   "__eq__()" and inverts the result unless it is '
+                 '"NotImplemented".\n'
+                 '   There are no other implied relationships among the '
+                 'comparison\n'
+                 '   operators or default implementations; for example, the '
+                 'truth of\n'
+                 '   "(x<y or x==y)" does not imply "x<=y". To automatically '
+                 'generate\n'
+                 '   ordering operations from a single root operation, see\n'
                  '   "functools.total_ordering()".\n'
                  '\n'
                  '   See the paragraph on "__hash__()" for some important '
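The default comparison behaviour described above, as a doctest sketch:

    >>> class C:
    ...     pass
    ...
    >>> a, b = C(), C()
    >>> a == a        # default __eq__ is identity-based
    True
    >>> a == b        # NotImplemented both ways, then identity fallback
    False
    >>> a != b        # default __ne__ inverts __eq__
    True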
@@ -8236,19 +8363,21 @@
                  '      def __hash__(self):\n'
                  '          return hash((self.name, self.nick, self.color))\n'
                  '\n'
-                 '   Note: "hash()" truncates the value returned from an '
-                 'object’s\n'
-                 '     custom "__hash__()" method to the size of a '
-                 '"Py_ssize_t".  This\n'
-                 '     is typically 8 bytes on 64-bit builds and 4 bytes on '
-                 '32-bit\n'
-                 '     builds. If an object’s   "__hash__()" must interoperate '
-                 'on builds\n'
-                 '     of different bit sizes, be sure to check the width on '
-                 'all\n'
-                 '     supported builds.  An easy way to do this is with '
-                 '"python -c\n'
-                 '     "import sys; print(sys.hash_info.width)"".\n'
+                 '   Note:\n'
+                 '\n'
+                 '     "hash()" truncates the value returned from an object’s '
+                 'custom\n'
+                 '     "__hash__()" method to the size of a "Py_ssize_t".  '
+                 'This is\n'
+                 '     typically 8 bytes on 64-bit builds and 4 bytes on '
+                 '32-bit builds.\n'
+                 '     If an object’s "__hash__()" must interoperate on '
+                 'builds of\n'
+                 '     different bit sizes, be sure to check the width on all '
+                 'supported\n'
+                 '     builds.  An easy way to do this is with "python -c '
+                 '"import sys;\n'
+                 '     print(sys.hash_info.width)"".\n'
                  '\n'
                  '   If a class does not define an "__eq__()" method it should '
                  'not\n'
@@ -8304,21 +8433,22 @@
                  '   hashable by an "isinstance(obj, '
                  'collections.abc.Hashable)" call.\n'
                  '\n'
-                 '   Note: By default, the "__hash__()" values of str and '
-                 'bytes\n'
-                 '     objects are “salted” with an unpredictable random '
-                 'value.\n'
-                 '     Although they remain constant within an individual '
-                 'Python\n'
-                 '     process, they are not predictable between repeated '
-                 'invocations of\n'
-                 '     Python.This is intended to provide protection against a '
-                 'denial-\n'
-                 '     of-service caused by carefully-chosen inputs that '
-                 'exploit the\n'
-                 '     worst case performance of a dict insertion, O(n^2) '
-                 'complexity.\n'
-                 '     See http://www.ocert.org/advisories/ocert-2011-003.html '
+                 '   Note:\n'
+                 '\n'
+                 '     By default, the "__hash__()" values of str and bytes '
+                 'objects are\n'
+                 '     “salted” with an unpredictable random value.  Although '
+                 'they\n'
+                 '     remain constant within an individual Python process, '
+                 'they are not\n'
+                 'predictable between repeated invocations of Python. This '
+                 'is\n'
+                 '     intended to provide protection against a '
+                 'denial-of-service caused\n'
+                 '     by carefully-chosen inputs that exploit the worst case\n'
+                 '     performance of a dict insertion, O(n^2) complexity.  '
+                 'See\n'
+                 '     http://www.ocert.org/advisories/ocert-2011-003.html '
                  'for\n'
                  '     details. Changing hash values affects the iteration '
                  'order of sets.\n'
@@ -8407,11 +8537,17 @@
                  'needs, for\n'
                  '   example, "object.__getattribute__(self, name)".\n'
                  '\n'
-                 '   Note: This method may still be bypassed when looking up '
-                 'special\n'
-                 '     methods as the result of implicit invocation via '
-                 'language syntax\n'
-                 '     or built-in functions. See Special method lookup.\n'
+                 '   Note:\n'
+                 '\n'
+                 '     This method may still be bypassed when looking up '
+                 'special methods\n'
+                 '     as the result of implicit invocation via language '
+                 'syntax or\n'
+                 '     built-in functions. See Special method lookup.\n'
+                 '\n'
+                 '   For certain sensitive attribute accesses, raises an '
+                 'auditing event\n'
+                 '   "object.__getattr__" with arguments "obj" and "name".\n'
                  '\n'
                  'object.__setattr__(self, name, value)\n'
                  '\n'
@@ -8429,6 +8565,11 @@
                  'example,\n'
                  '   "object.__setattr__(self, name, value)".\n'
                  '\n'
+                 '   For certain sensitive attribute assignments, raises an '
+                 'auditing\n'
+                 '   event "object.__setattr__" with arguments "obj", "name", '
+                 '"value".\n'
+                 '\n'
                  'object.__delattr__(self, name)\n'
                  '\n'
                  '   Like "__setattr__()" but for attribute deletion instead '
@@ -8437,6 +8578,10 @@
                  'obj.name" is\n'
                  '   meaningful for the object.\n'
                  '\n'
+                 '   For certain sensitive attribute deletions, raises an '
+                 'auditing event\n'
+                 '   "object.__delattr__" with arguments "obj" and "name".\n'
+                 '\n'
                  'object.__dir__(self)\n'
                  '\n'
                  '   Called when "dir()" is called on the object. A sequence '
@@ -8495,15 +8640,16 @@
                  '\n'
                  '   sys.modules[__name__].__class__ = VerboseModule\n'
                  '\n'
-                 'Note: Defining module "__getattr__" and setting module '
-                 '"__class__"\n'
-                 '  only affect lookups made using the attribute access syntax '
-                 '–\n'
-                 '  directly accessing the module globals (whether by code '
-                 'within the\n'
-                 '  module, or via a reference to the module’s globals '
-                 'dictionary) is\n'
-                 '  unaffected.\n'
+                 'Note:\n'
+                 '\n'
+                 '  Defining module "__getattr__" and setting module '
+                 '"__class__" only\n'
+                 '  affect lookups made using the attribute access syntax – '
+                 'directly\n'
+                 '  accessing the module globals (whether by code within the '
+                 'module, or\n'
+                 '  via a reference to the module’s globals dictionary) is '
+                 'unaffected.\n'
                  '\n'
                  'Changed in version 3.5: "__class__" module attribute is now '
                  'writable.\n'
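A minimal **PEP 562** sketch of the module "__getattr__" hook discussed in the note above (the module and attribute names are illustrative):

    # mymodule.py
    def __getattr__(name):               # only consulted for failed lookups
        if name == 'answer':
            return 42
        raise AttributeError(f'module {__name__!r} has no attribute {name!r}')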
@@ -8586,12 +8732,14 @@
                  'The\n'
                  '   descriptor has been assigned to *name*.\n'
                  '\n'
-                 '   Note: "__set_name__()" is only called implicitly as part '
-                 'of the\n'
-                 '     "type" constructor, so it will need to be called '
-                 'explicitly with\n'
-                 '     the appropriate parameters when a descriptor is added '
-                 'to a class\n'
+                 '   Note:\n'
+                 '\n'
+                 '     "__set_name__()" is only called implicitly as part of '
+                 'the "type"\n'
+                 '     constructor, so it will need to be called explicitly '
+                 'with the\n'
+                 '     appropriate parameters when a descriptor is added to a '
+                 'class\n'
                  '     after initial creation:\n'
                  '\n'
                  '        class A:\n'
@@ -8696,12 +8844,13 @@
                  'both\n'
                  '"__get__()" and "__set__()", while non-data descriptors have '
                  'just the\n'
-                 '"__get__()" method.  Data descriptors with "__set__()" and '
-                 '"__get__()"\n'
-                 'defined always override a redefinition in an instance '
-                 'dictionary.  In\n'
-                 'contrast, non-data descriptors can be overridden by '
-                 'instances.\n'
+                 '"__get__()" method.  Data descriptors with "__get__()" and '
+                 '"__set__()"\n'
+                 '(and/or "__delete__()") defined always override a '
+                 'redefinition in an\n'
+                 'instance dictionary.  In contrast, non-data descriptors can '
+                 'be\n'
+                 'overridden by instances.\n'
                  '\n'
                  'Python methods (including "staticmethod()" and '
                  '"classmethod()") are\n'
@@ -8749,10 +8898,9 @@
                  '~~~~~~~~~~~~~~~~~~~~~~~~~~\n'
                  '\n'
                  '* When inheriting from a class without *__slots__*, the '
-                 '*__dict__*\n'
-                 '  and *__weakref__* attribute of the instances will always '
-                 'be\n'
-                 '  accessible.\n'
+                 '*__dict__* and\n'
+                 '  *__weakref__* attribute of the instances will always be '
+                 'accessible.\n'
                  '\n'
                  '* Without a *__dict__* variable, instances cannot be '
                  'assigned new\n'
@@ -8766,13 +8914,12 @@
                  '  declaration.\n'
                  '\n'
                  '* Without a *__weakref__* variable for each instance, '
-                 'classes\n'
-                 '  defining *__slots__* do not support weak references to '
-                 'its\n'
-                 '  instances. If weak reference support is needed, then add\n'
-                 '  "\'__weakref__\'" to the sequence of strings in the '
-                 '*__slots__*\n'
-                 '  declaration.\n'
+                 'classes defining\n'
+                 '  *__slots__* do not support weak references to their '
+                 'instances. If weak\n'
+                 '  reference support is needed, then add "\'__weakref__\'" to '
+                 'the\n'
+                 '  sequence of strings in the *__slots__* declaration.\n'
                  '\n'
                  '* *__slots__* are implemented at the class level by '
                  'creating\n'
@@ -8785,23 +8932,22 @@
                  '  attribute would overwrite the descriptor assignment.\n'
                  '\n'
                  '* The action of a *__slots__* declaration is not limited to '
-                 'the\n'
-                 '  class where it is defined.  *__slots__* declared in '
-                 'parents are\n'
-                 '  available in child classes. However, child subclasses will '
-                 'get a\n'
-                 '  *__dict__* and *__weakref__* unless they also define '
-                 '*__slots__*\n'
-                 '  (which should only contain names of any *additional* '
-                 'slots).\n'
+                 'the class\n'
+                 '  where it is defined.  *__slots__* declared in parents are '
+                 'available\n'
+                 '  in child classes. However, child subclasses will get a '
+                 '*__dict__*\n'
+                 '  and *__weakref__* unless they also define *__slots__* '
+                 '(which should\n'
+                 '  only contain names of any *additional* slots).\n'
                  '\n'
                  '* If a class defines a slot also defined in a base class, '
-                 'the\n'
-                 '  instance variable defined by the base class slot is '
-                 'inaccessible\n'
-                 '  (except by retrieving its descriptor directly from the '
-                 'base class).\n'
-                 '  This renders the meaning of the program undefined.  In the '
+                 'the instance\n'
+                 '  variable defined by the base class slot is inaccessible '
+                 '(except by\n'
+                 '  retrieving its descriptor directly from the base class). '
+                 'This\n'
+                 '  renders the meaning of the program undefined.  In the '
                  'future, a\n'
                  '  check may be added to prevent this.\n'
                  '\n'
@@ -8811,9 +8957,9 @@
                  'and "tuple".\n'
                  '\n'
                  '* Any non-string iterable may be assigned to *__slots__*. '
-                 'Mappings\n'
-                 '  may also be used; however, in the future, special meaning '
-                 'may be\n'
+                 'Mappings may\n'
+                 '  also be used; however, in the future, special meaning may '
+                 'be\n'
                  '  assigned to the values corresponding to each key.\n'
                  '\n'
                  '* *__class__* assignment works only if both classes have the '
@@ -8829,8 +8975,8 @@
                  '  raise "TypeError".\n'
                  '\n'
                  '* If an iterator is used for *__slots__* then a descriptor '
-                 'is\n'
-                 '  created for each of the iterator’s values. However, the '
+                 'is created\n'
+                 '  for each of the iterator’s values. However, the '
                  '*__slots__*\n'
                  '  attribute will be an empty iterator.\n'
                  '\n'
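A short sketch combining two of the notes above: opting back in to weak references while keeping the attribute restriction of *__slots__*:

    import weakref

    class Point:
        __slots__ = ('x', 'y', '__weakref__')   # re-enable weak references

    p = Point()
    p.x = 1                  # slot attribute: fine
    r = weakref.ref(p)       # works because '__weakref__' is a slot
    p.z = 3                  # AttributeError: no __dict__, no new attributes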
@@ -8883,9 +9029,11 @@
                  'does nothing,\n'
                  '   but raises an error if it is called with any arguments.\n'
                  '\n'
-                 '   Note: The metaclass hint "metaclass" is consumed by the '
-                 'rest of\n'
-                 '     the type machinery, and is never passed to '
+                 '   Note:\n'
+                 '\n'
+                 '     The metaclass hint "metaclass" is consumed by the rest '
+                 'of the\n'
+                 '     type machinery, and is never passed to '
                  '"__init_subclass__"\n'
                  '     implementations. The actual metaclass (rather than the '
                  'explicit\n'
@@ -8953,9 +9101,10 @@
                  'tuple may\n'
                  'be empty, in such case the original base is ignored.\n'
                  '\n'
-                 'See also: **PEP 560** - Core support for typing module and '
-                 'generic\n'
-                 '  types\n'
+                 'See also:\n'
+                 '\n'
+                 '  **PEP 560** - Core support for typing module and generic '
+                 'types\n'
                  '\n'
                  '\n'
                  'Determining the appropriate metaclass\n'
@@ -9213,9 +9362,10 @@
                  'type hints,\n'
                  'other usage is discouraged.\n'
                  '\n'
-                 'See also: **PEP 560** - Core support for typing module and '
-                 'generic\n'
-                 '  types\n'
+                 'See also:\n'
+                 '\n'
+                 '  **PEP 560** - Core support for typing module and generic '
+                 'types\n'
                  '\n'
                  '\n'
                  'Emulating callable objects\n'
@@ -9225,8 +9375,8 @@
                  '\n'
                  '   Called when the instance is “called” as a function; if '
                  'this method\n'
-                 '   is defined, "x(arg1, arg2, ...)" is a shorthand for\n'
-                 '   "x.__call__(arg1, arg2, ...)".\n'
+                 '   is defined, "x(arg1, arg2, ...)" roughly translates to\n'
+                 '   "type(x).__call__(x, arg1, ...)".\n'
                  '\n'
                  '\n'
                  'Emulating container types\n'
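The "roughly translates to" wording above can be checked directly (a sketch):

    class Adder:
        def __call__(self, a, b):
            return a + b

    add = Adder()
    print(add(2, 3))                        # 5
    print(type(add).__call__(add, 2, 3))    # 5 -- the underlying resolution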
@@ -9327,9 +9477,11 @@
                  '\n'
                  '   New in version 3.4.\n'
                  '\n'
-                 'Note: Slicing is done exclusively with the following three '
-                 'methods.\n'
-                 '  A call like\n'
+                 'Note:\n'
+                 '\n'
+                 '  Slicing is done exclusively with the following three '
+                 'methods.  A\n'
+                 '  call like\n'
                  '\n'
                  '     a[1:2] = b\n'
                  '\n'
@@ -9360,8 +9512,10 @@
                  'the\n'
                  '   container), "KeyError" should be raised.\n'
                  '\n'
-                 '   Note: "for" loops expect that an "IndexError" will be '
-                 'raised for\n'
+                 '   Note:\n'
+                 '\n'
+                 '     "for" loops expect that an "IndexError" will be raised '
+                 'for\n'
                  '     illegal indexes to allow proper detection of the end of '
                  'the\n'
                  '     sequence.\n'
@@ -9551,15 +9705,18 @@
                  '"__rpow__()" (the\n'
                  '   coercion rules would become too complicated).\n'
                  '\n'
-                 '   Note: If the right operand’s type is a subclass of the '
-                 'left\n'
-                 '     operand’s type and that subclass provides the reflected '
-                 'method\n'
-                 '     for the operation, this method will be called before '
-                 'the left\n'
-                 '     operand’s non-reflected method.  This behavior allows '
-                 'subclasses\n'
-                 '     to override their ancestors’ operations.\n'
+                 '   Note:\n'
+                 '\n'
+                 '     If the right operand’s type is a subclass of the left '
+                 'operand’s\n'
+                 '     type and that subclass provides a different '
+                 'implementation of the\n'
+                 '     reflected method for the operation, this method will be '
+                 'called\n'
+                 '     before the left operand’s non-reflected method. This '
+                 'behavior\n'
+                 '     allows subclasses to override their ancestors’ '
+                 'operations.\n'
                  '\n'
                  'object.__iadd__(self, other)\n'
                  'object.__isub__(self, other)\n'
@@ -9603,6 +9760,16 @@
                  'the data\n'
                  '   model.\n'
                  '\n'
+                 '   Note:\n'
+                 '\n'
+                 '     Due to a bug in the dispatching mechanism for "**=", a '
+                 'class that\n'
+                 '     defines "__ipow__()" but returns "NotImplemented" would '
+                 'fail to\n'
+                 '     fall back to "x.__pow__(y)" and "y.__rpow__(x)". This '
+                 'bug is\n'
+                 '     fixed in Python 3.10.\n'
+                 '\n'
                  'object.__neg__(self)\n'
                  'object.__pos__(self)\n'
                  'object.__abs__(self)\n'
@@ -9909,9 +10076,20 @@
                    'For a list\n'
                    '   of possible encodings, see section Standard Encodings.\n'
                    '\n'
+                   '   By default, the *errors* argument is not checked for '
+                   'best\n'
+                   '   performance, but only used at the first encoding '
+                   'error. Enable the\n'
+                   '   Python Development Mode, or use a debug build to check '
+                   '*errors*.\n'
+                   '\n'
                    '   Changed in version 3.1: Support for keyword arguments '
                    'added.\n'
                    '\n'
+                   '   Changed in version 3.9: The *errors* argument is now '
+                   'checked in development\n'
+                   '   mode and in debug mode.\n'
+                   '\n'
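A hedged sketch of the 3.9 *errors* checking described above; the handler name is deliberately invalid:

    # Run as:  python -X dev example.py   (or on a debug build)
    'x'.encode('utf-8', 'nonexistent-handler')
    # -X dev: LookupError: unknown error handler name 'nonexistent-handler'
    # default mode: returns b'x', since no encoding error ever occurs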
                    'str.endswith(suffix[, start[, end]])\n'
                    '\n'
                    '   Return "True" if the string ends with the specified '
@@ -9966,11 +10144,13 @@
                    '"-1" if\n'
                    '   *sub* is not found.\n'
                    '\n'
-                   '   Note: The "find()" method should be used only if you '
-                   'need to know\n'
-                   '     the position of *sub*.  To check if *sub* is a '
-                   'substring or not,\n'
-                   '     use the "in" operator:\n'
+                   '   Note:\n'
+                   '\n'
+                   '     The "find()" method should be used only if you need '
+                   'to know the\n'
+                   '     position of *sub*.  To check if *sub* is a substring '
+                   'or not, use\n'
+                   '     the "in" operator:\n'
                    '\n'
                    "        >>> 'Py' in 'Python'\n"
                    '        True\n'
@@ -9999,8 +10179,9 @@
                    '   formatting options that can be specified in format '
                    'strings.\n'
                    '\n'
-                   '   Note: When formatting a number ("int", "float", '
-                   '"complex",\n'
+                   '   Note:\n'
+                   '\n'
+                   '     When formatting a number ("int", "float", "complex",\n'
                    '     "decimal.Decimal" and subclasses) with the "n" type '
                    '(ex:\n'
                    '     "\'{:n}\'.format(1234)"), the function temporarily '
@@ -10205,6 +10386,15 @@
                    '"False"\n'
                    '   otherwise.\n'
                    '\n'
+                   "   >>> 'BANANA'.isupper()\n"
+                   '   True\n'
+                   "   >>> 'banana'.isupper()\n"
+                   '   False\n'
+                   "   >>> 'baNana'.isupper()\n"
+                   '   False\n'
+                   "   >>> ' '.isupper()\n"
+                   '   False\n'
+                   '\n'
                    'str.join(iterable)\n'
                    '\n'
                    '   Return a string which is the concatenation of the '
@@ -10253,6 +10443,16 @@
                    "      >>> 'www.example.com'.lstrip('cmowz.')\n"
                    "      'example.com'\n"
                    '\n'
+                   '   See "str.removeprefix()" for a method that will remove '
+                   'a single\n'
+                   '   prefix string rather than all of a set of characters.  '
+                   'For example:\n'
+                   '\n'
+                   "      >>> 'Arthur: three!'.lstrip('Arthur: ')\n"
+                   "      'ee!'\n"
+                   "      >>> 'Arthur: three!'.removeprefix('Arthur: ')\n"
+                   "      'three!'\n"
+                   '\n'
                    'static str.maketrans(x[, y[, z]])\n'
                    '\n'
                    '   This static method returns a translation table usable '
@@ -10289,6 +10489,35 @@
                    'followed by\n'
                    '   two empty strings.\n'
                    '\n'
+                   'str.removeprefix(prefix, /)\n'
+                   '\n'
+                   '   If the string starts with the *prefix* string, return\n'
+                   '   "string[len(prefix):]". Otherwise, return a copy of the '
+                   'original\n'
+                   '   string:\n'
+                   '\n'
+                   "      >>> 'TestHook'.removeprefix('Test')\n"
+                   "      'Hook'\n"
+                   "      >>> 'BaseTestCase'.removeprefix('Test')\n"
+                   "      'BaseTestCase'\n"
+                   '\n'
+                   '   New in version 3.9.\n'
+                   '\n'
+                   'str.removesuffix(suffix, /)\n'
+                   '\n'
+                   '   If the string ends with the *suffix* string and that '
+                   '*suffix* is\n'
+                   '   not empty, return "string[:-len(suffix)]". Otherwise, '
+                   'return a copy\n'
+                   '   of the original string:\n'
+                   '\n'
+                   "      >>> 'MiscTests'.removesuffix('Tests')\n"
+                   "      'Misc'\n"
+                   "      >>> 'TmpDirMixin'.removesuffix('Tests')\n"
+                   "      'TmpDirMixin'\n"
+                   '\n'
+                   '   New in version 3.9.\n'
+                   '\n'
                    'str.replace(old, new[, count])\n'
                    '\n'
                    '   Return a copy of the string with all occurrences of '
@@ -10366,6 +10595,16 @@
                    "      >>> 'mississippi'.rstrip('ipz')\n"
                    "      'mississ'\n"
                    '\n'
+                   '   See "str.removesuffix()" for a method that will remove '
+                   'a single\n'
+                   '   suffix string rather than all of a set of characters.  '
+                   'For example:\n'
+                   '\n'
+                   "      >>> 'Monty Python'.rstrip(' Python')\n"
+                   "      'M'\n"
+                   "      >>> 'Monty Python'.removesuffix(' Python')\n"
+                   "      'Monty'\n"
+                   '\n'
                    'str.split(sep=None, maxsplit=-1)\n'
                    '\n'
                    '   Return a list of the words in the string, using *sep* '
@@ -10847,17 +11086,20 @@
             '\n'
             '2. Unlike in Standard C, exactly two hex digits are required.\n'
             '\n'
-            '3. In a bytes literal, hexadecimal and octal escapes denote the\n'
-            '   byte with the given value. In a string literal, these escapes\n'
-            '   denote a Unicode character with the given value.\n'
+            '3. In a bytes literal, hexadecimal and octal escapes denote the '
+            'byte\n'
+            '   with the given value. In a string literal, these escapes '
+            'denote a\n'
+            '   Unicode character with the given value.\n'
             '\n'
             '4. Changed in version 3.3: Support for name aliases [1] has been\n'
             '   added.\n'
             '\n'
             '5. Exactly four hex digits are required.\n'
             '\n'
-            '6. Any Unicode character can be encoded this way.  Exactly eight\n'
-            '   hex digits are required.\n'
+            '6. Any Unicode character can be encoded this way.  Exactly eight '
+            'hex\n'
+            '   digits are required.\n'
             '\n'
             'Unlike Standard C, all unrecognized escape sequences are left in '
             'the\n'
@@ -10899,9 +11141,10 @@
  'subscriptions': 'Subscriptions\n'
                   '*************\n'
                   '\n'
-                  'A subscription selects an item of a sequence (string, tuple '
-                  'or list)\n'
-                  'or mapping (dictionary) object:\n'
+                  'Subscription of a sequence (string, tuple or list) or '
+                  'mapping\n'
+                  '(dictionary) object usually selects an item from the '
+                  'collection:\n'
                   '\n'
                   '   subscription ::= primary "[" expression_list "]"\n'
                   '\n'
@@ -10952,7 +11195,13 @@
                   '\n'
                   'A string’s items are characters.  A character is not a '
                   'separate data\n'
-                  'type but a string of exactly one character.\n',
+                  'type but a string of exactly one character.\n'
+                  '\n'
+                  'Subscription of certain *classes* or *types* creates a '
+                  'generic alias.\n'
+                  'In this case, user-defined classes can support subscription '
+                  'by\n'
+                  'providing a "__class_getitem__()" classmethod.\n',
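A doctest sketch of generic-alias subscription and the "__class_getitem__()" hook (the Queue class is illustrative):

    >>> list[int]                        # subscribing a class: a generic alias
    list[int]
    >>> class Queue:
    ...     def __class_getitem__(cls, item):
    ...         return f'{cls.__name__}[{item.__name__}]'
    ...
    >>> Queue[int]
    'Queue[int]'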
  'truth': 'Truth Value Testing\n'
           '*******************\n'
           '\n'
@@ -11170,10 +11419,17 @@
           'for\n'
           '   the operands provided.  (The interpreter will then try the\n'
           '   reflected operation, or some other fallback, depending on the\n'
-          '   operator.)  Its truth value is true.\n'
+          '   operator.)  It should not be evaluated in a boolean context.\n'
           '\n'
           '   See Implementing the arithmetic operations for more details.\n'
           '\n'
+          '   Changed in version 3.9: Evaluating "NotImplemented" in a '
+          'boolean\n'
+          '   context is deprecated. While it currently evaluates as true, it\n'
+          '   will emit a "DeprecationWarning". It will raise a "TypeError" in '
+          'a\n'
+          '   future version of Python.\n'
+          '\n'
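The 3.9 deprecation described above, surfaced as an error for demonstration (a sketch, assuming CPython 3.9's warning text):

    >>> import warnings
    >>> warnings.simplefilter('error')
    >>> bool(NotImplemented)
    Traceback (most recent call last):
      ...
    DeprecationWarning: NotImplemented should not be used in a boolean context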
           'Ellipsis\n'
           '   This type has a single value.  There is a single object with '
           'this\n'
@@ -11191,6 +11447,27 @@
           'representation\n'
           '   in computers.\n'
           '\n'
+          '   The string representations of the numeric classes, computed by\n'
+          '   "__repr__()" and "__str__()", have the following properties:\n'
+          '\n'
+          '   * They are valid numeric literals which, when passed to their '
+          'class\n'
+          '     constructor, produce an object having the value of the '
+          'original\n'
+          '     numeric.\n'
+          '\n'
+          '   * The representation is in base 10, when possible.\n'
+          '\n'
+          '   * Leading zeros, possibly excepting a single zero before a '
+          'decimal\n'
+          '     point, are not shown.\n'
+          '\n'
+          '   * Trailing zeros, possibly excepting a single zero after a '
+          'decimal\n'
+          '     point, are not shown.\n'
+          '\n'
+          '   * A sign is shown only when the number is negative.\n'
+          '\n'
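The round-trip property listed above, as a doctest sketch:

    >>> repr(0.1)                 # base 10, shortest form, no stray zeros
    '0.1'
    >>> float(repr(0.1)) == 0.1   # a valid literal reproducing the value
    True
    >>> repr(-2.50)               # sign shown only because the number is negative
    '-2.5'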
           '   Python distinguishes between integers, floating point numbers, '
           'and\n'
           '   complex numbers:\n'
@@ -12242,6 +12519,21 @@
                  'positional\n'
                  '   argument and a possibly empty set of keyword arguments.\n'
                  '\n'
+                 '   Dictionaries can be created by several means:\n'
+                 '\n'
+                 '   * Use a comma-separated list of "key: value" pairs within '
+                 'braces:\n'
+                 '     "{\'jack\': 4098, \'sjoerd\': 4127}" or "{4098: '
+                 "'jack', 4127:\n"
+                 '     \'sjoerd\'}"\n'
+                 '\n'
+                 '   * Use a dict comprehension: "{}", "{x: x ** 2 for x in '
+                 'range(10)}"\n'
+                 '\n'
+                 '   * Use the type constructor: "dict()", "dict([(\'foo\', '
+                 "100), ('bar',\n"
+                 '     200)])", "dict(foo=100, bar=200)"\n'
+                 '\n'
                  '   If no positional argument is given, an empty dictionary '
                  'is created.\n'
                  '   If a positional argument is given and it is a mapping '
@@ -12279,7 +12571,8 @@
                  "      >>> c = dict(zip(['one', 'two', 'three'], [1, 2, 3]))\n"
                  "      >>> d = dict([('two', 2), ('one', 1), ('three', 3)])\n"
                  "      >>> e = dict({'three': 3, 'one': 1, 'two': 2})\n"
-                 '      >>> a == b == c == d == e\n'
+                 "      >>> f = dict({'one': 1, 'three': 3}, two=2)\n"
+                 '      >>> a == b == c == d == e == f\n'
                  '      True\n'
                  '\n'
                  '   Providing keyword arguments as in the first example only '
@@ -12477,6 +12770,29 @@
                  '         >>> d.values() == d.values()\n'
                  '         False\n'
                  '\n'
+                 '   d | other\n'
+                 '\n'
+                 '      Create a new dictionary with the merged keys and '
+                 'values of *d*\n'
+                 '      and *other*, which must both be dictionaries. The '
+                 'values of\n'
+                 '      *other* take priority when *d* and *other* share '
+                 'keys.\n'
+                 '\n'
+                 '      New in version 3.9.\n'
+                 '\n'
+                 '   d |= other\n'
+                 '\n'
+                 '      Update the dictionary *d* with keys and values from '
+                 '*other*,\n'
+                 '      which may be either a *mapping* or an *iterable* of '
+                 'key/value\n'
+                 '      pairs. The values of *other* take priority when *d* '
+                 'and *other*\n'
+                 '      share keys.\n'
+                 '\n'
+                 '      New in version 3.9.\n'
+                 '\n'
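The new merge operators, as a doctest sketch assuming 3.9 semantics:

    >>> d = {'spam': 1, 'eggs': 2}
    >>> other = {'eggs': 3, 'ham': 4}
    >>> d | other                 # new dict; right operand wins on shared keys
    {'spam': 1, 'eggs': 3, 'ham': 4}
    >>> d |= other                # in-place update of d
    >>> d
    {'spam': 1, 'eggs': 3, 'ham': 4}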
                  '   Dictionaries compare equal if and only if they have the '
                  'same "(key,\n'
                  '   value)" pairs (regardless of ordering). Order comparisons '
@@ -12524,9 +12840,11 @@
                  '\n'
                  '   Changed in version 3.8: Dictionaries are now reversible.\n'
                  '\n'
-                 'See also: "types.MappingProxyType" can be used to create a '
-                 'read-only\n'
-                 '  view of a "dict".\n'
+                 'See also:\n'
+                 '\n'
+                 '  "types.MappingProxyType" can be used to create a read-only '
+                 'view of a\n'
+                 '  "dict".\n'
                  '\n'
                  '\n'
                  'Dictionary view objects\n'
@@ -12910,13 +13228,14 @@
              '"None", it\n'
              '   is treated like "1".\n'
              '\n'
-             '6. Concatenating immutable sequences always results in a new\n'
-             '   object. This means that building up a sequence by repeated\n'
-             '   concatenation will have a quadratic runtime cost in the '
-             'total\n'
-             '   sequence length. To get a linear runtime cost, you must '
-             'switch to\n'
-             '   one of the alternatives below:\n'
+             '6. Concatenating immutable sequences always results in a new '
+             'object.\n'
+             '   This means that building up a sequence by repeated '
+             'concatenation\n'
+             '   will have a quadratic runtime cost in the total sequence '
+             'length.\n'
+             '   To get a linear runtime cost, you must switch to one of the\n'
+             '   alternatives below:\n'
              '\n'
              '   * if concatenating "str" objects, you can build a list and '
              'use\n'
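A small sketch of the str alternative recommended in note 6, using illustrative pieces:

    parts = ['alpha', 'beta', 'gamma']   # illustrative pieces
    s = ''
    for p in parts:
        s += p              # quadratic: copies the accumulated string each time
    assert s == ''.join(parts)   # linear: one pass over all the pieces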
@@ -12934,24 +13253,25 @@
              '   * for other types, investigate the relevant class '
              'documentation\n'
              '\n'
-             '7. Some sequence types (such as "range") only support item\n'
-             '   sequences that follow specific patterns, and hence don’t '
-             'support\n'
-             '   sequence concatenation or repetition.\n'
-             '\n'
-             '8. "index" raises "ValueError" when *x* is not found in *s*. '
-             'Not\n'
-             '   all implementations support passing the additional arguments '
-             '*i*\n'
-             '   and *j*. These arguments allow efficient searching of '
-             'subsections\n'
-             '   of the sequence. Passing the extra arguments is roughly '
-             'equivalent\n'
-             '   to using "s[i:j].index(x)", only without copying any data and '
-             'with\n'
-             '   the returned index being relative to the start of the '
+             '7. Some sequence types (such as "range") only support item '
+             'sequences\n'
+             '   that follow specific patterns, and hence don’t support '
              'sequence\n'
-             '   rather than the start of the slice.\n'
+             '   concatenation or repetition.\n'
+             '\n'
+             '8. "index" raises "ValueError" when *x* is not found in *s*. Not '
+             'all\n'
+             '   implementations support passing the additional arguments *i* '
+             'and\n'
+             '   *j*. These arguments allow efficient searching of subsections '
+             'of\n'
+             '   the sequence. Passing the extra arguments is roughly '
+             'equivalent to\n'
+             '   using "s[i:j].index(x)", only without copying any data and '
+             'with the\n'
+             '   returned index being relative to the start of the sequence '
+             'rather\n'
+             '   than the start of the slice.\n'
              '\n'
              '\n'
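The s[i:j].index(x) equivalence in note 8 can be checked directly with an illustrative list:

    s = ['a', 'b', 'a', 'c', 'a']
    # Search only the subsection s[2:5]; the result indexes into s itself,
    # and no copy of the data is made.
    assert s.index('a', 2, 5) == 2
    assert s[2:5].index('a') + 2 == 2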
              'Immutable Sequence Types\n'
@@ -13079,17 +13399,17 @@
              '1. *t* must have the same length as the slice it is replacing.\n'
              '\n'
              '2. The optional argument *i* defaults to "-1", so that by '
-             'default\n'
-             '   the last item is removed and returned.\n'
+             'default the\n'
+             '   last item is removed and returned.\n'
              '\n'
              '3. "remove()" raises "ValueError" when *x* is not found in *s*.\n'
              '\n'
-             '4. The "reverse()" method modifies the sequence in place for\n'
-             '   economy of space when reversing a large sequence.  To remind '
-             'users\n'
-             '   that it operates by side effect, it does not return the '
-             'reversed\n'
-             '   sequence.\n'
+             '4. The "reverse()" method modifies the sequence in place for '
+             'economy\n'
+             '   of space when reversing a large sequence.  To remind users '
+             'that it\n'
+             '   operates by side effect, it does not return the reversed '
+             'sequence.\n'
              '\n'
              '5. "clear()" and "copy()" are included for consistency with the\n'
              '   interfaces of mutable containers that don’t support slicing\n'
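A short sketch of notes 2 and 4 above, with illustrative values:

    s = [1, 2, 3]
    assert s.pop() == 3           # note 2: i defaults to -1, the last item
    s = [1, 2, 3]
    assert s.reverse() is None    # note 4: in place; returns nothing
    assert s == [3, 2, 1]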
@@ -13126,9 +13446,9 @@
              '   * Using a pair of square brackets to denote the empty list: '
              '"[]"\n'
              '\n'
-             '   * Using square brackets, separating items with commas: '
-             '"[a]",\n'
-             '     "[a, b, c]"\n'
+             '   * Using square brackets, separating items with commas: "[a]", '
+             '"[a,\n'
+             '     b, c]"\n'
              '\n'
              '   * Using a list comprehension: "[x for x in iterable]"\n'
              '\n'
@@ -13431,9 +13751,9 @@
              '\n'
              'See also:\n'
              '\n'
-             '  * The linspace recipe shows how to implement a lazy version '
-             'of\n'
-             '    range suitable for floating point applications.\n',
+             '  * The linspace recipe shows how to implement a lazy version of '
+             'range\n'
+             '    suitable for floating point applications.\n',
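One possible shape of the lazy linspace idea referenced above (a sketch, not the recipe verbatim; assumes num >= 2):

    def linspace(start, stop, num):
        # Lazily yield num evenly spaced floats from start to stop inclusive.
        step = (stop - start) / (num - 1)
        for i in range(num):
            yield start + i * step

    assert list(linspace(0.0, 1.0, 5)) == [0.0, 0.25, 0.5, 0.75, 1.0]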
  'typesseq-mutable': 'Mutable Sequence Types\n'
                      '**********************\n'
                      '\n'
@@ -13544,19 +13864,18 @@
                      'replacing.\n'
                      '\n'
                      '2. The optional argument *i* defaults to "-1", so that '
-                     'by default\n'
-                     '   the last item is removed and returned.\n'
+                     'by default the\n'
+                     '   last item is removed and returned.\n'
                      '\n'
                      '3. "remove()" raises "ValueError" when *x* is not found '
                      'in *s*.\n'
                      '\n'
                      '4. The "reverse()" method modifies the sequence in place '
-                     'for\n'
-                     '   economy of space when reversing a large sequence.  To '
-                     'remind users\n'
-                     '   that it operates by side effect, it does not return '
-                     'the reversed\n'
-                     '   sequence.\n'
+                     'for economy\n'
+                     '   of space when reversing a large sequence.  To remind '
+                     'users that it\n'
+                     '   operates by side effect, it does not return the '
+                     'reversed sequence.\n'
                      '\n'
                      '5. "clear()" and "copy()" are included for consistency '
                      'with the\n'
@@ -13639,8 +13958,9 @@
          'The execution of the "with" statement with one “item” proceeds as\n'
          'follows:\n'
          '\n'
-         '1. The context expression (the expression given in the "with_item")\n'
-         '   is evaluated to obtain a context manager.\n'
+         '1. The context expression (the expression given in the "with_item") '
+         'is\n'
+         '   evaluated to obtain a context manager.\n'
          '\n'
          '2. The context manager’s "__enter__()" is loaded for later use.\n'
          '\n'
@@ -13648,12 +13968,15 @@
          '\n'
          '4. The context manager’s "__enter__()" method is invoked.\n'
          '\n'
-         '5. If a target was included in the "with" statement, the return\n'
-         '   value from "__enter__()" is assigned to it.\n'
+         '5. If a target was included in the "with" statement, the return '
+         'value\n'
+         '   from "__enter__()" is assigned to it.\n'
          '\n'
-         '   Note: The "with" statement guarantees that if the "__enter__()"\n'
-         '     method returns without an error, then "__exit__()" will always '
-         'be\n'
+         '   Note:\n'
+         '\n'
+         '     The "with" statement guarantees that if the "__enter__()" '
+         'method\n'
+         '     returns without an error, then "__exit__()" will always be\n'
          '     called. Thus, if an error occurs during the assignment to the\n'
          '     target list, it will be treated the same as an error occurring\n'
          '     within the suite would be. See step 6 below.\n'
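A tiny, hypothetical context manager makes the numbered steps above concrete:

    class Demo:
        def __enter__(self):                     # step 4
            return 'resource'                    # step 5: bound to the target
        def __exit__(self, exc_type, exc, tb):   # step 6: always runs
            return False                         # do not suppress exceptions

    with Demo() as r:   # step 1: Demo() supplies the context manager
        print('suite runs with', r)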
diff --git a/common/py3-stdlib/queue.py b/common/py3-stdlib/queue.py
index 5bb0431..10dbcbc 100644
--- a/common/py3-stdlib/queue.py
+++ b/common/py3-stdlib/queue.py
@@ -1,6 +1,7 @@
 '''A multi-producer, multi-consumer queue.'''
 
 import threading
+import types
 from collections import deque
 from heapq import heappush, heappop
 from time import monotonic as time
@@ -216,6 +217,8 @@
     def _get(self):
         return self.queue.popleft()
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
 
 class PriorityQueue(Queue):
     '''Variant of Queue that retrieves open entries in priority order (lowest first).
@@ -316,6 +319,8 @@
         '''Return the approximate size of the queue (not reliable!).'''
         return len(self._queue)
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
 
 if SimpleQueue is None:
     SimpleQueue = _PySimpleQueue
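The __class_getitem__ = classmethod(types.GenericAlias) additions make Queue and SimpleQueue subscriptable for type hints; a quick sketch (Python 3.9+):

    from queue import Queue

    q: Queue[int] = Queue()   # Queue[int] is a types.GenericAlias at runtime
    q.put(1)
    assert q.get() == 1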
diff --git a/common/py3-stdlib/quopri.py b/common/py3-stdlib/quopri.py
index cbd979a..08899c5 100755
--- a/common/py3-stdlib/quopri.py
+++ b/common/py3-stdlib/quopri.py
@@ -204,11 +204,11 @@
         print("-t: quote tabs")
         print("-d: decode; default encode")
         sys.exit(2)
-    deco = 0
-    tabs = 0
+    deco = False
+    tabs = False
     for o, a in opts:
-        if o == '-t': tabs = 1
-        if o == '-d': deco = 1
+        if o == '-t': tabs = True
+        if o == '-d': deco = True
     if tabs and deco:
         sys.stdout = sys.stderr
         print("-t and -d are mutually exclusive")
diff --git a/common/py3-stdlib/random.py b/common/py3-stdlib/random.py
index 365a019..a6454f5 100644
--- a/common/py3-stdlib/random.py
+++ b/common/py3-stdlib/random.py
@@ -1,5 +1,9 @@
 """Random variable generators.
 
+    bytes
+    -----
+           uniform bytes (values between 0 and 255)
+
     integers
     --------
            uniform within range
@@ -37,14 +41,20 @@
 
 """
 
+# Translated by Guido van Rossum from C source provided by
+# Adrian Baddeley.  Adapted by Raymond Hettinger for use with
+# the Mersenne Twister  and os.urandom() core generators.
+
 from warnings import warn as _warn
 from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
 from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
+from math import tau as TWOPI, floor as _floor
 from os import urandom as _urandom
 from _collections_abc import Set as _Set, Sequence as _Sequence
 from itertools import accumulate as _accumulate, repeat as _repeat
 from bisect import bisect as _bisect
 import os as _os
+import _random
 
 try:
     # hashlib is pretty heavy to load, try lean internal module first
@@ -53,28 +63,40 @@
     # fallback to official implementation
     from hashlib import sha512 as _sha512
 
+__all__ = [
+    "Random",
+    "SystemRandom",
+    "betavariate",
+    "choice",
+    "choices",
+    "expovariate",
+    "gammavariate",
+    "gauss",
+    "getrandbits",
+    "getstate",
+    "lognormvariate",
+    "normalvariate",
+    "paretovariate",
+    "randint",
+    "random",
+    "randrange",
+    "sample",
+    "seed",
+    "setstate",
+    "shuffle",
+    "triangular",
+    "uniform",
+    "vonmisesvariate",
+    "weibullvariate",
+]
 
-__all__ = ["Random","seed","random","uniform","randint","choice","sample",
-           "randrange","shuffle","normalvariate","lognormvariate",
-           "expovariate","vonmisesvariate","gammavariate","triangular",
-           "gauss","betavariate","paretovariate","weibullvariate",
-           "getstate","setstate", "getrandbits", "choices",
-           "SystemRandom"]
-
-NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0)
-TWOPI = 2.0*_pi
+NV_MAGICCONST = 4 * _exp(-0.5) / _sqrt(2.0)
 LOG4 = _log(4.0)
 SG_MAGICCONST = 1.0 + _log(4.5)
 BPF = 53        # Number of bits in a float
-RECIP_BPF = 2**-BPF
+RECIP_BPF = 2 ** -BPF
 
 
-# Translated by Guido van Rossum from C source provided by
-# Adrian Baddeley.  Adapted by Raymond Hettinger for use with
-# the Mersenne Twister  and os.urandom() core generators.
-
-import _random
-
 class Random(_random.Random):
     """Random number generator base class used by bound module functions.
 
@@ -100,28 +122,11 @@
         self.seed(x)
         self.gauss_next = None
 
-    def __init_subclass__(cls, /, **kwargs):
-        """Control how subclasses generate random integers.
-
-        The algorithm a subclass can use depends on the random() and/or
-        getrandbits() implementation available to it and determines
-        whether it can generate random integers from arbitrarily large
-        ranges.
-        """
-
-        for c in cls.__mro__:
-            if '_randbelow' in c.__dict__:
-                # just inherit it
-                break
-            if 'getrandbits' in c.__dict__:
-                cls._randbelow = cls._randbelow_with_getrandbits
-                break
-            if 'random' in c.__dict__:
-                cls._randbelow = cls._randbelow_without_getrandbits
-                break
-
     def seed(self, a=None, version=2):
-        """Initialize internal state from hashable object.
+        """Initialize internal state from a seed.
+
+        The only supported seed types are None, int, float,
+        str, bytes, and bytearray.
 
         None or no argument seeds from current time or from an operating
         system specific randomness source if available.
@@ -143,12 +148,20 @@
             x ^= len(a)
             a = -2 if x == -1 else x
 
-        if version == 2 and isinstance(a, (str, bytes, bytearray)):
+        elif version == 2 and isinstance(a, (str, bytes, bytearray)):
             if isinstance(a, str):
                 a = a.encode()
             a += _sha512(a).digest()
             a = int.from_bytes(a, 'big')
 
+        elif not isinstance(a, (type(None), int, float, str, bytes, bytearray)):
+            _warn('Seeding based on hashing is deprecated\n'
+                  'since Python 3.9 and will be removed in a subsequent '
+                  'version. The only\n'
+                  'supported seed types are: None, '
+                  'int, float, str, bytes, and bytearray.',
+                  DeprecationWarning, 2)
+
         super().seed(a)
         self.gauss_next = None
 
@@ -169,7 +182,7 @@
             #   really unsigned 32-bit ints, so we convert negative ints from
             #   version 2 to positive longs for version 3.
             try:
-                internalstate = tuple(x % (2**32) for x in internalstate)
+                internalstate = tuple(x % (2 ** 32) for x in internalstate)
             except ValueError as e:
                 raise TypeError from e
             super().setstate(internalstate)
@@ -178,15 +191,18 @@
                              "Random.setstate() of version %s" %
                              (version, self.VERSION))
 
-## ---- Methods below this point do not need to be overridden when
-## ---- subclassing for the purpose of using a different core generator.
 
-## -------------------- pickle support  -------------------
+    ## -------------------------------------------------------
+    ## ---- Methods below this point do not need to be overridden or extended
+    ## ---- when subclassing for the purpose of using a different core generator.
+
+
+    ## -------------------- pickle support  -------------------
 
     # Issue 17489: Since __reduce__ was defined to fix #759889 this is no
     # longer called; we leave it here because it has been here since random was
     # rewritten back in 2001 and why risk breaking something.
-    def __getstate__(self): # for pickle
+    def __getstate__(self):  # for pickle
         return self.getstate()
 
     def __setstate__(self, state):  # for pickle
@@ -195,9 +211,83 @@
     def __reduce__(self):
         return self.__class__, (), self.getstate()
 
-## -------------------- integer methods  -------------------
 
-    def randrange(self, start, stop=None, step=1, _int=int):
+    ## ---- internal support method for evenly distributed integers ----
+
+    def __init_subclass__(cls, /, **kwargs):
+        """Control how subclasses generate random integers.
+
+        The algorithm a subclass can use depends on the random() and/or
+        getrandbits() implementation available to it and determines
+        whether it can generate random integers from arbitrarily large
+        ranges.
+        """
+
+        for c in cls.__mro__:
+            if '_randbelow' in c.__dict__:
+                # just inherit it
+                break
+            if 'getrandbits' in c.__dict__:
+                cls._randbelow = cls._randbelow_with_getrandbits
+                break
+            if 'random' in c.__dict__:
+                cls._randbelow = cls._randbelow_without_getrandbits
+                break
+
+    def _randbelow_with_getrandbits(self, n):
+        "Return a random int in the range [0,n).  Returns 0 if n==0."
+
+        if not n:
+            return 0
+        getrandbits = self.getrandbits
+        k = n.bit_length()  # don't use (n-1) here because n can be 1
+        r = getrandbits(k)  # 0 <= r < 2**k
+        while r >= n:
+            r = getrandbits(k)
+        return r
+
+    def _randbelow_without_getrandbits(self, n, maxsize=1<<BPF):
+        """Return a random int in the range [0,n).  Returns 0 if n==0.
+
+        The implementation does not use getrandbits, but only random.
+        """
+
+        random = self.random
+        if n >= maxsize:
+            _warn("Underlying random() generator does not supply \n"
+                "enough bits to choose from a population range this large.\n"
+                "To remove the range limitation, add a getrandbits() method.")
+            return _floor(random() * n)
+        if n == 0:
+            return 0
+        rem = maxsize % n
+        limit = (maxsize - rem) / maxsize   # int(limit * maxsize) % n == 0
+        r = random()
+        while r >= limit:
+            r = random()
+        return _floor(r * maxsize) % n
+
+    _randbelow = _randbelow_with_getrandbits
+
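Both _randbelow variants above rely on rejection sampling to avoid modulo bias; a standalone sketch of the getrandbits form:

    import random

    def randbelow(n):
        # Draw n.bit_length() bits and retry until the value lands in
        # [0, n), keeping every outcome equally likely (no modulo bias).
        if not n:
            return 0
        k = n.bit_length()
        r = random.getrandbits(k)
        while r >= n:
            r = random.getrandbits(k)
        return r

    assert 0 <= randbelow(10) < 10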
+
+    ## --------------------------------------------------------
+    ## ---- Methods below this point generate custom distributions
+    ## ---- based on the methods defined above.  They do not
+    ## ---- directly touch the underlying generator and only
+    ## ---- access randomness through the methods:  random(),
+    ## ---- getrandbits(), or _randbelow().
+
+
+    ## -------------------- bytes methods ---------------------
+
+    def randbytes(self, n):
+        """Generate n random bytes."""
+        return self.getrandbits(n * 8).to_bytes(n, 'little')
+
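randbytes() is a thin wrapper over getrandbits(); an illustrative check (Python 3.9+):

    import random

    data = random.randbytes(4)
    assert len(data) == 4
    # Same construction as the method above:
    assert len(random.getrandbits(32).to_bytes(4, 'little')) == 4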
+
+    ## -------------------- integer methods  -------------------
+
+    def randrange(self, start, stop=None, step=1):
         """Choose a random item from range(start, stop[, step]).
 
         This fixes the problem with randint() which includes the
@@ -207,7 +297,7 @@
 
         # This code is a bit messy to make it fast for the
         # common case while still doing adequate error checking.
-        istart = _int(start)
+        istart = int(start)
         if istart != start:
             raise ValueError("non-integer arg 1 for randrange()")
         if stop is None:
@@ -216,7 +306,7 @@
             raise ValueError("empty range for randrange()")
 
         # stop argument supplied.
-        istop = _int(stop)
+        istop = int(stop)
         if istop != stop:
             raise ValueError("non-integer stop for randrange()")
         width = istop - istart
@@ -226,7 +316,7 @@
             raise ValueError("empty range for randrange() (%d, %d, %d)" % (istart, istop, width))
 
         # Non-unit step argument supplied.
-        istep = _int(step)
+        istep = int(step)
         if istep != step:
             raise ValueError("non-integer step for randrange()")
         if istep > 0:
@@ -239,7 +329,7 @@
         if n <= 0:
             raise ValueError("empty range for randrange()")
 
-        return istart + istep*self._randbelow(n)
+        return istart + istep * self._randbelow(n)
 
     def randint(self, a, b):
         """Return random integer in range [a, b], including both end points.
@@ -247,48 +337,13 @@
 
         return self.randrange(a, b+1)
 
-    def _randbelow_with_getrandbits(self, n):
-        "Return a random int in the range [0,n).  Raises ValueError if n==0."
 
-        getrandbits = self.getrandbits
-        k = n.bit_length()  # don't use (n-1) here because n can be 1
-        r = getrandbits(k)          # 0 <= r < 2**k
-        while r >= n:
-            r = getrandbits(k)
-        return r
-
-    def _randbelow_without_getrandbits(self, n, int=int, maxsize=1<<BPF):
-        """Return a random int in the range [0,n).  Raises ValueError if n==0.
-
-        The implementation does not use getrandbits, but only random.
-        """
-
-        random = self.random
-        if n >= maxsize:
-            _warn("Underlying random() generator does not supply \n"
-                "enough bits to choose from a population range this large.\n"
-                "To remove the range limitation, add a getrandbits() method.")
-            return int(random() * n)
-        if n == 0:
-            raise ValueError("Boundary cannot be zero")
-        rem = maxsize % n
-        limit = (maxsize - rem) / maxsize   # int(limit * maxsize) % n == 0
-        r = random()
-        while r >= limit:
-            r = random()
-        return int(r*maxsize) % n
-
-    _randbelow = _randbelow_with_getrandbits
-
-## -------------------- sequence methods  -------------------
+    ## -------------------- sequence methods  -------------------
 
     def choice(self, seq):
         """Choose a random element from a non-empty sequence."""
-        try:
-            i = self._randbelow(len(seq))
-        except ValueError:
-            raise IndexError('Cannot choose from an empty sequence') from None
-        return seq[i]
+        # raises IndexError if seq is empty
+        return seq[self._randbelow(len(seq))]
 
     def shuffle(self, x, random=None):
         """Shuffle list x in place, and return None.
@@ -303,16 +358,20 @@
             randbelow = self._randbelow
             for i in reversed(range(1, len(x))):
                 # pick an element in x[:i+1] with which to exchange x[i]
-                j = randbelow(i+1)
+                j = randbelow(i + 1)
                 x[i], x[j] = x[j], x[i]
         else:
-            _int = int
+            _warn('The *random* parameter to shuffle() has been deprecated\n'
+                  'since Python 3.9 and will be removed in a subsequent '
+                  'version.',
+                  DeprecationWarning, 2)
+            floor = _floor
             for i in reversed(range(1, len(x))):
                 # pick an element in x[:i+1] with which to exchange x[i]
-                j = _int(random() * (i+1))
+                j = floor(random() * (i + 1))
                 x[i], x[j] = x[j], x[i]
 
-    def sample(self, population, k):
+    def sample(self, population, k, *, counts=None):
         """Chooses k unique random elements from a population sequence or set.
 
         Returns a new list containing elements from the population while
@@ -325,9 +384,21 @@
         population contains repeats, then each occurrence is a possible
         selection in the sample.
 
-        To choose a sample in a range of integers, use range as an argument.
-        This is especially fast and space efficient for sampling from a
-        large population:   sample(range(10000000), 60)
+        Repeated elements can be specified one at a time or with the optional
+        counts parameter.  For example:
+
+            sample(['red', 'blue'], counts=[4, 2], k=5)
+
+        is equivalent to:
+
+            sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5)
+
+        To choose a sample from a range of integers, use range() for the
+        population argument.  This is especially fast and space efficient
+        for sampling from a large population:
+
+            sample(range(10000000), 60)
+
         """
 
         # Sampling without replacement entails tracking either potential
@@ -354,24 +425,40 @@
         # causing them to eat more entropy than necessary.
 
         if isinstance(population, _Set):
+            _warn('Sampling from a set is deprecated\n'
+                  'since Python 3.9 and will be removed in a subsequent version.',
+                  DeprecationWarning, 2)
             population = tuple(population)
         if not isinstance(population, _Sequence):
-            raise TypeError("Population must be a sequence or set.  For dicts, use list(d).")
-        randbelow = self._randbelow
+            raise TypeError("Population must be a sequence.  For dicts or sets, use sorted(d).")
         n = len(population)
+        if counts is not None:
+            cum_counts = list(_accumulate(counts))
+            if len(cum_counts) != n:
+                raise ValueError('The number of counts does not match the population')
+            total = cum_counts.pop()
+            if not isinstance(total, int):
+                raise TypeError('Counts must be integers')
+            if total <= 0:
+                raise ValueError('Total of counts must be greater than zero')
+            selections = sample(range(total), k=k)
+            bisect = _bisect
+            return [population[bisect(cum_counts, s)] for s in selections]
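The counts branch above maps each flat selection index back to its element via cumulative counts and bisect; a standalone sketch with illustrative values:

    from bisect import bisect
    from itertools import accumulate

    population = ['red', 'blue']
    counts = [4, 2]                        # 4 reds, 2 blues
    cum_counts = list(accumulate(counts))  # [4, 6]
    total = cum_counts.pop()               # 6; cum_counts is now [4]
    assert population[bisect(cum_counts, 3)] == 'red'   # flat indexes 0-3
    assert population[bisect(cum_counts, 4)] == 'blue'  # flat indexes 4-5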
+        randbelow = self._randbelow
         if not 0 <= k <= n:
             raise ValueError("Sample larger than population or is negative")
         result = [None] * k
         setsize = 21        # size of a small set minus size of an empty list
         if k > 5:
-            setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets
+            setsize += 4 ** _ceil(_log(k * 3, 4))  # table size for big sets
         if n <= setsize:
-            # An n-length list is smaller than a k-length set
+            # An n-length list is smaller than a k-length set.
+            # Invariant:  non-selected at pool[0 : n-i]
             pool = list(population)
-            for i in range(k):         # invariant:  non-selected at [0,n-i)
-                j = randbelow(n-i)
+            for i in range(k):
+                j = randbelow(n - i)
                 result[i] = pool[j]
-                pool[j] = pool[n-i-1]   # move non-selected item into vacancy
+                pool[j] = pool[n - i - 1]  # move non-selected item into vacancy
         else:
             selected = set()
             selected_add = selected.add
@@ -394,29 +481,28 @@
         n = len(population)
         if cum_weights is None:
             if weights is None:
-                _int = int
+                floor = _floor
                 n += 0.0    # convert to float for a small speed improvement
-                return [population[_int(random() * n)] for i in _repeat(None, k)]
+                return [population[floor(random() * n)] for i in _repeat(None, k)]
             cum_weights = list(_accumulate(weights))
         elif weights is not None:
             raise TypeError('Cannot specify both weights and cumulative weights')
         if len(cum_weights) != n:
             raise ValueError('The number of weights does not match the population')
-        bisect = _bisect
         total = cum_weights[-1] + 0.0   # convert to float
+        if total <= 0.0:
+            raise ValueError('Total of weights must be greater than zero')
+        bisect = _bisect
         hi = n - 1
         return [population[bisect(cum_weights, random() * total, 0, hi)]
                 for i in _repeat(None, k)]
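choices() reduces each weighted draw to a single bisect over cumulative weights; a sketch of one draw with illustrative weights:

    from bisect import bisect
    from itertools import accumulate
    import random

    population = ['a', 'b', 'c']
    cum_weights = list(accumulate([1, 2, 7]))   # [1, 3, 10]
    total = cum_weights[-1]
    hi = len(population) - 1
    pick = population[bisect(cum_weights, random.random() * total, 0, hi)]
    assert pick in population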
 
-## -------------------- real-valued distributions  -------------------
 
-## -------------------- uniform distribution -------------------
+    ## -------------------- real-valued distributions  -------------------
 
     def uniform(self, a, b):
         "Get a random number in the range [a, b) or [a, b] depending on rounding."
-        return a + (b-a) * self.random()
-
-## -------------------- triangular --------------------
+        return a + (b - a) * self.random()
 
     def triangular(self, low=0.0, high=1.0, mode=None):
         """Triangular distribution.
@@ -438,180 +524,26 @@
             low, high = high, low
         return low + (high - low) * _sqrt(u * c)
 
-## -------------------- normal distribution --------------------
-
     def normalvariate(self, mu, sigma):
         """Normal distribution.
 
         mu is the mean, and sigma is the standard deviation.
 
         """
-        # mu = mean, sigma = standard deviation
-
         # Uses Kinderman and Monahan method. Reference: Kinderman,
         # A.J. and Monahan, J.F., "Computer generation of random
         # variables using the ratio of uniform deviates", ACM Trans
         # Math Software, 3, (1977), pp257-260.
 
         random = self.random
-        while 1:
+        while True:
             u1 = random()
             u2 = 1.0 - random()
-            z = NV_MAGICCONST*(u1-0.5)/u2
-            zz = z*z/4.0
+            z = NV_MAGICCONST * (u1 - 0.5) / u2
+            zz = z * z / 4.0
             if zz <= -_log(u2):
                 break
-        return mu + z*sigma
-
-## -------------------- lognormal distribution --------------------
-
-    def lognormvariate(self, mu, sigma):
-        """Log normal distribution.
-
-        If you take the natural logarithm of this distribution, you'll get a
-        normal distribution with mean mu and standard deviation sigma.
-        mu can have any value, and sigma must be greater than zero.
-
-        """
-        return _exp(self.normalvariate(mu, sigma))
-
-## -------------------- exponential distribution --------------------
-
-    def expovariate(self, lambd):
-        """Exponential distribution.
-
-        lambd is 1.0 divided by the desired mean.  It should be
-        nonzero.  (The parameter would be called "lambda", but that is
-        a reserved word in Python.)  Returned values range from 0 to
-        positive infinity if lambd is positive, and from negative
-        infinity to 0 if lambd is negative.
-
-        """
-        # lambd: rate lambd = 1/mean
-        # ('lambda' is a Python reserved word)
-
-        # we use 1-random() instead of random() to preclude the
-        # possibility of taking the log of zero.
-        return -_log(1.0 - self.random())/lambd
-
-## -------------------- von Mises distribution --------------------
-
-    def vonmisesvariate(self, mu, kappa):
-        """Circular data distribution.
-
-        mu is the mean angle, expressed in radians between 0 and 2*pi, and
-        kappa is the concentration parameter, which must be greater than or
-        equal to zero.  If kappa is equal to zero, this distribution reduces
-        to a uniform random angle over the range 0 to 2*pi.
-
-        """
-        # mu:    mean angle (in radians between 0 and 2*pi)
-        # kappa: concentration parameter kappa (>= 0)
-        # if kappa = 0 generate uniform random angle
-
-        # Based upon an algorithm published in: Fisher, N.I.,
-        # "Statistical Analysis of Circular Data", Cambridge
-        # University Press, 1993.
-
-        # Thanks to Magnus Kessler for a correction to the
-        # implementation of step 4.
-
-        random = self.random
-        if kappa <= 1e-6:
-            return TWOPI * random()
-
-        s = 0.5 / kappa
-        r = s + _sqrt(1.0 + s * s)
-
-        while 1:
-            u1 = random()
-            z = _cos(_pi * u1)
-
-            d = z / (r + z)
-            u2 = random()
-            if u2 < 1.0 - d * d or u2 <= (1.0 - d) * _exp(d):
-                break
-
-        q = 1.0 / r
-        f = (q + z) / (1.0 + q * z)
-        u3 = random()
-        if u3 > 0.5:
-            theta = (mu + _acos(f)) % TWOPI
-        else:
-            theta = (mu - _acos(f)) % TWOPI
-
-        return theta
-
-## -------------------- gamma distribution --------------------
-
-    def gammavariate(self, alpha, beta):
-        """Gamma distribution.  Not the gamma function!
-
-        Conditions on the parameters are alpha > 0 and beta > 0.
-
-        The probability distribution function is:
-
-                    x ** (alpha - 1) * math.exp(-x / beta)
-          pdf(x) =  --------------------------------------
-                      math.gamma(alpha) * beta ** alpha
-
-        """
-
-        # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2
-
-        # Warning: a few older sources define the gamma distribution in terms
-        # of alpha > -1.0
-        if alpha <= 0.0 or beta <= 0.0:
-            raise ValueError('gammavariate: alpha and beta must be > 0.0')
-
-        random = self.random
-        if alpha > 1.0:
-
-            # Uses R.C.H. Cheng, "The generation of Gamma
-            # variables with non-integral shape parameters",
-            # Applied Statistics, (1977), 26, No. 1, p71-74
-
-            ainv = _sqrt(2.0 * alpha - 1.0)
-            bbb = alpha - LOG4
-            ccc = alpha + ainv
-
-            while 1:
-                u1 = random()
-                if not 1e-7 < u1 < .9999999:
-                    continue
-                u2 = 1.0 - random()
-                v = _log(u1/(1.0-u1))/ainv
-                x = alpha*_exp(v)
-                z = u1*u1*u2
-                r = bbb+ccc*v-x
-                if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z):
-                    return x * beta
-
-        elif alpha == 1.0:
-            # expovariate(1/beta)
-            return -_log(1.0 - random()) * beta
-
-        else:   # alpha is between 0 and 1 (exclusive)
-
-            # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
-
-            while 1:
-                u = random()
-                b = (_e + alpha)/_e
-                p = b*u
-                if p <= 1.0:
-                    x = p ** (1.0/alpha)
-                else:
-                    x = -_log((b-p)/alpha)
-                u1 = random()
-                if p > 1.0:
-                    if u1 <= x ** (alpha - 1.0):
-                        break
-                elif u1 <= _exp(-x):
-                    break
-            return x * beta
-
-## -------------------- Gauss (faster alternative) --------------------
+        return mu + z * sigma
 
     def gauss(self, mu, sigma):
         """Gaussian distribution.
@@ -622,7 +554,6 @@
         Not thread-safe without a lock around calls.
 
         """
-
         # When x and y are two variables from [0, 1), uniformly
         # distributed, then
         #
@@ -650,21 +581,141 @@
             z = _cos(x2pi) * g2rad
             self.gauss_next = _sin(x2pi) * g2rad
 
-        return mu + z*sigma
+        return mu + z * sigma
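gauss() produces two independent normals per pass through the generator (the second is cached as gauss_next); a standalone sketch of that step:

    from math import cos, sin, sqrt, log, tau
    import random

    # One Box-Muller step: two uniforms in, two independent normals out.
    x2pi = random.random() * tau
    g2rad = sqrt(-2.0 * log(1.0 - random.random()))
    z0 = cos(x2pi) * g2rad
    z1 = sin(x2pi) * g2rad   # gauss() would cache this one as gauss_next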
 
-## -------------------- beta --------------------
-## See
-## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
-## for Ivan Frohne's insightful analysis of why the original implementation:
-##
-##    def betavariate(self, alpha, beta):
-##        # Discrete Event Simulation in C, pp 87-88.
-##
-##        y = self.expovariate(alpha)
-##        z = self.expovariate(1.0/beta)
-##        return z/(y+z)
-##
-## was dead wrong, and how it probably got that way.
+    def lognormvariate(self, mu, sigma):
+        """Log normal distribution.
+
+        If you take the natural logarithm of this distribution, you'll get a
+        normal distribution with mean mu and standard deviation sigma.
+        mu can have any value, and sigma must be greater than zero.
+
+        """
+        return _exp(self.normalvariate(mu, sigma))
+
+    def expovariate(self, lambd):
+        """Exponential distribution.
+
+        lambd is 1.0 divided by the desired mean.  It should be
+        nonzero.  (The parameter would be called "lambda", but that is
+        a reserved word in Python.)  Returned values range from 0 to
+        positive infinity if lambd is positive, and from negative
+        infinity to 0 if lambd is negative.
+
+        """
+        # lambd: rate lambd = 1/mean
+        # ('lambda' is a Python reserved word)
+
+        # we use 1-random() instead of random() to preclude the
+        # possibility of taking the log of zero.
+        return -_log(1.0 - self.random()) / lambd
+
+    def vonmisesvariate(self, mu, kappa):
+        """Circular data distribution.
+
+        mu is the mean angle, expressed in radians between 0 and 2*pi, and
+        kappa is the concentration parameter, which must be greater than or
+        equal to zero.  If kappa is equal to zero, this distribution reduces
+        to a uniform random angle over the range 0 to 2*pi.
+
+        """
+        # Based upon an algorithm published in: Fisher, N.I.,
+        # "Statistical Analysis of Circular Data", Cambridge
+        # University Press, 1993.
+
+        # Thanks to Magnus Kessler for a correction to the
+        # implementation of step 4.
+
+        random = self.random
+        if kappa <= 1e-6:
+            return TWOPI * random()
+
+        s = 0.5 / kappa
+        r = s + _sqrt(1.0 + s * s)
+
+        while True:
+            u1 = random()
+            z = _cos(_pi * u1)
+
+            d = z / (r + z)
+            u2 = random()
+            if u2 < 1.0 - d * d or u2 <= (1.0 - d) * _exp(d):
+                break
+
+        q = 1.0 / r
+        f = (q + z) / (1.0 + q * z)
+        u3 = random()
+        if u3 > 0.5:
+            theta = (mu + _acos(f)) % TWOPI
+        else:
+            theta = (mu - _acos(f)) % TWOPI
+
+        return theta
+
+    def gammavariate(self, alpha, beta):
+        """Gamma distribution.  Not the gamma function!
+
+        Conditions on the parameters are alpha > 0 and beta > 0.
+
+        The probability distribution function is:
+
+                    x ** (alpha - 1) * math.exp(-x / beta)
+          pdf(x) =  --------------------------------------
+                      math.gamma(alpha) * beta ** alpha
+
+        """
+        # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2
+
+        # Warning: a few older sources define the gamma distribution in terms
+        # of alpha > -1.0
+        if alpha <= 0.0 or beta <= 0.0:
+            raise ValueError('gammavariate: alpha and beta must be > 0.0')
+
+        random = self.random
+        if alpha > 1.0:
+
+            # Uses R.C.H. Cheng, "The generation of Gamma
+            # variables with non-integral shape parameters",
+            # Applied Statistics, (1977), 26, No. 1, p71-74
+
+            ainv = _sqrt(2.0 * alpha - 1.0)
+            bbb = alpha - LOG4
+            ccc = alpha + ainv
+
+            while True:
+                u1 = random()
+                if not 1e-7 < u1 < 0.9999999:
+                    continue
+                u2 = 1.0 - random()
+                v = _log(u1 / (1.0 - u1)) / ainv
+                x = alpha * _exp(v)
+                z = u1 * u1 * u2
+                r = bbb + ccc * v - x
+                if r + SG_MAGICCONST - 4.5 * z >= 0.0 or r >= _log(z):
+                    return x * beta
+
+        elif alpha == 1.0:
+            # expovariate(1/beta)
+            return -_log(1.0 - random()) * beta
+
+        else:
+            # alpha is between 0 and 1 (exclusive)
+            # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
+            while True:
+                u = random()
+                b = (_e + alpha) / _e
+                p = b * u
+                if p <= 1.0:
+                    x = p ** (1.0 / alpha)
+                else:
+                    x = -_log((b - p) / alpha)
+                u1 = random()
+                if p > 1.0:
+                    if u1 <= x ** (alpha - 1.0):
+                        break
+                elif u1 <= _exp(-x):
+                    break
+            return x * beta
 
     def betavariate(self, alpha, beta):
         """Beta distribution.
@@ -673,25 +724,32 @@
         Returned values range between 0 and 1.
 
         """
+        ## See
+        ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
+        ## for Ivan Frohne's insightful analysis of why the original implementation:
+        ##
+        ##    def betavariate(self, alpha, beta):
+        ##        # Discrete Event Simulation in C, pp 87-88.
+        ##
+        ##        y = self.expovariate(alpha)
+        ##        z = self.expovariate(1.0/beta)
+        ##        return z/(y+z)
+        ##
+        ## was dead wrong, and how it probably got that way.
 
         # This version due to Janne Sinkkonen, and matches all the std
         # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
         y = self.gammavariate(alpha, 1.0)
-        if y == 0:
-            return 0.0
-        else:
+        if y:
             return y / (y + self.gammavariate(beta, 1.0))
-
-## -------------------- Pareto --------------------
+        return 0.0
 
     def paretovariate(self, alpha):
         """Pareto distribution.  alpha is the shape parameter."""
         # Jain, pg. 495
 
         u = 1.0 - self.random()
-        return 1.0 / u ** (1.0/alpha)
-
-## -------------------- Weibull --------------------
+        return 1.0 / u ** (1.0 / alpha)
 
     def weibullvariate(self, alpha, beta):
         """Weibull distribution.
@@ -702,16 +760,20 @@
         # Jain, pg. 499; bug fix courtesy Bill Arms
 
         u = 1.0 - self.random()
-        return alpha * (-_log(u)) ** (1.0/beta)
+        return alpha * (-_log(u)) ** (1.0 / beta)
 
+
+## ------------------------------------------------------------------
 ## --------------- Operating System Random Source  ------------------
 
+
 class SystemRandom(Random):
     """Alternate random number generator using sources provided
     by the operating system (such as /dev/urandom on Unix or
     CryptGenRandom on Windows).
 
      Not available on all systems (see os.urandom() for details).
+
     """
 
     def random(self):
@@ -720,12 +782,18 @@
 
     def getrandbits(self, k):
         """getrandbits(k) -> x.  Generates an int with k random bits."""
-        if k <= 0:
-            raise ValueError('number of bits must be greater than zero')
+        if k < 0:
+            raise ValueError('number of bits must be non-negative')
         numbytes = (k + 7) // 8                       # bits / 8 and rounded up
         x = int.from_bytes(_urandom(numbytes), 'big')
         return x >> (numbytes * 8 - k)                # trim excess bits
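The byte-rounding and bit-trimming above are easy to verify in isolation (illustrative k):

    from os import urandom

    k = 10
    numbytes = (k + 7) // 8          # 2 bytes cover 10 bits
    x = int.from_bytes(urandom(numbytes), 'big')
    x >>= numbytes * 8 - k           # drop the 6 excess high bits
    assert 0 <= x < 2 ** k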
 
+    def randbytes(self, n):
+        """Generate n random bytes."""
+        # os.urandom(n) fails with ValueError for n < 0
+        # and returns an empty bytes string for n == 0.
+        return _urandom(n)
+
     def seed(self, *args, **kwds):
         "Stub method.  Not used for a system random number generator."
         return None
@@ -735,51 +803,11 @@
         raise NotImplementedError('System entropy source does not have state.')
     getstate = setstate = _notimplemented
 
-## -------------------- test program --------------------
 
-def _test_generator(n, func, args):
-    import time
-    print(n, 'times', func.__name__)
-    total = 0.0
-    sqsum = 0.0
-    smallest = 1e10
-    largest = -1e10
-    t0 = time.perf_counter()
-    for i in range(n):
-        x = func(*args)
-        total += x
-        sqsum = sqsum + x*x
-        smallest = min(x, smallest)
-        largest = max(x, largest)
-    t1 = time.perf_counter()
-    print(round(t1-t0, 3), 'sec,', end=' ')
-    avg = total/n
-    stddev = _sqrt(sqsum/n - avg*avg)
-    print('avg %g, stddev %g, min %g, max %g\n' % \
-              (avg, stddev, smallest, largest))
-
-
-def _test(N=2000):
-    _test_generator(N, random, ())
-    _test_generator(N, normalvariate, (0.0, 1.0))
-    _test_generator(N, lognormvariate, (0.0, 1.0))
-    _test_generator(N, vonmisesvariate, (0.0, 1.0))
-    _test_generator(N, gammavariate, (0.01, 1.0))
-    _test_generator(N, gammavariate, (0.1, 1.0))
-    _test_generator(N, gammavariate, (0.1, 2.0))
-    _test_generator(N, gammavariate, (0.5, 1.0))
-    _test_generator(N, gammavariate, (0.9, 1.0))
-    _test_generator(N, gammavariate, (1.0, 1.0))
-    _test_generator(N, gammavariate, (2.0, 1.0))
-    _test_generator(N, gammavariate, (20.0, 1.0))
-    _test_generator(N, gammavariate, (200.0, 1.0))
-    _test_generator(N, gauss, (0.0, 1.0))
-    _test_generator(N, betavariate, (3.0, 3.0))
-    _test_generator(N, triangular, (0.0, 1.0, 1.0/3.0))
-
+# ----------------------------------------------------------------------
 # Create one instance, seeded from current time, and export its methods
 # as module-level functions.  The functions share state across all uses
-#(both in the user's code and in the Python libraries), but that's fine
+# (both in the user's code and in the Python libraries), but that's fine
 # for most programs and is easier for the casual user than making them
 # instantiate their own Random() instance.
 
@@ -806,6 +834,50 @@
 getstate = _inst.getstate
 setstate = _inst.setstate
 getrandbits = _inst.getrandbits
+randbytes = _inst.randbytes
+
+
+## ------------------------------------------------------
+## ----------------- test program -----------------------
+
+def _test_generator(n, func, args):
+    from statistics import stdev, fmean as mean
+    from time import perf_counter
+
+    t0 = perf_counter()
+    data = [func(*args) for i in range(n)]
+    t1 = perf_counter()
+
+    xbar = mean(data)
+    sigma = stdev(data, xbar)
+    low = min(data)
+    high = max(data)
+
+    print(f'{t1 - t0:.3f} sec, {n} times {func.__name__}')
+    print('avg %g, stddev %g, min %g, max %g\n' % (xbar, sigma, low, high))
+
+
+def _test(N=2000):
+    _test_generator(N, random, ())
+    _test_generator(N, normalvariate, (0.0, 1.0))
+    _test_generator(N, lognormvariate, (0.0, 1.0))
+    _test_generator(N, vonmisesvariate, (0.0, 1.0))
+    _test_generator(N, gammavariate, (0.01, 1.0))
+    _test_generator(N, gammavariate, (0.1, 1.0))
+    _test_generator(N, gammavariate, (0.1, 2.0))
+    _test_generator(N, gammavariate, (0.5, 1.0))
+    _test_generator(N, gammavariate, (0.9, 1.0))
+    _test_generator(N, gammavariate, (1.0, 1.0))
+    _test_generator(N, gammavariate, (2.0, 1.0))
+    _test_generator(N, gammavariate, (20.0, 1.0))
+    _test_generator(N, gammavariate, (200.0, 1.0))
+    _test_generator(N, gauss, (0.0, 1.0))
+    _test_generator(N, betavariate, (3.0, 3.0))
+    _test_generator(N, triangular, (0.0, 1.0, 1.0 / 3.0))
+
+
+## ------------------------------------------------------
+## ------------------ fork support  ---------------------
 
 if hasattr(_os, "fork"):
     _os.register_at_fork(after_in_child=_inst.seed)
diff --git a/common/py3-stdlib/runpy.py b/common/py3-stdlib/runpy.py
index 0f54f3e..7e1e1ac 100644
--- a/common/py3-stdlib/runpy.py
+++ b/common/py3-stdlib/runpy.py
@@ -133,6 +133,9 @@
         # importlib, where the latter raises other errors for cases where
         # pkgutil previously raised ImportError
         msg = "Error while finding module specification for {!r} ({}: {})"
+        if mod_name.endswith(".py"):
+            msg += (f". Try using '{mod_name[:-3]}' instead of "
+                    f"'{mod_name}' as the module name.")
         raise error(msg.format(mod_name, type(ex).__name__, ex)) from ex
     if spec is None:
         raise error("No module named %s" % mod_name)
diff --git a/common/py3-stdlib/secrets.py b/common/py3-stdlib/secrets.py
index 1304342..a546efb 100644
--- a/common/py3-stdlib/secrets.py
+++ b/common/py3-stdlib/secrets.py
@@ -14,7 +14,6 @@
 
 import base64
 import binascii
-import os
 
 from hmac import compare_digest
 from random import SystemRandom
@@ -44,7 +43,7 @@
     """
     if nbytes is None:
         nbytes = DEFAULT_ENTROPY
-    return os.urandom(nbytes)
+    return _sysrand.randbytes(nbytes)
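token_bytes() now draws through SystemRandom.randbytes() (os.urandom underneath); usage is unchanged:

    import secrets

    key = secrets.token_bytes(16)    # 16 bytes from the OS entropy source
    assert len(key) == 16
    assert len(secrets.token_hex(16)) == 32   # two hex digits per byte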
 
 def token_hex(nbytes=None):
     """Return a random text string, in hexadecimal.
diff --git a/common/py3-stdlib/selectors.py b/common/py3-stdlib/selectors.py
index a9a0801..bb15a1c 100644
--- a/common/py3-stdlib/selectors.py
+++ b/common/py3-stdlib/selectors.py
@@ -57,6 +57,7 @@
     SelectorKey.data.__doc__ = ('''Optional opaque data associated to this file object.
     For example, this could be used to store a per-client session ID.''')
 
+
 class _SelectorMapping(Mapping):
     """Mapping of file objects to selector keys."""
 
@@ -552,7 +553,10 @@
 
         def select(self, timeout=None):
             timeout = None if timeout is None else max(timeout, 0)
-            max_ev = len(self._fd_to_key)
+            # If max_ev is 0, kqueue will ignore the timeout. For consistent
+            # behavior with the other selector classes, we prevent that here
+            # (using max). See https://bugs.python.org/issue29255
+            max_ev = max(len(self._fd_to_key), 1)
             ready = []
             try:
                 kev_list = self._selector.control(None, max_ev, timeout)
@@ -577,16 +581,39 @@
             super().close()
 
 
+def _can_use(method):
+    """Check if we can use the selector depending upon the
+    operating system."""
+    # Implementation based upon https://github.com/sethmlarson/selectors2/blob/master/selectors2.py
+    selector = getattr(select, method, None)
+    if selector is None:
+        # select module does not implement method
+        return False
+    # check if the OS and Kernel actually support the method. Call may fail with
+    # OSError: [Errno 38] Function not implemented
+    try:
+        selector_obj = selector()
+        if method == 'poll':
+            # check that poll actually works
+            selector_obj.poll(0)
+        else:
+            # close epoll, kqueue, and devpoll fd
+            selector_obj.close()
+        return True
+    except OSError:
+        return False
+
+
 # Choose the best implementation, roughly:
 #    epoll|kqueue|devpoll > poll > select.
 # select() also can't accept a FD > FD_SETSIZE (usually around 1024)
-if 'KqueueSelector' in globals():
+if _can_use('kqueue'):
     DefaultSelector = KqueueSelector
-elif 'EpollSelector' in globals():
+elif _can_use('epoll'):
     DefaultSelector = EpollSelector
-elif 'DevpollSelector' in globals():
+elif _can_use('devpoll'):
     DefaultSelector = DevpollSelector
-elif 'PollSelector' in globals():
+elif _can_use('poll'):
     DefaultSelector = PollSelector
 else:
     DefaultSelector = SelectSelector
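With the runtime probing above, DefaultSelector only names a mechanism the kernel actually implements; typical usage (a Unix-style socketpair shown for illustration):

    import selectors
    import socket

    sel = selectors.DefaultSelector()
    a, b = socket.socketpair()
    sel.register(a, selectors.EVENT_READ, data='hello')
    b.send(b'x')
    for key, events in sel.select(timeout=1):
        assert key.data == 'hello'
    sel.close(); a.close(); b.close()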
diff --git a/common/py3-stdlib/shlex.py b/common/py3-stdlib/shlex.py
index c817274..4801a6c 100644
--- a/common/py3-stdlib/shlex.py
+++ b/common/py3-stdlib/shlex.py
@@ -304,6 +304,10 @@
 
 def split(s, comments=False, posix=True):
     """Split the string *s* using shell-like syntax."""
+    if s is None:
+        import warnings
+        warnings.warn("Passing None for 's' to shlex.split() is deprecated.",
+                      DeprecationWarning, stacklevel=2)
     lex = shlex(s, posix=posix)
     lex.whitespace_split = True
     if not comments:
diff --git a/common/py3-stdlib/shutil.py b/common/py3-stdlib/shutil.py
index 1f05d80..f0e833d 100644
--- a/common/py3-stdlib/shutil.py
+++ b/common/py3-stdlib/shutil.py
@@ -53,6 +53,9 @@
 _USE_CP_SENDFILE = hasattr(os, "sendfile") and sys.platform.startswith("linux")
 _HAS_FCOPYFILE = posix and hasattr(posix, "_fcopyfile")  # macOS
 
+# CMD defaults in Windows 10
+_WIN_DEFAULT_PATHEXT = ".COM;.EXE;.BAT;.CMD;.VBS;.JS;.WS;.MSC"
+
 __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
            "copytree", "move", "rmtree", "Error", "SpecialFileError",
            "ExecError", "make_archive", "get_archive_formats",
@@ -708,7 +711,7 @@
         try:
             fd = os.open(path, os.O_RDONLY)
         except Exception:
-            onerror(os.lstat, path, sys.exc_info())
+            onerror(os.open, path, sys.exc_info())
             return
         try:
             if os.path.samestat(orig_st, os.fstat(fd)):
@@ -741,8 +744,20 @@
 rmtree.avoids_symlink_attacks = _use_fd_functions
 
 def _basename(path):
-    # A basename() variant which first strips the trailing slash, if present.
-    # Thus we always get the last component of the path, even for directories.
+    """A basename() variant which first strips the trailing slash, if present.
+    Thus we always get the last component of the path, even for directories.
+
+    path: Union[PathLike, str]
+
+    e.g.
+    >>> os.path.basename('/bar/foo')
+    'foo'
+    >>> os.path.basename('/bar/foo/')
+    ''
+    >>> _basename('/bar/foo/')
+    'foo'
+    """
+    path = os.fspath(path)
     sep = os.path.sep + (os.path.altsep or '')
     return os.path.basename(path.rstrip(sep))
 
@@ -781,7 +796,10 @@
             os.rename(src, dst)
             return
 
+        # Using _basename instead of os.path.basename is important, as we must
+        # ignore any trailing slash to avoid the basename returning ''
         real_dst = os.path.join(dst, _basename(src))
+
         if os.path.exists(real_dst):
             raise Error("Destination path '%s' already exists" % real_dst)
     try:
@@ -1400,7 +1418,9 @@
             path.insert(0, curdir)
 
         # PATHEXT is necessary to check on Windows.
-        pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
+        pathext_source = os.getenv("PATHEXT") or _WIN_DEFAULT_PATHEXT
+        pathext = [ext for ext in pathext_source.split(os.pathsep) if ext]
+
         if use_bytes:
             pathext = [os.fsencode(ext) for ext in pathext]
         # See if the given file matches any of the expected path extensions.
diff --git a/common/py3-stdlib/site.py b/common/py3-stdlib/site.py
index 9fa21cc..9e617af 100644
--- a/common/py3-stdlib/site.py
+++ b/common/py3-stdlib/site.py
@@ -334,13 +334,22 @@
             continue
         seen.add(prefix)
 
+        libdirs = [sys.platlibdir]
+        if sys.platlibdir != "lib":
+            libdirs.append("lib")
+
         if os.sep == '/':
-            sitepackages.append(os.path.join(prefix, "lib",
-                                        "python%d.%d" % sys.version_info[:2],
-                                        "site-packages"))
+            for libdir in libdirs:
+                path = os.path.join(prefix, libdir,
+                                    "python%d.%d" % sys.version_info[:2],
+                                    "site-packages")
+                sitepackages.append(path)
         else:
             sitepackages.append(prefix)
-            sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
+
+            for libdir in libdirs:
+                path = os.path.join(prefix, libdir, "site-packages")
+                sitepackages.append(path)
     return sitepackages
 
 def addsitepackages(known_paths, prefixes=None):
@@ -590,7 +599,7 @@
     Exit codes with --user-base or --user-site:
       0 - user site directory is enabled
       1 - user site directory is disabled by user
-      2 - uses site directory is disabled by super user
+      2 - user site directory is disabled by super user
           or for security reasons
      >2 - unknown error
     """
diff --git a/common/py3-stdlib/smtpd.py b/common/py3-stdlib/smtpd.py
index 8103ca9..8f1a22e 100755
--- a/common/py3-stdlib/smtpd.py
+++ b/common/py3-stdlib/smtpd.py
@@ -779,6 +779,8 @@
 
 class MailmanProxy(PureProxy):
     def __init__(self, *args, **kwargs):
+        warn('MailmanProxy is deprecated and will be removed '
+             'in the future', DeprecationWarning, 2)
         if 'enable_SMTPUTF8' in kwargs and kwargs['enable_SMTPUTF8']:
             raise ValueError("MailmanProxy does not support SMTPUTF8.")
         super(PureProxy, self).__init__(*args, **kwargs)
diff --git a/common/py3-stdlib/smtplib.py b/common/py3-stdlib/smtplib.py
index 8e3d4bf..7808ba0 100755
--- a/common/py3-stdlib/smtplib.py
+++ b/common/py3-stdlib/smtplib.py
@@ -303,6 +303,8 @@
     def _get_socket(self, host, port, timeout):
         # This makes it simpler for SMTP_SSL to use the SMTP connect code
         # and just alter the socket connection bit.
+        if timeout is not None and not timeout:
+            raise ValueError('Non-blocking socket (timeout=0) is not supported')
         if self.debuglevel > 0:
             self._print_debug('connect: to', (host, port), self.source_address)
         return socket.create_connection((host, port), timeout,
@@ -333,8 +335,6 @@
                     raise OSError("nonnumeric port")
         if not port:
             port = self.default_port
-        if self.debuglevel > 0:
-            self._print_debug('connect:', (host, port))
         sys.audit("smtplib.connect", self, host, port)
         self.sock = self._get_socket(host, port, self.timeout)
         self.file = None
@@ -1032,13 +1032,12 @@
                                                      keyfile=keyfile)
             self.context = context
             SMTP.__init__(self, host, port, local_hostname, timeout,
-                    source_address)
+                          source_address)
 
         def _get_socket(self, host, port, timeout):
             if self.debuglevel > 0:
                 self._print_debug('connect:', (host, port))
-            new_socket = socket.create_connection((host, port), timeout,
-                    self.source_address)
+            new_socket = super()._get_socket(host, port, timeout)
             new_socket = self.context.wrap_socket(new_socket,
                                                   server_hostname=self._host)
             return new_socket
@@ -1067,19 +1066,23 @@
     ehlo_msg = "lhlo"
 
     def __init__(self, host='', port=LMTP_PORT, local_hostname=None,
-            source_address=None):
+                 source_address=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
         """Initialize a new instance."""
-        SMTP.__init__(self, host, port, local_hostname=local_hostname,
-                      source_address=source_address)
+        super().__init__(host, port, local_hostname=local_hostname,
+                         source_address=source_address, timeout=timeout)
 
     def connect(self, host='localhost', port=0, source_address=None):
         """Connect to the LMTP daemon, on either a Unix or a TCP socket."""
         if host[0] != '/':
-            return SMTP.connect(self, host, port, source_address=source_address)
+            return super().connect(host, port, source_address=source_address)
+
+        if self.timeout is not None and not self.timeout:
+            raise ValueError('Non-blocking socket (timeout=0) is not supported')
 
         # Handle Unix-domain sockets.
         try:
             self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+            self.sock.settimeout(self.timeout)
             self.file = None
             self.sock.connect(host)
         except OSError:
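
A short sketch of the new timeout validation above: a zero timeout would produce a non-blocking socket, which smtplib cannot drive, so it is now rejected up front. No server is needed to observe this, since the ValueError is raised before any connection attempt:

    import smtplib

    try:
        smtplib.SMTP('localhost', timeout=0)
    except ValueError as exc:
        print(exc)  # Non-blocking socket (timeout=0) is not supported
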
diff --git a/common/py3-stdlib/socket.py b/common/py3-stdlib/socket.py
old mode 100644
new mode 100755
index f83f36d..cafa573
--- a/common/py3-stdlib/socket.py
+++ b/common/py3-stdlib/socket.py
@@ -12,6 +12,8 @@
 socket() -- create a new socket object
 socketpair() -- create a pair of new socket objects [*]
 fromfd() -- create a socket object from an open file descriptor [*]
+send_fds() -- send file descriptors to the socket
+recv_fds() -- receive file descriptors from the socket
 fromshare() -- create a socket object from data received from socket.share() [*]
 gethostname() -- return the current hostname
 gethostbyname() -- map a hostname to its IP number
@@ -104,7 +106,6 @@
     except ValueError:
         return value
 
-_realsocket = socket
 
 # WSA error codes
 if sys.platform.lower().startswith("win"):
@@ -543,6 +544,40 @@
     nfd = dup(fd)
     return socket(family, type, proto, nfd)
 
+if hasattr(_socket.socket, "sendmsg"):
+    import array
+
+    def send_fds(sock, buffers, fds, flags=0, address=None):
+        """ send_fds(sock, buffers, fds[, flags[, address]]) -> integer
+
+        Send the list of file descriptors fds over an AF_UNIX socket.
+        """
+        return sock.sendmsg(buffers, [(_socket.SOL_SOCKET,
+            _socket.SCM_RIGHTS, array.array("i", fds))])
+    __all__.append("send_fds")
+
+if hasattr(_socket.socket, "recvmsg"):
+    import array
+
+    def recv_fds(sock, bufsize, maxfds, flags=0):
+        """ recv_fds(sock, bufsize, maxfds[, flags]) -> (data, list of file
+        descriptors, msg_flags, address)
+
+        Receive up to maxfds file descriptors returning the message
+        data and a list containing the descriptors.
+        """
+        # Array of ints
+        fds = array.array("i")
+        msg, ancdata, flags, addr = sock.recvmsg(bufsize,
+            _socket.CMSG_LEN(maxfds * fds.itemsize))
+        for cmsg_level, cmsg_type, cmsg_data in ancdata:
+            if (cmsg_level == _socket.SOL_SOCKET and cmsg_type == _socket.SCM_RIGHTS):
+                fds.frombytes(cmsg_data[:
+                        len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
+
+        return msg, list(fds), flags, addr
+    __all__.append("recv_fds")
+
 if hasattr(_socket.socket, "share"):
     def fromshare(info):
         """ fromshare(info) -> socket object
diff --git a/common/py3-stdlib/socketserver.py b/common/py3-stdlib/socketserver.py
index 1ad028f..57c1ae6 100644
--- a/common/py3-stdlib/socketserver.py
+++ b/common/py3-stdlib/socketserver.py
@@ -374,7 +374,7 @@
 
         """
         print('-'*40, file=sys.stderr)
-        print('Exception happened during processing of request from',
+        print('Exception occurred during processing of request from',
             client_address, file=sys.stderr)
         import traceback
         traceback.print_exc()
diff --git a/common/py3-stdlib/sqlite3/test/backup.py b/common/py3-stdlib/sqlite3/test/backup.py
index 903bacf..ad1da97 100644
--- a/common/py3-stdlib/sqlite3/test/backup.py
+++ b/common/py3-stdlib/sqlite3/test/backup.py
@@ -36,6 +36,13 @@
         with self.assertRaises(sqlite.ProgrammingError):
             self.cx.backup(bck)
 
+    def test_bad_source_closed_connection(self):
+        bck = sqlite.connect(':memory:')
+        source = sqlite.connect(":memory:")
+        source.close()
+        with self.assertRaises(sqlite.ProgrammingError):
+            source.backup(bck)
+
     def test_bad_target_in_transaction(self):
         bck = sqlite.connect(':memory:')
         bck.execute('CREATE TABLE bar (key INTEGER)')
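
The new test above pins down the fixed behavior: calling backup() on a connection that has already been closed raises ProgrammingError instead of crashing. A standalone sketch of the same check:

    import sqlite3

    src = sqlite3.connect(':memory:')
    dst = sqlite3.connect(':memory:')
    src.close()
    try:
        src.backup(dst)
    except sqlite3.ProgrammingError as exc:
        print(exc)  # e.g. "Cannot operate on a closed database."
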
diff --git a/common/py3-stdlib/sqlite3/test/dbapi.py b/common/py3-stdlib/sqlite3/test/dbapi.py
index 7c259d2..ad9c9f0 100644
--- a/common/py3-stdlib/sqlite3/test/dbapi.py
+++ b/common/py3-stdlib/sqlite3/test/dbapi.py
@@ -230,7 +230,7 @@
             """)
 
     def CheckExecuteWrongSqlArg(self):
-        with self.assertRaises(ValueError):
+        with self.assertRaises(TypeError):
             self.cu.execute(42)
 
     def CheckExecuteArgInt(self):
@@ -276,7 +276,7 @@
         self.assertEqual(row[0], "foo")
 
     def CheckExecuteParamSequence(self):
-        class L(object):
+        class L:
             def __len__(self):
                 return 1
             def __getitem__(self, x):
@@ -288,6 +288,18 @@
         row = self.cu.fetchone()
         self.assertEqual(row[0], "foo")
 
+    def CheckExecuteParamSequenceBadLen(self):
+        # Issue41662: an error raised in __len__() was masked by ProgrammingError.
+        class L:
+            def __len__(self):
+                1/0
+            def __getitem__(slf, x):
+                raise AssertionError
+
+        self.cu.execute("insert into test(name) values ('foo')")
+        with self.assertRaises(ZeroDivisionError):
+            self.cu.execute("select name from test where name=?", L())
+
     def CheckExecuteDictMapping(self):
         self.cu.execute("insert into test(name) values ('foo')")
         self.cu.execute("select name from test where name=:name", {"name": "foo"})
@@ -377,7 +389,7 @@
         self.cu.executemany("insert into test(income) values (?)", mygen())
 
     def CheckExecuteManyWrongSqlArg(self):
-        with self.assertRaises(ValueError):
+        with self.assertRaises(TypeError):
             self.cu.executemany(42, [(3,)])
 
     def CheckExecuteManySelect(self):
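
The updated assertions reflect that, with this stdlib, sqlite3 raises TypeError rather than ValueError for a non-string SQL argument. Sketch:

    import sqlite3

    cur = sqlite3.connect(':memory:').cursor()
    try:
        cur.execute(42)
    except TypeError:
        print('non-string SQL is now a TypeError')
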
diff --git a/common/py3-stdlib/sqlite3/test/regression.py b/common/py3-stdlib/sqlite3/test/regression.py
index ce97655..6aa86d5 100644
--- a/common/py3-stdlib/sqlite3/test/regression.py
+++ b/common/py3-stdlib/sqlite3/test/regression.py
@@ -133,6 +133,19 @@
         con.execute("insert into foo(bar) values (5)")
         con.execute(SELECT)
 
+    def CheckBindMutatingList(self):
+        # Issue41662: Crash when mutating a list of parameters during iteration.
+        class X:
+            def __conform__(self, protocol):
+                parameters.clear()
+                return "..."
+        parameters = [X(), 0]
+        con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES)
+        con.execute("create table foo(bar X, baz integer)")
+        # Should not crash
+        with self.assertRaises(IndexError):
+            con.execute("insert into foo(bar, baz) values (?, ?)", parameters)
+
     def CheckErrorMsgDecodeError(self):
         # When porting the module to Python 3.0, the error message about
         # decoding errors disappeared. This verifies they're back again.
@@ -262,7 +275,7 @@
         Call a connection with a non-string SQL request: check error handling
         of the statement constructor.
         """
-        self.assertRaises(sqlite.Warning, self.con, 1)
+        self.assertRaises(TypeError, self.con, 1)
 
     def CheckCollation(self):
         def collation_cb(a, b):
diff --git a/common/py3-stdlib/ssl.py b/common/py3-stdlib/ssl.py
index 0726cae..30f4e59 100644
--- a/common/py3-stdlib/ssl.py
+++ b/common/py3-stdlib/ssl.py
@@ -252,7 +252,7 @@
 if sys.platform == "win32":
     from _ssl import enum_certificates, enum_crls
 
-from socket import socket, AF_INET, SOCK_STREAM, create_connection
+from socket import socket, SOCK_STREAM, create_connection
 from socket import SOL_SOCKET, SO_TYPE
 import socket as _socket
 import base64        # for DER-to-PEM translation
diff --git a/common/py3-stdlib/statistics.py b/common/py3-stdlib/statistics.py
index c5c6e47..f9d3802 100644
--- a/common/py3-stdlib/statistics.py
+++ b/common/py3-stdlib/statistics.py
@@ -163,7 +163,7 @@
     T = _coerce(int, type(start))
     for typ, values in groupby(data, type):
         T = _coerce(T, typ)  # or raise TypeError
-        for n,d in map(_exact_ratio, values):
+        for n, d in map(_exact_ratio, values):
             count += 1
             partials[d] = partials_get(d, 0) + n
     if None in partials:
@@ -261,7 +261,7 @@
         return T(value)
     except TypeError:
         if issubclass(T, Decimal):
-            return T(value.numerator)/T(value.denominator)
+            return T(value.numerator) / T(value.denominator)
         else:
             raise
 
@@ -277,8 +277,8 @@
 def _find_rteq(a, l, x):
     'Locate the rightmost value exactly equal to x'
     i = bisect_right(a, x, lo=l)
-    if i != (len(a)+1) and a[i-1] == x:
-        return i-1
+    if i != (len(a) + 1) and a[i - 1] == x:
+        return i - 1
     raise ValueError
 
 
@@ -315,7 +315,7 @@
         raise StatisticsError('mean requires at least one data point')
     T, total, count = _sum(data)
     assert count == n
-    return _convert(total/n, T)
+    return _convert(total / n, T)
 
 
 def fmean(data):
@@ -403,11 +403,11 @@
         else:
             raise TypeError('unsupported type')
     try:
-        T, total, count = _sum(1/x for x in _fail_neg(data, errmsg))
+        T, total, count = _sum(1 / x for x in _fail_neg(data, errmsg))
     except ZeroDivisionError:
         return 0
     assert count == n
-    return _convert(n/total, T)
+    return _convert(n / total, T)
 
 
 # FIXME: investigate ways to calculate medians without sorting? Quickselect?
@@ -428,11 +428,11 @@
     n = len(data)
     if n == 0:
         raise StatisticsError("no median for empty data")
-    if n%2 == 1:
-        return data[n//2]
+    if n % 2 == 1:
+        return data[n // 2]
     else:
-        i = n//2
-        return (data[i - 1] + data[i])/2
+        i = n // 2
+        return (data[i - 1] + data[i]) / 2
 
 
 def median_low(data):
@@ -451,10 +451,10 @@
     n = len(data)
     if n == 0:
         raise StatisticsError("no median for empty data")
-    if n%2 == 1:
-        return data[n//2]
+    if n % 2 == 1:
+        return data[n // 2]
     else:
-        return data[n//2 - 1]
+        return data[n // 2 - 1]
 
 
 def median_high(data):
@@ -473,7 +473,7 @@
     n = len(data)
     if n == 0:
         raise StatisticsError("no median for empty data")
-    return data[n//2]
+    return data[n // 2]
 
 
 def median_grouped(data, interval=1):
@@ -510,15 +510,15 @@
         return data[0]
     # Find the value at the midpoint. Remember this corresponds to the
     # centre of the class interval.
-    x = data[n//2]
+    x = data[n // 2]
     for obj in (x, interval):
         if isinstance(obj, (str, bytes)):
             raise TypeError('expected number but got %r' % obj)
     try:
-        L = x - interval/2  # The lower limit of the median interval.
+        L = x - interval / 2  # The lower limit of the median interval.
     except TypeError:
         # Mixed type. For now we just coerce to float.
-        L = float(x) - float(interval)/2
+        L = float(x) - float(interval) / 2
 
     # Uses bisection search to search for x in data with log(n) time complexity
     # Find the position of leftmost occurrence of x in data
@@ -528,7 +528,7 @@
     l2 = _find_rteq(data, l1, x)
     cf = l1
     f = l2 - l1 + 1
-    return L + interval*(n/2 - cf)/f
+    return L + interval * (n / 2 - cf) / f
 
 
 def mode(data):
@@ -554,8 +554,7 @@
     If *data* is empty, ``mode``, raises StatisticsError.
 
     """
-    data = iter(data)
-    pairs = Counter(data).most_common(1)
+    pairs = Counter(iter(data)).most_common(1)
     try:
         return pairs[0][0]
     except IndexError:
@@ -597,7 +596,7 @@
 # For sample data where there is a positive probability for values
 # beyond the range of the data, the R6 exclusive method is a
 # reasonable choice.  Consider a random sample of nine values from a
-# population with a uniform distribution from 0.0 to 100.0.  The
+# population with a uniform distribution from 0.0 to 1.0.  The
 # distribution of the third ranked sample point is described by
 # betavariate(alpha=3, beta=7) which has mode=0.250, median=0.286, and
 # mean=0.300.  Only the latter (which corresponds with R6) gives the
@@ -643,9 +642,8 @@
         m = ld - 1
         result = []
         for i in range(1, n):
-            j = i * m // n
-            delta = i*m - j*n
-            interpolated = (data[j] * (n - delta) + data[j+1] * delta) / n
+            j, delta = divmod(i * m, n)
+            interpolated = (data[j] * (n - delta) + data[j + 1] * delta) / n
             result.append(interpolated)
         return result
     if method == 'exclusive':
@@ -655,7 +653,7 @@
             j = i * m // n                               # rescale i to m/n
             j = 1 if j < 1 else ld-1 if j > ld-1 else j  # clamp to 1 .. ld-1
             delta = i*m - j*n                            # exact integer math
-            interpolated = (data[j-1] * (n - delta) + data[j] * delta) / n
+            interpolated = (data[j - 1] * (n - delta) + data[j] * delta) / n
             result.append(interpolated)
         return result
     raise ValueError(f'Unknown method: {method!r}')
@@ -689,9 +687,9 @@
     T, total, count = _sum((x-c)**2 for x in data)
     # The following sum should mathematically equal zero, but due to rounding
     # error may not.
-    U, total2, count2 = _sum((x-c) for x in data)
+    U, total2, count2 = _sum((x - c) for x in data)
     assert T == U and count == count2
-    total -=  total2**2/len(data)
+    total -= total2 ** 2 / len(data)
     assert not total < 0, 'negative sum of square deviations: %f' % total
     return (T, total)
 
@@ -740,7 +738,7 @@
     if n < 2:
         raise StatisticsError('variance requires at least two data points')
     T, ss = _ss(data, xbar)
-    return _convert(ss/(n-1), T)
+    return _convert(ss / (n - 1), T)
 
 
 def pvariance(data, mu=None):
@@ -784,7 +782,7 @@
     if n < 1:
         raise StatisticsError('pvariance requires at least one data point')
     T, ss = _ss(data, mu)
-    return _convert(ss/n, T)
+    return _convert(ss / n, T)
 
 
 def stdev(data, xbar=None):
@@ -896,6 +894,13 @@
     return mu + (x * sigma)
 
 
+# If available, use C implementation
+try:
+    from _statistics import _normal_dist_inv_cdf
+except ImportError:
+    pass
+
+
 class NormalDist:
     "Normal distribution of a random variable"
     # https://en.wikipedia.org/wiki/Normal_distribution
@@ -986,7 +991,7 @@
         if not isinstance(other, NormalDist):
             raise TypeError('Expected another NormalDist instance')
         X, Y = self, other
-        if (Y._sigma, Y._mu) < (X._sigma, X._mu):   # sort to assure commutativity
+        if (Y._sigma, Y._mu) < (X._sigma, X._mu):  # sort to assure commutativity
             X, Y = Y, X
         X_var, Y_var = X.variance, Y.variance
         if not X_var or not Y_var:
@@ -1001,6 +1006,17 @@
         x2 = (a - b) / dv
         return 1.0 - (fabs(Y.cdf(x1) - X.cdf(x1)) + fabs(Y.cdf(x2) - X.cdf(x2)))
 
+    def zscore(self, x):
+        """Compute the Standard Score.  (x - mean) / stdev
+
+        Describes *x* in terms of the number of standard deviations
+        above or below the mean of the normal distribution.
+        """
+        # https://www.statisticshowto.com/probability-and-statistics/z-score/
+        if not self._sigma:
+            raise StatisticsError('zscore() not defined when sigma is zero')
+        return (x - self._mu) / self._sigma
+
     @property
     def mean(self):
         "Arithmetic mean of the normal distribution."
@@ -1102,79 +1118,3 @@
 
     def __repr__(self):
         return f'{type(self).__name__}(mu={self._mu!r}, sigma={self._sigma!r})'
-
-# If available, use C implementation
-try:
-    from _statistics import _normal_dist_inv_cdf
-except ImportError:
-    pass
-
-
-if __name__ == '__main__':
-
-    # Show math operations computed analytically in comparsion
-    # to a monte carlo simulation of the same operations
-
-    from math import isclose
-    from operator import add, sub, mul, truediv
-    from itertools import repeat
-    import doctest
-
-    g1 = NormalDist(10, 20)
-    g2 = NormalDist(-5, 25)
-
-    # Test scaling by a constant
-    assert (g1 * 5 / 5).mean == g1.mean
-    assert (g1 * 5 / 5).stdev == g1.stdev
-
-    n = 100_000
-    G1 = g1.samples(n)
-    G2 = g2.samples(n)
-
-    for func in (add, sub):
-        print(f'\nTest {func.__name__} with another NormalDist:')
-        print(func(g1, g2))
-        print(NormalDist.from_samples(map(func, G1, G2)))
-
-    const = 11
-    for func in (add, sub, mul, truediv):
-        print(f'\nTest {func.__name__} with a constant:')
-        print(func(g1, const))
-        print(NormalDist.from_samples(map(func, G1, repeat(const))))
-
-    const = 19
-    for func in (add, sub, mul):
-        print(f'\nTest constant with {func.__name__}:')
-        print(func(const, g1))
-        print(NormalDist.from_samples(map(func, repeat(const), G1)))
-
-    def assert_close(G1, G2):
-        assert isclose(G1.mean, G1.mean, rel_tol=0.01), (G1, G2)
-        assert isclose(G1.stdev, G2.stdev, rel_tol=0.01), (G1, G2)
-
-    X = NormalDist(-105, 73)
-    Y = NormalDist(31, 47)
-    s = 32.75
-    n = 100_000
-
-    S = NormalDist.from_samples([x + s for x in X.samples(n)])
-    assert_close(X + s, S)
-
-    S = NormalDist.from_samples([x - s for x in X.samples(n)])
-    assert_close(X - s, S)
-
-    S = NormalDist.from_samples([x * s for x in X.samples(n)])
-    assert_close(X * s, S)
-
-    S = NormalDist.from_samples([x / s for x in X.samples(n)])
-    assert_close(X / s, S)
-
-    S = NormalDist.from_samples([x + y for x, y in zip(X.samples(n),
-                                                       Y.samples(n))])
-    assert_close(X + Y, S)
-
-    S = NormalDist.from_samples([x - y for x, y in zip(X.samples(n),
-                                                       Y.samples(n))])
-    assert_close(X - Y, S)
-
-    print(doctest.testmod())
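
Among the statistics changes above, zscore() is the only new public API. A quick sketch:

    from statistics import NormalDist

    iq = NormalDist(mu=100, sigma=15)
    print(iq.zscore(130))  # 2.0 -- two standard deviations above the mean
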
diff --git a/common/py3-stdlib/string.py b/common/py3-stdlib/string.py
index b423ff5..489777b 100644
--- a/common/py3-stdlib/string.py
+++ b/common/py3-stdlib/string.py
@@ -54,30 +54,7 @@
 
 _sentinel_dict = {}
 
-class _TemplateMetaclass(type):
-    pattern = r"""
-    %(delim)s(?:
-      (?P<escaped>%(delim)s) |   # Escape sequence of two delimiters
-      (?P<named>%(id)s)      |   # delimiter and a Python identifier
-      {(?P<braced>%(bid)s)}  |   # delimiter and a braced identifier
-      (?P<invalid>)              # Other ill-formed delimiter exprs
-    )
-    """
-
-    def __init__(cls, name, bases, dct):
-        super(_TemplateMetaclass, cls).__init__(name, bases, dct)
-        if 'pattern' in dct:
-            pattern = cls.pattern
-        else:
-            pattern = _TemplateMetaclass.pattern % {
-                'delim' : _re.escape(cls.delimiter),
-                'id'    : cls.idpattern,
-                'bid'   : cls.braceidpattern or cls.idpattern,
-                }
-        cls.pattern = _re.compile(pattern, cls.flags | _re.VERBOSE)
-
-
-class Template(metaclass=_TemplateMetaclass):
+class Template:
     """A string class for supporting $-substitutions."""
 
     delimiter = '$'
@@ -89,6 +66,24 @@
     braceidpattern = None
     flags = _re.IGNORECASE
 
+    def __init_subclass__(cls):
+        super().__init_subclass__()
+        if 'pattern' in cls.__dict__:
+            pattern = cls.pattern
+        else:
+            delim = _re.escape(cls.delimiter)
+            id = cls.idpattern
+            bid = cls.braceidpattern or cls.idpattern
+            pattern = fr"""
+            {delim}(?:
+              (?P<escaped>{delim})  |   # Escape sequence of two delimiters
+              (?P<named>{id})       |   # delimiter and a Python identifier
+              {{(?P<braced>{bid})}} |   # delimiter and a braced identifier
+              (?P<invalid>)             # Other ill-formed delimiter exprs
+            )
+            """
+        cls.pattern = _re.compile(pattern, cls.flags | _re.VERBOSE)
+
     def __init__(self, template):
         self.template = template
 
@@ -146,6 +141,9 @@
                              self.pattern)
         return self.pattern.sub(convert, self.template)
 
+# Initialize Template.pattern.  __init_subclass__() is automatically called
+# only for subclasses, not for the Template class itself.
+Template.__init_subclass__()
 
 
 ########################################################################
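
The metaclass-to-__init_subclass__ move above is behavior-preserving: subclasses that override delimiter or idpattern still get their regex compiled automatically. A sketch with an illustrative subclass (PercentTemplate is not part of the stdlib):

    from string import Template

    class PercentTemplate(Template):
        delimiter = '%'

    t = PercentTemplate('%who likes %what')
    print(t.substitute(who='tim', what='kung pao'))  # tim likes kung pao
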
diff --git a/common/py3-stdlib/subprocess.py b/common/py3-stdlib/subprocess.py
index 5c2c2f0..f1d829a 100644
--- a/common/py3-stdlib/subprocess.py
+++ b/common/py3-stdlib/subprocess.py
@@ -52,7 +52,16 @@
 import warnings
 import contextlib
 from time import monotonic as _time
+import types
 
+try:
+    import pwd
+except ImportError:
+    pwd = None
+try:
+    import grp
+except ImportError:
+    grp = None
 
 __all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
            "getoutput", "check_output", "run", "CalledProcessError", "DEVNULL",
@@ -317,7 +326,7 @@
     if dev_mode:
         args.extend(('-X', 'dev'))
     for opt in ('faulthandler', 'tracemalloc', 'importtime',
-                'showalloccount', 'showrefcount', 'utf8'):
+                'showrefcount', 'utf8', 'oldparser'):
         if opt in xoptions:
             value = xoptions[opt]
             if value is True:
@@ -438,6 +447,9 @@
             args.append('stderr={!r}'.format(self.stderr))
         return "{}({})".format(type(self).__name__, ', '.join(args))
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
+
     def check_returncode(self):
         """Raise CalledProcessError if the exit code is non-zero."""
         if self.returncode:
@@ -719,6 +731,14 @@
 
       start_new_session (POSIX only)
 
+      group (POSIX only)
+
+      extra_groups (POSIX only)
+
+      user (POSIX only)
+
+      umask (POSIX only)
+
       pass_fds (POSIX only)
 
       encoding and errors: Text mode encoding and error handling to use for
@@ -735,7 +755,8 @@
                  shell=False, cwd=None, env=None, universal_newlines=None,
                  startupinfo=None, creationflags=0,
                  restore_signals=True, start_new_session=False,
-                 pass_fds=(), *, encoding=None, errors=None, text=None):
+                 pass_fds=(), *, user=None, group=None, extra_groups=None,
+                 encoding=None, errors=None, text=None, umask=-1):
         """Create new Popen instance."""
         _cleanup()
         # Held while anything is calling waitpid before returncode has been
@@ -833,6 +854,78 @@
             else:
                 line_buffering = False
 
+        gid = None
+        if group is not None:
+            if not hasattr(os, 'setregid'):
+                raise ValueError("The 'group' parameter is not supported on the "
+                                 "current platform")
+
+            elif isinstance(group, str):
+                if grp is None:
+                    raise ValueError("The group parameter cannot be a string "
+                                     "on systems without the grp module")
+
+                gid = grp.getgrnam(group).gr_gid
+            elif isinstance(group, int):
+                gid = group
+            else:
+                raise TypeError("Group must be a string or an integer, not {}"
+                                .format(type(group)))
+
+            if gid < 0:
+                raise ValueError(f"Group ID cannot be negative, got {gid}")
+
+        gids = None
+        if extra_groups is not None:
+            if not hasattr(os, 'setgroups'):
+                raise ValueError("The 'extra_groups' parameter is not "
+                                 "supported on the current platform")
+
+            elif isinstance(extra_groups, str):
+                raise ValueError("Groups must be a list, not a string")
+
+            gids = []
+            for extra_group in extra_groups:
+                if isinstance(extra_group, str):
+                    if grp is None:
+                        raise ValueError("Items in extra_groups cannot be "
+                                         "strings on systems without the "
+                                         "grp module")
+
+                    gids.append(grp.getgrnam(extra_group).gr_gid)
+                elif isinstance(extra_group, int):
+                    gids.append(extra_group)
+                else:
+                    raise TypeError("Items in extra_groups must be a string "
+                                    "or integer, not {}"
+                                    .format(type(extra_group)))
+
+            # make sure that the gids are all positive here so we can do less
+            # checking in the C code
+            for gid_check in gids:
+                if gid_check < 0:
+                    raise ValueError(f"Group ID cannot be negative, got {gid_check}")
+
+        uid = None
+        if user is not None:
+            if not hasattr(os, 'setreuid'):
+                raise ValueError("The 'user' parameter is not supported on "
+                                 "the current platform")
+
+            elif isinstance(user, str):
+                if pwd is None:
+                    raise ValueError("The user parameter cannot be a string "
+                                     "on systems without the pwd module")
+
+                uid = pwd.getpwnam(user).pw_uid
+            elif isinstance(user, int):
+                uid = user
+            else:
+                raise TypeError("User must be a string or an integer")
+
+            if uid < 0:
+                raise ValueError(f"User ID cannot be negative, got {uid}")
+
         try:
             if p2cwrite != -1:
                 self.stdin = io.open(p2cwrite, 'wb', bufsize)
@@ -857,7 +950,9 @@
                                 p2cread, p2cwrite,
                                 c2pread, c2pwrite,
                                 errread, errwrite,
-                                restore_signals, start_new_session)
+                                restore_signals,
+                                gid, gids, uid, umask,
+                                start_new_session)
         except:
             # Cleanup if the child failed starting.
             for f in filter(None, (self.stdin, self.stdout, self.stderr)):
@@ -887,6 +982,17 @@
 
             raise
 
+    def __repr__(self):
+        obj_repr = (
+            f"<{self.__class__.__name__}: "
+            f"returncode: {self.returncode} args: {list(self.args)!r}>"
+        )
+        if len(obj_repr) > 80:
+            obj_repr = obj_repr[:76] + "...>"
+        return obj_repr
+
+    __class_getitem__ = classmethod(types.GenericAlias)
+
     @property
     def universal_newlines(self):
         # universal_newlines as retained as an alias of text_mode for API
@@ -1227,7 +1333,10 @@
                            p2cread, p2cwrite,
                            c2pread, c2pwrite,
                            errread, errwrite,
-                           unused_restore_signals, unused_start_new_session):
+                           unused_restore_signals,
+                           unused_gid, unused_gids, unused_uid,
+                           unused_umask,
+                           unused_start_new_session):
             """Execute program (MS Windows version)"""
 
             assert not pass_fds, "pass_fds not supported on Windows."
@@ -1553,7 +1662,9 @@
                            p2cread, p2cwrite,
                            c2pread, c2pwrite,
                            errread, errwrite,
-                           restore_signals, start_new_session):
+                           restore_signals,
+                           gid, gids, uid, umask,
+                           start_new_session):
             """Execute program (POSIX version)"""
 
             if isinstance(args, (str, bytes)):
@@ -1588,7 +1699,11 @@
                     and (p2cread == -1 or p2cread > 2)
                     and (c2pwrite == -1 or c2pwrite > 2)
                     and (errwrite == -1 or errwrite > 2)
-                    and not start_new_session):
+                    and not start_new_session
+                    and gid is None
+                    and gids is None
+                    and uid is None
+                    and umask < 0):
                 self._posix_spawn(args, executable, env, restore_signals,
                                   p2cread, p2cwrite,
                                   c2pread, c2pwrite,
@@ -1641,7 +1756,9 @@
                             p2cread, p2cwrite, c2pread, c2pwrite,
                             errread, errwrite,
                             errpipe_read, errpipe_write,
-                            restore_signals, start_new_session, preexec_fn)
+                            restore_signals, start_new_session,
+                            gid, gids, uid, umask,
+                            preexec_fn)
                     self._child_created = True
                 finally:
                     # be sure the FD is closed no matter what
@@ -1703,23 +1820,17 @@
                 raise child_exception_type(err_msg)
 
 
-        def _handle_exitstatus(self, sts, _WIFSIGNALED=os.WIFSIGNALED,
-                _WTERMSIG=os.WTERMSIG, _WIFEXITED=os.WIFEXITED,
-                _WEXITSTATUS=os.WEXITSTATUS, _WIFSTOPPED=os.WIFSTOPPED,
-                _WSTOPSIG=os.WSTOPSIG):
+        def _handle_exitstatus(self, sts,
+                               waitstatus_to_exitcode=os.waitstatus_to_exitcode,
+                               _WIFSTOPPED=os.WIFSTOPPED,
+                               _WSTOPSIG=os.WSTOPSIG):
             """All callers to this function MUST hold self._waitpid_lock."""
             # This method is called (indirectly) by __del__, so it cannot
             # refer to anything outside of its local scope.
-            if _WIFSIGNALED(sts):
-                self.returncode = -_WTERMSIG(sts)
-            elif _WIFEXITED(sts):
-                self.returncode = _WEXITSTATUS(sts)
-            elif _WIFSTOPPED(sts):
+            if _WIFSTOPPED(sts):
                 self.returncode = -_WSTOPSIG(sts)
             else:
-                # Should never happen
-                raise SubprocessError("Unknown child exit status!")
-
+                self.returncode = waitstatus_to_exitcode(sts)
 
         def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
                 _WNOHANG=os.WNOHANG, _ECHILD=errno.ECHILD):
@@ -1926,9 +2037,35 @@
 
         def send_signal(self, sig):
             """Send a signal to the process."""
-            # Skip signalling a process that we know has already died.
-            if self.returncode is None:
+            # bpo-38630: Polling reduces the risk of sending a signal to the
+            # wrong process if the process completed, the Popen.returncode
+            # attribute is still None, and the pid has been reassigned
+            # (recycled) to a different process. This race condition can
+            # happen in two cases.
+            #
+            # Case 1. Thread A calls Popen.poll(), thread B calls
+            # Popen.send_signal(). In thread A, waitpid() succeeds and returns
+            # the exit status. Thread B calls kill() because poll() in thread A
+            # did not set returncode yet. Calling poll() in thread B prevents
+            # the race condition thanks to Popen._waitpid_lock.
+            #
+            # Case 2. waitpid(pid, 0) has been called directly, without
+            # using Popen methods: returncode is still None in this case.
+            # Calling Popen.poll() will set returncode to a default value,
+            # since waitpid() fails with ProcessLookupError.
+            self.poll()
+            if self.returncode is not None:
+                # Skip signalling a process that we know has already died.
+                return
+
+            # The race condition can still happen if the scenario described
+            # above occurs between the returncode test
+            # and the kill() call.
+            try:
                 os.kill(self.pid, sig)
+            except ProcessLookupError:
+                # Suppress the race condition error; bpo-40550.
+                pass
 
         def terminate(self):
             """Terminate the process with SIGTERM
diff --git a/common/py3-stdlib/sunau.py b/common/py3-stdlib/sunau.py
index 129502b..79750a9 100644
--- a/common/py3-stdlib/sunau.py
+++ b/common/py3-stdlib/sunau.py
@@ -104,7 +104,7 @@
 """
 
 from collections import namedtuple
-import warnings
+
 
 _sunau_params = namedtuple('_sunau_params',
                            'nchannels sampwidth framerate nframes comptype compname')
@@ -524,8 +524,3 @@
         return Au_write(f)
     else:
         raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
-
-def openfp(f, mode=None):
-    warnings.warn("sunau.openfp is deprecated since Python 3.7. "
-                  "Use sunau.open instead.", DeprecationWarning, stacklevel=2)
-    return open(f, mode=mode)
diff --git a/common/py3-stdlib/symbol.py b/common/py3-stdlib/symbol.py
index 36e0eec..aaac8c9 100644
--- a/common/py3-stdlib/symbol.py
+++ b/common/py3-stdlib/symbol.py
@@ -11,6 +11,15 @@
 #
 #    make regen-symbol
 
+import warnings
+
+warnings.warn(
+    "The symbol module is deprecated and will be removed "
+    "in future versions of Python",
+    DeprecationWarning,
+    stacklevel=2,
+)
+
 #--start constants--
 single_input = 256
 file_input = 257
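
The warning above fires at import time, so it is only observable on the first import in a fresh interpreter (the module cache suppresses re-execution). Sketch:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        import symbol  # noqa: F401
    print(caught[0].category.__name__)  # DeprecationWarning
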
diff --git a/common/py3-stdlib/symtable.py b/common/py3-stdlib/symtable.py
index ac0a64f..521540f 100644
--- a/common/py3-stdlib/symtable.py
+++ b/common/py3-stdlib/symtable.py
@@ -34,7 +34,7 @@
 _newSymbolTable = SymbolTableFactory()
 
 
-class SymbolTable(object):
+class SymbolTable:
 
     def __init__(self, raw_table, filename):
         self._table = raw_table
@@ -47,7 +47,7 @@
         else:
             kind = "%s " % self.__class__.__name__
 
-        if self._table.name == "global":
+        if self._table.name == "top":
             return "<{0}SymbolTable for module {1}>".format(kind, self._filename)
         else:
             return "<{0}SymbolTable for {1} in {2}>".format(kind,
@@ -82,10 +82,6 @@
     def has_children(self):
         return bool(self._table.children)
 
-    def has_exec(self):
-        """Return true if the scope uses exec.  Deprecated method."""
-        return False
-
     def get_identifiers(self):
         return self._table.symbols.keys()
 
@@ -94,7 +90,9 @@
         if sym is None:
             flags = self._table.symbols[name]
             namespaces = self.__check_children(name)
-            sym = self._symbols[name] = Symbol(name, flags, namespaces)
+            module_scope = (self._table.name == "top")
+            sym = self._symbols[name] = Symbol(name, flags, namespaces,
+                                               module_scope=module_scope)
         return sym
 
     def get_symbols(self):
@@ -167,13 +165,14 @@
         return self.__methods
 
 
-class Symbol(object):
+class Symbol:
 
-    def __init__(self, name, flags, namespaces=None):
+    def __init__(self, name, flags, namespaces=None, *, module_scope=False):
         self.__name = name
         self.__flags = flags
         self.__scope = (flags >> SCOPE_OFF) & SCOPE_MASK # like PyST_GetScope()
         self.__namespaces = namespaces or ()
+        self.__module_scope = module_scope
 
     def __repr__(self):
         return "<symbol {0!r}>".format(self.__name)
@@ -188,7 +187,10 @@
         return bool(self.__flags & DEF_PARAM)
 
     def is_global(self):
-        return bool(self.__scope in (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT))
+        """Return *True* if the symbol is global.
+        """
+        return bool(self.__scope in (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT)
+                    or (self.__module_scope and self.__flags & DEF_BOUND))
 
     def is_nonlocal(self):
         return bool(self.__flags & DEF_NONLOCAL)
@@ -197,7 +199,10 @@
         return bool(self.__scope == GLOBAL_EXPLICIT)
 
     def is_local(self):
-        return bool(self.__scope in (LOCAL, CELL))
+        """Return *True* if the symbol is local.
+        """
+        return bool(self.__scope in (LOCAL, CELL)
+                    or (self.__module_scope and self.__flags & DEF_BOUND))
 
     def is_annotated(self):
         return bool(self.__flags & DEF_ANNOT)
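
With the module_scope plumbing above, names bound at module level now report as both local and global. Sketch:

    import symtable

    table = symtable.symtable('x = 1', '<string>', 'exec')
    sym = table.lookup('x')
    print(sym.is_global(), sym.is_local())  # True True
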
diff --git a/common/py3-stdlib/sysconfig.py b/common/py3-stdlib/sysconfig.py
index b9e2faf..bf04ac5 100644
--- a/common/py3-stdlib/sysconfig.py
+++ b/common/py3-stdlib/sysconfig.py
@@ -20,10 +20,10 @@
 
 _INSTALL_SCHEMES = {
     'posix_prefix': {
-        'stdlib': '{installed_base}/lib/python{py_version_short}',
-        'platstdlib': '{platbase}/lib/python{py_version_short}',
+        'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}',
+        'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}',
         'purelib': '{base}/lib/python{py_version_short}/site-packages',
-        'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
+        'platlib': '{platbase}/{platlibdir}/python{py_version_short}/site-packages',
         'include':
             '{installed_base}/include/python{py_version_short}{abiflags}',
         'platinclude':
@@ -62,10 +62,10 @@
         'data': '{userbase}',
         },
     'posix_user': {
-        'stdlib': '{userbase}/lib/python{py_version_short}',
-        'platstdlib': '{userbase}/lib/python{py_version_short}',
+        'stdlib': '{userbase}/{platlibdir}/python{py_version_short}',
+        'platstdlib': '{userbase}/{platlibdir}/python{py_version_short}',
         'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
-        'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
+        'platlib': '{userbase}/{platlibdir}/python{py_version_short}/site-packages',
         'include': '{userbase}/include/python{py_version_short}',
         'scripts': '{userbase}/bin',
         'data': '{userbase}',
@@ -84,8 +84,6 @@
 _SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
                 'scripts', 'data')
 
- # FIXME don't rely on sys.version here, its format is an implementation detail
- # of CPython, use sys.version_info or sys.hexversion
 _PY_VERSION = sys.version.split()[0]
 _PY_VERSION_SHORT = '%d.%d' % sys.version_info[:2]
 _PY_VERSION_SHORT_NO_DOT = '%d%d' % sys.version_info[:2]
@@ -539,6 +537,7 @@
         _CONFIG_VARS['installed_platbase'] = _BASE_EXEC_PREFIX
         _CONFIG_VARS['platbase'] = _EXEC_PREFIX
         _CONFIG_VARS['projectbase'] = _PROJECT_BASE
+        _CONFIG_VARS['platlibdir'] = sys.platlibdir
         try:
             _CONFIG_VARS['abiflags'] = sys.abiflags
         except AttributeError:
@@ -547,6 +546,7 @@
 
         if os.name == 'nt':
             _init_non_posix(_CONFIG_VARS)
+            _CONFIG_VARS['TZPATH'] = ''
         if os.name == 'posix':
             _init_posix(_CONFIG_VARS)
         # For backward compatibility, see issue19555
@@ -665,7 +665,8 @@
             machine += ".%s" % bitness[sys.maxsize]
         # fall through to standard osname-release-machine representation
     elif osname[:3] == "aix":
-        return "%s-%s.%s" % (osname, version, release)
+        from _aix_support import aix_platform
+        return aix_platform()
     elif osname[:6] == "cygwin":
         osname = "cygwin"
         import re
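
The {platlibdir} placeholder above is resolved from the new sys.platlibdir attribute, letting platform-specific libraries live under e.g. lib64 while pure-Python packages stay under lib. Sketch:

    import sysconfig

    print(sysconfig.get_config_var('platlibdir'))  # 'lib', or e.g. 'lib64'
    print(sysconfig.get_path('platstdlib'))
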
diff --git a/common/py3-stdlib/tarfile.py b/common/py3-stdlib/tarfile.py
index 7a69e1b..1d15612 100755
--- a/common/py3-stdlib/tarfile.py
+++ b/common/py3-stdlib/tarfile.py
@@ -420,6 +420,8 @@
         self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
         if self.name.endswith(".gz"):
             self.name = self.name[:-3]
+        # Honor "directory components removed" from RFC1952
+        self.name = os.path.basename(self.name)
         # RFC1952 says we must use ISO-8859-1 for the FNAME field.
         self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
 
@@ -930,6 +932,14 @@
         """Return a header block. info is a dictionary with file
            information, format must be one of the *_FORMAT constants.
         """
+        has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
+        if has_device_fields:
+            devmajor = itn(info.get("devmajor", 0), 8, format)
+            devminor = itn(info.get("devminor", 0), 8, format)
+        else:
+            devmajor = stn("", 8, encoding, errors)
+            devminor = stn("", 8, encoding, errors)
+
         parts = [
             stn(info.get("name", ""), 100, encoding, errors),
             itn(info.get("mode", 0) & 0o7777, 8, format),
@@ -943,8 +953,8 @@
             info.get("magic", POSIX_MAGIC),
             stn(info.get("uname", ""), 32, encoding, errors),
             stn(info.get("gname", ""), 32, encoding, errors),
-            itn(info.get("devmajor", 0), 8, format),
-            itn(info.get("devminor", 0), 8, format),
+            devmajor,
+            devminor,
             stn(info.get("prefix", ""), 155, encoding, errors)
         ]
 
@@ -2083,9 +2093,10 @@
 
     def extractfile(self, member):
         """Extract a member from the archive as a file object. `member' may be
-           a filename or a TarInfo object. If `member' is a regular file or a
-           link, an io.BufferedReader object is returned. Otherwise, None is
-           returned.
+           a filename or a TarInfo object. If `member' is a regular file or
+           a link, an io.BufferedReader object is returned. For all other
+           existing members, None is returned. If `member' does not appear
+           in the archive, KeyError is raised.
         """
         self._check("r")
 
@@ -2226,6 +2237,9 @@
         try:
             # For systems that support symbolic and hard links.
             if tarinfo.issym():
+                if os.path.lexists(targetpath):
+                    # Avoid FileExistsError on following os.symlink.
+                    os.unlink(targetpath)
                 os.symlink(tarinfo.linkname, targetpath)
             else:
                 # See extract().
@@ -2461,9 +2475,14 @@
 def is_tarfile(name):
     """Return True if name points to a tar archive that we
        are able to handle, else return False.
+
+       'name' should be a string, file, or file-like object.
     """
     try:
-        t = open(name)
+        if hasattr(name, "read"):
+            t = open(fileobj=name)
+        else:
+            t = open(name)
         t.close()
         return True
     except TarError:
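
is_tarfile() above now accepts an open file object in addition to a path. A self-contained sketch using an in-memory archive:

    import io
    import tarfile

    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode='w'):
        pass  # an empty archive is still a valid tar
    buf.seek(0)
    print(tarfile.is_tarfile(buf))  # True
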
diff --git a/common/py3-stdlib/tempfile.py b/common/py3-stdlib/tempfile.py
index 5b990e0..770f72c 100644
--- a/common/py3-stdlib/tempfile.py
+++ b/common/py3-stdlib/tempfile.py
@@ -44,6 +44,7 @@
 import errno as _errno
 from random import Random as _Random
 import sys as _sys
+import types as _types
 import weakref as _weakref
 import _thread
 _allocate_lock = _thread.allocate_lock
@@ -307,8 +308,7 @@
     otherwise a default directory is used.
 
     If 'text' is specified and true, the file is opened in text
-    mode.  Else (the default) the file is opened in binary mode.  On
-    some operating systems, this makes no difference.
+    mode.  Else (the default) the file is opened in binary mode.
 
     If any of 'suffix', 'prefix' and 'dir' are not None, they must be the
     same type.  If they are bytes, the returned name will be bytes; str
@@ -643,6 +643,8 @@
                                    'encoding': encoding, 'newline': newline,
                                    'dir': dir, 'errors': errors}
 
+    __class_getitem__ = classmethod(_types.GenericAlias)
+
     def _check(self, file):
         if self._rolled: return
         max_size = self._max_size
@@ -737,10 +739,6 @@
     def seek(self, *args):
         return self._file.seek(*args)
 
-    @property
-    def softspace(self):
-        return self._file.softspace
-
     def tell(self):
         return self._file.tell()
 
@@ -830,3 +828,5 @@
     def cleanup(self):
         if self._finalizer.detach():
             self._rmtree(self.name)
+
+    __class_getitem__ = classmethod(_types.GenericAlias)
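
The __class_getitem__ assignments above make these classes subscriptable for PEP 585-style annotations. Sketch:

    import tempfile

    # Valid at runtime now; useful in type annotations
    print(tempfile.SpooledTemporaryFile[bytes])
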
diff --git a/common/py3-stdlib/threading.py b/common/py3-stdlib/threading.py
index 813dae2..d96d99a 100644
--- a/common/py3-stdlib/threading.py
+++ b/common/py3-stdlib/threading.py
@@ -3,6 +3,7 @@
 import os as _os
 import sys as _sys
 import _thread
+import functools
 
 from time import monotonic as _time
 from _weakrefset import WeakSet
@@ -121,6 +122,11 @@
             hex(id(self))
         )
 
+    def _at_fork_reinit(self):
+        self._block._at_fork_reinit()
+        self._owner = None
+        self._count = 0
+
     def acquire(self, blocking=True, timeout=-1):
         """Acquire a lock, blocking or non-blocking.
 
@@ -243,6 +249,10 @@
             pass
         self._waiters = _deque()
 
+    def _at_fork_reinit(self):
+        self._lock._at_fork_reinit()
+        self._waiters.clear()
+
     def __enter__(self):
         return self._lock.__enter__()
 
@@ -261,7 +271,7 @@
     def _is_owned(self):
         # Return True if lock is owned by current_thread.
         # This method is called only if _lock doesn't have _is_owned().
-        if self._lock.acquire(0):
+        if self._lock.acquire(False):
             self._lock.release()
             return False
         else:
@@ -438,16 +448,19 @@
 
     __enter__ = acquire
 
-    def release(self):
-        """Release a semaphore, incrementing the internal counter by one.
+    def release(self, n=1):
+        """Release a semaphore, incrementing the internal counter by one or more.
 
         When the counter is zero on entry and another thread is waiting for it
         to become larger than zero again, wake up that thread.
 
         """
+        if n < 1:
+            raise ValueError('n must be one or more')
         with self._cond:
-            self._value += 1
-            self._cond.notify()
+            self._value += n
+            for i in range(n):
+                self._cond.notify()
 
     def __exit__(self, t, v, tb):
         self.release()
@@ -474,8 +487,8 @@
         Semaphore.__init__(self, value)
         self._initial_value = value
 
-    def release(self):
-        """Release a semaphore, incrementing the internal counter by one.
+    def release(self, n=1):
+        """Release a semaphore, incrementing the internal counter by one or more.
 
         When the counter is zero on entry and another thread is waiting for it
         to become larger than zero again, wake up that thread.
@@ -484,11 +497,14 @@
         raise a ValueError.
 
         """
+        if n < 1:
+            raise ValueError('n must be one or more')
         with self._cond:
-            if self._value >= self._initial_value:
+            if self._value + n > self._initial_value:
                 raise ValueError("Semaphore released too many times")
-            self._value += 1
-            self._cond.notify()
+            self._value += n
+            for i in range(n):
+                self._cond.notify()
 
 
 class Event:
@@ -506,9 +522,9 @@
         self._cond = Condition(Lock())
         self._flag = False
 
-    def _reset_internal_locks(self):
-        # private!  called by Thread._reset_internal_locks by _after_fork()
-        self._cond.__init__(Lock())
+    def _at_fork_reinit(self):
+        # Private method called by Thread._reset_internal_locks()
+        self._cond._at_fork_reinit()
 
     def is_set(self):
         """Return true if and only if the internal flag is true."""
@@ -808,9 +824,14 @@
     def _reset_internal_locks(self, is_alive):
         # private!  Called by _after_fork() to reset our internal locks as
         # they may be in an invalid state leading to a deadlock or crash.
-        self._started._reset_internal_locks()
+        self._started._at_fork_reinit()
         if is_alive:
-            self._set_tstate_lock()
+            # bpo-42350: If the fork happens when the thread is already stopped
+            # (ex: after threading._shutdown() has been called), _tstate_lock
+            # is None. Do nothing in this case.
+            if self._tstate_lock is not None:
+                self._tstate_lock._at_fork_reinit()
+                self._tstate_lock.acquire()
         else:
             # The thread isn't alive after fork: it doesn't have a tstate
             # anymore.
@@ -846,6 +867,7 @@
 
         if self._started.is_set():
             raise RuntimeError("threads can only be started once")
+
         with _active_limbo_lock:
             _limbo[self] = self
         try:
@@ -1082,16 +1104,6 @@
         self._wait_for_tstate_lock(False)
         return not self._is_stopped
 
-    def isAlive(self):
-        """Return whether the thread is alive.
-
-        This method is deprecated, use is_alive() instead.
-        """
-        import warnings
-        warnings.warn('isAlive() is deprecated, use is_alive() instead',
-                      DeprecationWarning, stacklevel=2)
-        return self.is_alive()
-
     @property
     def daemon(self):
         """A boolean value indicating whether this thread is a daemon thread.
@@ -1344,6 +1356,27 @@
     with _active_limbo_lock:
         return list(_active.values()) + list(_limbo.values())
 
+
+_threading_atexits = []
+_SHUTTING_DOWN = False
+
+def _register_atexit(func, *arg, **kwargs):
+    """CPython internal: register *func* to be called before joining threads.
+
+    The registered *func* is called with its arguments just before all
+    non-daemon threads are joined in `_shutdown()`. It provides a similar
+    purpose to `atexit.register()`, but its functions are called prior to
+    threading shutdown instead of interpreter shutdown.
+
+    For similarity to atexit, the registered functions are called in reverse.
+    """
+    if _SHUTTING_DOWN:
+        raise RuntimeError("can't register atexit after shutdown")
+
+    call = functools.partial(func, *arg, **kwargs)
+    _threading_atexits.append(call)
+
+
 from _thread import stack_size
 
 # Create the main thread object,
@@ -1365,6 +1398,8 @@
         # _shutdown() was already called
         return
 
+    global _SHUTTING_DOWN
+    _SHUTTING_DOWN = True
     # Main thread
     tlock = _main_thread._tstate_lock
     # The main thread isn't finished yet, so its thread state lock can't have
@@ -1374,6 +1409,11 @@
     tlock.release()
     _main_thread._stop()
 
+    # Call registered threading atexit functions before threads are joined.
+    # Order is reversed, similar to atexit.
+    for atexit_call in reversed(_threading_atexits):
+        atexit_call()
+
     # Join all non-daemon threads
     while True:
         with _shutdown_locks_lock:
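
A quick sketch of the batched Semaphore.release(n) added above:

    import threading

    sem = threading.Semaphore(0)
    sem.release(2)  # increment by two, waking up to two waiters
    print(sem.acquire(blocking=False),  # True
          sem.acquire(blocking=False),  # True
          sem.acquire(blocking=False))  # False
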
diff --git a/common/py3-stdlib/trace.py b/common/py3-stdlib/trace.py
index 89f17d4..c505d8b 100755
--- a/common/py3-stdlib/trace.py
+++ b/common/py3-stdlib/trace.py
@@ -453,22 +453,7 @@
                 sys.settrace(None)
                 threading.settrace(None)
 
-    def runfunc(*args, **kw):
-        if len(args) >= 2:
-            self, func, *args = args
-        elif not args:
-            raise TypeError("descriptor 'runfunc' of 'Trace' object "
-                            "needs an argument")
-        elif 'func' in kw:
-            func = kw.pop('func')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'func' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('runfunc expected at least 1 positional argument, '
-                            'got %d' % (len(args)-1))
-
+    def runfunc(self, func, /, *args, **kw):
         result = None
         if not self.donothing:
             sys.settrace(self.globaltrace)
@@ -478,7 +463,6 @@
             if not self.donothing:
                 sys.settrace(None)
         return result
-    runfunc.__text_signature__ = '($self, func, /, *args, **kw)'
 
     def file_module_function_of(self, frame):
         code = frame.f_code
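
runfunc() now declares func as positional-only with PEP 570 syntax instead of hand-rolled *args parsing; the call shape is unchanged. Sketch:

    import trace

    tracer = trace.Trace(count=False, trace=False)
    print(tracer.runfunc(pow, 2, 10))  # 1024
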
diff --git a/common/py3-stdlib/traceback.py b/common/py3-stdlib/traceback.py
index 5ef3be7..fb34de9 100644
--- a/common/py3-stdlib/traceback.py
+++ b/common/py3-stdlib/traceback.py
@@ -500,7 +500,6 @@
                 _seen=_seen)
         else:
             context = None
-        self.exc_traceback = exc_traceback
         self.__cause__ = cause
         self.__context__ = context
         self.__suppress_context__ = \
@@ -538,7 +537,9 @@
             self.__cause__._load_lines()
 
     def __eq__(self, other):
-        return self.__dict__ == other.__dict__
+        if isinstance(other, TracebackException):
+            return self.__dict__ == other.__dict__
+        return NotImplemented
 
     def __str__(self):
         return self._str
@@ -567,23 +568,30 @@
 
         if not issubclass(self.exc_type, SyntaxError):
             yield _format_final_exc_line(stype, self._str)
-            return
+        else:
+            yield from self._format_syntax_error(stype)
 
-        # It was a syntax error; show exactly where the problem was found.
+    def _format_syntax_error(self, stype):
+        """Format SyntaxError exceptions (internal helper)."""
+        # Show exactly where the problem was found.
         filename = self.filename or "<string>"
         lineno = str(self.lineno) or '?'
         yield '  File "{}", line {}\n'.format(filename, lineno)
 
-        badline = self.text
-        offset = self.offset
-        if badline is not None:
-            yield '    {}\n'.format(badline.strip())
-            if offset is not None:
-                caretspace = badline.rstrip('\n')
-                offset = min(len(caretspace), offset) - 1
-                caretspace = caretspace[:offset].lstrip()
+        text = self.text
+        if text is not None:
+            # text  = "   foo\n"
+            # rtext = "   foo"
+            # ltext =    "foo"
+            rtext = text.rstrip('\n')
+            ltext = rtext.lstrip(' \n\f')
+            spaces = len(rtext) - len(ltext)
+            yield '    {}\n'.format(ltext)
+            # Convert 1-based column offset to 0-based index into stripped text
+            caret = (self.offset or 0) - 1 - spaces
+            if caret >= 0:
                 # non-space whitespace (like tabs) must be kept for alignment
-                caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+                caretspace = ((c if c.isspace() else ' ') for c in ltext[:caret])
                 yield '    {}^\n'.format(''.join(caretspace))
         msg = self.msg or "<no detail available>"
         yield "{}: {}\n".format(stype, msg)
@@ -608,7 +616,7 @@
                 not self.__suppress_context__):
                 yield from self.__context__.format(chain=chain)
                 yield _context_message
-        if self.exc_traceback is not None:
+        if self.stack:
             yield 'Traceback (most recent call last):\n'
-        yield from self.stack.format()
+            yield from self.stack.format()
         yield from self.format_exception_only()
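
With the __eq__ change above, comparing a TracebackException against an unrelated type returns NotImplemented and falls back to the default comparison, instead of blowing up on a missing __dict__. Sketch:

    import traceback

    try:
        1 / 0
    except ZeroDivisionError as exc:
        te = traceback.TracebackException.from_exception(exc)
    print(te == 'not a TracebackException')  # False
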
diff --git a/common/py3-stdlib/tracemalloc.py b/common/py3-stdlib/tracemalloc.py
index 2c1ac3b..69b4170 100644
--- a/common/py3-stdlib/tracemalloc.py
+++ b/common/py3-stdlib/tracemalloc.py
@@ -43,6 +43,8 @@
         return hash((self.traceback, self.size, self.count))
 
     def __eq__(self, other):
+        if not isinstance(other, Statistic):
+            return NotImplemented
         return (self.traceback == other.traceback
                 and self.size == other.size
                 and self.count == other.count)
@@ -84,6 +86,8 @@
                      self.count, self.count_diff))
 
     def __eq__(self, other):
+        if not isinstance(other, StatisticDiff):
+            return NotImplemented
         return (self.traceback == other.traceback
                 and self.size == other.size
                 and self.size_diff == other.size_diff
@@ -153,9 +157,13 @@
         return self._frame[1]
 
     def __eq__(self, other):
+        if not isinstance(other, Frame):
+            return NotImplemented
         return (self._frame == other._frame)
 
     def __lt__(self, other):
+        if not isinstance(other, Frame):
+            return NotImplemented
         return (self._frame < other._frame)
 
     def __hash__(self):
@@ -174,15 +182,20 @@
     Sequence of Frame instances sorted from the oldest frame
     to the most recent frame.
     """
-    __slots__ = ("_frames",)
+    __slots__ = ("_frames", '_total_nframe')
 
-    def __init__(self, frames):
+    def __init__(self, frames, total_nframe=None):
         Sequence.__init__(self)
         # frames is a tuple of frame tuples: see Frame constructor for the
         # format of a frame tuple; it is reversed, because _tracemalloc
         # returns frames sorted from most recent to oldest, but the
         # Python API expects oldest to most recent
         self._frames = tuple(reversed(frames))
+        self._total_nframe = total_nframe
+
+    @property
+    def total_nframe(self):
+        return self._total_nframe
 
     def __len__(self):
         return len(self._frames)
@@ -200,16 +213,25 @@
         return hash(self._frames)
 
     def __eq__(self, other):
+        if not isinstance(other, Traceback):
+            return NotImplemented
         return (self._frames == other._frames)
 
     def __lt__(self, other):
+        if not isinstance(other, Traceback):
+            return NotImplemented
         return (self._frames < other._frames)
 
     def __str__(self):
         return str(self[0])
 
     def __repr__(self):
-        return "<Traceback %r>" % (tuple(self),)
+        s = "<Traceback %r" % tuple(self)
+        if self._total_nframe is None:
+            s += ">"
+        else:
+            s += f" total_nframe={self.total_nframe}>"
+        return s
 
     def format(self, limit=None, most_recent_first=False):
         lines = []
@@ -268,9 +290,11 @@
 
     @property
     def traceback(self):
-        return Traceback(self._trace[2])
+        return Traceback(*self._trace[2:])
 
     def __eq__(self, other):
+        if not isinstance(other, Trace):
+            return NotImplemented
         return (self._trace == other._trace)
 
     def __hash__(self):
@@ -303,6 +327,8 @@
         return trace._trace in self._traces
 
     def __eq__(self, other):
+        if not isinstance(other, _Traces):
+            return NotImplemented
         return (self._traces == other._traces)
 
     def __repr__(self):
@@ -362,7 +388,7 @@
             return self._match_frame(filename, lineno)
 
     def _match(self, trace):
-        domain, size, traceback = trace
+        domain, size, traceback, total_nframe = trace
         res = self._match_traceback(traceback)
         if self.domain is not None:
             if self.inclusive:
@@ -382,7 +408,7 @@
         return self._domain
 
     def _match(self, trace):
-        domain, size, traceback = trace
+        domain, size, traceback, total_nframe = trace
         return (domain == self.domain) ^ (not self.inclusive)
 
 
@@ -459,7 +485,7 @@
         tracebacks = {}
         if not cumulative:
             for trace in self.traces._traces:
-                domain, size, trace_traceback = trace
+                domain, size, trace_traceback, total_nframe = trace
                 try:
                     traceback = tracebacks[trace_traceback]
                 except KeyError:
@@ -480,7 +506,7 @@
         else:
             # cumulative statistics
             for trace in self.traces._traces:
-                domain, size, trace_traceback = trace
+                domain, size, trace_traceback, total_nframe = trace
                 for frame in trace_traceback:
                     try:
                         traceback = tracebacks[frame]
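
A short sketch of the new total_nframe plumbing, assuming the Python 3.9
tracemalloc where each raw trace gained a fourth element:

    import tracemalloc

    tracemalloc.start(5)                        # store at most 5 frames
    data = [bytes(1000) for _ in range(100)]    # something to trace
    snapshot = tracemalloc.take_snapshot()
    tracemalloc.stop()

    trace = snapshot.traces[0]
    tb = trace.traceback        # built via Traceback(*self._trace[2:])
    # len(tb) is capped by start(5); total_nframe is the real stack depth:
    print(len(tb), tb.total_nframe)
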
diff --git a/common/py3-stdlib/turtle.py b/common/py3-stdlib/turtle.py
index ee67a35..ba8288d 100644
--- a/common/py3-stdlib/turtle.py
+++ b/common/py3-stdlib/turtle.py
@@ -258,6 +258,7 @@
     def __rmul__(self, other):
         if isinstance(other, int) or isinstance(other, float):
             return Vec2D(self[0]*other, self[1]*other)
+        return NotImplemented
     def __sub__(self, other):
         return Vec2D(self[0]-other[0], self[1]-other[1])
     def __neg__(self):
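
The added return makes Vec2D.__rmul__ a well-behaved reflected operator: for
unsupported operand types Python now raises TypeError instead of the method
silently returning None. A sketch (turtle imports tkinter, so this needs a
tkinter-enabled Python):

    from turtle import Vec2D

    v = Vec2D(1.0, 2.0)
    print(3 * v)          # (3.00,6.00) -- int/float handled by __rmul__

    try:
        "x" * v           # previously evaluated to None
    except TypeError as exc:
        print(exc)        # now a proper "can't multiply sequence ..." error
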
diff --git a/common/py3-stdlib/types.py b/common/py3-stdlib/types.py
index ea3c0b2..ad2020e 100644
--- a/common/py3-stdlib/types.py
+++ b/common/py3-stdlib/types.py
@@ -293,4 +293,7 @@
     return wrapped
 
 
+GenericAlias = type(list[int])
+
+
 __all__ = [n for n in globals() if n[:1] != '_']
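
types.GenericAlias exposes the PEP 585 alias type (the type of list[int]) so
it can be constructed and inspected directly. A quick sketch, assuming
Python 3.9:

    from types import GenericAlias

    alias = GenericAlias(list, (int,))
    print(alias)                   # list[int]
    print(alias == list[int])      # True
    print(alias.__origin__, alias.__args__)

    # It is also the usual building block for __class_getitem__:
    class Bag:
        __class_getitem__ = classmethod(GenericAlias)

    print(Bag[str])                # __main__.Bag[str]
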
diff --git a/common/py3-stdlib/typing.py b/common/py3-stdlib/typing.py
index 589eea9..f5316ab 100644
--- a/common/py3-stdlib/typing.py
+++ b/common/py3-stdlib/typing.py
@@ -26,11 +26,12 @@
 import re as stdlib_re  # Avoid confusion with the re we export.
 import sys
 import types
-from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType
+from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType, GenericAlias
 
 # Please keep __all__ alphabetized within each category.
 __all__ = [
     # Super-special typing primitives.
+    'Annotated',
     'Any',
     'Callable',
     'ClassVar',
@@ -140,8 +141,9 @@
     if (isinstance(arg, _GenericAlias) and
             arg.__origin__ in invalid_generic_forms):
         raise TypeError(f"{arg} is not valid as type argument")
-    if (isinstance(arg, _SpecialForm) and arg not in (Any, NoReturn) or
-            arg in (Generic, Protocol)):
+    if arg in (Any, NoReturn):
+        return arg
+    if isinstance(arg, _SpecialForm) or arg in (Generic, Protocol):
         raise TypeError(f"Plain {arg} is not valid as type argument")
     if isinstance(arg, (type, TypeVar, ForwardRef)):
         return arg
@@ -158,6 +160,8 @@
     typically enough to uniquely identify a type.  For everything
     else, we fall back on repr(obj).
     """
+    if isinstance(obj, types.GenericAlias):
+        return repr(obj)
     if isinstance(obj, type):
         if obj.__module__ == 'builtins':
             return obj.__qualname__
@@ -179,56 +183,24 @@
     for t in types:
         if isinstance(t, TypeVar) and t not in tvars:
             tvars.append(t)
-        if isinstance(t, _GenericAlias) and not t._special:
+        if isinstance(t, (_GenericAlias, GenericAlias)):
             tvars.extend([t for t in t.__parameters__ if t not in tvars])
     return tuple(tvars)
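
The widened isinstance check above works because PEP 585 builtin aliases
expose __parameters__ just like typing aliases do. A quick check, assuming
Python 3.9:

    from typing import Dict, List, TypeVar

    T = TypeVar('T')

    print(List[T].__parameters__)             # (~T,)
    print(list[T].__parameters__)             # (~T,) -- builtin alias too
    print(Dict[str, List[T]].__parameters__)  # (~T,)
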
 
 
-def _subs_tvars(tp, tvars, subs):
-    """Substitute type variables 'tvars' with substitutions 'subs'.
-    These two must have the same length.
-    """
-    if not isinstance(tp, _GenericAlias):
-        return tp
-    new_args = list(tp.__args__)
-    for a, arg in enumerate(tp.__args__):
-        if isinstance(arg, TypeVar):
-            for i, tvar in enumerate(tvars):
-                if arg == tvar:
-                    new_args[a] = subs[i]
-        else:
-            new_args[a] = _subs_tvars(arg, tvars, subs)
-    if tp.__origin__ is Union:
-        return Union[tuple(new_args)]
-    return tp.copy_with(tuple(new_args))
-
-
-def _check_generic(cls, parameters):
+def _check_generic(cls, parameters, elen):
     """Check correct count for parameters of a generic cls (internal helper).
     This gives a nice error message in case of count mismatch.
     """
-    if not cls.__parameters__:
+    if not elen:
         raise TypeError(f"{cls} is not a generic class")
     alen = len(parameters)
-    elen = len(cls.__parameters__)
     if alen != elen:
         raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};"
                         f" actual {alen}, expected {elen}")
 
 
-def _remove_dups_flatten(parameters):
-    """An internal helper for Union creation and substitution: flatten Unions
-    among parameters, then remove duplicates.
-    """
-    # Flatten out Union[Union[...], ...].
-    params = []
-    for p in parameters:
-        if isinstance(p, _GenericAlias) and p.__origin__ is Union:
-            params.extend(p.__args__)
-        elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
-            params.extend(p[1:])
-        else:
-            params.append(p)
+def _deduplicate(params):
     # Weed out strict duplicates, preserving the first of each occurrence.
     all_params = set(params)
     if len(all_params) < len(params):
@@ -239,42 +211,78 @@
                 all_params.remove(t)
         params = new_params
         assert not all_params, all_params
+    return params
+
+
+def _remove_dups_flatten(parameters):
+    """An internal helper for Union creation and substitution: flatten Unions
+    among parameters, then remove duplicates.
+    """
+    # Flatten out Union[Union[...], ...].
+    params = []
+    for p in parameters:
+        if isinstance(p, _UnionGenericAlias):
+            params.extend(p.__args__)
+        elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
+            params.extend(p[1:])
+        else:
+            params.append(p)
+
+    return tuple(_deduplicate(params))
+
+
+def _flatten_literal_params(parameters):
+    """An internal helper for Literal creation: flatten Literals among parameters"""
+    params = []
+    for p in parameters:
+        if isinstance(p, _LiteralGenericAlias):
+            params.extend(p.__args__)
+        else:
+            params.append(p)
     return tuple(params)
 
 
 _cleanups = []
 
 
-def _tp_cache(func):
+def _tp_cache(func=None, /, *, typed=False):
     """Internal wrapper caching __getitem__ of generic types with a fallback to
     original function for non-hashable arguments.
     """
-    cached = functools.lru_cache()(func)
-    _cleanups.append(cached.cache_clear)
+    def decorator(func):
+        cached = functools.lru_cache(typed=typed)(func)
+        _cleanups.append(cached.cache_clear)
 
-    @functools.wraps(func)
-    def inner(*args, **kwds):
-        try:
-            return cached(*args, **kwds)
-        except TypeError:
-            pass  # All real errors (not unhashable args) are raised below.
-        return func(*args, **kwds)
-    return inner
+        @functools.wraps(func)
+        def inner(*args, **kwds):
+            try:
+                return cached(*args, **kwds)
+            except TypeError:
+                pass  # All real errors (not unhashable args) are raised below.
+            return func(*args, **kwds)
+        return inner
 
+    if func is not None:
+        return decorator(func)
 
-def _eval_type(t, globalns, localns):
-    """Evaluate all forward reverences in the given type t.
+    return decorator
+
+def _eval_type(t, globalns, localns, recursive_guard=frozenset()):
+    """Evaluate all forward references in the given type t.
     For use of globalns and localns see the docstring for get_type_hints().
+    recursive_guard is used to prevent infinite recursion
+    with recursive ForwardRef.
     """
     if isinstance(t, ForwardRef):
-        return t._evaluate(globalns, localns)
-    if isinstance(t, _GenericAlias):
-        ev_args = tuple(_eval_type(a, globalns, localns) for a in t.__args__)
+        return t._evaluate(globalns, localns, recursive_guard)
+    if isinstance(t, (_GenericAlias, GenericAlias)):
+        ev_args = tuple(_eval_type(a, globalns, localns, recursive_guard) for a in t.__args__)
         if ev_args == t.__args__:
             return t
-        res = t.copy_with(ev_args)
-        res._special = t._special
-        return res
+        if isinstance(t, GenericAlias):
+            return GenericAlias(t.__origin__, ev_args)
+        else:
+            return t.copy_with(ev_args)
     return t
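
The flatten/deduplicate helpers above drive the familiar Union normalization
rules. A short sketch of the observable behavior, assuming Python 3.9:

    from typing import Optional, Union

    print(Union[int, Union[str, int]])        # typing.Union[int, str]
    print(Union[int])                         # <class 'int'> -- collapses
    print(Optional[int] == Union[int, None])  # True
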
 
 
@@ -289,6 +297,7 @@
 
 class _Immutable:
     """Mixin to indicate that object should not be copied."""
+    __slots__ = ()
 
     def __copy__(self):
         return self
@@ -297,37 +306,18 @@
         return self
 
 
-class _SpecialForm(_Final, _Immutable, _root=True):
-    """Internal indicator of special typing constructs.
-    See _doc instance attribute for specific docs.
-    """
+# Internal indicator of special typing constructs.
+# See __doc__ instance attribute for specific docs.
+class _SpecialForm(_Final, _root=True):
+    __slots__ = ('_name', '__doc__', '_getitem')
 
-    __slots__ = ('_name', '_doc')
+    def __init__(self, getitem):
+        self._getitem = getitem
+        self._name = getitem.__name__
+        self.__doc__ = getitem.__doc__
 
-    def __new__(cls, *args, **kwds):
-        """Constructor.
-
-        This only exists to give a better error message in case
-        someone tries to subclass a special typing object (not a good idea).
-        """
-        if (len(args) == 3 and
-                isinstance(args[0], str) and
-                isinstance(args[1], tuple)):
-            # Close enough.
-            raise TypeError(f"Cannot subclass {cls!r}")
-        return super().__new__(cls)
-
-    def __init__(self, name, doc):
-        self._name = name
-        self._doc = doc
-
-    def __eq__(self, other):
-        if not isinstance(other, _SpecialForm):
-            return NotImplemented
-        return self._name == other._name
-
-    def __hash__(self):
-        return hash((self._name,))
+    def __mro_entries__(self, bases):
+        raise TypeError(f"Cannot subclass {self!r}")
 
     def __repr__(self):
         return 'typing.' + self._name
@@ -346,31 +336,17 @@
 
     @_tp_cache
     def __getitem__(self, parameters):
-        if self._name in ('ClassVar', 'Final'):
-            item = _type_check(parameters, f'{self._name} accepts only single type.')
-            return _GenericAlias(self, (item,))
-        if self._name == 'Union':
-            if parameters == ():
-                raise TypeError("Cannot take a Union of no types.")
-            if not isinstance(parameters, tuple):
-                parameters = (parameters,)
-            msg = "Union[arg, ...]: each arg must be a type."
-            parameters = tuple(_type_check(p, msg) for p in parameters)
-            parameters = _remove_dups_flatten(parameters)
-            if len(parameters) == 1:
-                return parameters[0]
-            return _GenericAlias(self, parameters)
-        if self._name == 'Optional':
-            arg = _type_check(parameters, "Optional[t] requires a single type.")
-            return Union[arg, type(None)]
-        if self._name == 'Literal':
-            # There is no '_type_check' call because arguments to Literal[...] are
-            # values, not types.
-            return _GenericAlias(self, parameters)
-        raise TypeError(f"{self} is not subscriptable")
+        return self._getitem(self, parameters)
 
 
-Any = _SpecialForm('Any', doc=
+class _LiteralSpecialForm(_SpecialForm, _root=True):
+    @_tp_cache(typed=True)
+    def __getitem__(self, parameters):
+        return self._getitem(self, parameters)
+
+
+@_SpecialForm
+def Any(self, parameters):
     """Special type indicating an unconstrained type.
 
     - Any is compatible with every type.
@@ -380,9 +356,11 @@
     Note that all the above statements are true from the point of view of
     static type checkers. At runtime, Any should not be used with instance
     or class checks.
-    """)
+    """
+    raise TypeError(f"{self} is not subscriptable")
 
-NoReturn = _SpecialForm('NoReturn', doc=
+@_SpecialForm
+def NoReturn(self, parameters):
     """Special type indicating functions that never return.
     Example::
 
@@ -393,9 +371,11 @@
 
     This type is invalid in other positions, e.g., ``List[NoReturn]``
     will fail in static type checkers.
-    """)
+    """
+    raise TypeError(f"{self} is not subscriptable")
 
-ClassVar = _SpecialForm('ClassVar', doc=
+@_SpecialForm
+def ClassVar(self, parameters):
     """Special type construct to mark class variables.
 
     An annotation wrapped in ClassVar indicates that a given
@@ -410,9 +390,12 @@
 
     Note that ClassVar is not a class itself, and should not
     be used with isinstance() or issubclass().
-    """)
+    """
+    item = _type_check(parameters, f'{self} accepts only single type.')
+    return _GenericAlias(self, (item,))
 
-Final = _SpecialForm('Final', doc=
+@_SpecialForm
+def Final(self, parameters):
     """Special typing construct to indicate final names to type checkers.
 
     A final name cannot be re-assigned or overridden in a subclass.
@@ -428,9 +411,12 @@
           TIMEOUT = 1  # Error reported by type checker
 
     There is no runtime checking of these properties.
-    """)
+    """
+    item = _type_check(parameters, f'{self} accepts only single type.')
+    return _GenericAlias(self, (item,))
 
-Union = _SpecialForm('Union', doc=
+@_SpecialForm
+def Union(self, parameters):
     """Union type; Union[X, Y] means either X or Y.
 
     To define a union, use e.g. Union[int, str].  Details:
@@ -455,15 +441,29 @@
 
     - You cannot subclass or instantiate a union.
     - You can use Optional[X] as a shorthand for Union[X, None].
-    """)
+    """
+    if parameters == ():
+        raise TypeError("Cannot take a Union of no types.")
+    if not isinstance(parameters, tuple):
+        parameters = (parameters,)
+    msg = "Union[arg, ...]: each arg must be a type."
+    parameters = tuple(_type_check(p, msg) for p in parameters)
+    parameters = _remove_dups_flatten(parameters)
+    if len(parameters) == 1:
+        return parameters[0]
+    return _UnionGenericAlias(self, parameters)
 
-Optional = _SpecialForm('Optional', doc=
+@_SpecialForm
+def Optional(self, parameters):
     """Optional type.
 
     Optional[X] is equivalent to Union[X, None].
-    """)
+    """
+    arg = _type_check(parameters, f"{self} requires a single type.")
+    return Union[arg, type(None)]
 
-Literal = _SpecialForm('Literal', doc=
+@_LiteralSpecialForm
+def Literal(self, parameters):
     """Special typing form to define literal types (a.k.a. value types).
 
     This form can be used to indicate to type checkers that the corresponding
@@ -480,10 +480,23 @@
       open_helper('/some/path', 'r')  # Passes type check
       open_helper('/other/path', 'typo')  # Error in type checker
 
-   Literal[...] cannot be subclassed. At runtime, an arbitrary value
-   is allowed as type argument to Literal[...], but type checkers may
-   impose restrictions.
-    """)
+    Literal[...] cannot be subclassed. At runtime, an arbitrary value
+    is allowed as type argument to Literal[...], but type checkers may
+    impose restrictions.
+    """
+    # There is no '_type_check' call because arguments to Literal[...] are
+    # values, not types.
+    if not isinstance(parameters, tuple):
+        parameters = (parameters,)
+
+    parameters = _flatten_literal_params(parameters)
+
+    try:
+        parameters = tuple(p for p, _ in _deduplicate(list(_value_and_type_iter(parameters))))
+    except TypeError:  # unhashable parameters
+        pass
+
+    return _LiteralGenericAlias(self, parameters)
 
 
 class ForwardRef(_Final, _root=True):
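
A sketch of what the new Literal machinery above does at runtime, assuming
Python 3.9: nested Literals are flattened, and deduplication and equality
compare (value, type) pairs, so equal-but-differently-typed values stay
distinct.

    from typing import Literal

    print(Literal[1, 2, Literal[3, 4]])   # typing.Literal[1, 2, 3, 4]
    print(Literal[1, True])               # both kept: (1, int) != (True, bool)
    print(Literal[1] == Literal[True])    # False
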
@@ -506,7 +519,9 @@
         self.__forward_value__ = None
         self.__forward_is_argument__ = is_argument
 
-    def _evaluate(self, globalns, localns):
+    def _evaluate(self, globalns, localns, recursive_guard):
+        if self.__forward_arg__ in recursive_guard:
+            return self
         if not self.__forward_evaluated__ or localns is not globalns:
             if globalns is None and localns is None:
                 globalns = localns = {}
@@ -514,10 +529,14 @@
                 globalns = localns
             elif localns is None:
                 localns = globalns
-            self.__forward_value__ = _type_check(
+            type_ = _type_check(
                 eval(self.__forward_code__, globalns, localns),
                 "Forward references must evaluate to types.",
-                is_argument=self.__forward_is_argument__)
+                is_argument=self.__forward_is_argument__,
+            )
+            self.__forward_value__ = _eval_type(
+                type_, globalns, localns, recursive_guard | {self.__forward_arg__}
+            )
             self.__forward_evaluated__ = True
         return self.__forward_value__
 
@@ -581,7 +600,7 @@
     """
 
     __slots__ = ('__name__', '__bound__', '__constraints__',
-                 '__covariant__', '__contravariant__')
+                 '__covariant__', '__contravariant__', '__dict__')
 
     def __init__(self, name, *constraints, bound=None,
                  covariant=False, contravariant=False):
@@ -620,6 +639,68 @@
         return self.__name__
 
 
+def _is_dunder(attr):
+    return attr.startswith('__') and attr.endswith('__')
+
+class _BaseGenericAlias(_Final, _root=True):
+    """The central part of internal API.
+
+    This represents a generic version of type 'origin' with type arguments 'params'.
+    There are two kind of these aliases: user defined and special. The special ones
+    are wrappers around builtin collections and ABCs in collections.abc. These must
+    have 'name' always set. If 'inst' is False, then the alias can't be instantiated,
+    this is used by e.g. typing.List and typing.Dict.
+    """
+    def __init__(self, origin, *, inst=True, name=None):
+        self._inst = inst
+        self._name = name
+        self.__origin__ = origin
+        self.__slots__ = None  # This is not documented.
+
+    def __call__(self, *args, **kwargs):
+        if not self._inst:
+            raise TypeError(f"Type {self._name} cannot be instantiated; "
+                            f"use {self.__origin__.__name__}() instead")
+        result = self.__origin__(*args, **kwargs)
+        try:
+            result.__orig_class__ = self
+        except AttributeError:
+            pass
+        return result
+
+    def __mro_entries__(self, bases):
+        res = []
+        if self.__origin__ not in bases:
+            res.append(self.__origin__)
+        i = bases.index(self)
+        for b in bases[i+1:]:
+            if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic):
+                break
+        else:
+            res.append(Generic)
+        return tuple(res)
+
+    def __getattr__(self, attr):
+        # We are careful here for the sake of copy and pickle.
+        # Also, for simplicity, we just don't relay all dunder names.
+        if '__origin__' in self.__dict__ and not _is_dunder(attr):
+            return getattr(self.__origin__, attr)
+        raise AttributeError(attr)
+
+    def __setattr__(self, attr, val):
+        if _is_dunder(attr) or attr in ('_name', '_inst', '_nparams'):
+            super().__setattr__(attr, val)
+        else:
+            setattr(self.__origin__, attr, val)
+
+    def __instancecheck__(self, obj):
+        return self.__subclasscheck__(type(obj))
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Subscripted generics cannot be used with"
+                        " class and instance checks")
+
+
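
The __instancecheck__/__subclasscheck__ pair above preserves the rule that
subscripted generics refuse runtime checks, while bare special aliases still
proxy to their origin class. A quick sketch, assuming Python 3.9:

    from typing import List

    print(isinstance([], List))    # True -- proxies to list

    try:
        isinstance([], List[int])  # subscripted: rejected
    except TypeError as exc:
        print(exc)
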
 # Special typing constructs Union, Optional, Generic, Callable and Tuple
 # use three special attributes for internal bookkeeping of generic types:
 # * __parameters__ is a tuple of unique free type parameters of a generic
@@ -631,49 +712,27 @@
 #   e.g., Dict[T, int].__args__ == (T, int).
 
 
-# Mapping from non-generic type names that have a generic alias in typing
-# but with a different name.
-_normalize_alias = {'list': 'List',
-                    'tuple': 'Tuple',
-                    'dict': 'Dict',
-                    'set': 'Set',
-                    'frozenset': 'FrozenSet',
-                    'deque': 'Deque',
-                    'defaultdict': 'DefaultDict',
-                    'type': 'Type',
-                    'Set': 'AbstractSet'}
-
-def _is_dunder(attr):
-    return attr.startswith('__') and attr.endswith('__')
-
-
-class _GenericAlias(_Final, _root=True):
-    """The central part of internal API.
-
-    This represents a generic version of type 'origin' with type arguments 'params'.
-    There are two kind of these aliases: user defined and special. The special ones
-    are wrappers around builtin collections and ABCs in collections.abc. These must
-    have 'name' always set. If 'inst' is False, then the alias can't be instantiated,
-    this is used by e.g. typing.List and typing.Dict.
-    """
-    def __init__(self, origin, params, *, inst=True, special=False, name=None):
-        self._inst = inst
-        self._special = special
-        if special and name is None:
-            orig_name = origin.__name__
-            name = _normalize_alias.get(orig_name, orig_name)
-        self._name = name
+class _GenericAlias(_BaseGenericAlias, _root=True):
+    def __init__(self, origin, params, *, inst=True, name=None):
+        super().__init__(origin, inst=inst, name=name)
         if not isinstance(params, tuple):
             params = (params,)
-        self.__origin__ = origin
         self.__args__ = tuple(... if a is _TypingEllipsis else
                               () if a is _TypingEmpty else
                               a for a in params)
         self.__parameters__ = _collect_type_vars(params)
-        self.__slots__ = None  # This is not documented.
         if not name:
             self.__module__ = origin.__module__
 
+    def __eq__(self, other):
+        if not isinstance(other, _GenericAlias):
+            return NotImplemented
+        return (self.__origin__ == other.__origin__
+                and self.__args__ == other.__args__)
+
+    def __hash__(self):
+        return hash((self.__origin__, self.__args__))
+
     @_tp_cache
     def __getitem__(self, params):
         if self.__origin__ in (Generic, Protocol):
@@ -683,125 +742,119 @@
             params = (params,)
         msg = "Parameters to generic types must be types."
         params = tuple(_type_check(p, msg) for p in params)
-        _check_generic(self, params)
-        return _subs_tvars(self, self.__parameters__, params)
+        _check_generic(self, params, len(self.__parameters__))
+
+        subst = dict(zip(self.__parameters__, params))
+        new_args = []
+        for arg in self.__args__:
+            if isinstance(arg, TypeVar):
+                arg = subst[arg]
+            elif isinstance(arg, (_GenericAlias, GenericAlias)):
+                subparams = arg.__parameters__
+                if subparams:
+                    subargs = tuple(subst[x] for x in subparams)
+                    arg = arg[subargs]
+            new_args.append(arg)
+        return self.copy_with(tuple(new_args))
 
     def copy_with(self, params):
-        # We don't copy self._special.
-        return _GenericAlias(self.__origin__, params, name=self._name, inst=self._inst)
+        return self.__class__(self.__origin__, params, name=self._name, inst=self._inst)
 
     def __repr__(self):
-        if (self._name != 'Callable' or
-                len(self.__args__) == 2 and self.__args__[0] is Ellipsis):
-            if self._name:
-                name = 'typing.' + self._name
-            else:
-                name = _type_repr(self.__origin__)
-            if not self._special:
-                args = f'[{", ".join([_type_repr(a) for a in self.__args__])}]'
-            else:
-                args = ''
-            return (f'{name}{args}')
-        if self._special:
-            return 'typing.Callable'
-        return (f'typing.Callable'
-                f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
-                f'{_type_repr(self.__args__[-1])}]')
+        if self._name:
+            name = 'typing.' + self._name
+        else:
+            name = _type_repr(self.__origin__)
+        args = ", ".join([_type_repr(a) for a in self.__args__])
+        return f'{name}[{args}]'
 
-    def __eq__(self, other):
-        if not isinstance(other, _GenericAlias):
-            return NotImplemented
-        if self.__origin__ != other.__origin__:
-            return False
-        if self.__origin__ is Union and other.__origin__ is Union:
-            return frozenset(self.__args__) == frozenset(other.__args__)
-        return self.__args__ == other.__args__
-
-    def __hash__(self):
-        if self.__origin__ is Union:
-            return hash((Union, frozenset(self.__args__)))
-        return hash((self.__origin__, self.__args__))
-
-    def __call__(self, *args, **kwargs):
-        if not self._inst:
-            raise TypeError(f"Type {self._name} cannot be instantiated; "
-                            f"use {self._name.lower()}() instead")
-        result = self.__origin__(*args, **kwargs)
-        try:
-            result.__orig_class__ = self
-        except AttributeError:
-            pass
-        return result
+    def __reduce__(self):
+        if self._name:
+            origin = globals()[self._name]
+        else:
+            origin = self.__origin__
+        args = tuple(self.__args__)
+        if len(args) == 1 and not isinstance(args[0], tuple):
+            args, = args
+        return operator.getitem, (origin, args)
 
     def __mro_entries__(self, bases):
         if self._name:  # generic version of an ABC or built-in class
-            res = []
-            if self.__origin__ not in bases:
-                res.append(self.__origin__)
-            i = bases.index(self)
-            if not any(isinstance(b, _GenericAlias) or issubclass(b, Generic)
-                       for b in bases[i+1:]):
-                res.append(Generic)
-            return tuple(res)
+            return super().__mro_entries__(bases)
         if self.__origin__ is Generic:
             if Protocol in bases:
                 return ()
             i = bases.index(self)
             for b in bases[i+1:]:
-                if isinstance(b, _GenericAlias) and b is not self:
+                if isinstance(b, _BaseGenericAlias) and b is not self:
                     return ()
         return (self.__origin__,)
 
-    def __getattr__(self, attr):
-        # We are careful for copy and pickle.
-        # Also for simplicity we just don't relay all dunder names
-        if '__origin__' in self.__dict__ and not _is_dunder(attr):
-            return getattr(self.__origin__, attr)
-        raise AttributeError(attr)
 
-    def __setattr__(self, attr, val):
-        if _is_dunder(attr) or attr in ('_name', '_inst', '_special'):
-            super().__setattr__(attr, val)
+# _nparams is the number of accepted parameters, e.g. 0 for Hashable,
+# 1 for List and 2 for Dict.  It may be -1 if a variable number of
+# parameters is accepted (needs custom __getitem__).
+
+class _SpecialGenericAlias(_BaseGenericAlias, _root=True):
+    def __init__(self, origin, nparams, *, inst=True, name=None):
+        if name is None:
+            name = origin.__name__
+        super().__init__(origin, inst=inst, name=name)
+        self._nparams = nparams
+        if origin.__module__ == 'builtins':
+            self.__doc__ = f'A generic version of {origin.__qualname__}.'
         else:
-            setattr(self.__origin__, attr, val)
+            self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.'
 
-    def __instancecheck__(self, obj):
-        return self.__subclasscheck__(type(obj))
+    @_tp_cache
+    def __getitem__(self, params):
+        if not isinstance(params, tuple):
+            params = (params,)
+        msg = "Parameters to generic types must be types."
+        params = tuple(_type_check(p, msg) for p in params)
+        _check_generic(self, params, self._nparams)
+        return self.copy_with(params)
+
+    def copy_with(self, params):
+        return _GenericAlias(self.__origin__, params,
+                             name=self._name, inst=self._inst)
+
+    def __repr__(self):
+        return 'typing.' + self._name
 
     def __subclasscheck__(self, cls):
-        if self._special:
-            if not isinstance(cls, _GenericAlias):
-                return issubclass(cls, self.__origin__)
-            if cls._special:
-                return issubclass(cls.__origin__, self.__origin__)
-        raise TypeError("Subscripted generics cannot be used with"
-                        " class and instance checks")
+        if isinstance(cls, _SpecialGenericAlias):
+            return issubclass(cls.__origin__, self.__origin__)
+        if not isinstance(cls, _GenericAlias):
+            return issubclass(cls, self.__origin__)
+        return super().__subclasscheck__(cls)
 
     def __reduce__(self):
-        if self._special:
-            return self._name
-
-        if self._name:
-            origin = globals()[self._name]
-        else:
-            origin = self.__origin__
-        if (origin is Callable and
-            not (len(self.__args__) == 2 and self.__args__[0] is Ellipsis)):
-            args = list(self.__args__[:-1]), self.__args__[-1]
-        else:
-            args = tuple(self.__args__)
-            if len(args) == 1 and not isinstance(args[0], tuple):
-                args, = args
-        return operator.getitem, (origin, args)
+        return self._name
 
 
-class _VariadicGenericAlias(_GenericAlias, _root=True):
-    """Same as _GenericAlias above but for variadic aliases. Currently,
-    this is used only by special internal aliases: Tuple and Callable.
-    """
+class _CallableGenericAlias(_GenericAlias, _root=True):
+    def __repr__(self):
+        assert self._name == 'Callable'
+        if len(self.__args__) == 2 and self.__args__[0] is Ellipsis:
+            return super().__repr__()
+        return (f'typing.Callable'
+                f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
+                f'{_type_repr(self.__args__[-1])}]')
+
+    def __reduce__(self):
+        args = self.__args__
+        if not (len(args) == 2 and args[0] is ...):
+            args = list(args[:-1]), args[-1]
+        return operator.getitem, (Callable, args)
+
+
+class _CallableType(_SpecialGenericAlias, _root=True):
+    def copy_with(self, params):
+        return _CallableGenericAlias(self.__origin__, params,
+                                     name=self._name, inst=self._inst)
+
     def __getitem__(self, params):
-        if self._name != 'Callable' or not self._special:
-            return self.__getitem_inner__(params)
         if not isinstance(params, tuple) or len(params) != 2:
             raise TypeError("Callable must be used as "
                             "Callable[[arg, ...], result].")
@@ -817,29 +870,69 @@
 
     @_tp_cache
     def __getitem_inner__(self, params):
-        if self.__origin__ is tuple and self._special:
-            if params == ():
-                return self.copy_with((_TypingEmpty,))
-            if not isinstance(params, tuple):
-                params = (params,)
-            if len(params) == 2 and params[1] is ...:
-                msg = "Tuple[t, ...]: t must be a type."
-                p = _type_check(params[0], msg)
-                return self.copy_with((p, _TypingEllipsis))
-            msg = "Tuple[t0, t1, ...]: each t must be a type."
-            params = tuple(_type_check(p, msg) for p in params)
-            return self.copy_with(params)
-        if self.__origin__ is collections.abc.Callable and self._special:
-            args, result = params
-            msg = "Callable[args, result]: result must be a type."
-            result = _type_check(result, msg)
-            if args is Ellipsis:
-                return self.copy_with((_TypingEllipsis, result))
-            msg = "Callable[[arg, ...], result]: each arg must be a type."
-            args = tuple(_type_check(arg, msg) for arg in args)
-            params = args + (result,)
-            return self.copy_with(params)
-        return super().__getitem__(params)
+        args, result = params
+        msg = "Callable[args, result]: result must be a type."
+        result = _type_check(result, msg)
+        if args is Ellipsis:
+            return self.copy_with((_TypingEllipsis, result))
+        msg = "Callable[[arg, ...], result]: each arg must be a type."
+        args = tuple(_type_check(arg, msg) for arg in args)
+        params = args + (result,)
+        return self.copy_with(params)
+
+
+class _TupleType(_SpecialGenericAlias, _root=True):
+    @_tp_cache
+    def __getitem__(self, params):
+        if params == ():
+            return self.copy_with((_TypingEmpty,))
+        if not isinstance(params, tuple):
+            params = (params,)
+        if len(params) == 2 and params[1] is ...:
+            msg = "Tuple[t, ...]: t must be a type."
+            p = _type_check(params[0], msg)
+            return self.copy_with((p, _TypingEllipsis))
+        msg = "Tuple[t0, t1, ...]: each t must be a type."
+        params = tuple(_type_check(p, msg) for p in params)
+        return self.copy_with(params)
+
+
+class _UnionGenericAlias(_GenericAlias, _root=True):
+    def copy_with(self, params):
+        return Union[params]
+
+    def __eq__(self, other):
+        if not isinstance(other, _UnionGenericAlias):
+            return NotImplemented
+        return set(self.__args__) == set(other.__args__)
+
+    def __hash__(self):
+        return hash(frozenset(self.__args__))
+
+    def __repr__(self):
+        args = self.__args__
+        if len(args) == 2:
+            if args[0] is type(None):
+                return f'typing.Optional[{_type_repr(args[1])}]'
+            elif args[1] is type(None):
+                return f'typing.Optional[{_type_repr(args[0])}]'
+        return super().__repr__()
+
+
+def _value_and_type_iter(parameters):
+    return ((p, type(p)) for p in parameters)
+
+
+class _LiteralGenericAlias(_GenericAlias, _root=True):
+
+    def __eq__(self, other):
+        if not isinstance(other, _LiteralGenericAlias):
+            return NotImplemented
+
+        return set(_value_and_type_iter(self.__args__)) == set(_value_and_type_iter(other.__args__))
+
+    def __hash__(self):
+        return hash(frozenset(_value_and_type_iter(self.__args__)))
 
 
 class Generic:
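
_UnionGenericAlias above keeps Union's set-like identity and the Optional
shorthand in reprs. A quick sketch, assuming Python 3.9:

    from typing import Union

    print(Union[int, str] == Union[str, int])  # True -- order-insensitive
    print(repr(Union[int, None]))              # typing.Optional[int]
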
@@ -865,16 +958,6 @@
     __slots__ = ()
     _is_protocol = False
 
-    def __new__(cls, *args, **kwds):
-        if cls in (Generic, Protocol):
-            raise TypeError(f"Type {cls.__name__} cannot be instantiated; "
-                            "it can be used only as a base class")
-        if super().__new__ is object.__new__ and cls.__init__ is not object.__init__:
-            obj = super().__new__(cls)
-        else:
-            obj = super().__new__(cls, *args, **kwds)
-        return obj
-
     @_tp_cache
     def __class_getitem__(cls, params):
         if not isinstance(params, tuple):
@@ -894,7 +977,7 @@
                     f"Parameters to {cls.__name__}[...] must all be unique")
         else:
             # Subscripting a regular Generic subclass.
-            _check_generic(cls, params)
+            _check_generic(cls, params, len(cls.__parameters__))
         return _GenericAlias(cls, params)
 
     def __init_subclass__(cls, *args, **kwargs):
@@ -949,7 +1032,7 @@
 
 _SPECIAL_NAMES = ['__abstractmethods__', '__annotations__', '__dict__', '__doc__',
                   '__init__', '__module__', '__new__', '__slots__',
-                  '__subclasshook__', '__weakref__']
+                  '__subclasshook__', '__weakref__', '__class_getitem__']
 
 # These special attributes will be not collected as protocol members.
 EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS + _SPECIAL_NAMES + ['_MutableMapping__marker']
@@ -983,7 +1066,7 @@
 
 
 def _allow_reckless_class_cheks():
-    """Allow instnance and class checks for special stdlib modules.
+    """Allow instance and class checks for special stdlib modules.
 
     The abc and functools modules indiscriminately call isinstance() and
     issubclass() on the whole MRO of a user class, which may contain protocols.
@@ -1121,6 +1204,100 @@
         cls.__init__ = _no_init
 
 
+class _AnnotatedAlias(_GenericAlias, _root=True):
+    """Runtime representation of an annotated type.
+
+    At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+    with extra annotations. The alias behaves like a normal typing alias,
+    instantiating is the same as instantiating the underlying type, binding
+    it to types is also the same.
+    """
+    def __init__(self, origin, metadata):
+        if isinstance(origin, _AnnotatedAlias):
+            metadata = origin.__metadata__ + metadata
+            origin = origin.__origin__
+        super().__init__(origin, origin)
+        self.__metadata__ = metadata
+
+    def copy_with(self, params):
+        assert len(params) == 1
+        new_type = params[0]
+        return _AnnotatedAlias(new_type, self.__metadata__)
+
+    def __repr__(self):
+        return "typing.Annotated[{}, {}]".format(
+            _type_repr(self.__origin__),
+            ", ".join(repr(a) for a in self.__metadata__)
+        )
+
+    def __reduce__(self):
+        return operator.getitem, (
+            Annotated, (self.__origin__,) + self.__metadata__
+        )
+
+    def __eq__(self, other):
+        if not isinstance(other, _AnnotatedAlias):
+            return NotImplemented
+        return (self.__origin__ == other.__origin__
+                and self.__metadata__ == other.__metadata__)
+
+    def __hash__(self):
+        return hash((self.__origin__, self.__metadata__))
+
+
+class Annotated:
+    """Add context specific metadata to a type.
+
+    Example: Annotated[int, runtime_check.Unsigned] indicates to the
+    hypothetical runtime_check module that this type is an unsigned int.
+    Every other consumer of this type can ignore this metadata and treat
+    this type as int.
+
+    The first argument to Annotated must be a valid type.
+
+    Details:
+
+    - It's an error to call `Annotated` with fewer than two arguments.
+    - Nested Annotated are flattened::
+
+        Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+    - Instantiating an annotated type is equivalent to instantiating the
+    underlying type::
+
+        Annotated[C, Ann1](5) == C(5)
+
+    - Annotated can be used as a generic type alias::
+
+        Optimized = Annotated[T, runtime.Optimize()]
+        Optimized[int] == Annotated[int, runtime.Optimize()]
+
+        OptimizedList = Annotated[List[T], runtime.Optimize()]
+        OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwargs):
+        raise TypeError("Type Annotated cannot be instantiated.")
+
+    @_tp_cache
+    def __class_getitem__(cls, params):
+        if not isinstance(params, tuple) or len(params) < 2:
+            raise TypeError("Annotated[...] should be used "
+                            "with at least two arguments (a type and an "
+                            "annotation).")
+        msg = "Annotated[t, ...]: t must be a type."
+        origin = _type_check(params[0], msg)
+        metadata = tuple(params[1:])
+        return _AnnotatedAlias(origin, metadata)
+
+    def __init_subclass__(cls, *args, **kwargs):
+        raise TypeError(
+            "Cannot subclass {}.Annotated".format(cls.__module__)
+        )
+
+
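
A sketch of the new Annotated form working together with the include_extras
flag added to get_type_hints further down, assuming Python 3.9:

    from typing import Annotated, get_type_hints

    def scale(x: Annotated[int, "unit: metres"]) -> int:
        return x * 1000

    print(get_type_hints(scale))
    # {'x': <class 'int'>, 'return': <class 'int'>} -- metadata stripped
    print(get_type_hints(scale, include_extras=True))
    # {'x': typing.Annotated[int, 'unit: metres'], 'return': <class 'int'>}
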
 def runtime_checkable(cls):
     """Mark a protocol class as a runtime protocol.
 
@@ -1182,12 +1359,13 @@
                   WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
 
 
-def get_type_hints(obj, globalns=None, localns=None):
+def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
     """Return type hints for an object.
 
     This is often the same as obj.__annotations__, but it handles
-    forward references encoded as string literals, and if necessary
-    adds Optional[t] if a default value equal to None is set.
+    forward references encoded as string literals, adds Optional[t] if a
+    default value equal to None is set and recursively replaces all
+    'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
 
     The argument may be a module, class, method, or function. The annotations
     are returned as a dictionary. For classes, annotations include also
@@ -1231,7 +1409,7 @@
                     value = ForwardRef(value, is_argument=False)
                 value = _eval_type(value, base_globals, localns)
                 hints[name] = value
-        return hints
+        return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
 
     if globalns is None:
         if isinstance(obj, types.ModuleType):
@@ -1265,14 +1443,32 @@
         if name in defaults and defaults[name] is None:
             value = Optional[value]
         hints[name] = value
-    return hints
+    return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
+
+
+def _strip_annotations(t):
+    """Strips the annotations from a given type.
+    """
+    if isinstance(t, _AnnotatedAlias):
+        return _strip_annotations(t.__origin__)
+    if isinstance(t, _GenericAlias):
+        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+        if stripped_args == t.__args__:
+            return t
+        return t.copy_with(stripped_args)
+    if isinstance(t, GenericAlias):
+        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+        if stripped_args == t.__args__:
+            return t
+        return GenericAlias(t.__origin__, stripped_args)
+    return t
 
 
 def get_origin(tp):
     """Get the unsubscripted version of a type.
 
-    This supports generic types, Callable, Tuple, Union, Literal, Final and ClassVar.
-    Return None for unsupported types. Examples::
+    This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+    and Annotated. Return None for unsupported types. Examples::
 
         get_origin(Literal[42]) is Literal
         get_origin(int) is None
@@ -1282,7 +1478,9 @@
         get_origin(Union[T, int]) is Union
         get_origin(List[Tuple[T, T]][int]) == list
     """
-    if isinstance(tp, _GenericAlias):
+    if isinstance(tp, _AnnotatedAlias):
+        return Annotated
+    if isinstance(tp, (_BaseGenericAlias, GenericAlias)):
         return tp.__origin__
     if tp is Generic:
         return Generic
@@ -1300,11 +1498,15 @@
         get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
         get_args(Callable[[], T][int]) == ([], int)
     """
-    if isinstance(tp, _GenericAlias) and not tp._special:
+    if isinstance(tp, _AnnotatedAlias):
+        return (tp.__origin__,) + tp.__metadata__
+    if isinstance(tp, _GenericAlias):
         res = tp.__args__
-        if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
+        if tp.__origin__ is collections.abc.Callable and res[0] is not Ellipsis:
             res = (list(res[:-1]), res[-1])
         return res
+    if isinstance(tp, GenericAlias):
+        return tp.__args__
     return ()
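
get_origin() and get_args() now understand both Annotated and PEP 585
aliases. A quick sketch, assuming Python 3.9:

    from typing import Annotated, get_args, get_origin

    print(get_origin(Annotated[int, "meta"]))  # typing.Annotated
    print(get_args(Annotated[int, "meta"]))    # (<class 'int'>, 'meta')
    print(get_origin(list[int]))               # <class 'list'>
    print(get_args(dict[str, int]))            # (<class 'str'>, <class 'int'>)
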
 
 
@@ -1432,21 +1634,20 @@
 
 
 # Various ABCs mimicking those in collections.abc.
-def _alias(origin, params, inst=True):
-    return _GenericAlias(origin, params, special=True, inst=inst)
+_alias = _SpecialGenericAlias
 
-Hashable = _alias(collections.abc.Hashable, ())  # Not generic.
-Awaitable = _alias(collections.abc.Awaitable, T_co)
-Coroutine = _alias(collections.abc.Coroutine, (T_co, T_contra, V_co))
-AsyncIterable = _alias(collections.abc.AsyncIterable, T_co)
-AsyncIterator = _alias(collections.abc.AsyncIterator, T_co)
-Iterable = _alias(collections.abc.Iterable, T_co)
-Iterator = _alias(collections.abc.Iterator, T_co)
-Reversible = _alias(collections.abc.Reversible, T_co)
-Sized = _alias(collections.abc.Sized, ())  # Not generic.
-Container = _alias(collections.abc.Container, T_co)
-Collection = _alias(collections.abc.Collection, T_co)
-Callable = _VariadicGenericAlias(collections.abc.Callable, (), special=True)
+Hashable = _alias(collections.abc.Hashable, 0)  # Not generic.
+Awaitable = _alias(collections.abc.Awaitable, 1)
+Coroutine = _alias(collections.abc.Coroutine, 3)
+AsyncIterable = _alias(collections.abc.AsyncIterable, 1)
+AsyncIterator = _alias(collections.abc.AsyncIterator, 1)
+Iterable = _alias(collections.abc.Iterable, 1)
+Iterator = _alias(collections.abc.Iterator, 1)
+Reversible = _alias(collections.abc.Reversible, 1)
+Sized = _alias(collections.abc.Sized, 0)  # Not generic.
+Container = _alias(collections.abc.Container, 1)
+Collection = _alias(collections.abc.Collection, 1)
+Callable = _CallableType(collections.abc.Callable, 2)
 Callable.__doc__ = \
     """Callable type; Callable[[int], str] is a function of (int) -> str.
 
@@ -1457,15 +1658,16 @@
     There is no syntax to indicate optional or keyword arguments,
     such function types are rarely used as callback types.
     """
-AbstractSet = _alias(collections.abc.Set, T_co)
-MutableSet = _alias(collections.abc.MutableSet, T)
+AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet')
+MutableSet = _alias(collections.abc.MutableSet, 1)
 # NOTE: Mapping is only covariant in the value type.
-Mapping = _alias(collections.abc.Mapping, (KT, VT_co))
-MutableMapping = _alias(collections.abc.MutableMapping, (KT, VT))
-Sequence = _alias(collections.abc.Sequence, T_co)
-MutableSequence = _alias(collections.abc.MutableSequence, T)
-ByteString = _alias(collections.abc.ByteString, ())  # Not generic
-Tuple = _VariadicGenericAlias(tuple, (), inst=False, special=True)
+Mapping = _alias(collections.abc.Mapping, 2)
+MutableMapping = _alias(collections.abc.MutableMapping, 2)
+Sequence = _alias(collections.abc.Sequence, 1)
+MutableSequence = _alias(collections.abc.MutableSequence, 1)
+ByteString = _alias(collections.abc.ByteString, 0)  # Not generic
+# Tuple accepts variable number of parameters.
+Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')
 Tuple.__doc__ = \
     """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
 
@@ -1475,24 +1677,24 @@
 
     To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
     """
-List = _alias(list, T, inst=False)
-Deque = _alias(collections.deque, T)
-Set = _alias(set, T, inst=False)
-FrozenSet = _alias(frozenset, T_co, inst=False)
-MappingView = _alias(collections.abc.MappingView, T_co)
-KeysView = _alias(collections.abc.KeysView, KT)
-ItemsView = _alias(collections.abc.ItemsView, (KT, VT_co))
-ValuesView = _alias(collections.abc.ValuesView, VT_co)
-ContextManager = _alias(contextlib.AbstractContextManager, T_co)
-AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, T_co)
-Dict = _alias(dict, (KT, VT), inst=False)
-DefaultDict = _alias(collections.defaultdict, (KT, VT))
-OrderedDict = _alias(collections.OrderedDict, (KT, VT))
-Counter = _alias(collections.Counter, T)
-ChainMap = _alias(collections.ChainMap, (KT, VT))
-Generator = _alias(collections.abc.Generator, (T_co, T_contra, V_co))
-AsyncGenerator = _alias(collections.abc.AsyncGenerator, (T_co, T_contra))
-Type = _alias(type, CT_co, inst=False)
+List = _alias(list, 1, inst=False, name='List')
+Deque = _alias(collections.deque, 1, name='Deque')
+Set = _alias(set, 1, inst=False, name='Set')
+FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet')
+MappingView = _alias(collections.abc.MappingView, 1)
+KeysView = _alias(collections.abc.KeysView, 1)
+ItemsView = _alias(collections.abc.ItemsView, 2)
+ValuesView = _alias(collections.abc.ValuesView, 1)
+ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager')
+AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager')
+Dict = _alias(dict, 2, inst=False, name='Dict')
+DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict')
+OrderedDict = _alias(collections.OrderedDict, 2)
+Counter = _alias(collections.Counter, 1)
+ChainMap = _alias(collections.ChainMap, 2)
+Generator = _alias(collections.abc.Generator, 3)
+AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2)
+Type = _alias(type, 1, inst=False, name='Type')
 Type.__doc__ = \
     """A special construct usable to annotate class objects.
 
@@ -1588,50 +1790,41 @@
         pass
 
 
-def _make_nmtuple(name, types):
-    msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
-    types = [(n, _type_check(t, msg)) for n, t in types]
-    nm_tpl = collections.namedtuple(name, [n for n, t in types])
-    # Prior to PEP 526, only _field_types attribute was assigned.
-    # Now __annotations__ are used and _field_types is deprecated (remove in 3.9)
-    nm_tpl.__annotations__ = nm_tpl._field_types = dict(types)
-    try:
-        nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
-    except (AttributeError, ValueError):
-        pass
+def _make_nmtuple(name, types, module, defaults = ()):
+    fields = [n for n, t in types]
+    types = {n: _type_check(t, f"field {n} annotation must be a type")
+             for n, t in types}
+    nm_tpl = collections.namedtuple(name, fields,
+                                    defaults=defaults, module=module)
+    nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
     return nm_tpl
 
 
 # attributes prohibited to set in NamedTuple class syntax
-_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__',
-               '_fields', '_field_defaults', '_field_types',
-               '_make', '_replace', '_asdict', '_source')
+_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
+                         '_fields', '_field_defaults',
+                         '_make', '_replace', '_asdict', '_source'})
 
-_special = ('__module__', '__name__', '__annotations__')
+_special = frozenset({'__module__', '__name__', '__annotations__'})
 
 
 class NamedTupleMeta(type):
 
     def __new__(cls, typename, bases, ns):
-        if ns.get('_root', False):
-            return super().__new__(cls, typename, bases, ns)
+        assert bases[0] is _NamedTuple
         types = ns.get('__annotations__', {})
-        nm_tpl = _make_nmtuple(typename, types.items())
-        defaults = []
-        defaults_dict = {}
+        default_names = []
         for field_name in types:
             if field_name in ns:
-                default_value = ns[field_name]
-                defaults.append(default_value)
-                defaults_dict[field_name] = default_value
-            elif defaults:
-                raise TypeError("Non-default namedtuple field {field_name} cannot "
-                                "follow default field(s) {default_names}"
-                                .format(field_name=field_name,
-                                        default_names=', '.join(defaults_dict.keys())))
-        nm_tpl.__new__.__annotations__ = dict(types)
-        nm_tpl.__new__.__defaults__ = tuple(defaults)
-        nm_tpl._field_defaults = defaults_dict
+                default_names.append(field_name)
+            elif default_names:
+                raise TypeError(f"Non-default namedtuple field {field_name} "
+                                f"cannot follow default field"
+                                f"{'s' if len(default_names) > 1 else ''} "
+                                f"{', '.join(default_names)}")
+        nm_tpl = _make_nmtuple(typename, types.items(),
+                               defaults=[ns[n] for n in default_names],
+                               module=ns['__module__'])
         # update from user namespace without overriding special namedtuple attributes
         for key in ns:
             if key in _prohibited:
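
With the rewritten NamedTupleMeta above, field defaults are collected in
declaration order and handed straight to collections.namedtuple(). A sketch,
assuming Python 3.9:

    from typing import NamedTuple

    class Employee(NamedTuple):
        name: str
        id: int = 0     # becomes a namedtuple() default

    print(Employee('guido'))          # Employee(name='guido', id=0)
    print(Employee._field_defaults)   # {'id': 0}
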
@@ -1641,7 +1834,7 @@
         return nm_tpl
 
 
-class NamedTuple(metaclass=NamedTupleMeta):
+def NamedTuple(typename, fields=None, /, **kwargs):
     """Typed version of namedtuple.
 
     Usage in Python versions >= 3.6::
@@ -1665,99 +1858,81 @@
 
         Employee = NamedTuple('Employee', [('name', str), ('id', int)])
     """
-    _root = True
-
-    def __new__(*args, **kwargs):
-        if not args:
-            raise TypeError('NamedTuple.__new__(): not enough arguments')
-        cls, *args = args  # allow the "cls" keyword be passed
-        if args:
-            typename, *args = args # allow the "typename" keyword be passed
-        elif 'typename' in kwargs:
-            typename = kwargs.pop('typename')
-            import warnings
-            warnings.warn("Passing 'typename' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError("NamedTuple.__new__() missing 1 required positional "
-                            "argument: 'typename'")
-        if args:
-            try:
-                fields, = args # allow the "fields" keyword be passed
-            except ValueError:
-                raise TypeError(f'NamedTuple.__new__() takes from 2 to 3 '
-                                f'positional arguments but {len(args) + 2} '
-                                f'were given') from None
-        elif 'fields' in kwargs and len(kwargs) == 1:
-            fields = kwargs.pop('fields')
-            import warnings
-            warnings.warn("Passing 'fields' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            fields = None
-
-        if fields is None:
-            fields = kwargs.items()
-        elif kwargs:
-            raise TypeError("Either list of fields or keywords"
-                            " can be provided to NamedTuple, not both")
-        return _make_nmtuple(typename, fields)
-    __new__.__text_signature__ = '($cls, typename, fields=None, /, **kwargs)'
-
-
-def _dict_new(cls, /, *args, **kwargs):
-    return dict(*args, **kwargs)
-
-
-def _typeddict_new(cls, typename, fields=None, /, *, total=True, **kwargs):
     if fields is None:
-        fields = kwargs
+        fields = kwargs.items()
     elif kwargs:
-        raise TypeError("TypedDict takes either a dict or keyword arguments,"
-                        " but not both")
-
-    ns = {'__annotations__': dict(fields), '__total__': total}
+        raise TypeError("Either list of fields or keywords"
+                        " can be provided to NamedTuple, not both")
     try:
-        # Setting correct module is necessary to make typed dict classes pickleable.
-        ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+        module = sys._getframe(1).f_globals.get('__name__', '__main__')
     except (AttributeError, ValueError):
-        pass
+        module = None
+    return _make_nmtuple(typename, fields, module=module)
 
-    return _TypedDictMeta(typename, (), ns)
+_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
 
+def _namedtuple_mro_entries(bases):
+    if len(bases) > 1:
+        raise TypeError("Multiple inheritance with NamedTuple is not supported")
+    assert bases[0] is NamedTuple
+    return (_NamedTuple,)
 
-def _check_fails(cls, other):
-    # Typed dicts are only for static structural subtyping.
-    raise TypeError('TypedDict does not support instance and class checks')
+NamedTuple.__mro_entries__ = _namedtuple_mro_entries
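
The class-syntax form still works even though NamedTuple is now a plain
function: PEP 560's __mro_entries__ hook substitutes _NamedTuple (whose
metaclass is NamedTupleMeta) into the bases at class-creation time. A
minimal sketch of the observable behaviour, including the default-ordering
check enforced in NamedTupleMeta.__new__ above:

    from typing import NamedTuple

    class Employee(NamedTuple):      # NamedTuple is a function; subclassing
        name: str                    # works via the __mro_entries__ hook above
        id: int = 3

    e = Employee('Guido')
    assert e.id == 3 and isinstance(e, tuple)

    # Non-default fields may not follow default ones:
    try:
        class Bad(NamedTuple):
            a: int = 0
            b: str                   # TypeError at class-creation time
    except TypeError as exc:
        print(exc)  # Non-default namedtuple field b cannot follow default field a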
 
 
 class _TypedDictMeta(type):
     def __new__(cls, name, bases, ns, total=True):
         """Create new typed dict class object.
 
-        This method is called directly when TypedDict is subclassed,
-        or via _typeddict_new when TypedDict is instantiated. This way
+        This method is called when TypedDict is subclassed,
+        or when TypedDict is instantiated. This way
         TypedDict supports all three syntax forms described in its docstring.
-        Subclasses and instances of TypedDict return actual dictionaries
-        via _dict_new.
+        Subclasses and instances of TypedDict return actual dictionaries.
         """
-        ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
-        tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
-
-        anns = ns.get('__annotations__', {})
-        msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
-        anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
         for base in bases:
-            anns.update(base.__dict__.get('__annotations__', {}))
-        tp_dict.__annotations__ = anns
+            if type(base) is not _TypedDictMeta:
+                raise TypeError('cannot inherit from both a TypedDict type '
+                                'and a non-TypedDict base class')
+        tp_dict = type.__new__(_TypedDictMeta, name, (dict,), ns)
+
+        annotations = {}
+        own_annotations = ns.get('__annotations__', {})
+        own_annotation_keys = set(own_annotations.keys())
+        msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+        own_annotations = {
+            n: _type_check(tp, msg) for n, tp in own_annotations.items()
+        }
+        required_keys = set()
+        optional_keys = set()
+
+        for base in bases:
+            annotations.update(base.__dict__.get('__annotations__', {}))
+            required_keys.update(base.__dict__.get('__required_keys__', ()))
+            optional_keys.update(base.__dict__.get('__optional_keys__', ()))
+
+        annotations.update(own_annotations)
+        if total:
+            required_keys.update(own_annotation_keys)
+        else:
+            optional_keys.update(own_annotation_keys)
+
+        tp_dict.__annotations__ = annotations
+        tp_dict.__required_keys__ = frozenset(required_keys)
+        tp_dict.__optional_keys__ = frozenset(optional_keys)
         if not hasattr(tp_dict, '__total__'):
             tp_dict.__total__ = total
         return tp_dict
 
-    __instancecheck__ = __subclasscheck__ = _check_fails
+    __call__ = dict  # static method
+
+    def __subclasscheck__(cls, other):
+        # Typed dicts are only for static structural subtyping.
+        raise TypeError('TypedDict does not support instance and class checks')
+
+    __instancecheck__ = __subclasscheck__
 
 
-class TypedDict(dict, metaclass=_TypedDictMeta):
+def TypedDict(typename, fields=None, /, *, total=True, **kwargs):
     """A simple typed namespace. At runtime it is equivalent to a plain dict.
 
     TypedDict creates a dictionary type that expects all of its
@@ -1776,8 +1951,9 @@
 
         assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
 
-    The type info can be accessed via Point2D.__annotations__. TypedDict
-    supports two additional equivalent forms::
+    The type info can be accessed via the Point2D.__annotations__ dict, and
+    the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+    TypedDict supports two additional equivalent forms::
 
         Point2D = TypedDict('Point2D', x=int, y=int, label=str)
         Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
@@ -1798,6 +1974,23 @@
     The class syntax is only supported in Python 3.6+, while the two other
     syntax forms work for Python 2.7 and 3.2+.
     """
+    if fields is None:
+        fields = kwargs
+    elif kwargs:
+        raise TypeError("TypedDict takes either a dict or keyword arguments,"
+                        " but not both")
+
+    ns = {'__annotations__': dict(fields), '__total__': total}
+    try:
+        # Setting correct module is necessary to make typed dict classes pickleable.
+        ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+    except (AttributeError, ValueError):
+        pass
+
+    return _TypedDictMeta(typename, (), ns)
+
+_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
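
With the totality bookkeeping above, keys annotated under total=True land in
__required_keys__ and keys under total=False in __optional_keys__, with
inherited keys merged from the bases; and since __call__ = dict on the
metaclass, an "instance" is just a plain dict. A small illustration,
assuming the updated typing module shown in this diff:

    from typing import TypedDict

    class Point2D(TypedDict):             # total=True by default
        x: int
        y: int

    class Point3D(Point2D, total=False):  # inherits x and y as required
        z: int

    assert Point3D.__required_keys__ == frozenset({'x', 'y'})
    assert Point3D.__optional_keys__ == frozenset({'z'})
    assert Point3D(x=1, y=2) == {'x': 1, 'y': 2}  # at runtime: just a dict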
 
 
 def NewType(name, tp):
@@ -1995,8 +2188,8 @@
 io.__name__ = __name__ + '.io'
 sys.modules[io.__name__] = io
 
-Pattern = _alias(stdlib_re.Pattern, AnyStr)
-Match = _alias(stdlib_re.Match, AnyStr)
+Pattern = _alias(stdlib_re.Pattern, 1)
+Match = _alias(stdlib_re.Match, 1)
 
 class re:
     """Wrapper namespace for re type aliases."""
diff --git a/common/py3-stdlib/unittest/__init__.py b/common/py3-stdlib/unittest/__init__.py
index ace3a6f..348dc47 100644
--- a/common/py3-stdlib/unittest/__init__.py
+++ b/common/py3-stdlib/unittest/__init__.py
@@ -57,7 +57,6 @@
 __unittest = True
 
 from .result import TestResult
-from .async_case import IsolatedAsyncioTestCase
 from .case import (addModuleCleanup, TestCase, FunctionTestCase, SkipTest, skip,
                    skipIf, skipUnless, expectedFailure)
 from .suite import BaseTestSuite, TestSuite
@@ -66,6 +65,7 @@
 from .main import TestProgram, main
 from .runner import TextTestRunner, TextTestResult
 from .signals import installHandler, registerResult, removeResult, removeHandler
+# IsolatedAsyncioTestCase will be imported lazily.
 
 # deprecated
 _TextTestResult = TextTestResult
@@ -78,3 +78,18 @@
     # top level directory cached on loader instance
     this_dir = os.path.dirname(__file__)
     return loader.discover(start_dir=this_dir, pattern=pattern)
+
+
+# Lazy import of IsolatedAsyncioTestCase from .async_case
+# It imports asyncio, which is relatively heavy, but most tests
+# do not need it.
+
+def __dir__():
+    return globals().keys() | {'IsolatedAsyncioTestCase'}
+
+def __getattr__(name):
+    if name == 'IsolatedAsyncioTestCase':
+        global IsolatedAsyncioTestCase
+        from .async_case import IsolatedAsyncioTestCase
+        return IsolatedAsyncioTestCase
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
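
The deferral above is the PEP 562 pattern: a module-level __getattr__ runs
only on a failed attribute lookup, and the global assignment caches the class
so the hook fires at most once. A minimal sketch of the same pattern for a
hypothetical package (the names here are illustrative, not stdlib API):

    # mypkg/__init__.py
    def __dir__():
        # advertise the lazy name so dir(mypkg) still lists it
        return sorted(globals().keys() | {'HeavyThing'})

    def __getattr__(name):
        if name == 'HeavyThing':
            global HeavyThing               # cache: __getattr__ won't run again
            from ._heavy import HeavyThing  # deferred, costly import
            return HeavyThing
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
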
diff --git a/common/py3-stdlib/unittest/_log.py b/common/py3-stdlib/unittest/_log.py
new file mode 100644
index 0000000..94e7e75
--- /dev/null
+++ b/common/py3-stdlib/unittest/_log.py
@@ -0,0 +1,69 @@
+import logging
+import collections
+
+from .case import _BaseTestCaseContext
+
+
+_LoggingWatcher = collections.namedtuple("_LoggingWatcher",
+                                         ["records", "output"])
+
+class _CapturingHandler(logging.Handler):
+    """
+    A logging handler capturing all (raw and formatted) logging output.
+    """
+
+    def __init__(self):
+        logging.Handler.__init__(self)
+        self.watcher = _LoggingWatcher([], [])
+
+    def flush(self):
+        pass
+
+    def emit(self, record):
+        self.watcher.records.append(record)
+        msg = self.format(record)
+        self.watcher.output.append(msg)
+
+
+class _AssertLogsContext(_BaseTestCaseContext):
+    """A context manager used to implement TestCase.assertLogs()."""
+
+    LOGGING_FORMAT = "%(levelname)s:%(name)s:%(message)s"
+
+    def __init__(self, test_case, logger_name, level):
+        _BaseTestCaseContext.__init__(self, test_case)
+        self.logger_name = logger_name
+        if level:
+            self.level = logging._nameToLevel.get(level, level)
+        else:
+            self.level = logging.INFO
+        self.msg = None
+
+    def __enter__(self):
+        if isinstance(self.logger_name, logging.Logger):
+            logger = self.logger = self.logger_name
+        else:
+            logger = self.logger = logging.getLogger(self.logger_name)
+        formatter = logging.Formatter(self.LOGGING_FORMAT)
+        handler = _CapturingHandler()
+        handler.setFormatter(formatter)
+        self.watcher = handler.watcher
+        self.old_handlers = logger.handlers[:]
+        self.old_level = logger.level
+        self.old_propagate = logger.propagate
+        logger.handlers = [handler]
+        logger.setLevel(self.level)
+        logger.propagate = False
+        return handler.watcher
+
+    def __exit__(self, exc_type, exc_value, tb):
+        self.logger.handlers = self.old_handlers
+        self.logger.propagate = self.old_propagate
+        self.logger.setLevel(self.old_level)
+        if exc_type is not None:
+            # let unexpected exceptions pass through
+            return False
+        if len(self.watcher.records) == 0:
+            self._raiseFailure(
+                "no logs of level {} or higher triggered on {}"
+                .format(logging.getLevelName(self.level), self.logger.name))
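
_AssertLogsContext swaps the target logger's handlers for a single
_CapturingHandler and restores handlers, level, and propagation on exit, so
assertLogs() cleans up after itself even on failure. Typical use through the
public API, as a small self-contained test:

    import logging
    import unittest

    class LogTest(unittest.TestCase):
        def test_captures_records(self):
            # the context value is the _LoggingWatcher: .records and .output
            with self.assertLogs('demo', level='WARNING') as cm:
                logging.getLogger('demo').warning('disk at %d%%', 95)
            self.assertEqual(cm.output, ['WARNING:demo:disk at 95%'])
            self.assertEqual(cm.records[0].levelno, logging.WARNING)

    if __name__ == '__main__':
        unittest.main()
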
diff --git a/common/py3-stdlib/unittest/case.py b/common/py3-stdlib/unittest/case.py
index 3223c0b..f8bc865 100644
--- a/common/py3-stdlib/unittest/case.py
+++ b/common/py3-stdlib/unittest/case.py
@@ -3,7 +3,6 @@
 import sys
 import functools
 import difflib
-import logging
 import pprint
 import re
 import warnings
@@ -241,6 +240,8 @@
                      expected_regex.pattern, str(exc_value)))
         return True
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
 
 class _AssertWarnsContext(_AssertRaisesBaseContext):
     """A context manager used to implement TestCase.assertWarns* methods."""
@@ -251,7 +252,7 @@
     def __enter__(self):
         # The __warningregistry__'s need to be in a pristine state for tests
         # to work properly.
-        for v in list(sys.modules.values()):
+        for v in sys.modules.values():
             if getattr(v, '__warningregistry__', None):
                 v.__warningregistry__ = {}
         self.warnings_manager = warnings.catch_warnings(record=True)
@@ -295,73 +296,6 @@
 
 
 
-_LoggingWatcher = collections.namedtuple("_LoggingWatcher",
-                                         ["records", "output"])
-
-
-class _CapturingHandler(logging.Handler):
-    """
-    A logging handler capturing all (raw and formatted) logging output.
-    """
-
-    def __init__(self):
-        logging.Handler.__init__(self)
-        self.watcher = _LoggingWatcher([], [])
-
-    def flush(self):
-        pass
-
-    def emit(self, record):
-        self.watcher.records.append(record)
-        msg = self.format(record)
-        self.watcher.output.append(msg)
-
-
-
-class _AssertLogsContext(_BaseTestCaseContext):
-    """A context manager used to implement TestCase.assertLogs()."""
-
-    LOGGING_FORMAT = "%(levelname)s:%(name)s:%(message)s"
-
-    def __init__(self, test_case, logger_name, level):
-        _BaseTestCaseContext.__init__(self, test_case)
-        self.logger_name = logger_name
-        if level:
-            self.level = logging._nameToLevel.get(level, level)
-        else:
-            self.level = logging.INFO
-        self.msg = None
-
-    def __enter__(self):
-        if isinstance(self.logger_name, logging.Logger):
-            logger = self.logger = self.logger_name
-        else:
-            logger = self.logger = logging.getLogger(self.logger_name)
-        formatter = logging.Formatter(self.LOGGING_FORMAT)
-        handler = _CapturingHandler()
-        handler.setFormatter(formatter)
-        self.watcher = handler.watcher
-        self.old_handlers = logger.handlers[:]
-        self.old_level = logger.level
-        self.old_propagate = logger.propagate
-        logger.handlers = [handler]
-        logger.setLevel(self.level)
-        logger.propagate = False
-        return handler.watcher
-
-    def __exit__(self, exc_type, exc_value, tb):
-        self.logger.handlers = self.old_handlers
-        self.logger.propagate = self.old_propagate
-        self.logger.setLevel(self.old_level)
-        if exc_type is not None:
-            # let unexpected exceptions pass through
-            return False
-        if len(self.watcher.records) == 0:
-            self._raiseFailure(
-                "no logs of level {} or higher triggered on {}"
-                .format(logging.getLevelName(self.level), self.logger.name))
-
-
 class _OrderedChainMap(collections.ChainMap):
     def __iter__(self):
         seen = set()
@@ -468,30 +402,13 @@
         """
         self._type_equality_funcs[typeobj] = function
 
-    def addCleanup(*args, **kwargs):
+    def addCleanup(self, function, /, *args, **kwargs):
         """Add a function, with arguments, to be called when the test is
         completed. Functions added are called on a LIFO basis and are
         called after tearDown on test failure or success.
 
         Cleanup items are called even if setUp fails (unlike tearDown)."""
-        if len(args) >= 2:
-            self, function, *args = args
-        elif not args:
-            raise TypeError("descriptor 'addCleanup' of 'TestCase' object "
-                            "needs an argument")
-        elif 'function' in kwargs:
-            function = kwargs.pop('function')
-            self, *args = args
-            import warnings
-            warnings.warn("Passing 'function' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError('addCleanup expected at least 1 positional '
-                            'argument, got %d' % (len(args)-1))
-        args = tuple(args)
-
         self._cleanups.append((function, args, kwargs))
-    addCleanup.__text_signature__ = '($self, function, /, *args, **kwargs)'
 
     @classmethod
     def addClassCleanup(cls, function, /, *args, **kwargs):
@@ -729,7 +646,7 @@
             function, args, kwargs = cls._class_cleanups.pop()
             try:
                 function(*args, **kwargs)
-            except Exception as exc:
+            except Exception:
                 cls.tearDown_exceptions.append(sys.exc_info())
 
     def __call__(self, *args, **kwds):
@@ -869,6 +786,8 @@
             self.assertEqual(cm.output, ['INFO:foo:first message',
                                          'ERROR:foo.bar:second message'])
         """
+        # Lazy import to avoid importing logging if it is not needed.
+        from ._log import _AssertLogsContext
         return _AssertLogsContext(self, logger, level)
 
     def _getAssertEqualityFunc(self, first, second):
diff --git a/common/py3-stdlib/unittest/mock.py b/common/py3-stdlib/unittest/mock.py
index 3629cf6..b495a5f 100644
--- a/common/py3-stdlib/unittest/mock.py
+++ b/common/py3-stdlib/unittest/mock.py
@@ -23,8 +23,6 @@
 )
 
 
-__version__ = '1.0'
-
 import asyncio
 import contextlib
 import io
@@ -32,6 +30,7 @@
 import pprint
 import sys
 import builtins
+from asyncio import iscoroutinefunction
 from types import CodeType, ModuleType, MethodType
 from unittest.util import safe_repr
 from functools import wraps, partial
@@ -50,12 +49,12 @@
         return False
     if hasattr(obj, '__func__'):
         obj = getattr(obj, '__func__')
-    return asyncio.iscoroutinefunction(obj) or inspect.isawaitable(obj)
+    return iscoroutinefunction(obj) or inspect.isawaitable(obj)
 
 
 def _is_async_func(func):
     if getattr(func, '__code__', None):
-        return asyncio.iscoroutinefunction(func)
+        return iscoroutinefunction(func)
     else:
         return False
 
@@ -403,18 +402,12 @@
         # so we can create magic methods on the
         # class without stomping on other mocks
         bases = (cls,)
-        if not issubclass(cls, AsyncMock):
+        if not issubclass(cls, AsyncMockMixin):
             # Check if spec is an async object or function
-            sig = inspect.signature(NonCallableMock.__init__)
-            bound_args = sig.bind_partial(cls, *args, **kw).arguments
-            spec_arg = [
-                arg for arg in bound_args.keys()
-                if arg.startswith('spec')
-            ]
-            if spec_arg:
-                # what if spec_set is different than spec?
-                if _is_async_obj(bound_args[spec_arg[0]]):
-                    bases = (AsyncMockMixin, cls,)
+            bound_args = _MOCK_SIG.bind_partial(cls, *args, **kw).arguments
+            spec_arg = bound_args.get('spec_set', bound_args.get('spec'))
+            if spec_arg and _is_async_obj(spec_arg):
+                bases = (AsyncMockMixin, cls)
         new = type(cls.__name__, bases, {'__doc__': cls.__doc__})
         instance = _safe_super(NonCallableMock, cls).__new__(new)
         return instance
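
The effect of the rewritten __new__ is that a plain Mock or MagicMock given
an async spec (via spec or spec_set) silently gains AsyncMockMixin in its
bases, so the resulting mock is itself a coroutine function. A sketch
against the public API:

    import asyncio
    from unittest.mock import MagicMock

    async def fetch(url):           # any coroutine function used as the spec
        ...

    m = MagicMock(spec=fetch)       # __new__ mixes AsyncMockMixin into the bases
    assert asyncio.iscoroutinefunction(m)
    asyncio.run(m('http://example.com'))      # the call result is awaitable
    m.assert_awaited_once_with('http://example.com')
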
@@ -496,7 +489,7 @@
         _spec_asyncs = []
 
         for attr in dir(spec):
-            if asyncio.iscoroutinefunction(getattr(spec, attr, None)):
+            if iscoroutinefunction(getattr(spec, attr, None)):
                 _spec_asyncs.append(attr)
 
         if spec is not None and not _is_list(spec):
@@ -600,7 +593,7 @@
         for child in self._mock_children.values():
             if isinstance(child, _SpecState) or child is _deleted:
                 continue
-            child.reset_mock(visited)
+            child.reset_mock(visited, return_value=return_value, side_effect=side_effect)
 
         ret = self._mock_return_value
         if _is_instance_mock(ret) and ret is not self:
@@ -857,7 +850,8 @@
             else:
                 name, args, kwargs = _call
             try:
-                return name, sig.bind(*args, **kwargs)
+                bound_call = sig.bind(*args, **kwargs)
+                return call(name, bound_call.args, bound_call.kwargs)
             except TypeError as e:
                 return e.with_traceback(None)
         else:
@@ -906,9 +900,9 @@
         def _error_message():
             msg = self._format_mock_failure_message(args, kwargs)
             return msg
-        expected = self._call_matcher((args, kwargs))
+        expected = self._call_matcher(_Call((args, kwargs), two=True))
         actual = self._call_matcher(self.call_args)
-        if expected != actual:
+        if actual != expected:
             cause = expected if isinstance(expected, Exception) else None
             raise AssertionError(_error_message()) from cause
 
@@ -976,10 +970,10 @@
         The assert passes if the mock has *ever* been called, unlike
         `assert_called_with` and `assert_called_once_with` that only pass if
         the call is the most recent one."""
-        expected = self._call_matcher((args, kwargs))
+        expected = self._call_matcher(_Call((args, kwargs), two=True))
+        cause = expected if isinstance(expected, Exception) else None
         actual = [self._call_matcher(c) for c in self.call_args_list]
-        if expected not in actual:
-            cause = expected if isinstance(expected, Exception) else None
+        if cause or expected not in _AnyComparer(actual):
             expected_string = self._format_mock_call_signature(args, kwargs)
             raise AssertionError(
                 '%s call not found' % expected_string
@@ -1038,6 +1032,24 @@
         return f"\n{prefix}: {safe_repr(self.mock_calls)}."
 
 
+_MOCK_SIG = inspect.signature(NonCallableMock.__init__)
+
+
+class _AnyComparer(list):
+    """A list which checks if it contains a call which may have an
+    argument of ANY, flipping the components of item and self from
+    their traditional locations so that ANY is guaranteed to be on
+    the left."""
+    def __contains__(self, item):
+        for _call in self:
+            assert len(item) == len(_call)
+            if all([
+                expected == actual
+                for expected, actual in zip(item, _call)
+            ]):
+                return True
+        return False
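
_AnyComparer, together with the flipped "actual != expected" comparisons
above, is part of the bpo-37555 fix: every comparison keeps the expected
call (which may hold ANY) on the left of ==, so ANY.__eq__ (which always
returns True) wins even when the actual argument's __eq__ is pathological.
For example:

    from unittest.mock import Mock, ANY, call

    class Flaky:
        def __eq__(self, other):    # pathological __eq__, as in the regression tests
            return False

    m = Mock()
    m(Flaky(), 1)
    # expected stays on the left, so ANY matches regardless of Flaky.__eq__
    m.assert_called_with(ANY, 1)
    m.assert_any_call(ANY, 1)
    m.assert_has_calls([call(ANY, 1)])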
+
 
 def _try_iter(obj):
     if obj is None:
@@ -1696,7 +1708,8 @@
     "as"; very useful if `patch` is creating a mock object for you.
 
     `patch` takes arbitrary keyword arguments. These will be passed to
-    the `Mock` (or `new_callable`) on construction.
+    `AsyncMock` if the patched object is asynchronous, to `MagicMock`
+    otherwise, or to `new_callable` if specified.
 
     `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
     available for alternate use-cases.
@@ -1820,11 +1833,27 @@
 
     def __exit__(self, *args):
         """Unpatch the dict."""
-        self._unpatch_dict()
+        if self._original is not None:
+            self._unpatch_dict()
         return False
 
-    start = __enter__
-    stop = __exit__
+
+    def start(self):
+        """Activate a patch, returning any created mock."""
+        result = self.__enter__()
+        _patch._active_patches.append(self)
+        return result
+
+
+    def stop(self):
+        """Stop an active patch."""
+        try:
+            _patch._active_patches.remove(self)
+        except ValueError:
+            # If the patch hasn't been started this will fail
+            return None
+
+        return self.__exit__(None, None, None)
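
Decoupling start/stop from __enter__/__exit__ gives patch.dict the same
start/stop semantics as the other patchers: started patchers are recorded in
_patch._active_patches (so patch.stopall() unwinds them too), and stop() on a
never-started patcher now returns None instead of raising. A quick sketch,
assuming APP_MODE is not already set in the environment:

    import os
    from unittest.mock import patch

    patcher = patch.dict(os.environ, {'APP_MODE': 'test'})
    assert patcher.stop() is None          # never started: a harmless no-op

    patcher.start()                        # registered in _active_patches
    assert os.environ['APP_MODE'] == 'test'
    patcher.stop()                         # or patch.stopall()
    assert 'APP_MODE' not in os.environ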
 
 
 def _clear_dict(in_dict):
@@ -2096,7 +2125,7 @@
 
     def __init__(self, /, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        # asyncio.iscoroutinefunction() checks _is_coroutine property to say if an
+        # iscoroutinefunction() checks _is_coroutine property to say if an
         # object is a coroutine. Without this check it looks to see if it is a
         # function/method, which in this case it is not (since it is an
         # AsyncMock).
@@ -2111,7 +2140,7 @@
         self.__dict__['__code__'] = code_mock
 
     async def _execute_mock_call(self, /, *args, **kwargs):
-        # This is nearly just like super(), except for sepcial handling
+        # This is nearly just like super(), except for special handling
         # of coroutines
 
         _call = _Call((args, kwargs), two=True)
@@ -2132,7 +2161,7 @@
                     raise StopAsyncIteration
                 if _is_exception(result):
                     raise result
-            elif asyncio.iscoroutinefunction(effect):
+            elif iscoroutinefunction(effect):
                 result = await effect(*args, **kwargs)
             else:
                 result = effect(*args, **kwargs)
@@ -2144,7 +2173,7 @@
             return self.return_value
 
         if self._mock_wraps is not None:
-            if asyncio.iscoroutinefunction(self._mock_wraps):
+            if iscoroutinefunction(self._mock_wraps):
                 return await self._mock_wraps(*args, **kwargs)
             return self._mock_wraps(*args, **kwargs)
 
@@ -2179,9 +2208,9 @@
             msg = self._format_mock_failure_message(args, kwargs, action='await')
             return msg
 
-        expected = self._call_matcher((args, kwargs))
+        expected = self._call_matcher(_Call((args, kwargs), two=True))
         actual = self._call_matcher(self.await_args)
-        if expected != actual:
+        if actual != expected:
             cause = expected if isinstance(expected, Exception) else None
             raise AssertionError(_error_message()) from cause
 
@@ -2200,10 +2229,10 @@
         """
         Assert the mock has ever been awaited with the specified arguments.
         """
-        expected = self._call_matcher((args, kwargs))
+        expected = self._call_matcher(_Call((args, kwargs), two=True))
+        cause = expected if isinstance(expected, Exception) else None
         actual = [self._call_matcher(c) for c in self.await_args_list]
-        if expected not in actual:
-            cause = expected if isinstance(expected, Exception) else None
+        if cause or expected not in _AnyComparer(actual):
             expected_string = self._format_mock_call_signature(args, kwargs)
             raise AssertionError(
                 '%s await not found' % expected_string
@@ -2281,7 +2310,7 @@
     recognized as an async function, and the result of a call is an awaitable:
 
     >>> mock = AsyncMock()
-    >>> asyncio.iscoroutinefunction(mock)
+    >>> iscoroutinefunction(mock)
     True
     >>> inspect.isawaitable(mock())
     True
@@ -2408,12 +2437,10 @@
 
 
     def __eq__(self, other):
-        if other is ANY:
-            return True
         try:
             len_other = len(other)
         except TypeError:
-            return False
+            return NotImplemented
 
         self_name = ''
         if len(self) == 2:
@@ -2486,12 +2513,6 @@
         return tuple.__getattribute__(self, attr)
 
 
-    def count(self, /, *args, **kwargs):
-        return self.__getattr__('count')(*args, **kwargs)
-
-    def index(self, /, *args, **kwargs):
-        return self.__getattr__('index')(*args, **kwargs)
-
     def _get_call_arguments(self):
         if len(self) == 2:
             args, kwargs = self
@@ -2656,7 +2677,7 @@
 
             skipfirst = _must_skip(spec, entry, is_type)
             kwargs['_eat_self'] = skipfirst
-            if asyncio.iscoroutinefunction(original):
+            if iscoroutinefunction(original):
                 child_klass = AsyncMock
             else:
                 child_klass = MagicMock
@@ -2862,9 +2883,6 @@
         code_mock.co_flags = inspect.CO_ITERABLE_COROUTINE
         self.__dict__['__code__'] = code_mock
 
-    def __aiter__(self):
-        return self
-
     async def __anext__(self):
         try:
             return next(self.iterator)
diff --git a/common/py3-stdlib/unittest/result.py b/common/py3-stdlib/unittest/result.py
index c7e3206..111317b 100644
--- a/common/py3-stdlib/unittest/result.py
+++ b/common/py3-stdlib/unittest/result.py
@@ -161,7 +161,7 @@
         """Tells whether or not this result was a success."""
         # The hasattr check is for test_result's OldResult test.  That
         # way this method works on objects that lack the attribute.
-        # (where would such result intances come from? old stored pickles?)
+        # (where would such result instances come from? old stored pickles?)
         return ((len(self.failures) == len(self.errors) == 0) and
                 (not hasattr(self, 'unexpectedSuccesses') or
                  len(self.unexpectedSuccesses) == 0))
diff --git a/common/py3-stdlib/unittest/test/test_case.py b/common/py3-stdlib/unittest/test/test_case.py
index 3dedcbe..f855c4d 100644
--- a/common/py3-stdlib/unittest/test/test_case.py
+++ b/common/py3-stdlib/unittest/test/test_case.py
@@ -8,7 +8,6 @@
 import warnings
 import weakref
 import inspect
-import types
 
 from copy import deepcopy
 from test import support
@@ -1351,20 +1350,6 @@
             pass
         self.assertRaises(TypeError, self.assertWarnsRegex, MyWarn, lambda: True)
 
-    def testAssertWarnsModifySysModules(self):
-        # bpo-29620: handle modified sys.modules during iteration
-        class Foo(types.ModuleType):
-            @property
-            def __warningregistry__(self):
-                sys.modules['@bar@'] = 'bar'
-
-        sys.modules['@foo@'] = Foo('foo')
-        try:
-            self.assertWarns(UserWarning, warnings.warn, 'expected')
-        finally:
-            del sys.modules['@foo@']
-            del sys.modules['@bar@']
-
     def testAssertRaisesRegexMismatch(self):
         def Stub():
             raise Exception('Unexpected')
diff --git a/common/py3-stdlib/unittest/test/test_program.py b/common/py3-stdlib/unittest/test/test_program.py
index 4a62ae1..eef82ff 100644
--- a/common/py3-stdlib/unittest/test/test_program.py
+++ b/common/py3-stdlib/unittest/test/test_program.py
@@ -188,8 +188,6 @@
         program = self.program
         for arg, attr in (('buffer', 'buffer'), ('failfast', 'failfast'),
                       ('catch', 'catchbreak')):
-            if attr == 'catch' and not hasInstallHandler:
-                continue
 
             setattr(program, attr, None)
             program.parseArgs([None])
diff --git a/common/py3-stdlib/unittest/test/test_runner.py b/common/py3-stdlib/unittest/test/test_runner.py
index 7d36340..dd9a1b6 100644
--- a/common/py3-stdlib/unittest/test/test_runner.py
+++ b/common/py3-stdlib/unittest/test/test_runner.py
@@ -592,7 +592,7 @@
         class TestableTest(unittest.TestCase):
             def setUp(self2):
                 self2.addCleanup(cleanup, 1, 2, function=3, self=4)
-                with self.assertWarns(DeprecationWarning):
+                with self.assertRaises(TypeError):
                     self2.addCleanup(function=cleanup, arg='hello')
             def testNothing(self):
                 pass
@@ -603,8 +603,7 @@
             unittest.TestCase.addCleanup(self=TestableTest(), function=cleanup)
         runTests(TestableTest)
         self.assertEqual(cleanups,
-                         [((), {'arg': 'hello'}),
-                          ((1, 2), {'function': 3, 'self': 4})])
+                         [((1, 2), {'function': 3, 'self': 4})])
 
     def test_with_errors_in_addClassCleanup(self):
         ordering = []
diff --git a/common/py3-stdlib/unittest/test/testmock/testasync.py b/common/py3-stdlib/unittest/test/testmock/testasync.py
index e84c66c..690ca4f 100644
--- a/common/py3-stdlib/unittest/test/testmock/testasync.py
+++ b/common/py3-stdlib/unittest/test/testmock/testasync.py
@@ -1,8 +1,12 @@
 import asyncio
+import gc
 import inspect
 import re
 import unittest
+from contextlib import contextmanager
 
+from asyncio import run, iscoroutinefunction
+from unittest import IsolatedAsyncioTestCase
 from unittest.mock import (ANY, call, AsyncMock, patch, MagicMock, Mock,
                            create_autospec, sentinel, _CallList)
 
@@ -12,49 +16,48 @@
 
 
 class AsyncClass:
-    def __init__(self):
-        pass
-    async def async_method(self):
-        pass
-    def normal_method(self):
-        pass
+    def __init__(self): pass
+    async def async_method(self): pass
+    def normal_method(self): pass
 
     @classmethod
-    async def async_class_method(cls):
-        pass
+    async def async_class_method(cls): pass
 
     @staticmethod
-    async def async_static_method():
-        pass
+    async def async_static_method(): pass
 
 
 class AwaitableClass:
-    def __await__(self):
-        yield
+    def __await__(self): yield
 
-async def async_func():
-    pass
+async def async_func(): pass
 
-async def async_func_args(a, b, *, c):
-    pass
+async def async_func_args(a, b, *, c): pass
 
-def normal_func():
-    pass
+def normal_func(): pass
 
 class NormalClass(object):
-    def a(self):
-        pass
+    def a(self): pass
 
 
 async_foo_name = f'{__name__}.AsyncClass'
 normal_foo_name = f'{__name__}.NormalClass'
 
 
+@contextmanager
+def assertNeverAwaited(test):
+    with test.assertWarnsRegex(RuntimeWarning, "was never awaited$"):
+        yield
+        # In non-CPython implementations of Python, this is needed because timely
+        # deallocation is not guaranteed by the garbage collector.
+        gc.collect()
+
+
 class AsyncPatchDecoratorTest(unittest.TestCase):
     def test_is_coroutine_function_patch(self):
         @patch.object(AsyncClass, 'async_method')
         def test_async(mock_method):
-            self.assertTrue(asyncio.iscoroutinefunction(mock_method))
+            self.assertTrue(iscoroutinefunction(mock_method))
         test_async()
 
     def test_is_async_patch(self):
@@ -62,13 +65,13 @@
         def test_async(mock_method):
             m = mock_method()
             self.assertTrue(inspect.isawaitable(m))
-            asyncio.run(m)
+            run(m)
 
         @patch(f'{async_foo_name}.async_method')
         def test_no_parent_attribute(mock_method):
             m = mock_method()
             self.assertTrue(inspect.isawaitable(m))
-            asyncio.run(m)
+            run(m)
 
         test_async()
         test_no_parent_attribute()
@@ -107,7 +110,7 @@
             self.assertEqual(await async_func(), 1)
             self.assertEqual(await async_func_args(1, 2, c=3), 2)
 
-        asyncio.run(test_async())
+        run(test_async())
         self.assertTrue(inspect.iscoroutinefunction(async_func))
 
 
@@ -115,7 +118,7 @@
     def test_is_async_function_cm(self):
         def test_async():
             with patch.object(AsyncClass, 'async_method') as mock_method:
-                self.assertTrue(asyncio.iscoroutinefunction(mock_method))
+                self.assertTrue(iscoroutinefunction(mock_method))
 
         test_async()
 
@@ -124,7 +127,7 @@
             with patch.object(AsyncClass, 'async_method') as mock_method:
                 m = mock_method()
                 self.assertTrue(inspect.isawaitable(m))
-                asyncio.run(m)
+                run(m)
 
         test_async()
 
@@ -141,31 +144,31 @@
                 self.assertIsInstance(async_func, AsyncMock)
             self.assertTrue(inspect.iscoroutinefunction(async_func))
 
-        asyncio.run(test_async())
+        run(test_async())
 
 
 class AsyncMockTest(unittest.TestCase):
     def test_iscoroutinefunction_default(self):
         mock = AsyncMock()
-        self.assertTrue(asyncio.iscoroutinefunction(mock))
+        self.assertTrue(iscoroutinefunction(mock))
 
     def test_iscoroutinefunction_function(self):
         async def foo(): pass
         mock = AsyncMock(foo)
-        self.assertTrue(asyncio.iscoroutinefunction(mock))
+        self.assertTrue(iscoroutinefunction(mock))
         self.assertTrue(inspect.iscoroutinefunction(mock))
 
     def test_isawaitable(self):
         mock = AsyncMock()
         m = mock()
         self.assertTrue(inspect.isawaitable(m))
-        asyncio.run(m)
+        run(m)
         self.assertIn('assert_awaited', dir(mock))
 
     def test_iscoroutinefunction_normal_function(self):
         def foo(): pass
         mock = AsyncMock(foo)
-        self.assertTrue(asyncio.iscoroutinefunction(mock))
+        self.assertTrue(iscoroutinefunction(mock))
         self.assertTrue(inspect.iscoroutinefunction(mock))
 
     def test_future_isfuture(self):
@@ -211,9 +214,9 @@
         self.assertEqual(spec.await_args_list, [])
         spec.assert_not_awaited()
 
-        asyncio.run(main())
+        run(main())
 
-        self.assertTrue(asyncio.iscoroutinefunction(spec))
+        self.assertTrue(iscoroutinefunction(spec))
         self.assertTrue(asyncio.iscoroutine(awaitable))
         self.assertEqual(spec.await_count, 1)
         self.assertEqual(spec.await_args, call(1, 2, c=3))
@@ -223,6 +226,10 @@
         spec.assert_awaited_with(1, 2, c=3)
         spec.assert_awaited()
 
+        with self.assertRaises(AssertionError):
+            spec.assert_any_await(e=1)
+
+
     def test_patch_with_autospec(self):
 
         async def test_async():
@@ -230,7 +237,7 @@
                 awaitable = mock_method(1, 2, c=3)
                 self.assertIsInstance(mock_method.mock, AsyncMock)
 
-                self.assertTrue(asyncio.iscoroutinefunction(mock_method))
+                self.assertTrue(iscoroutinefunction(mock_method))
                 self.assertTrue(asyncio.iscoroutine(awaitable))
                 self.assertTrue(inspect.isawaitable(awaitable))
 
@@ -255,7 +262,7 @@
             self.assertIsNone(mock_method.await_args)
             self.assertEqual(mock_method.await_args_list, [])
 
-        asyncio.run(test_async())
+        run(test_async())
 
 
 class AsyncSpecTest(unittest.TestCase):
@@ -278,8 +285,7 @@
         def inner_test(mock_type):
             async_mock = mock_type(spec=async_func)
             self.assertIsInstance(async_mock, mock_type)
-            with self.assertWarns(RuntimeWarning):
-                # Will raise a warning because never awaited
+            with assertNeverAwaited(self):
                 self.assertTrue(inspect.isawaitable(async_mock()))
 
             sync_mock = mock_type(spec=normal_func)
@@ -293,8 +299,7 @@
         def inner_test(mock_type):
             async_mock = mock_type(async_func)
             self.assertIsInstance(async_mock, mock_type)
-            with self.assertWarns(RuntimeWarning):
-                # Will raise a warning because never awaited
+            with assertNeverAwaited(self):
                 self.assertTrue(inspect.isawaitable(async_mock()))
 
             sync_mock = mock_type(normal_func)
@@ -309,14 +314,14 @@
         self.assertIsInstance(mock, AsyncMock)
         m = mock()
         self.assertTrue(inspect.isawaitable(m))
-        asyncio.run(m)
+        run(m)
 
     def test_spec_as_normal_positional_AsyncMock(self):
         mock = AsyncMock(normal_func)
         self.assertIsInstance(mock, AsyncMock)
         m = mock()
         self.assertTrue(inspect.isawaitable(m))
-        asyncio.run(m)
+        run(m)
 
     def test_spec_async_mock(self):
         @patch.object(AsyncClass, 'async_method', spec=True)
@@ -362,16 +367,17 @@
         @patch.object(AsyncClass, 'async_method', spec_set=True)
         def test_async(async_method):
             self.assertIsInstance(async_method, AsyncMock)
+        test_async()
 
     def test_is_async_AsyncMock(self):
         mock = AsyncMock(spec_set=AsyncClass.async_method)
-        self.assertTrue(asyncio.iscoroutinefunction(mock))
+        self.assertTrue(iscoroutinefunction(mock))
         self.assertIsInstance(mock, AsyncMock)
 
     def test_is_child_AsyncMock(self):
         mock = MagicMock(spec_set=AsyncClass)
-        self.assertTrue(asyncio.iscoroutinefunction(mock.async_method))
-        self.assertFalse(asyncio.iscoroutinefunction(mock.normal_method))
+        self.assertTrue(iscoroutinefunction(mock.async_method))
+        self.assertFalse(iscoroutinefunction(mock.normal_method))
         self.assertIsInstance(mock.async_method, AsyncMock)
         self.assertIsInstance(mock.normal_method, MagicMock)
         self.assertIsInstance(mock, MagicMock)
@@ -384,10 +390,9 @@
             self.assertIsInstance(cm, MagicMock)
 
 
-class AsyncArguments(unittest.IsolatedAsyncioTestCase):
+class AsyncArguments(IsolatedAsyncioTestCase):
     async def test_add_return_value(self):
-        async def addition(self, var):
-            return var + 1
+        async def addition(self, var): pass
 
         mock = AsyncMock(addition, return_value=10)
         output = await mock(5)
@@ -395,8 +400,7 @@
         self.assertEqual(output, 10)
 
     async def test_add_side_effect_exception(self):
-        async def addition(var):
-            return var + 1
+        async def addition(var): pass
         mock = AsyncMock(addition, side_effect=Exception('err'))
         with self.assertRaises(Exception):
             await mock(5)
@@ -542,24 +546,20 @@
         self.assertIsInstance(m_mock.__aenter__, AsyncMock)
         self.assertIsInstance(m_mock.__aexit__, AsyncMock)
         # AsyncMocks are also coroutine functions
-        self.assertTrue(asyncio.iscoroutinefunction(m_mock.__aenter__))
-        self.assertTrue(asyncio.iscoroutinefunction(m_mock.__aexit__))
+        self.assertTrue(iscoroutinefunction(m_mock.__aenter__))
+        self.assertTrue(iscoroutinefunction(m_mock.__aexit__))
 
 class AsyncContextManagerTest(unittest.TestCase):
-    class WithAsyncContextManager:
-        async def __aenter__(self, *args, **kwargs):
-            self.entered = True
-            return self
 
-        async def __aexit__(self, *args, **kwargs):
-            self.exited = True
+    class WithAsyncContextManager:
+        async def __aenter__(self, *args, **kwargs): pass
+
+        async def __aexit__(self, *args, **kwargs): pass
 
     class WithSyncContextManager:
-        def __enter__(self, *args, **kwargs):
-            return self
+        def __enter__(self, *args, **kwargs): pass
 
-        def __exit__(self, *args, **kwargs):
-            pass
+        def __exit__(self, *args, **kwargs): pass
 
     class ProductionCode:
         # Example real-world(ish) code
@@ -580,7 +580,7 @@
             response.json = AsyncMock(return_value={'json': 123})
             cm.__aenter__.return_value = response
             pc.session.post.return_value = cm
-            result = asyncio.run(pc.main())
+            result = run(pc.main())
             self.assertEqual(result, {'json': 123})
 
         for mock_type in [AsyncMock, MagicMock]:
@@ -599,7 +599,7 @@
                     called = True
                 return result
 
-            cm_result = asyncio.run(use_context_manager())
+            cm_result = run(use_context_manager())
             self.assertTrue(called)
             self.assertTrue(cm_mock.__aenter__.called)
             self.assertTrue(cm_mock.__aexit__.called)
@@ -612,6 +612,7 @@
             with self.subTest(f"test context manager magics with {mock_type}"):
                 inner_test(mock_type)
 
+
     def test_mock_customize_async_context_manager(self):
         instance = self.WithAsyncContextManager()
         mock_instance = MagicMock(instance)
@@ -623,7 +624,7 @@
             async with mock_instance as result:
                 return result
 
-        self.assertIs(asyncio.run(use_context_manager()), expected_result)
+        self.assertIs(run(use_context_manager()), expected_result)
 
     def test_mock_customize_async_context_manager_with_coroutine(self):
         enter_called = False
@@ -647,7 +648,7 @@
             async with mock_instance:
                 pass
 
-        asyncio.run(use_context_manager())
+        run(use_context_manager())
         self.assertTrue(enter_called)
         self.assertTrue(exit_called)
 
@@ -659,7 +660,7 @@
         instance = self.WithAsyncContextManager()
         mock_instance = MagicMock(instance)
         with self.assertRaises(TypeError):
-            asyncio.run(raise_in(mock_instance))
+            run(raise_in(mock_instance))
 
 
 class AsyncIteratorTest(unittest.TestCase):
@@ -667,23 +668,16 @@
         def __init__(self):
             self.items = ["foo", "NormalFoo", "baz"]
 
-        def __aiter__(self):
-            return self
+        def __aiter__(self): pass
 
-        async def __anext__(self):
-            try:
-                return self.items.pop()
-            except IndexError:
-                pass
-
-            raise StopAsyncIteration
+        async def __anext__(self): pass
 
     def test_aiter_set_return_value(self):
         mock_iter = AsyncMock(name="tester")
         mock_iter.__aiter__.return_value = [1, 2, 3]
         async def main():
             return [i async for i in mock_iter]
-        result = asyncio.run(main())
+        result = run(main())
         self.assertEqual(result, [1, 2, 3])
 
     def test_mock_aiter_and_anext_asyncmock(self):
@@ -692,11 +686,11 @@
             mock_instance = mock_type(instance)
             # Check that the mock and the real thing behave the same
             # __aiter__ is not actually async, so not a coroutinefunction
-            self.assertFalse(asyncio.iscoroutinefunction(instance.__aiter__))
-            self.assertFalse(asyncio.iscoroutinefunction(mock_instance.__aiter__))
+            self.assertFalse(iscoroutinefunction(instance.__aiter__))
+            self.assertFalse(iscoroutinefunction(mock_instance.__aiter__))
             # __anext__ is async
-            self.assertTrue(asyncio.iscoroutinefunction(instance.__anext__))
-            self.assertTrue(asyncio.iscoroutinefunction(mock_instance.__anext__))
+            self.assertTrue(iscoroutinefunction(instance.__anext__))
+            self.assertTrue(iscoroutinefunction(mock_instance.__anext__))
 
         for mock_type in [AsyncMock, MagicMock]:
             with self.subTest(f"test aiter and anext corourtine with {mock_type}"):
@@ -714,18 +708,18 @@
         expected = ["FOO", "BAR", "BAZ"]
         def test_default(mock_type):
             mock_instance = mock_type(self.WithAsyncIterator())
-            self.assertEqual(asyncio.run(iterate(mock_instance)), [])
+            self.assertEqual(run(iterate(mock_instance)), [])
 
 
         def test_set_return_value(mock_type):
             mock_instance = mock_type(self.WithAsyncIterator())
             mock_instance.__aiter__.return_value = expected[:]
-            self.assertEqual(asyncio.run(iterate(mock_instance)), expected)
+            self.assertEqual(run(iterate(mock_instance)), expected)
 
         def test_set_return_value_iter(mock_type):
             mock_instance = mock_type(self.WithAsyncIterator())
             mock_instance.__aiter__.return_value = iter(expected[:])
-            self.assertEqual(asyncio.run(iterate(mock_instance)), expected)
+            self.assertEqual(run(iterate(mock_instance)), expected)
 
         for mock_type in [AsyncMock, MagicMock]:
             with self.subTest(f"default value with {mock_type}"):
@@ -750,10 +744,9 @@
 
     def test_assert_called_but_not_awaited(self):
         mock = AsyncMock(AsyncClass)
-        with self.assertWarns(RuntimeWarning):
-            # Will raise a warning because never awaited
+        with assertNeverAwaited(self):
             mock.async_method()
-        self.assertTrue(asyncio.iscoroutinefunction(mock.async_method))
+        self.assertTrue(iscoroutinefunction(mock.async_method))
         mock.async_method.assert_called()
         mock.async_method.assert_called_once()
         mock.async_method.assert_called_once_with()
@@ -771,7 +764,7 @@
         with self.assertRaises(AssertionError):
             mock.async_method.assert_awaited()
 
-        asyncio.run(self._await_coroutine(mock_coroutine))
+        run(self._await_coroutine(mock_coroutine))
         # Assert we haven't re-called the function
         mock.async_method.assert_called_once()
         mock.async_method.assert_awaited()
@@ -785,21 +778,21 @@
         with self.assertRaises(AssertionError):
             self.mock.assert_called()
 
-        asyncio.run(self._runnable_test())
+        run(self._runnable_test())
         self.mock.assert_called_once()
         self.mock.assert_awaited_once()
 
     def test_assert_called_twice_and_awaited_once(self):
         mock = AsyncMock(AsyncClass)
         coroutine = mock.async_method()
-        with self.assertWarns(RuntimeWarning):
-            # The first call will be awaited so no warning there
-            # But this call will never get awaited, so it will warn here
+        # The first call will be awaited so no warning there
+        # But this call will never get awaited, so it will warn here
+        with assertNeverAwaited(self):
             mock.async_method()
         with self.assertRaises(AssertionError):
             mock.async_method.assert_awaited()
         mock.async_method.assert_called()
-        asyncio.run(self._await_coroutine(coroutine))
+        run(self._await_coroutine(coroutine))
         mock.async_method.assert_awaited()
         mock.async_method.assert_awaited_once()
 
@@ -807,10 +800,10 @@
         mock = AsyncMock(AsyncClass)
         coroutine = mock.async_method()
         mock.async_method.assert_called_once()
-        asyncio.run(self._await_coroutine(coroutine))
+        run(self._await_coroutine(coroutine))
         with self.assertRaises(RuntimeError):
             # Cannot reuse already awaited coroutine
-            asyncio.run(self._await_coroutine(coroutine))
+            run(self._await_coroutine(coroutine))
         mock.async_method.assert_awaited()
 
     def test_assert_awaited_but_not_called(self):
@@ -820,7 +813,7 @@
             self.mock.assert_called()
         with self.assertRaises(TypeError):
             # You cannot await an AsyncMock, it must be a coroutine
-            asyncio.run(self._await_coroutine(self.mock))
+            run(self._await_coroutine(self.mock))
 
         with self.assertRaises(AssertionError):
             self.mock.assert_awaited()
@@ -829,38 +822,34 @@
 
     def test_assert_has_calls_not_awaits(self):
         kalls = [call('foo')]
-        with self.assertWarns(RuntimeWarning):
-            # Will raise a warning because never awaited
+        with assertNeverAwaited(self):
             self.mock('foo')
         self.mock.assert_has_calls(kalls)
         with self.assertRaises(AssertionError):
             self.mock.assert_has_awaits(kalls)
 
     def test_assert_has_mock_calls_on_async_mock_no_spec(self):
-        with self.assertWarns(RuntimeWarning):
-            # Will raise a warning because never awaited
+        with assertNeverAwaited(self):
             self.mock()
         kalls_empty = [('', (), {})]
         self.assertEqual(self.mock.mock_calls, kalls_empty)
 
-        with self.assertWarns(RuntimeWarning):
-            # Will raise a warning because never awaited
+        with assertNeverAwaited(self):
             self.mock('foo')
+        with assertNeverAwaited(self):
             self.mock('baz')
         mock_kalls = ([call(), call('foo'), call('baz')])
         self.assertEqual(self.mock.mock_calls, mock_kalls)
 
     def test_assert_has_mock_calls_on_async_mock_with_spec(self):
         a_class_mock = AsyncMock(AsyncClass)
-        with self.assertWarns(RuntimeWarning):
-            # Will raise a warning because never awaited
+        with assertNeverAwaited(self):
             a_class_mock.async_method()
         kalls_empty = [('', (), {})]
         self.assertEqual(a_class_mock.async_method.mock_calls, kalls_empty)
         self.assertEqual(a_class_mock.mock_calls, [call.async_method()])
 
-        with self.assertWarns(RuntimeWarning):
-            # Will raise a warning because never awaited
+        with assertNeverAwaited(self):
             a_class_mock.async_method(1, 2, 3, a=4, b=5)
         method_kalls = [call(), call(1, 2, 3, a=4, b=5)]
         mock_kalls = [call.async_method(), call.async_method(1, 2, 3, a=4, b=5)]
@@ -868,9 +857,9 @@
         self.assertEqual(a_class_mock.mock_calls, mock_kalls)
 
     def test_async_method_calls_recorded(self):
-        with self.assertWarns(RuntimeWarning):
-            # Will raise warnings because never awaited
+        with assertNeverAwaited(self):
             self.mock.something(3, fish=None)
+        with assertNeverAwaited(self):
             self.mock.something_else.something(6, cake=sentinel.Cake)
 
         self.assertEqual(self.mock.method_calls, [
@@ -892,19 +881,20 @@
                 self.assertEqual(attr, [])
 
         assert_attrs(self.mock)
-        with self.assertWarns(RuntimeWarning):
-            # Will raise warnings because never awaited
+        with assertNeverAwaited(self):
             self.mock()
+        with assertNeverAwaited(self):
             self.mock(1, 2)
+        with assertNeverAwaited(self):
             self.mock(a=3)
 
         self.mock.reset_mock()
         assert_attrs(self.mock)
 
         a_mock = AsyncMock(AsyncClass)
-        with self.assertWarns(RuntimeWarning):
-            # Will raise warnings because never awaited
+        with assertNeverAwaited(self):
             a_mock.async_method()
+        with assertNeverAwaited(self):
             a_mock.async_method(1, a=3)
 
         a_mock.reset_mock()
@@ -914,17 +904,17 @@
         with self.assertRaises(AssertionError):
             self.mock.assert_awaited()
 
-        asyncio.run(self._runnable_test())
+        run(self._runnable_test())
         self.mock.assert_awaited()
 
     def test_assert_awaited_once(self):
         with self.assertRaises(AssertionError):
             self.mock.assert_awaited_once()
 
-        asyncio.run(self._runnable_test())
+        run(self._runnable_test())
         self.mock.assert_awaited_once()
 
-        asyncio.run(self._runnable_test())
+        run(self._runnable_test())
         with self.assertRaises(AssertionError):
             self.mock.assert_awaited_once()
 
@@ -933,15 +923,15 @@
         with self.assertRaisesRegex(AssertionError, msg):
             self.mock.assert_awaited_with('foo')
 
-        asyncio.run(self._runnable_test())
+        run(self._runnable_test())
         msg = 'expected await not found'
         with self.assertRaisesRegex(AssertionError, msg):
             self.mock.assert_awaited_with('foo')
 
-        asyncio.run(self._runnable_test('foo'))
+        run(self._runnable_test('foo'))
         self.mock.assert_awaited_with('foo')
 
-        asyncio.run(self._runnable_test('SomethingElse'))
+        run(self._runnable_test('SomethingElse'))
         with self.assertRaises(AssertionError):
             self.mock.assert_awaited_with('foo')
 
@@ -949,10 +939,10 @@
         with self.assertRaises(AssertionError):
             self.mock.assert_awaited_once_with('foo')
 
-        asyncio.run(self._runnable_test('foo'))
+        run(self._runnable_test('foo'))
         self.mock.assert_awaited_once_with('foo')
 
-        asyncio.run(self._runnable_test('foo'))
+        run(self._runnable_test('foo'))
         with self.assertRaises(AssertionError):
             self.mock.assert_awaited_once_with('foo')
 
@@ -960,14 +950,14 @@
         with self.assertRaises(AssertionError):
             self.mock.assert_any_await('foo')
 
-        asyncio.run(self._runnable_test('baz'))
+        run(self._runnable_test('baz'))
         with self.assertRaises(AssertionError):
             self.mock.assert_any_await('foo')
 
-        asyncio.run(self._runnable_test('foo'))
+        run(self._runnable_test('foo'))
         self.mock.assert_any_await('foo')
 
-        asyncio.run(self._runnable_test('SomethingElse'))
+        run(self._runnable_test('SomethingElse'))
         self.mock.assert_any_await('foo')
 
     def test_assert_has_awaits_no_order(self):
@@ -977,43 +967,67 @@
             self.mock.assert_has_awaits(calls)
         self.assertEqual(len(cm.exception.args), 1)
 
-        asyncio.run(self._runnable_test('foo'))
+        run(self._runnable_test('foo'))
         with self.assertRaises(AssertionError):
             self.mock.assert_has_awaits(calls)
 
-        asyncio.run(self._runnable_test('foo'))
+        run(self._runnable_test('foo'))
         with self.assertRaises(AssertionError):
             self.mock.assert_has_awaits(calls)
 
-        asyncio.run(self._runnable_test('baz'))
+        run(self._runnable_test('baz'))
         self.mock.assert_has_awaits(calls)
 
-        asyncio.run(self._runnable_test('SomethingElse'))
+        run(self._runnable_test('SomethingElse'))
         self.mock.assert_has_awaits(calls)
 
+    def test_awaits_asserts_with_any(self):
+        class Foo:
+            def __eq__(self, other): pass
+
+        run(self._runnable_test(Foo(), 1))
+
+        self.mock.assert_has_awaits([call(ANY, 1)])
+        self.mock.assert_awaited_with(ANY, 1)
+        self.mock.assert_any_await(ANY, 1)
+
+    def test_awaits_asserts_with_spec_and_any(self):
+        class Foo:
+            def __eq__(self, other): pass
+
+        mock_with_spec = AsyncMock(spec=Foo)
+
+        async def _custom_mock_runnable_test(*args):
+            await mock_with_spec(*args)
+
+        run(_custom_mock_runnable_test(Foo(), 1))
+        mock_with_spec.assert_has_awaits([call(ANY, 1)])
+        mock_with_spec.assert_awaited_with(ANY, 1)
+        mock_with_spec.assert_any_await(ANY, 1)
+
     def test_assert_has_awaits_ordered(self):
         calls = [call('foo'), call('baz')]
         with self.assertRaises(AssertionError):
             self.mock.assert_has_awaits(calls, any_order=True)
 
-        asyncio.run(self._runnable_test('baz'))
+        run(self._runnable_test('baz'))
         with self.assertRaises(AssertionError):
             self.mock.assert_has_awaits(calls, any_order=True)
 
-        asyncio.run(self._runnable_test('bamf'))
+        run(self._runnable_test('bamf'))
         with self.assertRaises(AssertionError):
             self.mock.assert_has_awaits(calls, any_order=True)
 
-        asyncio.run(self._runnable_test('foo'))
+        run(self._runnable_test('foo'))
         self.mock.assert_has_awaits(calls, any_order=True)
 
-        asyncio.run(self._runnable_test('qux'))
+        run(self._runnable_test('qux'))
         self.mock.assert_has_awaits(calls, any_order=True)
 
     def test_assert_not_awaited(self):
         self.mock.assert_not_awaited()
 
-        asyncio.run(self._runnable_test())
+        run(self._runnable_test())
         with self.assertRaises(AssertionError):
             self.mock.assert_not_awaited()
 
@@ -1021,7 +1035,7 @@
         async def f(x=None): pass
 
         self.mock = AsyncMock(spec=f)
-        asyncio.run(self._runnable_test(1))
+        run(self._runnable_test(1))
 
         with self.assertRaisesRegex(
                 AssertionError,
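
For reference, a condensed sketch of the await-assertion API exercised by the tests above, driving an AsyncMock with asyncio.run directly rather than the module-local run() helper these tests switch to:

    import asyncio
    from unittest.mock import AsyncMock, call

    mock = AsyncMock()

    async def runnable(*args):
        await mock(*args)

    asyncio.run(runnable('foo'))
    mock.assert_awaited_once_with('foo')   # exactly one await, with these args
    mock.assert_any_await('foo')           # at least one await matched

    asyncio.run(runnable('baz'))
    mock.assert_has_awaits([call('foo'), call('baz')])  # in order by default
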
diff --git a/common/py3-stdlib/unittest/test/testmock/testhelpers.py b/common/py3-stdlib/unittest/test/testmock/testhelpers.py
index f3c7acb..9e7ec5d 100644
--- a/common/py3-stdlib/unittest/test/testmock/testhelpers.py
+++ b/common/py3-stdlib/unittest/test/testmock/testhelpers.py
@@ -64,7 +64,28 @@
             self.assertEqual(expected, mock.mock_calls)
             self.assertEqual(mock.mock_calls, expected)
 
+    def test_any_no_spec(self):
+        # This is a regression test for bpo-37555
+        class Foo:
+            def __eq__(self, other): pass
 
+        mock = Mock()
+        mock(Foo(), 1)
+        mock.assert_has_calls([call(ANY, 1)])
+        mock.assert_called_with(ANY, 1)
+        mock.assert_any_call(ANY, 1)
+
+    def test_any_and_spec_set(self):
+        # This is a regression test for bpo-37555
+        class Foo:
+            def __eq__(self, other): pass
+
+        mock = Mock(spec=Foo)
+
+        mock(Foo(), 1)
+        mock.assert_has_calls([call(ANY, 1)])
+        mock.assert_called_with(ANY, 1)
+        mock.assert_any_call(ANY, 1)
 
 class CallTest(unittest.TestCase):
 
diff --git a/common/py3-stdlib/unittest/test/testmock/testmagicmethods.py b/common/py3-stdlib/unittest/test/testmock/testmagicmethods.py
index 76b3a56..a4feae7 100644
--- a/common/py3-stdlib/unittest/test/testmock/testmagicmethods.py
+++ b/common/py3-stdlib/unittest/test/testmock/testmagicmethods.py
@@ -1,8 +1,7 @@
-import asyncio
 import math
 import unittest
 import os
-import sys
+from asyncio import iscoroutinefunction
 from unittest.mock import AsyncMock, Mock, MagicMock, _magics
 
 
@@ -286,8 +285,8 @@
         self.assertEqual(math.trunc(mock), mock.__trunc__())
         self.assertEqual(math.floor(mock), mock.__floor__())
         self.assertEqual(math.ceil(mock), mock.__ceil__())
-        self.assertTrue(asyncio.iscoroutinefunction(mock.__aexit__))
-        self.assertTrue(asyncio.iscoroutinefunction(mock.__aenter__))
+        self.assertTrue(iscoroutinefunction(mock.__aexit__))
+        self.assertTrue(iscoroutinefunction(mock.__aenter__))
         self.assertIsInstance(mock.__aenter__, AsyncMock)
         self.assertIsInstance(mock.__aexit__, AsyncMock)
 
@@ -312,8 +311,8 @@
         self.assertEqual(math.trunc(mock), mock.__trunc__())
         self.assertEqual(math.floor(mock), mock.__floor__())
         self.assertEqual(math.ceil(mock), mock.__ceil__())
-        self.assertTrue(asyncio.iscoroutinefunction(mock.__aexit__))
-        self.assertTrue(asyncio.iscoroutinefunction(mock.__aenter__))
+        self.assertTrue(iscoroutinefunction(mock.__aexit__))
+        self.assertTrue(iscoroutinefunction(mock.__aenter__))
         self.assertIsInstance(mock.__aenter__, AsyncMock)
         self.assertIsInstance(mock.__aexit__, AsyncMock)
 
@@ -429,7 +428,6 @@
             self.assertEqual(dir(mock), ['foo'])
 
 
-    @unittest.skipIf('PyPy' in sys.version, "This fails differently on pypy")
     def test_bound_methods(self):
         m = Mock()
 
diff --git a/common/py3-stdlib/unittest/test/testmock/testmock.py b/common/py3-stdlib/unittest/test/testmock/testmock.py
index 1cde45e..ce674e7 100644
--- a/common/py3-stdlib/unittest/test/testmock/testmock.py
+++ b/common/py3-stdlib/unittest/test/testmock/testmock.py
@@ -3,6 +3,7 @@
 import sys
 import tempfile
 
+from test.support import ALWAYS_EQ
 import unittest
 from unittest.test.testmock.support import is_instance
 from unittest import mock
@@ -262,7 +263,7 @@
 
         ret_val = mock(sentinel.Arg)
         self.assertTrue(mock.called, "called not set")
-        self.assertEqual(mock.call_count, 1, "call_count incoreect")
+        self.assertEqual(mock.call_count, 1, "call_count incorrect")
         self.assertEqual(mock.call_args, ((sentinel.Arg,), {}),
                          "call_args not set")
         self.assertEqual(mock.call_args.args, (sentinel.Arg,),
@@ -322,6 +323,8 @@
         self.assertFalse(mm != mock.ANY)
         self.assertTrue(mock.ANY == mm)
         self.assertFalse(mock.ANY != mm)
+        self.assertTrue(mm == ALWAYS_EQ)
+        self.assertFalse(mm != ALWAYS_EQ)
 
         call1 = mock.call(mock.MagicMock())
         call2 = mock.call(mock.ANY)
@@ -330,6 +333,11 @@
         self.assertTrue(call2 == call1)
         self.assertFalse(call2 != call1)
 
+        self.assertTrue(call1 == ALWAYS_EQ)
+        self.assertFalse(call1 != ALWAYS_EQ)
+        self.assertFalse(call1 == 1)
+        self.assertTrue(call1 != 1)
+
 
     def test_assert_called_with(self):
         mock = Mock()
@@ -707,6 +715,57 @@
         self.assertRaises(StopIteration, mock.method)
 
 
+    def test_magic_method_wraps_dict(self):
+        # bpo-25597: MagicMock with wrap doesn't call wrapped object's
+        # method for magic methods with default values.
+        data = {'foo': 'bar'}
+
+        wrapped_dict = MagicMock(wraps=data)
+        self.assertEqual(wrapped_dict.get('foo'), 'bar')
+        # Accessing key gives a MagicMock
+        self.assertIsInstance(wrapped_dict['foo'], MagicMock)
+        # __contains__ method has a default value of False
+        self.assertFalse('foo' in wrapped_dict)
+
+        # return_value is non-sentinel and takes precedence over wrapped value.
+        wrapped_dict.get.return_value = 'return_value'
+        self.assertEqual(wrapped_dict.get('foo'), 'return_value')
+
+        # return_value is sentinel and hence wrapped value is returned.
+        wrapped_dict.get.return_value = sentinel.DEFAULT
+        self.assertEqual(wrapped_dict.get('foo'), 'bar')
+
+        self.assertEqual(wrapped_dict.get('baz'), None)
+        self.assertIsInstance(wrapped_dict['baz'], MagicMock)
+        self.assertFalse('bar' in wrapped_dict)
+
+        data['baz'] = 'spam'
+        self.assertEqual(wrapped_dict.get('baz'), 'spam')
+        self.assertIsInstance(wrapped_dict['baz'], MagicMock)
+        self.assertFalse('bar' in wrapped_dict)
+
+        del data['baz']
+        self.assertEqual(wrapped_dict.get('baz'), None)
+
+
+    def test_magic_method_wraps_class(self):
+
+        class Foo:
+
+            def __getitem__(self, index):
+                return index
+
+            def __custom_method__(self):
+                return "foo"
+
+
+        klass = MagicMock(wraps=Foo)
+        obj = klass()
+        self.assertEqual(obj.__getitem__(2), 2)
+        self.assertEqual(obj[2], 2)
+        self.assertEqual(obj.__custom_method__(), "foo")
+
+
     def test_exceptional_side_effect(self):
         mock = Mock(side_effect=AttributeError)
         self.assertRaises(AttributeError, mock)
@@ -1628,11 +1687,23 @@
         self.assertNotEqual(m.side_effect, None)
 
     def test_reset_sideeffect(self):
-        m = Mock(return_value=10, side_effect=[2,3])
+        m = Mock(return_value=10, side_effect=[2, 3])
         m.reset_mock(side_effect=True)
         self.assertEqual(m.return_value, 10)
         self.assertEqual(m.side_effect, None)
 
+    def test_reset_return_with_children(self):
+        m = MagicMock(f=MagicMock(return_value=1))
+        self.assertEqual(m.f(), 1)
+        m.reset_mock(return_value=True)
+        self.assertNotEqual(m.f(), 1)
+
+    def test_reset_return_with_children_side_effect(self):
+        m = MagicMock(f=MagicMock(side_effect=[2, 3]))
+        self.assertNotEqual(m.f.side_effect, None)
+        m.reset_mock(side_effect=True)
+        self.assertEqual(m.f.side_effect, None)
+
     def test_mock_add_spec(self):
         class _One(object):
             one = 1
@@ -1801,6 +1872,11 @@
         with self.assertRaises(StopIteration):
             next(f1)
 
+    def test_mock_open_next_with_readline_with_return_value(self):
+        mopen = mock.mock_open(read_data='foo\nbarn')
+        mopen.return_value.readline.return_value = 'abc'
+        self.assertEqual('abc', next(mopen()))
+
     def test_mock_open_write(self):
         # Test exception in file writing write()
         mock_namedtemp = mock.mock_open(mock.MagicMock(name='JLV'))
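
The wraps tests added above encode a precedence rule: a configured return_value shadows the wrapped object, and restoring sentinel.DEFAULT re-enables delegation. Condensed:

    from unittest.mock import MagicMock, sentinel

    data = {'foo': 'bar'}
    wrapped = MagicMock(wraps=data)

    assert wrapped.get('foo') == 'bar'          # delegated to the real dict

    wrapped.get.return_value = 'patched'
    assert wrapped.get('foo') == 'patched'      # explicit return_value wins

    wrapped.get.return_value = sentinel.DEFAULT
    assert wrapped.get('foo') == 'bar'          # DEFAULT restores delegation
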
diff --git a/common/py3-stdlib/unittest/test/testmock/testpatch.py b/common/py3-stdlib/unittest/test/testmock/testpatch.py
index e065a2c..d8c1515 100644
--- a/common/py3-stdlib/unittest/test/testmock/testpatch.py
+++ b/common/py3-stdlib/unittest/test/testmock/testpatch.py
@@ -4,6 +4,7 @@
 
 import os
 import sys
+from collections import OrderedDict
 
 import unittest
 from unittest.test.testmock import support
@@ -769,6 +770,14 @@
         self.assertEqual(d, original)
 
 
+    def test_patch_dict_stop_without_start(self):
+        d = {'foo': 'bar'}
+        original = d.copy()
+        patcher = patch.dict(d, [('spam', 'eggs')], clear=True)
+        self.assertFalse(patcher.stop())
+        self.assertEqual(d, original)
+
+
     def test_patch_dict_class_decorator(self):
         this = self
         d = {'spam': 'eggs'}
@@ -1807,6 +1816,56 @@
 
         self.assertEqual(stopped, ["three", "two", "one"])
 
+    def test_patch_dict_stopall(self):
+        dic1 = {}
+        dic2 = {1: 'a'}
+        dic3 = {1: 'A', 2: 'B'}
+        origdic1 = dic1.copy()
+        origdic2 = dic2.copy()
+        origdic3 = dic3.copy()
+        patch.dict(dic1, {1: 'I', 2: 'II'}).start()
+        patch.dict(dic2, {2: 'b'}).start()
+
+        @patch.dict(dic3)
+        def patched():
+            del dic3[1]
+
+        patched()
+        self.assertNotEqual(dic1, origdic1)
+        self.assertNotEqual(dic2, origdic2)
+        self.assertEqual(dic3, origdic3)
+
+        patch.stopall()
+
+        self.assertEqual(dic1, origdic1)
+        self.assertEqual(dic2, origdic2)
+        self.assertEqual(dic3, origdic3)
+
+
+    def test_patch_and_patch_dict_stopall(self):
+        original_unlink = os.unlink
+        original_chdir = os.chdir
+        dic1 = {}
+        dic2 = {1: 'A', 2: 'B'}
+        origdic1 = dic1.copy()
+        origdic2 = dic2.copy()
+
+        patch('os.unlink', something).start()
+        patch('os.chdir', something_else).start()
+        patch.dict(dic1, {1: 'I', 2: 'II'}).start()
+        patch.dict(dic2).start()
+        del dic2[1]
+
+        self.assertIsNot(os.unlink, original_unlink)
+        self.assertIsNot(os.chdir, original_chdir)
+        self.assertNotEqual(dic1, origdic1)
+        self.assertNotEqual(dic2, origdic2)
+        patch.stopall()
+        self.assertIs(os.unlink, original_unlink)
+        self.assertIs(os.chdir, original_chdir)
+        self.assertEqual(dic1, origdic1)
+        self.assertEqual(dic2, origdic2)
+
 
     def test_special_attrs(self):
         def foo(x=0):
@@ -1834,6 +1893,25 @@
             self.assertEqual(foo(), 1)
         self.assertEqual(foo(), 0)
 
+    def test_patch_orderdict(self):
+        foo = OrderedDict()
+        foo['a'] = object()
+        foo['b'] = 'python'
+
+        original = foo.copy()
+        update_values = list(zip('cdefghijklmnopqrstuvwxyz', range(26)))
+        patched_values = list(foo.items()) + update_values
+
+        with patch.dict(foo, OrderedDict(update_values)):
+            self.assertEqual(list(foo.items()), patched_values)
+
+        self.assertEqual(foo, original)
+
+        with patch.dict(foo, update_values):
+            self.assertEqual(list(foo.items()), patched_values)
+
+        self.assertEqual(foo, original)
+
     def test_dotted_but_module_not_loaded(self):
         # This exercises the AttributeError branch of _dot_lookup.
 
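The new tests above extend patch.stopall() to started patch.dict() patchers. A minimal usage sketch with an arbitrary dict:

    from unittest.mock import patch

    config = {'mode': 'prod'}
    patch.dict(config, {'mode': 'test'}).start()
    assert config['mode'] == 'test'

    patch.stopall()                  # restores every started patcher,
    assert config['mode'] == 'prod'  # including dict patchers
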
diff --git a/common/py3-stdlib/urllib/parse.py b/common/py3-stdlib/urllib/parse.py
index e2b6f13..ea897c3 100644
--- a/common/py3-stdlib/urllib/parse.py
+++ b/common/py3-stdlib/urllib/parse.py
@@ -29,6 +29,7 @@
 
 import re
 import sys
+import types
 import collections
 import warnings
 
@@ -176,6 +177,8 @@
                 raise ValueError("Port out of range 0-65535")
         return port
 
+    __class_getitem__ = classmethod(types.GenericAlias)
+
 
 class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr):
     __slots__ = ()
@@ -366,9 +369,23 @@
 def urlparse(url, scheme='', allow_fragments=True):
     """Parse a URL into 6 components:
     <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
-    Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
-    Note that we don't break the components up in smaller bits
-    (e.g. netloc is a single string) and we don't expand % escapes."""
+
+    The result is a named 6-tuple with fields corresponding to the
+    above. It is either a ParseResult or ParseResultBytes object,
+    depending on the type of the url parameter.
+
+    The username, password, hostname, and port sub-components of netloc
+    can also be accessed as attributes of the returned object.
+
+    The scheme argument provides the default value of the scheme
+    component when no scheme is found in url.
+
+    If allow_fragments is False, no attempt is made to separate the
+    fragment component from the previous component, which can be either
+    path or query.
+
+    Note that % escapes are not expanded.
+    """
     url, scheme, _coerce_result = _coerce_args(url, scheme)
     splitresult = urlsplit(url, scheme, allow_fragments)
     scheme, netloc, url, query, fragment = splitresult
@@ -417,9 +434,24 @@
 def urlsplit(url, scheme='', allow_fragments=True):
     """Parse a URL into 5 components:
     <scheme>://<netloc>/<path>?<query>#<fragment>
-    Return a 5-tuple: (scheme, netloc, path, query, fragment).
-    Note that we don't break the components up in smaller bits
-    (e.g. netloc is a single string) and we don't expand % escapes."""
+
+    The result is a named 5-tuple with fields corresponding to the
+    above. It is either a SplitResult or SplitResultBytes object,
+    depending on the type of the url parameter.
+
+    The username, password, hostname, and port sub-components of netloc
+    can also be accessed as attributes of the returned object.
+
+    The scheme argument provides the default value of the scheme
+    component when no scheme is found in url.
+
+    If allow_fragments is False, no attempt is made to separate the
+    fragment component from the previous component, which can be either
+    path or query.
+
+    Note that % escapes are not expanded.
+    """
+
     url, scheme, _coerce_result = _coerce_args(url, scheme)
     allow_fragments = bool(allow_fragments)
     key = url, scheme, allow_fragments, type(url), type(scheme)
@@ -431,31 +463,11 @@
     netloc = query = fragment = ''
     i = url.find(':')
     if i > 0:
-        if url[:i] == 'http': # optimize the common case
-            url = url[i+1:]
-            if url[:2] == '//':
-                netloc, url = _splitnetloc(url, 2)
-                if (('[' in netloc and ']' not in netloc) or
-                        (']' in netloc and '[' not in netloc)):
-                    raise ValueError("Invalid IPv6 URL")
-            if allow_fragments and '#' in url:
-                url, fragment = url.split('#', 1)
-            if '?' in url:
-                url, query = url.split('?', 1)
-            _checknetloc(netloc)
-            v = SplitResult('http', netloc, url, query, fragment)
-            _parse_cache[key] = v
-            return _coerce_result(v)
         for c in url[:i]:
             if c not in scheme_chars:
                 break
         else:
-            # make sure "url" is not actually a port number (in which case
-            # "scheme" is really part of the path)
-            rest = url[i+1:]
-            if not rest or any(c not in '0123456789' for c in rest):
-                # not a port number
-                scheme, url = url[:i].lower(), rest
+            scheme, url = url[:i].lower(), url[i+1:]
 
     if url[:2] == '//':
         netloc, url = _splitnetloc(url, 2)
@@ -631,6 +643,8 @@
 
     unquote('abc%20def') -> 'abc def'.
     """
+    if isinstance(string, bytes):
+        return unquote_to_bytes(string).decode(encoding, errors)
     if '%' not in string:
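+        # Cheap type check: raises AttributeError early for non-str input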
         string.split
         return string
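
A short sketch of the behavior the rewritten docstrings describe, plus the new bytes path in unquote() (the URL is illustrative):

    from urllib.parse import urlparse, unquote

    r = urlparse('http://user:pw@example.com:8042/over/there?name=ferret#nose')
    assert r.scheme == 'http' and r.port == 8042     # named-tuple attributes
    assert r.hostname == 'example.com'
    assert r.username == 'user' and r.fragment == 'nose'

    assert unquote('abc%20def') == 'abc def'
    assert unquote(b'abc%20def') == 'abc def'   # bytes now routed via unquote_to_bytes
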
diff --git a/common/py3-stdlib/urllib/request.py b/common/py3-stdlib/urllib/request.py
index e440738..a8c870b 100644
--- a/common/py3-stdlib/urllib/request.py
+++ b/common/py3-stdlib/urllib/request.py
@@ -163,18 +163,10 @@
 
     The *cadefault* parameter is ignored.
 
-    This function always returns an object which can work as a context
-    manager and has methods such as
 
-    * geturl() - return the URL of the resource retrieved, commonly used to
-      determine if a redirect was followed
-
-    * info() - return the meta-information of the page, such as headers, in the
-      form of an email.message_from_string() instance (see Quick Reference to
-      HTTP Headers)
-
-    * getcode() - return the HTTP status code of the response.  Raises URLError
-      on errors.
+    This function always returns an object which can work as a
+    context manager and has the properties url, headers, and status.
+    See urllib.response.addinfourl for more detail on these properties.
 
     For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse
     object slightly modified. In addition to the three new methods above, the
@@ -1819,7 +1811,7 @@
                 hdrs = fp.info()
                 fp.close()
                 return url2pathname(_splithost(url1)[1]), hdrs
-            except OSError as msg:
+            except OSError:
                 pass
         fp = self.open(url, data)
         try:
@@ -2604,6 +2596,11 @@
                 mask = 8 * (m.group(1).count('.') + 1)
             else:
                 mask = int(mask[1:])
+
+            if mask < 0 or mask > 32:
+                # System libraries ignore invalid prefix lengths
+                continue
+
             mask = 32 - mask
 
             if (hostIP >> mask) == (base >> mask):
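
The new guard skips prefix lengths outside 0-32, mirroring the system libraries. A worked example of the mask arithmetic that follows it, with made-up addresses:

    import socket, struct

    def ip_int(s):                      # helper for this sketch only
        return struct.unpack('!I', socket.inet_aton(s))[0]

    host_ip, base, mask = ip_int('192.168.1.77'), ip_int('192.168.1.0'), 24
    shift = 32 - mask                   # same arithmetic as the code above
    assert (host_ip >> shift) == (base >> shift)   # host is in 192.168.1.0/24
    # an entry such as '/33' no longer reaches this comparison; it is skipped
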
diff --git a/common/py3-stdlib/urllib/response.py b/common/py3-stdlib/urllib/response.py
index 4778118..5a2c3cc 100644
--- a/common/py3-stdlib/urllib/response.py
+++ b/common/py3-stdlib/urllib/response.py
@@ -73,6 +73,10 @@
         self.url = url
         self.code = code
 
+    @property
+    def status(self):
+        return self.code
+
     def getcode(self):
         return self.code
 
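A minimal construction of addinfourl showing the new status property (the arguments here are placeholders):

    import io
    from email.message import Message
    from urllib.response import addinfourl

    resp = addinfourl(io.BytesIO(b'payload'), Message(),
                      'http://example.com/', code=200)
    assert resp.status == 200               # new read-only property
    assert resp.status == resp.getcode()    # aliases the legacy accessor
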
diff --git a/common/py3-stdlib/uuid.py b/common/py3-stdlib/uuid.py
index 188e16b..5ae0a3e 100644
--- a/common/py3-stdlib/uuid.py
+++ b/common/py3-stdlib/uuid.py
@@ -45,7 +45,6 @@
 """
 
 import os
-import platform
 import sys
 
 from enum import Enum
@@ -54,10 +53,19 @@
 __author__ = 'Ka-Ping Yee <ping@zesty.ca>'
 
 # The recognized platforms - known behaviors
-_AIX     = platform.system() == 'AIX'
-_DARWIN  = platform.system() == 'Darwin'
-_LINUX   = platform.system() == 'Linux'
-_WINDOWS = platform.system() == 'Windows'
+if sys.platform in ('win32', 'darwin'):
+    _AIX = _LINUX = False
+else:
+    import platform
+    _platform_system = platform.system()
+    _AIX     = _platform_system == 'AIX'
+    _LINUX   = _platform_system == 'Linux'
+
+_MAC_DELIM = b':'
+_MAC_OMITS_LEADING_ZEROES = False
+if _AIX:
+    _MAC_DELIM = b'.'
+    _MAC_OMITS_LEADING_ZEROES = True
 
 RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
     'reserved for NCS compatibility', 'specified in RFC 4122',
@@ -347,24 +355,32 @@
         if self.variant == RFC_4122:
             return int((self.int >> 76) & 0xf)
 
-def _popen(command, *args):
-    import os, shutil, subprocess
-    executable = shutil.which(command)
-    if executable is None:
-        path = os.pathsep.join(('/sbin', '/usr/sbin'))
-        executable = shutil.which(command, path=path)
+
+def _get_command_stdout(command, *args):
+    import io, os, shutil, subprocess
+
+    try:
+        path_dirs = os.environ.get('PATH', os.defpath).split(os.pathsep)
+        path_dirs.extend(['/sbin', '/usr/sbin'])
+        executable = shutil.which(command, path=os.pathsep.join(path_dirs))
         if executable is None:
             return None
-    # LC_ALL=C to ensure English output, stderr=DEVNULL to prevent output
-    # on stderr (Note: we don't have an example where the words we search
-    # for are actually localized, but in theory some system could do so.)
-    env = dict(os.environ)
-    env['LC_ALL'] = 'C'
-    proc = subprocess.Popen((executable,) + args,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.DEVNULL,
-                            env=env)
-    return proc
+        # LC_ALL=C to ensure English output, stderr=DEVNULL to prevent output
+        # on stderr (Note: we don't have an example where the words we search
+        # for are actually localized, but in theory some system could do so.)
+        env = dict(os.environ)
+        env['LC_ALL'] = 'C'
+        proc = subprocess.Popen((executable,) + args,
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.DEVNULL,
+                                env=env)
+        if not proc:
+            return None
+        stdout, stderr = proc.communicate()
+        return io.BytesIO(stdout)
+    except (OSError, subprocess.SubprocessError):
+        return None
+
 
 # For MAC (a.k.a. IEEE 802, or EUI-48) addresses, the second least significant
 # bit of the first octet signifies whether the MAC address is universally (0)
@@ -384,40 +400,114 @@
 def _is_universal(mac):
     return not (mac & (1 << 41))
 
-def _find_mac(command, args, hw_identifiers, get_index):
+
+def _find_mac_near_keyword(command, args, keywords, get_word_index):
+    """Searches a command's output for a MAC address near a keyword.
+
+    Each line of words in the output is case-insensitively searched for
+    any of the given keywords.  Upon a match, get_word_index is invoked
+    to pick a word from the line, given the index of the match.  For
+    example, lambda i: 0 would get the first word on the line, while
+    lambda i: i - 1 would get the word preceding the keyword.
+    """
+    stdout = _get_command_stdout(command, args)
+    if stdout is None:
+        return None
+
     first_local_mac = None
-    try:
-        proc = _popen(command, *args.split())
-        if not proc:
-            return None
-        with proc:
-            for line in proc.stdout:
-                words = line.lower().rstrip().split()
-                for i in range(len(words)):
-                    if words[i] in hw_identifiers:
-                        try:
-                            word = words[get_index(i)]
-                            mac = int(word.replace(b':', b''), 16)
-                            if _is_universal(mac):
-                                return mac
-                            first_local_mac = first_local_mac or mac
-                        except (ValueError, IndexError):
-                            # Virtual interfaces, such as those provided by
-                            # VPNs, do not have a colon-delimited MAC address
-                            # as expected, but a 16-byte HWAddr separated by
-                            # dashes. These should be ignored in favor of a
-                            # real MAC address
-                            pass
-    except OSError:
-        pass
+    for line in stdout:
+        words = line.lower().rstrip().split()
+        for i in range(len(words)):
+            if words[i] in keywords:
+                try:
+                    word = words[get_word_index(i)]
+                    mac = int(word.replace(_MAC_DELIM, b''), 16)
+                except (ValueError, IndexError):
+                    # Virtual interfaces, such as those provided by
+                    # VPNs, do not have a colon-delimited MAC address
+                    # as expected, but a 16-byte HWAddr separated by
+                    # dashes. These should be ignored in favor of a
+                    # real MAC address
+                    pass
+                else:
+                    if _is_universal(mac):
+                        return mac
+                    first_local_mac = first_local_mac or mac
     return first_local_mac or None
 
+
+def _parse_mac(word):
+    # Accept 'HH:HH:HH:HH:HH:HH' MAC address (ex: '52:54:00:9d:0e:67'),
+    # but reject IPv6 address (ex: 'fe80::5054:ff:fe9' or '123:2:3:4:5:6:7:8').
+    #
+    # Virtual interfaces, such as those provided by VPNs, do not have a
+    # colon-delimited MAC address as expected, but a 16-byte HWAddr separated
+    # by dashes. These should be ignored in favor of a real MAC address
+    parts = word.split(_MAC_DELIM)
+    if len(parts) != 6:
+        return
+    if _MAC_OMITS_LEADING_ZEROES:
+        # (Only) on AIX the macaddr value given is not prefixed by 0, e.g.
+        # en0   1500  link#2      fa.bc.de.f7.62.4 110854824     0 160133733     0     0
+        # not
+        # en0   1500  link#2      fa.bc.de.f7.62.04 110854824     0 160133733     0     0
+        if not all(1 <= len(part) <= 2 for part in parts):
+            return
+        hexstr = b''.join(part.rjust(2, b'0') for part in parts)
+    else:
+        if not all(len(part) == 2 for part in parts):
+            return
+        hexstr = b''.join(parts)
+    try:
+        return int(hexstr, 16)
+    except ValueError:
+        return
+
+
+def _find_mac_under_heading(command, args, heading):
+    """Looks for a MAC address under a heading in a command's output.
+
+    The first line of words in the output is searched for the given
+    heading. Words at the same word index as the heading in subsequent
+    lines are then examined to see if they look like MAC addresses.
+    """
+    stdout = _get_command_stdout(command, args)
+    if stdout is None:
+        return None
+
+    keywords = stdout.readline().rstrip().split()
+    try:
+        column_index = keywords.index(heading)
+    except ValueError:
+        return None
+
+    first_local_mac = None
+    for line in stdout:
+        words = line.rstrip().split()
+        try:
+            word = words[column_index]
+        except IndexError:
+            continue
+
+        mac = _parse_mac(word)
+        if mac is None:
+            continue
+        if _is_universal(mac):
+            return mac
+        if first_local_mac is None:
+            first_local_mac = mac
+
+    return first_local_mac
+
+
+# The following functions call external programs to 'get' a macaddr value to
+# be used as basis for an uuid
 def _ifconfig_getnode():
     """Get the hardware address on Unix by running ifconfig."""
     # This works on Linux ('' or '-a'), Tru64 ('-av'), but not all Unixes.
     keywords = (b'hwaddr', b'ether', b'address:', b'lladdr')
     for args in ('', '-a', '-av'):
-        mac = _find_mac('ifconfig', args, keywords, lambda i: i+1)
+        mac = _find_mac_near_keyword('ifconfig', args, keywords, lambda i: i+1)
         if mac:
             return mac
     return None
@@ -425,7 +515,7 @@
 def _ip_getnode():
     """Get the hardware address on Unix by running ip."""
     # This works on Linux with iproute2.
-    mac = _find_mac('ip', 'link', [b'link/ether'], lambda i: i+1)
+    mac = _find_mac_near_keyword('ip', 'link', [b'link/ether'], lambda i: i+1)
     if mac:
         return mac
     return None
@@ -439,17 +529,17 @@
         return None
 
     # Try getting the MAC addr from arp based on our IP address (Solaris).
-    mac = _find_mac('arp', '-an', [os.fsencode(ip_addr)], lambda i: -1)
+    mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: -1)
     if mac:
         return mac
 
     # This works on OpenBSD
-    mac = _find_mac('arp', '-an', [os.fsencode(ip_addr)], lambda i: i+1)
+    mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: i+1)
     if mac:
         return mac
 
     # This works on Linux, FreeBSD and NetBSD
-    mac = _find_mac('arp', '-an', [os.fsencode('(%s)' % ip_addr)],
+    mac = _find_mac_near_keyword('arp', '-an', [os.fsencode('(%s)' % ip_addr)],
                     lambda i: i+2)
     # Return None instead of 0.
     if mac:
@@ -459,210 +549,52 @@
 def _lanscan_getnode():
     """Get the hardware address on Unix by running lanscan."""
     # This might work on HP-UX.
-    return _find_mac('lanscan', '-ai', [b'lan0'], lambda i: 0)
+    return _find_mac_near_keyword('lanscan', '-ai', [b'lan0'], lambda i: 0)
 
 def _netstat_getnode():
     """Get the hardware address on Unix by running netstat."""
-    # This might work on AIX, Tru64 UNIX.
-    first_local_mac = None
-    try:
-        proc = _popen('netstat', '-ia')
-        if not proc:
-            return None
-        with proc:
-            words = proc.stdout.readline().rstrip().split()
-            try:
-                i = words.index(b'Address')
-            except ValueError:
-                return None
-            for line in proc.stdout:
-                try:
-                    words = line.rstrip().split()
-                    word = words[i]
-                    if len(word) == 17 and word.count(b':') == 5:
-                        mac = int(word.replace(b':', b''), 16)
-                        if _is_universal(mac):
-                            return mac
-                        first_local_mac = first_local_mac or mac
-                except (ValueError, IndexError):
-                    pass
-    except OSError:
-        pass
-    return first_local_mac or None
+    # This works on AIX and might work on Tru64 UNIX.
+    return _find_mac_under_heading('netstat', '-ian', b'Address')
 
 def _ipconfig_getnode():
-    """Get the hardware address on Windows by running ipconfig.exe."""
-    import os, re, subprocess
-    first_local_mac = None
-    dirs = ['', r'c:\windows\system32', r'c:\winnt\system32']
-    try:
-        import ctypes
-        buffer = ctypes.create_string_buffer(300)
-        ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300)
-        dirs.insert(0, buffer.value.decode('mbcs'))
-    except:
-        pass
-    for dir in dirs:
-        try:
-            proc = subprocess.Popen([os.path.join(dir, 'ipconfig'), '/all'],
-                                    stdout=subprocess.PIPE,
-                                    encoding="oem")
-        except OSError:
-            continue
-        with proc:
-            for line in proc.stdout:
-                value = line.split(':')[-1].strip().lower()
-                if re.fullmatch('(?:[0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value):
-                    mac = int(value.replace('-', ''), 16)
-                    if _is_universal(mac):
-                        return mac
-                    first_local_mac = first_local_mac or mac
-    return first_local_mac or None
+    """[DEPRECATED] Get the hardware address on Windows."""
+    # bpo-40501: UuidCreateSequential() is now the only supported approach
+    return _windll_getnode()
 
 def _netbios_getnode():
-    """Get the hardware address on Windows using NetBIOS calls.
-    See http://support.microsoft.com/kb/118623 for details."""
-    import win32wnet, netbios
-    first_local_mac = None
-    ncb = netbios.NCB()
-    ncb.Command = netbios.NCBENUM
-    ncb.Buffer = adapters = netbios.LANA_ENUM()
-    adapters._pack()
-    if win32wnet.Netbios(ncb) != 0:
-        return None
-    adapters._unpack()
-    for i in range(adapters.length):
-        ncb.Reset()
-        ncb.Command = netbios.NCBRESET
-        ncb.Lana_num = ord(adapters.lana[i])
-        if win32wnet.Netbios(ncb) != 0:
-            continue
-        ncb.Reset()
-        ncb.Command = netbios.NCBASTAT
-        ncb.Lana_num = ord(adapters.lana[i])
-        ncb.Callname = '*'.ljust(16)
-        ncb.Buffer = status = netbios.ADAPTER_STATUS()
-        if win32wnet.Netbios(ncb) != 0:
-            continue
-        status._unpack()
-        bytes = status.adapter_address[:6]
-        if len(bytes) != 6:
-            continue
-        mac = int.from_bytes(bytes, 'big')
-        if _is_universal(mac):
-            return mac
-        first_local_mac = first_local_mac or mac
-    return first_local_mac or None
+    """[DEPRECATED] Get the hardware address on Windows."""
+    # bpo-40501: UuidCreateSequential() is now the only supported approach
+    return _windll_getnode()
 
 
-_generate_time_safe = _UuidCreate = None
-_has_uuid_generate_time_safe = None
-
 # Import optional C extension at toplevel, to help disabling it when testing
 try:
     import _uuid
+    _generate_time_safe = getattr(_uuid, "generate_time_safe", None)
+    _UuidCreate = getattr(_uuid, "UuidCreate", None)
+    _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe
 except ImportError:
     _uuid = None
+    _generate_time_safe = None
+    _UuidCreate = None
+    _has_uuid_generate_time_safe = None
 
 
 def _load_system_functions():
-    """
-    Try to load platform-specific functions for generating uuids.
-    """
-    global _generate_time_safe, _UuidCreate, _has_uuid_generate_time_safe
-
-    if _has_uuid_generate_time_safe is not None:
-        return
-
-    _has_uuid_generate_time_safe = False
-
-    if sys.platform == "darwin" and int(os.uname().release.split('.')[0]) < 9:
-        # The uuid_generate_* functions are broken on MacOS X 10.5, as noted
-        # in issue #8621 the function generates the same sequence of values
-        # in the parent process and all children created using fork (unless
-        # those children use exec as well).
-        #
-        # Assume that the uuid_generate functions are broken from 10.5 onward,
-        # the test can be adjusted when a later version is fixed.
-        pass
-    elif _uuid is not None:
-        _generate_time_safe = _uuid.generate_time_safe
-        _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe
-        return
-
-    try:
-        # If we couldn't find an extension module, try ctypes to find
-        # system routines for UUID generation.
-        # Thanks to Thomas Heller for ctypes and for his help with its use here.
-        import ctypes
-        import ctypes.util
-
-        # The uuid_generate_* routines are provided by libuuid on at least
-        # Linux and FreeBSD, and provided by libc on Mac OS X.
-        _libnames = ['uuid']
-        if not sys.platform.startswith('win'):
-            _libnames.append('c')
-        for libname in _libnames:
-            try:
-                lib = ctypes.CDLL(ctypes.util.find_library(libname))
-            except Exception:                           # pragma: nocover
-                continue
-            # Try to find the safe variety first.
-            if hasattr(lib, 'uuid_generate_time_safe'):
-                _uuid_generate_time_safe = lib.uuid_generate_time_safe
-                # int uuid_generate_time_safe(uuid_t out);
-                def _generate_time_safe():
-                    _buffer = ctypes.create_string_buffer(16)
-                    res = _uuid_generate_time_safe(_buffer)
-                    return bytes(_buffer.raw), res
-                _has_uuid_generate_time_safe = True
-                break
-
-            elif hasattr(lib, 'uuid_generate_time'):    # pragma: nocover
-                _uuid_generate_time = lib.uuid_generate_time
-                # void uuid_generate_time(uuid_t out);
-                _uuid_generate_time.restype = None
-                def _generate_time_safe():
-                    _buffer = ctypes.create_string_buffer(16)
-                    _uuid_generate_time(_buffer)
-                    return bytes(_buffer.raw), None
-                break
-
-        # On Windows prior to 2000, UuidCreate gives a UUID containing the
-        # hardware address.  On Windows 2000 and later, UuidCreate makes a
-        # random UUID and UuidCreateSequential gives a UUID containing the
-        # hardware address.  These routines are provided by the RPC runtime.
-        # NOTE:  at least on Tim's WinXP Pro SP2 desktop box, while the last
-        # 6 bytes returned by UuidCreateSequential are fixed, they don't appear
-        # to bear any relationship to the MAC address of any network device
-        # on the box.
-        try:
-            lib = ctypes.windll.rpcrt4
-        except:
-            lib = None
-        _UuidCreate = getattr(lib, 'UuidCreateSequential',
-                              getattr(lib, 'UuidCreate', None))
-
-    except Exception as exc:
-        import warnings
-        warnings.warn(f"Could not find fallback ctypes uuid functions: {exc}",
-                      ImportWarning)
+    """[DEPRECATED] Platform-specific functions loaded at import time"""
 
 
 def _unix_getnode():
-    """Get the hardware address on Unix using the _uuid extension module
-    or ctypes."""
-    _load_system_functions()
-    uuid_time, _ = _generate_time_safe()
-    return UUID(bytes=uuid_time).node
+    """Get the hardware address on Unix using the _uuid extension module."""
+    if _generate_time_safe:
+        uuid_time, _ = _generate_time_safe()
+        return UUID(bytes=uuid_time).node
 
 def _windll_getnode():
-    """Get the hardware address on Windows using ctypes."""
-    import ctypes
-    _load_system_functions()
-    _buffer = ctypes.create_string_buffer(16)
-    if _UuidCreate(_buffer) == 0:
-        return UUID(bytes=bytes_(_buffer.raw)).node
+    """Get the hardware address on Windows using the _uuid extension module."""
+    if _UuidCreate:
+        uuid_bytes = _UuidCreate()
+        return UUID(bytes_le=uuid_bytes).node
 
 def _random_getnode():
     """Get a random node ID."""
@@ -688,10 +620,11 @@
 #     @unittest.skipUnless(_uuid._ifconfig_getnode in _uuid._GETTERS, ...)
 if _LINUX:
     _OS_GETTERS = [_ip_getnode, _ifconfig_getnode]
-elif _DARWIN:
+elif sys.platform == 'darwin':
     _OS_GETTERS = [_ifconfig_getnode, _arp_getnode, _netstat_getnode]
-elif _WINDOWS:
-    _OS_GETTERS = [_netbios_getnode, _ipconfig_getnode]
+elif sys.platform == 'win32':
+    # bpo-40201: _windll_getnode will always succeed, so these are not needed
+    _OS_GETTERS = []
 elif _AIX:
     _OS_GETTERS = [_netstat_getnode]
 else:
@@ -706,7 +639,7 @@
 
 _node = None
 
-def getnode(*, getters=None):
+def getnode():
     """Get the hardware address as a 48-bit positive integer.
 
     The first time this runs, it may launch a separate program, which could
@@ -738,7 +671,6 @@
 
     # When the system provides a version-1 UUID generator, use it (but don't
     # use UuidCreate here because its UUIDs don't conform to RFC 4122).
-    _load_system_functions()
     if _generate_time_safe is not None and node is clock_seq is None:
         uuid_time, safely_generated = _generate_time_safe()
         try:
@@ -772,8 +704,11 @@
 def uuid3(namespace, name):
     """Generate a UUID from the MD5 hash of a namespace UUID and a name."""
     from hashlib import md5
-    hash = md5(namespace.bytes + bytes(name, "utf-8")).digest()
-    return UUID(bytes=hash[:16], version=3)
+    digest = md5(
+        namespace.bytes + bytes(name, "utf-8"),
+        usedforsecurity=False
+    ).digest()
+    return UUID(bytes=digest[:16], version=3)
 
 def uuid4():
     """Generate a random UUID."""
diff --git a/common/py3-stdlib/wave.py b/common/py3-stdlib/wave.py
index 823f091..b707119 100644
--- a/common/py3-stdlib/wave.py
+++ b/common/py3-stdlib/wave.py
@@ -71,9 +71,15 @@
 is destroyed.
 """
 
+from chunk import Chunk
+from collections import namedtuple
+import audioop
 import builtins
+import struct
+import sys
 
-__all__ = ["open", "openfp", "Error", "Wave_read", "Wave_write"]
+
+__all__ = ["open", "Error", "Wave_read", "Wave_write"]
 
 class Error(Exception):
     pass
@@ -82,13 +88,6 @@
 
 _array_fmts = None, 'b', 'h', None, 'i'
 
-import audioop
-import struct
-import sys
-from chunk import Chunk
-from collections import namedtuple
-import warnings
-
 _wave_params = namedtuple('_wave_params',
                      'nchannels sampwidth framerate nframes comptype compname')
 
@@ -512,8 +511,3 @@
         return Wave_write(f)
     else:
         raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
-
-def openfp(f, mode=None):
-    warnings.warn("wave.openfp is deprecated since Python 3.7. "
-                  "Use wave.open instead.", DeprecationWarning, stacklevel=2)
-    return open(f, mode=mode)
diff --git a/common/py3-stdlib/weakref.py b/common/py3-stdlib/weakref.py
index 9d70089..5fa851d 100644
--- a/common/py3-stdlib/weakref.py
+++ b/common/py3-stdlib/weakref.py
@@ -33,6 +33,9 @@
            "WeakSet", "WeakMethod", "finalize"]
 
 
+_collections_abc.Set.register(WeakSet)
+_collections_abc.MutableSet.register(WeakSet)
+
 class WeakMethod(ref):
     """
     A custom `weakref.ref` subclass which simulates a weak reference to
@@ -75,14 +78,14 @@
             if not self._alive or not other._alive:
                 return self is other
             return ref.__eq__(self, other) and self._func_ref == other._func_ref
-        return False
+        return NotImplemented
 
     def __ne__(self, other):
         if isinstance(other, WeakMethod):
             if not self._alive or not other._alive:
                 return self is not other
             return ref.__ne__(self, other) or self._func_ref != other._func_ref
-        return True
+        return NotImplemented
 
     __hash__ = ref.__hash__
 
@@ -307,6 +310,25 @@
             self._commit_removals()
         return list(self.data.values())
 
+    def __ior__(self, other):
+        self.update(other)
+        return self
+
+    def __or__(self, other):
+        if isinstance(other, _collections_abc.Mapping):
+            c = self.copy()
+            c.update(other)
+            return c
+        return NotImplemented
+
+    def __ror__(self, other):
+        if isinstance(other, _collections_abc.Mapping):
+            c = self.__class__()
+            c.update(other)
+            c.update(self)
+            return c
+        return NotImplemented
+
 
 class KeyedRef(ref):
     """Specialized reference that includes a key corresponding to the value.
@@ -485,6 +507,25 @@
         if len(kwargs):
             self.update(kwargs)
 
+    def __ior__(self, other):
+        self.update(other)
+        return self
+
+    def __or__(self, other):
+        if isinstance(other, _collections_abc.Mapping):
+            c = self.copy()
+            c.update(other)
+            return c
+        return NotImplemented
+
+    def __ror__(self, other):
+        if isinstance(other, _collections_abc.Mapping):
+            c = self.__class__()
+            c.update(other)
+            c.update(self)
+            return c
+        return NotImplemented
+
 
 class finalize:
     """Class for finalization of weakrefable objects
@@ -514,33 +555,7 @@
     class _Info:
         __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index")
 
-    def __init__(*args, **kwargs):
-        if len(args) >= 3:
-            self, obj, func, *args = args
-        elif not args:
-            raise TypeError("descriptor '__init__' of 'finalize' object "
-                            "needs an argument")
-        else:
-            if 'func' not in kwargs:
-                raise TypeError('finalize expected at least 2 positional '
-                                'arguments, got %d' % (len(args)-1))
-            func = kwargs.pop('func')
-            if len(args) >= 2:
-                self, obj, *args = args
-                import warnings
-                warnings.warn("Passing 'func' as keyword argument is deprecated",
-                              DeprecationWarning, stacklevel=2)
-            else:
-                if 'obj' not in kwargs:
-                    raise TypeError('finalize expected at least 2 positional '
-                                    'arguments, got %d' % (len(args)-1))
-                obj = kwargs.pop('obj')
-                self, *args = args
-                import warnings
-                warnings.warn("Passing 'obj' as keyword argument is deprecated",
-                              DeprecationWarning, stacklevel=2)
-        args = tuple(args)
-
+    def __init__(self, obj, func, /, *args, **kwargs):
         if not self._registered_with_atexit:
             # We may register the exit function more than once because
             # of a thread race, but that is harmless
@@ -556,7 +571,6 @@
         info.index = next(self._index_iter)
         self._registry[self] = info
         finalize._dirty = True
-    __init__.__text_signature__ = '($self, obj, func, /, *args, **kwargs)'
 
     def __call__(self, _=None):
         """If alive then mark as dead and return func(*args, **kwargs);
diff --git a/common/py3-stdlib/webbrowser.py b/common/py3-stdlib/webbrowser.py
index cea9130..6023c1e 100755
--- a/common/py3-stdlib/webbrowser.py
+++ b/common/py3-stdlib/webbrowser.py
@@ -550,7 +550,7 @@
                 cmd = "xdg-settings get default-web-browser".split()
                 raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
                 result = raw_result.decode().strip()
-            except (FileNotFoundError, subprocess.CalledProcessError, PermissionError) :
+            except (FileNotFoundError, subprocess.CalledProcessError, PermissionError, NotADirectoryError):
                 pass
             else:
                 global _os_preferred_browser
diff --git a/common/py3-stdlib/xml/dom/expatbuilder.py b/common/py3-stdlib/xml/dom/expatbuilder.py
index 2bd835b..199c22d 100644
--- a/common/py3-stdlib/xml/dom/expatbuilder.py
+++ b/common/py3-stdlib/xml/dom/expatbuilder.py
@@ -204,11 +204,11 @@
                 buffer = file.read(16*1024)
                 if not buffer:
                     break
-                parser.Parse(buffer, 0)
+                parser.Parse(buffer, False)
                 if first_buffer and self.document.documentElement:
                     self._setup_subset(buffer)
                 first_buffer = False
-            parser.Parse("", True)
+            parser.Parse(b"", True)
         except ParseEscape:
             pass
         doc = self.document
@@ -637,7 +637,7 @@
         nsattrs = self._getNSattrs() # get ns decls from node's ancestors
         document = _FRAGMENT_BUILDER_TEMPLATE % (ident, subset, nsattrs)
         try:
-            parser.Parse(document, 1)
+            parser.Parse(document, True)
         except:
             self.reset()
             raise
@@ -697,7 +697,7 @@
             self.fragment = self.document.createDocumentFragment()
             self.curNode = self.fragment
             try:
-                parser.Parse(self._source, 1)
+                parser.Parse(self._source, True)
             finally:
                 self.curNode = old_cur_node
                 self.document = old_document
diff --git a/common/py3-stdlib/xml/dom/minidom.py b/common/py3-stdlib/xml/dom/minidom.py
index 464420b..d09ef5e 100644
--- a/common/py3-stdlib/xml/dom/minidom.py
+++ b/common/py3-stdlib/xml/dom/minidom.py
@@ -43,10 +43,11 @@
     def __bool__(self):
         return True
 
-    def toxml(self, encoding=None):
-        return self.toprettyxml("", "", encoding)
+    def toxml(self, encoding=None, standalone=None):
+        return self.toprettyxml("", "", encoding, standalone)
 
-    def toprettyxml(self, indent="\t", newl="\n", encoding=None):
+    def toprettyxml(self, indent="\t", newl="\n", encoding=None,
+                    standalone=None):
         if encoding is None:
             writer = io.StringIO()
         else:
@@ -56,7 +57,7 @@
                                       newline='\n')
         if self.nodeType == Node.DOCUMENT_NODE:
             # Can pass encoding only to document, to put it into XML header
-            self.writexml(writer, "", indent, newl, encoding)
+            self.writexml(writer, "", indent, newl, encoding, standalone)
         else:
             self.writexml(writer, "", indent, newl)
         if encoding is None:
@@ -718,6 +719,14 @@
         Node.unlink(self)
 
     def getAttribute(self, attname):
+        """Returns the value of the specified attribute.
+
+        Returns the value of the element's attribute named attname as
+        a string. An empty string is returned if the element does not
+        have such an attribute. Note that an empty string may also be
+        returned as an explicitly given attribute value, use the
+        hasAttribute method to distinguish these two cases.
+        """
         if self._attrs is None:
             return ""
         try:
@@ -828,6 +837,11 @@
     removeAttributeNodeNS = removeAttributeNode
 
     def hasAttribute(self, name):
+        """Checks whether the element has an attribute with the specified name.
+
+        Returns True if the element has an attribute with the specified name.
+        Otherwise, returns False.
+        """
         if self._attrs is None:
             return False
         return name in self._attrs
@@ -838,6 +852,11 @@
         return (namespaceURI, localName) in self._attrsNS
 
     def getElementsByTagName(self, name):
+        """Returns all descendant elements with the given tag name.
+
+        Returns the list of all descendant elements (not direct children
+        only) with the specified tag name.
+        """
         return _get_elements_by_tagName_helper(self, name, NodeList())
 
     def getElementsByTagNameNS(self, namespaceURI, localName):
@@ -848,6 +867,11 @@
         return "<DOM Element: %s at %#x>" % (self.tagName, id(self))
 
     def writexml(self, writer, indent="", addindent="", newl=""):
+        """Write an XML element to a file-like object
+
+        Write the element to the writer object that must provide
+        a write method (e.g. a file or StringIO object).
+        """
         # indent = current indentation
         # addindent = indentation to add to higher levels
         # newl = newline string
@@ -1787,12 +1811,17 @@
             raise xml.dom.NotSupportedErr("cannot import document type nodes")
         return _clone_node(node, deep, self)
 
-    def writexml(self, writer, indent="", addindent="", newl="", encoding=None):
-        if encoding is None:
-            writer.write('<?xml version="1.0" ?>'+newl)
-        else:
-            writer.write('<?xml version="1.0" encoding="%s"?>%s' % (
-                encoding, newl))
+    def writexml(self, writer, indent="", addindent="", newl="", encoding=None,
+                 standalone=None):
+        declarations = []
+
+        if encoding:
+            declarations.append(f'encoding="{encoding}"')
+        if standalone is not None:
+            declarations.append(f'standalone="{"yes" if standalone else "no"}"')
+
+        writer.write(f'<?xml version="1.0" {" ".join(declarations)}?>{newl}')
+
         for node in self.childNodes:
             node.writexml(writer, indent, addindent, newl)
 
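The standalone flag threaded through toxml()/toprettyxml()/writexml() above surfaces in the XML declaration. A minimal sketch:

    from xml.dom.minidom import parseString

    doc = parseString('<root><a/></root>')
    out = doc.toxml(encoding='utf-8', standalone=True)
    assert out.startswith(b'<?xml version="1.0" encoding="utf-8" standalone="yes"?>')
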
diff --git a/common/py3-stdlib/xml/dom/xmlbuilder.py b/common/py3-stdlib/xml/dom/xmlbuilder.py
index 213ab14..8a20026 100644
--- a/common/py3-stdlib/xml/dom/xmlbuilder.py
+++ b/common/py3-stdlib/xml/dom/xmlbuilder.py
@@ -1,7 +1,6 @@
 """Implementation of the DOM Level 3 'LS-Load' feature."""
 
 import copy
-import warnings
 import xml.dom
 
 from xml.dom.NodeFilter import NodeFilter
diff --git a/common/py3-stdlib/xml/etree/ElementInclude.py b/common/py3-stdlib/xml/etree/ElementInclude.py
index 963470e..5303062 100644
--- a/common/py3-stdlib/xml/etree/ElementInclude.py
+++ b/common/py3-stdlib/xml/etree/ElementInclude.py
@@ -50,18 +50,28 @@
 
 import copy
 from . import ElementTree
+from urllib.parse import urljoin
 
 XINCLUDE = "{http://www.w3.org/2001/XInclude}"
 
 XINCLUDE_INCLUDE = XINCLUDE + "include"
 XINCLUDE_FALLBACK = XINCLUDE + "fallback"
 
+# For security reasons, the inclusion depth is limited to this read-only value by default.
+DEFAULT_MAX_INCLUSION_DEPTH = 6
+
+
 ##
 # Fatal include error.
 
 class FatalIncludeError(SyntaxError):
     pass
 
+
+class LimitedRecursiveIncludeError(FatalIncludeError):
+    pass
+
+
 ##
 # Default loader.  This loader reads an included resource from disk.
 #
@@ -92,13 +102,33 @@
 # @param loader Optional resource loader.  If omitted, it defaults
 #     to {@link default_loader}.  If given, it should be a callable
 #     that implements the same interface as <b>default_loader</b>.
+# @param base_url The base URL of the original file, to resolve
+#     relative include file references.
+# @param max_depth The maximum number of recursive inclusions.
+#     Limited to reduce the risk of malicious content explosion.
+#     Pass None to disable the limitation.
+# @throws LimitedRecursiveIncludeError If the {@link max_depth} was exceeded.
 # @throws FatalIncludeError If the function fails to include a given
 #     resource, or if the tree contains malformed XInclude elements.
-# @throws OSError If the function fails to load a given resource.
+# @throws IOError If the function fails to load a given resource.
+# @returns the node or its replacement if it was an XInclude node
 
-def include(elem, loader=None):
+def include(elem, loader=None, base_url=None,
+            max_depth=DEFAULT_MAX_INCLUSION_DEPTH):
+    if max_depth is None:
+        max_depth = -1
+    elif max_depth < 0:
+        raise ValueError("expected non-negative depth or None for 'max_depth', got %r" % max_depth)
+
+    if hasattr(elem, 'getroot'):
+        elem = elem.getroot()
     if loader is None:
         loader = default_loader
+
+    _include(elem, loader, base_url, max_depth, set())
+
+
+def _include(elem, loader, base_url, max_depth, _parent_hrefs):
     # look for xinclude elements
     i = 0
     while i < len(elem):
@@ -106,14 +136,24 @@
         if e.tag == XINCLUDE_INCLUDE:
             # process xinclude directive
             href = e.get("href")
+            if base_url:
+                href = urljoin(base_url, href)
             parse = e.get("parse", "xml")
             if parse == "xml":
+                if href in _parent_hrefs:
+                    raise FatalIncludeError("recursive include of %s" % href)
+                if max_depth == 0:
+                    raise LimitedRecursiveIncludeError(
+                        "maximum xinclude depth reached when including file %s" % href)
+                _parent_hrefs.add(href)
                 node = loader(href, parse)
                 if node is None:
                     raise FatalIncludeError(
                         "cannot load %r as %r" % (href, parse)
                         )
-                node = copy.copy(node)
+                node = copy.copy(node)  # FIXME: this makes little sense with recursive includes
+                _include(node, loader, href, max_depth - 1, _parent_hrefs)
+                _parent_hrefs.remove(href)
                 if e.tail:
                     node.tail = (node.tail or "") + e.tail
                 elem[i] = node
@@ -123,11 +163,13 @@
                     raise FatalIncludeError(
                         "cannot load %r as %r" % (href, parse)
                         )
+                if e.tail:
+                    text += e.tail
                 if i:
                     node = elem[i-1]
-                    node.tail = (node.tail or "") + text + (e.tail or "")
+                    node.tail = (node.tail or "") + text
                 else:
-                    elem.text = (elem.text or "") + text + (e.tail or "")
+                    elem.text = (elem.text or "") + text
                 del elem[i]
                 continue
             else:
@@ -139,5 +181,5 @@
                 "xi:fallback tag must be child of xi:include (%r)" % e.tag
                 )
         else:
-            include(e, loader)
-        i = i + 1
+            _include(e, loader, base_url, max_depth, _parent_hrefs)
+        i += 1
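
A minimal usage sketch of the extended include() API above, assuming a hypothetical document.xml whose xi:include href attributes are relative to it (the depth limit shown is illustrative; omitting max_depth falls back to DEFAULT_MAX_INCLUSION_DEPTH):

    from xml.etree import ElementTree, ElementInclude

    tree = ElementTree.parse("document.xml")   # hypothetical input file
    ElementInclude.include(
        tree.getroot(),
        base_url="document.xml",  # resolves relative href="..." references
        max_depth=3,              # exceeding this raises LimitedRecursiveIncludeError
    )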
diff --git a/common/py3-stdlib/xml/etree/ElementTree.py b/common/py3-stdlib/xml/etree/ElementTree.py
index 645e999..7a26900 100644
--- a/common/py3-stdlib/xml/etree/ElementTree.py
+++ b/common/py3-stdlib/xml/etree/ElementTree.py
@@ -76,7 +76,7 @@
     "dump",
     "Element", "ElementTree",
     "fromstring", "fromstringlist",
-    "iselement", "iterparse",
+    "indent", "iselement", "iterparse",
     "parse", "ParseError",
     "PI", "ProcessingInstruction",
     "QName",
@@ -195,6 +195,13 @@
         original tree.
 
         """
+        warnings.warn(
+            "elem.copy() is deprecated. Use copy.copy(elem) instead.",
+            DeprecationWarning
+            )
+        return self.__copy__()
+
+    def __copy__(self):
         elem = self.makeelement(self.tag, self.attrib)
         elem.text = self.text
         elem.tail = self.tail
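
A quick sketch of the spelling the deprecation warning above recommends; the shared-children result assumes __copy__ keeps the shallow semantics of the old copy():

    import copy
    from xml.etree import ElementTree as ET

    elem = ET.fromstring("<a><b/></a>")
    dup = copy.copy(elem)                  # invokes Element.__copy__
    print(dup is elem, dup[0] is elem[0])  # False True -- the shallow copy shares children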
@@ -273,19 +280,6 @@
         # assert iselement(element)
         self._children.remove(subelement)
 
-    def getchildren(self):
-        """(Deprecated) Return all subelements.
-
-        Elements are returned in document order.
-
-        """
-        warnings.warn(
-            "This method will be removed in future versions.  "
-            "Use 'list(elem)' or iteration over elem instead.",
-            DeprecationWarning, stacklevel=2
-            )
-        return self._children
-
     def find(self, path, namespaces=None):
         """Find first matching element by tag name or path.
 
@@ -409,15 +403,6 @@
         for e in self._children:
             yield from e.iter(tag)
 
-    # compatibility
-    def getiterator(self, tag=None):
-        warnings.warn(
-            "This method will be removed in future versions.  "
-            "Use 'elem.iter()' or 'list(elem.iter())' instead.",
-            DeprecationWarning, stacklevel=2
-        )
-        return list(self.iter(tag))
-
     def itertext(self):
         """Create text iterator.
 
@@ -617,15 +602,6 @@
         # assert self._root is not None
         return self._root.iter(tag)
 
-    # compatibility
-    def getiterator(self, tag=None):
-        warnings.warn(
-            "This method will be removed in future versions.  "
-            "Use 'tree.iter()' or 'list(tree.iter())' instead.",
-            DeprecationWarning, stacklevel=2
-        )
-        return list(self.iter(tag))
-
     def find(self, path, namespaces=None):
         """Find first matching element by tag name or path.
 
@@ -1081,15 +1057,15 @@
             text = text.replace(">", "&gt;")
         if "\"" in text:
             text = text.replace("\"", "&quot;")
-        # The following business with carriage returns is to satisfy
-        # Section 2.11 of the XML specification, stating that
-        # CR or CR LN should be replaced with just LN
+        # Although section 2.11 of the XML specification states that CR or
+        # CR LN should be replaced with just LN, that rule applies only to
+        # end-of-line characters that organize the file into lines. Within
+        # attribute values we replace them with character references instead.
         # http://www.w3.org/TR/REC-xml/#sec-line-ends
-        if "\r\n" in text:
-            text = text.replace("\r\n", "\n")
+        # The current solution, contained in the following six lines, was
+        # discussed in issues 17582 and 39011.
         if "\r" in text:
-            text = text.replace("\r", "\n")
-        #The following four lines are issue 17582
+            text = text.replace("\r", "&#13;")
         if "\n" in text:
             text = text.replace("\n", "&#10;")
         if "\t" in text:
@@ -1185,6 +1161,57 @@
     if not tail or tail[-1] != "\n":
         sys.stdout.write("\n")
 
+
+def indent(tree, space="  ", level=0):
+    """Indent an XML document by inserting newlines and indentation space
+    after elements.
+
+    *tree* is the ElementTree or Element to modify.  The (root) element
+    itself will not be changed, but the tail text of all elements in its
+    subtree will be adapted.
+
+    *space* is the whitespace to insert for each indentation level, two
+    space characters by default.
+
+    *level* is the initial indentation level. Setting this to a higher
+    value than 0 can be used for indenting subtrees that are more deeply
+    nested inside of a document.
+    """
+    if isinstance(tree, ElementTree):
+        tree = tree.getroot()
+    if level < 0:
+        raise ValueError(f"Initial indentation level must be >= 0, got {level}")
+    if not len(tree):
+        return
+
+    # Reduce the memory consumption by reusing indentation strings.
+    indentations = ["\n" + level * space]
+
+    def _indent_children(elem, level):
+        # Start a new indentation level for the first child.
+        child_level = level + 1
+        try:
+            child_indentation = indentations[child_level]
+        except IndexError:
+            child_indentation = indentations[level] + space
+            indentations.append(child_indentation)
+
+        if not elem.text or not elem.text.strip():
+            elem.text = child_indentation
+
+        for child in elem:
+            if len(child):
+                _indent_children(child, child_level)
+            if not child.tail or not child.tail.strip():
+                child.tail = child_indentation
+
+        # Dedent after the last child by overwriting the previous indentation.
+        if not child.tail.strip():
+            child.tail = indentations[level]
+
+    _indent_children(tree, 0)
+
+
 # --------------------------------------------------------------------
 # parsing
 
@@ -1690,14 +1717,14 @@
     def feed(self, data):
         """Feed encoded data to parser."""
         try:
-            self.parser.Parse(data, 0)
+            self.parser.Parse(data, False)
         except self._error as v:
             self._raiseerror(v)
 
     def close(self):
         """Finish feeding data to parser and return element structure."""
         try:
-            self.parser.Parse("", 1) # end of data
+            self.parser.Parse(b"", True) # end of data
         except self._error as v:
             self._raiseerror(v)
         try:
@@ -1849,6 +1876,11 @@
                 self._declared_ns_stack[-1].append((uri, prefix))
                 return f'{prefix}:{tag}' if prefix else tag, tag, uri
 
+        if not uri:
+            # As soon as a default namespace is defined,
+            # anything that has no namespace (and thus, no prefix) goes there.
+            return tag, tag, uri
+
         raise ValueError(f'Namespace "{uri}" is not declared in scope')
 
     def data(self, data):
diff --git a/common/py3-stdlib/xml/sax/__init__.py b/common/py3-stdlib/xml/sax/__init__.py
index a0f5d40..17b7587 100644
--- a/common/py3-stdlib/xml/sax/__init__.py
+++ b/common/py3-stdlib/xml/sax/__init__.py
@@ -78,7 +78,7 @@
     for parser_name in list(parser_list) + default_parser_list:
         try:
             return _create_parser(parser_name)
-        except ImportError as e:
+        except ImportError:
             import sys
             if parser_name in sys.modules:
                 # The parser module was found, but importing it
diff --git a/common/py3-stdlib/xml/sax/expatreader.py b/common/py3-stdlib/xml/sax/expatreader.py
index 5066ffc..e334ac9 100644
--- a/common/py3-stdlib/xml/sax/expatreader.py
+++ b/common/py3-stdlib/xml/sax/expatreader.py
@@ -93,7 +93,7 @@
         self._parser = None
         self._namespaces = namespaceHandling
         self._lex_handler_prop = None
-        self._parsing = 0
+        self._parsing = False
         self._entity_stack = []
         self._external_ges = 0
         self._interning = None
@@ -203,10 +203,10 @@
 
     # IncrementalParser methods
 
-    def feed(self, data, isFinal = 0):
+    def feed(self, data, isFinal=False):
         if not self._parsing:
             self.reset()
-            self._parsing = 1
+            self._parsing = True
             self._cont_handler.startDocument()
 
         try:
@@ -237,13 +237,13 @@
             # If we are completing an external entity, do nothing here
             return
         try:
-            self.feed("", isFinal = 1)
+            self.feed(b"", isFinal=True)
             self._cont_handler.endDocument()
-            self._parsing = 0
+            self._parsing = False
             # break cycle created by expat handlers pointing to our methods
             self._parser = None
         finally:
-            self._parsing = 0
+            self._parsing = False
             if self._parser is not None:
                 # Keep ErrorColumnNumber and ErrorLineNumber after closing.
                 parser = _ClosedParser()
@@ -307,7 +307,7 @@
         self._parser.SetParamEntityParsing(
             expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE)
 
-        self._parsing = 0
+        self._parsing = False
         self._entity_stack = []
 
     # Locator methods
diff --git a/common/py3-stdlib/xmlrpc/client.py b/common/py3-stdlib/xmlrpc/client.py
index b987574..d15d60d 100644
--- a/common/py3-stdlib/xmlrpc/client.py
+++ b/common/py3-stdlib/xmlrpc/client.py
@@ -313,31 +313,38 @@
             s = self.timetuple()
             o = other.timetuple()
         else:
-            otype = (hasattr(other, "__class__")
-                     and other.__class__.__name__
-                     or type(other))
-            raise TypeError("Can't compare %s and %s" %
-                            (self.__class__.__name__, otype))
+            s = self
+            o = NotImplemented
         return s, o
 
     def __lt__(self, other):
         s, o = self.make_comparable(other)
+        if o is NotImplemented:
+            return NotImplemented
         return s < o
 
     def __le__(self, other):
         s, o = self.make_comparable(other)
+        if o is NotImplemented:
+            return NotImplemented
         return s <= o
 
     def __gt__(self, other):
         s, o = self.make_comparable(other)
+        if o is NotImplemented:
+            return NotImplemented
         return s > o
 
     def __ge__(self, other):
         s, o = self.make_comparable(other)
+        if o is NotImplemented:
+            return NotImplemented
         return s >= o
 
     def __eq__(self, other):
         s, o = self.make_comparable(other)
+        if o is NotImplemented:
+            return NotImplemented
         return s == o
 
     def timetuple(self):
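
What the NotImplemented protocol above changes in practice, as a sketch: mixed-type equality now falls back gracefully instead of make_comparable raising TypeError itself:

    import xmlrpc.client

    dt = xmlrpc.client.DateTime("20240101T00:00:00")
    print(dt == 42)  # False -- both operands return NotImplemented, so Python compares identity
    try:
        dt < 42      # ordering still fails, but with the interpreter's own TypeError
    except TypeError as exc:
        print(exc)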
@@ -435,7 +442,7 @@
         target.xml(encoding, None)
 
     def feed(self, data):
-        self._parser.Parse(data, 0)
+        self._parser.Parse(data, False)
 
     def close(self):
         try:
@@ -1414,15 +1421,14 @@
         # establish a "logical" server connection
 
         # get the url
-        type, uri = urllib.parse._splittype(uri)
-        if type not in ("http", "https"):
+        p = urllib.parse.urlparse(uri)
+        if p.scheme not in ("http", "https"):
             raise OSError("unsupported XML-RPC protocol")
-        self.__host, self.__handler = urllib.parse._splithost(uri)
-        if not self.__handler:
-            self.__handler = "/RPC2"
+        self.__host = p.netloc
+        self.__handler = p.path or "/RPC2"
 
         if transport is None:
-            if type == "https":
+            if p.scheme == "https":
                 handler = SafeTransport
                 extra_kwargs = {"context": context}
             else:
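
For reference, how the public urlparse() call above decomposes an endpoint, replacing the private _splittype/_splithost helpers (the URL is illustrative):

    from urllib.parse import urlparse

    p = urlparse("http://localhost:8000/RPC2")
    print(p.scheme, p.netloc, p.path)  # http localhost:8000 /RPC2
    print(urlparse("http://localhost:8000").path or "/RPC2")  # empty path -> default handler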
diff --git a/common/py3-stdlib/xmlrpc/server.py b/common/py3-stdlib/xmlrpc/server.py
index 32aba4d..287e324 100644
--- a/common/py3-stdlib/xmlrpc/server.py
+++ b/common/py3-stdlib/xmlrpc/server.py
@@ -732,7 +732,7 @@
         # hyperlinking of arbitrary strings being used as method
         # names. Only methods with names consisting of word characters
         # and '.'s are hyperlinked.
-        pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
+        pattern = re.compile(r'\b((http|https|ftp)://\S+[\w/]|'
                                 r'RFC[- ]?(\d+)|'
                                 r'PEP[- ]?(\d+)|'
                                 r'(self\.)?((?:\w|\.)+))\b')
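
A minimal check of the widened pattern (the sample string is invented):

    import re

    pattern = re.compile(r'\b((http|https|ftp)://\S+[\w/]|'
                         r'RFC[- ]?(\d+)|'
                         r'PEP[- ]?(\d+)|'
                         r'(self\.)?((?:\w|\.)+))\b')
    m = pattern.search("https://example.org/docs is the new home")
    print(m.group())  # https://example.org/docs -- matched by the new https branch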
diff --git a/common/py3-stdlib/zipfile.py b/common/py3-stdlib/zipfile.py
index 73e8966..816f858 100644
--- a/common/py3-stdlib/zipfile.py
+++ b/common/py3-stdlib/zipfile.py
@@ -4,7 +4,6 @@
 XXX references to utf-8 need further investigation.
 """
 import binascii
-import functools
 import importlib.util
 import io
 import itertools
@@ -378,11 +377,11 @@
         self.volume = 0                 # Volume number of file header
         self.internal_attr = 0          # Internal attributes
         self.external_attr = 0          # External file attributes
+        self.compress_size = 0          # Size of the compressed file
+        self.file_size = 0              # Size of the uncompressed file
         # Other attributes are set by class ZipFile:
         # header_offset         Byte offset to the file header
         # CRC                   CRC-32 of the uncompressed file
-        # compress_size         Size of the compressed file
-        # file_size             Size of the uncompressed file
 
     def __repr__(self):
         result = ['<%s filename=%r' % (self.__class__.__name__, self.filename)]
@@ -467,44 +466,23 @@
             if ln+4 > len(extra):
                 raise BadZipFile("Corrupt extra field %04x (size=%d)" % (tp, ln))
             if tp == 0x0001:
-                if ln >= 24:
-                    counts = unpack('<QQQ', extra[4:28])
-                elif ln == 16:
-                    counts = unpack('<QQ', extra[4:20])
-                elif ln == 8:
-                    counts = unpack('<Q', extra[4:12])
-                elif ln == 0:
-                    counts = ()
-                else:
-                    raise BadZipFile("Corrupt extra field %04x (size=%d)" % (tp, ln))
-
-                idx = 0
-
+                data = extra[4:ln+4]
                 # ZIP64 extension (large files and/or large archives)
-                if self.file_size in (0xffffffffffffffff, 0xffffffff):
-                    if len(counts) <= idx:
-                        raise BadZipFile(
-                            "Corrupt zip64 extra field. File size not found."
-                        )
-                    self.file_size = counts[idx]
-                    idx += 1
-
-                if self.compress_size == 0xFFFFFFFF:
-                    if len(counts) <= idx:
-                        raise BadZipFile(
-                            "Corrupt zip64 extra field. Compress size not found."
-                        )
-                    self.compress_size = counts[idx]
-                    idx += 1
-
-                if self.header_offset == 0xffffffff:
-                    if len(counts) <= idx:
-                        raise BadZipFile(
-                            "Corrupt zip64 extra field. Header offset not found."
-                        )
-                    old = self.header_offset
-                    self.header_offset = counts[idx]
-                    idx+=1
+                try:
+                    if self.file_size in (0xFFFF_FFFF_FFFF_FFFF, 0xFFFF_FFFF):
+                        field = "File size"
+                        self.file_size, = unpack('<Q', data[:8])
+                        data = data[8:]
+                    if self.compress_size == 0xFFFF_FFFF:
+                        field = "Compress size"
+                        self.compress_size, = unpack('<Q', data[:8])
+                        data = data[8:]
+                    if self.header_offset == 0xFFFF_FFFF:
+                        field = "Header offset"
+                        self.header_offset, = unpack('<Q', data[:8])
+                except struct.error:
+                    raise BadZipFile(f"Corrupt zip64 extra field. "
+                                     f"{field} not found.") from None
 
             extra = extra[ln+4:]
 
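
To make the rewritten loop above concrete, a hand-built zip64 extra-field payload carrying only a file size; the value is invented:

    import struct

    file_size = 5_000_000_000              # needs zip64: larger than 0xFFFF_FFFF
    data = struct.pack('<Q', file_size)    # extra-field payload after the tag/length header
    size, = struct.unpack('<Q', data[:8])  # consume one 8-byte field
    data = data[8:]                        # whatever fields remain
    print(size, data)                      # 5000000000 b''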
@@ -912,12 +890,16 @@
         return self._readbuffer[self._offset: self._offset + 512]
 
     def readable(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file.")
         return True
 
     def read(self, n=-1):
         """Read and return up to n bytes.
         If the argument is omitted, None, or negative, data is read and returned until EOF is reached.
         """
+        if self.closed:
+            raise ValueError("read from closed file.")
         if n is None or n < 0:
             buf = self._readbuffer[self._offset:]
             self._readbuffer = b''
@@ -1054,9 +1036,13 @@
             super().close()
 
     def seekable(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file.")
         return self._seekable
 
     def seek(self, offset, whence=0):
+        if self.closed:
+            raise ValueError("seek on closed file.")
         if not self._seekable:
             raise io.UnsupportedOperation("underlying stream is not seekable")
         curr_pos = self.tell()
@@ -1105,6 +1091,8 @@
         return self.tell()
 
     def tell(self):
+        if self.closed:
+            raise ValueError("tell on closed file.")
         if not self._seekable:
             raise io.UnsupportedOperation("underlying stream is not seekable")
         filepos = self._orig_file_size - self._left - len(self._readbuffer) + self._offset
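
The guards above change what a closed ZipExtFile reports; a self-contained sketch with an in-memory archive:

    import io
    import zipfile

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as zf:
        zf.writestr("a.txt", "payload")

    with zipfile.ZipFile(buf) as zf:
        f = zf.open("a.txt")
        f.close()
        try:
            f.read()
        except ValueError as exc:
            print(exc)  # read from closed file.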
@@ -1584,9 +1572,7 @@
                              "another write handle open on it. "
                              "Close the first handle before opening another.")
 
-        # Sizes and CRC are overwritten with correct data after processing the file
-        if not hasattr(zinfo, 'file_size'):
-            zinfo.file_size = 0
+        # Size and CRC are overwritten with correct data after processing the file
         zinfo.compress_size = 0
         zinfo.CRC = 0
 
@@ -1882,25 +1868,15 @@
 
             extract_version = max(min_version, zinfo.extract_version)
             create_version = max(min_version, zinfo.create_version)
-            try:
-                filename, flag_bits = zinfo._encodeFilenameFlags()
-                centdir = struct.pack(structCentralDir,
-                                      stringCentralDir, create_version,
-                                      zinfo.create_system, extract_version, zinfo.reserved,
-                                      flag_bits, zinfo.compress_type, dostime, dosdate,
-                                      zinfo.CRC, compress_size, file_size,
-                                      len(filename), len(extra_data), len(zinfo.comment),
-                                      0, zinfo.internal_attr, zinfo.external_attr,
-                                      header_offset)
-            except DeprecationWarning:
-                print((structCentralDir, stringCentralDir, create_version,
-                       zinfo.create_system, extract_version, zinfo.reserved,
-                       zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
-                       zinfo.CRC, compress_size, file_size,
-                       len(zinfo.filename), len(extra_data), len(zinfo.comment),
-                       0, zinfo.internal_attr, zinfo.external_attr,
-                       header_offset), file=sys.stderr)
-                raise
+            filename, flag_bits = zinfo._encodeFilenameFlags()
+            centdir = struct.pack(structCentralDir,
+                                  stringCentralDir, create_version,
+                                  zinfo.create_system, extract_version, zinfo.reserved,
+                                  flag_bits, zinfo.compress_type, dostime, dosdate,
+                                  zinfo.CRC, compress_size, file_size,
+                                  len(filename), len(extra_data), len(zinfo.comment),
+                                  0, zinfo.internal_attr, zinfo.external_attr,
+                                  header_offset)
             self.fp.write(centdir)
             self.fp.write(filename)
             self.fp.write(extra_data)
@@ -1942,6 +1918,8 @@
                              centDirSize, centDirOffset, len(self._comment))
         self.fp.write(endrec)
         self.fp.write(self._comment)
+        if self.mode == "a":
+            self.fp.truncate()
         self.fp.flush()
 
     def _fpclose(self, fp):
@@ -2317,20 +2295,31 @@
         self.root = FastLookup.make(root)
         self.at = at
 
-    @property
-    def open(self):
-        return functools.partial(self.root.open, self.at)
+    def open(self, mode='r', *args, **kwargs):
+        """
+        Open this entry as text or binary following the semantics
+        of ``pathlib.Path.open()`` by passing arguments through
+        to io.TextIOWrapper().
+        """
+        pwd = kwargs.pop('pwd', None)
+        zip_mode = mode[0]
+        stream = self.root.open(self.at, zip_mode, pwd=pwd)
+        if 'b' in mode:
+            if args or kwargs:
+                raise ValueError("encoding args invalid for binary operation")
+            return stream
+        return io.TextIOWrapper(stream, *args, **kwargs)
 
     @property
     def name(self):
         return posixpath.basename(self.at.rstrip("/"))
 
     def read_text(self, *args, **kwargs):
-        with self.open() as strm:
-            return io.TextIOWrapper(strm, *args, **kwargs).read()
+        with self.open('r', *args, **kwargs) as strm:
+            return strm.read()
 
     def read_bytes(self):
-        with self.open() as strm:
+        with self.open('rb') as strm:
             return strm.read()
 
     def _is_child(self, path):
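
A usage sketch of the reworked Path.open() above, again with an in-memory archive; the file name and encoding are illustrative:

    import io
    import zipfile

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as zf:
        zf.writestr("hello.txt", "hi there\n")

    entry = zipfile.Path(buf) / "hello.txt"
    with entry.open("r", encoding="utf-8") as f:  # text mode wraps the stream in TextIOWrapper
        print(f.read())
    with entry.open("rb") as f:                   # binary mode returns the raw stream
        print(f.read())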
diff --git a/darwin-x86/bin/aidl b/darwin-x86/bin/aidl
index 08c5d6f..5e1a731 100755
--- a/darwin-x86/bin/aidl
+++ b/darwin-x86/bin/aidl
Binary files differ
diff --git a/darwin-x86/bin/bison b/darwin-x86/bin/bison
index c1d6105..480d58b 100755
--- a/darwin-x86/bin/bison
+++ b/darwin-x86/bin/bison
Binary files differ
diff --git a/darwin-x86/bin/hidl-gen b/darwin-x86/bin/hidl-gen
index d9f76aa..bbf2293 100755
--- a/darwin-x86/bin/hidl-gen
+++ b/darwin-x86/bin/hidl-gen
Binary files differ
diff --git a/darwin-x86/bin/hidl-lint b/darwin-x86/bin/hidl-lint
index a30afb6..17125ac 100755
--- a/darwin-x86/bin/hidl-lint
+++ b/darwin-x86/bin/hidl-lint
Binary files differ
diff --git a/darwin-x86/bin/make b/darwin-x86/bin/make
index d43d9f4..e3aab80 100755
--- a/darwin-x86/bin/make
+++ b/darwin-x86/bin/make
Binary files differ
diff --git a/darwin-x86/bin/py2-cmd b/darwin-x86/bin/py2-cmd
index a6585da..944d673 100755
--- a/darwin-x86/bin/py2-cmd
+++ b/darwin-x86/bin/py2-cmd
Binary files differ
diff --git a/darwin-x86/bin/py3-cmd b/darwin-x86/bin/py3-cmd
index 696118d..c3ffc08 100755
--- a/darwin-x86/bin/py3-cmd
+++ b/darwin-x86/bin/py3-cmd
Binary files differ
diff --git a/darwin-x86/bin/py3-launcher-autorun64 b/darwin-x86/bin/py3-launcher-autorun64
index ece9167..3f9d80c 100755
--- a/darwin-x86/bin/py3-launcher-autorun64
+++ b/darwin-x86/bin/py3-launcher-autorun64
Binary files differ
diff --git a/darwin-x86/bin/py3-launcher64 b/darwin-x86/bin/py3-launcher64
index a07a8a1..b334a03 100755
--- a/darwin-x86/bin/py3-launcher64
+++ b/darwin-x86/bin/py3-launcher64
Binary files differ
diff --git a/darwin-x86/bin/soong_zip b/darwin-x86/bin/soong_zip
index 17af812..9b22bd1 100755
--- a/darwin-x86/bin/soong_zip
+++ b/darwin-x86/bin/soong_zip
Binary files differ
diff --git a/darwin-x86/bin/toybox b/darwin-x86/bin/toybox
index 994a223..34a4f8f 100755
--- a/darwin-x86/bin/toybox
+++ b/darwin-x86/bin/toybox
Binary files differ
diff --git a/darwin-x86/bin/zip2zip b/darwin-x86/bin/zip2zip
index 1bf0625..19af8f0 100755
--- a/darwin-x86/bin/zip2zip
+++ b/darwin-x86/bin/zip2zip
Binary files differ
diff --git a/darwin-x86/bin/zipalign b/darwin-x86/bin/zipalign
index ca10052..09a7934 100755
--- a/darwin-x86/bin/zipalign
+++ b/darwin-x86/bin/zipalign
Binary files differ
diff --git a/darwin-x86/lib64/libbase.dylib b/darwin-x86/lib64/libbase.dylib
index 5c5819f..92eb019 100755
--- a/darwin-x86/lib64/libbase.dylib
+++ b/darwin-x86/lib64/libbase.dylib
Binary files differ
diff --git a/darwin-x86/lib64/libcrypto-host.dylib b/darwin-x86/lib64/libcrypto-host.dylib
index ef33d0e..0325b09 100755
--- a/darwin-x86/lib64/libcrypto-host.dylib
+++ b/darwin-x86/lib64/libcrypto-host.dylib
Binary files differ
diff --git a/linux-x86/asan/bin/aidl b/linux-x86/asan/bin/aidl
index e5746a4..4ed7260 100755
--- a/linux-x86/asan/bin/aidl
+++ b/linux-x86/asan/bin/aidl
Binary files differ
diff --git a/linux-x86/asan/bin/toybox b/linux-x86/asan/bin/toybox
index f05854c..81445b6 100755
--- a/linux-x86/asan/bin/toybox
+++ b/linux-x86/asan/bin/toybox
Binary files differ
diff --git a/linux-x86/asan/bin/zipalign b/linux-x86/asan/bin/zipalign
index abaf525..e98e664 100755
--- a/linux-x86/asan/bin/zipalign
+++ b/linux-x86/asan/bin/zipalign
Binary files differ
diff --git a/linux-x86/asan/lib64/libbase.so b/linux-x86/asan/lib64/libbase.so
index 20f85a6..7b09f75 100755
--- a/linux-x86/asan/lib64/libbase.so
+++ b/linux-x86/asan/lib64/libbase.so
Binary files differ
diff --git a/linux-x86/asan/lib64/libcrypto-host.so b/linux-x86/asan/lib64/libcrypto-host.so
index db29cd7..732b751 100755
--- a/linux-x86/asan/lib64/libcrypto-host.so
+++ b/linux-x86/asan/lib64/libcrypto-host.so
Binary files differ
diff --git a/linux-x86/bin/aidl b/linux-x86/bin/aidl
index 491a5d9..77eaaf7 100755
--- a/linux-x86/bin/aidl
+++ b/linux-x86/bin/aidl
Binary files differ
diff --git a/linux-x86/bin/hidl-gen b/linux-x86/bin/hidl-gen
index 47c5c82..a6df209 100755
--- a/linux-x86/bin/hidl-gen
+++ b/linux-x86/bin/hidl-gen
Binary files differ
diff --git a/linux-x86/bin/hidl-lint b/linux-x86/bin/hidl-lint
index e5a7869..91f7fa1 100755
--- a/linux-x86/bin/hidl-lint
+++ b/linux-x86/bin/hidl-lint
Binary files differ
diff --git a/linux-x86/bin/py2-cmd b/linux-x86/bin/py2-cmd
index 4f5ee5e..8f7573c 100755
--- a/linux-x86/bin/py2-cmd
+++ b/linux-x86/bin/py2-cmd
Binary files differ
diff --git a/linux-x86/bin/py3-cmd b/linux-x86/bin/py3-cmd
index 9a7de89..97a6c6e 100755
--- a/linux-x86/bin/py3-cmd
+++ b/linux-x86/bin/py3-cmd
Binary files differ
diff --git a/linux-x86/bin/py3-launcher-autorun64 b/linux-x86/bin/py3-launcher-autorun64
index af815e0..e35e245 100755
--- a/linux-x86/bin/py3-launcher-autorun64
+++ b/linux-x86/bin/py3-launcher-autorun64
Binary files differ
diff --git a/linux-x86/bin/py3-launcher64 b/linux-x86/bin/py3-launcher64
index 896d247..d7c188a 100755
--- a/linux-x86/bin/py3-launcher64
+++ b/linux-x86/bin/py3-launcher64
Binary files differ
diff --git a/linux-x86/bin/soong_zip b/linux-x86/bin/soong_zip
index 213e699..9a0fda5 100755
--- a/linux-x86/bin/soong_zip
+++ b/linux-x86/bin/soong_zip
Binary files differ
diff --git a/linux-x86/bin/toybox b/linux-x86/bin/toybox
index 8f5b42a..ee5f865 100755
--- a/linux-x86/bin/toybox
+++ b/linux-x86/bin/toybox
Binary files differ
diff --git a/linux-x86/bin/zip2zip b/linux-x86/bin/zip2zip
index 6964ac4..b978197 100755
--- a/linux-x86/bin/zip2zip
+++ b/linux-x86/bin/zip2zip
Binary files differ
diff --git a/linux-x86/bin/zipalign b/linux-x86/bin/zipalign
index 5bbdd83..a03ca6e 100755
--- a/linux-x86/bin/zipalign
+++ b/linux-x86/bin/zipalign
Binary files differ
diff --git a/linux-x86/lib64/libbase.so b/linux-x86/lib64/libbase.so
index c8064e4..834dc9b 100755
--- a/linux-x86/lib64/libbase.so
+++ b/linux-x86/lib64/libbase.so
Binary files differ
diff --git a/linux-x86/lib64/libcrypto-host.so b/linux-x86/lib64/libcrypto-host.so
index c6fdd80..6681454 100755
--- a/linux-x86/lib64/libcrypto-host.so
+++ b/linux-x86/lib64/libcrypto-host.so
Binary files differ
diff --git a/manifest.xml b/manifest.xml
index f2864aa..5fd20b0 100644
--- a/manifest.xml
+++ b/manifest.xml
@@ -9,37 +9,37 @@
 
   <project name="platform/external/ninja" path="external/ninja" revision="d264d658aceea4de8a709dae40b47678c8bae284" />
 
-  <project clone-depth="1" groups="pdk" name="platform/prebuilts/remoteexecution-client" path="prebuilts/remoteexecution-client" revision="f913105734387dc1d4ae641422480a545d6614f0" />
+  <project clone-depth="1" groups="pdk" name="platform/prebuilts/remoteexecution-client" path="prebuilts/remoteexecution-client" revision="07b33e4b5ee481a76560b6be3ed64d817e56d20b" />
 
-  <project name="platform/system/unwinding" path="system/unwinding" revision="b074f64b2248fc26904b103e6b5b91bb9e48cbaf" />
+  <project name="platform/system/unwinding" path="system/unwinding" revision="780b9d9f06e8d151ae47f00976e96385e0edd22e" />
 
-  <project name="platform/external/icu" path="external/icu" revision="f1b98dd04b6038cde4db0da0f5a0d7ce49d3bc1c" />
+  <project name="platform/external/icu" path="external/icu" revision="9989ee7d3565482da358aea86f5ea46ad595f4ce" />
 
   <project clone-depth="1" name="platform/prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9" path="prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9" revision="7144bd5fe5db7f470563c34e4f0ac6ab5703f926" />
 
   <project name="platform/external/go-cmp" path="external/go-cmp" revision="0f7c828c80e325c9fc2af078ffb108362bb84c15" />
 
-  <project name="platform/system/libbase" path="system/libbase" revision="93d2d5923af2b276f21453fef0b09d22be43d2ef" />
+  <project name="platform/system/libbase" path="system/libbase" revision="6e51de4672e3ac51123d755d353a9dbee2a57af4" />
 
   <project name="platform/external/javapoet" path="external/javapoet" revision="137745472179f2170a58ec914cdad78921fc7679" />
 
   <project name="platform/external/capstone" path="external/capstone" revision="63bfcadf03cd1e0d6a98fe7a4cee189063668162" />
 
-  <project clone-depth="1" name="platform/prebuilts/build-tools" path="prebuilts/build-tools" revision="ed2089634991203a980dd4ea8ce1c9fdaa6a8309" />
+  <project clone-depth="1" name="platform/prebuilts/build-tools" path="prebuilts/build-tools" revision="2861af10fad0eb5c9545e0d40330c8a5a4bc0680" />
 
   <project name="platform/external/pcre" path="external/pcre" revision="562bd8cec0877100592737615b8b9b68604bdfa2" />
 
   <project name="platform/external/libcxxabi" path="external/libcxxabi" revision="fd8f32ef796b635d666a89c0da8688e05ae27e4a" />
 
-  <project name="toolchain/make" revision="7424963d66959fdf2868ba66eff5360b0eadf2ea" />
+  <project name="toolchain/make" revision="3a78e1257ef0f0f57b12e869fc55bf02b39f9a08" />
 
-  <project name="platform/system/core" path="system/core" revision="c4707c4b6ab96e262629b9d69c50ea033e3b29a9" />
+  <project name="platform/system/core" path="system/core" revision="931c07707d577f5aa44b6e3d91352aa296c6873c" />
 
-  <project clone-depth="1" groups="pdk,tools" name="platform/prebuilts/tools" path="prebuilts/tools" revision="db4fc98d2653a544dc1d49c5a830380efea8e6cb" />
+  <project clone-depth="1" groups="pdk,tools" name="platform/prebuilts/tools" path="prebuilts/tools" revision="3fd56285d7a417b8d265612687666d295d065525" />
 
-  <project name="platform/external/guava" path="external/guava" revision="c2a035398f534bf61fe4df74eba3737d9e422853" />
+  <project name="platform/external/guava" path="external/guava" revision="bee38a1aee119cad4801d0ca2f8eaec9560e7b9a" />
 
-  <project clone-depth="1" name="platform/prebuilts/clang-tools" path="prebuilts/clang-tools" revision="92328d888cb7805be9797847076de3294de88b8c" />
+  <project clone-depth="1" name="platform/prebuilts/clang-tools" path="prebuilts/clang-tools" revision="ef456f0d6bedbaa2fd76ee83231b76a9ae68d197" />
 
   <project name="platform/external/golang-protobuf" path="external/golang-protobuf" revision="8deccff4ef7a79922cd4236d7cd81dfecd7fb418" />
 
@@ -51,7 +51,7 @@
 
   <project name="platform/external/bzip2" path="external/bzip2" revision="7821e22a03be5f9b076e4eb54125dc587efa3e3a" />
 
-  <project name="platform/build/soong" path="build/soong" revision="f785278d994515efd5e7637fc97c3374d095551f">
+  <project name="platform/build/soong" path="build/soong" revision="c81560e6fa0db3e97d538eb3ce77cbfed377075d">
     <linkfile dest="Android.bp" src="root.bp" />
 
     <linkfile dest="bootstrap.bash" src="bootstrap.bash" />
@@ -67,41 +67,41 @@
 
   <project clone-depth="1" groups="linux" name="platform/prebuilts/go/linux-x86" path="prebuilts/go/linux-x86" revision="e017cedfb79dc1ded3fa4de9315094023602e1be" />
 
-  <project clone-depth="1" groups="darwin" name="platform/prebuilts/clang/host/darwin-x86" path="prebuilts/clang/host/darwin-x86" revision="7c1ba3261c064e055fa3f2e715ad0003debea063" />
+  <project clone-depth="1" groups="darwin" name="platform/prebuilts/clang/host/darwin-x86" path="prebuilts/clang/host/darwin-x86" revision="da279e86f8b0736ab58d0cab58cef9d65404fc1c" />
 
-  <project name="platform/external/boringssl" path="external/boringssl" revision="18499ff78673e07cc8963ba22a9f4d2f6019659e" />
+  <project name="platform/external/boringssl" path="external/boringssl" revision="064f894ab4dd1b045cd68f30b2806dc2bab66bef" />
 
   <project clone-depth="1" groups="linux" name="platform/prebuilts/ninja/linux-x86" path="prebuilts/ninja/linux-x86" revision="6369b19fc3fbe765636af75d394627e2b92599ed" />
 
   <project name="platform/external/openssl" path="external/openssl" revision="a7efb4208fa440335e4c13c57ac30fcc5611a961" />
 
-  <project name="platform/external/toybox" path="external/toybox" revision="abd16962d611ae745de31fd028043d56f1c11bba" />
+  <project name="platform/external/toybox" path="external/toybox" revision="a427a44ce95f076ec2d98d73c9007de3c56f178f" />
 
   <project name="platform/external/golang-x-sync" path="external/golang-x-sync" revision="e43247afef776e3507532a21ade3cce758b96226" />
 
   <project clone-depth="1" groups="darwin" name="platform/prebuilts/ninja/darwin-x86" path="prebuilts/ninja/darwin-x86" revision="00f798346dedb4a7a6a6dcc9ad32ff09d66ee0db" />
 
-  <project clone-depth="1" name="platform/external/error_prone" path="external/error_prone" revision="7da31e78562db9f52869df5a006d7a1f25397750" />
+  <project clone-depth="1" name="platform/external/error_prone" path="external/error_prone" revision="2c386e73c03b24e0a1ae27b314c7269299f5227b" />
 
   <project name="platform/external/jsr305" path="external/jsr305" revision="1a3f97731c0ce0cabb2afaf7fa4690bf4391cd6a" />
 
   <project name="platform/external/nsjail" path="external/nsjail" revision="a25cd900710305660a653c6b63d76286d8d6cdfe" />
 
-  <project clone-depth="1" groups="linux" name="platform/prebuilts/clang/host/linux-x86" path="prebuilts/clang/host/linux-x86" revision="d17e24155faad1cc3cd41f01fc1d97c19675d383" />
+  <project clone-depth="1" groups="linux" name="platform/prebuilts/clang/host/linux-x86" path="prebuilts/clang/host/linux-x86" revision="fd9d3247e852f9d6601fdb0e1877bf5e9cbe52cd" />
 
   <project name="platform/external/bloaty" path="external/bloaty" revision="ea2e752ffec4b095800be4ff9d8bed5f56f57ca8" />
 
   <project clone-depth="1" groups="darwin" name="platform/prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1" path="prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1" revision="34b9e5578f99a11691696f955cc182b6e97d9a3a" />
 
-  <project name="platform/external/python/cpython3" path="external/python/cpython3" revision="74c15f241aa4b20976731e8f5c2b6488b59e04c2" />
+  <project name="platform/external/python/cpython3" path="external/python/cpython3" revision="d7f9938c94658cf3a2344c4252d56cd158c79a3c" />
 
-  <project name="platform/build/blueprint" path="build/blueprint" revision="c759904dd3bd2425b51dd6707f438c6ebae74997" />
+  <project name="platform/build/blueprint" path="build/blueprint" revision="b8d520adda284f79e1805517207b9d14f7fe4b23" />
 
   <project name="platform/external/safe-iop" path="external/safe-iop" revision="b805514f31a231a0e78a18f296c0454fcadead1a" />
 
   <project name="platform/external/llvm" path="external/llvm" revision="fea7a439296839204ac87aeb96c6072ef58df73b" />
 
-  <project name="platform/external/bison" path="external/bison" revision="d5e4afa073308e91d0ccfc0e4ae14c02a78d12b6" />
+  <project name="platform/external/bison" path="external/bison" revision="1519c8770cf104cf870388e25e767813f3301cf4" />
 
   <project clone-depth="1" name="platform/prebuilts/misc" path="prebuilts/misc" revision="36c3c89f987bec3c3c77fb2aeee7422bc3f9d3c3" />
 
@@ -121,7 +121,7 @@
 
   <project name="platform/external/sqlite" path="external/sqlite" revision="4a579c323a0521ab897d34934da043955d0c6b7a" />
 
-  <project name="platform/bionic" path="bionic" revision="934b1e38b89bd0de0904900738ecd4fbc95130c9" />
+  <project name="platform/bionic" path="bionic" revision="8f80d379639fc3134b5eca83dae1ad05bb036c36" />
 
   <project name="platform/external/golang-x-tools" path="external/golang-x-tools" revision="bcc6484babb4e999e4bdb6a982bfa7e92fc640d2" />
 
@@ -137,15 +137,15 @@
 
   <project name="platform/external/expat" path="external/expat" revision="e3689675b0157542ae4f70690d71bd3996ccb34c" />
 
-  <project name="platform/system/libhwbinder" path="system/libhwbinder" revision="e6d47a5d9ea903b0f048859fc957d132ead1a820" />
+  <project name="platform/system/libhwbinder" path="system/libhwbinder" revision="30e1061ca859942e9be5d0373d36c782e7491904" />
 
   <project name="platform/external/turbine" path="external/turbine" revision="bf096fd0b66c4b2863c71bbb11b6de4c5e88806d" />
 
-  <project name="platform/external/compiler-rt" path="external/compiler-rt" revision="2a72045d2057d5951f82054a83776e27d6fe59e4" />
+  <project name="platform/external/compiler-rt" path="external/compiler-rt" revision="bf08053df3ce76939a4282a282e13a6d1fe134bf" />
 
   <project name="platform/external/libunwind_llvm" path="external/libunwind_llvm" revision="54ed55f04191810e5e92cd9ef675b71483c398c3" />
 
-  <project name="platform/build" path="build/make" revision="34b288b3faff837b1ea251cec67fdb4de4c3bf1b">
+  <project name="platform/build" path="build/make" revision="e67bb6a48d693aa7c95c1c833d3bbb360dd10476">
     <linkfile dest="build/tools" src="tools" />
 </project>
 
@@ -163,25 +163,25 @@
 
   <project name="platform/external/googletest" path="external/googletest" revision="e1f35040857fe3112cac00c299fe01842a422b19" />
 
-  <project name="platform/external/zlib" path="external/zlib" revision="18c8f3206a257d4dca406fcac2bee7044e3ca1b7" />
+  <project name="platform/external/zlib" path="external/zlib" revision="07c8631a88d7c0b1713398efb3984cd7b9923005" />
 
   <project name="platform/external/zopfli" path="external/zopfli" revision="51ea3d41bea54b0735b0e5c72421de6aa9cb8b28" />
 
-  <project clone-depth="1" groups="linux" name="platform/prebuilts/clang/host/windows-x86" path="prebuilts/clang/host/windows-x86" revision="7a0fc9913f169c592f8838bbe04b83397a44b6dd" />
+  <project clone-depth="1" groups="linux" name="platform/prebuilts/clang/host/windows-x86" path="prebuilts/clang/host/windows-x86" revision="3c549b0d72457aaca650f9b6081efabd98293231" />
 
   <project name="platform/external/kythe" path="external/kythe" revision="de7e2fbaab0f29c2a69db22df15d48b72aad8180" />
 
-  <project name="platform/external/dagger2" path="external/dagger2" revision="d46e41e3dc9bd0cce1c7024130281537dd90ae8c" />
+  <project name="platform/external/dagger2" path="external/dagger2" revision="7093e5117e929aaca22f0f9f417f730717fb3fe2" />
 
   <project name="platform/build/kati" path="build/kati" revision="2cded7e906446249ea19a093fe1b85c685db8214" />
 
-  <project name="platform/system/logging" path="system/logging" revision="06ca6cbadb41bb3ba1a96c61cf99a58ed1db1570" />
+  <project name="platform/system/logging" path="system/logging" revision="c9fa49879949bb6208fc3874f4da1e3df4f84df2" />
 
-  <project name="platform/external/protobuf" path="external/protobuf" revision="a179011cfa1fa077f5a29fc9b8f4f6115cbbd9b6" />
+  <project name="platform/external/protobuf" path="external/protobuf" revision="0a41c8929852545941522cf2999eda58d1aba793" />
 
   <project name="platform/external/jemalloc" path="external/jemalloc" revision="0c83f48fb28ecc5d4946396865dba43d583e792d" />
 
-  <project name="platform/development" path="development" revision="cb5da53eb75bf49f4b8b8b0fab8340aa07c5e8fa" />
+  <project name="platform/development" path="development" revision="0f9420e94d22ae303a3054c2b872bec7a59b535f" />
 
   <project name="platform/external/libffi" path="external/libffi" revision="12d65dc2117aabe13e69052bd9bdc4401cd8edca" />
 
@@ -191,9 +191,9 @@
 
   <project name="platform/external/fmtlib" path="external/fmtlib" revision="4c96ef1743bb42362739d984e71bfe5505c777ee" />
 
-  <project name="platform/system/tools/hidl" path="system/tools/hidl" revision="2a090e06d7b19bc0844ddeb1cc43e1d6add52800" />
+  <project name="platform/system/tools/hidl" path="system/tools/hidl" revision="d754338cf0dc61b357bf6b1d8fef6287adb6968d" />
 
-  <project name="platform/system/tools/aidl" path="system/tools/aidl" revision="34c4cb8aea6ab67525c982a912d74813857f7b1d" />
+  <project name="platform/system/tools/aidl" path="system/tools/aidl" revision="337713f2598060e6af7dfddd2c55d6e41138b8e4" />
 
   <project name="platform/external/bazelbuild-remote-apis" path="external/bazelbuild-remote-apis" revision="e0e5a5351a4571f63506eec04ee49a3dd1e60327" />