Update build-tools to ab/7064006
https://ci.android.com/builds/branches/aosp-build-tools-release/grid?head=7064006&tail=7064006
Test: treehugger
Change-Id: I4fa4337a1f9c5a347d34725fa4d3cb7256043c98
diff --git a/common/py3-stdlib/importlib/__init__.py b/common/py3-stdlib/importlib/__init__.py
new file mode 100644
index 0000000..0c73c50
--- /dev/null
+++ b/common/py3-stdlib/importlib/__init__.py
@@ -0,0 +1,176 @@
+"""A pure Python implementation of import."""
+__all__ = ['__import__', 'import_module', 'invalidate_caches', 'reload']
+
+# Bootstrap help #####################################################
+
+# Until bootstrapping is complete, DO NOT import any modules that attempt
+# to import importlib._bootstrap (directly or indirectly). Since this
+# partially initialised package would be present in sys.modules, those
+# modules would get an uninitialised copy of the source version, instead
+# of a fully initialised version (either the frozen one or the one
+# initialised below if the frozen one is not available).
+import _imp # Just the builtin component, NOT the full Python module
+import sys
+
+try:
+ import _frozen_importlib as _bootstrap
+except ImportError:
+ from . import _bootstrap
+ _bootstrap._setup(sys, _imp)
+else:
+    # importlib._bootstrap is the built-in import; ensure we don't create
+ # a second copy of the module.
+ _bootstrap.__name__ = 'importlib._bootstrap'
+ _bootstrap.__package__ = 'importlib'
+ try:
+ _bootstrap.__file__ = __file__.replace('__init__.py', '_bootstrap.py')
+ except NameError:
+ # __file__ is not guaranteed to be defined, e.g. if this code gets
+ # frozen by a tool like cx_Freeze.
+ pass
+ sys.modules['importlib._bootstrap'] = _bootstrap
+
+try:
+ import _frozen_importlib_external as _bootstrap_external
+except ImportError:
+ from . import _bootstrap_external
+ _bootstrap_external._setup(_bootstrap)
+ _bootstrap._bootstrap_external = _bootstrap_external
+else:
+ _bootstrap_external.__name__ = 'importlib._bootstrap_external'
+ _bootstrap_external.__package__ = 'importlib'
+ try:
+ _bootstrap_external.__file__ = __file__.replace('__init__.py', '_bootstrap_external.py')
+ except NameError:
+ # __file__ is not guaranteed to be defined, e.g. if this code gets
+ # frozen by a tool like cx_Freeze.
+ pass
+ sys.modules['importlib._bootstrap_external'] = _bootstrap_external
+
+# To simplify imports in test code
+_pack_uint32 = _bootstrap_external._pack_uint32
+_unpack_uint32 = _bootstrap_external._unpack_uint32
+
+# Fully bootstrapped at this point, import whatever you like, circular
+# dependencies and startup overhead minimisation permitting :)
+
+import types
+import warnings
+
+
+# Public API #########################################################
+
+from ._bootstrap import __import__
+
+
+def invalidate_caches():
+ """Call the invalidate_caches() method on all meta path finders stored in
+ sys.meta_path (where implemented)."""
+ for finder in sys.meta_path:
+ if hasattr(finder, 'invalidate_caches'):
+ finder.invalidate_caches()
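+
+# Illustrative example (editor's addition, not part of the upstream file):
+# after creating a new importable file at runtime, stale finder caches can
+# hide it until they are invalidated.  The paths and names below are
+# hypothetical:
+#
+#   with open('/tmp/plugins/new_plugin.py', 'w') as f:
+#       f.write('VALUE = 1\n')
+#   importlib.invalidate_caches()
+#   plugin = importlib.import_module('new_plugin')  # assumes /tmp/plugins is on sys.path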
+
+
+def find_loader(name, path=None):
+ """Return the loader for the specified module.
+
+ This is a backward-compatible wrapper around find_spec().
+
+ This function is deprecated in favor of importlib.util.find_spec().
+
+ """
+ warnings.warn('Deprecated since Python 3.4. '
+ 'Use importlib.util.find_spec() instead.',
+ DeprecationWarning, stacklevel=2)
+ try:
+ loader = sys.modules[name].__loader__
+ if loader is None:
+ raise ValueError('{}.__loader__ is None'.format(name))
+ else:
+ return loader
+ except KeyError:
+ pass
+ except AttributeError:
+ raise ValueError('{}.__loader__ is not set'.format(name)) from None
+
+ spec = _bootstrap._find_spec(name, path)
+ # We won't worry about malformed specs (missing attributes).
+ if spec is None:
+ return None
+ if spec.loader is None:
+ if spec.submodule_search_locations is None:
+ raise ImportError('spec for {} missing loader'.format(name),
+ name=name)
+ raise ImportError('namespace packages do not have loaders',
+ name=name)
+ return spec.loader
+
+
+def import_module(name, package=None):
+ """Import a module.
+
+ The 'package' argument is required when performing a relative import. It
+ specifies the package to use as the anchor point from which to resolve the
+ relative import to an absolute import.
+
+ """
+ level = 0
+ if name.startswith('.'):
+ if not package:
+ msg = ("the 'package' argument is required to perform a relative "
+ "import for {!r}")
+ raise TypeError(msg.format(name))
+ for character in name:
+ if character != '.':
+ break
+ level += 1
+ return _bootstrap._gcd_import(name[level:], package, level)
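+
+# Illustrative examples (editor's addition, not part of the upstream file):
+#
+#   importlib.import_module('json.decoder')              # absolute name, no anchor needed
+#   importlib.import_module('.decoder', package='json')  # relative name anchored on 'json'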
+
+
+_RELOADING = {}
+
+
+def reload(module):
+ """Reload the module and return it.
+
+ The module must have been successfully imported before.
+
+ """
+ if not module or not isinstance(module, types.ModuleType):
+ raise TypeError("reload() argument must be a module")
+ try:
+ name = module.__spec__.name
+ except AttributeError:
+ name = module.__name__
+
+ if sys.modules.get(name) is not module:
+ msg = "module {} not in sys.modules"
+ raise ImportError(msg.format(name), name=name)
+ if name in _RELOADING:
+ return _RELOADING[name]
+ _RELOADING[name] = module
+ try:
+ parent_name = name.rpartition('.')[0]
+ if parent_name:
+ try:
+ parent = sys.modules[parent_name]
+ except KeyError:
+ msg = "parent {!r} not in sys.modules"
+ raise ImportError(msg.format(parent_name),
+ name=parent_name) from None
+ else:
+ pkgpath = parent.__path__
+ else:
+ pkgpath = None
+ target = module
+ spec = module.__spec__ = _bootstrap._find_spec(name, pkgpath, target)
+ if spec is None:
+ raise ModuleNotFoundError(f"spec not found for the module {name!r}", name=name)
+ _bootstrap._exec(spec, module)
+ # The module may have replaced itself in sys.modules!
+ return sys.modules[name]
+ finally:
+ try:
+ del _RELOADING[name]
+ except KeyError:
+ pass
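+
+# Illustrative example (editor's addition, not part of the upstream file):
+# reload() re-executes the module's code in the existing module object, so
+# references to the module see the updated attributes (names bound earlier via
+# 'from module import name' are not rebound):
+#
+#   import importlib, json
+#   json = importlib.reload(json)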
diff --git a/common/py3-stdlib/importlib/_bootstrap.py b/common/py3-stdlib/importlib/_bootstrap.py
new file mode 100644
index 0000000..32deef1
--- /dev/null
+++ b/common/py3-stdlib/importlib/_bootstrap.py
@@ -0,0 +1,1173 @@
+"""Core implementation of import.
+
+This module is NOT meant to be directly imported! It has been designed such
+that it can be bootstrapped into Python as the implementation of import. As
+such it requires the injection of specific modules and attributes in order to
+work. One should use importlib as the public-facing version of this module.
+
+"""
+#
+# IMPORTANT: Whenever making changes to this module, be sure to run a top-level
+# `make regen-importlib` followed by `make` in order to get the frozen version
+# of the module updated. Not doing so will cause the Makefile to fail
+# all others who don't have a ./python around to freeze the module
+# in the early stages of compilation.
+#
+
+# See importlib._setup() for what is injected into the global namespace.
+
+# When editing this code be aware that code executed at import time CANNOT
+# reference any injected objects! This includes not only global code but also
+# anything specified at the class level.
+
+# Bootstrap-related code ######################################################
+
+_bootstrap_external = None
+
+def _wrap(new, old):
+ """Simple substitute for functools.update_wrapper."""
+ for replace in ['__module__', '__name__', '__qualname__', '__doc__']:
+ if hasattr(old, replace):
+ setattr(new, replace, getattr(old, replace))
+ new.__dict__.update(old.__dict__)
+
+
+def _new_module(name):
+ return type(sys)(name)
+
+
+# Module-level locking ########################################################
+
+# A dict mapping module names to weakrefs of _ModuleLock instances
+# Dictionary protected by the global import lock
+_module_locks = {}
+# A dict mapping thread ids to _ModuleLock instances
+_blocking_on = {}
+
+
+class _DeadlockError(RuntimeError):
+ pass
+
+
+class _ModuleLock:
+ """A recursive lock implementation which is able to detect deadlocks
+ (e.g. thread 1 trying to take locks A then B, and thread 2 trying to
+ take locks B then A).
+ """
+
+ def __init__(self, name):
+ self.lock = _thread.allocate_lock()
+ self.wakeup = _thread.allocate_lock()
+ self.name = name
+ self.owner = None
+ self.count = 0
+ self.waiters = 0
+
+ def has_deadlock(self):
+ # Deadlock avoidance for concurrent circular imports.
+ me = _thread.get_ident()
+ tid = self.owner
+ while True:
+ lock = _blocking_on.get(tid)
+ if lock is None:
+ return False
+ tid = lock.owner
+ if tid == me:
+ return True
+
+ def acquire(self):
+ """
+ Acquire the module lock. If a potential deadlock is detected,
+ a _DeadlockError is raised.
+ Otherwise, the lock is always acquired and True is returned.
+ """
+ tid = _thread.get_ident()
+ _blocking_on[tid] = self
+ try:
+ while True:
+ with self.lock:
+ if self.count == 0 or self.owner == tid:
+ self.owner = tid
+ self.count += 1
+ return True
+ if self.has_deadlock():
+ raise _DeadlockError('deadlock detected by %r' % self)
+ if self.wakeup.acquire(False):
+ self.waiters += 1
+ # Wait for a release() call
+ self.wakeup.acquire()
+ self.wakeup.release()
+ finally:
+ del _blocking_on[tid]
+
+ def release(self):
+ tid = _thread.get_ident()
+ with self.lock:
+ if self.owner != tid:
+ raise RuntimeError('cannot release un-acquired lock')
+ assert self.count > 0
+ self.count -= 1
+ if self.count == 0:
+ self.owner = None
+ if self.waiters:
+ self.waiters -= 1
+ self.wakeup.release()
+
+ def __repr__(self):
+ return '_ModuleLock({!r}) at {}'.format(self.name, id(self))
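+
+    # Illustration (editor's addition, not part of the upstream file): with
+    # circular imports across threads, thread T1 may hold the lock for module
+    # 'a' while waiting on 'b', and thread T2 may hold 'b' while waiting on
+    # 'a'.  has_deadlock() follows the owner -> blocked-on chain recorded in
+    # _blocking_on and reports a cycle back to the calling thread, letting
+    # acquire() raise _DeadlockError instead of hanging forever.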
+
+
+class _DummyModuleLock:
+ """A simple _ModuleLock equivalent for Python builds without
+ multi-threading support."""
+
+ def __init__(self, name):
+ self.name = name
+ self.count = 0
+
+ def acquire(self):
+ self.count += 1
+ return True
+
+ def release(self):
+ if self.count == 0:
+ raise RuntimeError('cannot release un-acquired lock')
+ self.count -= 1
+
+ def __repr__(self):
+ return '_DummyModuleLock({!r}) at {}'.format(self.name, id(self))
+
+
+class _ModuleLockManager:
+
+ def __init__(self, name):
+ self._name = name
+ self._lock = None
+
+ def __enter__(self):
+ self._lock = _get_module_lock(self._name)
+ self._lock.acquire()
+
+ def __exit__(self, *args, **kwargs):
+ self._lock.release()
+
+
+# The following two functions are for consumption by Python/import.c.
+
+def _get_module_lock(name):
+ """Get or create the module lock for a given module name.
+
+    Internally acquires and releases the global import lock to protect
+ _module_locks."""
+
+ _imp.acquire_lock()
+ try:
+ try:
+ lock = _module_locks[name]()
+ except KeyError:
+ lock = None
+
+ if lock is None:
+ if _thread is None:
+ lock = _DummyModuleLock(name)
+ else:
+ lock = _ModuleLock(name)
+
+ def cb(ref, name=name):
+ _imp.acquire_lock()
+ try:
+ # bpo-31070: Check if another thread created a new lock
+ # after the previous lock was destroyed
+ # but before the weakref callback was called.
+ if _module_locks.get(name) is ref:
+ del _module_locks[name]
+ finally:
+ _imp.release_lock()
+
+ _module_locks[name] = _weakref.ref(lock, cb)
+ finally:
+ _imp.release_lock()
+
+ return lock
+
+
+def _lock_unlock_module(name):
+ """Acquires then releases the module lock for a given module name.
+
+ This is used to ensure a module is completely initialized, in the
+ event it is being imported by another thread.
+ """
+ lock = _get_module_lock(name)
+ try:
+ lock.acquire()
+ except _DeadlockError:
+ # Concurrent circular import, we'll accept a partially initialized
+ # module object.
+ pass
+ else:
+ lock.release()
+
+# Frame stripping magic ###############################################
+def _call_with_frames_removed(f, *args, **kwds):
+ """remove_importlib_frames in import.c will always remove sequences
+ of importlib frames that end with a call to this function
+
+ Use it instead of a normal call in places where including the importlib
+ frames introduces unwanted noise into the traceback (e.g. when executing
+ module code)
+ """
+ return f(*args, **kwds)
+
+
+def _verbose_message(message, *args, verbosity=1):
+ """Print the message to stderr if -v/PYTHONVERBOSE is turned on."""
+ if sys.flags.verbose >= verbosity:
+ if not message.startswith(('#', 'import ')):
+ message = '# ' + message
+ print(message.format(*args), file=sys.stderr)
+
+
+def _requires_builtin(fxn):
+ """Decorator to verify the named module is built-in."""
+ def _requires_builtin_wrapper(self, fullname):
+ if fullname not in sys.builtin_module_names:
+ raise ImportError('{!r} is not a built-in module'.format(fullname),
+ name=fullname)
+ return fxn(self, fullname)
+ _wrap(_requires_builtin_wrapper, fxn)
+ return _requires_builtin_wrapper
+
+
+def _requires_frozen(fxn):
+ """Decorator to verify the named module is frozen."""
+ def _requires_frozen_wrapper(self, fullname):
+ if not _imp.is_frozen(fullname):
+ raise ImportError('{!r} is not a frozen module'.format(fullname),
+ name=fullname)
+ return fxn(self, fullname)
+ _wrap(_requires_frozen_wrapper, fxn)
+ return _requires_frozen_wrapper
+
+
+# Typically used by loader classes as a method replacement.
+def _load_module_shim(self, fullname):
+ """Load the specified module into sys.modules and return it.
+
+ This method is deprecated. Use loader.exec_module instead.
+
+ """
+ spec = spec_from_loader(fullname, self)
+ if fullname in sys.modules:
+ module = sys.modules[fullname]
+ _exec(spec, module)
+ return sys.modules[fullname]
+ else:
+ return _load(spec)
+
+# Module specifications #######################################################
+
+def _module_repr(module):
+ # The implementation of ModuleType.__repr__().
+ loader = getattr(module, '__loader__', None)
+ if hasattr(loader, 'module_repr'):
+ # As soon as BuiltinImporter, FrozenImporter, and NamespaceLoader
+        # drop their implementations of module_repr, we can add a
+ # deprecation warning here.
+ try:
+ return loader.module_repr(module)
+ except Exception:
+ pass
+ try:
+ spec = module.__spec__
+ except AttributeError:
+ pass
+ else:
+ if spec is not None:
+ return _module_repr_from_spec(spec)
+
+ # We could use module.__class__.__name__ instead of 'module' in the
+ # various repr permutations.
+ try:
+ name = module.__name__
+ except AttributeError:
+ name = '?'
+ try:
+ filename = module.__file__
+ except AttributeError:
+ if loader is None:
+ return '<module {!r}>'.format(name)
+ else:
+ return '<module {!r} ({!r})>'.format(name, loader)
+ else:
+ return '<module {!r} from {!r}>'.format(name, filename)
+
+
+class ModuleSpec:
+ """The specification for a module, used for loading.
+
+ A module's spec is the source for information about the module. For
+ data associated with the module, including source, use the spec's
+ loader.
+
+ `name` is the absolute name of the module. `loader` is the loader
+ to use when loading the module. `parent` is the name of the
+ package the module is in. The parent is derived from the name.
+
+ `is_package` determines if the module is considered a package or
+ not. On modules this is reflected by the `__path__` attribute.
+
+ `origin` is the specific location used by the loader from which to
+ load the module, if that information is available. When filename is
+ set, origin will match.
+
+ `has_location` indicates that a spec's "origin" reflects a location.
+ When this is True, `__file__` attribute of the module is set.
+
+ `cached` is the location of the cached bytecode file, if any. It
+ corresponds to the `__cached__` attribute.
+
+ `submodule_search_locations` is the sequence of path entries to
+ search when importing submodules. If set, is_package should be
+ True--and False otherwise.
+
+ Packages are simply modules that (may) have submodules. If a spec
+ has a non-None value in `submodule_search_locations`, the import
+ system will consider modules loaded from the spec as packages.
+
+ Only finders (see importlib.abc.MetaPathFinder and
+ importlib.abc.PathEntryFinder) should modify ModuleSpec instances.
+
+ """
+
+ def __init__(self, name, loader, *, origin=None, loader_state=None,
+ is_package=None):
+ self.name = name
+ self.loader = loader
+ self.origin = origin
+ self.loader_state = loader_state
+ self.submodule_search_locations = [] if is_package else None
+
+ # file-location attributes
+ self._set_fileattr = False
+ self._cached = None
+
+ def __repr__(self):
+ args = ['name={!r}'.format(self.name),
+ 'loader={!r}'.format(self.loader)]
+ if self.origin is not None:
+ args.append('origin={!r}'.format(self.origin))
+ if self.submodule_search_locations is not None:
+ args.append('submodule_search_locations={}'
+ .format(self.submodule_search_locations))
+ return '{}({})'.format(self.__class__.__name__, ', '.join(args))
+
+ def __eq__(self, other):
+ smsl = self.submodule_search_locations
+ try:
+ return (self.name == other.name and
+ self.loader == other.loader and
+ self.origin == other.origin and
+ smsl == other.submodule_search_locations and
+ self.cached == other.cached and
+ self.has_location == other.has_location)
+ except AttributeError:
+ return False
+
+ @property
+ def cached(self):
+ if self._cached is None:
+ if self.origin is not None and self._set_fileattr:
+ if _bootstrap_external is None:
+ raise NotImplementedError
+ self._cached = _bootstrap_external._get_cached(self.origin)
+ return self._cached
+
+ @cached.setter
+ def cached(self, cached):
+ self._cached = cached
+
+ @property
+ def parent(self):
+ """The name of the module's parent."""
+ if self.submodule_search_locations is None:
+ return self.name.rpartition('.')[0]
+ else:
+ return self.name
+
+ @property
+ def has_location(self):
+ return self._set_fileattr
+
+ @has_location.setter
+ def has_location(self, value):
+ self._set_fileattr = bool(value)
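+
+# Editor's addition (illustrative summary, not part of the upstream file):
+# _init_module_attrs() below copies spec attributes onto a module roughly as:
+#
+#   spec.name                       -> module.__name__
+#   spec.loader                     -> module.__loader__
+#   spec.parent                     -> module.__package__
+#   spec                            -> module.__spec__
+#   spec.submodule_search_locations -> module.__path__   (packages only)
+#   spec.origin                     -> module.__file__   (only if has_location)
+#   spec.cached                     -> module.__cached__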
+
+
+def spec_from_loader(name, loader, *, origin=None, is_package=None):
+ """Return a module spec based on various loader methods."""
+ if hasattr(loader, 'get_filename'):
+ if _bootstrap_external is None:
+ raise NotImplementedError
+ spec_from_file_location = _bootstrap_external.spec_from_file_location
+
+ if is_package is None:
+ return spec_from_file_location(name, loader=loader)
+ search = [] if is_package else None
+ return spec_from_file_location(name, loader=loader,
+ submodule_search_locations=search)
+
+ if is_package is None:
+ if hasattr(loader, 'is_package'):
+ try:
+ is_package = loader.is_package(name)
+ except ImportError:
+ is_package = None # aka, undefined
+ else:
+ # the default
+ is_package = False
+
+ return ModuleSpec(name, loader, origin=origin, is_package=is_package)
+
+
+def _spec_from_module(module, loader=None, origin=None):
+ # This function is meant for use in _setup().
+ try:
+ spec = module.__spec__
+ except AttributeError:
+ pass
+ else:
+ if spec is not None:
+ return spec
+
+ name = module.__name__
+ if loader is None:
+ try:
+ loader = module.__loader__
+ except AttributeError:
+ # loader will stay None.
+ pass
+ try:
+ location = module.__file__
+ except AttributeError:
+ location = None
+ if origin is None:
+ if location is None:
+ try:
+ origin = loader._ORIGIN
+ except AttributeError:
+ origin = None
+ else:
+ origin = location
+ try:
+ cached = module.__cached__
+ except AttributeError:
+ cached = None
+ try:
+ submodule_search_locations = list(module.__path__)
+ except AttributeError:
+ submodule_search_locations = None
+
+ spec = ModuleSpec(name, loader, origin=origin)
+ spec._set_fileattr = False if location is None else True
+ spec.cached = cached
+ spec.submodule_search_locations = submodule_search_locations
+ return spec
+
+
+def _init_module_attrs(spec, module, *, override=False):
+    # The passed-in module may not support attribute assignment,
+ # in which case we simply don't set the attributes.
+ # __name__
+ if (override or getattr(module, '__name__', None) is None):
+ try:
+ module.__name__ = spec.name
+ except AttributeError:
+ pass
+ # __loader__
+ if override or getattr(module, '__loader__', None) is None:
+ loader = spec.loader
+ if loader is None:
+ # A backward compatibility hack.
+ if spec.submodule_search_locations is not None:
+ if _bootstrap_external is None:
+ raise NotImplementedError
+ _NamespaceLoader = _bootstrap_external._NamespaceLoader
+
+ loader = _NamespaceLoader.__new__(_NamespaceLoader)
+ loader._path = spec.submodule_search_locations
+ spec.loader = loader
+ # While the docs say that module.__file__ is not set for
+ # built-in modules, and the code below will avoid setting it if
+ # spec.has_location is false, this is incorrect for namespace
+ # packages. Namespace packages have no location, but their
+ # __spec__.origin is None, and thus their module.__file__
+ # should also be None for consistency. While a bit of a hack,
+ # this is the best place to ensure this consistency.
+ #
+            # See https://docs.python.org/3/library/importlib.html#importlib.abc.Loader.load_module
+ # and bpo-32305
+ module.__file__ = None
+ try:
+ module.__loader__ = loader
+ except AttributeError:
+ pass
+ # __package__
+ if override or getattr(module, '__package__', None) is None:
+ try:
+ module.__package__ = spec.parent
+ except AttributeError:
+ pass
+ # __spec__
+ try:
+ module.__spec__ = spec
+ except AttributeError:
+ pass
+ # __path__
+ if override or getattr(module, '__path__', None) is None:
+ if spec.submodule_search_locations is not None:
+ try:
+ module.__path__ = spec.submodule_search_locations
+ except AttributeError:
+ pass
+ # __file__/__cached__
+ if spec.has_location:
+ if override or getattr(module, '__file__', None) is None:
+ try:
+ module.__file__ = spec.origin
+ except AttributeError:
+ pass
+
+ if override or getattr(module, '__cached__', None) is None:
+ if spec.cached is not None:
+ try:
+ module.__cached__ = spec.cached
+ except AttributeError:
+ pass
+ return module
+
+
+def module_from_spec(spec):
+ """Create a module based on the provided spec."""
+ # Typically loaders will not implement create_module().
+ module = None
+ if hasattr(spec.loader, 'create_module'):
+ # If create_module() returns `None` then it means default
+ # module creation should be used.
+ module = spec.loader.create_module(spec)
+ elif hasattr(spec.loader, 'exec_module'):
+ raise ImportError('loaders that define exec_module() '
+ 'must also define create_module()')
+ if module is None:
+ module = _new_module(spec.name)
+ _init_module_attrs(spec, module)
+ return module
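+
+# Illustrative sketch (editor's addition, not part of the upstream file) using
+# the public importlib.util wrappers, which live outside this module; the path
+# and module name are hypothetical:
+#
+#   import importlib.util
+#   spec = importlib.util.spec_from_file_location('mymod', '/tmp/mymod.py')
+#   mod = importlib.util.module_from_spec(spec)   # create_module() plus attribute setup
+#   spec.loader.exec_module(mod)                  # execute the module body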
+
+
+def _module_repr_from_spec(spec):
+ """Return the repr to use for the module."""
+ # We mostly replicate _module_repr() using the spec attributes.
+ name = '?' if spec.name is None else spec.name
+ if spec.origin is None:
+ if spec.loader is None:
+ return '<module {!r}>'.format(name)
+ else:
+ return '<module {!r} ({!r})>'.format(name, spec.loader)
+ else:
+ if spec.has_location:
+ return '<module {!r} from {!r}>'.format(name, spec.origin)
+ else:
+ return '<module {!r} ({})>'.format(spec.name, spec.origin)
+
+
+# Used by importlib.reload() and _load_module_shim().
+def _exec(spec, module):
+ """Execute the spec's specified module in an existing module's namespace."""
+ name = spec.name
+ with _ModuleLockManager(name):
+ if sys.modules.get(name) is not module:
+ msg = 'module {!r} not in sys.modules'.format(name)
+ raise ImportError(msg, name=name)
+ try:
+ if spec.loader is None:
+ if spec.submodule_search_locations is None:
+ raise ImportError('missing loader', name=spec.name)
+ # Namespace package.
+ _init_module_attrs(spec, module, override=True)
+ else:
+ _init_module_attrs(spec, module, override=True)
+ if not hasattr(spec.loader, 'exec_module'):
+ # (issue19713) Once BuiltinImporter and ExtensionFileLoader
+ # have exec_module() implemented, we can add a deprecation
+ # warning here.
+ spec.loader.load_module(name)
+ else:
+ spec.loader.exec_module(module)
+ finally:
+ # Update the order of insertion into sys.modules for module
+ # clean-up at shutdown.
+ module = sys.modules.pop(spec.name)
+ sys.modules[spec.name] = module
+ return module
+
+
+def _load_backward_compatible(spec):
+ # (issue19713) Once BuiltinImporter and ExtensionFileLoader
+ # have exec_module() implemented, we can add a deprecation
+ # warning here.
+ try:
+ spec.loader.load_module(spec.name)
+ except:
+ if spec.name in sys.modules:
+ module = sys.modules.pop(spec.name)
+ sys.modules[spec.name] = module
+ raise
+ # The module must be in sys.modules at this point!
+ # Move it to the end of sys.modules.
+ module = sys.modules.pop(spec.name)
+ sys.modules[spec.name] = module
+ if getattr(module, '__loader__', None) is None:
+ try:
+ module.__loader__ = spec.loader
+ except AttributeError:
+ pass
+ if getattr(module, '__package__', None) is None:
+ try:
+ # Since module.__path__ may not line up with
+            # spec.submodule_search_locations, we can't necessarily rely
+ # on spec.parent here.
+ module.__package__ = module.__name__
+ if not hasattr(module, '__path__'):
+ module.__package__ = spec.name.rpartition('.')[0]
+ except AttributeError:
+ pass
+ if getattr(module, '__spec__', None) is None:
+ try:
+ module.__spec__ = spec
+ except AttributeError:
+ pass
+ return module
+
+def _load_unlocked(spec):
+ # A helper for direct use by the import system.
+ if spec.loader is not None:
+ # Not a namespace package.
+ if not hasattr(spec.loader, 'exec_module'):
+ return _load_backward_compatible(spec)
+
+ module = module_from_spec(spec)
+
+ # This must be done before putting the module in sys.modules
+ # (otherwise an optimization shortcut in import.c becomes
+ # wrong).
+ spec._initializing = True
+ try:
+ sys.modules[spec.name] = module
+ try:
+ if spec.loader is None:
+ if spec.submodule_search_locations is None:
+ raise ImportError('missing loader', name=spec.name)
+ # A namespace package so do nothing.
+ else:
+ spec.loader.exec_module(module)
+ except:
+ try:
+ del sys.modules[spec.name]
+ except KeyError:
+ pass
+ raise
+ # Move the module to the end of sys.modules.
+ # We don't ensure that the import-related module attributes get
+ # set in the sys.modules replacement case. Such modules are on
+ # their own.
+ module = sys.modules.pop(spec.name)
+ sys.modules[spec.name] = module
+ _verbose_message('import {!r} # {!r}', spec.name, spec.loader)
+ finally:
+ spec._initializing = False
+
+ return module
+
+# A method used during testing of _load_unlocked() and by
+# _load_module_shim().
+def _load(spec):
+ """Return a new module object, loaded by the spec's loader.
+
+ The module is not added to its parent.
+
+ If a module is already in sys.modules, that existing module gets
+ clobbered.
+
+ """
+ with _ModuleLockManager(spec.name):
+ return _load_unlocked(spec)
+
+
+# Loaders #####################################################################
+
+class BuiltinImporter:
+
+ """Meta path import for built-in modules.
+
+ All methods are either class or static methods to avoid the need to
+ instantiate the class.
+
+ """
+
+ @staticmethod
+ def module_repr(module):
+ """Return repr for the module.
+
+ The method is deprecated. The import machinery does the job itself.
+
+ """
+ return '<module {!r} (built-in)>'.format(module.__name__)
+
+ @classmethod
+ def find_spec(cls, fullname, path=None, target=None):
+ if path is not None:
+ return None
+ if _imp.is_builtin(fullname):
+ return spec_from_loader(fullname, cls, origin='built-in')
+ else:
+ return None
+
+ @classmethod
+ def find_module(cls, fullname, path=None):
+ """Find the built-in module.
+
+ If 'path' is ever specified then the search is considered a failure.
+
+ This method is deprecated. Use find_spec() instead.
+
+ """
+ spec = cls.find_spec(fullname, path)
+ return spec.loader if spec is not None else None
+
+ @classmethod
+    def create_module(cls, spec):
+ """Create a built-in module"""
+ if spec.name not in sys.builtin_module_names:
+ raise ImportError('{!r} is not a built-in module'.format(spec.name),
+ name=spec.name)
+ return _call_with_frames_removed(_imp.create_builtin, spec)
+
+ @classmethod
+    def exec_module(cls, module):
+ """Exec a built-in module"""
+ _call_with_frames_removed(_imp.exec_builtin, module)
+
+ @classmethod
+ @_requires_builtin
+ def get_code(cls, fullname):
+ """Return None as built-in modules do not have code objects."""
+ return None
+
+ @classmethod
+ @_requires_builtin
+ def get_source(cls, fullname):
+ """Return None as built-in modules do not have source code."""
+ return None
+
+ @classmethod
+ @_requires_builtin
+ def is_package(cls, fullname):
+ """Return False as built-in modules are never packages."""
+ return False
+
+ load_module = classmethod(_load_module_shim)
+
+
+class FrozenImporter:
+
+ """Meta path import for frozen modules.
+
+ All methods are either class or static methods to avoid the need to
+ instantiate the class.
+
+ """
+
+ _ORIGIN = "frozen"
+
+ @staticmethod
+ def module_repr(m):
+ """Return repr for the module.
+
+ The method is deprecated. The import machinery does the job itself.
+
+ """
+ return '<module {!r} ({})>'.format(m.__name__, FrozenImporter._ORIGIN)
+
+ @classmethod
+ def find_spec(cls, fullname, path=None, target=None):
+ if _imp.is_frozen(fullname):
+ return spec_from_loader(fullname, cls, origin=cls._ORIGIN)
+ else:
+ return None
+
+ @classmethod
+ def find_module(cls, fullname, path=None):
+ """Find a frozen module.
+
+ This method is deprecated. Use find_spec() instead.
+
+ """
+ return cls if _imp.is_frozen(fullname) else None
+
+ @classmethod
+ def create_module(cls, spec):
+ """Use default semantics for module creation."""
+
+ @staticmethod
+ def exec_module(module):
+ name = module.__spec__.name
+ if not _imp.is_frozen(name):
+ raise ImportError('{!r} is not a frozen module'.format(name),
+ name=name)
+ code = _call_with_frames_removed(_imp.get_frozen_object, name)
+ exec(code, module.__dict__)
+
+ @classmethod
+ def load_module(cls, fullname):
+ """Load a frozen module.
+
+ This method is deprecated. Use exec_module() instead.
+
+ """
+ return _load_module_shim(cls, fullname)
+
+ @classmethod
+ @_requires_frozen
+ def get_code(cls, fullname):
+ """Return the code object for the frozen module."""
+ return _imp.get_frozen_object(fullname)
+
+ @classmethod
+ @_requires_frozen
+ def get_source(cls, fullname):
+ """Return None as frozen modules do not have source code."""
+ return None
+
+ @classmethod
+ @_requires_frozen
+ def is_package(cls, fullname):
+ """Return True if the frozen module is a package."""
+ return _imp.is_frozen_package(fullname)
+
+
+# Import itself ###############################################################
+
+class _ImportLockContext:
+
+ """Context manager for the import lock."""
+
+ def __enter__(self):
+ """Acquire the import lock."""
+ _imp.acquire_lock()
+
+ def __exit__(self, exc_type, exc_value, exc_traceback):
+ """Release the import lock regardless of any raised exceptions."""
+ _imp.release_lock()
+
+
+def _resolve_name(name, package, level):
+ """Resolve a relative module name to an absolute one."""
+ bits = package.rsplit('.', level - 1)
+ if len(bits) < level:
+ raise ValueError('attempted relative import beyond top-level package')
+ base = bits[0]
+ return '{}.{}'.format(base, name) if name else base
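+
+# Illustrative examples (editor's addition, not part of the upstream file):
+#
+#   _resolve_name('mod', 'pkg.sub', 1)  ->  'pkg.sub.mod'
+#   _resolve_name('mod', 'pkg.sub', 2)  ->  'pkg.mod'
+#   _resolve_name('',    'pkg.sub', 2)  ->  'pkg'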
+
+
+def _find_spec_legacy(finder, name, path):
+ # This would be a good place for a DeprecationWarning if
+ # we ended up going that route.
+ loader = finder.find_module(name, path)
+ if loader is None:
+ return None
+ return spec_from_loader(name, loader)
+
+
+def _find_spec(name, path, target=None):
+ """Find a module's spec."""
+ meta_path = sys.meta_path
+ if meta_path is None:
+ # PyImport_Cleanup() is running or has been called.
+ raise ImportError("sys.meta_path is None, Python is likely "
+ "shutting down")
+
+ if not meta_path:
+ _warnings.warn('sys.meta_path is empty', ImportWarning)
+
+ # We check sys.modules here for the reload case. While a passed-in
+ # target will usually indicate a reload there is no guarantee, whereas
+ # sys.modules provides one.
+ is_reload = name in sys.modules
+ for finder in meta_path:
+ with _ImportLockContext():
+ try:
+ find_spec = finder.find_spec
+ except AttributeError:
+ spec = _find_spec_legacy(finder, name, path)
+ if spec is None:
+ continue
+ else:
+ spec = find_spec(name, path, target)
+ if spec is not None:
+ # The parent import may have already imported this module.
+ if not is_reload and name in sys.modules:
+ module = sys.modules[name]
+ try:
+ __spec__ = module.__spec__
+ except AttributeError:
+ # We use the found spec since that is the one that
+ # we would have used if the parent module hadn't
+ # beaten us to the punch.
+ return spec
+ else:
+ if __spec__ is None:
+ return spec
+ else:
+ return __spec__
+ else:
+ return spec
+ else:
+ return None
+
+
+def _sanity_check(name, package, level):
+ """Verify arguments are "sane"."""
+ if not isinstance(name, str):
+ raise TypeError('module name must be str, not {}'.format(type(name)))
+ if level < 0:
+ raise ValueError('level must be >= 0')
+ if level > 0:
+ if not isinstance(package, str):
+ raise TypeError('__package__ not set to a string')
+ elif not package:
+ raise ImportError('attempted relative import with no known parent '
+ 'package')
+ if not name and level == 0:
+ raise ValueError('Empty module name')
+
+
+_ERR_MSG_PREFIX = 'No module named '
+_ERR_MSG = _ERR_MSG_PREFIX + '{!r}'
+
+def _find_and_load_unlocked(name, import_):
+ path = None
+ parent = name.rpartition('.')[0]
+ if parent:
+ if parent not in sys.modules:
+ _call_with_frames_removed(import_, parent)
+ # Crazy side-effects!
+ if name in sys.modules:
+ return sys.modules[name]
+ parent_module = sys.modules[parent]
+ try:
+ path = parent_module.__path__
+ except AttributeError:
+ msg = (_ERR_MSG + '; {!r} is not a package').format(name, parent)
+ raise ModuleNotFoundError(msg, name=name) from None
+ spec = _find_spec(name, path)
+ if spec is None:
+ raise ModuleNotFoundError(_ERR_MSG.format(name), name=name)
+ else:
+ module = _load_unlocked(spec)
+ if parent:
+ # Set the module as an attribute on its parent.
+ parent_module = sys.modules[parent]
+ setattr(parent_module, name.rpartition('.')[2], module)
+ return module
+
+
+_NEEDS_LOADING = object()
+
+
+def _find_and_load(name, import_):
+ """Find and load the module."""
+ with _ModuleLockManager(name):
+ module = sys.modules.get(name, _NEEDS_LOADING)
+ if module is _NEEDS_LOADING:
+ return _find_and_load_unlocked(name, import_)
+
+ if module is None:
+ message = ('import of {} halted; '
+ 'None in sys.modules'.format(name))
+ raise ModuleNotFoundError(message, name=name)
+
+ _lock_unlock_module(name)
+ return module
+
+
+def _gcd_import(name, package=None, level=0):
+ """Import and return the module based on its name, the package the call is
+ being made from, and the level adjustment.
+
+ This function represents the greatest common denominator of functionality
+ between import_module and __import__. This includes setting __package__ if
+ the loader did not.
+
+ """
+ _sanity_check(name, package, level)
+ if level > 0:
+ name = _resolve_name(name, package, level)
+ return _find_and_load(name, _gcd_import)
+
+
+def _handle_fromlist(module, fromlist, import_, *, recursive=False):
+ """Figure out what __import__ should return.
+
+ The import_ parameter is a callable which takes the name of module to
+ import. It is required to decouple the function from assuming importlib's
+ import implementation is desired.
+
+ """
+ # The hell that is fromlist ...
+ # If a package was imported, try to import stuff from fromlist.
+ for x in fromlist:
+ if not isinstance(x, str):
+ if recursive:
+ where = module.__name__ + '.__all__'
+ else:
+ where = "``from list''"
+ raise TypeError(f"Item in {where} must be str, "
+ f"not {type(x).__name__}")
+ elif x == '*':
+ if not recursive and hasattr(module, '__all__'):
+ _handle_fromlist(module, module.__all__, import_,
+ recursive=True)
+ elif not hasattr(module, x):
+ from_name = '{}.{}'.format(module.__name__, x)
+ try:
+ _call_with_frames_removed(import_, from_name)
+ except ModuleNotFoundError as exc:
+ # Backwards-compatibility dictates we ignore failed
+ # imports triggered by fromlist for modules that don't
+ # exist.
+ if (exc.name == from_name and
+ sys.modules.get(from_name, _NEEDS_LOADING) is not None):
+ continue
+ raise
+ return module
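+
+# Illustrative example (editor's addition, not part of the upstream file):
+# for a package 'pkg',
+#
+#   from pkg import mod   # if 'mod' is not already an attribute of pkg,
+#                         # 'pkg.mod' is imported here on demand
+#   from pkg import *     # expands pkg.__all__ (when present) the same way
+#
+# A fromlist entry that fails to import is silently skipped unless its entry
+# in sys.modules was explicitly set to None.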
+
+
+def _calc___package__(globals):
+ """Calculate what __package__ should be.
+
+ __package__ is not guaranteed to be defined or could be set to None
+ to represent that its proper value is unknown.
+
+ """
+ package = globals.get('__package__')
+ spec = globals.get('__spec__')
+ if package is not None:
+ if spec is not None and package != spec.parent:
+ _warnings.warn("__package__ != __spec__.parent "
+ f"({package!r} != {spec.parent!r})",
+ ImportWarning, stacklevel=3)
+ return package
+ elif spec is not None:
+ return spec.parent
+ else:
+ _warnings.warn("can't resolve package from __spec__ or __package__, "
+ "falling back on __name__ and __path__",
+ ImportWarning, stacklevel=3)
+ package = globals['__name__']
+ if '__path__' not in globals:
+ package = package.rpartition('.')[0]
+ return package
+
+
+def __import__(name, globals=None, locals=None, fromlist=(), level=0):
+ """Import a module.
+
+ The 'globals' argument is used to infer where the import is occurring from
+ to handle relative imports. The 'locals' argument is ignored. The
+ 'fromlist' argument specifies what should exist as attributes on the module
+ being imported (e.g. ``from module import <fromlist>``). The 'level'
+ argument represents the package location to import from in a relative
+ import (e.g. ``from ..pkg import mod`` would have a 'level' of 2).
+
+ """
+ if level == 0:
+ module = _gcd_import(name)
+ else:
+ globals_ = globals if globals is not None else {}
+ package = _calc___package__(globals_)
+ module = _gcd_import(name, package, level)
+ if not fromlist:
+ # Return up to the first dot in 'name'. This is complicated by the fact
+ # that 'name' may be relative.
+ if level == 0:
+ return _gcd_import(name.partition('.')[0])
+ elif not name:
+ return module
+ else:
+ # Figure out where to slice the module's name up to the first dot
+ # in 'name'.
+ cut_off = len(name) - len(name.partition('.')[0])
+ # Slice end needs to be positive to alleviate need to special-case
+ # when ``'.' not in name``.
+ return sys.modules[module.__name__[:len(module.__name__)-cut_off]]
+ elif hasattr(module, '__path__'):
+ return _handle_fromlist(module, fromlist, _gcd_import)
+ else:
+ return module
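+
+# Illustrative examples (editor's addition, not part of the upstream file),
+# assuming packages a, a.b and module a.b.c exist:
+#
+#   import a.b.c        # empty fromlist, level 0: returns the top-level
+#                       # package 'a', which the statement binds
+#   from a.b import c   # fromlist=('c',): returns 'a.b'; 'c' is then taken
+#                       # as an attribute (imported on demand if needed)
+#   from .. import d    # inside a.b.c: level=2 resolves against __package__
+#                       # ('a.b') and lands on package 'a'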
+
+
+def _builtin_from_name(name):
+ spec = BuiltinImporter.find_spec(name)
+ if spec is None:
+ raise ImportError('no built-in module named ' + name)
+ return _load_unlocked(spec)
+
+
+def _setup(sys_module, _imp_module):
+ """Setup importlib by importing needed built-in modules and injecting them
+ into the global namespace.
+
+ As sys is needed for sys.modules access and _imp is needed to load built-in
+ modules, those two modules must be explicitly passed in.
+
+ """
+ global _imp, sys
+ _imp = _imp_module
+ sys = sys_module
+
+ # Set up the spec for existing builtin/frozen modules.
+ module_type = type(sys)
+ for name, module in sys.modules.items():
+ if isinstance(module, module_type):
+ if name in sys.builtin_module_names:
+ loader = BuiltinImporter
+ elif _imp.is_frozen(name):
+ loader = FrozenImporter
+ else:
+ continue
+ spec = _spec_from_module(module, loader)
+ _init_module_attrs(spec, module)
+
+ # Directly load built-in modules needed during bootstrap.
+ self_module = sys.modules[__name__]
+ for builtin_name in ('_thread', '_warnings', '_weakref'):
+ if builtin_name not in sys.modules:
+ builtin_module = _builtin_from_name(builtin_name)
+ else:
+ builtin_module = sys.modules[builtin_name]
+ setattr(self_module, builtin_name, builtin_module)
+
+
+def _install(sys_module, _imp_module):
+ """Install importers for builtin and frozen modules"""
+ _setup(sys_module, _imp_module)
+
+ sys.meta_path.append(BuiltinImporter)
+ sys.meta_path.append(FrozenImporter)
+
+
+def _install_external_importers():
+ """Install importers that require external filesystem access"""
+ global _bootstrap_external
+ import _frozen_importlib_external
+ _bootstrap_external = _frozen_importlib_external
+ _frozen_importlib_external._install(sys.modules[__name__])
diff --git a/common/py3-stdlib/importlib/_bootstrap_external.py b/common/py3-stdlib/importlib/_bootstrap_external.py
new file mode 100644
index 0000000..b8ac482
--- /dev/null
+++ b/common/py3-stdlib/importlib/_bootstrap_external.py
@@ -0,0 +1,1638 @@
+"""Core implementation of path-based import.
+
+This module is NOT meant to be directly imported! It has been designed such
+that it can be bootstrapped into Python as the implementation of import. As
+such it requires the injection of specific modules and attributes in order to
+work. One should use importlib as the public-facing version of this module.
+
+"""
+# IMPORTANT: Whenever making changes to this module, be sure to run a top-level
+# `make regen-importlib` followed by `make` in order to get the frozen version
+# of the module updated. Not doing so will cause the Makefile to fail for
+# all others who don't have a ./python around to freeze the module in the early
+# stages of compilation.
+#
+
+# See importlib._setup() for what is injected into the global namespace.
+
+# When editing this code be aware that code executed at import time CANNOT
+# reference any injected objects! This includes not only global code but also
+# anything specified at the class level.
+
+# Bootstrap-related code ######################################################
+_CASE_INSENSITIVE_PLATFORMS_STR_KEY = 'win',
+_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY = 'cygwin', 'darwin'
+_CASE_INSENSITIVE_PLATFORMS = (_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY
+ + _CASE_INSENSITIVE_PLATFORMS_STR_KEY)
+
+
+def _make_relax_case():
+ if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
+ if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS_STR_KEY):
+ key = 'PYTHONCASEOK'
+ else:
+ key = b'PYTHONCASEOK'
+
+ def _relax_case():
+ """True if filenames must be checked case-insensitively."""
+ return key in _os.environ
+ else:
+ def _relax_case():
+ """True if filenames must be checked case-insensitively."""
+ return False
+ return _relax_case
+
+
+def _pack_uint32(x):
+ """Convert a 32-bit integer to little-endian."""
+ return (int(x) & 0xFFFFFFFF).to_bytes(4, 'little')
+
+
+def _unpack_uint32(data):
+ """Convert 4 bytes in little-endian to an integer."""
+ assert len(data) == 4
+ return int.from_bytes(data, 'little')
+
+def _unpack_uint16(data):
+ """Convert 2 bytes in little-endian to an integer."""
+ assert len(data) == 2
+ return int.from_bytes(data, 'little')
+
+
+def _path_join(*path_parts):
+ """Replacement for os.path.join()."""
+ return path_sep.join([part.rstrip(path_separators)
+ for part in path_parts if part])
+
+
+def _path_split(path):
+ """Replacement for os.path.split()."""
+ if len(path_separators) == 1:
+ front, _, tail = path.rpartition(path_sep)
+ return front, tail
+ for x in reversed(path):
+ if x in path_separators:
+ front, tail = path.rsplit(x, maxsplit=1)
+ return front, tail
+ return '', path
+
+
+def _path_stat(path):
+ """Stat the path.
+
+ Made a separate function to make it easier to override in experiments
+ (e.g. cache stat results).
+
+ """
+ return _os.stat(path)
+
+
+def _path_is_mode_type(path, mode):
+ """Test whether the path is the specified mode type."""
+ try:
+ stat_info = _path_stat(path)
+ except OSError:
+ return False
+ return (stat_info.st_mode & 0o170000) == mode
+
+
+def _path_isfile(path):
+ """Replacement for os.path.isfile."""
+ return _path_is_mode_type(path, 0o100000)
+
+
+def _path_isdir(path):
+ """Replacement for os.path.isdir."""
+ if not path:
+ path = _os.getcwd()
+ return _path_is_mode_type(path, 0o040000)
+
+
+def _path_isabs(path):
+ """Replacement for os.path.isabs.
+
+ Considers a Windows drive-relative path (no drive, but starts with slash) to
+ still be "absolute".
+ """
+ return path.startswith(path_separators) or path[1:3] in _pathseps_with_colon
+
+
+def _write_atomic(path, data, mode=0o666):
+ """Best-effort function to write data to a path atomically.
+ Be prepared to handle a FileExistsError if concurrent writing of the
+ temporary file is attempted."""
+ # id() is used to generate a pseudo-random filename.
+ path_tmp = '{}.{}'.format(path, id(path))
+ fd = _os.open(path_tmp,
+ _os.O_EXCL | _os.O_CREAT | _os.O_WRONLY, mode & 0o666)
+ try:
+ # We first write data to a temporary file, and then use os.replace() to
+ # perform an atomic rename.
+ with _io.FileIO(fd, 'wb') as file:
+ file.write(data)
+ _os.replace(path_tmp, path)
+ except OSError:
+ try:
+ _os.unlink(path_tmp)
+ except OSError:
+ pass
+ raise
+
+
+_code_type = type(_write_atomic.__code__)
+
+
+# Finder/loader utility code ###############################################
+
+# Magic word to reject .pyc files generated by other Python versions.
+# It should change for each incompatible change to the bytecode.
+#
+# The value of CR and LF is incorporated so if you ever read or write
+# a .pyc file in text mode the magic number will be wrong; also, the
+# Apple MPW compiler swaps their values, botching string constants.
+#
+# There were a variety of old schemes for setting the magic number.
+# The current working scheme is to increment the previous value by
+# 10.
+#
+# Starting with the adoption of PEP 3147 in Python 3.2, every bump in magic
+# number also includes a new "magic tag", i.e. a human readable string used
+# to represent the magic number in __pycache__ directories. When you change
+# the magic number, you must also set a new unique magic tag. Generally this
+# can be named after the Python major version of the magic number bump, but
+# it can really be anything, as long as it's different than anything else
+# that's come before. The tags are included in the following table, starting
+# with Python 3.2a0.
+#
+# Known values:
+# Python 1.5: 20121
+# Python 1.5.1: 20121
+# Python 1.5.2: 20121
+# Python 1.6: 50428
+# Python 2.0: 50823
+# Python 2.0.1: 50823
+# Python 2.1: 60202
+# Python 2.1.1: 60202
+# Python 2.1.2: 60202
+# Python 2.2: 60717
+# Python 2.3a0: 62011
+# Python 2.3a0: 62021
+# Python 2.3a0: 62011 (!)
+# Python 2.4a0: 62041
+# Python 2.4a3: 62051
+# Python 2.4b1: 62061
+# Python 2.5a0: 62071
+# Python 2.5a0: 62081 (ast-branch)
+# Python 2.5a0: 62091 (with)
+# Python 2.5a0: 62092 (changed WITH_CLEANUP opcode)
+# Python 2.5b3: 62101 (fix wrong code: for x, in ...)
+# Python 2.5b3: 62111 (fix wrong code: x += yield)
+# Python 2.5c1: 62121 (fix wrong lnotab with for loops and
+# storing constants that should have been removed)
+# Python 2.5c2: 62131 (fix wrong code: for x, in ... in listcomp/genexp)
+# Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode)
+# Python 2.6a1: 62161 (WITH_CLEANUP optimization)
+# Python 2.7a0: 62171 (optimize list comprehensions/change LIST_APPEND)
+# Python 2.7a0: 62181 (optimize conditional branches:
+# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
+# Python 2.7a0 62191 (introduce SETUP_WITH)
+# Python 2.7a0 62201 (introduce BUILD_SET)
+# Python 2.7a0 62211 (introduce MAP_ADD and SET_ADD)
+# Python 3000: 3000
+# 3010 (removed UNARY_CONVERT)
+# 3020 (added BUILD_SET)
+# 3030 (added keyword-only parameters)
+# 3040 (added signature annotations)
+# 3050 (print becomes a function)
+# 3060 (PEP 3115 metaclass syntax)
+# 3061 (string literals become unicode)
+# 3071 (PEP 3109 raise changes)
+# 3081 (PEP 3137 make __file__ and __name__ unicode)
+# 3091 (kill str8 interning)
+# 3101 (merge from 2.6a0, see 62151)
+# 3103 (__file__ points to source file)
+# Python 3.0a4: 3111 (WITH_CLEANUP optimization).
+# Python 3.0b1: 3131 (lexical exception stacking, including POP_EXCEPT #3021)
+# Python 3.1a1: 3141 (optimize list, set and dict comprehensions:
+# change LIST_APPEND and SET_ADD, add MAP_ADD #2183)
+# Python 3.1a1: 3151 (optimize conditional branches:
+# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE #4715)
+# Python 3.2a1: 3160 (add SETUP_WITH #6101)
+# tag: cpython-32
+# Python 3.2a2: 3170 (add DUP_TOP_TWO, remove DUP_TOPX and ROT_FOUR #9225)
+# tag: cpython-32
+# Python 3.2a3 3180 (add DELETE_DEREF #4617)
+# Python 3.3a1 3190 (__class__ super closure changed)
+# Python 3.3a1 3200 (PEP 3155 __qualname__ added #13448)
+# Python 3.3a1 3210 (added size modulo 2**32 to the pyc header #13645)
+# Python 3.3a2 3220 (changed PEP 380 implementation #14230)
+# Python 3.3a4 3230 (revert changes to implicit __class__ closure #14857)
+# Python 3.4a1 3250 (evaluate positional default arguments before
+# keyword-only defaults #16967)
+# Python 3.4a1 3260 (add LOAD_CLASSDEREF; allow locals of class to override
+# free vars #17853)
+# Python 3.4a1 3270 (various tweaks to the __class__ closure #12370)
+# Python 3.4a1 3280 (remove implicit class argument)
+# Python 3.4a4 3290 (changes to __qualname__ computation #19301)
+# Python 3.4a4 3300 (more changes to __qualname__ computation #19301)
+# Python 3.4rc2 3310 (alter __qualname__ computation #20625)
+# Python 3.5a1 3320 (PEP 465: Matrix multiplication operator #21176)
+# Python 3.5b1 3330 (PEP 448: Additional Unpacking Generalizations #2292)
+# Python 3.5b2 3340 (fix dictionary display evaluation order #11205)
+# Python 3.5b3 3350 (add GET_YIELD_FROM_ITER opcode #24400)
+# Python 3.5.2 3351 (fix BUILD_MAP_UNPACK_WITH_CALL opcode #27286)
+# Python 3.6a0 3360 (add FORMAT_VALUE opcode #25483)
+# Python 3.6a1 3361 (lineno delta of code.co_lnotab becomes signed #26107)
+# Python 3.6a2 3370 (16 bit wordcode #26647)
+# Python 3.6a2 3371 (add BUILD_CONST_KEY_MAP opcode #27140)
+# Python 3.6a2 3372 (MAKE_FUNCTION simplification, remove MAKE_CLOSURE
+# #27095)
+# Python 3.6b1 3373 (add BUILD_STRING opcode #27078)
+# Python 3.6b1 3375 (add SETUP_ANNOTATIONS and STORE_ANNOTATION opcodes
+# #27985)
+# Python 3.6b1 3376 (simplify CALL_FUNCTIONs & BUILD_MAP_UNPACK_WITH_CALL #27213)
+# Python 3.6b1 3377 (set __class__ cell from type.__new__ #23722)
+# Python 3.6b2 3378 (add BUILD_TUPLE_UNPACK_WITH_CALL #28257)
+# Python 3.6rc1 3379 (more thorough __class__ validation #23722)
+# Python 3.7a1 3390 (add LOAD_METHOD and CALL_METHOD opcodes #26110)
+# Python 3.7a2 3391 (update GET_AITER #31709)
+# Python 3.7a4 3392 (PEP 552: Deterministic pycs #31650)
+# Python 3.7b1 3393 (remove STORE_ANNOTATION opcode #32550)
+# Python 3.7b5 3394 (restored docstring as the first stmt in the body;
+# this might have affected the first line number #32911)
+# Python 3.8a1 3400 (move frame block handling to compiler #17611)
+# Python 3.8a1 3401 (add END_ASYNC_FOR #33041)
+# Python 3.8a1 3410 (PEP570 Python Positional-Only Parameters #36540)
+# Python 3.8b2 3411 (Reverse evaluation order of key: value in dict
+# comprehensions #35224)
+# Python 3.8b2 3412 (Swap the position of positional args and positional
+# only args in ast.arguments #37593)
+# Python 3.8b4 3413 (Fix "break" and "continue" in "finally" #37830)
+#
+# MAGIC must change whenever the bytecode emitted by the compiler may no
+# longer be understood by older implementations of the eval loop (usually
+# due to the addition of new opcodes).
+#
+# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
+# in PC/launcher.c must also be updated.
+
+MAGIC_NUMBER = (3413).to_bytes(2, 'little') + b'\r\n'
+_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
+
+_PYCACHE = '__pycache__'
+_OPT = 'opt-'
+
+SOURCE_SUFFIXES = ['.py'] # _setup() adds .pyw as needed.
+
+BYTECODE_SUFFIXES = ['.pyc']
+# Deprecated.
+DEBUG_BYTECODE_SUFFIXES = OPTIMIZED_BYTECODE_SUFFIXES = BYTECODE_SUFFIXES
+
+def cache_from_source(path, debug_override=None, *, optimization=None):
+ """Given the path to a .py file, return the path to its .pyc file.
+
+ The .py file does not need to exist; this simply returns the path to the
+ .pyc file calculated as if the .py file were imported.
+
+ The 'optimization' parameter controls the presumed optimization level of
+ the bytecode file. If 'optimization' is not None, the string representation
+ of the argument is taken and verified to be alphanumeric (else ValueError
+ is raised).
+
+ The debug_override parameter is deprecated. If debug_override is not None,
+ a True value is the same as setting 'optimization' to the empty string
+ while a False value is equivalent to setting 'optimization' to '1'.
+
+ If sys.implementation.cache_tag is None then NotImplementedError is raised.
+
+ """
+ if debug_override is not None:
+ _warnings.warn('the debug_override parameter is deprecated; use '
+ "'optimization' instead", DeprecationWarning)
+ if optimization is not None:
+ message = 'debug_override or optimization must be set to None'
+ raise TypeError(message)
+ optimization = '' if debug_override else 1
+ path = _os.fspath(path)
+ head, tail = _path_split(path)
+ base, sep, rest = tail.rpartition('.')
+ tag = sys.implementation.cache_tag
+ if tag is None:
+ raise NotImplementedError('sys.implementation.cache_tag is None')
+ almost_filename = ''.join([(base if base else rest), sep, tag])
+ if optimization is None:
+ if sys.flags.optimize == 0:
+ optimization = ''
+ else:
+ optimization = sys.flags.optimize
+ optimization = str(optimization)
+ if optimization != '':
+ if not optimization.isalnum():
+ raise ValueError('{!r} is not alphanumeric'.format(optimization))
+ almost_filename = '{}.{}{}'.format(almost_filename, _OPT, optimization)
+ filename = almost_filename + BYTECODE_SUFFIXES[0]
+ if sys.pycache_prefix is not None:
+ # We need an absolute path to the py file to avoid the possibility of
+ # collisions within sys.pycache_prefix, if someone has two different
+ # `foo/bar.py` on their system and they import both of them using the
+ # same sys.pycache_prefix. Let's say sys.pycache_prefix is
+ # `C:\Bytecode`; the idea here is that if we get `Foo\Bar`, we first
+ # make it absolute (`C:\Somewhere\Foo\Bar`), then make it root-relative
+ # (`Somewhere\Foo\Bar`), so we end up placing the bytecode file in an
+ # unambiguous `C:\Bytecode\Somewhere\Foo\Bar\`.
+ if not _path_isabs(head):
+ head = _path_join(_os.getcwd(), head)
+
+ # Strip initial drive from a Windows path. We know we have an absolute
+ # path here, so the second part of the check rules out a POSIX path that
+ # happens to contain a colon at the second character.
+ if head[1] == ':' and head[0] not in path_separators:
+ head = head[2:]
+
+ # Strip initial path separator from `head` to complete the conversion
+ # back to a root-relative path before joining.
+ return _path_join(
+ sys.pycache_prefix,
+ head.lstrip(path_separators),
+ filename,
+ )
+ return _path_join(head, _PYCACHE, filename)
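+
+# Illustrative examples (editor's addition, not part of the upstream file),
+# assuming a 'cpython-38' cache tag and sys.pycache_prefix left unset:
+#
+#   cache_from_source('/srv/app/mod.py')
+#       -> '/srv/app/__pycache__/mod.cpython-38.pyc'
+#   cache_from_source('/srv/app/mod.py', optimization='1')
+#       -> '/srv/app/__pycache__/mod.cpython-38.opt-1.pyc'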
+
+
+def source_from_cache(path):
+    """Given the path to a .pyc file, return the path to its .py file.
+
+ The .pyc file does not need to exist; this simply returns the path to
+ the .py file calculated to correspond to the .pyc file. If path does
+ not conform to PEP 3147/488 format, ValueError will be raised. If
+ sys.implementation.cache_tag is None then NotImplementedError is raised.
+
+ """
+ if sys.implementation.cache_tag is None:
+ raise NotImplementedError('sys.implementation.cache_tag is None')
+ path = _os.fspath(path)
+ head, pycache_filename = _path_split(path)
+ found_in_pycache_prefix = False
+ if sys.pycache_prefix is not None:
+ stripped_path = sys.pycache_prefix.rstrip(path_separators)
+ if head.startswith(stripped_path + path_sep):
+ head = head[len(stripped_path):]
+ found_in_pycache_prefix = True
+ if not found_in_pycache_prefix:
+ head, pycache = _path_split(head)
+ if pycache != _PYCACHE:
+ raise ValueError(f'{_PYCACHE} not bottom-level directory in '
+ f'{path!r}')
+ dot_count = pycache_filename.count('.')
+ if dot_count not in {2, 3}:
+ raise ValueError(f'expected only 2 or 3 dots in {pycache_filename!r}')
+ elif dot_count == 3:
+ optimization = pycache_filename.rsplit('.', 2)[-2]
+ if not optimization.startswith(_OPT):
+ raise ValueError("optimization portion of filename does not start "
+ f"with {_OPT!r}")
+ opt_level = optimization[len(_OPT):]
+ if not opt_level.isalnum():
+ raise ValueError(f"optimization level {optimization!r} is not an "
+ "alphanumeric value")
+ base_filename = pycache_filename.partition('.')[0]
+ return _path_join(head, base_filename + SOURCE_SUFFIXES[0])
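+
+# Illustrative example (editor's addition, not part of the upstream file),
+# assuming a 'cpython-38' cache tag:
+#
+#   source_from_cache('/srv/app/__pycache__/mod.cpython-38.opt-1.pyc')
+#       -> '/srv/app/mod.py'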
+
+
+def _get_sourcefile(bytecode_path):
+ """Convert a bytecode file path to a source path (if possible).
+
+ This function exists purely for backwards-compatibility for
+ PyImport_ExecCodeModuleWithFilenames() in the C API.
+
+ """
+ if len(bytecode_path) == 0:
+ return None
+ rest, _, extension = bytecode_path.rpartition('.')
+ if not rest or extension.lower()[-3:-1] != 'py':
+ return bytecode_path
+ try:
+ source_path = source_from_cache(bytecode_path)
+ except (NotImplementedError, ValueError):
+ source_path = bytecode_path[:-1]
+ return source_path if _path_isfile(source_path) else bytecode_path
+
+
+def _get_cached(filename):
+ if filename.endswith(tuple(SOURCE_SUFFIXES)):
+ try:
+ return cache_from_source(filename)
+ except NotImplementedError:
+ pass
+ elif filename.endswith(tuple(BYTECODE_SUFFIXES)):
+ return filename
+ else:
+ return None
+
+
+def _calc_mode(path):
+ """Calculate the mode permissions for a bytecode file."""
+ try:
+ mode = _path_stat(path).st_mode
+ except OSError:
+ mode = 0o666
+ # We always ensure write access so we can update cached files
+ # later even when the source files are read-only on Windows (#6074)
+ mode |= 0o200
+ return mode
+
+
+def _check_name(method):
+ """Decorator to verify that the module being requested matches the one the
+ loader can handle.
+
+ The first argument (self) must define 'name', which the second argument is
+ compared against. If the comparison fails then ImportError is raised.
+
+ """
+ def _check_name_wrapper(self, name=None, *args, **kwargs):
+ if name is None:
+ name = self.name
+ elif self.name != name:
+ raise ImportError('loader for %s cannot handle %s' %
+ (self.name, name), name=name)
+ return method(self, name, *args, **kwargs)
+ try:
+ _wrap = _bootstrap._wrap
+ except NameError:
+ # XXX yuck
+ def _wrap(new, old):
+ for replace in ['__module__', '__name__', '__qualname__', '__doc__']:
+ if hasattr(old, replace):
+ setattr(new, replace, getattr(old, replace))
+ new.__dict__.update(old.__dict__)
+ _wrap(_check_name_wrapper, method)
+ return _check_name_wrapper
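+
+# A minimal sketch of what the decorator enforces (module name and path are
+# hypothetical):
+#
+#     loader = SourceFileLoader('pkg.mod', '/srv/pkg/mod.py')
+#     loader.get_filename()           # OK: name defaults to 'pkg.mod'
+#     loader.get_filename('pkg.mod')  # OK: names match
+#     loader.get_filename('other')    # raises ImportError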
+
+
+def _find_module_shim(self, fullname):
+ """Try to find a loader for the specified module by delegating to
+ self.find_loader().
+
+ This method is deprecated in favor of finder.find_spec().
+
+ """
+ # Call find_loader(). If it returns a string (indicating this
+ # is a namespace package portion), generate a warning and
+ # return None.
+ loader, portions = self.find_loader(fullname)
+ if loader is None and len(portions):
+ msg = 'Not importing directory {}: missing __init__'
+ _warnings.warn(msg.format(portions[0]), ImportWarning)
+ return loader
+
+
+def _classify_pyc(data, name, exc_details):
+ """Perform basic validity checking of a pyc header and return the flags field,
+ which determines how the pyc should be further validated against the source.
+
+ *data* is the contents of the pyc file. (Only the first 16 bytes are
+ required, though.)
+
+ *name* is the name of the module being imported. It is used for logging.
+
+ *exc_details* is a dictionary passed to ImportError if it is raised, for
+ improved debugging.
+
+ ImportError is raised when the magic number is incorrect or when the flags
+ field is invalid. EOFError is raised when the data is found to be truncated.
+
+ """
+ magic = data[:4]
+ if magic != MAGIC_NUMBER:
+ message = f'bad magic number in {name!r}: {magic!r}'
+ _bootstrap._verbose_message('{}', message)
+ raise ImportError(message, **exc_details)
+ if len(data) < 16:
+ message = f'reached EOF while reading pyc header of {name!r}'
+ _bootstrap._verbose_message('{}', message)
+ raise EOFError(message)
+ flags = _unpack_uint32(data[4:8])
+ # Only the first two flags are defined.
+ if flags & ~0b11:
+ message = f'invalid flags {flags!r} in {name!r}'
+ raise ImportError(message, **exc_details)
+ return flags
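+
+# Layout of the 16-byte header inspected above, per PEP 552 (summary only):
+#
+#     bytes  0-3   magic number (MAGIC_NUMBER)
+#     bytes  4-7   flags: bit 0 = hash-based pyc, bit 1 = check_source
+#     bytes  8-15  timestamp pyc: source mtime + source size (two uint32s)
+#                  hash-based pyc: 8-byte source hash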
+
+
+def _validate_timestamp_pyc(data, source_mtime, source_size, name,
+ exc_details):
+ """Validate a pyc against the source last-modified time.
+
+ *data* is the contents of the pyc file. (Only the first 16 bytes are
+ required.)
+
+ *source_mtime* is the last modified timestamp of the source file.
+
+ *source_size* is None or the size of the source file in bytes.
+
+ *name* is the name of the module being imported. It is used for logging.
+
+ *exc_details* is a dictionary passed to ImportError if it is raised, for
+ improved debugging.
+
+ An ImportError is raised if the bytecode is stale.
+
+ """
+ if _unpack_uint32(data[8:12]) != (source_mtime & 0xFFFFFFFF):
+ message = f'bytecode is stale for {name!r}'
+ _bootstrap._verbose_message('{}', message)
+ raise ImportError(message, **exc_details)
+ if (source_size is not None and
+ _unpack_uint32(data[12:16]) != (source_size & 0xFFFFFFFF)):
+ raise ImportError(f'bytecode is stale for {name!r}', **exc_details)
+
+
+def _validate_hash_pyc(data, source_hash, name, exc_details):
+ """Validate a hash-based pyc by checking the real source hash against the one in
+ the pyc header.
+
+ *data* is the contents of the pyc file. (Only the first 16 bytes are
+ required.)
+
+ *source_hash* is the importlib.util.source_hash() of the source file.
+
+ *name* is the name of the module being imported. It is used for logging.
+
+ *exc_details* is a dictionary passed to ImportError if it is raised, for
+ improved debugging.
+
+ An ImportError is raised if the bytecode is stale.
+
+ """
+ if data[8:16] != source_hash:
+ raise ImportError(
+ f'hash in bytecode doesn\'t match hash of source {name!r}',
+ **exc_details,
+ )
+
+
+def _compile_bytecode(data, name=None, bytecode_path=None, source_path=None):
+ """Compile bytecode as found in a pyc."""
+ code = marshal.loads(data)
+ if isinstance(code, _code_type):
+ _bootstrap._verbose_message('code object from {!r}', bytecode_path)
+ if source_path is not None:
+ _imp._fix_co_filename(code, source_path)
+ return code
+ else:
+ raise ImportError('Non-code object in {!r}'.format(bytecode_path),
+ name=name, path=bytecode_path)
+
+
+def _code_to_timestamp_pyc(code, mtime=0, source_size=0):
+ "Produce the data for a timestamp-based pyc."
+ data = bytearray(MAGIC_NUMBER)
+ data.extend(_pack_uint32(0))
+ data.extend(_pack_uint32(mtime))
+ data.extend(_pack_uint32(source_size))
+ data.extend(marshal.dumps(code))
+ return data
+
+
+def _code_to_hash_pyc(code, source_hash, checked=True):
+ "Produce the data for a hash-based pyc."
+ data = bytearray(MAGIC_NUMBER)
+ flags = 0b1 | checked << 1
+ data.extend(_pack_uint32(flags))
+ assert len(source_hash) == 8
+ data.extend(source_hash)
+ data.extend(marshal.dumps(code))
+ return data
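+
+# Minimal sketch of using the writers above (file names are hypothetical; the
+# supported public route is py_compile/compileall rather than these helpers):
+#
+#     import importlib.util
+#     source = open('mod.py', 'rb').read()
+#     code = compile(source, 'mod.py', 'exec')
+#     blob = _code_to_hash_pyc(code, importlib.util.source_hash(source))
+#     open('mod.pyc', 'wb').write(blob)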
+
+
+def decode_source(source_bytes):
+ """Decode bytes representing source code and return the string.
+
+ Universal newline support is used in the decoding.
+ """
+ import tokenize # To avoid bootstrap issues.
+ source_bytes_readline = _io.BytesIO(source_bytes).readline
+ encoding = tokenize.detect_encoding(source_bytes_readline)
+ newline_decoder = _io.IncrementalNewlineDecoder(None, True)
+ return newline_decoder.decode(source_bytes.decode(encoding[0]))
+
+
+# Module specifications #######################################################
+
+_POPULATE = object()
+
+
+def spec_from_file_location(name, location=None, *, loader=None,
+ submodule_search_locations=_POPULATE):
+ """Return a module spec based on a file location.
+
+ To indicate that the module is a package, set
+ submodule_search_locations to a list of directory paths. An
+ empty list is sufficient, though it's not otherwise useful to the
+ import system.
+
+ The loader must take a spec as its only __init__() arg.
+
+ """
+ if location is None:
+ # The caller may simply want a partially populated location-
+ # oriented spec. So we set the location to a bogus value and
+ # fill in as much as we can.
+ location = '<unknown>'
+ if hasattr(loader, 'get_filename'):
+ # ExecutionLoader
+ try:
+ location = loader.get_filename(name)
+ except ImportError:
+ pass
+ else:
+ location = _os.fspath(location)
+
+ # If the location is on the filesystem, but doesn't actually exist,
+ # we could return None here, indicating that the location is not
+ # valid. However, we don't have a good way of testing since an
+ # indirect location (e.g. a zip file or URL) will look like a
+ # non-existent file relative to the filesystem.
+
+ spec = _bootstrap.ModuleSpec(name, loader, origin=location)
+ spec._set_fileattr = True
+
+ # Pick a loader if one wasn't provided.
+ if loader is None:
+ for loader_class, suffixes in _get_supported_file_loaders():
+ if location.endswith(tuple(suffixes)):
+ loader = loader_class(name, location)
+ spec.loader = loader
+ break
+ else:
+ return None
+
+ # Set submodule_search_locations appropriately.
+ if submodule_search_locations is _POPULATE:
+ # Check the loader.
+ if hasattr(loader, 'is_package'):
+ try:
+ is_package = loader.is_package(name)
+ except ImportError:
+ pass
+ else:
+ if is_package:
+ spec.submodule_search_locations = []
+ else:
+ spec.submodule_search_locations = submodule_search_locations
+ if spec.submodule_search_locations == []:
+ if location:
+ dirname = _path_split(location)[0]
+ spec.submodule_search_locations.append(dirname)
+
+ return spec
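+
+# Typical use through the public alias importlib.util.spec_from_file_location()
+# to load a module straight from a file (name and path are hypothetical):
+#
+#     import importlib.util
+#     spec = importlib.util.spec_from_file_location('plugin', '/srv/plugins/plugin.py')
+#     module = importlib.util.module_from_spec(spec)
+#     spec.loader.exec_module(module)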
+
+
+# Loaders #####################################################################
+
+class WindowsRegistryFinder:
+
+ """Meta path finder for modules declared in the Windows registry."""
+
+ REGISTRY_KEY = (
+ 'Software\\Python\\PythonCore\\{sys_version}'
+ '\\Modules\\{fullname}')
+ REGISTRY_KEY_DEBUG = (
+ 'Software\\Python\\PythonCore\\{sys_version}'
+ '\\Modules\\{fullname}\\Debug')
+ DEBUG_BUILD = False # Changed in _setup()
+
+ @classmethod
+ def _open_registry(cls, key):
+ try:
+ return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, key)
+ except OSError:
+ return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key)
+
+ @classmethod
+ def _search_registry(cls, fullname):
+ if cls.DEBUG_BUILD:
+ registry_key = cls.REGISTRY_KEY_DEBUG
+ else:
+ registry_key = cls.REGISTRY_KEY
+ key = registry_key.format(fullname=fullname,
+ sys_version='%d.%d' % sys.version_info[:2])
+ try:
+ with cls._open_registry(key) as hkey:
+ filepath = _winreg.QueryValue(hkey, '')
+ except OSError:
+ return None
+ return filepath
+
+ @classmethod
+ def find_spec(cls, fullname, path=None, target=None):
+ filepath = cls._search_registry(fullname)
+ if filepath is None:
+ return None
+ try:
+ _path_stat(filepath)
+ except OSError:
+ return None
+ for loader, suffixes in _get_supported_file_loaders():
+ if filepath.endswith(tuple(suffixes)):
+ spec = _bootstrap.spec_from_loader(fullname,
+ loader(fullname, filepath),
+ origin=filepath)
+ return spec
+
+ @classmethod
+ def find_module(cls, fullname, path=None):
+ """Find module named in the registry.
+
+ This method is deprecated. Use exec_module() instead.
+
+ """
+ spec = cls.find_spec(fullname, path)
+ if spec is not None:
+ return spec.loader
+ else:
+ return None
+
+
+class _LoaderBasics:
+
+ """Base class of common code needed by both SourceLoader and
+ SourcelessFileLoader."""
+
+ def is_package(self, fullname):
+ """Concrete implementation of InspectLoader.is_package by checking if
+ the path returned by get_filename has a filename of '__init__.py'."""
+ filename = _path_split(self.get_filename(fullname))[1]
+ filename_base = filename.rsplit('.', 1)[0]
+ tail_name = fullname.rpartition('.')[2]
+ return filename_base == '__init__' and tail_name != '__init__'
+
+ def create_module(self, spec):
+ """Use default semantics for module creation."""
+
+ def exec_module(self, module):
+ """Execute the module."""
+ code = self.get_code(module.__name__)
+ if code is None:
+ raise ImportError('cannot load module {!r} when get_code() '
+ 'returns None'.format(module.__name__))
+ _bootstrap._call_with_frames_removed(exec, code, module.__dict__)
+
+ def load_module(self, fullname):
+ """This module is deprecated."""
+ return _bootstrap._load_module_shim(self, fullname)
+
+
+class SourceLoader(_LoaderBasics):
+
+ def path_mtime(self, path):
+ """Optional method that returns the modification time (an int) for the
+ specified path (a str).
+
+ Raises OSError when the path cannot be handled.
+ """
+ raise OSError
+
+ def path_stats(self, path):
+ """Optional method returning a metadata dict for the specified
+ path (a str).
+
+ Possible keys:
+ - 'mtime' (mandatory) is the numeric timestamp of last source
+ code modification;
+ - 'size' (optional) is the size in bytes of the source code.
+
+ Implementing this method allows the loader to read bytecode files.
+ Raises OSError when the path cannot be handled.
+ """
+ return {'mtime': self.path_mtime(path)}
+
+ def _cache_bytecode(self, source_path, cache_path, data):
+ """Optional method which writes data (bytes) to a file path (a str).
+
+ Implementing this method allows for the writing of bytecode files.
+
+ The source path is needed in order to correctly transfer permissions.
+ """
+ # For backwards compatibility, we delegate to set_data()
+ return self.set_data(cache_path, data)
+
+ def set_data(self, path, data):
+ """Optional method which writes data (bytes) to a file path (a str).
+
+ Implementing this method allows for the writing of bytecode files.
+ """
+
+
+ def get_source(self, fullname):
+ """Concrete implementation of InspectLoader.get_source."""
+ path = self.get_filename(fullname)
+ try:
+ source_bytes = self.get_data(path)
+ except OSError as exc:
+ raise ImportError('source not available through get_data()',
+ name=fullname) from exc
+ return decode_source(source_bytes)
+
+ def source_to_code(self, data, path, *, _optimize=-1):
+ """Return the code object compiled from source.
+
+ The 'data' argument can be any object type that compile() supports.
+ """
+ return _bootstrap._call_with_frames_removed(compile, data, path, 'exec',
+ dont_inherit=True, optimize=_optimize)
+
+ def get_code(self, fullname):
+ """Concrete implementation of InspectLoader.get_code.
+
+ Reading of bytecode requires path_stats to be implemented. To write
+ bytecode, set_data must also be implemented.
+
+ """
+ source_path = self.get_filename(fullname)
+ source_mtime = None
+ source_bytes = None
+ source_hash = None
+ hash_based = False
+ check_source = True
+ try:
+ bytecode_path = cache_from_source(source_path)
+ except NotImplementedError:
+ bytecode_path = None
+ else:
+ try:
+ st = self.path_stats(source_path)
+ except OSError:
+ pass
+ else:
+ source_mtime = int(st['mtime'])
+ try:
+ data = self.get_data(bytecode_path)
+ except OSError:
+ pass
+ else:
+ exc_details = {
+ 'name': fullname,
+ 'path': bytecode_path,
+ }
+ try:
+ flags = _classify_pyc(data, fullname, exc_details)
+ bytes_data = memoryview(data)[16:]
+ hash_based = flags & 0b1 != 0
+ if hash_based:
+ check_source = flags & 0b10 != 0
+ if (_imp.check_hash_based_pycs != 'never' and
+ (check_source or
+ _imp.check_hash_based_pycs == 'always')):
+ source_bytes = self.get_data(source_path)
+ source_hash = _imp.source_hash(
+ _RAW_MAGIC_NUMBER,
+ source_bytes,
+ )
+ _validate_hash_pyc(data, source_hash, fullname,
+ exc_details)
+ else:
+ _validate_timestamp_pyc(
+ data,
+ source_mtime,
+ st['size'],
+ fullname,
+ exc_details,
+ )
+ except (ImportError, EOFError):
+ pass
+ else:
+ _bootstrap._verbose_message('{} matches {}', bytecode_path,
+ source_path)
+ return _compile_bytecode(bytes_data, name=fullname,
+ bytecode_path=bytecode_path,
+ source_path=source_path)
+ if source_bytes is None:
+ source_bytes = self.get_data(source_path)
+ code_object = self.source_to_code(source_bytes, source_path)
+ _bootstrap._verbose_message('code object from {}', source_path)
+ if (not sys.dont_write_bytecode and bytecode_path is not None and
+ source_mtime is not None):
+ if hash_based:
+ if source_hash is None:
+ source_hash = _imp.source_hash(source_bytes)
+ data = _code_to_hash_pyc(code_object, source_hash, check_source)
+ else:
+ data = _code_to_timestamp_pyc(code_object, source_mtime,
+ len(source_bytes))
+ try:
+ self._cache_bytecode(source_path, bytecode_path, data)
+ except NotImplementedError:
+ pass
+ return code_object
+
+
+class FileLoader:
+
+ """Base file loader class which implements the loader protocol methods that
+ require file system usage."""
+
+ def __init__(self, fullname, path):
+ """Cache the module name and the path to the file found by the
+ finder."""
+ self.name = fullname
+ self.path = path
+
+ def __eq__(self, other):
+ return (self.__class__ == other.__class__ and
+ self.__dict__ == other.__dict__)
+
+ def __hash__(self):
+ return hash(self.name) ^ hash(self.path)
+
+ @_check_name
+ def load_module(self, fullname):
+ """Load a module from a file.
+
+ This method is deprecated. Use exec_module() instead.
+
+ """
+ # The only reason for this method is for the name check.
+ # Issue #14857: Avoid the zero-argument form of super so the implementation
+ # of that form can be updated without breaking the frozen module
+ return super(FileLoader, self).load_module(fullname)
+
+ @_check_name
+ def get_filename(self, fullname):
+ """Return the path to the source file as found by the finder."""
+ return self.path
+
+ def get_data(self, path):
+ """Return the data from path as raw bytes."""
+ if isinstance(self, (SourceLoader, ExtensionFileLoader)):
+ with _io.open_code(str(path)) as file:
+ return file.read()
+ else:
+ with _io.FileIO(path, 'r') as file:
+ return file.read()
+
+ # ResourceReader ABC API.
+
+ @_check_name
+ def get_resource_reader(self, module):
+ if self.is_package(module):
+ return self
+ return None
+
+ def open_resource(self, resource):
+ path = _path_join(_path_split(self.path)[0], resource)
+ return _io.FileIO(path, 'r')
+
+ def resource_path(self, resource):
+ if not self.is_resource(resource):
+ raise FileNotFoundError
+ path = _path_join(_path_split(self.path)[0], resource)
+ return path
+
+ def is_resource(self, name):
+ if path_sep in name:
+ return False
+ path = _path_join(_path_split(self.path)[0], name)
+ return _path_isfile(path)
+
+ def contents(self):
+ return iter(_os.listdir(_path_split(self.path)[0]))
+
+
+class SourceFileLoader(FileLoader, SourceLoader):
+
+ """Concrete implementation of SourceLoader using the file system."""
+
+ def path_stats(self, path):
+ """Return the metadata for the path."""
+ st = _path_stat(path)
+ return {'mtime': st.st_mtime, 'size': st.st_size}
+
+ def _cache_bytecode(self, source_path, bytecode_path, data):
+ # Adapt between the two APIs
+ mode = _calc_mode(source_path)
+ return self.set_data(bytecode_path, data, _mode=mode)
+
+ def set_data(self, path, data, *, _mode=0o666):
+ """Write bytes data to a file."""
+ parent, filename = _path_split(path)
+ path_parts = []
+ # Figure out what directories are missing.
+ while parent and not _path_isdir(parent):
+ parent, part = _path_split(parent)
+ path_parts.append(part)
+ # Create needed directories.
+ for part in reversed(path_parts):
+ parent = _path_join(parent, part)
+ try:
+ _os.mkdir(parent)
+ except FileExistsError:
+ # Probably another Python process already created the dir.
+ continue
+ except OSError as exc:
+ # Could be a permission error, read-only filesystem: just forget
+ # about writing the data.
+ _bootstrap._verbose_message('could not create {!r}: {!r}',
+ parent, exc)
+ return
+ try:
+ _write_atomic(path, data, _mode)
+ _bootstrap._verbose_message('created {!r}', path)
+ except OSError as exc:
+ # Same as above: just don't write the bytecode.
+ _bootstrap._verbose_message('could not create {!r}: {!r}', path,
+ exc)
+
+
+class SourcelessFileLoader(FileLoader, _LoaderBasics):
+
+ """Loader which handles sourceless file imports."""
+
+ def get_code(self, fullname):
+ path = self.get_filename(fullname)
+ data = self.get_data(path)
+ # Call _classify_pyc to do basic validation of the pyc but ignore the
+ # result. There's no source to check against.
+ exc_details = {
+ 'name': fullname,
+ 'path': path,
+ }
+ _classify_pyc(data, fullname, exc_details)
+ return _compile_bytecode(
+ memoryview(data)[16:],
+ name=fullname,
+ bytecode_path=path,
+ )
+
+ def get_source(self, fullname):
+ """Return None as there is no source code."""
+ return None
+
+
+# Filled in by _setup().
+EXTENSION_SUFFIXES = []
+
+
+class ExtensionFileLoader(FileLoader, _LoaderBasics):
+
+ """Loader for extension modules.
+
+ The constructor is designed to work with FileFinder.
+
+ """
+
+ def __init__(self, name, path):
+ self.name = name
+ self.path = path
+
+ def __eq__(self, other):
+ return (self.__class__ == other.__class__ and
+ self.__dict__ == other.__dict__)
+
+ def __hash__(self):
+ return hash(self.name) ^ hash(self.path)
+
+ def create_module(self, spec):
+ """Create an unitialized extension module"""
+ module = _bootstrap._call_with_frames_removed(
+ _imp.create_dynamic, spec)
+ _bootstrap._verbose_message('extension module {!r} loaded from {!r}',
+ spec.name, self.path)
+ return module
+
+ def exec_module(self, module):
+ """Initialize an extension module"""
+ _bootstrap._call_with_frames_removed(_imp.exec_dynamic, module)
+ _bootstrap._verbose_message('extension module {!r} executed from {!r}',
+ self.name, self.path)
+
+ def is_package(self, fullname):
+ """Return True if the extension module is a package."""
+ file_name = _path_split(self.path)[1]
+ return any(file_name == '__init__' + suffix
+ for suffix in EXTENSION_SUFFIXES)
+
+ def get_code(self, fullname):
+ """Return None as an extension module cannot create a code object."""
+ return None
+
+ def get_source(self, fullname):
+ """Return None as extension modules have no source code."""
+ return None
+
+ @_check_name
+ def get_filename(self, fullname):
+ """Return the path to the source file as found by the finder."""
+ return self.path
+
+
+class _NamespacePath:
+ """Represents a namespace package's path. It uses the module name
+ to find its parent module, and from there it looks up the parent's
+ __path__. When this changes, the module's own path is recomputed,
+ using path_finder. For top-level modules, the parent module's path
+ is sys.path."""
+
+ def __init__(self, name, path, path_finder):
+ self._name = name
+ self._path = path
+ self._last_parent_path = tuple(self._get_parent_path())
+ self._path_finder = path_finder
+
+ def _find_parent_path_names(self):
+ """Returns a tuple of (parent-module-name, parent-path-attr-name)"""
+ parent, dot, me = self._name.rpartition('.')
+ if dot == '':
+ # This is a top-level module. sys.path contains the parent path.
+ return 'sys', 'path'
+ # Not a top-level module. parent-module.__path__ contains the
+ # parent path.
+ return parent, '__path__'
+
+ def _get_parent_path(self):
+ parent_module_name, path_attr_name = self._find_parent_path_names()
+ return getattr(sys.modules[parent_module_name], path_attr_name)
+
+ def _recalculate(self):
+ # If the parent's path has changed, recalculate _path
+ parent_path = tuple(self._get_parent_path()) # Make a copy
+ if parent_path != self._last_parent_path:
+ spec = self._path_finder(self._name, parent_path)
+ # Note that no changes are made if a loader is returned, but we
+ # do remember the new parent path
+ if spec is not None and spec.loader is None:
+ if spec.submodule_search_locations:
+ self._path = spec.submodule_search_locations
+ self._last_parent_path = parent_path # Save the copy
+ return self._path
+
+ def __iter__(self):
+ return iter(self._recalculate())
+
+ def __getitem__(self, index):
+ return self._recalculate()[index]
+
+ def __setitem__(self, index, path):
+ self._path[index] = path
+
+ def __len__(self):
+ return len(self._recalculate())
+
+ def __repr__(self):
+ return '_NamespacePath({!r})'.format(self._path)
+
+ def __contains__(self, item):
+ return item in self._recalculate()
+
+ def append(self, item):
+ self._path.append(item)
+
+
+# We use this exclusively in module_from_spec() for backward-compatibility.
+class _NamespaceLoader:
+ def __init__(self, name, path, path_finder):
+ self._path = _NamespacePath(name, path, path_finder)
+
+ @classmethod
+ def module_repr(cls, module):
+ """Return repr for the module.
+
+ The method is deprecated. The import machinery does the job itself.
+
+ """
+ return '<module {!r} (namespace)>'.format(module.__name__)
+
+ def is_package(self, fullname):
+ return True
+
+ def get_source(self, fullname):
+ return ''
+
+ def get_code(self, fullname):
+ return compile('', '<string>', 'exec', dont_inherit=True)
+
+ def create_module(self, spec):
+ """Use default semantics for module creation."""
+
+ def exec_module(self, module):
+ pass
+
+ def load_module(self, fullname):
+ """Load a namespace module.
+
+ This method is deprecated. Use exec_module() instead.
+
+ """
+ # The import system never calls this method.
+ _bootstrap._verbose_message('namespace module loaded with path {!r}',
+ self._path)
+ return _bootstrap._load_module_shim(self, fullname)
+
+
+# Finders #####################################################################
+
+class PathFinder:
+
+ """Meta path finder for sys.path and package __path__ attributes."""
+
+ @classmethod
+ def invalidate_caches(cls):
+ """Call the invalidate_caches() method on all path entry finders
+ stored in sys.path_importer_cache (where implemented)."""
+ for name, finder in list(sys.path_importer_cache.items()):
+ if finder is None:
+ del sys.path_importer_cache[name]
+ elif hasattr(finder, 'invalidate_caches'):
+ finder.invalidate_caches()
+
+ @classmethod
+ def _path_hooks(cls, path):
+ """Search sys.path_hooks for a finder for 'path'."""
+ if sys.path_hooks is not None and not sys.path_hooks:
+ _warnings.warn('sys.path_hooks is empty', ImportWarning)
+ for hook in sys.path_hooks:
+ try:
+ return hook(path)
+ except ImportError:
+ continue
+ else:
+ return None
+
+ @classmethod
+ def _path_importer_cache(cls, path):
+ """Get the finder for the path entry from sys.path_importer_cache.
+
+ If the path entry is not in the cache, find the appropriate finder
+ and cache it. If no finder is available, store None.
+
+ """
+ if path == '':
+ try:
+ path = _os.getcwd()
+ except FileNotFoundError:
+ # Don't cache the failure as the cwd can easily change to
+ # a valid directory later on.
+ return None
+ try:
+ finder = sys.path_importer_cache[path]
+ except KeyError:
+ finder = cls._path_hooks(path)
+ sys.path_importer_cache[path] = finder
+ return finder
+
+ @classmethod
+ def _legacy_get_spec(cls, fullname, finder):
+ # This would be a good place for a DeprecationWarning if
+ # we ended up going that route.
+ if hasattr(finder, 'find_loader'):
+ loader, portions = finder.find_loader(fullname)
+ else:
+ loader = finder.find_module(fullname)
+ portions = []
+ if loader is not None:
+ return _bootstrap.spec_from_loader(fullname, loader)
+ spec = _bootstrap.ModuleSpec(fullname, None)
+ spec.submodule_search_locations = portions
+ return spec
+
+ @classmethod
+ def _get_spec(cls, fullname, path, target=None):
+ """Find the loader or namespace_path for this module/package name."""
+ # If this ends up being a namespace package, namespace_path is
+ # the list of paths that will become its __path__
+ namespace_path = []
+ for entry in path:
+ if not isinstance(entry, (str, bytes)):
+ continue
+ finder = cls._path_importer_cache(entry)
+ if finder is not None:
+ if hasattr(finder, 'find_spec'):
+ spec = finder.find_spec(fullname, target)
+ else:
+ spec = cls._legacy_get_spec(fullname, finder)
+ if spec is None:
+ continue
+ if spec.loader is not None:
+ return spec
+ portions = spec.submodule_search_locations
+ if portions is None:
+ raise ImportError('spec missing loader')
+ # This is possibly part of a namespace package.
+ # Remember these path entries (if any) for when we
+ # create a namespace package, and continue iterating
+ # on path.
+ namespace_path.extend(portions)
+ else:
+ spec = _bootstrap.ModuleSpec(fullname, None)
+ spec.submodule_search_locations = namespace_path
+ return spec
+
+ @classmethod
+ def find_spec(cls, fullname, path=None, target=None):
+ """Try to find a spec for 'fullname' on sys.path or 'path'.
+
+ The search is based on sys.path_hooks and sys.path_importer_cache.
+ """
+ if path is None:
+ path = sys.path
+ spec = cls._get_spec(fullname, path, target)
+ if spec is None:
+ return None
+ elif spec.loader is None:
+ namespace_path = spec.submodule_search_locations
+ if namespace_path:
+ # We found at least one namespace path. Return a spec which
+ # can create the namespace package.
+ spec.origin = None
+ spec.submodule_search_locations = _NamespacePath(fullname, namespace_path, cls._get_spec)
+ return spec
+ else:
+ return None
+ else:
+ return spec
+
+ @classmethod
+ def find_module(cls, fullname, path=None):
+ """find the module on sys.path or 'path' based on sys.path_hooks and
+ sys.path_importer_cache.
+
+ This method is deprecated. Use find_spec() instead.
+
+ """
+ spec = cls.find_spec(fullname, path)
+ if spec is None:
+ return None
+ return spec.loader
+
+ @classmethod
+ def find_distributions(cls, *args, **kwargs):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` is indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ from importlib.metadata import MetadataPathFinder
+ return MetadataPathFinder.find_distributions(*args, **kwargs)
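+
+# PathFinder is normally consulted via sys.meta_path, but it can be queried
+# directly; a small sketch using a stdlib package:
+#
+#     spec = PathFinder.find_spec('email')
+#     spec.origin                      # filesystem path of email/__init__.py
+#     spec.submodule_search_locations  # the package's __path__ entries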
+
+
+class FileFinder:
+
+ """File-based finder.
+
+ Interactions with the file system are cached for performance, being
+ refreshed when the directory the finder is handling has been modified.
+
+ """
+
+ def __init__(self, path, *loader_details):
+ """Initialize with the path to search on and a variable number of
+ 2-tuples containing the loader and the file suffixes the loader
+ recognizes."""
+ loaders = []
+ for loader, suffixes in loader_details:
+ loaders.extend((suffix, loader) for suffix in suffixes)
+ self._loaders = loaders
+ # Base (directory) path
+ self.path = path or '.'
+ self._path_mtime = -1
+ self._path_cache = set()
+ self._relaxed_path_cache = set()
+
+ def invalidate_caches(self):
+ """Invalidate the directory mtime."""
+ self._path_mtime = -1
+
+ find_module = _find_module_shim
+
+ def find_loader(self, fullname):
+ """Try to find a loader for the specified module, or the namespace
+ package portions. Returns (loader, list-of-portions).
+
+ This method is deprecated. Use find_spec() instead.
+
+ """
+ spec = self.find_spec(fullname)
+ if spec is None:
+ return None, []
+ return spec.loader, spec.submodule_search_locations or []
+
+ def _get_spec(self, loader_class, fullname, path, smsl, target):
+ loader = loader_class(fullname, path)
+ return spec_from_file_location(fullname, path, loader=loader,
+ submodule_search_locations=smsl)
+
+ def find_spec(self, fullname, target=None):
+ """Try to find a spec for the specified module.
+
+ Returns the matching spec, or None if not found.
+ """
+ is_namespace = False
+ tail_module = fullname.rpartition('.')[2]
+ try:
+ mtime = _path_stat(self.path or _os.getcwd()).st_mtime
+ except OSError:
+ mtime = -1
+ if mtime != self._path_mtime:
+ self._fill_cache()
+ self._path_mtime = mtime
+ # tail_module keeps the original casing, for __file__ and friends
+ if _relax_case():
+ cache = self._relaxed_path_cache
+ cache_module = tail_module.lower()
+ else:
+ cache = self._path_cache
+ cache_module = tail_module
+ # Check if the module is the name of a directory (and thus a package).
+ if cache_module in cache:
+ base_path = _path_join(self.path, tail_module)
+ for suffix, loader_class in self._loaders:
+ init_filename = '__init__' + suffix
+ full_path = _path_join(base_path, init_filename)
+ if _path_isfile(full_path):
+ return self._get_spec(loader_class, fullname, full_path, [base_path], target)
+ else:
+ # If a namespace package, return the path if we don't
+ # find a module in the next section.
+ is_namespace = _path_isdir(base_path)
+ # Check whether a file with a proper suffix exists.
+ for suffix, loader_class in self._loaders:
+ full_path = _path_join(self.path, tail_module + suffix)
+ _bootstrap._verbose_message('trying {}', full_path, verbosity=2)
+ if cache_module + suffix in cache:
+ if _path_isfile(full_path):
+ return self._get_spec(loader_class, fullname, full_path,
+ None, target)
+ if is_namespace:
+ _bootstrap._verbose_message('possible namespace for {}', base_path)
+ spec = _bootstrap.ModuleSpec(fullname, None)
+ spec.submodule_search_locations = [base_path]
+ return spec
+ return None
+
+ def _fill_cache(self):
+ """Fill the cache of potential modules and packages for this directory."""
+ path = self.path
+ try:
+ contents = _os.listdir(path or _os.getcwd())
+ except (FileNotFoundError, PermissionError, NotADirectoryError):
+ # Directory has either been removed, turned into a file, or made
+ # unreadable.
+ contents = []
+ # We store two cached versions, to handle runtime changes of the
+ # PYTHONCASEOK environment variable.
+ if not sys.platform.startswith('win'):
+ self._path_cache = set(contents)
+ else:
+ # Windows users can import modules with case-insensitive file
+ # suffixes (for legacy reasons). Make the suffix lowercase here
+ # so it's done once instead of for every import. This is safe as
+ # the specified suffixes to check against are always specified in a
+ # case-sensitive manner.
+ lower_suffix_contents = set()
+ for item in contents:
+ name, dot, suffix = item.partition('.')
+ if dot:
+ new_name = '{}.{}'.format(name, suffix.lower())
+ else:
+ new_name = name
+ lower_suffix_contents.add(new_name)
+ self._path_cache = lower_suffix_contents
+ if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
+ self._relaxed_path_cache = {fn.lower() for fn in contents}
+
+ @classmethod
+ def path_hook(cls, *loader_details):
+ """A class method which returns a closure to use on sys.path_hook
+ which will return an instance using the specified loaders and the path
+ called on the closure.
+
+ If the path called on the closure is not a directory, ImportError is
+ raised.
+
+ """
+ def path_hook_for_FileFinder(path):
+ """Path hook for importlib.machinery.FileFinder."""
+ if not _path_isdir(path):
+ raise ImportError('only directories are supported', path=path)
+ return cls(path, *loader_details)
+
+ return path_hook_for_FileFinder
+
+ def __repr__(self):
+ return 'FileFinder({!r})'.format(self.path)
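+
+# Sketch of the hook that _install() registers below: path_hook() binds the
+# loader details, and the returned closure builds a FileFinder per directory
+# (the directory shown is hypothetical):
+#
+#     hook = FileFinder.path_hook(*_get_supported_file_loaders())
+#     finder = hook('/usr/lib/python3.8')  # raises ImportError for non-directories
+#     finder.find_spec('email')            # a spec, or None if not in that directory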
+
+
+# Import setup ###############################################################
+
+def _fix_up_module(ns, name, pathname, cpathname=None):
+ # This function is used by PyImport_ExecCodeModuleObject().
+ loader = ns.get('__loader__')
+ spec = ns.get('__spec__')
+ if not loader:
+ if spec:
+ loader = spec.loader
+ elif pathname == cpathname:
+ loader = SourcelessFileLoader(name, pathname)
+ else:
+ loader = SourceFileLoader(name, pathname)
+ if not spec:
+ spec = spec_from_file_location(name, pathname, loader=loader)
+ try:
+ ns['__spec__'] = spec
+ ns['__loader__'] = loader
+ ns['__file__'] = pathname
+ ns['__cached__'] = cpathname
+ except Exception:
+ # Not important enough to report.
+ pass
+
+
+def _get_supported_file_loaders():
+ """Returns a list of file-based module loaders.
+
+ Each item is a tuple (loader, suffixes).
+ """
+ extensions = ExtensionFileLoader, _imp.extension_suffixes()
+ source = SourceFileLoader, SOURCE_SUFFIXES
+ bytecode = SourcelessFileLoader, BYTECODE_SUFFIXES
+ return [extensions, source, bytecode]
+
+
+def _setup(_bootstrap_module):
+ """Setup the path-based importers for importlib by importing needed
+ built-in modules and injecting them into the global namespace.
+
+ Other components are extracted from the core bootstrap module.
+
+ """
+ global sys, _imp, _bootstrap
+ _bootstrap = _bootstrap_module
+ sys = _bootstrap.sys
+ _imp = _bootstrap._imp
+
+ # Directly load built-in modules needed during bootstrap.
+ self_module = sys.modules[__name__]
+ for builtin_name in ('_io', '_warnings', 'builtins', 'marshal'):
+ if builtin_name not in sys.modules:
+ builtin_module = _bootstrap._builtin_from_name(builtin_name)
+ else:
+ builtin_module = sys.modules[builtin_name]
+ setattr(self_module, builtin_name, builtin_module)
+
+ # Directly load the os module (needed during bootstrap).
+ os_details = ('posix', ['/']), ('nt', ['\\', '/'])
+ for builtin_os, path_separators in os_details:
+ # Assumption made in _path_join()
+ assert all(len(sep) == 1 for sep in path_separators)
+ path_sep = path_separators[0]
+ if builtin_os in sys.modules:
+ os_module = sys.modules[builtin_os]
+ break
+ else:
+ try:
+ os_module = _bootstrap._builtin_from_name(builtin_os)
+ break
+ except ImportError:
+ continue
+ else:
+ raise ImportError('importlib requires posix or nt')
+ setattr(self_module, '_os', os_module)
+ setattr(self_module, 'path_sep', path_sep)
+ setattr(self_module, 'path_separators', ''.join(path_separators))
+ setattr(self_module, '_pathseps_with_colon', {f':{s}' for s in path_separators})
+
+ # Directly load the _thread module (needed during bootstrap).
+ thread_module = _bootstrap._builtin_from_name('_thread')
+ setattr(self_module, '_thread', thread_module)
+
+ # Directly load the _weakref module (needed during bootstrap).
+ weakref_module = _bootstrap._builtin_from_name('_weakref')
+ setattr(self_module, '_weakref', weakref_module)
+
+ # Directly load the winreg module (needed during bootstrap).
+ if builtin_os == 'nt':
+ winreg_module = _bootstrap._builtin_from_name('winreg')
+ setattr(self_module, '_winreg', winreg_module)
+
+ # Constants
+ setattr(self_module, '_relax_case', _make_relax_case())
+ EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())
+ if builtin_os == 'nt':
+ SOURCE_SUFFIXES.append('.pyw')
+ if '_d.pyd' in EXTENSION_SUFFIXES:
+ WindowsRegistryFinder.DEBUG_BUILD = True
+
+
+def _install(_bootstrap_module):
+ """Install the path-based import components."""
+ _setup(_bootstrap_module)
+ supported_loaders = _get_supported_file_loaders()
+ sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])
+ sys.meta_path.append(PathFinder)
diff --git a/common/py3-stdlib/importlib/abc.py b/common/py3-stdlib/importlib/abc.py
new file mode 100644
index 0000000..4b2d3de
--- /dev/null
+++ b/common/py3-stdlib/importlib/abc.py
@@ -0,0 +1,388 @@
+"""Abstract base classes related to import."""
+from . import _bootstrap
+from . import _bootstrap_external
+from . import machinery
+try:
+ import _frozen_importlib
+except ImportError as exc:
+ if exc.name != '_frozen_importlib':
+ raise
+ _frozen_importlib = None
+try:
+ import _frozen_importlib_external
+except ImportError as exc:
+ _frozen_importlib_external = _bootstrap_external
+import abc
+import warnings
+
+
+def _register(abstract_cls, *classes):
+ for cls in classes:
+ abstract_cls.register(cls)
+ if _frozen_importlib is not None:
+ try:
+ frozen_cls = getattr(_frozen_importlib, cls.__name__)
+ except AttributeError:
+ frozen_cls = getattr(_frozen_importlib_external, cls.__name__)
+ abstract_cls.register(frozen_cls)
+
+
+class Finder(metaclass=abc.ABCMeta):
+
+ """Legacy abstract base class for import finders.
+
+ It may be subclassed for compatibility with legacy third party
+ reimplementations of the import system. Otherwise, finder
+ implementations should derive from the more specific MetaPathFinder
+ or PathEntryFinder ABCs.
+
+ Deprecated since Python 3.3
+ """
+
+ @abc.abstractmethod
+ def find_module(self, fullname, path=None):
+ """An abstract method that should find a module.
+ The fullname is a str and the optional path is a str or None.
+ Returns a Loader object or None.
+ """
+
+
+class MetaPathFinder(Finder):
+
+ """Abstract base class for import finders on sys.meta_path."""
+
+ # We don't define find_spec() here since that would break
+ # hasattr checks we do to support backward compatibility.
+
+ def find_module(self, fullname, path):
+ """Return a loader for the module.
+
+ If no module is found, return None. The fullname is a str and
+ the path is a list of strings or None.
+
+ This method is deprecated since Python 3.4 in favor of
+ finder.find_spec(). If find_spec() exists then backwards-compatible
+ functionality is provided for this method.
+
+ """
+ warnings.warn("MetaPathFinder.find_module() is deprecated since Python "
+ "3.4 in favor of MetaPathFinder.find_spec() "
+ "(available since 3.4)",
+ DeprecationWarning,
+ stacklevel=2)
+ if not hasattr(self, 'find_spec'):
+ return None
+ found = self.find_spec(fullname, path)
+ return found.loader if found is not None else None
+
+ def invalidate_caches(self):
+ """An optional method for clearing the finder's cache, if any.
+ This method is used by importlib.invalidate_caches().
+ """
+
+_register(MetaPathFinder, machinery.BuiltinImporter, machinery.FrozenImporter,
+ machinery.PathFinder, machinery.WindowsRegistryFinder)
+
+
+class PathEntryFinder(Finder):
+
+ """Abstract base class for path entry finders used by PathFinder."""
+
+ # We don't define find_spec() here since that would break
+ # hasattr checks we do to support backward compatibility.
+
+ def find_loader(self, fullname):
+ """Return (loader, namespace portion) for the path entry.
+
+ The fullname is a str. The namespace portion is a sequence of
+ path entries contributing to part of a namespace package. The
+ sequence may be empty. If loader is not None, the portion will
+ be ignored.
+
+ The portion will be discarded if another path entry finder
+ locates the module as a normal module or package.
+
+ This method is deprecated since Python 3.4 in favor of
+ finder.find_spec(). If find_spec() is provided then backwards-compatible
+ functionality is provided.
+ """
+ warnings.warn("PathEntryFinder.find_loader() is deprecated since Python "
+ "3.4 in favor of PathEntryFinder.find_spec() "
+ "(available since 3.4)",
+ DeprecationWarning,
+ stacklevel=2)
+ if not hasattr(self, 'find_spec'):
+ return None, []
+ found = self.find_spec(fullname)
+ if found is not None:
+ if not found.submodule_search_locations:
+ portions = []
+ else:
+ portions = found.submodule_search_locations
+ return found.loader, portions
+ else:
+ return None, []
+
+ find_module = _bootstrap_external._find_module_shim
+
+ def invalidate_caches(self):
+ """An optional method for clearing the finder's cache, if any.
+ This method is used by PathFinder.invalidate_caches().
+ """
+
+_register(PathEntryFinder, machinery.FileFinder)
+
+
+class Loader(metaclass=abc.ABCMeta):
+
+ """Abstract base class for import loaders."""
+
+ def create_module(self, spec):
+ """Return a module to initialize and into which to load.
+
+ This method should raise ImportError if anything prevents it
+ from creating a new module. It may return None to indicate
+ that the spec should create the new module.
+ """
+ # By default, defer to default semantics for the new module.
+ return None
+
+ # We don't define exec_module() here since that would break
+ # hasattr checks we do to support backward compatibility.
+
+ def load_module(self, fullname):
+ """Return the loaded module.
+
+ The module must be added to sys.modules and have import-related
+ attributes set properly. The fullname is a str.
+
+ ImportError is raised on failure.
+
+ This method is deprecated in favor of loader.exec_module(). If
+ exec_module() exists then it is used to provide backwards-compatible
+ functionality for this method.
+
+ """
+ if not hasattr(self, 'exec_module'):
+ raise ImportError
+ return _bootstrap._load_module_shim(self, fullname)
+
+ def module_repr(self, module):
+ """Return a module's repr.
+
+ Used by the module type when the method does not raise
+ NotImplementedError.
+
+ This method is deprecated.
+
+ """
+ # The exception will cause ModuleType.__repr__ to ignore this method.
+ raise NotImplementedError
+
+
+class ResourceLoader(Loader):
+
+ """Abstract base class for loaders which can return data from their
+ back-end storage.
+
+ This ABC represents one of the optional protocols specified by PEP 302.
+
+ """
+
+ @abc.abstractmethod
+ def get_data(self, path):
+ """Abstract method which when implemented should return the bytes for
+ the specified path. The path must be a str."""
+ raise OSError
+
+
+class InspectLoader(Loader):
+
+ """Abstract base class for loaders which support inspection about the
+ modules they can load.
+
+ This ABC represents one of the optional protocols specified by PEP 302.
+
+ """
+
+ def is_package(self, fullname):
+ """Optional method which when implemented should return whether the
+ module is a package. The fullname is a str. Returns a bool.
+
+ Raises ImportError if the module cannot be found.
+ """
+ raise ImportError
+
+ def get_code(self, fullname):
+ """Method which returns the code object for the module.
+
+ The fullname is a str. Returns a types.CodeType if possible, else
+ returns None if a code object does not make sense
+ (e.g. built-in module). Raises ImportError if the module cannot be
+ found.
+ """
+ source = self.get_source(fullname)
+ if source is None:
+ return None
+ return self.source_to_code(source)
+
+ @abc.abstractmethod
+ def get_source(self, fullname):
+ """Abstract method which should return the source code for the
+ module. The fullname is a str. Returns a str.
+
+ Raises ImportError if the module cannot be found.
+ """
+ raise ImportError
+
+ @staticmethod
+ def source_to_code(data, path='<string>'):
+ """Compile 'data' into a code object.
+
+ The 'data' argument can be anything that compile() can handle. The 'path'
+ argument should be where the data was retrieved (when applicable)."""
+ return compile(data, path, 'exec', dont_inherit=True)
+
+ exec_module = _bootstrap_external._LoaderBasics.exec_module
+ load_module = _bootstrap_external._LoaderBasics.load_module
+
+_register(InspectLoader, machinery.BuiltinImporter, machinery.FrozenImporter)
+
+
+class ExecutionLoader(InspectLoader):
+
+ """Abstract base class for loaders that wish to support the execution of
+ modules as scripts.
+
+ This ABC represents one of the optional protocols specified in PEP 302.
+
+ """
+
+ @abc.abstractmethod
+ def get_filename(self, fullname):
+ """Abstract method which should return the value that __file__ is to be
+ set to.
+
+ Raises ImportError if the module cannot be found.
+ """
+ raise ImportError
+
+ def get_code(self, fullname):
+ """Method to return the code object for fullname.
+
+ Should return None if not applicable (e.g. built-in module).
+ Raise ImportError if the module cannot be found.
+ """
+ source = self.get_source(fullname)
+ if source is None:
+ return None
+ try:
+ path = self.get_filename(fullname)
+ except ImportError:
+ return self.source_to_code(source)
+ else:
+ return self.source_to_code(source, path)
+
+_register(ExecutionLoader, machinery.ExtensionFileLoader)
+
+
+class FileLoader(_bootstrap_external.FileLoader, ResourceLoader, ExecutionLoader):
+
+ """Abstract base class partially implementing the ResourceLoader and
+ ExecutionLoader ABCs."""
+
+_register(FileLoader, machinery.SourceFileLoader,
+ machinery.SourcelessFileLoader)
+
+
+class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLoader):
+
+ """Abstract base class for loading source code (and optionally any
+ corresponding bytecode).
+
+ To support loading from source code, the abstractmethods inherited from
+ ResourceLoader and ExecutionLoader need to be implemented. To also support
+ loading from bytecode, the optional methods specified directly by this ABC
+ are required.
+
+ Inherited abstractmethods not implemented in this ABC:
+
+ * ResourceLoader.get_data
+ * ExecutionLoader.get_filename
+
+ """
+
+ def path_mtime(self, path):
+ """Return the (int) modification time for the path (str)."""
+ if self.path_stats.__func__ is SourceLoader.path_stats:
+ raise OSError
+ return int(self.path_stats(path)['mtime'])
+
+ def path_stats(self, path):
+ """Return a metadata dict for the source pointed to by the path (str).
+ Possible keys:
+ - 'mtime' (mandatory) is the numeric timestamp of last source
+ code modification;
+ - 'size' (optional) is the size in bytes of the source code.
+ """
+ if self.path_mtime.__func__ is SourceLoader.path_mtime:
+ raise OSError
+ return {'mtime': self.path_mtime(path)}
+
+ def set_data(self, path, data):
+ """Write the bytes to the path (if possible).
+
+ Accepts a str path and data as bytes.
+
+ Any needed intermediary directories are to be created. If for some
+ reason the file cannot be written because of permissions, fail
+ silently.
+ """
+
+_register(SourceLoader, machinery.SourceFileLoader)
+
+
+class ResourceReader(metaclass=abc.ABCMeta):
+
+ """Abstract base class to provide resource-reading support.
+
+ Loaders that support resource reading are expected to implement
+ the ``get_resource_reader(fullname)`` method and have it either return None
+ or an object compatible with this ABC.
+ """
+
+ @abc.abstractmethod
+ def open_resource(self, resource):
+ """Return an opened, file-like object for binary reading.
+
+ The 'resource' argument is expected to represent only a file name
+ and thus not contain any subdirectory components.
+
+ If the resource cannot be found, FileNotFoundError is raised.
+ """
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def resource_path(self, resource):
+ """Return the file system path to the specified resource.
+
+ The 'resource' argument is expected to represent only a file name
+ and thus not contain any subdirectory components.
+
+ If the resource does not exist on the file system, raise
+ FileNotFoundError.
+ """
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def is_resource(self, name):
+ """Return True if the named 'name' is consider a resource."""
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def contents(self):
+ """Return an iterable of strings over the contents of the package."""
+ return []
+
+
+_register(ResourceReader, machinery.SourceFileLoader)
diff --git a/common/py3-stdlib/importlib/machinery.py b/common/py3-stdlib/importlib/machinery.py
new file mode 100644
index 0000000..1b2b5c9
--- /dev/null
+++ b/common/py3-stdlib/importlib/machinery.py
@@ -0,0 +1,21 @@
+"""The machinery of importlib: finders, loaders, hooks, etc."""
+
+import _imp
+
+from ._bootstrap import ModuleSpec
+from ._bootstrap import BuiltinImporter
+from ._bootstrap import FrozenImporter
+from ._bootstrap_external import (SOURCE_SUFFIXES, DEBUG_BYTECODE_SUFFIXES,
+ OPTIMIZED_BYTECODE_SUFFIXES, BYTECODE_SUFFIXES,
+ EXTENSION_SUFFIXES)
+from ._bootstrap_external import WindowsRegistryFinder
+from ._bootstrap_external import PathFinder
+from ._bootstrap_external import FileFinder
+from ._bootstrap_external import SourceFileLoader
+from ._bootstrap_external import SourcelessFileLoader
+from ._bootstrap_external import ExtensionFileLoader
+
+
+def all_suffixes():
+ """Returns a list of all recognized module suffixes for this process"""
+ return SOURCE_SUFFIXES + BYTECODE_SUFFIXES + EXTENSION_SUFFIXES
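+
+# For example, on a 64-bit Linux CPython 3.8 build all_suffixes() returns
+# something like (source, then bytecode, then extension suffixes):
+#
+#     ['.py', '.pyc', '.cpython-38-x86_64-linux-gnu.so', '.abi3.so', '.so']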
diff --git a/common/py3-stdlib/importlib/metadata.py b/common/py3-stdlib/importlib/metadata.py
new file mode 100644
index 0000000..831f593
--- /dev/null
+++ b/common/py3-stdlib/importlib/metadata.py
@@ -0,0 +1,566 @@
+import io
+import os
+import re
+import abc
+import csv
+import sys
+import email
+import pathlib
+import zipfile
+import operator
+import functools
+import itertools
+import posixpath
+import collections
+
+from configparser import ConfigParser
+from contextlib import suppress
+from importlib import import_module
+from importlib.abc import MetaPathFinder
+from itertools import starmap
+
+
+__all__ = [
+ 'Distribution',
+ 'DistributionFinder',
+ 'PackageNotFoundError',
+ 'distribution',
+ 'distributions',
+ 'entry_points',
+ 'files',
+ 'metadata',
+ 'requires',
+ 'version',
+ ]
+
+
+class PackageNotFoundError(ModuleNotFoundError):
+ """The package was not found."""
+
+
+class EntryPoint(
+ collections.namedtuple('EntryPointBase', 'name value group')):
+ """An entry point as defined by Python packaging conventions.
+
+ See `the packaging docs on entry points
+ <https://packaging.python.org/specifications/entry-points/>`_
+ for more information.
+ """
+
+ pattern = re.compile(
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P<extras>\[.*\])?\s*$'
+ )
+ """
+ A regular expression describing the syntax for an entry point,
+ which might look like:
+
+ - module
+ - package.module
+ - package.module:attribute
+ - package.module:object.attribute
+ - package.module:attr [extra1, extra2]
+
+ Other combinations are possible as well.
+
+ The expression is lenient about whitespace around the ':',
+ following the attr, and following any extras.
+ """
+
+ def load(self):
+ """Load the entry point from its definition. If only a module
+ is indicated by the value, return that module. Otherwise,
+ return the named object.
+ """
+ match = self.pattern.match(self.value)
+ module = import_module(match.group('module'))
+ attrs = filter(None, (match.group('attr') or '').split('.'))
+ return functools.reduce(getattr, attrs, module)
+
+ @property
+ def extras(self):
+ match = self.pattern.match(self.value)
+ return list(re.finditer(r'\w+', match.group('extras') or ''))
+
+ @classmethod
+ def _from_config(cls, config):
+ return [
+ cls(name, value, group)
+ for group in config.sections()
+ for name, value in config.items(group)
+ ]
+
+ @classmethod
+ def _from_text(cls, text):
+ config = ConfigParser(delimiters='=')
+ # case sensitive: https://stackoverflow.com/q/1611799/812183
+ config.optionxform = str
+ try:
+ config.read_string(text)
+ except AttributeError: # pragma: nocover
+ # Python 2 has no read_string
+ config.readfp(io.StringIO(text))
+ return EntryPoint._from_config(config)
+
+ def __iter__(self):
+ """
+ Supply iter so one may construct dicts of EntryPoints easily.
+ """
+ return iter((self.name, self))
+
+ def __reduce__(self):
+ return (
+ self.__class__,
+ (self.name, self.value, self.group),
+ )
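+
+# A hypothetical entry_points.txt fragment and the object _from_text() builds
+# from it; load() would import mypkg.cli and return its 'main' attribute:
+#
+#     [console_scripts]
+#     mytool = mypkg.cli:main
+#
+#     -> [EntryPoint(name='mytool', value='mypkg.cli:main', group='console_scripts')]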
+
+
+class PackagePath(pathlib.PurePosixPath):
+ """A reference to a path in a package"""
+
+ def read_text(self, encoding='utf-8'):
+ with self.locate().open(encoding=encoding) as stream:
+ return stream.read()
+
+ def read_binary(self):
+ with self.locate().open('rb') as stream:
+ return stream.read()
+
+ def locate(self):
+ """Return a path-like object for this path"""
+ return self.dist.locate_file(self)
+
+
+class FileHash:
+ def __init__(self, spec):
+ self.mode, _, self.value = spec.partition('=')
+
+ def __repr__(self):
+ return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
+
+
+class Distribution:
+ """A Python distribution package."""
+
+ @abc.abstractmethod
+ def read_text(self, filename):
+ """Attempt to load metadata file given by the name.
+
+ :param filename: The name of the file in the distribution info.
+ :return: The text if found, otherwise None.
+ """
+
+ @abc.abstractmethod
+ def locate_file(self, path):
+ """
+ Given a path to a file in this distribution, return a path
+ to it.
+ """
+
+ @classmethod
+ def from_name(cls, name):
+ """Return the Distribution for the given package name.
+
+ :param name: The name of the distribution package to search for.
+ :return: The Distribution instance (or subclass thereof) for the named
+ package, if found.
+ :raises PackageNotFoundError: When the named package's distribution
+ metadata cannot be found.
+ """
+ for resolver in cls._discover_resolvers():
+ dists = resolver(DistributionFinder.Context(name=name))
+ dist = next(dists, None)
+ if dist is not None:
+ return dist
+ else:
+ raise PackageNotFoundError(name)
+
+ @classmethod
+ def discover(cls, **kwargs):
+ """Return an iterable of Distribution objects for all packages.
+
+ Pass a ``context`` or pass keyword arguments for constructing
+ a context.
+
+ :context: A ``DistributionFinder.Context`` object.
+ :return: Iterable of Distribution objects for all packages.
+ """
+ context = kwargs.pop('context', None)
+ if context and kwargs:
+ raise ValueError("cannot accept context and kwargs")
+ context = context or DistributionFinder.Context(**kwargs)
+ return itertools.chain.from_iterable(
+ resolver(context)
+ for resolver in cls._discover_resolvers()
+ )
+
+ @staticmethod
+ def at(path):
+ """Return a Distribution for the indicated metadata path
+
+ :param path: a string or path-like object
+ :return: a concrete Distribution instance for the path
+ """
+ return PathDistribution(pathlib.Path(path))
+
+ @staticmethod
+ def _discover_resolvers():
+ """Search the meta_path for resolvers."""
+ declared = (
+ getattr(finder, 'find_distributions', None)
+ for finder in sys.meta_path
+ )
+ return filter(None, declared)
+
+ @property
+ def metadata(self):
+ """Return the parsed metadata for this Distribution.
+
+ The returned object will have keys that name the various bits of
+ metadata. See PEP 566 for details.
+ """
+ text = (
+ self.read_text('METADATA')
+ or self.read_text('PKG-INFO')
+ # This last clause is here to support old egg-info files. Its
+ # effect is to just end up using the PathDistribution's self._path
+ # (which points to the egg-info file) attribute unchanged.
+ or self.read_text('')
+ )
+ return email.message_from_string(text)
+
+ @property
+ def version(self):
+ """Return the 'Version' metadata for the distribution package."""
+ return self.metadata['Version']
+
+ @property
+ def entry_points(self):
+ return EntryPoint._from_text(self.read_text('entry_points.txt'))
+
+ @property
+ def files(self):
+ """Files in this distribution.
+
+ :return: List of PackagePath for this distribution or None
+
+ Result is `None` if the metadata file that enumerates files
+ (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
+ missing.
+ Result may be empty if the metadata exists but is empty.
+ """
+ file_lines = self._read_files_distinfo() or self._read_files_egginfo()
+
+ def make_file(name, hash=None, size_str=None):
+ result = PackagePath(name)
+ result.hash = FileHash(hash) if hash else None
+ result.size = int(size_str) if size_str else None
+ result.dist = self
+ return result
+
+ return file_lines and list(starmap(make_file, csv.reader(file_lines)))
+
+ def _read_files_distinfo(self):
+ """
+ Read the lines of RECORD
+ """
+ text = self.read_text('RECORD')
+ return text and text.splitlines()
+
+ def _read_files_egginfo(self):
+ """
+ SOURCES.txt might contain literal commas, so wrap each line
+ in quotes.
+ """
+ text = self.read_text('SOURCES.txt')
+ return text and map('"{}"'.format, text.splitlines())
+
+ @property
+ def requires(self):
+ """Generated requirements specified for this Distribution"""
+ reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
+ return reqs and list(reqs)
+
+ def _read_dist_info_reqs(self):
+ return self.metadata.get_all('Requires-Dist')
+
+ def _read_egg_info_reqs(self):
+ source = self.read_text('requires.txt')
+ return source and self._deps_from_requires_text(source)
+
+ @classmethod
+ def _deps_from_requires_text(cls, source):
+ section_pairs = cls._read_sections(source.splitlines())
+ sections = {
+ section: list(map(operator.itemgetter('line'), results))
+ for section, results in
+ itertools.groupby(section_pairs, operator.itemgetter('section'))
+ }
+ return cls._convert_egg_info_reqs_to_simple_reqs(sections)
+
+ @staticmethod
+ def _read_sections(lines):
+ section = None
+ for line in filter(None, lines):
+ section_match = re.match(r'\[(.*)\]$', line)
+ if section_match:
+ section = section_match.group(1)
+ continue
+            # Yield a snapshot of this iteration's locals; downstream code
+            # picks out the 'section' and 'line' keys.
+            yield locals()
+
+ @staticmethod
+ def _convert_egg_info_reqs_to_simple_reqs(sections):
+ """
+ Historically, setuptools would solicit and store 'extra'
+ requirements, including those with environment markers,
+ in separate sections. More modern tools expect each
+ dependency to be defined separately, with any relevant
+ extras and environment markers attached directly to that
+ requirement. This method converts the former to the
+ latter. See _test_deps_from_requires_text for an example.
+ """
+ def make_condition(name):
+ return name and 'extra == "{name}"'.format(name=name)
+
+ def parse_condition(section):
+ section = section or ''
+ extra, sep, markers = section.partition(':')
+ if extra and markers:
+ markers = '({markers})'.format(markers=markers)
+ conditions = list(filter(None, [markers, make_condition(extra)]))
+ return '; ' + ' and '.join(conditions) if conditions else ''
+
+ for section, deps in sections.items():
+ for dep in deps:
+ yield dep + parse_condition(section)
+
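+
+# Illustrative sketch (not part of the upstream module): shows how
+# Distribution._deps_from_requires_text() folds legacy requires.txt
+# sections into flat requirement strings with extras and markers attached.
+def _example_requires_txt_conversion():
+    source = '\n'.join([
+        'pytest',
+        '[docs]',
+        'sphinx',
+        '[testing:python_version<"3"]',
+        'mock',
+    ])
+    deps = list(Distribution._deps_from_requires_text(source))
+    # Expected:
+    #   ['pytest',
+    #    'sphinx; extra == "docs"',
+    #    'mock; (python_version<"3") and extra == "testing"']
+    return deps
+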
+
+class DistributionFinder(MetaPathFinder):
+ """
+ A MetaPathFinder capable of discovering installed distributions.
+ """
+
+ class Context:
+ """
+ Keyword arguments presented by the caller to
+ ``distributions()`` or ``Distribution.discover()``
+ to narrow the scope of a search for distributions
+ in all DistributionFinders.
+
+ Each DistributionFinder may expect any parameters
+ and should attempt to honor the canonical
+ parameters defined below when appropriate.
+ """
+
+ name = None
+ """
+ Specific name for which a distribution finder should match.
+ A name of ``None`` matches all distributions.
+ """
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @property
+ def path(self):
+ """
+ The path that a distribution finder should search.
+
+ Typically refers to Python package paths and defaults
+ to ``sys.path``.
+ """
+ return vars(self).get('path', sys.path)
+
+ @abc.abstractmethod
+ def find_distributions(self, context=Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching the ``context``,
+ a DistributionFinder.Context instance.
+ """
+
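+
+# Illustrative sketch (not part of the upstream module): a minimal
+# in-memory DistributionFinder.  It honors Context.name when set and
+# yields every known distribution otherwise; the mapping passed in is
+# assumed to hold Distribution instances keyed by name.  An instance
+# becomes active once appended to sys.meta_path.
+class _ExampleDictFinder(DistributionFinder):
+    """Serve Distribution objects out of a plain dict."""
+
+    def __init__(self, known):
+        self._known = known
+
+    def find_distributions(self, context=DistributionFinder.Context()):
+        if context.name is None:
+            return iter(self._known.values())
+        return (dist for name, dist in self._known.items()
+                if name == context.name)
+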
+
+class FastPath:
+ """
+ Micro-optimized class for searching a path for
+ children.
+ """
+
+ def __init__(self, root):
+ self.root = root
+ self.base = os.path.basename(root).lower()
+
+ def joinpath(self, child):
+ return pathlib.Path(self.root, child)
+
+ def children(self):
+ with suppress(Exception):
+ return os.listdir(self.root or '')
+ with suppress(Exception):
+ return self.zip_children()
+ return []
+
+ def zip_children(self):
+ zip_path = zipfile.Path(self.root)
+ names = zip_path.root.namelist()
+ self.joinpath = zip_path.joinpath
+
+ return (
+ posixpath.split(child)[0]
+ for child in names
+ )
+
+ def is_egg(self, search):
+ base = self.base
+ return (
+ base == search.versionless_egg_name
+ or base.startswith(search.prefix)
+ and base.endswith('.egg'))
+
+ def search(self, name):
+ for child in self.children():
+ n_low = child.lower()
+ if (n_low in name.exact_matches
+ or n_low.startswith(name.prefix)
+ and n_low.endswith(name.suffixes)
+ # legacy case:
+ or self.is_egg(name) and n_low == 'egg-info'):
+ yield self.joinpath(child)
+
+
+class Prepared:
+ """
+ A prepared search for metadata on a possibly-named package.
+ """
+ normalized = ''
+ prefix = ''
+ suffixes = '.dist-info', '.egg-info'
+    exact_matches = [''][:0]  # an empty list of strings
+ versionless_egg_name = ''
+
+ def __init__(self, name):
+ self.name = name
+ if name is None:
+ return
+ self.normalized = name.lower().replace('-', '_')
+ self.prefix = self.normalized + '-'
+ self.exact_matches = [
+ self.normalized + suffix for suffix in self.suffixes]
+ self.versionless_egg_name = self.normalized + '.egg'
+
+
+class MetadataPathFinder(DistributionFinder):
+ @classmethod
+ def find_distributions(cls, context=DistributionFinder.Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+        (or all names if ``None`` is given) along the paths in the list
+ of directories ``context.path``.
+ """
+ found = cls._search_paths(context.name, context.path)
+ return map(PathDistribution, found)
+
+ @classmethod
+ def _search_paths(cls, name, paths):
+ """Find metadata directories in paths heuristically."""
+ return itertools.chain.from_iterable(
+ path.search(Prepared(name))
+ for path in map(FastPath, paths)
+ )
+
+
+class PathDistribution(Distribution):
+ def __init__(self, path):
+ """Construct a distribution from a path to the metadata directory.
+
+        :param path: A pathlib.Path or similar object supporting
+                     .joinpath(), __truediv__, .parent, and .read_text().
+ """
+ self._path = path
+
+ def read_text(self, filename):
+ with suppress(FileNotFoundError, IsADirectoryError, KeyError,
+ NotADirectoryError, PermissionError):
+ return self._path.joinpath(filename).read_text(encoding='utf-8')
+ read_text.__doc__ = Distribution.read_text.__doc__
+
+ def locate_file(self, path):
+ return self._path.parent / path
+
+
+def distribution(distribution_name):
+ """Get the ``Distribution`` instance for the named package.
+
+ :param distribution_name: The name of the distribution package as a string.
+ :return: A ``Distribution`` instance (or subclass thereof).
+ """
+ return Distribution.from_name(distribution_name)
+
+
+def distributions(**kwargs):
+ """Get all ``Distribution`` instances in the current environment.
+
+ :return: An iterable of ``Distribution`` instances.
+ """
+ return Distribution.discover(**kwargs)
+
+
+def metadata(distribution_name):
+ """Get the metadata for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: An email.Message containing the parsed metadata.
+ """
+ return Distribution.from_name(distribution_name).metadata
+
+
+def version(distribution_name):
+ """Get the version string for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: The version string for the package as defined in the package's
+ "Version" metadata key.
+ """
+ return distribution(distribution_name).version
+
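+
+# Illustrative sketch (not part of the upstream module): the typical read
+# path, resolving a Distribution by name and reading its version.  The
+# distribution name 'examplepkg' is hypothetical.
+def _example_version_lookup(name='examplepkg'):
+    try:
+        dist = distribution(name)
+    except PackageNotFoundError:
+        return None
+    # version(name) is the equivalent one-liner.
+    return dist.version
+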
+
+def entry_points():
+ """Return EntryPoint objects for all installed packages.
+
+    :return: A dict mapping entry point group names to tuples of
+        EntryPoint objects.
+ """
+ eps = itertools.chain.from_iterable(
+ dist.entry_points for dist in distributions())
+ by_group = operator.attrgetter('group')
+ ordered = sorted(eps, key=by_group)
+ grouped = itertools.groupby(ordered, by_group)
+ return {
+ group: tuple(eps)
+ for group, eps in grouped
+ }
+
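+
+# Illustrative sketch (not part of the upstream module): entry_points()
+# returns a dict of group name -> tuple of EntryPoint, so locating a
+# console script is a lookup plus a scan.  The script name is hypothetical.
+def _example_console_script(script_name='example-cli'):
+    for ep in entry_points().get('console_scripts', ()):
+        if ep.name == script_name:
+            return ep.load()  # import and return the target callable
+    return None
+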
+
+def files(distribution_name):
+ """Return a list of files for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: List of files composing the distribution.
+ """
+ return distribution(distribution_name).files
+
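+
+# Illustrative sketch (not part of the upstream module): each entry from
+# files() is a PackagePath whose recorded size (and hash) may be None.
+# The distribution name 'examplepkg' is hypothetical.
+def _example_total_size(name='examplepkg'):
+    paths = files(name)
+    if paths is None:  # RECORD / SOURCES.txt is missing
+        return None
+    return sum(p.size or 0 for p in paths)
+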
+
+def requires(distribution_name):
+ """
+ Return a list of requirements for the named package.
+
+    :param distribution_name: The name of the distribution package to query.
+    :return: A list of requirement strings suitable for constructing
+        packaging.requirements.Requirement objects, or None if no
+        requirements metadata is present.
+ """
+ return distribution(distribution_name).requires
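+
+
+# Illustrative sketch (not part of the upstream module): a rough filter
+# that keeps only the unconditional dependencies, dropping any requirement
+# guarded by an 'extra ==' marker.  'examplepkg' is hypothetical.
+def _example_base_requirements(name='examplepkg'):
+    reqs = requires(name) or []
+    return [req for req in reqs if 'extra ==' not in req]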
diff --git a/common/py3-stdlib/importlib/resources.py b/common/py3-stdlib/importlib/resources.py
new file mode 100644
index 0000000..fc3a1c9
--- /dev/null
+++ b/common/py3-stdlib/importlib/resources.py
@@ -0,0 +1,259 @@
+import os
+import tempfile
+
+from . import abc as resources_abc
+from contextlib import contextmanager, suppress
+from importlib import import_module
+from importlib.abc import ResourceLoader
+from io import BytesIO, TextIOWrapper
+from pathlib import Path
+from types import ModuleType
+from typing import Iterable, Iterator, Optional, Set, Union # noqa: F401
+from typing import cast
+from typing.io import BinaryIO, TextIO
+from zipimport import ZipImportError
+
+
+__all__ = [
+ 'Package',
+ 'Resource',
+ 'contents',
+ 'is_resource',
+ 'open_binary',
+ 'open_text',
+ 'path',
+ 'read_binary',
+ 'read_text',
+ ]
+
+
+Package = Union[str, ModuleType]
+Resource = Union[str, os.PathLike]
+
+
+def _get_package(package) -> ModuleType:
+ """Take a package name or module object and return the module.
+
+ If a name, the module is imported. If the passed or imported module
+ object is not a package, raise an exception.
+ """
+ if hasattr(package, '__spec__'):
+ if package.__spec__.submodule_search_locations is None:
+ raise TypeError('{!r} is not a package'.format(
+ package.__spec__.name))
+ else:
+ return package
+ else:
+ module = import_module(package)
+ if module.__spec__.submodule_search_locations is None:
+ raise TypeError('{!r} is not a package'.format(package))
+ else:
+ return module
+
+
+def _normalize_path(path) -> str:
+ """Normalize a path by ensuring it is a string.
+
+ If the resulting string contains path separators, an exception is raised.
+ """
+ parent, file_name = os.path.split(path)
+ if parent:
+ raise ValueError('{!r} must be only a file name'.format(path))
+ else:
+ return file_name
+
+
+def _get_resource_reader(
+ package: ModuleType) -> Optional[resources_abc.ResourceReader]:
+ # Return the package's loader if it's a ResourceReader. We can't use
+    # an issubclass() check here because apparently abc's __subclasscheck__()
+ # hook wants to create a weak reference to the object, but
+ # zipimport.zipimporter does not support weak references, resulting in a
+ # TypeError. That seems terrible.
+ spec = package.__spec__
+ if hasattr(spec.loader, 'get_resource_reader'):
+ return cast(resources_abc.ResourceReader,
+ spec.loader.get_resource_reader(spec.name))
+ return None
+
+
+def _check_location(package):
+ if package.__spec__.origin is None or not package.__spec__.has_location:
+ raise FileNotFoundError(f'Package has no location {package!r}')
+
+
+def open_binary(package: Package, resource: Resource) -> BinaryIO:
+ """Return a file-like object opened for binary reading of the resource."""
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ return reader.open_resource(resource)
+ _check_location(package)
+ absolute_package_path = os.path.abspath(package.__spec__.origin)
+ package_path = os.path.dirname(absolute_package_path)
+ full_path = os.path.join(package_path, resource)
+ try:
+ return open(full_path, mode='rb')
+ except OSError:
+ # Just assume the loader is a resource loader; all the relevant
+ # importlib.machinery loaders are and an AttributeError for
+ # get_data() will make it clear what is needed from the loader.
+ loader = cast(ResourceLoader, package.__spec__.loader)
+ data = None
+ if hasattr(package.__spec__.loader, 'get_data'):
+ with suppress(OSError):
+ data = loader.get_data(full_path)
+ if data is None:
+ package_name = package.__spec__.name
+ message = '{!r} resource not found in {!r}'.format(
+ resource, package_name)
+ raise FileNotFoundError(message)
+ else:
+ return BytesIO(data)
+
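+
+# Illustrative sketch (not part of the upstream module): read the leading
+# bytes of a resource, e.g. to sniff a file signature.  The package and
+# resource names are hypothetical.
+def _example_sniff_magic(package='mypkg', resource='data.bin', count=4):
+    with open_binary(package, resource) as fp:
+        return fp.read(count)
+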
+
+def open_text(package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict') -> TextIO:
+ """Return a file-like object opened for text reading of the resource."""
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ return TextIOWrapper(reader.open_resource(resource), encoding, errors)
+ _check_location(package)
+ absolute_package_path = os.path.abspath(package.__spec__.origin)
+ package_path = os.path.dirname(absolute_package_path)
+ full_path = os.path.join(package_path, resource)
+ try:
+ return open(full_path, mode='r', encoding=encoding, errors=errors)
+ except OSError:
+ # Just assume the loader is a resource loader; all the relevant
+ # importlib.machinery loaders are and an AttributeError for
+ # get_data() will make it clear what is needed from the loader.
+ loader = cast(ResourceLoader, package.__spec__.loader)
+ data = None
+ if hasattr(package.__spec__.loader, 'get_data'):
+ with suppress(OSError):
+ data = loader.get_data(full_path)
+ if data is None:
+ package_name = package.__spec__.name
+ message = '{!r} resource not found in {!r}'.format(
+ resource, package_name)
+ raise FileNotFoundError(message)
+ else:
+ return TextIOWrapper(BytesIO(data), encoding, errors)
+
+
+def read_binary(package: Package, resource: Resource) -> bytes:
+ """Return the binary contents of the resource."""
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ with open_binary(package, resource) as fp:
+ return fp.read()
+
+
+def read_text(package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict') -> str:
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
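+
+# Illustrative sketch (not part of the upstream module): load a bundled
+# text resource and split it into lines.  The package and resource names
+# are hypothetical.
+def _example_load_lines(package='mypkg', resource='config.txt'):
+    return read_text(package, resource, encoding='utf-8').splitlines()
+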
+
+@contextmanager
+def path(package: Package, resource: Resource) -> Iterator[Path]:
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ resource = _normalize_path(resource)
+ package = _get_package(package)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ try:
+ yield Path(reader.resource_path(resource))
+ return
+ except FileNotFoundError:
+ pass
+ else:
+ _check_location(package)
+ # Fall-through for both the lack of resource_path() *and* if
+ # resource_path() raises FileNotFoundError.
+ package_directory = Path(package.__spec__.origin).parent
+ file_path = package_directory / resource
+ if file_path.exists():
+ yield file_path
+ else:
+ with open_binary(package, resource) as fp:
+ data = fp.read()
+ # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+ # blocks due to the need to close the temporary file to work on
+ # Windows properly.
+ fd, raw_path = tempfile.mkstemp()
+ try:
+ os.write(fd, data)
+ os.close(fd)
+ yield Path(raw_path)
+ finally:
+ try:
+ os.remove(raw_path)
+ except FileNotFoundError:
+ pass
+
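+
+# Illustrative sketch (not part of the upstream module): path() is the
+# tool to reach for when an API insists on a real filesystem path; the
+# yielded file may be a temporary extraction that is removed on exit.
+# The package and resource names are hypothetical.
+def _example_resource_size(package='mypkg', resource='data.bin'):
+    with path(package, resource) as file_path:
+        return file_path.stat().st_size
+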
+
+def is_resource(package: Package, name: str) -> bool:
+ """True if 'name' is a resource inside 'package'.
+
+ Directories are *not* resources.
+ """
+ package = _get_package(package)
+ _normalize_path(name)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ return reader.is_resource(name)
+ try:
+ package_contents = set(contents(package))
+ except (NotADirectoryError, FileNotFoundError):
+ return False
+ if name not in package_contents:
+ return False
+    # Just because the given name lives as an entry in the package's
+ # contents doesn't necessarily mean it's a resource. Directories are not
+ # resources, so let's try to find out if it's a directory or not.
+ path = Path(package.__spec__.origin).parent / name
+ return path.is_file()
+
+
+def contents(package: Package) -> Iterable[str]:
+ """Return an iterable of entries in 'package'.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ package = _get_package(package)
+ reader = _get_resource_reader(package)
+ if reader is not None:
+ return reader.contents()
+ # Is the package a namespace package? By definition, namespace packages
+ # cannot have resources. We could use _check_location() and catch the
+ # exception, but that's extra work, so just inline the check.
+ elif package.__spec__.origin is None or not package.__spec__.has_location:
+ return ()
+ else:
+ package_directory = Path(package.__spec__.origin).parent
+ return os.listdir(package_directory)
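+
+
+# Illustrative sketch (not part of the upstream module): combine
+# contents() and is_resource() to list only the readable resources of a
+# package, skipping subdirectories.  'mypkg' is hypothetical.
+def _example_list_resources(package='mypkg'):
+    return [name for name in contents(package)
+            if is_resource(package, name)]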
diff --git a/common/py3-stdlib/importlib/util.py b/common/py3-stdlib/importlib/util.py
new file mode 100644
index 0000000..201e0f4
--- /dev/null
+++ b/common/py3-stdlib/importlib/util.py
@@ -0,0 +1,300 @@
+"""Utility code for constructing importers, etc."""
+from . import abc
+from ._bootstrap import module_from_spec
+from ._bootstrap import _resolve_name
+from ._bootstrap import spec_from_loader
+from ._bootstrap import _find_spec
+from ._bootstrap_external import MAGIC_NUMBER
+from ._bootstrap_external import _RAW_MAGIC_NUMBER
+from ._bootstrap_external import cache_from_source
+from ._bootstrap_external import decode_source
+from ._bootstrap_external import source_from_cache
+from ._bootstrap_external import spec_from_file_location
+
+from contextlib import contextmanager
+import _imp
+import functools
+import sys
+import types
+import warnings
+
+
+def source_hash(source_bytes):
+ "Return the hash of *source_bytes* as used in hash-based pyc files."
+ return _imp.source_hash(_RAW_MAGIC_NUMBER, source_bytes)
+
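+
+# Illustrative sketch (not part of the upstream module): compare the
+# 8-byte hash embedded in a checked, hash-based .pyc (PEP 552) against
+# the hash of the current source to decide whether the cache is stale.
+def _example_pyc_is_current(source_path, expected_hash):
+    with open(source_path, 'rb') as fp:
+        return source_hash(fp.read()) == expected_hash
+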
+
+def resolve_name(name, package):
+ """Resolve a relative module name to an absolute one."""
+ if not name.startswith('.'):
+ return name
+ elif not package:
+ raise ValueError(f'no package specified for {repr(name)} '
+ '(required for relative module names)')
+ level = 0
+ for character in name:
+ if character != '.':
+ break
+ level += 1
+ return _resolve_name(name[level:], package, level)
+
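+
+# Illustrative sketch (not part of the upstream module): one leading dot
+# means "the current package"; each additional dot climbs one level.
+def _example_resolve_name():
+    assert resolve_name('.sibling', 'pkg.sub') == 'pkg.sub.sibling'
+    assert resolve_name('..cousin', 'pkg.sub') == 'pkg.cousin'
+    assert resolve_name('json', 'pkg.sub') == 'json'  # already absolute
+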
+
+def _find_spec_from_path(name, path=None):
+ """Return the spec for the specified module.
+
+ First, sys.modules is checked to see if the module was already imported. If
+ so, then sys.modules[name].__spec__ is returned. If that happens to be
+ set to None, then ValueError is raised. If the module is not in
+ sys.modules, then sys.meta_path is searched for a suitable spec with the
+ value of 'path' given to the finders. None is returned if no spec could
+ be found.
+
+ Dotted names do not have their parent packages implicitly imported. You will
+ most likely need to explicitly import all parent packages in the proper
+ order for a submodule to get the correct spec.
+
+ """
+ if name not in sys.modules:
+ return _find_spec(name, path)
+ else:
+ module = sys.modules[name]
+ if module is None:
+ return None
+ try:
+ spec = module.__spec__
+ except AttributeError:
+ raise ValueError('{}.__spec__ is not set'.format(name)) from None
+ else:
+ if spec is None:
+ raise ValueError('{}.__spec__ is None'.format(name))
+ return spec
+
+
+def find_spec(name, package=None):
+ """Return the spec for the specified module.
+
+ First, sys.modules is checked to see if the module was already imported. If
+ so, then sys.modules[name].__spec__ is returned. If that happens to be
+ set to None, then ValueError is raised. If the module is not in
+ sys.modules, then sys.meta_path is searched for a suitable spec with the
+ value of 'path' given to the finders. None is returned if no spec could
+ be found.
+
+    If the name is for a submodule (contains a dot), the parent module is
+ automatically imported.
+
+ The name and package arguments work the same as importlib.import_module().
+ In other words, relative module names (with leading dots) work.
+
+ """
+ fullname = resolve_name(name, package) if name.startswith('.') else name
+ if fullname not in sys.modules:
+ parent_name = fullname.rpartition('.')[0]
+ if parent_name:
+ parent = __import__(parent_name, fromlist=['__path__'])
+ try:
+ parent_path = parent.__path__
+ except AttributeError as e:
+ raise ModuleNotFoundError(
+ f"__path__ attribute not found on {parent_name!r} "
+ f"while trying to find {fullname!r}", name=fullname) from e
+ else:
+ parent_path = None
+ return _find_spec(fullname, parent_path)
+ else:
+ module = sys.modules[fullname]
+ if module is None:
+ return None
+ try:
+ spec = module.__spec__
+ except AttributeError:
+ raise ValueError('{}.__spec__ is not set'.format(name)) from None
+ else:
+ if spec is None:
+ raise ValueError('{}.__spec__ is None'.format(name))
+ return spec
+
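+
+# Illustrative sketch (not part of the upstream module): the common idiom
+# for checking importability without importing the module itself.  A
+# missing parent package surfaces as ModuleNotFoundError.
+def _example_has_module(name):
+    try:
+        return find_spec(name) is not None
+    except ModuleNotFoundError:
+        return False
+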
+
+@contextmanager
+def _module_to_load(name):
+ is_reload = name in sys.modules
+
+ module = sys.modules.get(name)
+ if not is_reload:
+ # This must be done before open() is called as the 'io' module
+ # implicitly imports 'locale' and would otherwise trigger an
+ # infinite loop.
+ module = type(sys)(name)
+ # This must be done before putting the module in sys.modules
+ # (otherwise an optimization shortcut in import.c becomes wrong)
+ module.__initializing__ = True
+ sys.modules[name] = module
+ try:
+ yield module
+ except Exception:
+ if not is_reload:
+ try:
+ del sys.modules[name]
+ except KeyError:
+ pass
+ finally:
+ module.__initializing__ = False
+
+
+def set_package(fxn):
+ """Set __package__ on the returned module.
+
+ This function is deprecated.
+
+ """
+ @functools.wraps(fxn)
+ def set_package_wrapper(*args, **kwargs):
+ warnings.warn('The import system now takes care of this automatically.',
+ DeprecationWarning, stacklevel=2)
+ module = fxn(*args, **kwargs)
+ if getattr(module, '__package__', None) is None:
+ module.__package__ = module.__name__
+ if not hasattr(module, '__path__'):
+ module.__package__ = module.__package__.rpartition('.')[0]
+ return module
+ return set_package_wrapper
+
+
+def set_loader(fxn):
+ """Set __loader__ on the returned module.
+
+ This function is deprecated.
+
+ """
+ @functools.wraps(fxn)
+ def set_loader_wrapper(self, *args, **kwargs):
+ warnings.warn('The import system now takes care of this automatically.',
+ DeprecationWarning, stacklevel=2)
+ module = fxn(self, *args, **kwargs)
+ if getattr(module, '__loader__', None) is None:
+ module.__loader__ = self
+ return module
+ return set_loader_wrapper
+
+
+def module_for_loader(fxn):
+ """Decorator to handle selecting the proper module for loaders.
+
+ The decorated function is passed the module to use instead of the module
+ name. The module passed in to the function is either from sys.modules if
+ it already exists or is a new module. If the module is new, then __name__
+    is set to the first argument to the method, __loader__ is set to self,
+    and __package__ is set accordingly (if self.is_package() is defined)
+    before the module is passed to the decorated function (if
+    self.is_package() does not work for the module, __package__ is set
+    post-load).
+
+ If an exception is raised and the decorator created the module it is
+ subsequently removed from sys.modules.
+
+ The decorator assumes that the decorated function takes the module name as
+ the second argument.
+
+ """
+ warnings.warn('The import system now takes care of this automatically.',
+ DeprecationWarning, stacklevel=2)
+ @functools.wraps(fxn)
+ def module_for_loader_wrapper(self, fullname, *args, **kwargs):
+ with _module_to_load(fullname) as module:
+ module.__loader__ = self
+ try:
+ is_package = self.is_package(fullname)
+ except (ImportError, AttributeError):
+ pass
+ else:
+ if is_package:
+ module.__package__ = fullname
+ else:
+ module.__package__ = fullname.rpartition('.')[0]
+ # If __package__ was not set above, __import__() will do it later.
+ return fxn(self, module, *args, **kwargs)
+
+ return module_for_loader_wrapper
+
+
+class _LazyModule(types.ModuleType):
+
+ """A subclass of the module type which triggers loading upon attribute access."""
+
+ def __getattribute__(self, attr):
+ """Trigger the load of the module and return the attribute."""
+ # All module metadata must be garnered from __spec__ in order to avoid
+ # using mutated values.
+ # Stop triggering this method.
+ self.__class__ = types.ModuleType
+ # Get the original name to make sure no object substitution occurred
+ # in sys.modules.
+ original_name = self.__spec__.name
+ # Figure out exactly what attributes were mutated between the creation
+ # of the module and now.
+ attrs_then = self.__spec__.loader_state['__dict__']
+ original_type = self.__spec__.loader_state['__class__']
+ attrs_now = self.__dict__
+ attrs_updated = {}
+ for key, value in attrs_now.items():
+ # Code that set the attribute may have kept a reference to the
+ # assigned object, making identity more important than equality.
+ if key not in attrs_then:
+ attrs_updated[key] = value
+ elif id(attrs_now[key]) != id(attrs_then[key]):
+ attrs_updated[key] = value
+ self.__spec__.loader.exec_module(self)
+ # If exec_module() was used directly there is no guarantee the module
+ # object was put into sys.modules.
+ if original_name in sys.modules:
+ if id(self) != id(sys.modules[original_name]):
+ raise ValueError(f"module object for {original_name!r} "
+ "substituted in sys.modules during a lazy "
+ "load")
+ # Update after loading since that's what would happen in an eager
+ # loading situation.
+ self.__dict__.update(attrs_updated)
+ return getattr(self, attr)
+
+ def __delattr__(self, attr):
+ """Trigger the load and then perform the deletion."""
+ # To trigger the load and raise an exception if the attribute
+ # doesn't exist.
+ self.__getattribute__(attr)
+ delattr(self, attr)
+
+
+class LazyLoader(abc.Loader):
+
+ """A loader that creates a module which defers loading until attribute access."""
+
+ @staticmethod
+ def __check_eager_loader(loader):
+ if not hasattr(loader, 'exec_module'):
+ raise TypeError('loader must define exec_module()')
+
+ @classmethod
+ def factory(cls, loader):
+ """Construct a callable which returns the eager loader made lazy."""
+ cls.__check_eager_loader(loader)
+ return lambda *args, **kwargs: cls(loader(*args, **kwargs))
+
+ def __init__(self, loader):
+ self.__check_eager_loader(loader)
+ self.loader = loader
+
+ def create_module(self, spec):
+ return self.loader.create_module(spec)
+
+ def exec_module(self, module):
+ """Make the module load lazily."""
+ module.__spec__.loader = self.loader
+ module.__loader__ = self.loader
+ # Don't need to worry about deep-copying as trying to set an attribute
+ # on an object would have triggered the load,
+ # e.g. ``module.__spec__.loader = None`` would trigger a load from
+ # trying to access module.__spec__.
+ loader_state = {}
+ loader_state['__dict__'] = module.__dict__.copy()
+ loader_state['__class__'] = module.__class__
+ module.__spec__.loader_state = loader_state
+ module.__class__ = _LazyModule
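+
+
+# Illustrative sketch (not part of the upstream module): the usual recipe
+# for a lazy import built on LazyLoader: resolve the spec eagerly, wrap
+# its loader, and register the unexecuted module so the real import work
+# happens on first attribute access.
+def _example_lazy_import(name):
+    spec = find_spec(name)
+    if spec is None:
+        raise ModuleNotFoundError(f'No module named {name!r}', name=name)
+    spec.loader = LazyLoader(spec.loader)
+    module = module_from_spec(spec)
+    sys.modules[name] = module
+    spec.loader.exec_module(module)
+    return module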