"""Package resource API
--------------------

A resource is a logical file contained within a package, or a logical
subdirectory thereof. The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is. Do not use os.path operations to manipulate resource
names being passed into the API.

The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files. It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""

import sys, os, zipimport, time, re, imp, new, pkgutil  # XXX
from sets import ImmutableSet
from os import utime, rename, unlink  # capture these to bypass sandboxing
from os import open as os_open

def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
        except ValueError:
            pass  # not Mac OS X
    return plat

__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]

class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""
    def __repr__(self):
        return self.__class__.__name__ + repr(self.args)

class VersionConflict(ResolutionError):
    """An already-installed version conflicts with the requested version"""

class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""

_provider_factories = {}
PY_MAJOR = sys.version[:3]
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1

def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory

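# Example (illustrative sketch, not part of this module): registering a
# provider factory for a custom loader type. ``MyLoader`` and ``MyProvider``
# are hypothetical names; a real provider would implement the
# IResourceProvider methods defined below.
#
#     class MyLoader(object):
#         """A custom PEP 302 loader."""
#
#     class MyProvider(NullProvider):
#         """Resource/metadata access for modules loaded by MyLoader."""
#
#     register_loader_type(MyLoader, MyProvider)
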
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)

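# Example (illustrative): ``get_provider`` accepts either an importable
# module name or a ``Requirement``. ``MyProject`` is a hypothetical name:
#
#     provider = get_provider('pkg_resources')                 # by module name
#     provider = get_provider(Requirement.parse('MyProject'))  # by requirement
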
def _macosx_vers(_cache=[]):
    if not _cache:
        info = os.popen('/usr/bin/sw_vers').read().splitlines()
        for line in info:
            key, value = line.split(None, 1)
            if key == 'ProductVersion:':
                _cache.append(value.strip().split("."))
                break
        else:
            raise ValueError, "couldn't find ProductVersion in sw_vers output"
    return _cache[0]

def _macosx_arch(machine):
    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)

def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        try:
            version = _macosx_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
                                        _macosx_arch(machine))
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    return plat

macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform  # XXX backward compat

def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        return True  # easy case

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)

        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macosx designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
                if dversion == 7 and macosversion >= "10.3" or \
                   dversion == 8 and macosversion >= "10.4":
                    #import warnings
                    #warnings.warn("Mac eggs should be rebuilt to "
                    #    "use the macosx designation instead of darwin.",
                    #    category=DeprecationWarning)
                    return True
            return False  # egg isn't macosx or legacy darwin

        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or \
           provMac.group(3) != reqMac.group(3):
            return False

        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False

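# Examples (illustrative) of the rules above:
#
#     compatible_platforms('macosx-10.3-ppc', 'macosx-10.4-ppc')  # True:
#         # same major version/machine, required OS update >= provided
#     compatible_platforms('macosx-10.5-ppc', 'macosx-10.4-ppc')  # False:
#         # egg needs a newer OS update than the required platform has
#     compatible_platforms('darwin-8.0.0-Power_Macintosh',
#                          'macosx-10.4-ppc')                     # True:
#         # legacy darwin designation accepted for pre-0.6 eggs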

def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script  # backward compatibility

def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    if isinstance(dist, basestring): dist = Requirement.parse(dist)
    if isinstance(dist, Requirement): dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist

def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    return get_distribution(dist).load_entry_point(group, name)

def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    return get_distribution(dist).get_entry_map(group)

def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return get_distribution(dist).get_entry_info(group, name)

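# Example (illustrative): loading an entry point. The project and script
# names are hypothetical; 'console_scripts' is the conventional group for
# command-line scripts:
#
#     main = load_entry_point('MyProject', 'console_scripts', 'my-command')
#     main()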

try:
    from pkgutil import get_importer
except ImportError:
    import _pkgutil as pkgutil
    get_importer = pkgutil.get_importer
else:
    import pkgutil


class IMetadataProvider:

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""


class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""


class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        self.entries = []
        self.entry_keys = {}
        self.by_key = {}
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist, req)  # XXX add more info
        else:
            return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

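    # Example (illustrative): a simple plugin-discovery loop. The entry
    # point group name 'myapp.plugins' is hypothetical:
    #
    #     for ep in working_set.iter_entry_points('myapp.plugins'):
    #         plugin = ep.load()  # import and return the advertised object
    #         plugin()
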
    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set. If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry)

        if entry is None:
            entry = dist.location
        keys = self.entry_keys.setdefault(entry, [])

        if dist.key in self.by_key:
            return  # ignore hidden distros

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)

        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.
        """

        requirements = list(requirements)[::-1]  # set up the stack
        processed = {}   # set of processed requirements
        best = {}        # key -> dist
        to_activate = []

        while requirements:
            req = requirements.pop(0)  # process dependencies breadth-first
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue
            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None:
                    if env is None:
                        env = Environment(self.entries)
                    dist = best[req.key] = env.best_match(req, self, installer)
                    if dist is None:
                        raise DistributionNotFound(req)  # XXX put more info here
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                raise VersionConflict(dist, req)  # XXX put more info here
            requirements.extend(dist.requires(req.extras)[::-1])
            processed[req] = True

        return to_activate  # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
    ):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions)  # add plugins+libs to sys.path
            print "Couldn't load", errors        # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        that contains all currently-available distributions. If `full_env` is
        not supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        plugin_projects.sort()  # scan project names in alphabetic order

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        shadow_set = self.__class__([])
        map(shadow_set.add, self)  # put all our entries in shadow_set

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError, v:
                    error_info[dist] = v  # save error info
                    if fallback:
                        continue  # try the next older version of project
                    else:
                        break  # give up on this project, keep going

                else:
                    map(shadow_set.add, resolvees)
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """

        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

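    # Example (illustrative): activating a requirement at runtime. The
    # project name and version are hypothetical:
    #
    #     for dist in working_set.require('MyProject>=1.2'):
    #         print 'activated', dist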

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        for dist in self:
            callback(dist)

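    # Example (illustrative): log every distribution as it becomes active.
    # Existing distributions are reported immediately, future ones on add():
    #
    #     def _on_activate(dist):
    #         print 'activated:', dist
    #
    #     working_set.subscribe(_on_activate)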

    def _added_new(self, dist):
        for callback in self.callbacks:
            callback(dist)


class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    def __init__(self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        self._distmap = {}
        self._cache = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
                or dist.py_version == self.python) \
            and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        try:
            return self._cache[project_name]
        except KeyError:
            project_name = project_name.lower()
            if project_name not in self._distmap:
                return []

        if project_name not in self._cache:
            dists = self._cache[project_name] = self._distmap[project_name]
            _sort_dists(dists)

        return self._cache[project_name]

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                if dist.key in self._cache:
                    _sort_dists(self._cache[dist.key])

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`. If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        for dist in self[req.key]:
            if dist in req:
                return dist
        return self.obtain(req, installer)  # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

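    # Example (illustrative sketch): ``obtain()`` is the hook subclasses
    # override to fetch missing distributions. ``MyIndex`` and ``fetch()``
    # are hypothetical names, not part of this module:
    #
    #     class MyIndex(Environment):
    #         def obtain(self, requirement, installer=None):
    #             dist = fetch(requirement)  # e.g. download from an index
    #             if dist is not None:
    #                 return dist
    #             return Environment.obtain(self, requirement, installer)
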
    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            if self[key]: yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new


AvailableDistributions = Environment  # XXX backward compatibility


class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """


class ResourceManager:
    """Manage resource extraction and packages"""
    extraction_path = None

    def __init__(self):
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

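    # Example (illustrative): reading a package resource. ``mypkg`` and
    # ``data/config.txt`` are hypothetical; note the '/' separator mandated
    # by the module docstring -- never use os.path on resource names:
    #
    #     manager = ResourceManager()
    #     text = manager.resource_string('mypkg', 'data/config.txt')
    #     if manager.resource_isdir('mypkg', 'data'):
    #         names = manager.resource_listdir('mypkg', 'data')
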
    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        err = ExtractionError("""Can't extract file(s) to egg cache

The following error occurred while trying to extract file(s) to the Python egg
cache:

  %s

The Python egg cache directory is currently set to:

  %s

Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        )
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            ensure_directory(target_path)
        except:
            self.extraction_error()

        self.cached_files[target_path] = 1
        return target_path

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        # XXX

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX


def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory. On all other systems, it's "~/.python-eggs".
    """
    try:
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass

    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data'  # XXX this may be locale-specific!
    app_homes = [
        (('APPDATA',), None),  # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE', 'HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        (('WINDIR',), app_data),  # 95/98/ME
    ]

    for keys, subdir in app_homes:
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                break
        else:
            if subdir:
                dirname = os.path.join(dirname, subdir)
            return os.path.join(dirname, 'Python-Eggs')
    else:
        raise RuntimeError(
            "Please set the PYTHON_EGG_CACHE environment variable"
        )

def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub('[^A-Za-z0-9.]+', '-', name)


def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    version = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', version)


def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric/. characters are replaced with a single '_',
    and the result is always lowercased.
    """
    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()


def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.replace('-', '_')
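
# Examples (illustrative) of the sanitizers above:
#
#     safe_name('The PEAK Kernel')  # -> 'The-PEAK-Kernel'
#     safe_version('1.2 alpha')     # -> '1.2.alpha'
#     safe_extra('Foo Bar')         # -> 'foo_bar'
#     to_filename('my-project')     # -> 'my_project'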


class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        return StringIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        return self.egg_info and self._has(self._fn(self.egg_info, name))

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        return self._get(self._fn(self.egg_info, name))

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            execfile(script_filename, namespace, namespace)
        else:
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec script_code in namespace, namespace

    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        return os.path.join(base, *resource_name.split('/'))

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )

register_loader_type(object, NullProvider)


class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        old = None
        while path != old:
            if path.lower().endswith('.egg'):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)


class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        stream = open(path, 'rb')
        try:
            return stream.read()
        finally:
            stream.close()

register_loader_type(type(None), DefaultProvider)


class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    _isdir = _has = lambda self, path: False
    _get = lambda self, path: ''
    _listdir = lambda self, path: []
    module_path = None

    def __init__(self):
        pass

empty_provider = EmptyProvider()


class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None

    def __init__(self, module):
        EggProvider.__init__(self, module)
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre + zip_path  # pseudo-fs path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):

        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            return os.path.dirname(last)  # return the extracted directory name

        zip_stat = self.zipinfo[zip_path]
        t, d, size = zip_stat[5], zip_stat[6], zip_stat[3]
        date_time = (
            (d >> 9) + 1980, (d >> 5) & 0xF, d & 0x1F,                # ymd
            (t & 0xFFFF) >> 11, (t >> 5) & 0x3F, (t & 0x1F) * 2,      # hms
            0, 0, -1                                                  # etc.
        )
        timestamp = time.mktime(date_time)

        try:
            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if os.path.isfile(real_path):
                stat = os.stat(real_path)
                if stat.st_size == size and stat.st_mtime == timestamp:
                    # size and stamp match, don't bother extracting
                    return real_path

            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    stat = os.stat(real_path)

                    if stat.st_size == size and stat.st_mtime == timestamp:
                        # size and stamp match, somebody did it just ahead of
                        # us, so we're done
                        return real_path
                    elif os.name == 'nt':  # Windows, del old file and retry
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            manager.extraction_error()  # report a user-friendly error

        return real_path

    def _get_eager_resources(self):
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind
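
    # Illustrative sketch of the index built above: for an archive containing
    # 'EGG-INFO/PKG-INFO' and 'mypkg/__init__.py' (with os.sep == '/'), the
    # resulting mapping of directory -> children would be:
    #
    #     {'': ['EGG-INFO', 'mypkg'],
    #      'EGG-INFO': ['PKG-INFO'],
    #      'mypkg': ['__init__.py']}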

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))

register_loader_type(zipimport.zipimporter, ZipProvider)


class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def has_metadata(self, name):
        return name == 'PKG-INFO'

    def get_metadata(self, name):
        if name == 'PKG-INFO':
            return open(self.path, 'rU').read()
        raise KeyError("No metadata except PKG-INFO is available")

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))


class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        self.module_path = path
        self.egg_info = egg_info


class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        if importer.prefix:
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()


_distribution_finders = {}

def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    _distribution_finders[importer_type] = distribution_finder


def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)

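# Example (illustrative): scanning a directory for distributions. The path
# is hypothetical:
#
#     for dist in find_distributions('/path/to/plugins'):
#         print dist.project_name, dist.version
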
def find_in_zip(importer, path_item, only=False):
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        return  # don't yield nested distros
    for subitem in metadata.resource_listdir('/'):
        if subitem.endswith('.egg'):
            subpath = os.path.join(path_item, subitem)
            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
                yield dist

register_finder(zipimport.zipimporter, find_in_zip)

def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand"""
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return StringIO(*args,**kw)

def find_nothing(importer, path_item, only=False):
    return ()
register_finder(object, find_nothing)

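# An illustrative sketch of registering a finder for some hypothetical
# importer type ``MyImporter`` (not part of this module)::
#
#     def find_my_distros(importer, path_item, only=False):
#         return ()   # yield or return Distribution instances here
#
#     register_finder(MyImporter, find_my_distros)
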
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if os.path.isdir(path_item):
        if path_item.lower().endswith('.egg'):
            # unpacked egg
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item, entry, metadata, precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    for dist in find_distributions(os.path.join(path_item, entry)):
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    for line in file(os.path.join(path_item, entry)):
                        if not line.strip(): continue
                        for item in find_distributions(line.rstrip()):
                            yield item

register_finder(pkgutil.ImpImporter, find_on_path)

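# Note: a ``ProjectName.egg-link`` file (as handled above) is expected to
# contain one non-blank line naming a development egg's directory, e.g.::
#
#     /home/user/projects/ProjectName
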
_namespace_handlers = {}
_namespace_packages = {}

def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path
    item handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer,path_entry,moduleName,module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    _namespace_handlers[importer_type] = namespace_handler

def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        module = sys.modules[packageName] = new.module(packageName)
        module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
    return subpath

def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path, parent = sys.path, None
        if '.' in packageName:
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are
        # added, they can be updated
        _namespace_packages.setdefault(parent,[]).append(packageName)
        _namespace_packages.setdefault(packageName,[])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        imp.release_lock()

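# Typical usage sketch: the ``__init__.py`` of each portion of a namespace
# package contains just::
#
#     from pkg_resources import declare_namespace
#     declare_namespace(__name__)
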
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent,()):
            subpath = _handle_ns(package, path_item)
            if subpath: fixup_namespace_packages(subpath, package)
    finally:
        imp.release_lock()

def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item)==normalized:
            break
    else:
        # Only return the path if it's not already there
        return subpath

register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)


def null_ns_handler(importer, path_item, packageName, module):
    return None

register_namespace_handler(object, null_ns_handler)


def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    return os.path.normcase(os.path.realpath(filename))

def _normalize_cached(filename, _cache={}):
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result

def _set_parent_ns(packageName):
    parts = packageName.split('.')
    name = parts.pop()
    if parts:
        parent = '.'.join(parts)
        setattr(sys.modules[parent], name, sys.modules[packageName])


def yield_lines(strs):
    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
    if isinstance(strs,basestring):
        for s in strs.splitlines():
            s = s.strip()
            if s and not s.startswith('#'):     # skip blank lines/comments
                yield s
    else:
        for ss in strs:
            for s in yield_lines(ss):
                yield s

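# An illustrative sketch::
#
#     list(yield_lines("first\n  # a comment\n\nsecond"))
#     # -> ['first', 'second']
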
LINE_END = re.compile(r"\s*(#.*)?$").match          # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match     # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match       # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match  # ver. info
COMMA = re.compile(r"\s*,").match                   # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
EGG_NAME = re.compile(
    r"(?P<name>[^-]+)"
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE
).match

component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c', '-':'final-', 'rc':'c'}.get

def _parse_version_parts(s):
    for part in component_re.split(s):
        part = replace(part,part)
        if not part or part=='.':
            continue
        if part[:1] in '0123456789':
            yield part.zfill(8)     # pad for numeric comparison
        else:
            yield '*'+part

    yield '*final'  # ensure that alpha/beta/candidate are before final

def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it
    behaves the same; otherwise it acts like a slightly-smarter LooseVersion.
    It is *possible* to create pathological version coding schemes that will
    fool this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings.  Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings.  Dots are
    dropped, but dashes are retained.  Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4".  Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represent a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release
    versions, so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does
    not contain them.
    """
    parts = []
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            if part<'*final':   # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
                parts.pop()
        parts.append(part)
    return tuple(parts)

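# Illustrative comparisons (each of these holds under the rules above)::
#
#     parse_version('2.4')    <  parse_version('2.4.1')
#     parse_version('2.4a1')  <  parse_version('2.4')      # pre-release
#     parse_version('2.4-1')  >  parse_version('2.4')      # patch level
#     parse_version('2.4rc1') == parse_version('2.4c1')    # 'rc' -> 'c'
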
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            try:
                entry = getattr(entry,attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry,attr))
        return entry

    def require(self, env=None, installer=None):
        if not self.dist:
            # can't resolve any requirements without a source distribution
            raise UnknownExtra("Can't require() without a distribution", self)
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))

    #@classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional.
        """
        try:
            attrs = extras = ()
            name,value = src.split('=',1)
            if '[' in value:
                value,extras = value.split('[',1)
                req = Requirement.parse("x["+extras)
                if req.specs: raise ValueError
                extras = req.extras
            if ':' in value:
                value,attrs = value.split(':',1)
                if not MODULE(attrs.rstrip()):
                    raise ValueError
                attrs = attrs.rstrip().split('.')
        except ValueError:
            raise ValueError(
                "EntryPoint must be in 'name=module:attrs [extras]' format",
                src
            )
        else:
            return cls(name.strip(), value.lstrip(), attrs, extras, dist)

    parse = classmethod(parse)

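    # An illustrative sketch of ``parse`` (the names are hypothetical)::
    #
    #     ep = EntryPoint.parse("foo = my.module:MyClass.factory [extra1]")
    #     print ep.name, ep.module_name   # foo my.module
    #     print ep.attrs, ep.extras       # ('MyClass', 'factory') ('extra1',)
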
    #@classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    parse_group = classmethod(parse_group)

    #@classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data,dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

    parse_map = classmethod(parse_map)

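    # An illustrative sketch of ``parse_map``, fed the text of an
    # ``entry_points.txt`` (contents hypothetical)::
    #
    #     maps = EntryPoint.parse_map("[console_scripts]\nfoo = my.module:main")
    #     maps['console_scripts']['foo']   # -> an EntryPoint instance

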
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    def __init__(self,
        location=None, metadata=None, project_name=None, version=None,
        py_version=PY_MAJOR, platform=None, precedence=EGG_DIST
    ):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    #@classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name','ver','pyver','plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )
    from_location = classmethod(from_location)

    hashcmp = property(
        lambda self: (
            getattr(self,'parsed_version',()), self.precedence, self.key,
            -len(self.location or ''), self.location, self.py_version,
            self.platform
        )
    )
    def __cmp__(self, other): return cmp(self.hashcmp, other)
    def __hash__(self): return hash(self.hashcmp)

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed.  (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    #@property
    def key(self):
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key
    key = property(key)

    #@property
    def parsed_version(self):
        try:
            return self._parsed_version
        except AttributeError:
            self._parsed_version = pv = parse_version(self.version)
            return pv
    parsed_version = property(parsed_version)

    #@property
    def version(self):
        try:
            return self._version
        except AttributeError:
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
            else:
                raise ValueError(
                    "Missing 'Version:' header and/or PKG-INFO file", self
                )
    version = property(version)

    #@property
    def _dep_map(self):
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra: extra = safe_extra(extra)
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
            return dm
    _dep_map = property(_dep_map)

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None,()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self,name):
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self, path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules: declare_namespace(pkg)

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-'+self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)

    def __str__(self):
        try: version = getattr(self,'version',None)
        except ValueError: version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)

    #@classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )
    from_filename = classmethod(from_filename)

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group,name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group,{})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc=None):
        """Insert self.location in path before its nearest parent directory"""
        loc = loc or self.location
        if not loc: return
        if path is sys.path:
            self.check_version_conflict()
        best, pos = 0, -1
        for p,item in enumerate(path):
            item = _normalize_cached(item)
            if loc.startswith(item) and len(item)>best and loc!=item:
                best, pos = len(item), p
        if pos==-1:
            if loc not in path: path.append(loc)
        elif loc not in path[:pos+1]:
            while loc in path: path.remove(loc)
            path.insert(pos,loc)

    def check_version_conflict(self):
        if self.key=='setuptools':
            return      # ignore the inevitable setuptools self-conflicts :(

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            ):
                continue

            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and normalize_path(fn).startswith(loc):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for "+repr(self))
            return False
        return True

    def clone(self, **kw):
        """Copy this distribution, substituting in any changed keyword args"""
        for attr in (
            'project_name', 'version', 'py_version', 'platform', 'location',
            'precedence'
        ):
            kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    #@property
    def extras(self):
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)


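# An illustrative sketch: building a ``Distribution`` from a (hypothetical)
# egg filename, then turning it back into an exact requirement::
#
#     dist = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
#     print dist.project_name        # FooPkg
#     print dist.version             # 1.2
#     print dist.as_requirement()    # FooPkg==1.2
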
def issue_warning(*args, **kw):
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        pass
    from warnings import warn
    warn(stacklevel=level+1, *args, **kw)


def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM, TERMINATOR, line, p, groups, item_name):

        items = []

        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                try:
                    line = lines.next(); p = 0
                except StopIteration:
                    raise ValueError(
                        "\\ must not appear on the last nonblank line"
                    )

            match = ITEM(line,p)
            if not match:
                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])

            items.append(match.group(*groups))
            p = match.end()

            match = COMMA(line,p)
            if match:
                p = match.end()     # skip the comma
            elif not TERMINATOR(line,p):
                raise ValueError(
                    "Expected ',' or end-of-list in",line,"at",line[p:]
                )

        match = TERMINATOR(line,p)
        if match: p = match.end()   # skip the terminator, if any
        return line, p, items

    for line in lines:
        match = DISTRO(line)
        if not match:
            raise ValueError("Missing distribution spec", line)
        project_name = match.group(1)
        p = match.end()
        extras = []

        match = OBRACKET(line,p)
        if match:
            p = match.end()
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
            )

        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
        specs = [(op,safe_version(val)) for op,val in specs]
        yield Requirement(project_name, specs, extras)


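# An illustrative sketch::
#
#     reqs = list(parse_requirements("Twisted>=1.2\nFooPkg[bar]==2.0"))
#     print reqs[0]           # Twisted>=1.2
#     print reqs[1].extras    # ('bar',)

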
def _sort_dists(dists):
    tmp = [(dist.hashcmp,dist) for dist in dists]
    tmp.sort()
    dists[::-1] = [d for hc,d in tmp]


class Requirement:
    def __init__(self, project_name, specs, extras):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        self.unsafe_name, project_name = project_name, safe_name(project_name)
        self.project_name, self.key = project_name, project_name.lower()
        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
        index.sort()
        self.specs = [(op,ver) for parsed,trans,op,ver in index]
        self.index, self.extras = index, tuple(map(safe_extra,extras))
        self.hashCmp = (
            self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
            ImmutableSet(self.extras)
        )
        self.__hash = hash(self.hashCmp)

    def __str__(self):
        specs = ','.join([''.join(s) for s in self.specs])
        extras = ','.join(self.extras)
        if extras: extras = '[%s]' % extras
        return '%s%s%s' % (self.project_name, extras, specs)

    def __eq__(self, other):
        return isinstance(other,Requirement) and self.hashCmp==other.hashCmp

    def __contains__(self, item):
        if isinstance(item,Distribution):
            if item.key != self.key: return False
            if self.index: item = item.parsed_version   # only get if we need it
        elif isinstance(item,basestring):
            item = parse_version(item)
        last = None
        for parsed,trans,op,ver in self.index:
            action = trans[cmp(item,parsed)]
            if action=='F': return False
            elif action=='T': return True
            elif action=='+': last = True
            elif action=='-' or last is None: last = False
        if last is None: last = True    # no rules encountered
        return last

    def __hash__(self):
        return self.__hash

    def __repr__(self): return "Requirement.parse(%r)" % str(self)

    #@staticmethod
    def parse(s):
        reqs = list(parse_requirements(s))
        if reqs:
            if len(reqs)==1:
                return reqs[0]
            raise ValueError("Expected only one requirement", s)
        raise ValueError("No requirements found", s)

    parse = staticmethod(parse)

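# An illustrative sketch of requirement containment::
#
#     req = Requirement.parse("FooPkg>=1.2,!=1.6")
#     '1.4' in req    # -> True
#     '1.6' in req    # -> False (excluded by !=1.6)
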
state_machine = {
    #       =><
    '<' : '--T',
    '<=': 'T-T',
    '>' : 'F+F',
    '>=': 'T+F',
    '==': 'T..',
    '!=': 'F++',
}
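# Each action string above is indexed by ``cmp(candidate, spec_version)``:
# position 0 is "equal", 1 is "greater", and -1 is "less".  'T' and 'F'
# decide the match immediately, '+' and '-' record a tentative result, and
# '.' leaves a previously recorded result unchanged (defaulting to no match).
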
def _get_mro(cls):
    """Get an mro for a type or classic class"""
    if not isinstance(cls,type):
        class cls(cls,object): pass
        return cls.__mro__[1:]
    return cls.__mro__

def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    for t in _get_mro(getattr(ob, '__class__', type(ob))):
        if t in registry:
            return registry[t]


def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        os.makedirs(dirname)

def split_sections(s):
    """Split a string or iterable thereof into (section,content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                content = []
            else:
                raise ValueError("Invalid section heading", line)
        else:
            content.append(line)

    # wrap up last segment
    yield section, content

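# An illustrative sketch::
#
#     list(split_sections("lead\n[sec one]\nline1\nline2"))
#     # -> [(None, ['lead']), ('sec one', ['line1', 'line2'])]
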
def _mkstemp(*args, **kw):
    from tempfile import mkstemp
    old_open = os.open
    try:
        os.open = os_open   # temporarily bypass sandboxing
        return mkstemp(*args,**kw)
    finally:
        os.open = old_open  # and then put it back


# Set up global resource manager
_manager = ResourceManager()
def _initialize(g):
    for name in dir(_manager):
        if not name.startswith('_'):
            g[name] = getattr(_manager, name)
_initialize(globals())

# Prepare the master working set and make the ``require()`` API available
working_set = WorkingSet()
try:
    # Does the main program list any requirements?
    from __main__ import __requires__
except ImportError:
    pass    # No: just use the default working set based on sys.path
else:
    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
    try:
        working_set.require(__requires__)
    except VersionConflict:     # try it without defaults already on sys.path
        working_set = WorkingSet([])    # by starting with an empty path
        for dist in working_set.resolve(
            parse_requirements(__requires__), Environment()
        ):
            working_set.add(dist)
        for entry in sys.path:  # add any missing entries from sys.path
            if entry not in working_set.entries:
                working_set.add_entry(entry)
        sys.path[:] = working_set.entries   # then copy back to sys.path

require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script   # backward compatibility
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
working_set.entries=[]; map(working_set.add_entry,sys.path) # match order

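# An illustrative sketch of the module-level convenience API set up above
# (the project name and entry point group are hypothetical)::
#
#     import pkg_resources
#     pkg_resources.require("FooPkg>=1.2")    # resolve and activate FooPkg
#     for ep in pkg_resources.iter_entry_points("foo.plugins"):
#         print ep.name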