A resource is a logical file contained within a package, or a logical
subdirectory thereof. The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is. Do not use os.path operations to manipulate resource
names being passed into the API.
The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files. It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
This module is deprecated. Users are directed to :mod:`importlib.resources`,
:mod:`importlib.metadata` and :pypi:`packaging` instead. """
from __future__ import annotations
import sys
# Refuse to import on interpreters older than the minimum supported version.
if sys.version_info < (3, 8):  # noqa: UP036 # Check for unsupported versions
    raise RuntimeError("Python 3.8 or later is required")
# capture these to bypass sandboxing from os import open as os_open, utime # isort: skip from os.path import isdir, split # isort: skip
# Probe for filesystem write primitives; platforms without them (e.g. GAE)
# get a read-only mode flagged by WRITE_SUPPORT.
try:
    from os import mkdir, rename, unlink
except ImportError:
    # no write support, probably under GAE
    WRITE_SUPPORT = False
else:
    WRITE_SUPPORT = True
import packaging.markers import packaging.requirements import packaging.specifiers import packaging.utils import packaging.version from jaraco.text import drop_comment, join_continuation, yield_lines from platformdirs import user_cache_dir as _user_cache_dir
# Static-analysis-only imports; never executed at runtime.
if TYPE_CHECKING:
    from _typeshed import BytesPath, StrOrBytesPath, StrPath
    from _typeshed.importlib import LoaderProtocol
    from typing_extensions import Self, TypeAlias
def __getstate__() -> dict[str, Any]:
    """Snapshot the registered module-level state.

    For each name in ``_state_vars``, look up the matching ``_sget_<kind>``
    getter in this module's globals and apply it to the current global value.
    """
    g = globals()
    return {name: g['_sget_' + kind](g[name]) for name, kind in _state_vars.items()}
def __setstate__(state: dict[str, Any]) -> dict[str, Any]:
    """Restore module-level state previously captured by ``__getstate__``.

    Dispatches each entry to the matching ``_sset_<kind>`` setter, passing
    the name, the current global value, and the saved value.
    """
    g = globals()
    for name, value in state.items():
        g['_sset_' + _state_vars[name]](name, g[name], value)
    return state
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of macOS that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of macOS that we are *running*.  To allow usage of packages that
    explicitly require a newer version of macOS, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            # Substitute the *running* macOS version for the build-time one.
            plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
        except ValueError:
            # not macOS
            pass
    return plat
def with_context(
    self, required_by: set[Distribution | str]
) -> Self | ContextualVersionConflict:
    """
    If required_by is non-empty, return a version of self that is a
    ContextualVersionConflict.
    """
    if not required_by:
        return self
    # Re-raiseable conflict carrying the set of requirers as a third arg.
    return ContextualVersionConflict(*self.args, required_by)
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """

    # Extends the parent message with the requirers recorded in args[2].
    _template = VersionConflict._template + ' by {self.required_by}'
def register_loader_type(
    loader_type: type[_ModuleLike], provider_factory: _ProviderFactoryType
) -> None:
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory
@overload
def get_provider(moduleOrReq: str) -> IResourceProvider: ...
@overload
def get_provider(moduleOrReq: Requirement) -> Distribution: ...
def get_provider(moduleOrReq: str | Requirement) -> IResourceProvider | Distribution:
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # Requirement: resolve through the global working set.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # Module name: import it if necessary, then adapt its loader.
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
@functools.lru_cache(maxsize=None)
def _macos_vers():
    """Return the running macOS version as a list of dotted components."""
    version = platform.mac_ver()[0]
    # fallback for MacPorts
    if version == '':
        plist = '/System/Library/CoreServices/SystemVersion.plist'
        if os.path.exists(plist):
            with open(plist, 'rb') as fh:
                plist_content = plistlib.load(fh)
            if 'ProductVersion' in plist_content:
                version = plist_content['ProductVersion']
    return version.split('.')
def get_build_platform(): """Return this platform's string for platform-specific distributions
XXX Currently this is the same as ``distutils.util.get_platform()``, but it
needs some hacks for Linux and macOS. """ from sysconfig import get_platform
plat = get_platform() if sys.platform == "darwin"andnot plat.startswith('macosx-'): try:
version = _macos_vers()
machine = os.uname()[4].replace(" ", "_") return"macosx-%d.%d-%s" % (
int(version[0]),
int(version[1]),
_macos_arch(machine),
) except ValueError: # if someone is running a non-Mac darwin system, this will fall # through to the default implementation pass return plat
def compatible_platforms(provided: str | None, required: str | None) -> bool:
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # easy case: unconstrained on either side, or identical strings
    if provided is None or required is None or provided == required:
        return True

    # macOS special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)

        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macOS designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
                if (
                    dversion == 7
                    and macosversion >= "10.3"
                    or dversion == 8
                    and macosversion >= "10.4"
                ):
                    return True
            # egg isn't macOS or legacy darwin
            return False

        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3):
            return False

        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False
@overload
def get_distribution(dist: _DistributionT) -> _DistributionT: ...
@overload
def get_distribution(dist: _PkgReqType) -> Distribution: ...
def get_distribution(dist: Distribution | _PkgReqType) -> Distribution:
    """Return a current distribution object for a Requirement or string"""
    # Normalize str -> Requirement -> Distribution in two steps.
    if isinstance(dist, str):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected str, Requirement, or Distribution", dist)
    return dist
def load_entry_point(dist: _EPDistType, group: str, name: str) -> _ResolvedEntryPoint:
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    return get_distribution(dist).load_entry_point(group, name)
@overload
def get_entry_map(
    dist: _EPDistType, group: None = None
) -> dict[str, dict[str, EntryPoint]]: ...
@overload
def get_entry_map(dist: _EPDistType, group: str) -> dict[str, EntryPoint]: ...
def get_entry_map(dist: _EPDistType, group: str | None = None):
    """Return the entry point map for `group`, or the full entry map"""
    return get_distribution(dist).get_entry_map(group)
def get_entry_info(dist: _EPDistType, group: str, name: str) -> EntryPoint | None:
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return get_distribution(dist).get_entry_info(group, name)
class IMetadataProvider(Protocol):
    """Structural interface for objects exposing distribution metadata."""

    def has_metadata(self, name: str) -> bool:
        """Does the package's distribution contain the named metadata?"""
        ...

    def get_metadata(self, name: str) -> str:
        """The named metadata resource as a string"""
        ...

    def get_metadata_lines(self, name: str) -> Iterator[str]:
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted.
        """
        ...

    def metadata_isdir(self, name: str) -> bool:
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
        ...

    def metadata_listdir(self, name: str) -> list[str]:
        """List of metadata names in the directory (like ``os.listdir()``)"""
        ...

    def run_script(self, script_name: str, namespace: dict[str, Any]) -> None:
        """Execute the named script in the supplied namespace dictionary"""
        ...
class IResourceProvider(IMetadataProvider, Protocol):
    """An object that provides access to package resources"""

    def get_resource_filename(
        self, manager: ResourceManager, resource_name: str
    ) -> str:
        """Return a true filesystem path for `resource_name`

        `manager` must be a ``ResourceManager``"""
        ...

    def get_resource_stream(
        self, manager: ResourceManager, resource_name: str
    ) -> _ResourceStream:
        """Return a readable file-like object for `resource_name`

        `manager` must be a ``ResourceManager``"""
        ...

    def get_resource_string(
        self, manager: ResourceManager, resource_name: str
    ) -> bytes:
        """Return the contents of `resource_name` as :obj:`bytes`

        `manager` must be a ``ResourceManager``"""
        ...

    def has_resource(self, resource_name: str) -> bool:
        """Does the package contain the named resource?"""
        ...

    def resource_isdir(self, resource_name: str) -> bool:
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
        ...

    def resource_listdir(self, resource_name: str) -> list[str]:
        """List of resource names in the directory (like ``os.listdir()``)"""
        ...
class WorkingSet: """A collection of active distributions on sys.path (or a similar list)"""
def __init__(self, entries: Iterable[str] | None = None) -> None:
    """Create working set from list of path entries (default=sys.path)"""
    self.entries: list[str] = []
    self.entry_keys: dict[str | None, list[str]] = {}
    self.by_key: dict[str, Distribution] = {}
    self.normalized_to_canonical_keys: dict[str, str] = {}
    self.callbacks: list[Callable[[Distribution], object]] = []

    if entries is None:
        entries = sys.path

    # Scan every entry for distributions up front.
    for entry in entries:
        self.add_entry(entry)
@classmethod
def _build_master(cls):
    """
    Prepare the master working set.
    """
    ws = cls()
    try:
        from __main__ import __requires__
    except ImportError:
        # The main program does not list any requirements
        return ws

    # ensure the requirements are met
    try:
        ws.require(__requires__)
    except VersionConflict:
        # Fall back to a working set built purely from the requirements.
        return cls._build_from_requirements(__requires__)

    return ws
@classmethod
def _build_from_requirements(cls, req_spec):
    """
    Build a working set from a requirement spec. Rewrites sys.path.
    """
    # try it without defaults already on sys.path
    # by starting with an empty path
    ws = cls([])
    reqs = parse_requirements(req_spec)
    for dist in ws.resolve(reqs, Environment()):
        ws.add(dist)

    # add any missing entries from sys.path
    for entry in sys.path:
        if entry not in ws.entries:
            ws.add_entry(entry)

    # then copy back to sys.path
    sys.path[:] = ws.entries
    return ws
def add_entry(self, entry: str) -> None:
    """Add a path item to ``.entries``, finding any distributions on it

    ``find_distributions(entry, True)`` is used to find distributions
    corresponding to the path entry, and they are added.  `entry` is
    always appended to ``.entries``, even if it is already present.
    (This is because ``sys.path`` can contain the same value more than
    once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
    equal ``sys.path``.)
    """
    self.entry_keys.setdefault(entry, [])
    self.entries.append(entry)
    for dist in find_distributions(entry, True):
        self.add(dist, entry, False)
def __contains__(self, dist: Distribution) -> bool:
    """True if `dist` is the active distribution for its project"""
    return self.by_key.get(dist.key) == dist
def find(self, req: Requirement) -> Distribution | None:
    """Find a distribution matching requirement `req`

    If there is an active distribution for the requested project, this
    returns it as long as it meets the version requirement specified by
    `req`.  But, if there is an active distribution for the project and it
    does *not* meet the `req` requirement, ``VersionConflict`` is raised.
    If there is no active distribution for the requested project, ``None``
    is returned.
    """
    dist = self.by_key.get(req.key)

    if dist is None:
        # Retry under the canonicalized project key, if one is registered.
        canonical_key = self.normalized_to_canonical_keys.get(req.key)

        if canonical_key is not None:
            req.key = canonical_key
            dist = self.by_key.get(canonical_key)

    if dist is not None and dist not in req:
        # XXX add more info
        raise VersionConflict(dist, req)

    return dist
If `name` isNone, yields all entry points in `group` from all
distributions in the working set, otherwise only ones matching
both `group` and `name` are yielded (in distribution order). """ return (
entry for dist in self for entry in dist.get_entry_map(group).values() if name isNoneor name == entry.name
)
def run_script(self, requires: str, script_name: str) -> None:
    """Locate distribution for `requires` and run `script_name` script"""
    # Run the script in the *caller's* globals, preserving only __name__.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    self.require(requires)[0].run_script(script_name, ns)
def __iter__(self) -> Iterator[Distribution]:
    """Yield distributions for non-duplicate projects in the working set

    The yield order is the order in which the items' path entries were
    added to the working set.
    """
    seen = set()
    for item in self.entries:
        if item not in self.entry_keys:
            # workaround a cache issue
            continue
        for key in self.entry_keys[item]:
            if key not in seen:
                seen.add(key)
                yield self.by_key[key]
def add(
    self,
    dist: Distribution,
    entry: str | None = None,
    insert: bool = True,
    replace: bool = False,
) -> None:
    """Add `dist` to working set, associated with `entry`

    If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
    On exit from this routine, `entry` is added to the end of the working
    set's ``.entries`` (if it wasn't already present).

    `dist` is only added to the working set if it's for a project that
    doesn't already have a distribution in the set, unless `replace=True`.
    If it's added, any callbacks registered with the ``subscribe()`` method
    will be called.
    """
    if insert:
        dist.insert_on(self.entries, entry, replace=replace)

    if entry is None:
        entry = dist.location
    keys = self.entry_keys.setdefault(entry, [])
    keys2 = self.entry_keys.setdefault(dist.location, [])
    if not replace and dist.key in self.by_key:
        # ignore hidden distros
        return

    self.by_key[dist.key] = dist
    normalized_name = packaging.utils.canonicalize_name(dist.key)
    self.normalized_to_canonical_keys[normalized_name] = dist.key
    if dist.key not in keys:
        keys.append(dist.key)
    if dist.key not in keys2:
        keys2.append(dist.key)
    # Notify subscribers of the newly activated distribution.
    self._added_new(dist)
`requirements` must be a sequence of ``Requirement`` objects. `env`, if supplied, should be an ``Environment`` instance. If not supplied, it defaults to all distributions available within any
entry or distribution in the working set. `installer`, if supplied,
will be invoked with each requirement that cannot be met by an
already-installed distribution; it should return a ``Distribution`` or
``None``.
Unless `replace_conflicting=True`, raises a VersionConflict exception if
any requirements are found on the path that have the correct name but
the wrong version. Otherwise, if an `installer` is supplied it will be
invoked to obtain the correct version of the requirement and activate
it.
`extras` is a list of the extras to be used with these requirements.
This is important because extra requirements may look like `my_req;
extra = "my_extra"`, which would otherwise be interpreted as a purely
optional requirement. Instead, we want to be able to assert that these
requirements are truly required. """
# set up the stack
requirements = list(requirements)[::-1] # set of processed requirements
processed = set() # key -> dist
best: dict[str, Distribution] = {}
to_activate: list[Distribution] = []
req_extras = _ReqExtras()
# Mapping of requirement to set of distributions that required it; # useful for reporting info about conflicts.
required_by: collections.defaultdict[Requirement, set[str]] = (
collections.defaultdict(set)
)
while requirements: # process dependencies breadth-first
req = requirements.pop(0) if req in processed: # Ignore cyclic or redundant dependencies continue
# push the new requirements onto the stack
new_requirements = dist.requires(req.extras)[::-1]
requirements.extend(new_requirements)
# Register the new requirements needed by req for new_requirement in new_requirements:
required_by[new_requirement].add(req.project_name)
req_extras[new_requirement] = req.extras
processed.add(req)
# return list of distros to activate return to_activate
def _resolve_dist(
    self, req, best, replace_conflicting, env, installer, required_by, to_activate
) -> Distribution:
    """Resolve one requirement to a distribution, updating `best` and
    `to_activate`; raises DistributionNotFound or VersionConflict on failure.
    """
    dist = best.get(req.key)
    if dist is None:
        # Find the best distribution and add it to the map
        dist = self.by_key.get(req.key)
        if dist is None or (dist not in req and replace_conflicting):
            ws = self
            if env is None:
                if dist is None:
                    env = Environment(self.entries)
                else:
                    # Use an empty environment and workingset to avoid
                    # any further conflicts with the conflicting
                    # distribution
                    env = Environment([])
                    ws = WorkingSet([])
            dist = best[req.key] = env.best_match(
                req, ws, installer, replace_conflicting=replace_conflicting
            )
            if dist is None:
                requirers = required_by.get(req, None)
                raise DistributionNotFound(req, requirers)
    to_activate.append(dist)
    if dist not in req:
        # Oops, the "best" so far conflicts with a dependency
        dependent_req = required_by[req]
        raise VersionConflict(dist, req).with_context(dependent_req)
    return dist
distributions, errors = working_set.find_plugins(
Environment(plugin_dirlist)
) # add plugins+libs to sys.path
map(working_set.add, distributions) # display errors
print('Could not load', errors)
The `plugin_env` should be an ``Environment`` instance that contains
only distributions that are in the project's "plugin directory" or
directories. The `full_env`, if supplied, should be an ``Environment``
contains all currently-available distributions. If `full_env` is not
supplied, one is created automatically from the ``WorkingSet`` this
method is called on, which will typically mean that every directory on
``sys.path`` will be scanned for distributions.
`installer` is a standard installer callback as used by the
``resolve()`` method. The `fallback` flag indicates whether we should
attempt to resolve older versions of a plugin if the newest version
cannot be resolved.
This method returns a 2-tuple: (`distributions`, `error_info`), where
`distributions` is a list of the distributions found in `plugin_env`
that were loadable, along with any other distributions that are needed
to resolve their dependencies. `error_info` is a dictionary mapping
unloadable plugin distributions to an exception instance describing the
error that occurred. Usually this will be a ``DistributionNotFound`` or
``VersionConflict`` instance. """
plugin_projects = list(plugin_env) # scan project names in alphabetic order
plugin_projects.sort()
except ResolutionError as v: # save error info
error_info[dist] = v if fallback: # try the next older version of project continue else: # give up on this project, keep going break
def require(self, *requirements: _NestedStr) -> list[Distribution]:
    """Ensure that distributions matching `requirements` are activated

    `requirements` must be a string or a (possibly-nested) sequence
    thereof, specifying the distributions and versions required.  The
    return value is a sequence of the distributions that needed to be
    activated to fulfill the requirements; all relevant distributions are
    included, even if they were already activated in this working set.
    """
    needed = self.resolve(parse_requirements(requirements))
    for dist in needed:
        self.add(dist)
    return needed
def subscribe(
    self, callback: Callable[[Distribution], object], existing: bool = True
) -> None:
    """Invoke `callback` for all distributions

    If `existing=True` (default), call on all existing ones, as well.
    """
    if callback in self.callbacks:
        # already subscribed
        return
    self.callbacks.append(callback)
    if not existing:
        return
    for dist in self:
        callback(dist)
def _added_new(self, dist):
    """Notify every subscribed callback about a newly added distribution."""
    for callback in self.callbacks:
        callback(dist)
class _ReqExtras(Dict["Requirement", Tuple[str, ...]]):
    """
    Map each requirement to the extras that demanded it.
    """

    def markers_pass(self, req: Requirement, extras: tuple[str, ...] | None = None):
        """
        Evaluate markers for req against each extra that
        demanded it.

        Return False if the req has a marker and fails
        evaluation. Otherwise, return True.
        """
        # An extra-less evaluation ("") is always included as a baseline.
        return not req.marker or any(
            req.marker.evaluate({'extra': extra})
            for extra in self.get(req, ()) + (extras or ("",))
        )
class Environment: """Searchable snapshot of distributions on a search path"""
Any distributions found on `search_path` are added to the environment.
`search_path` should be a sequence of ``sys.path`` items. If not
supplied, ``sys.path`` is used.
`platform` is an optional string specifying the name of the platform
that platform-specific distributions must be compatible with. If
unspecified, it defaults to the current platform. `python` is an
optional string naming the desired version of Python (e.g. ``'3.6'``);
it defaults to the current version.
You may explicitly set `platform` (and/or `python`) to ``None`` if you
wish to map *all* distributions, not just those compatible with the
running platform or Python version. """
self._distmap: dict[str, list[Distribution]] = {}
self.platform = platform
self.python = python
self.scan(search_path)
def can_add(self, dist: Distribution) -> bool:
    """Is distribution `dist` acceptable for this environment?

    The distribution must match the platform and python version
    requirements specified when this environment was created, or False
    is returned.
    """
    py_compat = (
        self.python is None
        or dist.py_version is None
        or dist.py_version == self.python
    )
    return py_compat and compatible_platforms(dist.platform, self.platform)
def remove(self, dist: Distribution) -> None:
    """Remove `dist` from the environment"""
    self._distmap[dist.key].remove(dist)
def scan(self, search_path: Iterable[str] | None = None) -> None:
    """Scan `search_path` for distributions usable in this environment

    Any distributions found are added to the environment.
    `search_path` should be a sequence of ``sys.path`` items.  If not
    supplied, ``sys.path`` is used.  Only distributions conforming to
    the platform/python version defined at initialization are added.
    """
    if search_path is None:
        search_path = sys.path

    for item in search_path:
        for dist in find_distributions(item):
            self.add(dist)
def __getitem__(self, project_name: str) -> list[Distribution]: """Return a newest-to-oldest list of distributions for `project_name`
Uses case-insensitive `project_name` comparison, assuming all the
project's distributions use their project's name converted to all
lowercase as their key.
def add(self, dist: Distribution) -> None:
    """Add `dist` if we ``can_add()`` it and it has not already been added"""
    if self.can_add(dist) and dist.has_version():
        dists = self._distmap.setdefault(dist.key, [])
        if dist not in dists:
            dists.append(dist)
            # Keep newest-first ordering for __getitem__ consumers.
            dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
This calls the ``find(req)`` method of the `working_set` to see if a
suitable distribution is already active. (This may raise
``VersionConflict`` if an unsuitable version of the project is already
active in the specified `working_set`.) If a suitable distribution
isn't active, this method returns the newest distribution in the
environment that meets the ``Requirement`` in `req`. If no suitable
distribution is found, and `installer` is supplied, then the result of
calling the environment's ``obtain(req, installer)`` method will be
returned. """ try:
dist = working_set.find(req) except VersionConflict: ifnot replace_conflicting: raise
dist = None if dist isnotNone: return dist for dist in self[req.key]: if dist in req: return dist # try to download/install return self.obtain(req, installer)
Obtain a distro that matches requirement (e.g. via download). In the
base ``Environment`` class, this routine just returns
``installer(requirement)``, unless `installer` isNone, in which case Noneis returned instead. This method is a hook that allows subclasses
to attempt other ways of obtaining a distribution before falling back
to the `installer` argument.""" return installer(requirement) if installer elseNone
def __iter__(self) -> Iterator[str]:
    """Yield the unique project names of the available distributions"""
    for key in self._distmap.keys():
        # Skip keys whose distribution lists are empty.
        if self[key]:
            yield key
def __iadd__(self, other: Distribution | Environment) -> Self:
    """In-place addition of a distribution or environment"""
    if isinstance(other, Distribution):
        self.add(other)
    elif isinstance(other, Environment):
        for project in other:
            for dist in other[project]:
                self.add(dist)
    else:
        raise TypeError("Can't add %r to environment" % (other,))
    return self
def __add__(self, other: Distribution | Environment) -> Self:
    """Add an environment or distribution to an environment"""
    # Start from an unconstrained, empty environment and fold both in.
    new = self.__class__([], platform=None, python=None)
    for env in self, other:
        new += env
    return new
def resource_listdir(
    self, package_or_requirement: _PkgReqType, resource_name: str
) -> list[str]:
    """List the contents of the named resource directory"""
    return get_provider(package_or_requirement).resource_listdir(resource_name)
def extraction_error(self) -> NoReturn:
    """Give an error message for problems extracting file(s)"""
    old_exc = sys.exc_info()[1]
    cache_path = self.extraction_path or get_default_cache()

    tmpl = textwrap.dedent(
        """
        Can't extract file(s) to egg cache

        The following error occurred while trying to extract file(s)
        to the Python egg cache:

          {old_exc}

        The Python egg cache directory is currently set to:

          {cache_path}

        Perhaps your account does not have write access to this directory?
        You can change the cache directory by setting the PYTHON_EGG_CACHE
        environment variable to point to an accessible directory.
        """
    ).lstrip()
    err = ExtractionError(tmpl.format(**locals()))
    err.manager = self
    err.cache_path = cache_path
    err.original_error = old_exc
    raise err
def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()) -> str:
    """Return absolute location in cache for `archive_name` and `names`

    The parent directory of the resulting path will be created if it does
    not already exist.  `archive_name` should be the base filename of the
    enclosing egg (which may not be the name of the enclosing zipfile!),
    including its ".egg" extension.  `names`, if provided, should be a
    sequence of path name parts "under" the egg's extraction location.

    This method should only be called by resource providers that need to
    obtain an extraction location, and only for names they intend to
    extract, as it tracks the generated names for possible cleanup later.
    """
    extract_path = self.extraction_path or get_default_cache()
    target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
    try:
        _bypass_ensure_directory(target_path)
    except Exception:
        # Translate any failure into a descriptive ExtractionError (raises).
        self.extraction_error()

    # Warn if the chosen extraction root is writable by other users.
    self._warn_unsafe_extraction_path(extract_path)

    # Bug fix: the function is annotated ``-> str`` but previously fell off
    # the end, returning None and never recording the generated name.  Track
    # the path for later cleanup (see docstring) and return it.
    self.cached_files[target_path] = True
    return target_path
@staticmethod
def _warn_unsafe_extraction_path(path):
    """
    If the default extraction path is overridden and set to an insecure
    location, such as /tmp, it opens up an opportunity for an attacker to
    replace an extracted file with an unauthorized payload. Warn the user
    if a known insecure location is used.

    See Distribute #375 for more details.
    """
    if os.name == 'nt' and not path.startswith(os.environ['windir']):
        # On Windows, permissions are generally restrictive by default
        # and temp directories are not writable by other users, so
        # bypass the warning.
        return
    mode = os.stat(path).st_mode
    if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
        msg = (
            "Extraction path is writable by group/others "
            "and vulnerable to attack when "
            "used with get_resource_filename ({path}). "
            "Consider a more secure "
            "location (set with .set_extraction_path or the "
            "PYTHON_EGG_CACHE environment variable)."
        ).format(**locals())
        warnings.warn(msg, UserWarning)
def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath) -> None:
    """Perform any platform-specific postprocessing of `tempname`

    This is where Mac header rewrites should be done; other platforms don't
    have anything special they should do.

    Resource providers should call this method ONLY after successfully
    extracting a compressed resource.  They must NOT call it on resources
    that are already in the filesystem.

    `tempname` is the current (temporary) name of the file, and `filename`
    is the name it will be renamed to by the caller after this routine
    returns.
    """
    if os.name == 'posix':
        # Make the resource executable
        mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
        os.chmod(tempname, mode)
def set_extraction_path(self, path: str) -> None:
    """Set the base path where resources will be extracted to, if needed.

    If you do not call this routine before any extractions take place, the
    path defaults to the return value of ``get_default_cache()``.  (Which
    is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
    platform-specific fallbacks.  See that routine's documentation for more
    details.)

    Resources are extracted to subdirectories of this path based upon
    information given by the ``IResourceProvider``.  You may set this to a
    temporary directory, but then you must call ``cleanup_resources()`` to
    delete the extracted files when done.  There is no guarantee that
    ``cleanup_resources()`` will be able to remove all extracted files.

    (Note: you may not change the extraction path for a given resource
    manager once resources have been extracted, unless you first call
    ``cleanup_resources()``.)
    """
    if self.cached_files:
        # Changing the root after extraction would orphan tracked files.
        raise ValueError("Can't change extraction path, files already extracted")

    self.extraction_path = path
def cleanup_resources(self, force: bool = False) -> list[str]:
    """
    Delete all extracted resource files and directories, returning a list
    of the file and directory names that could not be successfully removed.
    This function does not have any concurrency protection, so it should
    generally only be called when the extraction path is a temporary
    directory exclusive to a single process. This method is not
    automatically called; you must call it explicitly or register it as an
    ``atexit`` function if you wish to ensure cleanup of a temporary
    directory used for extractions.
    """
    # XXX -- not yet implemented; nothing failed to be removed.
    return []
def get_default_cache() -> str:
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    # Explicit env var wins; otherwise fall back to the per-user cache dir.
    return os.environ.get('PYTHON_EGG_CACHE') or _user_cache_dir(appname='Python-Eggs')
def safe_name(name: str) -> str:
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version: str) -> str:
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # normalize the version
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # Fall back to a lossy sanitization for non-PEP-440 versions.
        version = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', version)
def _forgiving_version(version):
    """Fallback when ``safe_version`` is not safe enough

    >>> parse_version(_forgiving_version('0.23ubuntu1'))
    <Version('0.23.dev0+sanitized.ubuntu1')>
    >>> parse_version(_forgiving_version('0.23-'))
    <Version('0.23.dev0+sanitized')>
    >>> parse_version(_forgiving_version('0.-_'))
    <Version('0.dev0+sanitized')>
    >>> parse_version(_forgiving_version('42.+?1'))
    <Version('42.dev0+sanitized.1')>
    >>> parse_version(_forgiving_version('hello world'))
    <Version('0.dev0+sanitized.hello.world')>
    """
    version = version.replace(' ', '.')
    match = _PEP440_FALLBACK.search(version)
    if match:
        # Keep the leading PEP 440-compatible prefix as-is.
        safe = match["safe"]
        rest = version[len(safe) :]
    else:
        safe = "0"
        rest = version
    # Everything unparseable goes into a sanitized local version label.
    local = f"sanitized.{_safe_segment(rest)}".strip(".")
    return f"{safe}.dev0+{local}"
def _safe_segment(segment): """Convert an arbitrary string into a safe segment"""
segment = re.sub('[^A-Za-z0-9.]+', '-', segment)
segment = re.sub('-[^A-Za-z0-9]+', '-', segment) return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
def safe_extra(extra: str) -> str:
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
def to_filename(name: str) -> str:
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.replace('-', '_')
def invalid_marker(text: str) -> SyntaxError | Literal[False]:
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as e:
        # Scrub location info that is meaningless for a marker string.
        e.filename = None
        e.lineno = None
        return e
    return False
def evaluate_marker(text: str, extra: str | None = None) -> bool:
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        marker = packaging.markers.Marker(text)
        return marker.evaluate()
    except packaging.markers.InvalidMarker as e:
        # Preserve the historical SyntaxError contract for callers.
        raise SyntaxError(e) from e
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # NOTE(review): the upstream class defines further members (e.g. the
    # ``_get_metadata_path`` used below) that are missing from this mangled
    # chunk — confirm against the original source.

    def get_metadata(self, name: str) -> str:
        """Return the named metadata resource decoded as UTF-8 text.

        Returns '' when this provider has no egg-info.
        """
        if not self.egg_info:
            return ""
        path = self._get_metadata_path(name)
        value = self._get(path)
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError as exc:
            # Include the path in the error message to simplify
            # troubleshooting, and without changing the exception type.
            exc.reason += ' in {} file at path: {}'.format(name, path)
            raise

    def _has(self, path) -> bool:
        # Capability hook: loader-specific subclasses must override.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path) -> bool:
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path) -> list[str]:
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base: str | None, resource_name: str):
        # Map a '/'-separated resource name onto the local filesystem
        # layout below ``base``.
        if base is None:
            raise TypeError(
                "`base` parameter in `_fn` is `None`. Either override this method or check the parameter first."
            )
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate a '/'-separated resource path.

        - ``..`` segments and POSIX-absolute paths are deprecated: they
          currently only issue a DeprecationWarning, but will raise in a
          future release.
        - Windows-style absolute paths (drive letters or a leading
          backslash) raise ``ValueError`` immediately.
        - Blank values are allowed; non-string values raise
          ``AttributeError``.

        NOTE(review): the ``def`` line and decorator for this method were
        lost in the source; ``@staticmethod`` is reconstructed — confirm
        against upstream.
        """
        invalid = (
            os.path.pardir in path.split(posixpath.sep)
            or posixpath.isabs(path)
            or ntpath.isabs(path)
            or path.startswith("\\")
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if (path.startswith("\\") or ntpath.isabs(path)) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        issue_warning(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
        )

    def _get(self, path) -> bytes:
        if hasattr(self.loader, 'get_data') and self.loader:
            # Already checked get_data exists
            return self.loader.get_data(path)  # type: ignore[attr-defined]
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
# Fallback registration: ``object`` matches any loader class, so loaders
# without a more specific provider registration get NullProvider.
register_loader_type(object, NullProvider)
def _parents(path): """ yield all parents of path including path """
last = None while path != last: yield path
last = path
path, _ = os.path.split(path)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def _setup_prefix(self):
        # Assume that metadata may be nested inside a "basket"
        # of multiple eggs and use module_path instead of .archive.
        eggs = filter(_is_egg_path, _parents(self.module_path))
        egg = next(eggs, None)
        # Only record a prefix when an enclosing egg path was found.
        if egg:
            self._set_egg(egg)
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung ist noch experimentell.