diff --git a/Lib/__hello__.py b/Lib/__hello__.py new file mode 100644 index 0000000000..c09d6a4f52 --- /dev/null +++ b/Lib/__hello__.py @@ -0,0 +1,16 @@ +initialized = True + +class TestFrozenUtf8_1: + """\u00b6""" + +class TestFrozenUtf8_2: + """\u03c0""" + +class TestFrozenUtf8_4: + """\U0001f600""" + +def main(): + print("Hello world!") + +if __name__ == '__main__': + main() diff --git a/Lib/__phello__/__init__.py b/Lib/__phello__/__init__.py new file mode 100644 index 0000000000..d37bd2766a --- /dev/null +++ b/Lib/__phello__/__init__.py @@ -0,0 +1,7 @@ +initialized = True + +def main(): + print("Hello world!") + +if __name__ == '__main__': + main() diff --git a/Lib/__phello__/ham/__init__.py b/Lib/__phello__/ham/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Lib/__phello__/ham/eggs.py b/Lib/__phello__/ham/eggs.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Lib/__phello__/spam.py b/Lib/__phello__/spam.py new file mode 100644 index 0000000000..d37bd2766a --- /dev/null +++ b/Lib/__phello__/spam.py @@ -0,0 +1,7 @@ +initialized = True + +def main(): + print("Hello world!") + +if __name__ == '__main__': + main() diff --git a/Lib/importlib/_adapters.py b/Lib/importlib/_adapters.py deleted file mode 100644 index e72edd1070..0000000000 --- a/Lib/importlib/_adapters.py +++ /dev/null @@ -1,83 +0,0 @@ -from contextlib import suppress - -from . import abc - - -class SpecLoaderAdapter: - """ - Adapt a package spec to adapt the underlying loader. - """ - - def __init__(self, spec, adapter=lambda spec: spec.loader): - self.spec = spec - self.loader = adapter(spec) - - def __getattr__(self, name): - return getattr(self.spec, name) - - -class TraversableResourcesLoader: - """ - Adapt a loader to provide TraversableResources. - """ - - def __init__(self, spec): - self.spec = spec - - def get_resource_reader(self, name): - return DegenerateFiles(self.spec)._native() - - -class DegenerateFiles: - """ - Adapter for an existing or non-existant resource reader - to provide a degenerate .files(). - """ - - class Path(abc.Traversable): - def iterdir(self): - return iter(()) - - def is_dir(self): - return False - - is_file = exists = is_dir # type: ignore - - def joinpath(self, other): - return DegenerateFiles.Path() - - @property - def name(self): - return '' - - def open(self, mode='rb', *args, **kwargs): - raise ValueError() - - def __init__(self, spec): - self.spec = spec - - @property - def _reader(self): - with suppress(AttributeError): - return self.spec.loader.get_resource_reader(self.spec.name) - - def _native(self): - """ - Return the native reader if it supports files(). - """ - reader = self._reader - return reader if hasattr(reader, 'files') else self - - def __getattr__(self, attr): - return getattr(self._reader, attr) - - def files(self): - return DegenerateFiles.Path() - - -def wrap_spec(package): - """ - Construct a package spec with traversable compatibility - on the spec/loader/reader. 
- """ - return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index a9500d6dc5..b1fdad8e6d 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -362,6 +362,7 @@ def __init__(self, name, loader, *, origin=None, loader_state=None, self.origin = origin self.loader_state = loader_state self.submodule_search_locations = [] if is_package else None + self._uninitialized_submodules = [] # file-location attributes self._set_fileattr = False @@ -421,7 +422,10 @@ def has_location(self, value): def spec_from_loader(name, loader, *, origin=None, is_package=None): """Return a module spec based on various loader methods.""" - if hasattr(loader, 'get_filename'): + if origin is None: + origin = getattr(loader, '_ORIGIN', None) + + if not origin and hasattr(loader, 'get_filename'): if _bootstrap_external is None: raise NotImplementedError spec_from_file_location = _bootstrap_external.spec_from_file_location @@ -467,12 +471,9 @@ def _spec_from_module(module, loader=None, origin=None): except AttributeError: location = None if origin is None: - if location is None: - try: - origin = loader._ORIGIN - except AttributeError: - origin = None - else: + if loader is not None: + origin = getattr(loader, '_ORIGIN', None) + if not origin and location is not None: origin = location try: cached = module.__cached__ @@ -484,7 +485,7 @@ def _spec_from_module(module, loader=None, origin=None): submodule_search_locations = None spec = ModuleSpec(name, loader, origin=origin) - spec._set_fileattr = False if location is None else True + spec._set_fileattr = False if location is None else (origin == location) spec.cached = cached spec.submodule_search_locations = submodule_search_locations return spec @@ -507,9 +508,9 @@ def _init_module_attrs(spec, module, *, override=False): if spec.submodule_search_locations is not None: if _bootstrap_external is None: raise NotImplementedError - _NamespaceLoader = _bootstrap_external._NamespaceLoader + NamespaceLoader = _bootstrap_external.NamespaceLoader - loader = _NamespaceLoader.__new__(_NamespaceLoader) + loader = NamespaceLoader.__new__(NamespaceLoader) loader._path = spec.submodule_search_locations spec.loader = loader # While the docs say that module.__file__ is not set for @@ -541,6 +542,7 @@ def _init_module_attrs(spec, module, *, override=False): # __path__ if override or getattr(module, '__path__', None) is None: if spec.submodule_search_locations is not None: + # XXX We should extend __path__ if it's already a list. try: module.__path__ = spec.submodule_search_locations except AttributeError: @@ -825,11 +827,128 @@ def module_repr(m): return ''.format(m.__name__, FrozenImporter._ORIGIN) @classmethod - def find_spec(cls, fullname, path=None, target=None): - if _imp.is_frozen(fullname): - return spec_from_loader(fullname, cls, origin=cls._ORIGIN) + def _fix_up_module(cls, module): + spec = module.__spec__ + state = spec.loader_state + if state is None: + # The module is missing FrozenImporter-specific values. + + # Fix up the spec attrs. 
+ origname = vars(module).pop('__origname__', None) + assert origname, 'see PyImport_ImportFrozenModuleObject()' + ispkg = hasattr(module, '__path__') + assert _imp.is_frozen_package(module.__name__) == ispkg, ispkg + filename, pkgdir = cls._resolve_filename(origname, spec.name, ispkg) + spec.loader_state = type(sys.implementation)( + filename=filename, + origname=origname, + ) + __path__ = spec.submodule_search_locations + if ispkg: + assert __path__ == [], __path__ + if pkgdir: + spec.submodule_search_locations.insert(0, pkgdir) + else: + assert __path__ is None, __path__ + + # Fix up the module attrs (the bare minimum). + assert not hasattr(module, '__file__'), module.__file__ + if filename: + try: + module.__file__ = filename + except AttributeError: + pass + if ispkg: + if module.__path__ != __path__: + assert module.__path__ == [], module.__path__ + module.__path__.extend(__path__) else: + # These checks ensure that _fix_up_module() is only called + # in the right places. + __path__ = spec.submodule_search_locations + ispkg = __path__ is not None + # Check the loader state. + assert sorted(vars(state)) == ['filename', 'origname'], state + if state.origname: + # The only frozen modules with "origname" set are stdlib modules. + (__file__, pkgdir, + ) = cls._resolve_filename(state.origname, spec.name, ispkg) + assert state.filename == __file__, (state.filename, __file__) + if pkgdir: + assert __path__ == [pkgdir], (__path__, pkgdir) + else: + assert __path__ == ([] if ispkg else None), __path__ + else: + __file__ = None + assert state.filename is None, state.filename + assert __path__ == ([] if ispkg else None), __path__ + # Check the file attrs. + if __file__: + assert hasattr(module, '__file__') + assert module.__file__ == __file__, (module.__file__, __file__) + else: + assert not hasattr(module, '__file__'), module.__file__ + if ispkg: + assert hasattr(module, '__path__') + assert module.__path__ == __path__, (module.__path__, __path__) + else: + assert not hasattr(module, '__path__'), module.__path__ + assert not spec.has_location + + @classmethod + def _resolve_filename(cls, fullname, alias=None, ispkg=False): + if not fullname or not getattr(sys, '_stdlib_dir', None): + return None, None + try: + sep = cls._SEP + except AttributeError: + sep = cls._SEP = '\\' if sys.platform == 'win32' else '/' + + if fullname != alias: + if fullname.startswith('<'): + fullname = fullname[1:] + if not ispkg: + fullname = f'{fullname}.__init__' + else: + ispkg = False + relfile = fullname.replace('.', sep) + if ispkg: + pkgdir = f'{sys._stdlib_dir}{sep}{relfile}' + filename = f'{pkgdir}{sep}__init__.py' + else: + pkgdir = None + filename = f'{sys._stdlib_dir}{sep}{relfile}.py' + return filename, pkgdir + + @classmethod + def find_spec(cls, fullname, path=None, target=None): + info = _call_with_frames_removed(_imp.find_frozen, fullname) + if info is None: return None + # We get the marshaled data in exec_module() (the loader + # part of the importer), instead of here (the finder part). + # The loader is the usual place to get the data that will + # be loaded into the module. (For example, see _LoaderBasics + # in _bootstra_external.py.) Most importantly, this importer + # is simpler if we wait to get the data. + # However, getting as much data in the finder as possible + # to later load the module is okay, and sometimes important. + # (That's why ModuleSpec.loader_state exists.) 
This is + # especially true if it avoids throwing away expensive data + # the loader would otherwise duplicate later and can be done + # efficiently. In this case it isn't worth it. + _, ispkg, origname = info + spec = spec_from_loader(fullname, cls, + origin=cls._ORIGIN, + is_package=ispkg) + filename, pkgdir = cls._resolve_filename(origname, fullname, ispkg) + spec.loader_state = type(sys.implementation)( + filename=filename, + origname=origname, + ) + if pkgdir: + spec.submodule_search_locations.insert(0, pkgdir) + return spec @classmethod def find_module(cls, fullname, path=None): @@ -845,14 +964,21 @@ def find_module(cls, fullname, path=None): @staticmethod def create_module(spec): - """Use default semantics for module creation.""" + """Set __file__, if able.""" + module = _new_module(spec.name) + try: + filename = spec.loader_state.filename + except AttributeError: + pass + else: + if filename: + module.__file__ = filename + return module @staticmethod def exec_module(module): - name = module.__spec__.name - if not _imp.is_frozen(name): - raise ImportError('{!r} is not a frozen module'.format(name), - name=name) + spec = module.__spec__ + name = spec.name code = _call_with_frames_removed(_imp.get_frozen_object, name) exec(code, module.__dict__) @@ -864,7 +990,16 @@ def load_module(cls, fullname): """ # Warning about deprecation implemented in _load_module_shim(). - return _load_module_shim(cls, fullname) + module = _load_module_shim(cls, fullname) + info = _imp.find_frozen(fullname) + assert info is not None + _, ispkg, origname = info + module.__origname__ = origname + vars(module).pop('__file__', None) + if ispkg: + module.__path__ = [] + cls._fix_up_module(module) + return module @classmethod @_requires_frozen @@ -988,6 +1123,7 @@ def _sanity_check(name, package, level): def _find_and_load_unlocked(name, import_): path = None parent = name.rpartition('.')[0] + parent_spec = None if parent: if parent not in sys.modules: _call_with_frames_removed(import_, parent) @@ -1000,15 +1136,24 @@ def _find_and_load_unlocked(name, import_): except AttributeError: msg = (_ERR_MSG + '; {!r} is not a package').format(name, parent) raise ModuleNotFoundError(msg, name=name) from None + parent_spec = parent_module.__spec__ + child = name.rpartition('.')[2] spec = _find_spec(name, path) if spec is None: raise ModuleNotFoundError(_ERR_MSG.format(name), name=name) else: - module = _load_unlocked(spec) + if parent_spec: + # Temporarily add child we are currently importing to parent's + # _uninitialized_submodules for circular import tracking. + parent_spec._uninitialized_submodules.append(child) + try: + module = _load_unlocked(spec) + finally: + if parent_spec: + parent_spec._uninitialized_submodules.pop() if parent: # Set the module as an attribute on its parent. parent_module = sys.modules[parent] - child = name.rpartition('.')[2] try: setattr(parent_module, child, module) except AttributeError: @@ -1022,17 +1167,28 @@ def _find_and_load_unlocked(name, import_): def _find_and_load(name, import_): """Find and load the module.""" - with _ModuleLockManager(name): - module = sys.modules.get(name, _NEEDS_LOADING) - if module is _NEEDS_LOADING: - return _find_and_load_unlocked(name, import_) + + # Optimization: we avoid unneeded module locking if the module + # already exists in sys.modules and is fully initialized. 
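The fast path added here hinges on one question: is the module already in sys.modules and past initialization? Using only public attributes (and simplifying the _NEEDS_LOADING sentinel to None), the check amounts to roughly this sketch:

    import sys

    def needs_import_lock(name):
        # Take the per-module import lock only when the module is absent
        # or its spec is still marked as initializing.
        module = sys.modules.get(name)
        if module is None:
            return True
        spec = getattr(module, '__spec__', None)
        return bool(getattr(spec, '_initializing', False))

    assert needs_import_lock('sys') is False           # fully imported
    assert needs_import_lock('surely_not_imported_x')  # unknown module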
+ module = sys.modules.get(name, _NEEDS_LOADING) + if (module is _NEEDS_LOADING or + getattr(getattr(module, "__spec__", None), "_initializing", False)): + with _ModuleLockManager(name): + module = sys.modules.get(name, _NEEDS_LOADING) + if module is _NEEDS_LOADING: + return _find_and_load_unlocked(name, import_) + + # Optimization: only call _bootstrap._lock_unlock_module() if + # module.__spec__._initializing is True. + # NOTE: because of this, initializing must be set *before* + # putting the new module in sys.modules. + _lock_unlock_module(name) if module is None: message = ('import of {} halted; ' 'None in sys.modules'.format(name)) raise ModuleNotFoundError(message, name=name) - _lock_unlock_module(name) return module @@ -1183,6 +1339,8 @@ def _setup(sys_module, _imp_module): continue spec = _spec_from_module(module, loader) _init_module_attrs(spec, module) + if loader is FrozenImporter: + loader._fix_up_module(module) # Directly load built-in modules needed during bootstrap. self_module = sys.modules[__name__] diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 49bcaea78d..f603a89f7f 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -352,16 +352,73 @@ def _write_atomic(path, data, mode=0o666): # Python 3.10b1 3437 (Undo making 'annotations' future by default - We like to dance among core devs!) # Python 3.10b1 3438 Safer line number table handling. # Python 3.10b1 3439 (Add ROT_N) +# Python 3.11a1 3450 Use exception table for unwinding ("zero cost" exception handling) +# Python 3.11a1 3451 (Add CALL_METHOD_KW) +# Python 3.11a1 3452 (drop nlocals from marshaled code objects) +# Python 3.11a1 3453 (add co_fastlocalnames and co_fastlocalkinds) +# Python 3.11a1 3454 (compute cell offsets relative to locals bpo-43693) +# Python 3.11a1 3455 (add MAKE_CELL bpo-43693) +# Python 3.11a1 3456 (interleave cell args bpo-43693) +# Python 3.11a1 3457 (Change localsplus to a bytes object bpo-43693) +# Python 3.11a1 3458 (imported objects now don't use LOAD_METHOD/CALL_METHOD) +# Python 3.11a1 3459 (PEP 657: add end line numbers and column offsets for instructions) +# Python 3.11a1 3460 (Add co_qualname field to PyCodeObject bpo-44530) +# Python 3.11a1 3461 (JUMP_ABSOLUTE must jump backwards) +# Python 3.11a2 3462 (bpo-44511: remove COPY_DICT_WITHOUT_KEYS, change +# MATCH_CLASS and MATCH_KEYS, and add COPY) +# Python 3.11a3 3463 (bpo-45711: JUMP_IF_NOT_EXC_MATCH no longer pops the +# active exception) +# Python 3.11a3 3464 (bpo-45636: Merge numeric BINARY_*/INPLACE_* into +# BINARY_OP) +# Python 3.11a3 3465 (Add COPY_FREE_VARS opcode) +# Python 3.11a4 3466 (bpo-45292: PEP-654 except*) +# Python 3.11a4 3467 (Change CALL_xxx opcodes) +# Python 3.11a4 3468 (Add SEND opcode) +# Python 3.11a4 3469 (bpo-45711: remove type, traceback from exc_info) +# Python 3.11a4 3470 (bpo-46221: PREP_RERAISE_STAR no longer pushes lasti) +# Python 3.11a4 3471 (bpo-46202: remove pop POP_EXCEPT_AND_RERAISE) +# Python 3.11a4 3472 (bpo-46009: replace GEN_START with POP_TOP) +# Python 3.11a4 3473 (Add POP_JUMP_IF_NOT_NONE/POP_JUMP_IF_NONE opcodes) +# Python 3.11a4 3474 (Add RESUME opcode) +# Python 3.11a5 3475 (Add RETURN_GENERATOR opcode) +# Python 3.11a5 3476 (Add ASYNC_GEN_WRAP opcode) +# Python 3.11a5 3477 (Replace DUP_TOP/DUP_TOP_TWO with COPY and +# ROT_TWO/ROT_THREE/ROT_FOUR/ROT_N with SWAP) +# Python 3.11a5 3478 (New CALL opcodes) +# Python 3.11a5 3479 (Add PUSH_NULL opcode) +# Python 3.11a5 3480 (New CALL opcodes, second 
iteration) +# Python 3.11a5 3481 (Use inline cache for BINARY_OP) +# Python 3.11a5 3482 (Use inline caching for UNPACK_SEQUENCE and LOAD_GLOBAL) +# Python 3.11a5 3483 (Use inline caching for COMPARE_OP and BINARY_SUBSCR) +# Python 3.11a5 3484 (Use inline caching for LOAD_ATTR, LOAD_METHOD, and +# STORE_ATTR) +# Python 3.11a5 3485 (Add an oparg to GET_AWAITABLE) +# Python 3.11a6 3486 (Use inline caching for PRECALL and CALL) +# Python 3.11a6 3487 (Remove the adaptive "oparg counter" mechanism) +# Python 3.11a6 3488 (LOAD_GLOBAL can push additional NULL) +# Python 3.11a6 3489 (Add JUMP_BACKWARD, remove JUMP_ABSOLUTE) +# Python 3.11a6 3490 (remove JUMP_IF_NOT_EXC_MATCH, add CHECK_EXC_MATCH) +# Python 3.11a6 3491 (remove JUMP_IF_NOT_EG_MATCH, add CHECK_EG_MATCH, +# add JUMP_BACKWARD_NO_INTERRUPT, make JUMP_NO_INTERRUPT virtual) +# Python 3.11a7 3492 (make POP_JUMP_IF_NONE/NOT_NONE/TRUE/FALSE relative) +# Python 3.11a7 3493 (Make JUMP_IF_TRUE_OR_POP/JUMP_IF_FALSE_OR_POP relative) +# Python 3.11a7 3494 (New location info table) +# Python 3.11b4 3495 (Set line number of module's RESUME instr to 0 per PEP 626) +# Python 3.12 will start with magic number 3500 + # # MAGIC must change whenever the bytecode emitted by the compiler may no # longer be understood by older implementations of the eval loop (usually # due to the addition of new opcodes). # +# Starting with Python 3.11, Python 3.n starts with magic number 2900+50n. +# # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array # in PC/launcher.c must also be updated. -MAGIC_NUMBER = (3439).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3495).to_bytes(2, 'little') + b'\r\n' + _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c _PYCACHE = '__pycache__' @@ -1172,7 +1229,7 @@ def __hash__(self): return hash(self.name) ^ hash(self.path) def create_module(self, spec): - """Create an unitialized extension module""" + """Create an uninitialized extension module""" module = _bootstrap._call_with_frames_removed( _imp.create_dynamic, spec) _bootstrap._verbose_message('extension module {!r} loaded from {!r}', @@ -1273,8 +1330,10 @@ def append(self, item): self._path.append(item) -# We use this exclusively in module_from_spec() for backward-compatibility. -class _NamespaceLoader: +# This class is actually exposed publicly in a namespace package's __loader__ +# attribute, so it should be available through a non-private name. +# https://bugs.python.org/issue35673 +class NamespaceLoader: def __init__(self, name, path, path_finder): self._path = _NamespacePath(name, path, path_finder) @@ -1285,7 +1344,7 @@ def module_repr(module): The method is deprecated. The import machinery does the job itself. """ - _warnings.warn("_NamespaceLoader.module_repr() is deprecated and " + _warnings.warn("NamespaceLoader.module_repr() is deprecated and " "slated for removal in Python 3.12", DeprecationWarning) return ''.format(module.__name__) @@ -1321,6 +1380,10 @@ def get_resource_reader(self, module): return NamespaceReader(self._path) +# We use this exclusively in module_from_spec() for backward-compatibility. 
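With the rename above, the loader of a namespace package is reachable under a public name; the private alias kept below exists only for backward compatibility. A quick sketch, assuming Python 3.11+ and using a made-up package name nspkg_demo:

    import importlib
    import os
    import sys
    import tempfile
    from importlib.machinery import NamespaceLoader

    root = tempfile.mkdtemp()
    os.mkdir(os.path.join(root, 'nspkg_demo'))   # no __init__.py -> namespace package
    sys.path.insert(0, root)
    try:
        pkg = importlib.import_module('nspkg_demo')
        assert isinstance(pkg.__loader__, NamespaceLoader)
    finally:
        sys.path.remove(root)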
+_NamespaceLoader = NamespaceLoader + + # Finders ##################################################################### class PathFinder: @@ -1332,7 +1395,9 @@ def invalidate_caches(): """Call the invalidate_caches() method on all path entry finders stored in sys.path_importer_caches (where implemented).""" for name, finder in list(sys.path_importer_cache.items()): - if finder is None: + # Drop entry if finder name is a relative path. The current + # working directory may have changed. + if finder is None or not _path_isabs(name): del sys.path_importer_cache[name] elif hasattr(finder, 'invalidate_caches'): finder.invalidate_caches() @@ -1403,7 +1468,7 @@ def _get_spec(cls, fullname, path, target=None): # the list of paths that will become its __path__ namespace_path = [] for entry in path: - if not isinstance(entry, (str, bytes)): + if not isinstance(entry, str): continue finder = cls._path_importer_cache(entry) if finder is not None: @@ -1500,9 +1565,12 @@ def __init__(self, path, *loader_details): loaders.extend((suffix, loader) for suffix in suffixes) self._loaders = loaders # Base (directory) path - self.path = path or '.' - if not _path_isabs(self.path): - self.path = _path_join(_os.getcwd(), self.path) + if not path or path == '.': + self.path = _os.getcwd() + elif not _path_isabs(path): + self.path = _path_join(_os.getcwd(), path) + else: + self.path = path self._path_mtime = -1 self._path_cache = set() self._relaxed_path_cache = set() diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index 0b4a3f8071..3fa151f390 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -14,8 +14,19 @@ from ._abc import Loader import abc import warnings -from typing import BinaryIO, Iterable, Text -from typing import Protocol, runtime_checkable + +# for compatibility with Python 3.10 +from .resources.abc import ResourceReader, Traversable, TraversableResources + + +__all__ = [ + 'Loader', 'Finder', 'MetaPathFinder', 'PathEntryFinder', + 'ResourceLoader', 'InspectLoader', 'ExecutionLoader', + 'FileLoader', 'SourceLoader', + + # for compatibility with Python 3.10 + 'ResourceReader', 'Traversable', 'TraversableResources', +] def _register(abstract_cls, *classes): @@ -213,7 +224,7 @@ def source_to_code(data, path=''): exec_module = _bootstrap_external._LoaderBasics.exec_module load_module = _bootstrap_external._LoaderBasics.load_module -_register(InspectLoader, machinery.BuiltinImporter, machinery.FrozenImporter) +_register(InspectLoader, machinery.BuiltinImporter, machinery.FrozenImporter, machinery.NamespaceLoader) class ExecutionLoader(InspectLoader): @@ -307,136 +318,3 @@ def set_data(self, path, data): """ _register(SourceLoader, machinery.SourceFileLoader) - - -class ResourceReader(metaclass=abc.ABCMeta): - """Abstract base class for loaders to provide resource reading support.""" - - @abc.abstractmethod - def open_resource(self, resource: Text) -> BinaryIO: - """Return an opened, file-like object for binary reading. - - The 'resource' argument is expected to represent only a file name. - If the resource cannot be found, FileNotFoundError is raised. - """ - # This deliberately raises FileNotFoundError instead of - # NotImplementedError so that if this method is accidentally called, - # it'll still do the right thing. - raise FileNotFoundError - - @abc.abstractmethod - def resource_path(self, resource: Text) -> Text: - """Return the file system path to the specified resource. - - The 'resource' argument is expected to represent only a file name. 
- If the resource does not exist on the file system, raise - FileNotFoundError. - """ - # This deliberately raises FileNotFoundError instead of - # NotImplementedError so that if this method is accidentally called, - # it'll still do the right thing. - raise FileNotFoundError - - @abc.abstractmethod - def is_resource(self, path: Text) -> bool: - """Return True if the named 'path' is a resource. - - Files are resources, directories are not. - """ - raise FileNotFoundError - - @abc.abstractmethod - def contents(self) -> Iterable[str]: - """Return an iterable of entries in `package`.""" - raise FileNotFoundError - - -@runtime_checkable -class Traversable(Protocol): - """ - An object with a subset of pathlib.Path methods suitable for - traversing directories and opening files. - """ - - @abc.abstractmethod - def iterdir(self): - """ - Yield Traversable objects in self - """ - - def read_bytes(self): - """ - Read contents of self as bytes - """ - with self.open('rb') as strm: - return strm.read() - - def read_text(self, encoding=None): - """ - Read contents of self as text - """ - with self.open(encoding=encoding) as strm: - return strm.read() - - @abc.abstractmethod - def is_dir(self) -> bool: - """ - Return True if self is a dir - """ - - @abc.abstractmethod - def is_file(self) -> bool: - """ - Return True if self is a file - """ - - @abc.abstractmethod - def joinpath(self, child): - """ - Return Traversable child in self - """ - - def __truediv__(self, child): - """ - Return Traversable child in self - """ - return self.joinpath(child) - - @abc.abstractmethod - def open(self, mode='r', *args, **kwargs): - """ - mode may be 'r' or 'rb' to open as text or binary. Return a handle - suitable for reading (same as pathlib.Path.open). - - When opening as text, accepts encoding parameters such as those - accepted by io.TextIOWrapper. - """ - - @abc.abstractproperty - def name(self) -> str: - """ - The base name of this object without any parent references. - """ - - -class TraversableResources(ResourceReader): - """ - The required interface for providing traversable - resources. - """ - - @abc.abstractmethod - def files(self): - """Return a Traversable object for the loaded package.""" - - def open_resource(self, resource): - return self.files().joinpath(resource).open('rb') - - def resource_path(self, resource): - raise FileNotFoundError(resource) - - def is_resource(self, path): - return self.files().joinpath(path).is_file() - - def contents(self): - return (item.name for item in self.files().iterdir()) diff --git a/Lib/importlib/machinery.py b/Lib/importlib/machinery.py index 9a7757fb6e..d9a19a13f7 100644 --- a/Lib/importlib/machinery.py +++ b/Lib/importlib/machinery.py @@ -12,6 +12,7 @@ from ._bootstrap_external import SourceFileLoader from ._bootstrap_external import SourcelessFileLoader from ._bootstrap_external import ExtensionFileLoader +from ._bootstrap_external import NamespaceLoader def all_suffixes(): diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py index 7181ed8757..68828269fc 100644 --- a/Lib/importlib/metadata/__init__.py +++ b/Lib/importlib/metadata/__init__.py @@ -15,10 +15,9 @@ import collections from . 
import _adapters, _meta -from ._meta import PackageMetadata from ._collections import FreezableDefaultDict, Pair -from ._functools import method_cache -from ._itertools import unique_everseen +from ._functools import method_cache, pass_none +from ._itertools import always_iterable, unique_everseen from ._meta import PackageMetadata, SimplePath from contextlib import suppress @@ -127,8 +126,33 @@ def valid(line): return line and not line.startswith('#') -class EntryPoint( - collections.namedtuple('EntryPointBase', 'name value group')): +class DeprecatedTuple: + """ + Provide subscript item access for backward compatibility. + + >>> recwarn = getfixture('recwarn') + >>> ep = EntryPoint(name='name', value='value', group='group') + >>> ep[:] + ('name', 'value', 'group') + >>> ep[0] + 'name' + >>> len(recwarn) + 1 + """ + + _warn = functools.partial( + warnings.warn, + "EntryPoint tuple interface is deprecated. Access members by name.", + DeprecationWarning, + stacklevel=2, + ) + + def __getitem__(self, item): + self._warn() + return self._key()[item] + + +class EntryPoint(DeprecatedTuple): """An entry point as defined by Python packaging conventions. See `the packaging docs on entry points @@ -166,8 +190,15 @@ class EntryPoint( following the attr, and following any extras. """ + name: str + value: str + group: str + dist: Optional['Distribution'] = None + def __init__(self, name, value, group): + vars(self).update(name=name, value=value, group=group) + def load(self): """Load the entry point from its definition. If only a module is indicated by the value, return that module. Otherwise, @@ -194,7 +225,7 @@ def extras(self): return re.findall(r'\w+', match.group('extras') or '') def _for(self, dist): - self.dist = dist + vars(self).update(dist=dist) return self def __iter__(self): @@ -208,12 +239,6 @@ def __iter__(self): warnings.warn(msg, DeprecationWarning) return iter((self.name, self)) - def __reduce__(self): - return ( - self.__class__, - (self.name, self.value, self.group), - ) - def matches(self, **params): """ EntryPoint matches the given parameters. 
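Since EntryPoint is no longer a namedtuple, attribute access is the supported interface; tuple indexing goes through the DeprecatedTuple shim above and warns. A small sketch with made-up name/value/group:

    from importlib.metadata import EntryPoint

    ep = EntryPoint(name='demo', value='demo.cli:main', group='console_scripts')
    assert (ep.module, ep.attr, ep.extras) == ('demo.cli', 'main', [])
    try:
        ep.name = 'other'        # instances are immutable now
    except AttributeError:
        pass
    # ep[0] still returns 'demo', but via DeprecatedTuple it emits a
    # DeprecationWarning pointing users at attribute access.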
@@ -237,6 +262,27 @@ def matches(self, **params): attrs = (getattr(self, param) for param in params) return all(map(operator.eq, params.values(), attrs)) + def _key(self): + return self.name, self.value, self.group + + def __lt__(self, other): + return self._key() < other._key() + + def __eq__(self, other): + return self._key() == other._key() + + def __setattr__(self, name, value): + raise AttributeError("EntryPoint objects are immutable.") + + def __repr__(self): + return ( + f'EntryPoint(name={self.name!r}, value={self.value!r}, ' + f'group={self.group!r})' + ) + + def __hash__(self): + return hash(self._key()) + class DeprecatedList(list): """ @@ -279,37 +325,20 @@ class DeprecatedList(list): stacklevel=2, ) - def __setitem__(self, *args, **kwargs): - self._warn() - return super().__setitem__(*args, **kwargs) - - def __delitem__(self, *args, **kwargs): - self._warn() - return super().__delitem__(*args, **kwargs) - - def append(self, *args, **kwargs): - self._warn() - return super().append(*args, **kwargs) - - def reverse(self, *args, **kwargs): - self._warn() - return super().reverse(*args, **kwargs) - - def extend(self, *args, **kwargs): - self._warn() - return super().extend(*args, **kwargs) - - def pop(self, *args, **kwargs): - self._warn() - return super().pop(*args, **kwargs) + def _wrap_deprecated_method(method_name: str): # type: ignore + def wrapped(self, *args, **kwargs): + self._warn() + return getattr(super(), method_name)(*args, **kwargs) - def remove(self, *args, **kwargs): - self._warn() - return super().remove(*args, **kwargs) + return method_name, wrapped - def __iadd__(self, *args, **kwargs): - self._warn() - return super().__iadd__(*args, **kwargs) + locals().update( + map( + _wrap_deprecated_method, + '__setitem__ __delitem__ append reverse extend pop remove ' + '__iadd__ insert sort'.split(), + ) + ) def __add__(self, other): if not isinstance(other, tuple): @@ -317,14 +346,6 @@ def __add__(self, other): other = tuple(other) return self.__class__(tuple(self) + other) - def insert(self, *args, **kwargs): - self._warn() - return super().insert(*args, **kwargs) - - def sort(self, *args, **kwargs): - self._warn() - return super().sort(*args, **kwargs) - def __eq__(self, other): if not isinstance(other, tuple): self._warn() @@ -369,7 +390,7 @@ def names(self): """ Return the set of all names of all entry points. """ - return set(ep.name for ep in self) + return {ep.name for ep in self} @property def groups(self): @@ -380,21 +401,17 @@ def groups(self): >>> EntryPoints().groups set() """ - return set(ep.group for ep in self) + return {ep.group for ep in self} @classmethod def _from_text_for(cls, text, dist): return cls(ep._for(dist) for ep in cls._from_text(text)) - @classmethod - def _from_text(cls, text): - return itertools.starmap(EntryPoint, cls._parse_groups(text or '')) - @staticmethod - def _parse_groups(text): + def _from_text(text): return ( - (item.value.name, item.value.value, item.name) - for item in Sectioned.section_pairs(text) + EntryPoint(name=item.value.name, value=item.value.value, group=item.name) + for item in Sectioned.section_pairs(text or '') ) @@ -536,7 +553,7 @@ def locate_file(self, path): """ @classmethod - def from_name(cls, name): + def from_name(cls, name: str): """Return the Distribution for the given package name. :param name: The name of the distribution package to search for. @@ -544,13 +561,13 @@ def from_name(cls, name): package, if found. :raises PackageNotFoundError: When the named package's distribution metadata cannot be found. 
+ :raises ValueError: When an invalid value is supplied for name. """ - for resolver in cls._discover_resolvers(): - dists = resolver(DistributionFinder.Context(name=name)) - dist = next(iter(dists), None) - if dist is not None: - return dist - else: + if not name: + raise ValueError("A distribution name is required.") + try: + return next(cls.discover(name=name)) + except StopIteration: raise PackageNotFoundError(name) @classmethod @@ -588,18 +605,6 @@ def _discover_resolvers(): ) return filter(None, declared) - @classmethod - def _local(cls, root='.'): - from pep517 import build, meta - - system = build.compat_system(root) - builder = functools.partial( - meta.build, - source_dir=root, - system=system, - ) - return PathDistribution(zipfile.Path(meta.build_as_zip(builder))) - @property def metadata(self) -> _meta.PackageMetadata: """Return the parsed metadata for this Distribution. @@ -647,7 +652,6 @@ def files(self): missing. Result may be empty if the metadata exists but is empty. """ - file_lines = self._read_files_distinfo() or self._read_files_egginfo() def make_file(name, hash=None, size_str=None): result = PackagePath(name) @@ -656,7 +660,11 @@ def make_file(name, hash=None, size_str=None): result.dist = self return result - return file_lines and list(starmap(make_file, csv.reader(file_lines))) + @pass_none + def make_files(lines): + return list(starmap(make_file, csv.reader(lines))) + + return make_files(self._read_files_distinfo() or self._read_files_egginfo()) def _read_files_distinfo(self): """ @@ -684,7 +692,7 @@ def _read_dist_info_reqs(self): def _read_egg_info_reqs(self): source = self.read_text('requires.txt') - return None if source is None else self._deps_from_requires_text(source) + return pass_none(self._deps_from_requires_text)(source) @classmethod def _deps_from_requires_text(cls, source): @@ -778,6 +786,9 @@ class FastPath: """ Micro-optimized class for searching a path for children. + + >>> FastPath('').children() + ['...'] """ @functools.lru_cache() # type: ignore @@ -944,13 +955,26 @@ def _normalized_name(self): normalized name from the file system path. """ stem = os.path.basename(str(self._path)) - return self._name_from_stem(stem) or super()._normalized_name + return ( + pass_none(Prepared.normalize)(self._name_from_stem(stem)) + or super()._normalized_name + ) - def _name_from_stem(self, stem): - name, ext = os.path.splitext(stem) + @staticmethod + def _name_from_stem(stem): + """ + >>> PathDistribution._name_from_stem('foo-3.0.egg-info') + 'foo' + >>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info') + 'CherryPy' + >>> PathDistribution._name_from_stem('face.egg-info') + 'face' + >>> PathDistribution._name_from_stem('foo.bar') + """ + filename, ext = os.path.splitext(stem) if ext not in ('.dist-info', '.egg-info'): return - name, sep, rest = stem.partition('-') + name, sep, rest = filename.partition('-') return name @@ -990,6 +1014,15 @@ def version(distribution_name): return distribution(distribution_name).version +_unique = functools.partial( + unique_everseen, + key=operator.attrgetter('_normalized_name'), +) +""" +Wrapper for ``distributions`` to return unique distributions by name. +""" + + def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: """Return EntryPoint objects for all installed packages. @@ -1007,10 +1040,8 @@ def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: :return: EntryPoints or SelectableGroups for all installed packages. 
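For example, the keyword-selection form described here returns an EntryPoints collection rather than the legacy dict of lists (the actual names printed depend on what is installed in the environment):

    from importlib.metadata import entry_points

    scripts = entry_points(group='console_scripts')
    print(sorted(ep.name for ep in scripts))   # environment-dependent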
""" - norm_name = operator.attrgetter('_normalized_name') - unique = functools.partial(unique_everseen, key=norm_name) eps = itertools.chain.from_iterable( - dist.entry_points for dist in unique(distributions()) + dist.entry_points for dist in _unique(distributions()) ) return SelectableGroups.load(eps).select(**params) @@ -1046,6 +1077,18 @@ def packages_distributions() -> Mapping[str, List[str]]: """ pkg_to_dist = collections.defaultdict(list) for dist in distributions(): - for pkg in (dist.read_text('top_level.txt') or '').split(): + for pkg in _top_level_declared(dist) or _top_level_inferred(dist): pkg_to_dist[pkg].append(dist.metadata['Name']) return dict(pkg_to_dist) + + +def _top_level_declared(dist): + return (dist.read_text('top_level.txt') or '').split() + + +def _top_level_inferred(dist): + return { + f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name + for f in always_iterable(dist.files) + if f.suffix == ".py" + } diff --git a/Lib/importlib/metadata/_functools.py b/Lib/importlib/metadata/_functools.py index 73f50d00bc..71f66bd03c 100644 --- a/Lib/importlib/metadata/_functools.py +++ b/Lib/importlib/metadata/_functools.py @@ -83,3 +83,22 @@ def wrapper(self, *args, **kwargs): wrapper.cache_clear = lambda: None return wrapper + + +# From jaraco.functools 3.3 +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper diff --git a/Lib/importlib/metadata/_itertools.py b/Lib/importlib/metadata/_itertools.py index dd45f2f096..d4ca9b9140 100644 --- a/Lib/importlib/metadata/_itertools.py +++ b/Lib/importlib/metadata/_itertools.py @@ -17,3 +17,57 @@ def unique_everseen(iterable, key=None): if k not in seen: seen_add(k) yield element + + +# copied from more_itertools 8.8 +def always_iterable(obj, base_type=(str, bytes)): + """If *obj* is iterable, return an iterator over its items:: + + >>> obj = (1, 2, 3) + >>> list(always_iterable(obj)) + [1, 2, 3] + + If *obj* is not iterable, return a one-item iterable containing *obj*:: + + >>> obj = 1 + >>> list(always_iterable(obj)) + [1] + + If *obj* is ``None``, return an empty iterable: + + >>> obj = None + >>> list(always_iterable(None)) + [] + + By default, binary and text strings are not considered iterable:: + + >>> obj = 'foo' + >>> list(always_iterable(obj)) + ['foo'] + + If *base_type* is set, objects for which ``isinstance(obj, base_type)`` + returns ``True`` won't be considered iterable. + + >>> obj = {'a': 1} + >>> list(always_iterable(obj)) # Iterate over the dict's keys + ['a'] + >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit + [{'a': 1}] + + Set *base_type* to ``None`` to avoid any special handling and treat objects + Python considers iterable as iterable: + + >>> obj = 'foo' + >>> list(always_iterable(obj, base_type=None)) + ['f', 'o', 'o'] + """ + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) diff --git a/Lib/importlib/metadata/_meta.py b/Lib/importlib/metadata/_meta.py index 1a6edbf957..d5c0576194 100644 --- a/Lib/importlib/metadata/_meta.py +++ b/Lib/importlib/metadata/_meta.py @@ -37,7 +37,7 @@ class SimplePath(Protocol): def joinpath(self) -> 'SimplePath': ... 
# pragma: no cover - def __div__(self) -> 'SimplePath': + def __truediv__(self) -> 'SimplePath': ... # pragma: no cover def parent(self) -> 'SimplePath': diff --git a/Lib/importlib/metadata/_text.py b/Lib/importlib/metadata/_text.py index 766979d93c..c88cfbb234 100644 --- a/Lib/importlib/metadata/_text.py +++ b/Lib/importlib/metadata/_text.py @@ -80,7 +80,7 @@ def __hash__(self): return hash(self.lower()) def __contains__(self, other): - return super(FoldedCase, self).lower().__contains__(other.lower()) + return super().lower().__contains__(other.lower()) def in_(self, other): "Does self appear in other?" @@ -89,7 +89,7 @@ def in_(self, other): # cache lower since it's likely to be called frequently. @method_cache def lower(self): - return super(FoldedCase, self).lower() + return super().lower() def index(self, sub): return self.lower().index(sub.lower()) diff --git a/Lib/importlib/readers.py b/Lib/importlib/readers.py index 41089c071d..df7fb92e5c 100644 --- a/Lib/importlib/readers.py +++ b/Lib/importlib/readers.py @@ -1,123 +1,12 @@ -import collections -import zipfile -import pathlib -from . import abc +""" +Compatibility shim for .resources.readers as found on Python 3.10. +Consumers that can rely on Python 3.11 should use the other +module directly. +""" -def remove_duplicates(items): - return iter(collections.OrderedDict.fromkeys(items)) +from .resources.readers import ( + FileReader, ZipReader, MultiplexedPath, NamespaceReader, +) - -class FileReader(abc.TraversableResources): - def __init__(self, loader): - self.path = pathlib.Path(loader.path).parent - - def resource_path(self, resource): - """ - Return the file system path to prevent - `resources.path()` from creating a temporary - copy. - """ - return str(self.path.joinpath(resource)) - - def files(self): - return self.path - - -class ZipReader(abc.TraversableResources): - def __init__(self, loader, module): - _, _, name = module.rpartition('.') - self.prefix = loader.prefix.replace('\\', '/') + name + '/' - self.archive = loader.archive - - def open_resource(self, resource): - try: - return super().open_resource(resource) - except KeyError as exc: - raise FileNotFoundError(exc.args[0]) - - def is_resource(self, path): - # workaround for `zipfile.Path.is_file` returning true - # for non-existent paths. - target = self.files().joinpath(path) - return target.is_file() and target.exists() - - def files(self): - return zipfile.Path(self.archive, self.prefix) - - -class MultiplexedPath(abc.Traversable): - """ - Given a series of Traversable objects, implement a merged - version of the interface across all objects. Useful for - namespace packages which may be multihomed at a single - name. 
- """ - - def __init__(self, *paths): - self._paths = list(map(pathlib.Path, remove_duplicates(paths))) - if not self._paths: - message = 'MultiplexedPath must contain at least one path' - raise FileNotFoundError(message) - if not all(path.is_dir() for path in self._paths): - raise NotADirectoryError('MultiplexedPath only supports directories') - - def iterdir(self): - visited = [] - for path in self._paths: - for file in path.iterdir(): - if file.name in visited: - continue - visited.append(file.name) - yield file - - def read_bytes(self): - raise FileNotFoundError(f'{self} is not a file') - - def read_text(self, *args, **kwargs): - raise FileNotFoundError(f'{self} is not a file') - - def is_dir(self): - return True - - def is_file(self): - return False - - def joinpath(self, child): - # first try to find child in current paths - for file in self.iterdir(): - if file.name == child: - return file - # if it does not exist, construct it with the first path - return self._paths[0] / child - - __truediv__ = joinpath - - def open(self, *args, **kwargs): - raise FileNotFoundError(f'{self} is not a file') - - @property - def name(self): - return self._paths[0].name - - def __repr__(self): - paths = ', '.join(f"'{path}'" for path in self._paths) - return f'MultiplexedPath({paths})' - - -class NamespaceReader(abc.TraversableResources): - def __init__(self, namespace_path): - if 'NamespacePath' not in str(namespace_path): - raise ValueError('Invalid path') - self.path = MultiplexedPath(*list(namespace_path)) - - def resource_path(self, resource): - """ - Return the file system path to prevent - `resources.path()` from creating a temporary - copy. - """ - return str(self.path.joinpath(resource)) - - def files(self): - return self.path +__all__ = ['FileReader', 'ZipReader', 'MultiplexedPath', 'NamespaceReader'] diff --git a/Lib/importlib/resources.py b/Lib/importlib/resources.py deleted file mode 100644 index 8a98663ff8..0000000000 --- a/Lib/importlib/resources.py +++ /dev/null @@ -1,185 +0,0 @@ -import os -import io - -from . import _common -from ._common import as_file, files -from .abc import ResourceReader -from contextlib import suppress -from importlib.abc import ResourceLoader -from importlib.machinery import ModuleSpec -from io import BytesIO, TextIOWrapper -from pathlib import Path -from types import ModuleType -from typing import ContextManager, Iterable, Union -from typing import cast -from typing.io import BinaryIO, TextIO -from collections.abc import Sequence -from functools import singledispatch - - -__all__ = [ - 'Package', - 'Resource', - 'ResourceReader', - 'as_file', - 'contents', - 'files', - 'is_resource', - 'open_binary', - 'open_text', - 'path', - 'read_binary', - 'read_text', -] - - -Package = Union[str, ModuleType] -Resource = Union[str, os.PathLike] - - -def open_binary(package: Package, resource: Resource) -> BinaryIO: - """Return a file-like object opened for binary reading of the resource.""" - resource = _common.normalize_path(resource) - package = _common.get_package(package) - reader = _common.get_resource_reader(package) - if reader is not None: - return reader.open_resource(resource) - spec = cast(ModuleSpec, package.__spec__) - # Using pathlib doesn't work well here due to the lack of 'strict' - # argument for pathlib.Path.resolve() prior to Python 3.6. 
- if spec.submodule_search_locations is not None: - paths = spec.submodule_search_locations - elif spec.origin is not None: - paths = [os.path.dirname(os.path.abspath(spec.origin))] - - for package_path in paths: - full_path = os.path.join(package_path, resource) - try: - return open(full_path, mode='rb') - except OSError: - # Just assume the loader is a resource loader; all the relevant - # importlib.machinery loaders are and an AttributeError for - # get_data() will make it clear what is needed from the loader. - loader = cast(ResourceLoader, spec.loader) - data = None - if hasattr(spec.loader, 'get_data'): - with suppress(OSError): - data = loader.get_data(full_path) - if data is not None: - return BytesIO(data) - - raise FileNotFoundError(f'{resource!r} resource not found in {spec.name!r}') - - -def open_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict', -) -> TextIO: - """Return a file-like object opened for text reading of the resource.""" - return TextIOWrapper( - open_binary(package, resource), encoding=encoding, errors=errors - ) - - -def read_binary(package: Package, resource: Resource) -> bytes: - """Return the binary contents of the resource.""" - with open_binary(package, resource) as fp: - return fp.read() - - -def read_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict', -) -> str: - """Return the decoded string of the resource. - - The decoding-related arguments have the same semantics as those of - bytes.decode(). - """ - with open_text(package, resource, encoding, errors) as fp: - return fp.read() - - -def path( - package: Package, - resource: Resource, -) -> 'ContextManager[Path]': - """A context manager providing a file path object to the resource. - - If the resource does not already exist on its own on the file system, - a temporary file will be created. If the file was created, the file - will be deleted upon exiting the context manager (no exception is - raised if the file was deleted prior to the context manager - exiting). - """ - reader = _common.get_resource_reader(_common.get_package(package)) - return ( - _path_from_reader(reader, _common.normalize_path(resource)) - if reader - else _common.as_file( - _common.files(package).joinpath(_common.normalize_path(resource)) - ) - ) - - -def _path_from_reader(reader, resource): - return _path_from_resource_path(reader, resource) or _path_from_open_resource( - reader, resource - ) - - -def _path_from_resource_path(reader, resource): - with suppress(FileNotFoundError): - return Path(reader.resource_path(resource)) - - -def _path_from_open_resource(reader, resource): - saved = io.BytesIO(reader.open_resource(resource).read()) - return _common._tempfile(saved.read, suffix=resource) - - -def is_resource(package: Package, name: str) -> bool: - """True if 'name' is a resource inside 'package'. - - Directories are *not* resources. - """ - package = _common.get_package(package) - _common.normalize_path(name) - reader = _common.get_resource_reader(package) - if reader is not None: - return reader.is_resource(name) - package_contents = set(contents(package)) - if name not in package_contents: - return False - return (_common.from_package(package) / name).is_file() - - -def contents(package: Package) -> Iterable[str]: - """Return an iterable of entries in 'package'. - - Note that not all entries are resources. Specifically, directories are - not considered resources. 
Use `is_resource()` on each entry returned here - to check if it is a resource or not. - """ - package = _common.get_package(package) - reader = _common.get_resource_reader(package) - if reader is not None: - return _ensure_sequence(reader.contents()) - transversable = _common.from_package(package) - if transversable.is_dir(): - return list(item.name for item in transversable.iterdir()) - return [] - - -@singledispatch -def _ensure_sequence(iterable): - return list(iterable) - - -@_ensure_sequence.register(Sequence) -def _(iterable): - return iterable diff --git a/Lib/importlib/resources/__init__.py b/Lib/importlib/resources/__init__.py new file mode 100644 index 0000000000..34e3a9950c --- /dev/null +++ b/Lib/importlib/resources/__init__.py @@ -0,0 +1,36 @@ +"""Read resources contained within a package.""" + +from ._common import ( + as_file, + files, + Package, +) + +from ._legacy import ( + contents, + open_binary, + read_binary, + open_text, + read_text, + is_resource, + path, + Resource, +) + +from .abc import ResourceReader + + +__all__ = [ + 'Package', + 'Resource', + 'ResourceReader', + 'as_file', + 'contents', + 'files', + 'is_resource', + 'open_binary', + 'open_text', + 'path', + 'read_binary', + 'read_text', +] diff --git a/Lib/importlib/resources/_adapters.py b/Lib/importlib/resources/_adapters.py new file mode 100644 index 0000000000..ea363d86a5 --- /dev/null +++ b/Lib/importlib/resources/_adapters.py @@ -0,0 +1,170 @@ +from contextlib import suppress +from io import TextIOWrapper + +from . import abc + + +class SpecLoaderAdapter: + """ + Adapt a package spec to adapt the underlying loader. + """ + + def __init__(self, spec, adapter=lambda spec: spec.loader): + self.spec = spec + self.loader = adapter(spec) + + def __getattr__(self, name): + return getattr(self.spec, name) + + +class TraversableResourcesLoader: + """ + Adapt a loader to provide TraversableResources. + """ + + def __init__(self, spec): + self.spec = spec + + def get_resource_reader(self, name): + return CompatibilityFiles(self.spec)._native() + + +def _io_wrapper(file, mode='r', *args, **kwargs): + if mode == 'r': + return TextIOWrapper(file, *args, **kwargs) + elif mode == 'rb': + return file + raise ValueError( + "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode) + ) + + +class CompatibilityFiles: + """ + Adapter for an existing or non-existent resource reader + to provide a compatibility .files(). + """ + + class SpecPath(abc.Traversable): + """ + Path tied to a module spec. + Can be read and exposes the resource reader children. + """ + + def __init__(self, spec, reader): + self._spec = spec + self._reader = reader + + def iterdir(self): + if not self._reader: + return iter(()) + return iter( + CompatibilityFiles.ChildPath(self._reader, path) + for path in self._reader.contents() + ) + + def is_file(self): + return False + + is_dir = is_file + + def joinpath(self, other): + if not self._reader: + return CompatibilityFiles.OrphanPath(other) + return CompatibilityFiles.ChildPath(self._reader, other) + + @property + def name(self): + return self._spec.name + + def open(self, mode='r', *args, **kwargs): + return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs) + + class ChildPath(abc.Traversable): + """ + Path tied to a resource reader child. + Can be read but doesn't expose any meaningful children. 
+ """ + + def __init__(self, reader, name): + self._reader = reader + self._name = name + + def iterdir(self): + return iter(()) + + def is_file(self): + return self._reader.is_resource(self.name) + + def is_dir(self): + return not self.is_file() + + def joinpath(self, other): + return CompatibilityFiles.OrphanPath(self.name, other) + + @property + def name(self): + return self._name + + def open(self, mode='r', *args, **kwargs): + return _io_wrapper( + self._reader.open_resource(self.name), mode, *args, **kwargs + ) + + class OrphanPath(abc.Traversable): + """ + Orphan path, not tied to a module spec or resource reader. + Can't be read and doesn't expose any meaningful children. + """ + + def __init__(self, *path_parts): + if len(path_parts) < 1: + raise ValueError('Need at least one path part to construct a path') + self._path = path_parts + + def iterdir(self): + return iter(()) + + def is_file(self): + return False + + is_dir = is_file + + def joinpath(self, other): + return CompatibilityFiles.OrphanPath(*self._path, other) + + @property + def name(self): + return self._path[-1] + + def open(self, mode='r', *args, **kwargs): + raise FileNotFoundError("Can't open orphan path") + + def __init__(self, spec): + self.spec = spec + + @property + def _reader(self): + with suppress(AttributeError): + return self.spec.loader.get_resource_reader(self.spec.name) + + def _native(self): + """ + Return the native reader if it supports files(). + """ + reader = self._reader + return reader if hasattr(reader, 'files') else self + + def __getattr__(self, attr): + return getattr(self._reader, attr) + + def files(self): + return CompatibilityFiles.SpecPath(self.spec, self._reader) + + +def wrap_spec(package): + """ + Construct a package spec with traversable compatibility + on the spec/loader/reader. + """ + return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/Lib/importlib/_common.py b/Lib/importlib/resources/_common.py similarity index 85% rename from Lib/importlib/_common.py rename to Lib/importlib/resources/_common.py index 84144c038c..ca1fa8ab2f 100644 --- a/Lib/importlib/_common.py +++ b/Lib/importlib/resources/_common.py @@ -6,7 +6,7 @@ import types import importlib -from typing import Union, Any, Optional +from typing import Union, Optional from .abc import ResourceReader, Traversable from ._adapters import wrap_spec @@ -22,19 +22,6 @@ def files(package): return from_package(get_package(package)) -def normalize_path(path): - # type: (Any) -> str - """Normalize a path by ensuring it is a string. - - If the resulting string contains path separators, an exception is raised. - """ - str_path = str(path) - parent, file_name = os.path.split(str_path) - if parent: - raise ValueError(f'{path!r} must be only a file name') - return file_name - - def get_resource_reader(package): # type: (types.ModuleType) -> Optional[ResourceReader] """ @@ -89,8 +76,10 @@ def _tempfile(reader, suffix='', # properly. 
fd, raw_path = tempfile.mkstemp(suffix=suffix) try: - os.write(fd, reader()) - os.close(fd) + try: + os.write(fd, reader()) + finally: + os.close(fd) del reader yield pathlib.Path(raw_path) finally: diff --git a/Lib/importlib/resources/_itertools.py b/Lib/importlib/resources/_itertools.py new file mode 100644 index 0000000000..cce05582ff --- /dev/null +++ b/Lib/importlib/resources/_itertools.py @@ -0,0 +1,35 @@ +from itertools import filterfalse + +from typing import ( + Callable, + Iterable, + Iterator, + Optional, + Set, + TypeVar, + Union, +) + +# Type and type variable definitions +_T = TypeVar('_T') +_U = TypeVar('_U') + + +def unique_everseen( + iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None +) -> Iterator[_T]: + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen: Set[Union[_T, _U]] = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element diff --git a/Lib/importlib/resources/_legacy.py b/Lib/importlib/resources/_legacy.py new file mode 100644 index 0000000000..1d5d3f1fbb --- /dev/null +++ b/Lib/importlib/resources/_legacy.py @@ -0,0 +1,121 @@ +import functools +import os +import pathlib +import types +import warnings + +from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any + +from . import _common + +Package = Union[types.ModuleType, str] +Resource = str + + +def deprecated(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + warnings.warn( + f"{func.__name__} is deprecated. Use files() instead. " + "Refer to https://importlib-resources.readthedocs.io" + "/en/latest/using.html#migrating-from-legacy for migration advice.", + DeprecationWarning, + stacklevel=2, + ) + return func(*args, **kwargs) + + return wrapper + + +def normalize_path(path): + # type: (Any) -> str + """Normalize a path by ensuring it is a string. + + If the resulting string contains path separators, an exception is raised. + """ + str_path = str(path) + parent, file_name = os.path.split(str_path) + if parent: + raise ValueError(f'{path!r} must be only a file name') + return file_name + + +@deprecated +def open_binary(package: Package, resource: Resource) -> BinaryIO: + """Return a file-like object opened for binary reading of the resource.""" + return (_common.files(package) / normalize_path(resource)).open('rb') + + +@deprecated +def read_binary(package: Package, resource: Resource) -> bytes: + """Return the binary contents of the resource.""" + return (_common.files(package) / normalize_path(resource)).read_bytes() + + +@deprecated +def open_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict', +) -> TextIO: + """Return a file-like object opened for text reading of the resource.""" + return (_common.files(package) / normalize_path(resource)).open( + 'r', encoding=encoding, errors=errors + ) + + +@deprecated +def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict', +) -> str: + """Return the decoded string of the resource. + + The decoding-related arguments have the same semantics as those of + bytes.decode(). 
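Each deprecated helper above now routes through files(); the migration it nudges users toward looks roughly like this, using the stdlib email package purely as a convenient on-disk example:

    from importlib import resources

    # Deprecated:   resources.contents('email')
    entries = {entry.name for entry in resources.files('email').iterdir()}
    assert 'message.py' in entries

    # Deprecated:   resources.read_text('email', 'message.py')
    text = (resources.files('email') / 'message.py').read_text(encoding='utf-8')
    assert 'class Message' in text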
+ """ + with open_text(package, resource, encoding, errors) as fp: + return fp.read() + + +@deprecated +def contents(package: Package) -> Iterable[str]: + """Return an iterable of entries in `package`. + + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ + return [path.name for path in _common.files(package).iterdir()] + + +@deprecated +def is_resource(package: Package, name: str) -> bool: + """True if `name` is a resource inside `package`. + + Directories are *not* resources. + """ + resource = normalize_path(name) + return any( + traversable.name == resource and traversable.is_file() + for traversable in _common.files(package).iterdir() + ) + + +@deprecated +def path( + package: Package, + resource: Resource, +) -> ContextManager[pathlib.Path]: + """A context manager providing a file path object to the resource. + + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). + """ + return _common.as_file(_common.files(package) / normalize_path(resource)) diff --git a/Lib/importlib/resources/abc.py b/Lib/importlib/resources/abc.py new file mode 100644 index 0000000000..0b7bfdc415 --- /dev/null +++ b/Lib/importlib/resources/abc.py @@ -0,0 +1,151 @@ +import abc +import io +import os +from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional +from typing import runtime_checkable, Protocol +from typing import Union + + +StrPath = Union[str, os.PathLike[str]] + +__all__ = ["ResourceReader", "Traversable", "TraversableResources"] + + +class ResourceReader(metaclass=abc.ABCMeta): + """Abstract base class for loaders to provide resource reading support.""" + + @abc.abstractmethod + def open_resource(self, resource: Text) -> BinaryIO: + """Return an opened, file-like object for binary reading. + + The 'resource' argument is expected to represent only a file name. + If the resource cannot be found, FileNotFoundError is raised. + """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. + raise FileNotFoundError + + @abc.abstractmethod + def resource_path(self, resource: Text) -> Text: + """Return the file system path to the specified resource. + + The 'resource' argument is expected to represent only a file name. + If the resource does not exist on the file system, raise + FileNotFoundError. + """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. + raise FileNotFoundError + + @abc.abstractmethod + def is_resource(self, path: Text) -> bool: + """Return True if the named 'path' is a resource. + + Files are resources, directories are not. + """ + raise FileNotFoundError + + @abc.abstractmethod + def contents(self) -> Iterable[str]: + """Return an iterable of entries in `package`.""" + raise FileNotFoundError + + +@runtime_checkable +class Traversable(Protocol): + """ + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + + Any exceptions that occur when accessing the backing resource + may propagate unaltered. 
+ """ + + @abc.abstractmethod + def iterdir(self) -> Iterator["Traversable"]: + """ + Yield Traversable objects in self + """ + + def read_bytes(self) -> bytes: + """ + Read contents of self as bytes + """ + with self.open('rb') as strm: + return strm.read() + + def read_text(self, encoding: Optional[str] = None) -> str: + """ + Read contents of self as text + """ + with self.open(encoding=encoding) as strm: + return strm.read() + + @abc.abstractmethod + def is_dir(self) -> bool: + """ + Return True if self is a directory + """ + + @abc.abstractmethod + def is_file(self) -> bool: + """ + Return True if self is a file + """ + + @abc.abstractmethod + def joinpath(self, *descendants: StrPath) -> "Traversable": + """ + Return Traversable resolved with any descendants applied. + + Each descendant should be a path segment relative to self + and each may contain multiple levels separated by + ``posixpath.sep`` (``/``). + """ + + def __truediv__(self, child: StrPath) -> "Traversable": + """ + Return Traversable child in self + """ + return self.joinpath(child) + + @abc.abstractmethod + def open(self, mode='r', *args, **kwargs): + """ + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ + + @abc.abstractproperty + def name(self) -> str: + """ + The base name of this object without any parent references. + """ + + +class TraversableResources(ResourceReader): + """ + The required interface for providing traversable + resources. + """ + + @abc.abstractmethod + def files(self) -> "Traversable": + """Return a Traversable object for the loaded package.""" + + def open_resource(self, resource: StrPath) -> io.BufferedReader: + return self.files().joinpath(resource).open('rb') + + def resource_path(self, resource: Any) -> NoReturn: + raise FileNotFoundError(resource) + + def is_resource(self, path: StrPath) -> bool: + return self.files().joinpath(path).is_file() + + def contents(self) -> Iterator[str]: + return (item.name for item in self.files().iterdir()) diff --git a/Lib/importlib/resources/readers.py b/Lib/importlib/resources/readers.py new file mode 100644 index 0000000000..b470a2062b --- /dev/null +++ b/Lib/importlib/resources/readers.py @@ -0,0 +1,122 @@ +import collections +import operator +import pathlib +import zipfile + +from . import abc + +from ._itertools import unique_everseen + + +def remove_duplicates(items): + return iter(collections.OrderedDict.fromkeys(items)) + + +class FileReader(abc.TraversableResources): + def __init__(self, loader): + self.path = pathlib.Path(loader.path).parent + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path + + +class ZipReader(abc.TraversableResources): + def __init__(self, loader, module): + _, _, name = module.rpartition('.') + self.prefix = loader.prefix.replace('\\', '/') + name + '/' + self.archive = loader.archive + + def open_resource(self, resource): + try: + return super().open_resource(resource) + except KeyError as exc: + raise FileNotFoundError(exc.args[0]) + + def is_resource(self, path): + # workaround for `zipfile.Path.is_file` returning true + # for non-existent paths. 
+ target = self.files().joinpath(path) + return target.is_file() and target.exists() + + def files(self): + return zipfile.Path(self.archive, self.prefix) + + +class MultiplexedPath(abc.Traversable): + """ + Given a series of Traversable objects, implement a merged + version of the interface across all objects. Useful for + namespace packages which may be multihomed at a single + name. + """ + + def __init__(self, *paths): + self._paths = list(map(pathlib.Path, remove_duplicates(paths))) + if not self._paths: + message = 'MultiplexedPath must contain at least one path' + raise FileNotFoundError(message) + if not all(path.is_dir() for path in self._paths): + raise NotADirectoryError('MultiplexedPath only supports directories') + + def iterdir(self): + files = (file for path in self._paths for file in path.iterdir()) + return unique_everseen(files, key=operator.attrgetter('name')) + + def read_bytes(self): + raise FileNotFoundError(f'{self} is not a file') + + def read_text(self, *args, **kwargs): + raise FileNotFoundError(f'{self} is not a file') + + def is_dir(self): + return True + + def is_file(self): + return False + + def joinpath(self, child): + # first try to find child in current paths + for file in self.iterdir(): + if file.name == child: + return file + # if it does not exist, construct it with the first path + return self._paths[0] / child + + __truediv__ = joinpath + + def open(self, *args, **kwargs): + raise FileNotFoundError(f'{self} is not a file') + + @property + def name(self): + return self._paths[0].name + + def __repr__(self): + paths = ', '.join(f"'{path}'" for path in self._paths) + return f'MultiplexedPath({paths})' + + +class NamespaceReader(abc.TraversableResources): + def __init__(self, namespace_path): + if 'NamespacePath' not in str(namespace_path): + raise ValueError('Invalid path') + self.path = MultiplexedPath(*list(namespace_path)) + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path diff --git a/Lib/importlib/resources/simple.py b/Lib/importlib/resources/simple.py new file mode 100644 index 0000000000..d0fbf23776 --- /dev/null +++ b/Lib/importlib/resources/simple.py @@ -0,0 +1,125 @@ +""" +Interface adapters for low-level readers. +""" + +import abc +import io +import itertools +from typing import BinaryIO, List + +from .abc import Traversable, TraversableResources + + +class SimpleReader(abc.ABC): + """ + The minimum, low-level interface required from a resource + provider. + """ + + @abc.abstractproperty + def package(self): + # type: () -> str + """ + The name of the package for which this reader loads resources. + """ + + @abc.abstractmethod + def children(self): + # type: () -> List['SimpleReader'] + """ + Obtain an iterable of SimpleReader for available + child containers (e.g. directories). + """ + + @abc.abstractmethod + def resources(self): + # type: () -> List[str] + """ + Obtain available named resources for this virtual package. + """ + + @abc.abstractmethod + def open_binary(self, resource): + # type: (str) -> BinaryIO + """ + Obtain a File-like for a named resource. + """ + + @property + def name(self): + return self.package.split('.')[-1] + + +class ResourceHandle(Traversable): + """ + Handle to a named resource in a ResourceReader. 
+ """ + + def __init__(self, parent, name): + # type: (ResourceContainer, str) -> None + self.parent = parent + self.name = name # type: ignore + + def is_file(self): + return True + + def is_dir(self): + return False + + def open(self, mode='r', *args, **kwargs): + stream = self.parent.reader.open_binary(self.name) + if 'b' not in mode: + stream = io.TextIOWrapper(*args, **kwargs) + return stream + + def joinpath(self, name): + raise RuntimeError("Cannot traverse into a resource") + + +class ResourceContainer(Traversable): + """ + Traversable container for a package's resources via its reader. + """ + + def __init__(self, reader): + # type: (SimpleReader) -> None + self.reader = reader + + def is_dir(self): + return True + + def is_file(self): + return False + + def iterdir(self): + files = (ResourceHandle(self, name) for name in self.reader.resources) + dirs = map(ResourceContainer, self.reader.children()) + return itertools.chain(files, dirs) + + def open(self, *args, **kwargs): + raise IsADirectoryError() + + @staticmethod + def _flatten(compound_names): + for name in compound_names: + yield from name.split('/') + + def joinpath(self, *descendants): + if not descendants: + return self + names = self._flatten(descendants) + target = next(names) + return next( + traversable for traversable in self.iterdir() if traversable.name == target + ).joinpath(*names) + + +class TraversableReader(TraversableResources, SimpleReader): + """ + A TraversableResources based on SimpleReader. Resource providers + may derive from this class to provide the TraversableResources + interface by supplying the SimpleReader interface. + """ + + def files(self): + return ResourceContainer(self) diff --git a/Lib/importlib/simple.py b/Lib/importlib/simple.py new file mode 100644 index 0000000000..845bb90364 --- /dev/null +++ b/Lib/importlib/simple.py @@ -0,0 +1,14 @@ +""" +Compatibility shim for .resources.simple as found on Python 3.10. + +Consumers that can rely on Python 3.11 should use the other +module directly. +""" + +from .resources.simple import ( + SimpleReader, ResourceHandle, ResourceContainer, TraversableReader, +) + +__all__ = [ + 'SimpleReader', 'ResourceHandle', 'ResourceContainer', 'TraversableReader', +] diff --git a/Lib/test/test_importlib/builtin/test_finder.py b/Lib/test/test_importlib/builtin/test_finder.py index 6f51abab9b..a4869e07b9 100644 --- a/Lib/test/test_importlib/builtin/test_finder.py +++ b/Lib/test/test_importlib/builtin/test_finder.py @@ -1,5 +1,4 @@ -from .. import abc -from .. import util +from test.test_importlib import abc, util machinery = util.import_importlib('importlib.machinery') diff --git a/Lib/test/test_importlib/builtin/test_loader.py b/Lib/test/test_importlib/builtin/test_loader.py index f6b6d97cd5..7e9d1b1960 100644 --- a/Lib/test/test_importlib/builtin/test_loader.py +++ b/Lib/test/test_importlib/builtin/test_loader.py @@ -1,5 +1,4 @@ -from .. import abc -from .. 
import util +from test.test_importlib import abc, util machinery = util.import_importlib('importlib.machinery') diff --git a/Lib/test/test_importlib/data/example2-1.0.0-py3-none-any.whl b/Lib/test/test_importlib/data/example2-1.0.0-py3-none-any.whl new file mode 100644 index 0000000000..5ca93657f8 Binary files /dev/null and b/Lib/test/test_importlib/data/example2-1.0.0-py3-none-any.whl differ diff --git a/Lib/test/test_importlib/extension/test_case_sensitivity.py b/Lib/test/test_importlib/extension/test_case_sensitivity.py index 20bf035cb5..366e565cf4 100644 --- a/Lib/test/test_importlib/extension/test_case_sensitivity.py +++ b/Lib/test/test_importlib/extension/test_case_sensitivity.py @@ -2,7 +2,7 @@ from test.support import os_helper import unittest import sys -from .. import util +from test.test_importlib import util importlib = util.import_importlib('importlib') machinery = util.import_importlib('importlib.machinery') diff --git a/Lib/test/test_importlib/extension/test_finder.py b/Lib/test/test_importlib/extension/test_finder.py index 87b8bab358..35ff9fbef5 100644 --- a/Lib/test/test_importlib/extension/test_finder.py +++ b/Lib/test/test_importlib/extension/test_finder.py @@ -1,16 +1,23 @@ -from .. import abc -from .. import util +from test.test_importlib import abc, util machinery = util.import_importlib('importlib.machinery') import unittest -import warnings +import sys class FinderTests(abc.FinderTests): """Test the finder for extension modules.""" + def setUp(self): + if not self.machinery.EXTENSION_SUFFIXES: + raise unittest.SkipTest("Requires dynamic loading support.") + if util.EXTENSIONS.name in sys.builtin_module_names: + raise unittest.SkipTest( + f"{util.EXTENSIONS.name} is a builtin module" + ) + def find_spec(self, fullname): importer = self.machinery.FileFinder(util.EXTENSIONS.path, (self.machinery.ExtensionFileLoader, diff --git a/Lib/test/test_importlib/extension/test_loader.py b/Lib/test/test_importlib/extension/test_loader.py index bdf94eca15..6c5cd577c1 100644 --- a/Lib/test/test_importlib/extension/test_loader.py +++ b/Lib/test/test_importlib/extension/test_loader.py @@ -1,6 +1,5 @@ from warnings import catch_warnings -from .. import abc -from .. import util +from test.test_importlib import abc, util machinery = util.import_importlib('importlib.machinery') @@ -13,11 +12,18 @@ import importlib from test.support.script_helper import assert_python_failure + class LoaderTests(abc.LoaderTests): """Test load_module() for extension modules.""" def setUp(self): + if not self.machinery.EXTENSION_SUFFIXES: + raise unittest.SkipTest("Requires dynamic loading support.") + if util.EXTENSIONS.name in sys.builtin_module_names: + raise unittest.SkipTest( + f"{util.EXTENSIONS.name} is a builtin module" + ) self.loader = self.machinery.ExtensionFileLoader(util.EXTENSIONS.name, util.EXTENSIONS.file_path) @@ -101,7 +107,13 @@ class MultiPhaseExtensionModuleTests(abc.LoaderTests): # Test loading extension modules with multi-phase initialization (PEP 489). 
def setUp(self): + if not self.machinery.EXTENSION_SUFFIXES: + raise unittest.SkipTest("Requires dynamic loading support.") self.name = '_testmultiphase' + if self.name in sys.builtin_module_names: + raise unittest.SkipTest( + f"{self.name} is a builtin module" + ) finder = self.machinery.FileFinder(None) self.spec = importlib.util.find_spec(self.name) assert self.spec diff --git a/Lib/test/test_importlib/extension/test_path_hook.py b/Lib/test/test_importlib/extension/test_path_hook.py index a4b5a64aae..a0adc70ad1 100644 --- a/Lib/test/test_importlib/extension/test_path_hook.py +++ b/Lib/test/test_importlib/extension/test_path_hook.py @@ -1,4 +1,4 @@ -from .. import util +from test.test_importlib import util machinery = util.import_importlib('importlib.machinery') diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py index 12ed07d337..e7be77b395 100644 --- a/Lib/test/test_importlib/fixtures.py +++ b/Lib/test/test_importlib/fixtures.py @@ -5,11 +5,21 @@ import pathlib import tempfile import textwrap +import functools import contextlib from test.support.os_helper import FS_NONASCII +from test.support import requires_zlib from typing import Dict, Union +try: + from importlib import resources # type: ignore + + getattr(resources, 'files') + getattr(resources, 'as_file') +except (ImportError, AttributeError): + import importlib_resources as resources # type: ignore + @contextlib.contextmanager def tempdir(): @@ -54,7 +64,7 @@ def setUp(self): class SiteDir(Fixtures): def setUp(self): - super(SiteDir, self).setUp() + super().setUp() self.site_dir = self.fixtures.enter_context(tempdir()) @@ -69,7 +79,7 @@ def add_sys_path(dir): sys.path.remove(str(dir)) def setUp(self): - super(OnSysPath, self).setUp() + super().setUp() self.fixtures.enter_context(self.add_sys_path(self.site_dir)) @@ -106,7 +116,7 @@ def main(): } def setUp(self): - super(DistInfoPkg, self).setUp() + super().setUp() build_files(DistInfoPkg.files, self.site_dir) def make_uppercase(self): @@ -131,7 +141,7 @@ class DistInfoPkgWithDot(OnSysPath, SiteDir): } def setUp(self): - super(DistInfoPkgWithDot, self).setUp() + super().setUp() build_files(DistInfoPkgWithDot.files, self.site_dir) @@ -152,13 +162,13 @@ class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir): } def setUp(self): - super(DistInfoPkgWithDotLegacy, self).setUp() + super().setUp() build_files(DistInfoPkgWithDotLegacy.files, self.site_dir) class DistInfoPkgOffPath(SiteDir): def setUp(self): - super(DistInfoPkgOffPath, self).setUp() + super().setUp() build_files(DistInfoPkg.files, self.site_dir) @@ -198,7 +208,7 @@ def main(): } def setUp(self): - super(EggInfoPkg, self).setUp() + super().setUp() build_files(EggInfoPkg.files, prefix=self.site_dir) @@ -219,25 +229,10 @@ class EggInfoFile(OnSysPath, SiteDir): } def setUp(self): - super(EggInfoFile, self).setUp() + super().setUp() build_files(EggInfoFile.files, prefix=self.site_dir) -class LocalPackage: - files: FilesDef = { - "setup.py": """ - import setuptools - setuptools.setup(name="local-pkg", version="2.0.1") - """, - } - - def setUp(self): - self.fixtures = contextlib.ExitStack() - self.addCleanup(self.fixtures.close) - self.fixtures.enter_context(tempdir_as_cwd()) - build_files(self.files) - - def build_files(file_defs, prefix=pathlib.Path()): """Build a set of files/directories, as described by the @@ -285,3 +280,35 @@ def DALS(str): class NullFinder: def find_module(self, name): pass + + +@requires_zlib() +class ZipFixtures: + root = 'test.test_importlib.data' + + def 
_fixture_on_path(self, filename): + pkg_file = resources.files(self.root).joinpath(filename) + file = self.resources.enter_context(resources.as_file(pkg_file)) + assert file.name.startswith('example'), file.name + sys.path.insert(0, str(file)) + self.resources.callback(sys.path.pop, 0) + + def setUp(self): + # Add self.zip_name to the front of sys.path. + self.resources = contextlib.ExitStack() + self.addCleanup(self.resources.close) + + +def parameterize(*args_set): + """Run test method with a series of parameters.""" + + def wrapper(func): + @functools.wraps(func) + def _inner(self): + for args in args_set: + with self.subTest(**args): + func(self, **args) + + return _inner + + return wrapper diff --git a/Lib/test/test_importlib/frozen/test_finder.py b/Lib/test/test_importlib/frozen/test_finder.py index 0b8b6bc977..a82148f865 100644 --- a/Lib/test/test_importlib/frozen/test_finder.py +++ b/Lib/test/test_importlib/frozen/test_finder.py @@ -1,47 +1,195 @@ -from .. import abc -from .. import util +from test.test_importlib import abc, util machinery = util.import_importlib('importlib.machinery') +import _imp +import marshal +import os.path import unittest import warnings +from test.support import import_helper, REPO_ROOT, STDLIB_DIR + + +def resolve_stdlib_file(name, ispkg=False): + assert name + if ispkg: + return os.path.join(STDLIB_DIR, *name.split('.'), '__init__.py') + else: + return os.path.join(STDLIB_DIR, *name.split('.')) + '.py' + class FindSpecTests(abc.FinderTests): """Test finding frozen modules.""" - def find(self, name, path=None): + def find(self, name, **kwargs): finder = self.machinery.FrozenImporter - return finder.find_spec(name, path) + with import_helper.frozen_modules(): + return finder.find_spec(name, **kwargs) - def test_module(self): - name = '__hello__' - spec = self.find(name) + def check_basic(self, spec, name, ispkg=False): + self.assertEqual(spec.name, name) + self.assertIs(spec.loader, self.machinery.FrozenImporter) self.assertEqual(spec.origin, 'frozen') + self.assertFalse(spec.has_location) + if ispkg: + self.assertIsNotNone(spec.submodule_search_locations) + else: + self.assertIsNone(spec.submodule_search_locations) + self.assertIsNotNone(spec.loader_state) + + def check_loader_state(self, spec, origname=None, filename=None): + if not filename: + if not origname: + origname = spec.name + filename = resolve_stdlib_file(origname) + + actual = dict(vars(spec.loader_state)) + + # Check the rest of spec.loader_state. + expected = dict( + origname=origname, + filename=filename if origname else None, + ) + self.assertDictEqual(actual, expected) + + def check_search_locations(self, spec): + """This is only called when testing packages.""" + missing = object() + filename = getattr(spec.loader_state, 'filename', missing) + origname = getattr(spec.loader_state, 'origname', None) + if not origname or filename is missing: + # We deal with this in check_loader_state(). + return + if not filename: + expected = [] + elif origname != spec.name and not origname.startswith('<'): + expected = [] + else: + expected = [os.path.dirname(filename)] + self.assertListEqual(spec.submodule_search_locations, expected) - # TODO: RUSTPYTHON - @unittest.expectedFailure def test_package(self): spec = self.find('__phello__') self.assertIsNotNone(spec) - # TODO: RUSTPYTHON - @unittest.expectedFailure def test_module_in_package(self): spec = self.find('__phello__.spam', ['__phello__']) self.assertIsNotNone(spec) - # No frozen package within another package to test with. 
+ # TODO: RUSTPYTHON + @unittest.expectedFailure + def test_module(self): + modules = [ + '__hello__', + '__phello__.spam', + '__phello__.ham.eggs', + ] + for name in modules: + with self.subTest(f'{name} -> {name}'): + spec = self.find(name) + self.check_basic(spec, name) + self.check_loader_state(spec) + modules = { + '__hello_alias__': '__hello__', + '_frozen_importlib': 'importlib._bootstrap', + } + for name, origname in modules.items(): + with self.subTest(f'{name} -> {origname}'): + spec = self.find(name) + self.check_basic(spec, name) + self.check_loader_state(spec, origname) + modules = [ + '__phello__.__init__', + '__phello__.ham.__init__', + ] + for name in modules: + origname = '<' + name.rpartition('.')[0] + filename = resolve_stdlib_file(name) + with self.subTest(f'{name} -> {origname}'): + spec = self.find(name) + self.check_basic(spec, name) + self.check_loader_state(spec, origname, filename) + modules = { + '__hello_only__': ('Tools', 'freeze', 'flag.py'), + } + for name, path in modules.items(): + origname = None + filename = os.path.join(REPO_ROOT, *path) + with self.subTest(f'{name} -> {filename}'): + spec = self.find(name) + self.check_basic(spec, name) + self.check_loader_state(spec, origname, filename) + + # TODO: RUSTPYTHON + @unittest.expectedFailure + def test_package(self): + packages = [ + '__phello__', + '__phello__.ham', + ] + for name in packages: + filename = resolve_stdlib_file(name, ispkg=True) + with self.subTest(f'{name} -> {name}'): + spec = self.find(name) + self.check_basic(spec, name, ispkg=True) + self.check_loader_state(spec, name, filename) + self.check_search_locations(spec) + packages = { + '__phello_alias__': '__hello__', + } + for name, origname in packages.items(): + filename = resolve_stdlib_file(origname, ispkg=False) + with self.subTest(f'{name} -> {origname}'): + spec = self.find(name) + self.check_basic(spec, name, ispkg=True) + self.check_loader_state(spec, origname, filename) + self.check_search_locations(spec) + + # These are covered by test_module() and test_package(). + test_module_in_package = None test_package_in_package = None # No easy way to test. 
test_package_over_module = None + def test_path_ignored(self): + for name in ('__hello__', '__phello__', '__phello__.spam'): + actual = self.find(name) + for path in (None, object(), '', 'eggs', [], [''], ['eggs']): + with self.subTest((name, path)): + spec = self.find(name, path=path) + self.assertEqual(spec, actual) + + def test_target_ignored(self): + imported = ('__hello__', '__phello__') + with import_helper.CleanImport(*imported, usefrozen=True): + import __hello__ as match + import __phello__ as nonmatch + name = '__hello__' + actual = self.find(name) + for target in (None, match, nonmatch, object(), 'not-a-module-object'): + with self.subTest(target): + spec = self.find(name, target=target) + self.assertEqual(spec, actual) + def test_failure(self): spec = self.find('') self.assertIsNone(spec) + # TODO: RUSTPYTHON + @unittest.expectedFailure + def test_not_using_frozen(self): + finder = self.machinery.FrozenImporter + with import_helper.frozen_modules(enabled=False): + # both frozen and not frozen + spec1 = finder.find_spec('__hello__') + # only frozen + spec2 = finder.find_spec('__hello_only__') + self.assertIsNone(spec1) + self.assertIsNone(spec2) + (Frozen_FindSpecTests, Source_FindSpecTests @@ -56,21 +204,18 @@ def find(self, name, path=None): finder = self.machinery.FrozenImporter with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) - return finder.find_module(name, path) + with import_helper.frozen_modules(): + return finder.find_module(name, path) def test_module(self): name = '__hello__' loader = self.find(name) self.assertTrue(hasattr(loader, 'load_module')) - # TODO: RUSTPYTHON - @unittest.expectedFailure def test_package(self): loader = self.find('__phello__') self.assertTrue(hasattr(loader, 'load_module')) - # TODO: RUSTPYTHON - @unittest.expectedFailure def test_module_in_package(self): loader = self.find('__phello__.spam', ['__phello__']) self.assertTrue(hasattr(loader, 'load_module')) diff --git a/Lib/test/test_importlib/frozen/test_loader.py b/Lib/test/test_importlib/frozen/test_loader.py index d09532c757..db256ff0fb 100644 --- a/Lib/test/test_importlib/frozen/test_loader.py +++ b/Lib/test/test_importlib/frozen/test_loader.py @@ -1,29 +1,71 @@ -from .. import abc -from .. 
import util +from test.test_importlib import abc, util machinery = util.import_importlib('importlib.machinery') -from test.support import captured_stdout +from test.support import captured_stdout, import_helper, STDLIB_DIR +import _imp +import contextlib +import marshal +import os.path import types import unittest import warnings +@contextlib.contextmanager +def deprecated(): + with warnings.catch_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + yield + + +@contextlib.contextmanager +def fresh(name, *, oldapi=False): + with util.uncache(name): + with import_helper.frozen_modules(): + if oldapi: + with deprecated(): + yield + else: + yield + + +def resolve_stdlib_file(name, ispkg=False): + assert name + if ispkg: + return os.path.join(STDLIB_DIR, *name.split('.'), '__init__.py') + else: + return os.path.join(STDLIB_DIR, *name.split('.')) + '.py' + + class ExecModuleTests(abc.LoaderTests): - def exec_module(self, name): - with util.uncache(name), captured_stdout() as stdout: - spec = self.machinery.ModuleSpec( - name, self.machinery.FrozenImporter, origin='frozen', - is_package=self.machinery.FrozenImporter.is_package(name)) - module = types.ModuleType(name) - module.__spec__ = spec - assert not hasattr(module, 'initialized') + def exec_module(self, name, origname=None): + with import_helper.frozen_modules(): + is_package = self.machinery.FrozenImporter.is_package(name) + spec = self.machinery.ModuleSpec( + name, + self.machinery.FrozenImporter, + origin='frozen', + is_package=is_package, + loader_state=types.SimpleNamespace( + origname=origname or name, + filename=resolve_stdlib_file(origname or name, is_package), + ), + ) + module = types.ModuleType(name) + module.__spec__ = spec + assert not hasattr(module, 'initialized') + + with fresh(name): self.machinery.FrozenImporter.exec_module(module) - self.assertTrue(module.initialized) - self.assertTrue(hasattr(module, '__spec__')) - self.assertEqual(module.__spec__.origin, 'frozen') - return module, stdout.getvalue() + with captured_stdout() as stdout: + module.main() + + self.assertTrue(module.initialized) + self.assertTrue(hasattr(module, '__spec__')) + self.assertEqual(module.__spec__.origin, 'frozen') + return module, stdout.getvalue() def test_module(self): name = '__hello__' @@ -33,9 +75,8 @@ def test_module(self): self.assertEqual(getattr(module, attr), value) self.assertEqual(output, 'Hello world!\n') self.assertTrue(hasattr(module, '__spec__')) + self.assertEqual(module.__spec__.loader_state.origname, name) - # TODO: RUSTPYTHON - @unittest.expectedFailure def test_package(self): name = '__phello__' module, output = self.exec_module(name) @@ -47,27 +88,25 @@ def test_package(self): name=name, attr=attr, given=attr_value, expected=value)) self.assertEqual(output, 'Hello world!\n') + self.assertEqual(module.__spec__.loader_state.origname, name) - # TODO: RUSTPYTHON - @unittest.expectedFailure def test_lacking_parent(self): name = '__phello__.spam' with util.uncache('__phello__'): module, output = self.exec_module(name) - check = {'__name__': name} - for attr, value in check.items(): - attr_value = getattr(module, attr) - self.assertEqual(attr_value, value, - 'for {name}.{attr}, {given} != {expected!r}'.format( - name=name, attr=attr, given=attr_value, - expected=value)) - self.assertEqual(output, 'Hello world!\n') + check = {'__name__': name} + for attr, value in check.items(): + attr_value = getattr(module, attr) + self.assertEqual(attr_value, value, + 'for {name}.{attr}, {given} != {expected!r}'.format( + name=name, 
attr=attr, given=attr_value, + expected=value)) + self.assertEqual(output, 'Hello world!\n') def test_module_repr(self): name = '__hello__' module, output = self.exec_module(name) - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) + with deprecated(): repr_str = self.machinery.FrozenImporter.module_repr(module) self.assertEqual(repr_str, "") @@ -82,7 +121,8 @@ def test_module_repr_indirect(self): test_state_after_failure = None def test_unloadable(self): - assert self.machinery.FrozenImporter.find_spec('_not_real') is None + with import_helper.frozen_modules(): + assert self.machinery.FrozenImporter.find_spec('_not_real') is None with self.assertRaises(ImportError) as cm: self.exec_module('_not_real') self.assertEqual(cm.exception.name, '_not_real') @@ -95,88 +135,91 @@ def test_unloadable(self): class LoaderTests(abc.LoaderTests): + def load_module(self, name): + with fresh(name, oldapi=True): + module = self.machinery.FrozenImporter.load_module(name) + with captured_stdout() as stdout: + module.main() + return module, stdout + + # TODO: RUSTPYTHON + @unittest.expectedFailure def test_module(self): - with util.uncache('__hello__'), captured_stdout() as stdout: - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - module = self.machinery.FrozenImporter.load_module('__hello__') - check = {'__name__': '__hello__', - '__package__': '', - '__loader__': self.machinery.FrozenImporter, - } - for attr, value in check.items(): - self.assertEqual(getattr(module, attr), value) - self.assertEqual(stdout.getvalue(), 'Hello world!\n') - self.assertFalse(hasattr(module, '__file__')) + module, stdout = self.load_module('__hello__') + filename = resolve_stdlib_file('__hello__') + check = {'__name__': '__hello__', + '__package__': '', + '__loader__': self.machinery.FrozenImporter, + '__file__': filename, + } + for attr, value in check.items(): + self.assertEqual(getattr(module, attr, None), value) + self.assertEqual(stdout.getvalue(), 'Hello world!\n') # TODO: RUSTPYTHON @unittest.expectedFailure def test_package(self): - with util.uncache('__phello__'), captured_stdout() as stdout: - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - module = self.machinery.FrozenImporter.load_module('__phello__') - check = {'__name__': '__phello__', - '__package__': '__phello__', - '__path__': [], - '__loader__': self.machinery.FrozenImporter, - } - for attr, value in check.items(): - attr_value = getattr(module, attr) - self.assertEqual(attr_value, value, - "for __phello__.%s, %r != %r" % - (attr, attr_value, value)) - self.assertEqual(stdout.getvalue(), 'Hello world!\n') - self.assertFalse(hasattr(module, '__file__')) + module, stdout = self.load_module('__phello__') + filename = resolve_stdlib_file('__phello__', ispkg=True) + pkgdir = os.path.dirname(filename) + check = {'__name__': '__phello__', + '__package__': '__phello__', + '__path__': [pkgdir], + '__loader__': self.machinery.FrozenImporter, + '__file__': filename, + } + for attr, value in check.items(): + attr_value = getattr(module, attr, None) + self.assertEqual(attr_value, value, + "for __phello__.%s, %r != %r" % + (attr, attr_value, value)) + self.assertEqual(stdout.getvalue(), 'Hello world!\n') # TODO: RUSTPYTHON @unittest.expectedFailure def test_lacking_parent(self): - with util.uncache('__phello__', '__phello__.spam'), \ - captured_stdout() as stdout: - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - module 
= self.machinery.FrozenImporter.load_module('__phello__.spam') - check = {'__name__': '__phello__.spam', - '__package__': '__phello__', - '__loader__': self.machinery.FrozenImporter, - } - for attr, value in check.items(): - attr_value = getattr(module, attr) - self.assertEqual(attr_value, value, - "for __phello__.spam.%s, %r != %r" % - (attr, attr_value, value)) - self.assertEqual(stdout.getvalue(), 'Hello world!\n') - self.assertFalse(hasattr(module, '__file__')) + with util.uncache('__phello__'): + module, stdout = self.load_module('__phello__.spam') + filename = resolve_stdlib_file('__phello__.spam') + check = {'__name__': '__phello__.spam', + '__package__': '__phello__', + '__loader__': self.machinery.FrozenImporter, + '__file__': filename, + } + for attr, value in check.items(): + attr_value = getattr(module, attr) + self.assertEqual(attr_value, value, + "for __phello__.spam.%s, %r != %r" % + (attr, attr_value, value)) + self.assertEqual(stdout.getvalue(), 'Hello world!\n') def test_module_reuse(self): - with util.uncache('__hello__'), captured_stdout() as stdout: - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - module1 = self.machinery.FrozenImporter.load_module('__hello__') - module2 = self.machinery.FrozenImporter.load_module('__hello__') - self.assertIs(module1, module2) - self.assertEqual(stdout.getvalue(), - 'Hello world!\nHello world!\n') + with fresh('__hello__', oldapi=True): + module1 = self.machinery.FrozenImporter.load_module('__hello__') + module2 = self.machinery.FrozenImporter.load_module('__hello__') + with captured_stdout() as stdout: + module1.main() + module2.main() + self.assertIs(module1, module2) + self.assertEqual(stdout.getvalue(), + 'Hello world!\nHello world!\n') def test_module_repr(self): - with util.uncache('__hello__'), captured_stdout(): - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - module = self.machinery.FrozenImporter.load_module('__hello__') - repr_str = self.machinery.FrozenImporter.module_repr(module) - self.assertEqual(repr_str, - "") + with fresh('__hello__', oldapi=True): + module = self.machinery.FrozenImporter.load_module('__hello__') + repr_str = self.machinery.FrozenImporter.module_repr(module) + self.assertEqual(repr_str, + "") # No way to trigger an error in a frozen module. test_state_after_failure = None def test_unloadable(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - assert self.machinery.FrozenImporter.find_module('_not_real') is None + with import_helper.frozen_modules(): + with deprecated(): + assert self.machinery.FrozenImporter.find_module('_not_real') is None with self.assertRaises(ImportError) as cm: - self.machinery.FrozenImporter.load_module('_not_real') + self.load_module('_not_real') self.assertEqual(cm.exception.name, '_not_real') @@ -192,26 +235,28 @@ class InspectLoaderTests: def test_get_code(self): # Make sure that the code object is good. name = '__hello__' - with captured_stdout() as stdout: + with import_helper.frozen_modules(): code = self.machinery.FrozenImporter.get_code(name) mod = types.ModuleType(name) exec(code, mod.__dict__) - self.assertTrue(hasattr(mod, 'initialized')) - self.assertEqual(stdout.getvalue(), 'Hello world!\n') + with captured_stdout() as stdout: + mod.main() + self.assertTrue(hasattr(mod, 'initialized')) + self.assertEqual(stdout.getvalue(), 'Hello world!\n') def test_get_source(self): # Should always return None. 
- result = self.machinery.FrozenImporter.get_source('__hello__') + with import_helper.frozen_modules(): + result = self.machinery.FrozenImporter.get_source('__hello__') self.assertIsNone(result) - # TODO: RUSTPYTHON - @unittest.expectedFailure def test_is_package(self): # Should be able to tell what is a package. test_for = (('__hello__', False), ('__phello__', True), ('__phello__.spam', False)) for name, is_package in test_for: - result = self.machinery.FrozenImporter.is_package(name) + with import_helper.frozen_modules(): + result = self.machinery.FrozenImporter.is_package(name) self.assertEqual(bool(result), is_package) def test_failure(self): @@ -219,7 +264,8 @@ def test_failure(self): for meth_name in ('get_code', 'get_source', 'is_package'): method = getattr(self.machinery.FrozenImporter, meth_name) with self.assertRaises(ImportError) as cm: - method('importlib') + with import_helper.frozen_modules(): + method('importlib') self.assertEqual(cm.exception.name, 'importlib') (Frozen_ILTests, diff --git a/Lib/test/test_importlib/import_/test___loader__.py b/Lib/test/test_importlib/import_/test___loader__.py index ecd83c6567..eaf665a6f5 100644 --- a/Lib/test/test_importlib/import_/test___loader__.py +++ b/Lib/test/test_importlib/import_/test___loader__.py @@ -4,7 +4,7 @@ import unittest import warnings -from .. import util +from test.test_importlib import util class SpecLoaderMock: diff --git a/Lib/test/test_importlib/import_/test___package__.py b/Lib/test/test_importlib/import_/test___package__.py index 7a3ac887d5..cc2fa0f459 100644 --- a/Lib/test/test_importlib/import_/test___package__.py +++ b/Lib/test/test_importlib/import_/test___package__.py @@ -6,7 +6,7 @@ """ import unittest import warnings -from .. import util +from test.test_importlib import util class Using__package__: diff --git a/Lib/test/test_importlib/import_/test_api.py b/Lib/test/test_importlib/import_/test_api.py index 35c26977ea..0ee032b020 100644 --- a/Lib/test/test_importlib/import_/test_api.py +++ b/Lib/test/test_importlib/import_/test_api.py @@ -1,4 +1,4 @@ -from .. import util +from test.test_importlib import util from importlib import machinery import sys diff --git a/Lib/test/test_importlib/import_/test_caching.py b/Lib/test/test_importlib/import_/test_caching.py index 0f987b2210..3ca765fb4a 100644 --- a/Lib/test/test_importlib/import_/test_caching.py +++ b/Lib/test/test_importlib/import_/test_caching.py @@ -1,5 +1,5 @@ """Test that sys.modules is used properly by import.""" -from .. import util +from test.test_importlib import util import sys from types import MethodType import unittest diff --git a/Lib/test/test_importlib/import_/test_fromlist.py b/Lib/test/test_importlib/import_/test_fromlist.py index deb21710a6..4b4b9bc3f5 100644 --- a/Lib/test/test_importlib/import_/test_fromlist.py +++ b/Lib/test/test_importlib/import_/test_fromlist.py @@ -1,5 +1,5 @@ """Test that the semantics relating to the 'fromlist' argument are correct.""" -from .. import util +from test.test_importlib import util import warnings import unittest diff --git a/Lib/test/test_importlib/import_/test_meta_path.py b/Lib/test/test_importlib/import_/test_meta_path.py index e19dd0e5bf..c52fc57065 100644 --- a/Lib/test/test_importlib/import_/test_meta_path.py +++ b/Lib/test/test_importlib/import_/test_meta_path.py @@ -1,4 +1,4 @@ -from .. 
import util +from test.test_importlib import util import importlib._bootstrap import sys from types import MethodType diff --git a/Lib/test/test_importlib/import_/test_packages.py b/Lib/test/test_importlib/import_/test_packages.py index c73ac63f6e..eb0831f7d6 100644 --- a/Lib/test/test_importlib/import_/test_packages.py +++ b/Lib/test/test_importlib/import_/test_packages.py @@ -1,4 +1,4 @@ -from .. import util +from test.test_importlib import util import sys import unittest from test import support diff --git a/Lib/test/test_importlib/import_/test_path.py b/Lib/test/test_importlib/import_/test_path.py index 3b976fdfa9..3873d9f3ed 100644 --- a/Lib/test/test_importlib/import_/test_path.py +++ b/Lib/test/test_importlib/import_/test_path.py @@ -1,4 +1,4 @@ -from .. import util +from test.test_importlib import util importlib = util.import_importlib('importlib') machinery = util.import_importlib('importlib.machinery') @@ -204,10 +204,11 @@ def __init__(self): def invalidate_caches(self): self.called = True - cache = {'leave_alone': object(), 'finder_to_invalidate': FakeFinder()} + key = os.path.abspath('finder_to_invalidate') + cache = {'leave_alone': object(), key: FakeFinder()} with util.import_state(path_importer_cache=cache): self.machinery.PathFinder.invalidate_caches() - self.assertTrue(cache['finder_to_invalidate'].called) + self.assertTrue(cache[key].called) def test_invalidate_caches_clear_out_None(self): # Clear out None in sys.path_importer_cache() when invalidating caches. @@ -216,6 +217,16 @@ def test_invalidate_caches_clear_out_None(self): self.machinery.PathFinder.invalidate_caches() self.assertEqual(len(cache), 0) + def test_invalidate_caches_clear_out_relative_path(self): + class FakeFinder: + def invalidate_caches(self): + pass + + cache = {'relative_path': FakeFinder()} + with util.import_state(path_importer_cache=cache): + self.machinery.PathFinder.invalidate_caches() + self.assertEqual(cache, {}) + class FindModuleTests(FinderTests): def find(self, *args, **kwargs): diff --git a/Lib/test/test_importlib/import_/test_relative_imports.py b/Lib/test/test_importlib/import_/test_relative_imports.py index 41aa182699..99c24f1fd9 100644 --- a/Lib/test/test_importlib/import_/test_relative_imports.py +++ b/Lib/test/test_importlib/import_/test_relative_imports.py @@ -1,5 +1,5 @@ """Test relative imports (PEP 328).""" -from .. import util +from test.test_importlib import util import unittest import warnings diff --git a/Lib/test/test_importlib/resources/__init__.py b/Lib/test/test_importlib/resources/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Lib/test/test_importlib/resources/util.py b/Lib/test/test_importlib/resources/util.py new file mode 100644 index 0000000000..11c8aa8080 --- /dev/null +++ b/Lib/test/test_importlib/resources/util.py @@ -0,0 +1,178 @@ +import abc +import importlib +import io +import sys +import types +from pathlib import Path, PurePath + +from .. import data01 +from .. 
import zipdata01 +from importlib.abc import ResourceReader +from test.support import import_helper + + +from importlib.machinery import ModuleSpec + + +class Reader(ResourceReader): + def __init__(self, **kwargs): + vars(self).update(kwargs) + + def get_resource_reader(self, package): + return self + + def open_resource(self, path): + self._path = path + if isinstance(self.file, Exception): + raise self.file + return self.file + + def resource_path(self, path_): + self._path = path_ + if isinstance(self.path, Exception): + raise self.path + return self.path + + def is_resource(self, path_): + self._path = path_ + if isinstance(self.path, Exception): + raise self.path + + def part(entry): + return entry.split('/') + + return any( + len(parts) == 1 and parts[0] == path_ for parts in map(part, self._contents) + ) + + def contents(self): + if isinstance(self.path, Exception): + raise self.path + yield from self._contents + + +def create_package_from_loader(loader, is_package=True): + name = 'testingpackage' + module = types.ModuleType(name) + spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package) + module.__spec__ = spec + module.__loader__ = loader + return module + + +def create_package(file=None, path=None, is_package=True, contents=()): + return create_package_from_loader( + Reader(file=file, path=path, _contents=contents), + is_package, + ) + + +class CommonTests(metaclass=abc.ABCMeta): + """ + Tests shared by test_open, test_path, and test_read. + """ + + @abc.abstractmethod + def execute(self, package, path): + """ + Call the pertinent legacy API function (e.g. open_text, path) + on package and path. + """ + + def test_package_name(self): + # Passing in the package name should succeed. + self.execute(data01.__name__, 'utf-8.file') + + def test_package_object(self): + # Passing in the package itself should succeed. + self.execute(data01, 'utf-8.file') + + def test_string_path(self): + # Passing in a string for the path should succeed. + path = 'utf-8.file' + self.execute(data01, path) + + def test_pathlib_path(self): + # Passing in a pathlib.PurePath object for the path should succeed. + path = PurePath('utf-8.file') + self.execute(data01, path) + + def test_importing_module_as_side_effect(self): + # The anchor package can already be imported. + del sys.modules[data01.__name__] + self.execute(data01.__name__, 'utf-8.file') + + def test_non_package_by_name(self): + # The anchor package cannot be a module. + with self.assertRaises(TypeError): + self.execute(__name__, 'utf-8.file') + + def test_non_package_by_package(self): + # The anchor package cannot be a module. + with self.assertRaises(TypeError): + module = sys.modules['test.test_importlib.resources.util'] + self.execute(module, 'utf-8.file') + + def test_missing_path(self): + # Attempting to open or read or request the path for a + # non-existent path should succeed if open_resource + # can return a viable data stream. + bytes_data = io.BytesIO(b'Hello, world!') + package = create_package(file=bytes_data, path=FileNotFoundError()) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + def test_extant_path(self): + # Attempting to open or read or request the path when the + # path does exist should still succeed. Does not assert + # anything about the result. 
+ bytes_data = io.BytesIO(b'Hello, world!') + # any path that exists + path = __file__ + package = create_package(file=bytes_data, path=path) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + def test_useless_loader(self): + package = create_package(file=FileNotFoundError(), path=FileNotFoundError()) + with self.assertRaises(FileNotFoundError): + self.execute(package, 'utf-8.file') + + +class ZipSetupBase: + ZIP_MODULE = None + + @classmethod + def setUpClass(cls): + data_path = Path(cls.ZIP_MODULE.__file__) + data_dir = data_path.parent + cls._zip_path = str(data_dir / 'ziptestdata.zip') + sys.path.append(cls._zip_path) + cls.data = importlib.import_module('ziptestdata') + + @classmethod + def tearDownClass(cls): + try: + sys.path.remove(cls._zip_path) + except ValueError: + pass + + try: + del sys.path_importer_cache[cls._zip_path] + del sys.modules[cls.data.__name__] + except KeyError: + pass + + try: + del cls.data + del cls._zip_path + except AttributeError: + pass + + def setUp(self): + modules = import_helper.modules_setup() + self.addCleanup(import_helper.modules_cleanup, *modules) + + +class ZipSetup(ZipSetupBase): + ZIP_MODULE = zipdata01 # type: ignore diff --git a/Lib/test/test_importlib/source/test_case_sensitivity.py b/Lib/test/test_importlib/source/test_case_sensitivity.py index 19543f4a66..9d472707ab 100644 --- a/Lib/test/test_importlib/source/test_case_sensitivity.py +++ b/Lib/test/test_importlib/source/test_case_sensitivity.py @@ -1,7 +1,7 @@ """Test case-sensitivity (PEP 235).""" import sys -from .. import util +from test.test_importlib import util importlib = util.import_importlib('importlib') machinery = util.import_importlib('importlib.machinery') diff --git a/Lib/test/test_importlib/source/test_file_loader.py b/Lib/test/test_importlib/source/test_file_loader.py index 8205ff62a9..ebf6ec68d7 100644 --- a/Lib/test/test_importlib/source/test_file_loader.py +++ b/Lib/test/test_importlib/source/test_file_loader.py @@ -1,5 +1,4 @@ -from .. import abc -from .. import util +from test.test_importlib import abc, util importlib = util.import_importlib('importlib') importlib_abc = util.import_importlib('importlib.abc') diff --git a/Lib/test/test_importlib/source/test_finder.py b/Lib/test/test_importlib/source/test_finder.py index 1cf6719c12..73ef8bf961 100644 --- a/Lib/test/test_importlib/source/test_finder.py +++ b/Lib/test/test_importlib/source/test_finder.py @@ -1,5 +1,4 @@ -from .. import abc -from .. import util +from test.test_importlib import abc, util machinery = util.import_importlib('importlib.machinery') @@ -158,21 +157,12 @@ def test_dir_removal_handling(self): def test_no_read_directory(self): # Issue #16730 tempdir = tempfile.TemporaryDirectory() + self.enterContext(tempdir) + # Since we muck with the permissions, we want to set them back to + # their original values to make sure the directory can be properly + # cleaned up. original_mode = os.stat(tempdir.name).st_mode - def cleanup(tempdir): - """Cleanup function for the temporary directory. - - Since we muck with the permissions, we want to set them back to - their original values to make sure the directory can be properly - cleaned up. - - """ - os.chmod(tempdir.name, original_mode) - # If this is not explicitly called then the __del__ method is used, - # but since already mucking around might as well explicitly clean - # up. 
- tempdir.__exit__(None, None, None) - self.addCleanup(cleanup, tempdir) + self.addCleanup(os.chmod, tempdir.name, original_mode) os.chmod(tempdir.name, stat.S_IWUSR | stat.S_IXUSR) finder = self.get_finder(tempdir.name) found = self._find(finder, 'doesnotexist') diff --git a/Lib/test/test_importlib/source/test_path_hook.py b/Lib/test/test_importlib/source/test_path_hook.py index 795d436c3b..ead62f5e94 100644 --- a/Lib/test/test_importlib/source/test_path_hook.py +++ b/Lib/test/test_importlib/source/test_path_hook.py @@ -1,4 +1,4 @@ -from .. import util +from test.test_importlib import util machinery = util.import_importlib('importlib.machinery') diff --git a/Lib/test/test_importlib/source/test_source_encoding.py b/Lib/test/test_importlib/source/test_source_encoding.py index cf5b546006..4f206accf9 100644 --- a/Lib/test/test_importlib/source/test_source_encoding.py +++ b/Lib/test/test_importlib/source/test_source_encoding.py @@ -1,4 +1,4 @@ -from .. import util +from test.test_importlib import util machinery = util.import_importlib('importlib.machinery') diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py index 08a416803b..d77b8a0a4d 100644 --- a/Lib/test/test_importlib/test_abc.py +++ b/Lib/test/test_importlib/test_abc.py @@ -9,7 +9,7 @@ from unittest import mock import warnings -from . import util as test_util +from test.test_importlib import util as test_util init = test_util.import_importlib('importlib') abc = test_util.import_importlib('importlib.abc') diff --git a/Lib/test/test_importlib/test_api.py b/Lib/test/test_importlib/test_api.py index 763b2add07..1beb7835d4 100644 --- a/Lib/test/test_importlib/test_api.py +++ b/Lib/test/test_importlib/test_api.py @@ -1,4 +1,4 @@ -from . import util as test_util +from test.test_importlib import util as test_util init = test_util.import_importlib('importlib') util = test_util.import_importlib('importlib.util') @@ -395,7 +395,7 @@ def find_module(self, *args): def invalidate_caches(self): self.called = True - key = 'gobledeegook' + key = os.path.abspath('gobledeegook') meta_ins = InvalidatingNullFinder() path_ins = InvalidatingNullFinder() sys.meta_path.insert(0, meta_ins) diff --git a/Lib/test/test_importlib/test_compatibilty_files.py b/Lib/test/test_importlib/test_compatibilty_files.py new file mode 100644 index 0000000000..9a823f2d93 --- /dev/null +++ b/Lib/test/test_importlib/test_compatibilty_files.py @@ -0,0 +1,102 @@ +import io +import unittest + +from importlib import resources + +from importlib.resources._adapters import ( + CompatibilityFiles, + wrap_spec, +) + +from .resources import util + + +class CompatibilityFilesTests(unittest.TestCase): + @property + def package(self): + bytes_data = io.BytesIO(b'Hello, world!') + return util.create_package( + file=bytes_data, + path='some_path', + contents=('a', 'b', 'c'), + ) + + @property + def files(self): + return resources.files(self.package) + + def test_spec_path_iter(self): + self.assertEqual( + sorted(path.name for path in self.files.iterdir()), + ['a', 'b', 'c'], + ) + + def test_child_path_iter(self): + self.assertEqual(list((self.files / 'a').iterdir()), []) + + def test_orphan_path_iter(self): + self.assertEqual(list((self.files / 'a' / 'a').iterdir()), []) + self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), []) + + def test_spec_path_is(self): + self.assertFalse(self.files.is_file()) + self.assertFalse(self.files.is_dir()) + + def test_child_path_is(self): + self.assertTrue((self.files / 'a').is_file()) + 
self.assertFalse((self.files / 'a').is_dir()) + + def test_orphan_path_is(self): + self.assertFalse((self.files / 'a' / 'a').is_file()) + self.assertFalse((self.files / 'a' / 'a').is_dir()) + self.assertFalse((self.files / 'a' / 'a' / 'a').is_file()) + self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir()) + + def test_spec_path_name(self): + self.assertEqual(self.files.name, 'testingpackage') + + def test_child_path_name(self): + self.assertEqual((self.files / 'a').name, 'a') + + def test_orphan_path_name(self): + self.assertEqual((self.files / 'a' / 'b').name, 'b') + self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c') + + def test_spec_path_open(self): + self.assertEqual(self.files.read_bytes(), b'Hello, world!') + self.assertEqual(self.files.read_text(), 'Hello, world!') + + def test_child_path_open(self): + self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!') + self.assertEqual((self.files / 'a').read_text(), 'Hello, world!') + + def test_orphan_path_open(self): + with self.assertRaises(FileNotFoundError): + (self.files / 'a' / 'b').read_bytes() + with self.assertRaises(FileNotFoundError): + (self.files / 'a' / 'b' / 'c').read_bytes() + + def test_open_invalid_mode(self): + with self.assertRaises(ValueError): + self.files.open('0') + + def test_orphan_path_invalid(self): + with self.assertRaises(ValueError): + CompatibilityFiles.OrphanPath() + + def test_wrap_spec(self): + spec = wrap_spec(self.package) + self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles) + + +class CompatibilityFilesNoReaderTests(unittest.TestCase): + @property + def package(self): + return util.create_package_from_loader(None) + + @property + def files(self): + return resources.files(self.package) + + def test_spec_path_joinpath(self): + self.assertIsInstance(self.files / 'a', CompatibilityFiles.OrphanPath) diff --git a/Lib/test/test_importlib/test_contents.py b/Lib/test/test_importlib/test_contents.py new file mode 100644 index 0000000000..3323bf5b5c --- /dev/null +++ b/Lib/test/test_importlib/test_contents.py @@ -0,0 +1,43 @@ +import unittest +from importlib import resources + +from . import data01 +from .resources import util + + +class ContentsTests: + expected = { + '__init__.py', + 'binary.file', + 'subdirectory', + 'utf-16.file', + 'utf-8.file', + } + + def test_contents(self): + contents = {path.name for path in resources.files(self.data).iterdir()} + assert self.expected <= contents + + +class ContentsDiskTests(ContentsTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase): + pass + + +class ContentsNamespaceTests(ContentsTests, unittest.TestCase): + expected = { + # no __init__ because of namespace design + # no subdirectory as incidental difference in fixture + 'binary.file', + 'utf-16.file', + 'utf-8.file', + } + + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 diff --git a/Lib/test/test_importlib/test_files.py b/Lib/test/test_importlib/test_files.py index 481829b742..b9170d83be 100644 --- a/Lib/test/test_importlib/test_files.py +++ b/Lib/test/test_importlib/test_files.py @@ -4,7 +4,7 @@ from importlib import resources from importlib.abc import Traversable from . import data01 -from . import util +from .resources import util class FilesTests: @@ -35,5 +35,12 @@ class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): pass +class OpenNamespaceTests(FilesTests, unittest.TestCase): + def setUp(self): + from . 
import namespacedata01 + + self.data = namespacedata01 + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/test_lazy.py b/Lib/test/test_importlib/test_lazy.py index 14e0186f97..7539a8c435 100644 --- a/Lib/test/test_importlib/test_lazy.py +++ b/Lib/test/test_importlib/test_lazy.py @@ -5,7 +5,7 @@ import types import unittest -from . import util as test_util +from test.test_importlib import util as test_util class CollectInit: diff --git a/Lib/test/test_importlib/test_locks.py b/Lib/test/test_importlib/test_locks.py index 78372059cc..32ed67c308 100644 --- a/Lib/test/test_importlib/test_locks.py +++ b/Lib/test/test_importlib/test_locks.py @@ -1,4 +1,4 @@ -from . import util as test_util +from test.test_importlib import util as test_util init = test_util.import_importlib('importlib') @@ -12,6 +12,9 @@ from test import lock_tests +threading_helper.requires_working_threading(module=True) + + class ModuleLockAsRLockTests: locktype = classmethod(lambda cls: cls.LockType("some_lock")) @@ -149,4 +152,4 @@ def setUpModule(): if __name__ == '__main__': - unittets.main() + unittest.main() diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/test_main.py index 52cb63712a..d9d067c4b2 100644 --- a/Lib/test/test_importlib/test_main.py +++ b/Lib/test/test_importlib/test_main.py @@ -1,7 +1,6 @@ import re import json import pickle -import textwrap import unittest import warnings import importlib.metadata @@ -16,9 +15,11 @@ Distribution, EntryPoint, PackageNotFoundError, + _unique, distributions, entry_points, metadata, + packages_distributions, version, ) @@ -36,10 +37,12 @@ def test_for_name_does_not_exist(self): Distribution.from_name('does-not-exist') def test_package_not_found_mentions_metadata(self): - # When a package is not found, that could indicate that the - # packgae is not installed or that it is installed without - # metadata. Ensure the exception mentions metadata to help - # guide users toward the cause. See #124. + """ + When a package is not found, that could indicate that the + packgae is not installed or that it is installed without + metadata. Ensure the exception mentions metadata to help + guide users toward the cause. See #124. + """ with self.assertRaises(PackageNotFoundError) as ctx: Distribution.from_name('does-not-exist') @@ -48,6 +51,14 @@ def test_package_not_found_mentions_metadata(self): def test_new_style_classes(self): self.assertIsInstance(Distribution, type) + @fixtures.parameterize( + dict(name=None), + dict(name=''), + ) + def test_invalid_inputs_to_from_name(self, name): + with self.assertRaises(Exception): + Distribution.from_name(name) + class ImportTests(fixtures.DistInfoPkg, unittest.TestCase): def test_import_nonexistent_module(self): @@ -75,44 +86,50 @@ def test_resolve_without_attr(self): class NameNormalizationTests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): @staticmethod - def pkg_with_dashes(site_dir): + def make_pkg(name): """ - Create minimal metadata for a package with dashes - in the name (and thus underscores in the filename). + Create minimal metadata for a dist-info package with + the indicated name on the file system. 
""" - metadata_dir = site_dir / 'my_pkg.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w', encoding='utf-8') as strm: - strm.write('Version: 1.0\n') - return 'my-pkg' + return { + f'{name}.dist-info': { + 'METADATA': 'VERSION: 1.0\n', + }, + } def test_dashes_in_dist_name_found_as_underscores(self): - # For a package with a dash in the name, the dist-info metadata - # uses underscores in the name. Ensure the metadata loads. - pkg_name = self.pkg_with_dashes(self.site_dir) - assert version(pkg_name) == '1.0' - - @staticmethod - def pkg_with_mixed_case(site_dir): """ - Create minimal metadata for a package with mixed case - in the name. + For a package with a dash in the name, the dist-info metadata + uses underscores in the name. Ensure the metadata loads. """ - metadata_dir = site_dir / 'CherryPy.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w', encoding='utf-8') as strm: - strm.write('Version: 1.0\n') - return 'CherryPy' + fixtures.build_files(self.make_pkg('my_pkg'), self.site_dir) + assert version('my-pkg') == '1.0' def test_dist_name_found_as_any_case(self): - # Ensure the metadata loads when queried with any case. - pkg_name = self.pkg_with_mixed_case(self.site_dir) + """ + Ensure the metadata loads when queried with any case. + """ + pkg_name = 'CherryPy' + fixtures.build_files(self.make_pkg(pkg_name), self.site_dir) assert version(pkg_name) == '1.0' assert version(pkg_name.lower()) == '1.0' assert version(pkg_name.upper()) == '1.0' + def test_unique_distributions(self): + """ + Two distributions varying only by non-normalized name on + the file system should resolve as the same. + """ + fixtures.build_files(self.make_pkg('abc'), self.site_dir) + before = list(_unique(distributions())) + + alt_site_dir = self.fixtures.enter_context(fixtures.tempdir()) + self.fixtures.enter_context(self.add_sys_path(alt_site_dir)) + fixtures.build_files(self.make_pkg('ABC'), alt_site_dir) + after = list(_unique(distributions())) + + assert len(after) == len(before) + class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): @staticmethod @@ -121,11 +138,12 @@ def pkg_with_non_ascii_description(site_dir): Create minimal metadata for a package with non-ASCII in the description. """ - metadata_dir = site_dir / 'portend.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w', encoding='utf-8') as fp: - fp.write('Description: pôrˈtend') + contents = { + 'portend.dist-info': { + 'METADATA': 'Description: pôrˈtend', + }, + } + fixtures.build_files(contents, site_dir) return 'portend' @staticmethod @@ -134,19 +152,15 @@ def pkg_with_non_ascii_description_egg_info(site_dir): Create minimal metadata for an egg-info package with non-ASCII in the description. 
""" - metadata_dir = site_dir / 'portend.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w', encoding='utf-8') as fp: - fp.write( - textwrap.dedent( - """ + contents = { + 'portend.dist-info': { + 'METADATA': """ Name: portend - pôrˈtend - """ - ).strip() - ) + pôrˈtend""", + }, + } + fixtures.build_files(contents, site_dir) return 'portend' def test_metadata_loads(self): @@ -203,7 +217,7 @@ class InaccessibleSysPath(fixtures.OnSysPath, ffs.TestCase): site_dir = '/access-denied' def setUp(self): - super(InaccessibleSysPath, self).setUp() + super().setUp() self.setUpPyfakefs() self.fs.create_dir(self.site_dir, perm_bits=000) @@ -217,13 +231,21 @@ def test_discovery(self): class TestEntryPoints(unittest.TestCase): def __init__(self, *args): - super(TestEntryPoints, self).__init__(*args) - self.ep = importlib.metadata.EntryPoint('name', 'value', 'group') + super().__init__(*args) + self.ep = importlib.metadata.EntryPoint( + name='name', value='value', group='group' + ) def test_entry_point_pickleable(self): revived = pickle.loads(pickle.dumps(self.ep)) assert revived == self.ep + def test_positional_args(self): + """ + Capture legacy (namedtuple) construction, discouraged. + """ + EntryPoint('name', 'value', 'group') + def test_immutable(self): """EntryPoints should be immutable""" with self.assertRaises(AttributeError): @@ -235,11 +257,13 @@ def test_repr(self): assert "'name'" in repr(self.ep) def test_hashable(self): - # EntryPoints should be hashable. + """EntryPoints should be hashable""" hash(self.ep) def test_json_dump(self): - # json should not expect to be able to dump an EntryPoint. + """ + json should not expect to be able to dump an EntryPoint + """ with self.assertRaises(Exception): with warnings.catch_warnings(record=True): json.dumps(self.ep) @@ -251,11 +275,13 @@ def test_attr(self): assert self.ep.attr is None def test_sortable(self): - # EntryPoint objects are sortable, but result is undefined. + """ + EntryPoint objects are sortable, but result is undefined. + """ sorted( [ - EntryPoint('b', 'val', 'group'), - EntryPoint('a', 'val', 'group'), + EntryPoint(name='b', value='val', group='group'), + EntryPoint(name='a', value='val', group='group'), ] ) @@ -264,10 +290,47 @@ class FileSystem( fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder, unittest.TestCase ): def test_unicode_dir_on_sys_path(self): - # Ensure a Unicode subdirectory of a directory on sys.path - # does not crash. + """ + Ensure a Unicode subdirectory of a directory on sys.path + does not crash. + """ fixtures.build_files( {self.unicode_filename(): {}}, prefix=self.site_dir, ) list(distributions()) + + +class PackagesDistributionsPrebuiltTest(fixtures.ZipFixtures, unittest.TestCase): + def test_packages_distributions_example(self): + self._fixture_on_path('example-21.12-py3-none-any.whl') + assert packages_distributions()['example'] == ['example'] + + def test_packages_distributions_example2(self): + """ + Test packages_distributions on a wheel built + by trampolim. + """ + self._fixture_on_path('example2-1.0.0-py3-none-any.whl') + assert packages_distributions()['example2'] == ['example2'] + + +class PackagesDistributionsTest( + fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase +): + def test_packages_distributions_neither_toplevel_nor_files(self): + """ + Test a package built without 'top-level.txt' or a file list. 
+ """ + fixtures.build_files( + { + 'trim_example-1.0.0.dist-info': { + 'METADATA': """ + Name: trim_example + Version: 1.0.0 + """, + } + }, + prefix=self.site_dir, + ) + packages_distributions() diff --git a/Lib/test/test_importlib/test_metadata_api.py b/Lib/test/test_importlib/test_metadata_api.py index 600a6e42f6..abf568fcca 100644 --- a/Lib/test/test_importlib/test_metadata_api.py +++ b/Lib/test/test_importlib/test_metadata_api.py @@ -21,7 +21,7 @@ @contextlib.contextmanager def suppress_known_deprecation(): with warnings.catch_warnings(record=True) as ctx: - warnings.simplefilter('default') + warnings.simplefilter('default', category=DeprecationWarning) yield ctx @@ -45,7 +45,6 @@ def test_retrieves_version_of_distinfo_pkg(self): assert isinstance(pkg_version, str) assert re.match(self.version_pattern, pkg_version) - # TODO: RUSTPYTHON def test_for_name_does_not_exist(self): with self.assertRaises(PackageNotFoundError): distribution('does-not-exist') @@ -90,13 +89,15 @@ def test_entry_points_distribution(self): self.assertIn(ep.dist.name, ('distinfo-pkg', 'egginfo-pkg')) self.assertEqual(ep.dist.version, "1.0.0") - def test_entry_points_unique_packages(self): - # Entry points should only be exposed for the first package - # on sys.path with a given name. + def test_entry_points_unique_packages_normalized(self): + """ + Entry points should only be exposed for the first package + on sys.path with a given name (even when normalized). + """ alt_site_dir = self.fixtures.enter_context(fixtures.tempdir()) self.fixtures.enter_context(self.add_sys_path(alt_site_dir)) alt_pkg = { - "distinfo_pkg-1.1.0.dist-info": { + "DistInfo_pkg-1.1.0.dist-info": { "METADATA": """ Name: distinfo-pkg Version: 1.1.0 @@ -114,7 +115,7 @@ def test_entry_points_unique_packages(self): for ep in entries ) # ns:sub doesn't exist in alt_pkg - assert 'ns:sub' not in entries + assert 'ns:sub' not in entries.names def test_entry_points_missing_name(self): with self.assertRaises(KeyError): @@ -124,9 +125,11 @@ def test_entry_points_missing_group(self): assert entry_points(group='missing') == () def test_entry_points_dict_construction(self): - # Prior versions of entry_points() returned simple lists and - # allowed casting those lists into maps by name using ``dict()``. - # Capture this now deprecated use-case. + """ + Prior versions of entry_points() returned simple lists and + allowed casting those lists into maps by name using ``dict()``. + Capture this now deprecated use-case. + """ with suppress_known_deprecation() as caught: eps = dict(entry_points(group='entries')) @@ -155,9 +158,11 @@ def test_entry_points_by_index(self): assert "Accessing entry points by index is deprecated" in str(expected) def test_entry_points_groups_getitem(self): - # Prior versions of entry_points() returned a dict. Ensure - # that callers using '.__getitem__()' are supported but warned to - # migrate. + """ + Prior versions of entry_points() returned a dict. Ensure + that callers using '.__getitem__()' are supported but warned to + migrate. + """ with suppress_known_deprecation(): entry_points()['entries'] == entry_points(group='entries') @@ -165,9 +170,11 @@ def test_entry_points_groups_getitem(self): entry_points()['missing'] def test_entry_points_groups_get(self): - # Prior versions of entry_points() returned a dict. Ensure - # that callers using '.get()' are supported but warned to - # migrate. + """ + Prior versions of entry_points() returned a dict. Ensure + that callers using '.get()' are supported but warned to + migrate. 
+ """ with suppress_known_deprecation(): entry_points().get('missing', 'default') == 'default' entry_points().get('entries', 'default') == entry_points()['entries'] @@ -202,10 +209,8 @@ def _test_files(files): file.read_text() def test_file_hash_repr(self): - assertRegex = self.assertRegex - util = [p for p in files('distinfo-pkg') if p.name == 'mod.py'][0] - assertRegex(repr(util.hash), '') + self.assertRegex(repr(util.hash), '') def test_files_dist_info(self): self._test_files(files('distinfo-pkg')) @@ -318,7 +323,7 @@ def test_find_distributions_specified_path(self): assert any(dist.metadata['Name'] == 'distinfo-pkg' for dist in dists) def test_distribution_at_pathlib(self): - # Demonstrate how to load metadata direct from a directory. + """Demonstrate how to load metadata direct from a directory.""" dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info' dist = Distribution.at(dist_info_path) assert dist.version == '1.0.0' diff --git a/Lib/test/test_importlib/test_namespace_pkgs.py b/Lib/test/test_importlib/test_namespace_pkgs.py index 295f97e3e1..cd08498545 100644 --- a/Lib/test/test_importlib/test_namespace_pkgs.py +++ b/Lib/test/test_importlib/test_namespace_pkgs.py @@ -1,5 +1,7 @@ import contextlib import importlib +import importlib.abc +import importlib.machinery import os import sys import tempfile @@ -63,12 +65,7 @@ def setUp(self): self.resolved_paths = [ os.path.join(self.root, path) for path in self.paths ] - self.ctx = namespace_tree_context(path=self.resolved_paths) - self.ctx.__enter__() - - def tearDown(self): - # TODO: will we ever want to pass exc_info to __exit__? - self.ctx.__exit__(None, None, None) + self.enterContext(namespace_tree_context(path=self.resolved_paths)) class SingleNamespacePackage(NamespacePackageTest): @@ -377,6 +374,11 @@ def test_path_indexable(self): expected_path = os.path.join(self.root, 'portion1', 'foo') self.assertEqual(foo.__path__[0], expected_path) + def test_loader_abc(self): + import foo + self.assertTrue(isinstance(foo.__loader__, importlib.abc.Loader)) + self.assertTrue(isinstance(foo.__loader__, importlib.machinery.NamespaceLoader)) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_importlib/test_open.py b/Lib/test/test_importlib/test_open.py index 1d0ba5a653..fc0136e865 100644 --- a/Lib/test/test_importlib/test_open.py +++ b/Lib/test/test_importlib/test_open.py @@ -2,42 +2,48 @@ from importlib import resources from . import data01 -from . 
import util +from .resources import util -class CommonBinaryTests(util.CommonResourceTests, unittest.TestCase): +class CommonBinaryTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): - with resources.open_binary(package, path): + target = resources.files(package).joinpath(path) + with target.open('rb'): pass -class CommonTextTests(util.CommonResourceTests, unittest.TestCase): +class CommonTextTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): - with resources.open_text(package, path): + target = resources.files(package).joinpath(path) + with target.open(): pass class OpenTests: def test_open_binary(self): - with resources.open_binary(self.data, 'binary.file') as fp: + target = resources.files(self.data) / 'binary.file' + with target.open('rb') as fp: result = fp.read() self.assertEqual(result, b'\x00\x01\x02\x03') def test_open_text_default_encoding(self): - with resources.open_text(self.data, 'utf-8.file') as fp: + target = resources.files(self.data) / 'utf-8.file' + with target.open() as fp: result = fp.read() self.assertEqual(result, 'Hello, UTF-8 world!\n') def test_open_text_given_encoding(self): - with resources.open_text(self.data, 'utf-16.file', 'utf-16', 'strict') as fp: + target = resources.files(self.data) / 'utf-16.file' + with target.open(encoding='utf-16', errors='strict') as fp: result = fp.read() self.assertEqual(result, 'Hello, UTF-16 world!\n') def test_open_text_with_errors(self): # Raises UnicodeError without the 'errors' argument. - with resources.open_text(self.data, 'utf-16.file', 'utf-8', 'strict') as fp: + target = resources.files(self.data) / 'utf-16.file' + with target.open(encoding='utf-8', errors='strict') as fp: self.assertRaises(UnicodeError, fp.read) - with resources.open_text(self.data, 'utf-16.file', 'utf-8', 'ignore') as fp: + with target.open(encoding='utf-8', errors='ignore') as fp: result = fp.read() self.assertEqual( result, @@ -47,14 +53,12 @@ def test_open_text_with_errors(self): ) def test_open_binary_FileNotFoundError(self): - self.assertRaises( - FileNotFoundError, resources.open_binary, self.data, 'does-not-exist' - ) + target = resources.files(self.data) / 'does-not-exist' + self.assertRaises(FileNotFoundError, target.open, 'rb') def test_open_text_FileNotFoundError(self): - self.assertRaises( - FileNotFoundError, resources.open_text, self.data, 'does-not-exist' - ) + target = resources.files(self.data) / 'does-not-exist' + self.assertRaises(FileNotFoundError, target.open) class OpenDiskTests(OpenTests, unittest.TestCase): diff --git a/Lib/test/test_importlib/test_path.py b/Lib/test/test_importlib/test_path.py index d6ed09a9e0..6fc41f301d 100644 --- a/Lib/test/test_importlib/test_path.py +++ b/Lib/test/test_importlib/test_path.py @@ -3,12 +3,12 @@ from importlib import resources from . import data01 -from . import util +from .resources import util -class CommonTests(util.CommonResourceTests, unittest.TestCase): +class CommonTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): - with resources.path(package, path): + with resources.as_file(resources.files(package).joinpath(path)): pass @@ -17,7 +17,8 @@ def test_reading(self): # Path should be readable. # Test also implicitly verifies the returned object is a pathlib.Path # instance. 
- with resources.path(self.data, 'utf-8.file') as path: + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) # pathlib.Path.read_text() was introduced in Python 3.5. with path.open('r', encoding='utf-8') as file: @@ -32,7 +33,8 @@ def test_natural_path(self): # Guarantee the internal implementation detail that # file-system-backed resources do not get the tempdir # treatment. - with resources.path(self.data, 'utf-8.file') as path: + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: assert 'data' in str(path) @@ -51,7 +53,8 @@ class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase): def test_remove_in_context_manager(self): # It is not an error if the file that was temporarily stashed on the # file system is removed inside the `with` stanza. - with resources.path(self.data, 'utf-8.file') as path: + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: path.unlink() diff --git a/Lib/test/test_importlib/test_read.py b/Lib/test/test_importlib/test_read.py index f6ec13af62..ebd7226777 100644 --- a/Lib/test/test_importlib/test_read.py +++ b/Lib/test/test_importlib/test_read.py @@ -2,36 +2,41 @@ from importlib import import_module, resources from . import data01 -from . import util +from .resources import util -class CommonBinaryTests(util.CommonResourceTests, unittest.TestCase): +class CommonBinaryTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): - resources.read_binary(package, path) + resources.files(package).joinpath(path).read_bytes() -class CommonTextTests(util.CommonResourceTests, unittest.TestCase): +class CommonTextTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): - resources.read_text(package, path) + resources.files(package).joinpath(path).read_text() class ReadTests: - def test_read_binary(self): - result = resources.read_binary(self.data, 'binary.file') + def test_read_bytes(self): + result = resources.files(self.data).joinpath('binary.file').read_bytes() self.assertEqual(result, b'\0\1\2\3') def test_read_text_default_encoding(self): - result = resources.read_text(self.data, 'utf-8.file') + result = resources.files(self.data).joinpath('utf-8.file').read_text() self.assertEqual(result, 'Hello, UTF-8 world!\n') def test_read_text_given_encoding(self): - result = resources.read_text(self.data, 'utf-16.file', encoding='utf-16') + result = ( + resources.files(self.data) + .joinpath('utf-16.file') + .read_text(encoding='utf-16') + ) self.assertEqual(result, 'Hello, UTF-16 world!\n') def test_read_text_with_errors(self): # Raises UnicodeError without the 'errors' argument. 
- self.assertRaises(UnicodeError, resources.read_text, self.data, 'utf-16.file') - result = resources.read_text(self.data, 'utf-16.file', errors='ignore') + target = resources.files(self.data) / 'utf-16.file' + self.assertRaises(UnicodeError, target.read_text, encoding='utf-8') + result = target.read_text(encoding='utf-8', errors='ignore') self.assertEqual( result, 'H\x00e\x00l\x00l\x00o\x00,\x00 ' @@ -47,13 +52,24 @@ class ReadDiskTests(ReadTests, unittest.TestCase): class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase): def test_read_submodule_resource(self): submodule = import_module('ziptestdata.subdirectory') - result = resources.read_binary(submodule, 'binary.file') + result = resources.files(submodule).joinpath('binary.file').read_bytes() self.assertEqual(result, b'\0\1\2\3') def test_read_submodule_resource_by_name(self): - result = resources.read_binary('ziptestdata.subdirectory', 'binary.file') + result = ( + resources.files('ziptestdata.subdirectory') + .joinpath('binary.file') + .read_bytes() + ) self.assertEqual(result, b'\0\1\2\3') +class ReadNamespaceTests(ReadTests, unittest.TestCase): + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/test_resource.py b/Lib/test/test_importlib/test_resource.py index 003f7e95ad..834b8bd8a2 100644 --- a/Lib/test/test_importlib/test_resource.py +++ b/Lib/test/test_importlib/test_resource.py @@ -5,7 +5,7 @@ from . import data01 from . import zipdata01, zipdata02 -from . import util +from .resources import util from importlib import resources, import_module from test.support import import_helper from test.support.os_helper import unlink @@ -14,34 +14,18 @@ class ResourceTests: # Subclasses are expected to set the `data` attribute. - def test_is_resource_good_path(self): - self.assertTrue(resources.is_resource(self.data, 'binary.file')) + def test_is_file_exists(self): + target = resources.files(self.data) / 'binary.file' + self.assertTrue(target.is_file()) - def test_is_resource_missing(self): - self.assertFalse(resources.is_resource(self.data, 'not-a-file')) + def test_is_file_missing(self): + target = resources.files(self.data) / 'not-a-file' + self.assertFalse(target.is_file()) - def test_is_resource_subresource_directory(self): - # Directories are not resources. - self.assertFalse(resources.is_resource(self.data, 'subdirectory')) - - def test_contents(self): - contents = set(resources.contents(self.data)) - # There may be cruft in the directory listing of the data directory. - # It could have a __pycache__ directory, - # an artifact of the - # test suite importing these modules, which - # are not germane to this test, so just filter them out. 
- contents.discard('__pycache__') - self.assertEqual( - contents, - { - '__init__.py', - 'subdirectory', - 'utf-8.file', - 'binary.file', - 'utf-16.file', - }, - ) + def test_is_dir(self): + target = resources.files(self.data) / 'subdirectory' + self.assertFalse(target.is_file()) + self.assertTrue(target.is_dir()) class ResourceDiskTests(ResourceTests, unittest.TestCase): @@ -53,30 +37,34 @@ class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase): pass +def names(traversable): + return {item.name for item in traversable.iterdir()} + + class ResourceLoaderTests(unittest.TestCase): def test_resource_contents(self): package = util.create_package( file=data01, path=data01.__file__, contents=['A', 'B', 'C'] ) - self.assertEqual(set(resources.contents(package)), {'A', 'B', 'C'}) + self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'}) - def test_resource_is_resource(self): + def test_is_file(self): package = util.create_package( file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] ) - self.assertTrue(resources.is_resource(package, 'B')) + self.assertTrue(resources.files(package).joinpath('B').is_file()) - def test_resource_directory_is_not_resource(self): + def test_is_dir(self): package = util.create_package( file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] ) - self.assertFalse(resources.is_resource(package, 'D')) + self.assertTrue(resources.files(package).joinpath('D').is_dir()) - def test_resource_missing_is_not_resource(self): + def test_resource_missing(self): package = util.create_package( file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] ) - self.assertFalse(resources.is_resource(package, 'Z')) + self.assertFalse(resources.files(package).joinpath('Z').is_file()) class ResourceCornerCaseTests(unittest.TestCase): @@ -94,7 +82,7 @@ def test_package_has_no_reader_fallback(self): module.__file__ = '/path/which/shall/not/be/named' module.__spec__.loader = module.__loader__ module.__spec__.origin = module.__file__ - self.assertFalse(resources.is_resource(module, 'A')) + self.assertFalse(resources.files(module).joinpath('A').is_file()) class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): @@ -102,22 +90,24 @@ class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): def test_is_submodule_resource(self): submodule = import_module('ziptestdata.subdirectory') - self.assertTrue(resources.is_resource(submodule, 'binary.file')) + self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file()) def test_read_submodule_resource_by_name(self): self.assertTrue( - resources.is_resource('ziptestdata.subdirectory', 'binary.file') + resources.files('ziptestdata.subdirectory') + .joinpath('binary.file') + .is_file() ) def test_submodule_contents(self): submodule = import_module('ziptestdata.subdirectory') self.assertEqual( - set(resources.contents(submodule)), {'__init__.py', 'binary.file'} + names(resources.files(submodule)), {'__init__.py', 'binary.file'} ) def test_submodule_contents_by_name(self): self.assertEqual( - set(resources.contents('ziptestdata.subdirectory')), + names(resources.files('ziptestdata.subdirectory')), {'__init__.py', 'binary.file'}, ) @@ -131,10 +121,12 @@ def test_unrelated_contents(self): distinct resources. Ref python/importlib_resources#44. 
""" self.assertEqual( - set(resources.contents('ziptestdata.one')), {'__init__.py', 'resource1.txt'} + names(resources.files('ziptestdata.one')), + {'__init__.py', 'resource1.txt'}, ) self.assertEqual( - set(resources.contents('ziptestdata.two')), {'__init__.py', 'resource2.txt'} + names(resources.files('ziptestdata.two')), + {'__init__.py', 'resource2.txt'}, ) @@ -175,41 +167,43 @@ def tearDown(self): # If the test fails, this will probably fail too pass - def test_contents_does_not_keep_open(self): - c = resources.contents('ziptestdata') + def test_iterdir_does_not_keep_open(self): + c = [item.name for item in resources.files('ziptestdata').iterdir()] self.zip_path.unlink() del c - def test_is_resource_does_not_keep_open(self): - c = resources.is_resource('ziptestdata', 'binary.file') + def test_is_file_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('binary.file').is_file() self.zip_path.unlink() del c - def test_is_resource_failure_does_not_keep_open(self): - c = resources.is_resource('ziptestdata', 'not-present') + def test_is_file_failure_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('not-present').is_file() self.zip_path.unlink() del c @unittest.skip("Desired but not supported.") - def test_path_does_not_keep_open(self): - c = resources.path('ziptestdata', 'binary.file') + def test_as_file_does_not_keep_open(self): # pragma: no cover + c = resources.as_file(resources.files('ziptestdata') / 'binary.file') self.zip_path.unlink() del c def test_entered_path_does_not_keep_open(self): # This is what certifi does on import to make its bundle # available for the process duration. - c = resources.path('ziptestdata', 'binary.file').__enter__() + c = resources.as_file( + resources.files('ziptestdata') / 'binary.file' + ).__enter__() self.zip_path.unlink() del c def test_read_binary_does_not_keep_open(self): - c = resources.read_binary('ziptestdata', 'binary.file') + c = resources.files('ziptestdata').joinpath('binary.file').read_bytes() self.zip_path.unlink() del c def test_read_text_does_not_keep_open(self): - c = resources.read_text('ziptestdata', 'utf-8.file', encoding='utf-8') + c = resources.files('ziptestdata').joinpath('utf-8.file').read_text() self.zip_path.unlink() del c @@ -227,14 +221,18 @@ def tearDownClass(cls): def test_is_submodule_resource(self): self.assertTrue( - resources.is_resource(import_module('namespacedata01'), 'binary.file') + resources.files(import_module('namespacedata01')) + .joinpath('binary.file') + .is_file() ) def test_read_submodule_resource_by_name(self): - self.assertTrue(resources.is_resource('namespacedata01', 'binary.file')) + self.assertTrue( + resources.files('namespacedata01').joinpath('binary.file').is_file() + ) def test_submodule_contents(self): - contents = set(resources.contents(import_module('namespacedata01'))) + contents = names(resources.files(import_module('namespacedata01'))) try: contents.remove('__pycache__') except KeyError: @@ -242,7 +240,7 @@ def test_submodule_contents(self): self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) def test_submodule_contents_by_name(self): - contents = set(resources.contents('namespacedata01')) + contents = names(resources.files('namespacedata01')) try: contents.remove('__pycache__') except KeyError: diff --git a/Lib/test/test_importlib/test_spec.py b/Lib/test/test_importlib/test_spec.py index a61e63fc2d..57a98f36a7 100644 --- a/Lib/test/test_importlib/test_spec.py +++ b/Lib/test/test_importlib/test_spec.py @@ -1,4 +1,4 @@ -from . 
import util as test_util +from test.test_importlib import util as test_util init = test_util.import_importlib('importlib') machinery = test_util.import_importlib('importlib.machinery') diff --git a/Lib/test/test_importlib/test_threaded_import.py b/Lib/test/test_importlib/test_threaded_import.py index 766d84fda8..3c143ad37f 100644 --- a/Lib/test/test_importlib/test_threaded_import.py +++ b/Lib/test/test_importlib/test_threaded_import.py @@ -19,6 +19,8 @@ from test.support.os_helper import (TESTFN, unlink, rmtree) from test.support import script_helper, threading_helper +threading_helper.requires_working_threading(module=True) + def task(N, done, done_tasks, errors): try: # We don't use modulefinder but still import it in order to stress @@ -156,7 +158,7 @@ def test_parallel_meta_path(self): finally: sys.meta_path.remove(finder) - # TODO: RUSTPYTHON + # TODO: RUSTPYTHON; maybe hang? @unittest.expectedFailure def test_parallel_path_hooks(self): # Here the Finder instance is only used to check concurrent calls diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py index 9e2c294373..6c791fc012 100644 --- a/Lib/test/test_importlib/test_util.py +++ b/Lib/test/test_importlib/test_util.py @@ -1,4 +1,5 @@ -from . import util +from test.test_importlib import util + abc = util.import_importlib('importlib.abc') init = util.import_importlib('importlib') machinery = util.import_importlib('importlib.machinery') @@ -876,7 +877,7 @@ def test_magic_number(self): # stakeholders such as OS package maintainers must be notified # in advance. Such exceptional releases will then require an # adjustment to this test case. - EXPECTED_MAGIC_NUMBER = 3439 + EXPECTED_MAGIC_NUMBER = 3495 actual = int.from_bytes(importlib.util.MAGIC_NUMBER[:2], 'little') msg = ( diff --git a/Lib/test/test_importlib/test_windows.py b/Lib/test/test_importlib/test_windows.py index a791c0b450..051193fae0 100644 --- a/Lib/test/test_importlib/test_windows.py +++ b/Lib/test/test_importlib/test_windows.py @@ -1,4 +1,4 @@ -from . import util as test_util +from test.test_importlib import util as test_util machinery = test_util.import_importlib('importlib.machinery') import os @@ -6,10 +6,9 @@ import sys import unittest import warnings -from test import support from test.support import import_helper from contextlib import contextmanager -from .util import temp_module +from test.test_importlib.util import temp_module import_helper.import_module('winreg', required_on=['win']) from winreg import ( @@ -191,3 +190,6 @@ def test_path_join(self): self.check_join("C:", "C:", "") self.check_join("//Server/Share\\", "//Server/Share/", "") self.check_join("//Server/Share\\", "//Server/Share", "") + +if __name__ == '__main__': + unittest.main() diff --git a/Lib/test/test_importlib/test_zip.py b/Lib/test/test_importlib/test_zip.py index bf16a3b95e..276f6288c9 100644 --- a/Lib/test/test_importlib/test_zip.py +++ b/Lib/test/test_importlib/test_zip.py @@ -1,7 +1,7 @@ import sys import unittest -from contextlib import ExitStack +from . 
import fixtures from importlib.metadata import ( PackageNotFoundError, distribution, @@ -10,27 +10,11 @@ files, version, ) -from importlib import resources -from test.support import requires_zlib - - -@requires_zlib() -class TestZip(unittest.TestCase): - root = 'test.test_importlib.data' - - def _fixture_on_path(self, filename): - pkg_file = resources.files(self.root).joinpath(filename) - file = self.resources.enter_context(resources.as_file(pkg_file)) - assert file.name.startswith('example-'), file.name - sys.path.insert(0, str(file)) - self.resources.callback(sys.path.pop, 0) +class TestZip(fixtures.ZipFixtures, unittest.TestCase): def setUp(self): - # Find the path to the example-*.whl so we can add it to the front of - # sys.path, where we'll then try to find the metadata thereof. - self.resources = ExitStack() - self.addCleanup(self.resources.close) + super().setUp() self._fixture_on_path('example-21.12-py3-none-any.whl') def test_zip_version(self): @@ -63,13 +47,9 @@ def test_one_distribution(self): assert len(dists) == 1 -@requires_zlib() class TestEgg(TestZip): def setUp(self): - # Find the path to the example-*.egg so we can add it to the front of - # sys.path, where we'll then try to find the metadata thereof. - self.resources = ExitStack() - self.addCleanup(self.resources.close) + super().setUp() self._fixture_on_path('example-21.12-py3.6.egg') def test_files(self): diff --git a/Lib/test/test_importlib/update-zips.py b/Lib/test/test_importlib/update-zips.py index 9ef0224ca6..231334aa7e 100755 --- a/Lib/test/test_importlib/update-zips.py +++ b/Lib/test/test_importlib/update-zips.py @@ -42,7 +42,7 @@ def generate(suffix): def walk(datapath): for dirpath, dirnames, filenames in os.walk(datapath): - with contextlib.suppress(KeyError): + with contextlib.suppress(ValueError): dirnames.remove('__pycache__') for filename in filenames: res = pathlib.Path(dirpath) / filename diff --git a/Lib/test/test_importlib/util.py b/Lib/test/test_importlib/util.py index 854506518c..0b6dcc5eaf 100644 --- a/Lib/test/test_importlib/util.py +++ b/Lib/test/test_importlib/util.py @@ -1,17 +1,11 @@ -import abc import builtins import contextlib import errno import functools -import importlib from importlib import machinery, util, invalidate_caches -from importlib.abc import ResourceReader -import io import marshal import os import os.path -from pathlib import Path, PurePath -from test import support from test.support import import_helper from test.support import os_helper import unittest @@ -19,9 +13,6 @@ import tempfile import types -from . import data01 -from . 
import zipdata01 - BUILTINS = types.SimpleNamespace() BUILTINS.good_name = None @@ -307,7 +298,7 @@ def writes_bytecode_files(fxn): """Decorator to protect sys.dont_write_bytecode from mutation and to skip tests that require it to be set to False.""" if sys.dont_write_bytecode: - return unittest.skip(fxn) + return unittest.skip("relies on writing bytecode")(fxn) @functools.wraps(fxn) def wrapper(*args, **kwargs): original = sys.dont_write_bytecode @@ -417,166 +408,3 @@ def caseok_env_changed(self, *, should_exist): if any(x in self.importlib._bootstrap_external._os.environ for x in possibilities) != should_exist: self.skipTest('os.environ changes not reflected in _os.environ') - - -def create_package(file, path, is_package=True, contents=()): - class Reader(ResourceReader): - def get_resource_reader(self, package): - return self - - def open_resource(self, path): - self._path = path - if isinstance(file, Exception): - raise file - else: - return file - - def resource_path(self, path_): - self._path = path_ - if isinstance(path, Exception): - raise path - else: - return path - - def is_resource(self, path_): - self._path = path_ - if isinstance(path, Exception): - raise path - for entry in contents: - parts = entry.split('/') - if len(parts) == 1 and parts[0] == path_: - return True - return False - - def contents(self): - if isinstance(path, Exception): - raise path - # There's no yield from in baseball, er, Python 2. - for entry in contents: - yield entry - - name = 'testingpackage' - # Unfortunately importlib.util.module_from_spec() was not introduced until - # Python 3.5. - module = types.ModuleType(name) - loader = Reader() - spec = machinery.ModuleSpec( - name, loader, - origin='does-not-exist', - is_package=is_package) - module.__spec__ = spec - module.__loader__ = loader - return module - - -class CommonResourceTests(abc.ABC): - @abc.abstractmethod - def execute(self, package, path): - raise NotImplementedError - - def test_package_name(self): - # Passing in the package name should succeed. - self.execute(data01.__name__, 'utf-8.file') - - def test_package_object(self): - # Passing in the package itself should succeed. - self.execute(data01, 'utf-8.file') - - def test_string_path(self): - # Passing in a string for the path should succeed. - path = 'utf-8.file' - self.execute(data01, path) - - @unittest.skipIf(sys.version_info < (3, 6), 'requires os.PathLike support') - def test_pathlib_path(self): - # Passing in a pathlib.PurePath object for the path should succeed. - path = PurePath('utf-8.file') - self.execute(data01, path) - - def test_absolute_path(self): - # An absolute path is a ValueError. - path = Path(__file__) - full_path = path.parent/'utf-8.file' - with self.assertRaises(ValueError): - self.execute(data01, full_path) - - def test_relative_path(self): - # A relative path is a ValueError. - with self.assertRaises(ValueError): - self.execute(data01, '../data01/utf-8.file') - - def test_importing_module_as_side_effect(self): - # The anchor package can already be imported. - del sys.modules[data01.__name__] - self.execute(data01.__name__, 'utf-8.file') - - def test_non_package_by_name(self): - # The anchor package cannot be a module. - with self.assertRaises(TypeError): - self.execute(__name__, 'utf-8.file') - - def test_non_package_by_package(self): - # The anchor package cannot be a module. 
- with self.assertRaises(TypeError): - module = sys.modules['test.test_importlib.util'] - self.execute(module, 'utf-8.file') - - @unittest.skipIf(sys.version_info < (3,), 'No ResourceReader in Python 2') - def test_resource_opener(self): - bytes_data = io.BytesIO(b'Hello, world!') - package = create_package(file=bytes_data, path=FileNotFoundError()) - self.execute(package, 'utf-8.file') - self.assertEqual(package.__loader__._path, 'utf-8.file') - - @unittest.skipIf(sys.version_info < (3,), 'No ResourceReader in Python 2') - def test_resource_path(self): - bytes_data = io.BytesIO(b'Hello, world!') - path = __file__ - package = create_package(file=bytes_data, path=path) - self.execute(package, 'utf-8.file') - self.assertEqual(package.__loader__._path, 'utf-8.file') - - def test_useless_loader(self): - package = create_package(file=FileNotFoundError(), - path=FileNotFoundError()) - with self.assertRaises(FileNotFoundError): - self.execute(package, 'utf-8.file') - - -class ZipSetupBase: - ZIP_MODULE = None - - @classmethod - def setUpClass(cls): - data_path = Path(cls.ZIP_MODULE.__file__) - data_dir = data_path.parent - cls._zip_path = str(data_dir / 'ziptestdata.zip') - sys.path.append(cls._zip_path) - cls.data = importlib.import_module('ziptestdata') - - @classmethod - def tearDownClass(cls): - try: - sys.path.remove(cls._zip_path) - except ValueError: - pass - - try: - del sys.path_importer_cache[cls._zip_path] - del sys.modules[cls.data.__name__] - except KeyError: - pass - - try: - del cls.data - del cls._zip_path - except AttributeError: - pass - - def setUp(self): - modules = import_helper.modules_setup() - self.addCleanup(import_helper.modules_cleanup, *modules) - - -class ZipSetup(ZipSetupBase): - ZIP_MODULE = zipdata01 # type: ignore diff --git a/vm/Lib/python_builtins/__hello__.py b/vm/Lib/python_builtins/__hello__.py new file mode 120000 index 0000000000..f6cae8932f --- /dev/null +++ b/vm/Lib/python_builtins/__hello__.py @@ -0,0 +1 @@ +../../../Lib/__hello__.py \ No newline at end of file diff --git a/vm/Lib/python_builtins/__phello__ b/vm/Lib/python_builtins/__phello__ new file mode 120000 index 0000000000..113aeb1504 --- /dev/null +++ b/vm/Lib/python_builtins/__phello__ @@ -0,0 +1 @@ +../../../Lib/__phello__ \ No newline at end of file diff --git a/vm/src/frozen.rs b/vm/src/frozen.rs index 44bcf23c57..8c809aa372 100644 --- a/vm/src/frozen.rs +++ b/vm/src/frozen.rs @@ -11,11 +11,12 @@ pub fn core_frozen_inits() -> impl Iterator { }; } - ext_modules!( - iter, - source = "initialized = True; print(\"Hello world!\")\n", - module_name = "__hello__", - ); + // keep as example but use file one now + // ext_modules!( + // iter, + // source = "initialized = True; print(\"Hello world!\")\n", + // module_name = "__hello__", + // ); // Python modules that the vm calls into, but are not actually part of the stdlib. They could // in theory be implemented in Rust, but are easiest to do in Python for one reason or another.
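The resource tests touched above (test_contents.py, test_open.py, test_read.py, test_resource.py) replace the legacy resources.contents(), open_binary()/open_text(), read_binary()/read_text() and is_resource() helpers with the importlib.resources.files() Traversable API. A minimal sketch of the equivalent calls, assuming Python 3.9 or later and a hypothetical installed package named mypkg containing data.txt and a subdirectory (all names are illustrative):

    # Traversable-based resource access, as exercised by the updated tests.
    # `mypkg`, `data.txt` and `subdirectory` are hypothetical names.
    from importlib import resources

    pkg = resources.files('mypkg')                        # Traversable for the package root
    text = (pkg / 'data.txt').read_text(encoding='utf-8')
    raw = pkg.joinpath('data.txt').read_bytes()

    with (pkg / 'data.txt').open('rb') as fp:             # replaces resources.open_binary()
        header = fp.read(4)

    names = {entry.name for entry in pkg.iterdir()}       # replaces resources.contents()
    is_resource = (pkg / 'data.txt').is_file()            # replaces resources.is_resource()
    is_directory = (pkg / 'subdirectory').is_dir()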
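test_path.py now wraps Traversable objects with importlib.resources.as_file() instead of calling resources.path(). The context manager yields a real pathlib.Path; for zip-backed packages the resource is extracted to a temporary file that is removed on exit. A short sketch with the same hypothetical mypkg/data.txt names:

    from importlib import resources

    target = resources.files('mypkg') / 'data.txt'
    with resources.as_file(target) as path:
        # `path` is a concrete pathlib.Path; zip-backed resources are
        # materialised in a temporary location for the duration of the block.
        payload = path.read_bytes()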
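The importlib.metadata tests now construct EntryPoint with keyword arguments and select entry points by group; the dict-style access they still exercise is the deprecated pre-3.10 behaviour kept only for coverage. A sketch of the current usage, assuming Python 3.10+ selectable entry points ('console_scripts' and 'my-tool' are illustrative names):

    from importlib.metadata import EntryPoint, entry_points

    # Keyword construction, as in TestEntryPoints; the positional,
    # namedtuple-style form is captured by the tests but discouraged.
    ep = EntryPoint(name='name', value='value', group='group')

    # Group selection returns an EntryPoints collection; `.names` replaces
    # the deprecated dict-style membership checks.
    scripts = entry_points(group='console_scripts')
    print('my-tool' in scripts.names)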
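test_main.py also gains coverage for packages_distributions(), added to importlib.metadata in Python 3.10, which maps importable top-level names to the distributions that provide them. A minimal sketch:

    from importlib.metadata import packages_distributions

    mapping = packages_distributions()
    # e.g. {'example': ['example'], ...}: top-level module name mapped to
    # the list of distribution names that ship it.
    print(mapping.get('example'))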
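test_namespace_pkgs.py drops the hand-rolled __enter__/tearDown bookkeeping in favour of TestCase.enterContext(), added to unittest in Python 3.11, which enters a context manager immediately and registers its exit as a cleanup. A condensed sketch of the pattern, using a temporary directory as a stand-in for the namespace_tree_context fixture:

    import tempfile
    import unittest

    class ExampleTest(unittest.TestCase):
        def setUp(self):
            # Enters the context manager now and schedules its __exit__ as a
            # cleanup, replacing a manual tearDown override.
            self.workdir = self.enterContext(tempfile.TemporaryDirectory())

        def test_uses_workdir(self):
            self.assertTrue(self.workdir)

    if __name__ == '__main__':
        unittest.main()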
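The update-zips.py change swaps suppress(KeyError) for suppress(ValueError) because os.walk() yields dirnames as a plain list, and list.remove() raises ValueError when the item is absent, so the old guard never caught anything. A small sketch of the corrected pruning loop:

    import contextlib
    import os

    for dirpath, dirnames, filenames in os.walk('.'):
        with contextlib.suppress(ValueError):
            dirnames.remove('__pycache__')   # prune if present; no-op otherwise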
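The writes_bytecode_files change in util.py fixes a latent bug: unittest.skip() expects a reason and returns a decorator, so passing the test function itself never produced a properly skipped test. The corrected form passes a reason string and applies the returned decorator to the function. A condensed sketch of the pattern; the real helper additionally toggles and restores sys.dont_write_bytecode inside the wrapper:

    import functools
    import sys
    import unittest

    def writes_bytecode_files(fxn):
        """Skip the wrapped test when bytecode writing is disabled for the run."""
        if sys.dont_write_bytecode:
            return unittest.skip("relies on writing bytecode")(fxn)

        @functools.wraps(fxn)
        def wrapper(*args, **kwargs):
            return fxn(*args, **kwargs)   # the real helper also restores the flag here
        return wrapper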