--- importlib_metadata-8.6.1/.coveragerc ---

[run]
omit =
    # leading `*/` for pytest-dev/pytest-cov#456
    */.tox/*
    */pep517-build-env-*
    tests/*
    prepare/*
    */_itertools.py
    exercises.py
    */pip-run-*
disable_warnings =
    couldnt-parse

[report]
show_missing = True
exclude_also =
    # Exclude common false positives per
    # https://coverage.readthedocs.io/en/latest/excluding.html#advanced-exclusion
    # Ref jaraco/skeleton#97 and jaraco/skeleton#135
    class .*\bProtocol\):
    if TYPE_CHECKING:

--- importlib_metadata-8.6.1/.editorconfig ---

root = true

[*]
charset = utf-8
indent_style = tab
indent_size = 4
insert_final_newline = true
end_of_line = lf

[*.py]
indent_style = space
max_line_length = 88

[*.{yml,yaml}]
indent_style = space
indent_size = 2

[*.rst]
indent_style = space

--- importlib_metadata-8.6.1/.github/FUNDING.yml ---

tidelift: pypi/importlib-metadata

--- importlib_metadata-8.6.1/.github/dependabot.yml ---

version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "daily"
    allow:
      - dependency-type: "all"

--- importlib_metadata-8.6.1/.github/workflows/main.yml ---

name: tests

on:
  merge_group:
  push:
    branches-ignore:
      # temporary GH branches relating to merge queues (jaraco/skeleton#93)
      - gh-readonly-queue/**
    tags:
      # required if branches-ignore is supplied (jaraco/skeleton#103)
      - '**'
  pull_request:
  workflow_dispatch:

permissions:
  contents: read

env:
  # Environment variable to support color support (jaraco/skeleton#66)
  FORCE_COLOR: 1
  # Suppress noisy pip warnings
  PIP_DISABLE_PIP_VERSION_CHECK: 'true'
  PIP_NO_PYTHON_VERSION_WARNING: 'true'
  PIP_NO_WARN_SCRIPT_LOCATION: 'true'
  # Ensure tests can sense settings about the environment
  TOX_OVERRIDE: >-
    testenv.pass_env+=GITHUB_*,FORCE_COLOR

jobs:
  test:
    strategy:
      # https://blog.jaraco.com/efficient-use-of-ci-resources/
      matrix:
        python:
          - "3.9"
          - "3.13"
        platform:
          - ubuntu-latest
          - macos-latest
          - windows-latest
        include:
          - python: "3.10"
            platform: ubuntu-latest
          - python: "3.11"
            platform: ubuntu-latest
          - python: "3.12"
            platform: ubuntu-latest
          - python: "3.14"
            platform: ubuntu-latest
          - python: pypy3.10
            platform: ubuntu-latest
    runs-on: ${{ matrix.platform }}
    continue-on-error: ${{ matrix.python
== '3.14' }} steps: - uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} allow-prereleases: true - name: Install tox run: python -m pip install tox - name: Run run: tox collateral: strategy: fail-fast: false matrix: job: - diffcov - docs runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup Python uses: actions/setup-python@v4 with: python-version: 3.x - name: Install tox run: python -m pip install tox - name: Eval ${{ matrix.job }} run: tox -e ${{ matrix.job }} check: # This job does nothing and is only used for the branch protection if: always() needs: - test - collateral runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ toJSON(needs) }} release: permissions: contents: write needs: - check if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: python-version: 3.x - name: Install tox run: python -m pip install tox - name: Run run: tox -e release env: TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/.gitignore0000644000175100001660000000023214743546124016771 0ustar00runnerdockerbuild /coverage.xml /diffcov.html htmlcov importlib_metadata.egg-info .mypy_cache /.coverage /.DS_Store artifacts .eggs .doctrees dist pip-wheel-metadata ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/.pre-commit-config.yaml0000644000175100001660000000022614743546124021265 0ustar00runnerdockerrepos: - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.7.1 hooks: - id: ruff args: [--fix, --unsafe-fixes] - id: ruff-format ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/.readthedocs.yaml0000644000175100001660000000051614743546124020235 0ustar00runnerdockerversion: 2 python: install: - path: . extra_requirements: - doc # required boilerplate readthedocs/readthedocs.org#10401 build: os: ubuntu-lts-latest tools: python: latest # post-checkout job to ensure the clone isn't shallow jaraco/skeleton#114 jobs: post_checkout: - git fetch --unshallow || true ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/LICENSE0000644000175100001660000002613614743546124016021 0ustar00runnerdocker Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/NEWS.rst0000644000175100001660000005642014743546124016321 0ustar00runnerdockerv8.6.1 ====== Bugfixes -------- - Fixed indentation logic to also honor blank lines. v8.6.0 ====== Features -------- - Add support for rendering metadata where some fields have newlines (python/cpython#119650). v8.5.0 ====== Features -------- - Deferred import of zipfile.Path (#502) - Deferred import of json (#503) - Rely on zipp overlay for zipfile.Path. v8.4.0 ====== Features -------- - Deferred import of inspect for import performance. (#499) v8.3.0 ====== Features -------- - Disallow passing of 'dist' to EntryPoints.select. v8.2.0 ====== Features -------- - Add SimplePath to importlib_metadata.__all__. 
(#494) v8.1.0 ====== Features -------- - Prioritize valid dists to invalid dists when retrieving by name. (#489) v8.0.0 ====== Deprecations and Removals ------------------------- - Message.__getitem__ now raises a KeyError on missing keys. (#371) - Removed deprecated support for Distribution subclasses not implementing abstract methods. v7.2.1 ====== Bugfixes -------- - When reading installed files from an egg, use ``relative_to(walk_up=True)`` to honor files installed outside of the installation root. (#455) v7.2.0 ====== Features -------- - Deferred select imports in for speedup (python/cpython#109829). - Updated fixtures for python/cpython#120801. v7.1.0 ====== Features -------- - Improve import time (python/cpython#114664). Bugfixes -------- - Make MetadataPathFinder.find_distributions a classmethod for consistency with CPython. Closes #484. (#484) - Allow ``MetadataPathFinder.invalidate_caches`` to be called as a classmethod. v7.0.2 ====== No significant changes. v7.0.1 ====== Bugfixes -------- - Corrected the interface for SimplePath to encompass the expectations of locate_file and PackagePath. - Fixed type annotations to allow strings. v7.0.0 ====== Deprecations and Removals ------------------------- - Removed EntryPoint access by numeric index (tuple behavior). v6.11.0 ======= Features -------- - Added ``Distribution.origin`` supplying the ``direct_url.json`` in a ``SimpleNamespace``. (#404) v6.10.0 ======= Features -------- - Added diagnose script. (#461) v6.9.0 ====== Features -------- - Added EntryPoints.__repr__ (#473) v6.8.0 ====== Features -------- - Require Python 3.8 or later. v6.7.0 ====== * #453: When inferring top-level names that are importable for distributions in ``package_distributions``, now symlinks to other directories are honored. v6.6.0 ====== * #449: Expanded type annotations. v6.5.1 ====== * python/cpython#103661: Removed excess error suppression in ``_read_files_egginfo_installed`` and fixed path handling on Windows. v6.5.0 ====== * #422: Removed ABC metaclass from ``Distribution`` and instead deprecated construction of ``Distribution`` objects without concrete methods. v6.4.1 ====== * Updated docs with tweaks from upstream CPython. v6.4.0 ====== * Consolidated some behaviors in tests around ``_path``. * Added type annotation for ``Distribution.read_text``. v6.3.0 ====== * #115: Support ``installed-files.txt`` for ``Distribution.files`` when present. v6.2.1 ====== * #442: Fixed issue introduced in v6.1.0 where non-importable names (metadata dirs) began appearing in ``packages_distributions``. v6.2.0 ====== * #384: ``PackageMetadata`` now stipulates an additional ``get`` method allowing for easy querying of metadata keys that may not be present. v6.1.0 ====== * #428: ``packages_distributions`` now honors packages and modules with Python modules that not ``.py`` sources (e.g. ``.pyc``, ``.so``). v6.0.1 ====== * #434: Expand protocol for ``PackageMetadata.get_all`` to match the upstream implementation of ``email.message.Message.get_all`` in python/typeshed#9620. v6.0.0 ====== * #419: Declared ``Distribution`` as an abstract class, enforcing definition of abstract methods in instantiated subclasses. It's no longer possible to instantiate a ``Distribution`` or any subclasses unless they define the abstract methods. Please comment in the issue if this change breaks any projects. This change will likely be rolled back if it causes significant disruption. 
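
To illustrate the ``PackageMetadata`` access patterns referenced in the v8.0.0
and v6.2.0 entries above, a minimal sketch (it assumes the queried distribution
is installed; the field names are only examples)::

    from importlib_metadata import metadata

    md = metadata('importlib_metadata')
    md['Name']                 # raises KeyError when the field is absent (v8.0+)
    md.get('Keywords')         # returns None for a missing field
    md.get('Keywords', 'n/a')  # or a caller-supplied default
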
v5.2.0 ====== * #371: Deprecated expectation that ``PackageMetadata.__getitem__`` will return ``None`` for missing keys. In the future, it will raise a ``KeyError``. v5.1.0 ====== * #415: Instrument ``SimplePath`` with generic support. v5.0.0 ====== * #97, #284, #300: Removed compatibility shims for deprecated entry point interfaces. v4.13.0 ======= * #396: Added compatibility for ``PathDistributions`` originating from Python 3.8 and 3.9. v4.12.0 ======= * py-93259: Now raise ``ValueError`` when ``None`` or an empty string are passed to ``Distribution.from_name`` (and other callers). v4.11.4 ======= * #379: In ``PathDistribution._name_from_stem``, avoid including parts of the extension in the result. * #381: In ``PathDistribution._normalized_name``, ensure names loaded from the stem of the filename are also normalized, ensuring duplicate entry points by packages varying only by non-normalized name are hidden. Note (#459): This change had a backward-incompatible effect for any installers that created metadata in the filesystem with dashes in the package names (not replaced by underscores). v4.11.3 ======= * #372: Removed cast of path items in FastPath, not needed. v4.11.2 ======= * #369: Fixed bug where ``EntryPoint.extras`` was returning match objects and not the extras strings. v4.11.1 ======= * #367: In ``Distribution.requires`` for egg-info, if ``requires.txt`` is empty, return an empty list. v4.11.0 ======= * bpo-46246: Added ``__slots__`` to ``EntryPoints``. v4.10.2 ======= * #365 and bpo-46546: Avoid leaking ``method_name`` in ``DeprecatedList``. v4.10.1 ======= v2.1.3 ======= * #361: Avoid potential REDoS in ``EntryPoint.pattern``. v4.10.0 ======= * #354: Removed ``Distribution._local`` factory. This functionality was created as a demonstration of the possible implementation. Now, the `pep517 `_ package provides this functionality directly through `pep517.meta.load `_. v4.9.0 ====== * Require Python 3.7 or later. v4.8.3 ====== * #357: Fixed requirement generation from egg-info when a URL requirement is given. v4.8.2 ====== v2.1.2 ====== * #353: Fixed discovery of distributions when path is empty. v4.8.1 ====== * #348: Restored support for ``EntryPoint`` access by item, deprecating support in the process. Users are advised to use direct member access instead of item-based access:: - ep[0] -> ep.name - ep[1] -> ep.value - ep[2] -> ep.group - ep[:] -> ep.name, ep.value, ep.group v4.8.0 ====== * #337: Rewrote ``EntryPoint`` as a simple class, still immutable and still with the attributes, but without any expectation for ``namedtuple`` functionality such as ``_asdict``. v4.7.1 ====== * #344: Fixed regression in ``packages_distributions`` when neither top-level.txt nor a files manifest is present. v4.7.0 ====== * #330: In ``packages_distributions``, now infer top-level names from ``.files()`` when a ``top-level.txt`` (Setuptools-specific metadata) is not present. v4.6.4 ====== * #334: Correct ``SimplePath`` protocol to match ``pathlib`` protocol for ``__truediv__``. v4.6.3 ====== * Moved workaround for #327 to ``_compat`` module. v4.6.2 ====== * bpo-44784: Avoid errors in test suite when DeprecationWarnings are treated as errors. v4.6.1 ====== * #327: Deprecation warnings now honor call stack variance on PyPy. v4.6.0 ====== * #326: Performance tests now rely on `pytest-perf `_. To disable these tests, which require network access and a git checkout, pass ``-p no:perf`` to pytest. v4.5.0 ====== * #319: Remove ``SelectableGroups`` deprecation exception for flake8. 
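
As a minimal sketch of the attribute access recommended in the v4.8.1 entry
above (the group is illustrative and assumes such entry points are installed)::

    from importlib_metadata import entry_points

    for ep in entry_points(group='console_scripts'):
        # preferred over the deprecated ep[0], ep[1], ep[2]
        print(ep.name, ep.value, ep.group)
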
v4.4.0 ====== * #300: Restore compatibility in the result from ``Distribution.entry_points`` (``EntryPoints``) to honor expectations in older implementations and issuing deprecation warnings for these cases: - ``EntryPoints`` objects are once again mutable, allowing for ``sort()`` and other list-based mutation operations. Avoid deprecation warnings by casting to a mutable sequence (e.g. ``list(dist.entry_points).sort()``). - ``EntryPoints`` results once again allow for access by index. To avoid deprecation warnings, cast the result to a Sequence first (e.g. ``tuple(dist.entry_points)[0]``). v4.3.1 ====== * #320: Fix issue where normalized name for eggs was incorrectly solicited, leading to metadata being unavailable for eggs. v4.3.0 ====== * #317: De-duplication of distributions no longer requires loading the full metadata for ``PathDistribution`` objects, entry point loading performance by ~10x. v4.2.0 ====== * Prefer f-strings to ``.format`` calls. v4.1.0 ====== * #312: Add support for metadata 2.2 (``Dynamic`` field). * #315: Add ``SimplePath`` protocol for interface clarity in ``PathDistribution``. v4.0.1 ====== * #306: Clearer guidance about compatibility in readme. v4.0.0 ====== * #304: ``PackageMetadata`` as returned by ``metadata()`` and ``Distribution.metadata()`` now provides normalized metadata honoring PEP 566: - If a long description is provided in the payload of the RFC 822 value, it can be retrieved as the ``Description`` field. - Any multi-line values in the metadata will be returned as such. - For any multi-line values, line continuation characters are removed. This backward-incompatible change means that any projects relying on the RFC 822 line continuation characters being present must be tolerant to them having been removed. - Add a ``json`` property that provides the metadata converted to a JSON-compatible form per PEP 566. v3.10.1 ======= * Minor tweaks from CPython. v3.10.0 ======= * #295: Internal refactoring to unify section parsing logic. v3.9.1 ====== * #296: Exclude 'prepare' package. * #297: Fix ValueError when entry points contains comments. v3.9.0 ====== * Use of Mapping (dict) interfaces on ``SelectableGroups`` is now flagged as deprecated. Instead, users are advised to use the select interface for future compatibility. Suppress the warning with this filter: ``ignore:SelectableGroups dict interface``. Or with this invocation in the Python environment: ``warnings.filterwarnings('ignore', 'SelectableGroups dict interface')``. Preferably, switch to the ``select`` interface introduced in 3.7.0. See the `entry points documentation `_ and changelog for the 3.6 release below for more detail. For some use-cases, especially those that rely on ``importlib.metadata`` in Python 3.8 and 3.9 or those relying on older ``importlib_metadata`` (especially on Python 3.5 and earlier), `backports.entry_points_selectable `_ was created to ease the transition. Please have a look at that project if simply relying on importlib_metadata 3.6+ is not straightforward. Background in #298. * #283: Entry point parsing no longer relies on ConfigParser and instead uses a custom, one-pass parser to load the config, resulting in a ~20% performance improvement when loading entry points. v3.8.2 ====== * #293: Re-enabled lazy evaluation of path lookup through a FreezableDefaultDict. v3.8.1 ====== * #293: Workaround for error in distribution search. v3.8.0 ====== * #290: Add mtime-based caching for ``FastPath`` and its lookups, dramatically increasing performance for repeated distribution lookups. 
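
A brief sketch of the ``select`` interface referenced in the v3.9.0 and v4.4.0
entries above, as exposed by current releases (the group name is illustrative)::

    from importlib_metadata import entry_points

    eps = entry_points()                        # EntryPoints in current releases
    scripts = eps.select(group='console_scripts')
    scripts.names                               # set of entry point names
    entry_points(group='console_scripts')       # equivalent shorthand
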
v3.7.3 ====== * Docs enhancements and cleanup following review in `GH-24782 `_. v3.7.2 ====== * Cleaned up cruft in entry_points docstring. v3.7.1 ====== * Internal refactoring to facilitate ``entry_points() -> dict`` deprecation. v3.7.0 ====== * #131: Added ``packages_distributions`` to conveniently resolve a top-level package or module to its distribution(s). v3.6.0 ====== * #284: Introduces new ``EntryPoints`` object, a tuple of ``EntryPoint`` objects but with convenience properties for selecting and inspecting the results: - ``.select()`` accepts ``group`` or ``name`` keyword parameters and returns a new ``EntryPoints`` tuple with only those that match the selection. - ``.groups`` property presents all of the group names. - ``.names`` property presents the names of the entry points. - Item access (e.g. ``eps[name]``) retrieves a single entry point by name. ``entry_points`` now accepts "selection parameters", same as ``EntryPoint.select()``. ``entry_points()`` now provides a future-compatible ``SelectableGroups`` object that supplies the above interface (except item access) but remains a dict for compatibility. In the future, ``entry_points()`` will return an ``EntryPoints`` object for all entry points. If passing selection parameters to ``entry_points``, the future behavior is invoked and an ``EntryPoints`` is the result. * #284: Construction of entry points using ``dict([EntryPoint, ...])`` is now deprecated and raises an appropriate DeprecationWarning and will be removed in a future version. * #300: ``Distribution.entry_points`` now presents as an ``EntryPoints`` object and access by index is no longer allowed. If access by index is required, cast to a sequence first. v3.5.0 ====== * #280: ``entry_points`` now only returns entry points for unique distributions (by name). v3.4.0 ====== * #10: Project now declares itself as being typed. * #272: Additional performance enhancements to distribution discovery. * #111: For PyPA projects, add test ensuring that ``MetadataPathFinder._search_paths`` honors the needed interface. Method is still private. v3.3.0 ====== * #265: ``EntryPoint`` objects now expose a ``.dist`` object referencing the ``Distribution`` when constructed from a Distribution. v3.2.0 ====== * The object returned by ``metadata()`` now has a formally-defined protocol called ``PackageMetadata`` with declared support for the ``.get_all()`` method. Fixes #126. v3.1.1 ====== v2.1.1 ====== * #261: Restored compatibility for package discovery for metadata without version in the name and for legacy eggs. v3.1.0 ====== * Merge with 2.1.0. v2.1.0 ====== * #253: When querying for package metadata, the lookup now honors `package normalization rules `_. v3.0.0 ====== * Require Python 3.6 or later. v2.0.0 ====== * ``importlib_metadata`` no longer presents a ``__version__`` attribute. Consumers wishing to resolve the version of the package should query it directly with ``importlib_metadata.version('importlib-metadata')``. Closes #71. v1.7.0 ====== * ``PathNotFoundError`` now has a custom ``__str__`` mentioning "package metadata" being missing to help guide users to the cause when the package is installed but no metadata is present. Closes #124. v1.6.1 ====== * Added ``Distribution._local()`` as a provisional demonstration of how to load metadata for a local package. Implicitly requires that `pep517 `_ is installed. Ref #42. * Ensure inputs to FastPath are Unicode. Closes #121. * Tests now rely on ``importlib.resources.files`` (and backport) instead of the older ``path`` function. 
* Support any iterable from ``find_distributions``. Closes #122. v1.6.0 ====== * Added ``module`` and ``attr`` attributes to ``EntryPoint`` v1.5.2 ====== * Fix redundant entries from ``FastPath.zip_children``. Closes #117. v1.5.1 ====== * Improve reliability and consistency of compatibility imports for contextlib and pathlib when running tests. Closes #116. v1.5.0 ====== * Additional performance optimizations in FastPath now saves an additional 20% on a typical call. * Correct for issue where PyOxidizer finder has no ``__module__`` attribute. Closes #110. v1.4.0 ====== * Through careful optimization, ``distribution()`` is 3-4x faster. Thanks to Antony Lee for the contribution. Closes #95. * When searching through ``sys.path``, if any error occurs attempting to list a path entry, that entry is skipped, making the system much more lenient to errors. Closes #94. v1.3.0 ====== * Improve custom finders documentation. Closes #105. v1.2.0 ====== * Once again, drop support for Python 3.4. Ref #104. v1.1.3 ====== * Restored support for Python 3.4 due to improper version compatibility declarations in the v1.1.0 and v1.1.1 releases. Closes #104. v1.1.2 ====== * Repaired project metadata to correctly declare the ``python_requires`` directive. Closes #103. v1.1.1 ====== * Fixed ``repr(EntryPoint)`` on PyPy 3 also. Closes #102. v1.1.0 ====== * Dropped support for Python 3.4. * EntryPoints are now pickleable. Closes #96. * Fixed ``repr(EntryPoint)`` on PyPy 2. Closes #97. v1.0.0 ====== * Project adopts semver for versioning. * Removed compatibility shim introduced in 0.23. * For better compatibility with the stdlib implementation and to avoid the same distributions being discovered by the stdlib and backport implementations, the backport now disables the stdlib DistributionFinder during initialization (import time). Closes #91 and closes #100. 0.23 ==== * Added a compatibility shim to prevent failures on beta releases of Python before the signature changed to accept the "context" parameter on find_distributions. This workaround will have a limited lifespan, not to extend beyond release of Python 3.8 final. 0.22 ==== * Renamed ``package`` parameter to ``distribution_name`` as `recommended `_ in the following functions: ``distribution``, ``metadata``, ``version``, ``files``, and ``requires``. This backward-incompatible change is expected to have little impact as these functions are assumed to be primarily used with positional parameters. 0.21 ==== * ``importlib.metadata`` now exposes the ``DistributionFinder`` metaclass and references it in the docs for extending the search algorithm. * Add ``Distribution.at`` for constructing a Distribution object from a known metadata directory on the file system. Closes #80. * Distribution finders now receive a context object that supplies ``.path`` and ``.name`` properties. This change introduces a fundamental backward incompatibility for any projects implementing a ``find_distributions`` method on a ``MetaPathFinder``. This new layer of abstraction allows this context to be supplied directly or constructed on demand and opens the opportunity for a ``find_distributions`` method to solicit additional context from the caller. Closes #85. 0.20 ==== * Clarify in the docs that calls to ``.files`` could return ``None`` when the metadata is not present. Closes #69. * Return all requirements and not just the first for dist-info packages. Closes #67. 0.19 ==== * Restrain over-eager egg metadata resolution. * Add support for entry points with colons in the name. Closes #75. 
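
A hedged sketch of ``Distribution.at`` from the 0.21 entry above (the metadata
directory path is hypothetical)::

    from importlib_metadata import Distribution

    dist = Distribution.at('/path/to/site-packages/example_pkg-1.0.dist-info')
    dist.metadata['Name']
    dist.version
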
0.18 ==== * Parse entry points case sensitively. Closes #68 * Add a version constraint on the backport configparser package. Closes #66 0.17 ==== * Fix a permission problem in the tests on Windows. 0.16 ==== * Don't crash if there exists an EGG-INFO directory on sys.path. 0.15 ==== * Fix documentation. 0.14 ==== * Removed ``local_distribution`` function from the API. **This backward-incompatible change removes this behavior summarily**. Projects should remove their reliance on this behavior. A replacement behavior is under review in the `pep517 project `_. Closes #42. 0.13 ==== * Update docstrings to match PEP 8. Closes #63. * Merged modules into one module. Closes #62. 0.12 ==== * Add support for eggs. !65; Closes #19. 0.11 ==== * Support generic zip files (not just wheels). Closes #59 * Support zip files with multiple distributions in them. Closes #60 * Fully expose the public API in ``importlib_metadata.__all__``. 0.10 ==== * The ``Distribution`` ABC is now officially part of the public API. Closes #37. * Fixed support for older single file egg-info formats. Closes #43. * Fixed a testing bug when ``$CWD`` has spaces in the path. Closes #50. * Add Python 3.8 to the ``tox`` testing matrix. 0.9 === * Fixed issue where entry points without an attribute would raise an Exception. Closes #40. * Removed unused ``name`` parameter from ``entry_points()``. Closes #44. * ``DistributionFinder`` classes must now be instantiated before being placed on ``sys.meta_path``. 0.8 === * This library can now discover/enumerate all installed packages. **This backward-incompatible change alters the protocol finders must implement to support distribution package discovery.** Closes #24. * The signature of ``find_distributions()`` on custom installer finders should now accept two parameters, ``name`` and ``path`` and these parameters must supply defaults. * The ``entry_points()`` method no longer accepts a package name but instead returns all entry points in a dictionary keyed by the ``EntryPoint.group``. The ``resolve`` method has been removed. Instead, call ``EntryPoint.load()``, which has the same semantics as ``pkg_resources`` and ``entrypoints``. **This is a backward incompatible change.** * Metadata is now always returned as Unicode text regardless of Python version. Closes #29. * This library can now discover metadata for a 'local' package (found in the current-working directory). Closes #27. * Added ``files()`` function for resolving files from a distribution. * Added a new ``requires()`` function, which returns the requirements for a package suitable for parsing by ``packaging.requirements.Requirement``. Closes #18. * The top-level ``read_text()`` function has been removed. Use ``PackagePath.read_text()`` on instances returned by the ``files()`` function. **This is a backward incompatible change.** * Release dates are now automatically injected into the changelog based on SCM tags. 0.7 === * Fixed issue where packages with dashes in their names would not be discovered. Closes #21. * Distribution lookup is now case-insensitive. Closes #20. * Wheel distributions can no longer be discovered by their module name. Like Path distributions, they must be indicated by their distribution package name. 0.6 === * Removed ``importlib_metadata.distribution`` function. Now the public interface is primarily the utility functions exposed in ``importlib_metadata.__all__``. Closes #14. * Added two new utility functions ``read_text`` and ``metadata``. 
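
For reference, a small sketch of the ``files()`` and ``requires()`` helpers
introduced in the 0.8 entry above, as they behave in current releases (the
distribution name is arbitrary; ``files()`` may return ``None`` when no file
manifest is present)::

    from importlib_metadata import files, requires, version

    version('importlib_metadata')        # e.g. '8.6.1'
    requires('importlib_metadata')       # list of requirement strings, or None
    [f for f in (files('importlib_metadata') or []) if f.suffix == '.py']
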
0.5 === * Updated README and removed details about Distribution class, now considered private. Closes #15. * Added test suite support for Python 3.4+. * Fixed SyntaxErrors on Python 3.4 and 3.5. !12 * Fixed errors on Windows joining Path elements. !15 0.4 === * Housekeeping. 0.3 === * Added usage documentation. Closes #8 * Add support for getting metadata from wheels on ``sys.path``. Closes #9 0.2 === * Added ``importlib_metadata.entry_points()``. Closes #1 * Added ``importlib_metadata.resolve()``. Closes #12 * Add support for Python 2.7. Closes #4 0.1 === * Initial release. .. Local Variables: mode: change-log-mode indent-tabs-mode: nil sentence-end-double-space: t fill-column: 78 coding: utf-8 End: ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1737411686.0663052 importlib_metadata-8.6.1/PKG-INFO0000644000175100001660000001120214743546146016101 0ustar00runnerdockerMetadata-Version: 2.2 Name: importlib_metadata Version: 8.6.1 Summary: Read metadata from Python packages Author-email: "Jason R. Coombs" Project-URL: Source, https://github.com/python/importlib_metadata Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only Requires-Python: >=3.9 Description-Content-Type: text/x-rst License-File: LICENSE Requires-Dist: zipp>=3.20 Requires-Dist: typing-extensions>=3.6.4; python_version < "3.8" Provides-Extra: test Requires-Dist: pytest!=8.1.*,>=6; extra == "test" Requires-Dist: importlib_resources>=1.3; python_version < "3.9" and extra == "test" Requires-Dist: packaging; extra == "test" Requires-Dist: pyfakefs; extra == "test" Requires-Dist: flufl.flake8; extra == "test" Requires-Dist: pytest-perf>=0.9.2; extra == "test" Requires-Dist: jaraco.test>=5.4; extra == "test" Provides-Extra: doc Requires-Dist: sphinx>=3.5; extra == "doc" Requires-Dist: jaraco.packaging>=9.3; extra == "doc" Requires-Dist: rst.linker>=1.9; extra == "doc" Requires-Dist: furo; extra == "doc" Requires-Dist: sphinx-lint; extra == "doc" Requires-Dist: jaraco.tidelift>=1.4; extra == "doc" Provides-Extra: perf Requires-Dist: ipython; extra == "perf" Provides-Extra: check Requires-Dist: pytest-checkdocs>=2.4; extra == "check" Requires-Dist: pytest-ruff>=0.2.1; sys_platform != "cygwin" and extra == "check" Provides-Extra: cover Requires-Dist: pytest-cov; extra == "cover" Provides-Extra: enabler Requires-Dist: pytest-enabler>=2.2; extra == "enabler" Provides-Extra: type Requires-Dist: pytest-mypy; extra == "type" .. image:: https://img.shields.io/pypi/v/importlib_metadata.svg :target: https://pypi.org/project/importlib_metadata .. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg .. image:: https://github.com/python/importlib_metadata/actions/workflows/main.yml/badge.svg :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 :alt: tests .. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json :target: https://github.com/astral-sh/ruff :alt: Ruff .. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest .. image:: https://img.shields.io/badge/skeleton-2024-informational :target: https://blog.jaraco.com/skeleton .. 
image:: https://tidelift.com/badges/package/pypi/importlib-metadata :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme Library to access the metadata for a Python package. This package supplies third-party access to the functionality of `importlib.metadata `_ including improvements added to subsequent Python versions. Compatibility ============= New features are introduced in this third-party library and later merged into CPython. The following table indicates which versions of this library were contributed to different versions in the standard library: .. list-table:: :header-rows: 1 * - importlib_metadata - stdlib * - 7.0 - 3.13 * - 6.5 - 3.12 * - 4.13 - 3.11 * - 4.6 - 3.10 * - 1.4 - 3.8 Usage ===== See the `online documentation `_ for usage details. `Finder authors `_ can also add support for custom package installers. See the above documentation for details. Caveats ======= This project primarily supports third-party packages installed by PyPA tools (or other conforming packages). It does not support: - Packages in the stdlib. - Packages installed without metadata. Project details =============== * Project home: https://github.com/python/importlib_metadata * Report bugs at: https://github.com/python/importlib_metadata/issues * Code hosting: https://github.com/python/importlib_metadata * Documentation: https://importlib-metadata.readthedocs.io/ For Enterprise ============== Available as part of the Tidelift Subscription. This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. `Learn more `_. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/README.rst0000644000175100001660000000563014743546124016477 0ustar00runnerdocker.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg :target: https://pypi.org/project/importlib_metadata .. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg .. image:: https://github.com/python/importlib_metadata/actions/workflows/main.yml/badge.svg :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 :alt: tests .. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json :target: https://github.com/astral-sh/ruff :alt: Ruff .. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest .. image:: https://img.shields.io/badge/skeleton-2024-informational :target: https://blog.jaraco.com/skeleton .. image:: https://tidelift.com/badges/package/pypi/importlib-metadata :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme Library to access the metadata for a Python package. This package supplies third-party access to the functionality of `importlib.metadata `_ including improvements added to subsequent Python versions. Compatibility ============= New features are introduced in this third-party library and later merged into CPython. The following table indicates which versions of this library were contributed to different versions in the standard library: .. 
list-table:: :header-rows: 1 * - importlib_metadata - stdlib * - 7.0 - 3.13 * - 6.5 - 3.12 * - 4.13 - 3.11 * - 4.6 - 3.10 * - 1.4 - 3.8 Usage ===== See the `online documentation `_ for usage details. `Finder authors `_ can also add support for custom package installers. See the above documentation for details. Caveats ======= This project primarily supports third-party packages installed by PyPA tools (or other conforming packages). It does not support: - Packages in the stdlib. - Packages installed without metadata. Project details =============== * Project home: https://github.com/python/importlib_metadata * Report bugs at: https://github.com/python/importlib_metadata/issues * Code hosting: https://github.com/python/importlib_metadata * Documentation: https://importlib-metadata.readthedocs.io/ For Enterprise ============== Available as part of the Tidelift Subscription. This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. `Learn more `_. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/SECURITY.md0000644000175100001660000000026414743546124016577 0ustar00runnerdocker# Security Contact To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/conftest.py0000644000175100001660000000142514743546124017205 0ustar00runnerdockerimport sys collect_ignore = [ # this module fails mypy tests because 'setup.py' matches './setup.py' 'tests/data/sources/example/setup.py', ] def pytest_configure(): remove_importlib_metadata() def remove_importlib_metadata(): """ Ensure importlib_metadata is not imported yet. Because pytest or other modules might import importlib_metadata, the coverage reports are broken (#322). Work around the issue by undoing the changes made by a previous import of importlib_metadata (if any). """ sys.meta_path[:] = [ item for item in sys.meta_path if item.__class__.__name__ != 'MetadataPathFinder' ] for mod in list(sys.modules): if mod.startswith('importlib_metadata'): del sys.modules[mod] ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1737411686.0543053 importlib_metadata-8.6.1/docs/0000755000175100001660000000000014743546146015740 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/docs/__init__.py0000644000175100001660000000000014743546124020033 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/docs/api.rst0000644000175100001660000000044114743546124017236 0ustar00runnerdocker============= API Reference ============= ``importlib_metadata`` module ----------------------------- .. automodule:: importlib_metadata :members: :undoc-members: :show-inheritance: .. 
automodule:: importlib_metadata._meta :members: :undoc-members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/docs/conf.py0000644000175100001660000000457414743546124017245 0ustar00runnerdockerfrom __future__ import annotations extensions = [ 'sphinx.ext.autodoc', 'jaraco.packaging.sphinx', ] master_doc = "index" html_theme = "furo" # Link dates and other references in the changelog extensions += ['rst.linker'] link_files = { '../NEWS.rst': dict( using=dict(GH='https://github.com'), replace=[ dict( pattern=r'(Issue #|\B#)(?P\d+)', url='{package_url}/issues/{issue}', ), dict( pattern=r'(?m:^((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n)', with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n', ), dict( pattern=r'PEP[- ](?P\d+)', url='https://peps.python.org/pep-{pep_number:0>4}/', ), dict( pattern=r'(python/cpython#|Python #)(?P\d+)', url='https://github.com/python/cpython/issues/{python}', ), dict( pattern=r'bpo-(?P\d+)', url='http://bugs.python.org/issue{bpo}', ), ], ) } # Be strict about any broken references nitpicky = True nitpick_ignore: list[tuple[str, str]] = [] # Include Python intersphinx mapping to prevent failures # jaraco/skeleton#51 extensions += ['sphinx.ext.intersphinx'] intersphinx_mapping = { 'python': ('https://docs.python.org/3', None), } # Preserve authored syntax for defaults autodoc_preserve_defaults = True # Add support for linking usernames, PyPI projects, Wikipedia pages github_url = 'https://github.com/' extlinks = { 'user': (f'{github_url}%s', '@%s'), 'pypi': ('https://pypi.org/project/%s', '%s'), 'wiki': ('https://wikipedia.org/wiki/%s', '%s'), } extensions += ['sphinx.ext.extlinks'] # local extensions += ['jaraco.tidelift'] intersphinx_mapping.update( importlib_resources=( 'https://importlib-resources.readthedocs.io/en/latest/', None, ), ) intersphinx_mapping.update( packaging=( 'https://packaging.python.org/en/latest/', None, ), ) nitpick_ignore += [ # Workaround for #316 ('py:class', 'importlib_metadata.EntryPoints'), ('py:class', 'importlib_metadata.PackagePath'), ('py:class', 'importlib_metadata.SelectableGroups'), ('py:class', 'importlib_metadata._meta._T'), # Workaround for #435 ('py:class', '_T'), ] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/docs/history.rst0000644000175100001660000000011614743546124020165 0ustar00runnerdocker:tocdepth: 2 .. _changes: History ******* .. include:: ../NEWS (links).rst ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/docs/index.rst0000644000175100001660000000202414743546124017573 0ustar00runnerdockerWelcome to |project| documentation! =================================== .. sidebar-links:: :home: :pypi: ``importlib_metadata`` supplies a backport of :mod:`importlib.metadata`, enabling early access to features of future Python versions and making functionality available for older Python versions. Users are encouraged to use the Python standard library where suitable and fall back to this library for future compatibility. For general usage guidance, start with :mod:`importlib.metadata` but substitute ``importlib_metadata`` for ``importlib.metadata``. .. toctree:: :maxdepth: 1 api migration history .. 
tidelift-referral-banner::

Project details
===============

* Project home: https://github.com/python/importlib_metadata
* Report bugs at: https://github.com/python/importlib_metadata/issues
* Code hosting: https://github.com/python/importlib_metadata
* Documentation: https://importlib-metadata.readthedocs.io/

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

--- importlib_metadata-8.6.1/docs/migration.rst ---

.. _migration:

=================
Migration guide
=================

The following guide will help you migrate common ``pkg_resources`` APIs to
``importlib_metadata``.

``importlib_metadata`` aims to replace the following ``pkg_resources`` APIs:

* ``pkg_resources.iter_entry_points()``
* ``pkg_resources.require()``
* convenience functions
* ``pkg_resources.find_distributions()``
* ``pkg_resources.get_distribution()``

Other functionality from ``pkg_resources`` is replaced by other packages such
as `importlib_resources `_ and `packaging `_.

pkg_resources.iter_entry_points()
=================================

``importlib_metadata`` provides :ref:`entry-points`.

Compatibility note: entry points provided by importlib_metadata do not have
the following implicit behaviors found in those from ``pkg_resources``:

- Each EntryPoint is not automatically validated to match. To ensure each one
  is validated, invoke any property on the object (e.g. ``ep.name``).

- When invoking ``EntryPoint.load()``, no checks are performed to ensure the
  declared extras are installed. If this behavior is desired/required, it is
  left to the user to perform the check and install any dependencies. See
  `importlib_metadata#368 `_ for more details.

pkg_resources.require()
=======================

``importlib_metadata`` does not provide support for dynamically discovering or
requiring distributions, nor does it provide any support for managing the
"working set". Furthermore, ``importlib_metadata`` assumes that only one
version of a given distribution is discoverable at any time (no support for
multi-version installs). Any projects that require the above behavior need to
provide that behavior independently.

``importlib_metadata`` does aim to resolve metadata concerns late, such that
any dynamic changes to package availability should be reflected immediately.

Convenience functions
=====================

In addition to the support for direct access to ``Distribution`` objects
(below), ``importlib_metadata`` presents some top-level functions for easy
access to the most common metadata:

- :ref:`metadata` queries the metadata fields from the distribution.
- :ref:`version` provides quick access to the distribution version.
- :ref:`requirements` presents the requirements of the distribution.
- :ref:`files` provides file-like access to the data blobs backing the
  metadata.

pkg_resources.find_distributions()
==================================

``importlib_metadata`` provides functionality similar to
``find_distributions()``. Both ``distributions(...)`` and
``Distribution.discover(...)`` return an iterable of :ref:`distributions`
matching the indicated parameters.

pkg_resources.get_distribution()
================================

Similar to ``distributions``, the ``distribution()`` function provides access
to a single distribution by name.
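
A condensed sketch of the mappings described in this guide (the distribution
and group names are illustrative, not prescriptive)::

    from importlib_metadata import distribution, distributions, entry_points

    # pkg_resources.iter_entry_points('console_scripts')
    for ep in entry_points(group='console_scripts'):
        obj = ep.load()        # import and return the referenced object

    # pkg_resources.get_distribution('example').version
    distribution('example').version     # raises PackageNotFoundError if absent

    # pkg_resources.find_distributions(...)
    names = [dist.metadata['Name'] for dist in distributions()]
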
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/exercises.py0000644000175100001660000000166514743546124017360 0ustar00runnerdockerfrom pytest_perf.deco import extras @extras('perf') def discovery_perf(): "discovery" import importlib_metadata # end warmup importlib_metadata.distribution('ipython') def entry_points_perf(): "entry_points()" import importlib_metadata # end warmup importlib_metadata.entry_points() @extras('perf') def cached_distribution_perf(): "cached distribution" import importlib_metadata importlib_metadata.distribution('ipython') # end warmup importlib_metadata.distribution('ipython') @extras('perf') def uncached_distribution_perf(): "uncached distribution" import importlib import importlib_metadata # end warmup importlib.invalidate_caches() importlib_metadata.distribution('ipython') def entrypoint_regexp_perf(): import re import importlib_metadata input = '0' + ' ' * 2**10 + '0' # end warmup re.match(importlib_metadata.EntryPoint.pattern, input) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1737411686.0563052 importlib_metadata-8.6.1/importlib_metadata/0000755000175100001660000000000014743546146020651 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/__init__.py0000644000175100001660000010601514743546124022761 0ustar00runnerdocker""" APIs exposing metadata from third-party Python packages. This codebase is shared between importlib.metadata in the stdlib and importlib_metadata in PyPI. See https://github.com/python/importlib_metadata/wiki/Development-Methodology for more detail. """ from __future__ import annotations import abc import collections import email import functools import itertools import operator import os import pathlib import posixpath import re import sys import textwrap import types from contextlib import suppress from importlib import import_module from importlib.abc import MetaPathFinder from itertools import starmap from typing import Any, Iterable, List, Mapping, Match, Optional, Set, cast from . import _meta from ._collections import FreezableDefaultDict, Pair from ._compat import ( NullFinder, install, ) from ._functools import method_cache, pass_none from ._itertools import always_iterable, bucket, unique_everseen from ._meta import PackageMetadata, SimplePath from .compat import py39, py311 __all__ = [ 'Distribution', 'DistributionFinder', 'PackageMetadata', 'PackageNotFoundError', 'SimplePath', 'distribution', 'distributions', 'entry_points', 'files', 'metadata', 'packages_distributions', 'requires', 'version', ] class PackageNotFoundError(ModuleNotFoundError): """The package was not found.""" def __str__(self) -> str: return f"No package metadata was found for {self.name}" @property def name(self) -> str: # type: ignore[override] # make readonly (name,) = self.args return name class Sectioned: """ A simple entry point config parser for performance >>> for item in Sectioned.read(Sectioned._sample): ... 
print(item) Pair(name='sec1', value='# comments ignored') Pair(name='sec1', value='a = 1') Pair(name='sec1', value='b = 2') Pair(name='sec2', value='a = 2') >>> res = Sectioned.section_pairs(Sectioned._sample) >>> item = next(res) >>> item.name 'sec1' >>> item.value Pair(name='a', value='1') >>> item = next(res) >>> item.value Pair(name='b', value='2') >>> item = next(res) >>> item.name 'sec2' >>> item.value Pair(name='a', value='2') >>> list(res) [] """ _sample = textwrap.dedent( """ [sec1] # comments ignored a = 1 b = 2 [sec2] a = 2 """ ).lstrip() @classmethod def section_pairs(cls, text): return ( section._replace(value=Pair.parse(section.value)) for section in cls.read(text, filter_=cls.valid) if section.name is not None ) @staticmethod def read(text, filter_=None): lines = filter(filter_, map(str.strip, text.splitlines())) name = None for value in lines: section_match = value.startswith('[') and value.endswith(']') if section_match: name = value.strip('[]') continue yield Pair(name, value) @staticmethod def valid(line: str): return line and not line.startswith('#') class EntryPoint: """An entry point as defined by Python packaging conventions. See `the packaging docs on entry points `_ for more information. >>> ep = EntryPoint( ... name=None, group=None, value='package.module:attr [extra1, extra2]') >>> ep.module 'package.module' >>> ep.attr 'attr' >>> ep.extras ['extra1', 'extra2'] """ pattern = re.compile( r'(?P[\w.]+)\s*' r'(:\s*(?P[\w.]+)\s*)?' r'((?P\[.*\])\s*)?$' ) """ A regular expression describing the syntax for an entry point, which might look like: - module - package.module - package.module:attribute - package.module:object.attribute - package.module:attr [extra1, extra2] Other combinations are possible as well. The expression is lenient about whitespace around the ':', following the attr, and following any extras. """ name: str value: str group: str dist: Optional[Distribution] = None def __init__(self, name: str, value: str, group: str) -> None: vars(self).update(name=name, value=value, group=group) def load(self) -> Any: """Load the entry point from its definition. If only a module is indicated by the value, return that module. Otherwise, return the named object. """ match = cast(Match, self.pattern.match(self.value)) module = import_module(match.group('module')) attrs = filter(None, (match.group('attr') or '').split('.')) return functools.reduce(getattr, attrs, module) @property def module(self) -> str: match = self.pattern.match(self.value) assert match is not None return match.group('module') @property def attr(self) -> str: match = self.pattern.match(self.value) assert match is not None return match.group('attr') @property def extras(self) -> List[str]: match = self.pattern.match(self.value) assert match is not None return re.findall(r'\w+', match.group('extras') or '') def _for(self, dist): vars(self).update(dist=dist) return self def matches(self, **params): """ EntryPoint matches the given parameters. 
>>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') >>> ep.matches(group='foo') True >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') True >>> ep.matches(group='foo', name='other') False >>> ep.matches() True >>> ep.matches(extras=['extra1', 'extra2']) True >>> ep.matches(module='bing') True >>> ep.matches(attr='bong') True """ self._disallow_dist(params) attrs = (getattr(self, param) for param in params) return all(map(operator.eq, params.values(), attrs)) @staticmethod def _disallow_dist(params): """ Querying by dist is not allowed (dist objects are not comparable). >>> EntryPoint(name='fan', value='fav', group='fag').matches(dist='foo') Traceback (most recent call last): ... ValueError: "dist" is not suitable for matching... """ if "dist" in params: raise ValueError( '"dist" is not suitable for matching. ' "Instead, use Distribution.entry_points.select() on a " "located distribution." ) def _key(self): return self.name, self.value, self.group def __lt__(self, other): return self._key() < other._key() def __eq__(self, other): return self._key() == other._key() def __setattr__(self, name, value): raise AttributeError("EntryPoint objects are immutable.") def __repr__(self): return ( f'EntryPoint(name={self.name!r}, value={self.value!r}, ' f'group={self.group!r})' ) def __hash__(self) -> int: return hash(self._key()) class EntryPoints(tuple): """ An immutable collection of selectable EntryPoint objects. """ __slots__ = () def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override] # Work with str instead of int """ Get the EntryPoint in self matching name. """ try: return next(iter(self.select(name=name))) except StopIteration: raise KeyError(name) def __repr__(self): """ Repr with classname and tuple constructor to signal that we deviate from regular tuple behavior. """ return '%s(%r)' % (self.__class__.__name__, tuple(self)) def select(self, **params) -> EntryPoints: """ Select entry points from self that match the given parameters (typically group and/or name). """ return EntryPoints(ep for ep in self if py39.ep_matches(ep, **params)) @property def names(self) -> Set[str]: """ Return the set of all names of all entry points. """ return {ep.name for ep in self} @property def groups(self) -> Set[str]: """ Return the set of all groups of all entry points. """ return {ep.group for ep in self} @classmethod def _from_text_for(cls, text, dist): return cls(ep._for(dist) for ep in cls._from_text(text)) @staticmethod def _from_text(text): return ( EntryPoint(name=item.value.name, value=item.value.value, group=item.name) for item in Sectioned.section_pairs(text or '') ) class PackagePath(pathlib.PurePosixPath): """A reference to a path in a package""" hash: Optional[FileHash] size: int dist: Distribution def read_text(self, encoding: str = 'utf-8') -> str: return self.locate().read_text(encoding=encoding) def read_binary(self) -> bytes: return self.locate().read_bytes() def locate(self) -> SimplePath: """Return a path-like object for this path""" return self.dist.locate_file(self) class FileHash: def __init__(self, spec: str) -> None: self.mode, _, self.value = spec.partition('=') def __repr__(self) -> str: return f'' class Distribution(metaclass=abc.ABCMeta): """ An abstract Python distribution package. Custom providers may derive from this class and define the abstract methods to provide a concrete implementation for their environment. 
Some providers may opt to override the default implementation of some properties to bypass the file-reading mechanism. """ @abc.abstractmethod def read_text(self, filename) -> Optional[str]: """Attempt to load metadata file given by the name. Python distribution metadata is organized by blobs of text typically represented as "files" in the metadata directory (e.g. package-1.0.dist-info). These files include things like: - METADATA: The distribution metadata including fields like Name and Version and Description. - entry_points.txt: A series of entry points as defined in `the entry points spec `_. - RECORD: A record of files according to `this recording spec `_. A package may provide any set of files, including those not listed here or none at all. :param filename: The name of the file in the distribution info. :return: The text if found, otherwise None. """ @abc.abstractmethod def locate_file(self, path: str | os.PathLike[str]) -> SimplePath: """ Given a path to a file in this distribution, return a SimplePath to it. This method is used by callers of ``Distribution.files()`` to locate files within the distribution. If it's possible for a Distribution to represent files in the distribution as ``SimplePath`` objects, it should implement this method to resolve such objects. Some Distribution providers may elect not to resolve SimplePath objects within the distribution by raising a NotImplementedError, but consumers of such a Distribution would be unable to invoke ``Distribution.files()``. """ @classmethod def from_name(cls, name: str) -> Distribution: """Return the Distribution for the given package name. :param name: The name of the distribution package to search for. :return: The Distribution instance (or subclass thereof) for the named package, if found. :raises PackageNotFoundError: When the named package's distribution metadata cannot be found. :raises ValueError: When an invalid value is supplied for name. """ if not name: raise ValueError("A distribution name is required.") try: return next(iter(cls._prefer_valid(cls.discover(name=name)))) except StopIteration: raise PackageNotFoundError(name) @classmethod def discover( cls, *, context: Optional[DistributionFinder.Context] = None, **kwargs ) -> Iterable[Distribution]: """Return an iterable of Distribution objects for all packages. Pass a ``context`` or pass keyword arguments for constructing a context. :context: A ``DistributionFinder.Context`` object. :return: Iterable of Distribution objects for packages matching the context. """ if context and kwargs: raise ValueError("cannot accept context and kwargs") context = context or DistributionFinder.Context(**kwargs) return itertools.chain.from_iterable( resolver(context) for resolver in cls._discover_resolvers() ) @staticmethod def _prefer_valid(dists: Iterable[Distribution]) -> Iterable[Distribution]: """ Prefer (move to the front) distributions that have metadata. Ref python/importlib_resources#489. """ buckets = bucket(dists, lambda dist: bool(dist.metadata)) return itertools.chain(buckets[True], buckets[False]) @staticmethod def at(path: str | os.PathLike[str]) -> Distribution: """Return a Distribution for the indicated metadata path. 
:param path: a string or path-like object :return: a concrete Distribution instance for the path """ return PathDistribution(pathlib.Path(path)) @staticmethod def _discover_resolvers(): """Search the meta_path for resolvers (MetadataPathFinders).""" declared = ( getattr(finder, 'find_distributions', None) for finder in sys.meta_path ) return filter(None, declared) @property def metadata(self) -> _meta.PackageMetadata: """Return the parsed metadata for this Distribution. The returned object will have keys that name the various bits of metadata per the `Core metadata specifications `_. Custom providers may provide the METADATA file or override this property. """ # deferred for performance (python/cpython#109829) from . import _adapters opt_text = ( self.read_text('METADATA') or self.read_text('PKG-INFO') # This last clause is here to support old egg-info files. Its # effect is to just end up using the PathDistribution's self._path # (which points to the egg-info file) attribute unchanged. or self.read_text('') ) text = cast(str, opt_text) return _adapters.Message(email.message_from_string(text)) @property def name(self) -> str: """Return the 'Name' metadata for the distribution package.""" return self.metadata['Name'] @property def _normalized_name(self): """Return a normalized version of the name.""" return Prepared.normalize(self.name) @property def version(self) -> str: """Return the 'Version' metadata for the distribution package.""" return self.metadata['Version'] @property def entry_points(self) -> EntryPoints: """ Return EntryPoints for this distribution. Custom providers may provide the ``entry_points.txt`` file or override this property. """ return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) @property def files(self) -> Optional[List[PackagePath]]: """Files in this distribution. :return: List of PackagePath for this distribution or None Result is `None` if the metadata file that enumerates files (i.e. RECORD for dist-info, or installed-files.txt or SOURCES.txt for egg-info) is missing. Result may be empty if the metadata exists but is empty. Custom providers are recommended to provide a "RECORD" file (in ``read_text``) or override this property to allow for callers to be able to resolve filenames provided by the package. """ def make_file(name, hash=None, size_str=None): result = PackagePath(name) result.hash = FileHash(hash) if hash else None result.size = int(size_str) if size_str else None result.dist = self return result @pass_none def make_files(lines): # Delay csv import, since Distribution.files is not as widely used # as other parts of importlib.metadata import csv return starmap(make_file, csv.reader(lines)) @pass_none def skip_missing_files(package_paths): return list(filter(lambda path: path.locate().exists(), package_paths)) return skip_missing_files( make_files( self._read_files_distinfo() or self._read_files_egginfo_installed() or self._read_files_egginfo_sources() ) ) def _read_files_distinfo(self): """ Read the lines of RECORD. """ text = self.read_text('RECORD') return text and text.splitlines() def _read_files_egginfo_installed(self): """ Read installed-files.txt and return lines in a similar CSV-parsable format as RECORD: each file must be placed relative to the site-packages directory and must also be quoted (since file names can contain literal commas). This file is written when the package is installed by pip, but it might not be written for other installation methods. Assume the file is accurate if it exists. 
""" text = self.read_text('installed-files.txt') # Prepend the .egg-info/ subdir to the lines in this file. # But this subdir is only available from PathDistribution's # self._path. subdir = getattr(self, '_path', None) if not text or not subdir: return paths = ( py311.relative_fix((subdir / name).resolve()) .relative_to(self.locate_file('').resolve(), walk_up=True) .as_posix() for name in text.splitlines() ) return map('"{}"'.format, paths) def _read_files_egginfo_sources(self): """ Read SOURCES.txt and return lines in a similar CSV-parsable format as RECORD: each file name must be quoted (since it might contain literal commas). Note that SOURCES.txt is not a reliable source for what files are installed by a package. This file is generated for a source archive, and the files that are present there (e.g. setup.py) may not correctly reflect the files that are present after the package has been installed. """ text = self.read_text('SOURCES.txt') return text and map('"{}"'.format, text.splitlines()) @property def requires(self) -> Optional[List[str]]: """Generated requirements specified for this Distribution""" reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() return reqs and list(reqs) def _read_dist_info_reqs(self): return self.metadata.get_all('Requires-Dist') def _read_egg_info_reqs(self): source = self.read_text('requires.txt') return pass_none(self._deps_from_requires_text)(source) @classmethod def _deps_from_requires_text(cls, source): return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source)) @staticmethod def _convert_egg_info_reqs_to_simple_reqs(sections): """ Historically, setuptools would solicit and store 'extra' requirements, including those with environment markers, in separate sections. More modern tools expect each dependency to be defined separately, with any relevant extras and environment markers attached directly to that requirement. This method converts the former to the latter. See _test_deps_from_requires_text for an example. """ def make_condition(name): return name and f'extra == "{name}"' def quoted_marker(section): section = section or '' extra, sep, markers = section.partition(':') if extra and markers: markers = f'({markers})' conditions = list(filter(None, [markers, make_condition(extra)])) return '; ' + ' and '.join(conditions) if conditions else '' def url_req_space(req): """ PEP 508 requires a space between the url_spec and the quoted_marker. Ref python/importlib_metadata#357. """ # '@' is uniquely indicative of a url_req. return ' ' * ('@' in req) for section in sections: space = url_req_space(section.value) yield section.value + space + quoted_marker(section.name) @property def origin(self): return self._load_json('direct_url.json') def _load_json(self, filename): # Deferred for performance (python/importlib_metadata#503) import json return pass_none(json.loads)( self.read_text(filename), object_hook=lambda data: types.SimpleNamespace(**data), ) class DistributionFinder(MetaPathFinder): """ A MetaPathFinder capable of discovering installed distributions. Custom providers should implement this interface in order to supply metadata. """ class Context: """ Keyword arguments presented by the caller to ``distributions()`` or ``Distribution.discover()`` to narrow the scope of a search for distributions in all DistributionFinders. Each DistributionFinder may expect any parameters and should attempt to honor the canonical parameters defined below when appropriate. 
This mechanism gives a custom provider a means to solicit additional details from the caller beyond "name" and "path" when searching distributions. For example, imagine a provider that exposes suites of packages in either a "public" or "private" ``realm``. A caller may wish to query only for distributions in a particular realm and could call ``distributions(realm="private")`` to signal to the custom provider to only include distributions from that realm. """ name = None """ Specific name for which a distribution finder should match. A name of ``None`` matches all distributions. """ def __init__(self, **kwargs): vars(self).update(kwargs) @property def path(self) -> List[str]: """ The sequence of directory path that a distribution finder should search. Typically refers to Python installed package paths such as "site-packages" directories and defaults to ``sys.path``. """ return vars(self).get('path', sys.path) @abc.abstractmethod def find_distributions(self, context=Context()) -> Iterable[Distribution]: """ Find distributions. Return an iterable of all Distribution instances capable of loading the metadata for packages matching the ``context``, a DistributionFinder.Context instance. """ class FastPath: """ Micro-optimized class for searching a root for children. Root is a path on the file system that may contain metadata directories either as natural directories or within a zip file. >>> FastPath('').children() ['...'] FastPath objects are cached and recycled for any given root. >>> FastPath('foobar') is FastPath('foobar') True """ @functools.lru_cache() # type: ignore[misc] def __new__(cls, root): return super().__new__(cls) def __init__(self, root): self.root = root def joinpath(self, child): return pathlib.Path(self.root, child) def children(self): with suppress(Exception): return os.listdir(self.root or '.') with suppress(Exception): return self.zip_children() return [] def zip_children(self): # deferred for performance (python/importlib_metadata#502) from zipp.compat.overlay import zipfile zip_path = zipfile.Path(self.root) names = zip_path.root.namelist() self.joinpath = zip_path.joinpath return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names) def search(self, name): return self.lookup(self.mtime).search(name) @property def mtime(self): with suppress(OSError): return os.stat(self.root).st_mtime self.lookup.cache_clear() @method_cache def lookup(self, mtime): return Lookup(self) class Lookup: """ A micro-optimized class for searching a (fast) path for metadata. """ def __init__(self, path: FastPath): """ Calculate all of the children representing metadata. From the children in the path, calculate early all of the children that appear to represent metadata (infos) or legacy metadata (eggs). """ base = os.path.basename(path.root).lower() base_is_egg = base.endswith(".egg") self.infos = FreezableDefaultDict(list) self.eggs = FreezableDefaultDict(list) for child in path.children(): low = child.lower() if low.endswith((".dist-info", ".egg-info")): # rpartition is faster than splitext and suitable for this purpose. 
name = low.rpartition(".")[0].partition("-")[0] normalized = Prepared.normalize(name) self.infos[normalized].append(path.joinpath(child)) elif base_is_egg and low == "egg-info": name = base.rpartition(".")[0].partition("-")[0] legacy_normalized = Prepared.legacy_normalize(name) self.eggs[legacy_normalized].append(path.joinpath(child)) self.infos.freeze() self.eggs.freeze() def search(self, prepared: Prepared): """ Yield all infos and eggs matching the Prepared query. """ infos = ( self.infos[prepared.normalized] if prepared else itertools.chain.from_iterable(self.infos.values()) ) eggs = ( self.eggs[prepared.legacy_normalized] if prepared else itertools.chain.from_iterable(self.eggs.values()) ) return itertools.chain(infos, eggs) class Prepared: """ A prepared search query for metadata on a possibly-named package. Pre-calculates the normalization to prevent repeated operations. >>> none = Prepared(None) >>> none.normalized >>> none.legacy_normalized >>> bool(none) False >>> sample = Prepared('Sample__Pkg-name.foo') >>> sample.normalized 'sample_pkg_name_foo' >>> sample.legacy_normalized 'sample__pkg_name.foo' >>> bool(sample) True """ normalized = None legacy_normalized = None def __init__(self, name: Optional[str]): self.name = name if name is None: return self.normalized = self.normalize(name) self.legacy_normalized = self.legacy_normalize(name) @staticmethod def normalize(name): """ PEP 503 normalization plus dashes as underscores. """ return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_') @staticmethod def legacy_normalize(name): """ Normalize the package name as found in the convention in older packaging tools versions and specs. """ return name.lower().replace('-', '_') def __bool__(self): return bool(self.name) @install class MetadataPathFinder(NullFinder, DistributionFinder): """A degenerate finder for distribution packages on the file system. This finder supplies only a find_distributions() method for versions of Python that do not have a PathFinder find_distributions(). """ @classmethod def find_distributions( cls, context=DistributionFinder.Context() ) -> Iterable[PathDistribution]: """ Find distributions. Return an iterable of all Distribution instances capable of loading the metadata for packages matching ``context.name`` (or all names if ``None`` indicated) along the paths in the list of directories ``context.path``. """ found = cls._search_paths(context.name, context.path) return map(PathDistribution, found) @classmethod def _search_paths(cls, name, paths): """Find metadata directories in paths heuristically.""" prepared = Prepared(name) return itertools.chain.from_iterable( path.search(prepared) for path in map(FastPath, paths) ) @classmethod def invalidate_caches(cls) -> None: FastPath.__new__.cache_clear() class PathDistribution(Distribution): def __init__(self, path: SimplePath) -> None: """Construct a distribution. :param path: SimplePath indicating the metadata directory. """ self._path = path def read_text(self, filename: str | os.PathLike[str]) -> Optional[str]: with suppress( FileNotFoundError, IsADirectoryError, KeyError, NotADirectoryError, PermissionError, ): return self._path.joinpath(filename).read_text(encoding='utf-8') return None read_text.__doc__ = Distribution.read_text.__doc__ def locate_file(self, path: str | os.PathLike[str]) -> SimplePath: return self._path.parent / path @property def _normalized_name(self): """ Performance optimization: where possible, resolve the normalized name from the file system path. 
""" stem = os.path.basename(str(self._path)) return ( pass_none(Prepared.normalize)(self._name_from_stem(stem)) or super()._normalized_name ) @staticmethod def _name_from_stem(stem): """ >>> PathDistribution._name_from_stem('foo-3.0.egg-info') 'foo' >>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info') 'CherryPy' >>> PathDistribution._name_from_stem('face.egg-info') 'face' >>> PathDistribution._name_from_stem('foo.bar') """ filename, ext = os.path.splitext(stem) if ext not in ('.dist-info', '.egg-info'): return name, sep, rest = filename.partition('-') return name def distribution(distribution_name: str) -> Distribution: """Get the ``Distribution`` instance for the named package. :param distribution_name: The name of the distribution package as a string. :return: A ``Distribution`` instance (or subclass thereof). """ return Distribution.from_name(distribution_name) def distributions(**kwargs) -> Iterable[Distribution]: """Get all ``Distribution`` instances in the current environment. :return: An iterable of ``Distribution`` instances. """ return Distribution.discover(**kwargs) def metadata(distribution_name: str) -> _meta.PackageMetadata: """Get the metadata for the named package. :param distribution_name: The name of the distribution package to query. :return: A PackageMetadata containing the parsed metadata. """ return Distribution.from_name(distribution_name).metadata def version(distribution_name: str) -> str: """Get the version string for the named package. :param distribution_name: The name of the distribution package to query. :return: The version string for the package as defined in the package's "Version" metadata key. """ return distribution(distribution_name).version _unique = functools.partial( unique_everseen, key=py39.normalized_name, ) """ Wrapper for ``distributions`` to return unique distributions by name. """ def entry_points(**params) -> EntryPoints: """Return EntryPoint objects for all installed packages. Pass selection parameters (group or name) to filter the result to entry points matching those properties (see EntryPoints.select()). :return: EntryPoints for all installed packages. """ eps = itertools.chain.from_iterable( dist.entry_points for dist in _unique(distributions()) ) return EntryPoints(eps).select(**params) def files(distribution_name: str) -> Optional[List[PackagePath]]: """Return a list of files for the named package. :param distribution_name: The name of the distribution package to query. :return: List of files composing the distribution. """ return distribution(distribution_name).files def requires(distribution_name: str) -> Optional[List[str]]: """ Return a list of requirements for the named package. :return: An iterable of requirements, suitable for packaging.requirement.Requirement. """ return distribution(distribution_name).requires def packages_distributions() -> Mapping[str, List[str]]: """ Return a mapping of top-level packages to their distributions. >>> import collections.abc >>> pkgs = packages_distributions() >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) True """ pkg_to_dist = collections.defaultdict(list) for dist in distributions(): for pkg in _top_level_declared(dist) or _top_level_inferred(dist): pkg_to_dist[pkg].append(dist.metadata['Name']) return dict(pkg_to_dist) def _top_level_declared(dist): return (dist.read_text('top_level.txt') or '').split() def _topmost(name: PackagePath) -> Optional[str]: """ Return the top-most parent as long as there is a parent. 
""" top, *rest = name.parts return top if rest else None def _get_toplevel_name(name: PackagePath) -> str: """ Infer a possibly importable module name from a name presumed on sys.path. >>> _get_toplevel_name(PackagePath('foo.py')) 'foo' >>> _get_toplevel_name(PackagePath('foo')) 'foo' >>> _get_toplevel_name(PackagePath('foo.pyc')) 'foo' >>> _get_toplevel_name(PackagePath('foo/__init__.py')) 'foo' >>> _get_toplevel_name(PackagePath('foo.pth')) 'foo.pth' >>> _get_toplevel_name(PackagePath('foo.dist-info')) 'foo.dist-info' """ # Defer import of inspect for performance (python/cpython#118761) import inspect return _topmost(name) or inspect.getmodulename(name) or str(name) def _top_level_inferred(dist): opt_names = set(map(_get_toplevel_name, always_iterable(dist.files))) def importable_name(name): return '.' not in name return filter(importable_name, opt_names) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/_adapters.py0000644000175100001660000000731014743546124023162 0ustar00runnerdockerimport email.message import email.policy import re import textwrap from ._text import FoldedCase class RawPolicy(email.policy.EmailPolicy): def fold(self, name, value): folded = self.linesep.join( textwrap.indent(value, prefix=' ' * 8, predicate=lambda line: True) .lstrip() .splitlines() ) return f'{name}: {folded}{self.linesep}' class Message(email.message.Message): r""" Specialized Message subclass to handle metadata naturally. Reads values that may have newlines in them and converts the payload to the Description. >>> msg_text = textwrap.dedent(''' ... Name: Foo ... Version: 3.0 ... License: blah ... de-blah ... ... First line of description. ... Second line of description. ... ... Fourth line! ... ''').lstrip().replace('', '') >>> msg = Message(email.message_from_string(msg_text)) >>> msg['Description'] 'First line of description.\nSecond line of description.\n\nFourth line!\n' Message should render even if values contain newlines. >>> print(msg) Name: Foo Version: 3.0 License: blah de-blah Description: First line of description. Second line of description. Fourth line! """ multiple_use_keys = set( map( FoldedCase, [ 'Classifier', 'Obsoletes-Dist', 'Platform', 'Project-URL', 'Provides-Dist', 'Provides-Extra', 'Requires-Dist', 'Requires-External', 'Supported-Platform', 'Dynamic', ], ) ) """ Keys that may be indicated multiple times per PEP 566. """ def __new__(cls, orig: email.message.Message): res = super().__new__(cls) vars(res).update(vars(orig)) return res def __init__(self, *args, **kwargs): self._headers = self._repair_headers() # suppress spurious error from mypy def __iter__(self): return super().__iter__() def __getitem__(self, item): """ Override parent behavior to typical dict behavior. ``email.message.Message`` will emit None values for missing keys. Typical mappings, including this ``Message``, will raise a key error for missing keys. Ref python/importlib_metadata#371. 
""" res = super().__getitem__(item) if res is None: raise KeyError(item) return res def _repair_headers(self): def redent(value): "Correct for RFC822 indentation" indent = ' ' * 8 if not value or '\n' + indent not in value: return value return textwrap.dedent(indent + value) headers = [(key, redent(value)) for key, value in vars(self)['_headers']] if self._payload: headers.append(('Description', self.get_payload())) self.set_payload('') return headers def as_string(self): return super().as_string(policy=RawPolicy()) @property def json(self): """ Convert PackageMetadata to a JSON-compatible format per PEP 0566. """ def transform(key): value = self.get_all(key) if key in self.multiple_use_keys else self[key] if key == 'Keywords': value = re.split(r'\s+', value) tk = key.lower().replace('-', '_') return tk, value return dict(map(transform, map(FoldedCase, self))) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/_collections.py0000644000175100001660000000134714743546124023701 0ustar00runnerdockerimport collections # from jaraco.collections 3.3 class FreezableDefaultDict(collections.defaultdict): """ Often it is desirable to prevent the mutation of a default dict after its initial construction, such as to prevent mutation during iteration. >>> dd = FreezableDefaultDict(list) >>> dd[0].append('1') >>> dd.freeze() >>> dd[1] [] >>> len(dd) 1 """ def __missing__(self, key): return getattr(self, '_frozen', super().__missing__)(key) def freeze(self): self._frozen = lambda key: self.default_factory() class Pair(collections.namedtuple('Pair', 'name value')): @classmethod def parse(cls, text): return cls(*map(str.strip, text.split("=", 1))) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/_compat.py0000644000175100001660000000244114743546124022642 0ustar00runnerdockerimport platform import sys __all__ = ['install', 'NullFinder'] def install(cls): """ Class decorator for installation on sys.meta_path. Adds the backport DistributionFinder to sys.meta_path and attempts to disable the finder functionality of the stdlib DistributionFinder. """ sys.meta_path.append(cls()) disable_stdlib_finder() return cls def disable_stdlib_finder(): """ Give the backport primacy for discovering path-based distributions by monkey-patching the stdlib O_O. See #91 for more background for rationale on this sketchy behavior. """ def matches(finder): return getattr( finder, '__module__', None ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions') for finder in filter(matches, sys.meta_path): # pragma: nocover del finder.find_distributions class NullFinder: """ A "Finder" (aka "MetaPathFinder") that never finds any modules, but may find distributions. """ @staticmethod def find_spec(*args, **kwargs): return None def pypy_partial(val): """ Adjust for variable stacklevel on partial under PyPy. Workaround for #327. """ is_pypy = platform.python_implementation() == 'PyPy' return val + is_pypy ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/_functools.py0000644000175100001660000000551714743546124023402 0ustar00runnerdockerimport functools import types # from jaraco.functools 3.3 def method_cache(method, cache_wrapper=None): """ Wrap lru_cache to support storing the cache data in the object instances. 
Abstracts the common paradigm where the method explicitly saves an underscore-prefixed protected property on first call and returns that subsequently. >>> class MyClass: ... calls = 0 ... ... @method_cache ... def method(self, value): ... self.calls += 1 ... return value >>> a = MyClass() >>> a.method(3) 3 >>> for x in range(75): ... res = a.method(x) >>> a.calls 75 Note that the apparent behavior will be exactly like that of lru_cache except that the cache is stored on each instance, so values in one instance will not flush values from another, and when an instance is deleted, so are the cached values for that instance. >>> b = MyClass() >>> for x in range(35): ... res = b.method(x) >>> b.calls 35 >>> a.method(0) 0 >>> a.calls 75 Note that if method had been decorated with ``functools.lru_cache()``, a.calls would have been 76 (due to the cached value of 0 having been flushed by the 'b' instance). Clear the cache with ``.cache_clear()`` >>> a.method.cache_clear() Same for a method that hasn't yet been called. >>> c = MyClass() >>> c.method.cache_clear() Another cache wrapper may be supplied: >>> cache = functools.lru_cache(maxsize=2) >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) >>> a = MyClass() >>> a.method2() 3 Caution - do not subsequently wrap the method with another decorator, such as ``@property``, which changes the semantics of the function. See also http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ for another implementation and additional justification. """ cache_wrapper = cache_wrapper or functools.lru_cache() def wrapper(self, *args, **kwargs): # it's the first call, replace the method with a cached, bound method bound_method = types.MethodType(method, self) cached_method = cache_wrapper(bound_method) setattr(self, method.__name__, cached_method) return cached_method(*args, **kwargs) # Support cache clear even before cache has been created. wrapper.cache_clear = lambda: None return wrapper # From jaraco.functools 3.3 def pass_none(func): """ Wrap func so it's not called if its first param is None >>> print_text = pass_none(print) >>> print_text('text') text >>> print_text(None) """ @functools.wraps(func) def wrapper(param, *args, **kwargs): if param is not None: return func(param, *args, **kwargs) return wrapper ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/_itertools.py0000644000175100001660000001234714743546124023411 0ustar00runnerdockerfrom collections import defaultdict, deque from itertools import filterfalse def unique_everseen(iterable, key=None): "List unique elements, preserving order. Remember all elements ever seen." 
# unique_everseen('AAAABBBCCDAABBB') --> A B C D # unique_everseen('ABBCcAD', str.lower) --> A B C D seen = set() seen_add = seen.add if key is None: for element in filterfalse(seen.__contains__, iterable): seen_add(element) yield element else: for element in iterable: k = key(element) if k not in seen: seen_add(k) yield element # copied from more_itertools 8.8 def always_iterable(obj, base_type=(str, bytes)): """If *obj* is iterable, return an iterator over its items:: >>> obj = (1, 2, 3) >>> list(always_iterable(obj)) [1, 2, 3] If *obj* is not iterable, return a one-item iterable containing *obj*:: >>> obj = 1 >>> list(always_iterable(obj)) [1] If *obj* is ``None``, return an empty iterable: >>> obj = None >>> list(always_iterable(None)) [] By default, binary and text strings are not considered iterable:: >>> obj = 'foo' >>> list(always_iterable(obj)) ['foo'] If *base_type* is set, objects for which ``isinstance(obj, base_type)`` returns ``True`` won't be considered iterable. >>> obj = {'a': 1} >>> list(always_iterable(obj)) # Iterate over the dict's keys ['a'] >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit [{'a': 1}] Set *base_type* to ``None`` to avoid any special handling and treat objects Python considers iterable as iterable: >>> obj = 'foo' >>> list(always_iterable(obj, base_type=None)) ['f', 'o', 'o'] """ if obj is None: return iter(()) if (base_type is not None) and isinstance(obj, base_type): return iter((obj,)) try: return iter(obj) except TypeError: return iter((obj,)) # Copied from more_itertools 10.3 class bucket: """Wrap *iterable* and return an object that buckets the iterable into child iterables based on a *key* function. >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3'] >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character >>> sorted(list(s)) # Get the keys ['a', 'b', 'c'] >>> a_iterable = s['a'] >>> next(a_iterable) 'a1' >>> next(a_iterable) 'a2' >>> list(s['b']) ['b1', 'b2', 'b3'] The original iterable will be advanced and its items will be cached until they are used by the child iterables. This may require significant storage. By default, attempting to select a bucket to which no items belong will exhaust the iterable and cache all values. If you specify a *validator* function, selected buckets will instead be checked against it. >>> from itertools import count >>> it = count(1, 2) # Infinite sequence of odd numbers >>> key = lambda x: x % 10 # Bucket by last digit >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only >>> s = bucket(it, key=key, validator=validator) >>> 2 in s False >>> list(s[2]) [] """ def __init__(self, iterable, key, validator=None): self._it = iter(iterable) self._key = key self._cache = defaultdict(deque) self._validator = validator or (lambda x: True) def __contains__(self, value): if not self._validator(value): return False try: item = next(self[value]) except StopIteration: return False else: self._cache[value].appendleft(item) return True def _get_values(self, value): """ Helper to yield items from the parent iterator that match *value*. Items that don't match are stored in the local cache as they are encountered. """ while True: # If we've cached some items that match the target value, emit # the first one and evict it from the cache. if self._cache[value]: yield self._cache[value].popleft() # Otherwise we need to advance the parent iterator to search for # a matching item, caching the rest. 
else: while True: try: item = next(self._it) except StopIteration: return item_value = self._key(item) if item_value == value: yield item break elif self._validator(item_value): self._cache[item_value].append(item) def __iter__(self): for item in self._it: item_value = self._key(item) if self._validator(item_value): self._cache[item_value].append(item) yield from self._cache.keys() def __getitem__(self, value): if not self._validator(value): return iter(()) return self._get_values(value) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/_meta.py0000644000175100001660000000343714743546124022313 0ustar00runnerdockerfrom __future__ import annotations import os from typing import ( Any, Dict, Iterator, List, Optional, Protocol, TypeVar, Union, overload, ) _T = TypeVar("_T") class PackageMetadata(Protocol): def __len__(self) -> int: ... # pragma: no cover def __contains__(self, item: str) -> bool: ... # pragma: no cover def __getitem__(self, key: str) -> str: ... # pragma: no cover def __iter__(self) -> Iterator[str]: ... # pragma: no cover @overload def get( self, name: str, failobj: None = None ) -> Optional[str]: ... # pragma: no cover @overload def get(self, name: str, failobj: _T) -> Union[str, _T]: ... # pragma: no cover # overload per python/importlib_metadata#435 @overload def get_all( self, name: str, failobj: None = None ) -> Optional[List[Any]]: ... # pragma: no cover @overload def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]: """ Return all values associated with a possibly multi-valued key. """ @property def json(self) -> Dict[str, Union[str, List[str]]]: """ A JSON-compatible form of the metadata. """ class SimplePath(Protocol): """ A minimal subset of pathlib.Path required by Distribution. """ def joinpath( self, other: Union[str, os.PathLike[str]] ) -> SimplePath: ... # pragma: no cover def __truediv__( self, other: Union[str, os.PathLike[str]] ) -> SimplePath: ... # pragma: no cover @property def parent(self) -> SimplePath: ... # pragma: no cover def read_text(self, encoding=None) -> str: ... # pragma: no cover def read_bytes(self) -> bytes: ... # pragma: no cover def exists(self) -> bool: ... # pragma: no cover ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/_text.py0000644000175100001660000000416614743546124022351 0ustar00runnerdockerimport re from ._functools import method_cache # from jaraco.text 3.5 class FoldedCase(str): """ A case insensitive string class; behaves just like str except compares equal when the only variation is case. >>> s = FoldedCase('hello world') >>> s == 'Hello World' True >>> 'Hello World' == s True >>> s != 'Hello World' False >>> s.index('O') 4 >>> s.split('O') ['hell', ' w', 'rld'] >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) ['alpha', 'Beta', 'GAMMA'] Sequence membership is straightforward. >>> "Hello World" in [s] True >>> s in ["Hello World"] True You may test for set inclusion, but candidate and elements must both be folded. >>> FoldedCase("Hello World") in {s} True >>> s in {FoldedCase("Hello World")} True String inclusion works as long as the FoldedCase object is on the right. 
>>> "hello" in FoldedCase("Hello World") True But not if the FoldedCase object is on the left: >>> FoldedCase('hello') in 'Hello World' False In that case, use in_: >>> FoldedCase('hello').in_('Hello World') True >>> FoldedCase('hello') > FoldedCase('Hello') False """ def __lt__(self, other): return self.lower() < other.lower() def __gt__(self, other): return self.lower() > other.lower() def __eq__(self, other): return self.lower() == other.lower() def __ne__(self, other): return self.lower() != other.lower() def __hash__(self): return hash(self.lower()) def __contains__(self, other): return super().lower().__contains__(other.lower()) def in_(self, other): "Does self appear in other?" return self in FoldedCase(other) # cache lower since it's likely to be called frequently. @method_cache def lower(self): return super().lower() def index(self, sub): return self.lower().index(sub.lower()) def split(self, splitter=' ', maxsplit=0): pattern = re.compile(re.escape(splitter), re.I) return pattern.split(self, maxsplit) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1737411686.0583053 importlib_metadata-8.6.1/importlib_metadata/compat/0000755000175100001660000000000014743546146022134 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/compat/__init__.py0000644000175100001660000000000014743546124024227 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/compat/py311.py0000644000175100001660000000114014743546124023353 0ustar00runnerdockerimport os import pathlib import sys import types def wrap(path): # pragma: no cover """ Workaround for https://github.com/python/cpython/issues/84538 to add backward compatibility for walk_up=True. An example affected package is dask-labextension, which uses jupyter-packaging to install JupyterLab javascript files outside of site-packages. """ def relative_to(root, *, walk_up=False): return pathlib.Path(os.path.relpath(path, root)) return types.SimpleNamespace(relative_to=relative_to) relative_fix = wrap if sys.version_info < (3, 12) else lambda x: x ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/compat/py39.py0000644000175100001660000000211614743546124023306 0ustar00runnerdocker""" Compatibility layer with Python 3.8/3.9 """ from typing import TYPE_CHECKING, Any, Optional if TYPE_CHECKING: # pragma: no cover # Prevent circular imports on runtime. from .. import Distribution, EntryPoint else: Distribution = EntryPoint = Any def normalized_name(dist: Distribution) -> Optional[str]: """ Honor name normalization for distributions that don't provide ``_normalized_name``. """ try: return dist._normalized_name except AttributeError: from .. import Prepared # -> delay to prevent circular imports. return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name']) def ep_matches(ep: EntryPoint, **params) -> bool: """ Workaround for ``EntryPoint`` objects without the ``matches`` method. """ try: return ep.matches(**params) except AttributeError: from .. import EntryPoint # -> delay to prevent circular imports. # Reconstruct the EntryPoint object to make sure it is compatible. 
return EntryPoint(ep.name, ep.value, ep.group).matches(**params) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/diagnose.py0000644000175100001660000000057314743546124023015 0ustar00runnerdockerimport sys from . import Distribution def inspect(path): print("Inspecting", path) dists = list(Distribution.discover(path=[path])) if not dists: return print("Found", len(dists), "packages:", end=' ') print(', '.join(dist.name for dist in dists)) def run(): for path in sys.path: inspect(path) if __name__ == '__main__': run() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/importlib_metadata/py.typed0000644000175100001660000000000014743546124022332 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1737411686.0623052 importlib_metadata-8.6.1/importlib_metadata.egg-info/0000755000175100001660000000000014743546146022343 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411686.0 importlib_metadata-8.6.1/importlib_metadata.egg-info/PKG-INFO0000644000175100001660000001120214743546146023434 0ustar00runnerdockerMetadata-Version: 2.2 Name: importlib_metadata Version: 8.6.1 Summary: Read metadata from Python packages Author-email: "Jason R. Coombs" Project-URL: Source, https://github.com/python/importlib_metadata Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only Requires-Python: >=3.9 Description-Content-Type: text/x-rst License-File: LICENSE Requires-Dist: zipp>=3.20 Requires-Dist: typing-extensions>=3.6.4; python_version < "3.8" Provides-Extra: test Requires-Dist: pytest!=8.1.*,>=6; extra == "test" Requires-Dist: importlib_resources>=1.3; python_version < "3.9" and extra == "test" Requires-Dist: packaging; extra == "test" Requires-Dist: pyfakefs; extra == "test" Requires-Dist: flufl.flake8; extra == "test" Requires-Dist: pytest-perf>=0.9.2; extra == "test" Requires-Dist: jaraco.test>=5.4; extra == "test" Provides-Extra: doc Requires-Dist: sphinx>=3.5; extra == "doc" Requires-Dist: jaraco.packaging>=9.3; extra == "doc" Requires-Dist: rst.linker>=1.9; extra == "doc" Requires-Dist: furo; extra == "doc" Requires-Dist: sphinx-lint; extra == "doc" Requires-Dist: jaraco.tidelift>=1.4; extra == "doc" Provides-Extra: perf Requires-Dist: ipython; extra == "perf" Provides-Extra: check Requires-Dist: pytest-checkdocs>=2.4; extra == "check" Requires-Dist: pytest-ruff>=0.2.1; sys_platform != "cygwin" and extra == "check" Provides-Extra: cover Requires-Dist: pytest-cov; extra == "cover" Provides-Extra: enabler Requires-Dist: pytest-enabler>=2.2; extra == "enabler" Provides-Extra: type Requires-Dist: pytest-mypy; extra == "type" .. image:: https://img.shields.io/pypi/v/importlib_metadata.svg :target: https://pypi.org/project/importlib_metadata .. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg .. image:: https://github.com/python/importlib_metadata/actions/workflows/main.yml/badge.svg :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 :alt: tests .. 
image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json :target: https://github.com/astral-sh/ruff :alt: Ruff .. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest .. image:: https://img.shields.io/badge/skeleton-2024-informational :target: https://blog.jaraco.com/skeleton .. image:: https://tidelift.com/badges/package/pypi/importlib-metadata :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme Library to access the metadata for a Python package. This package supplies third-party access to the functionality of `importlib.metadata `_ including improvements added to subsequent Python versions. Compatibility ============= New features are introduced in this third-party library and later merged into CPython. The following table indicates which versions of this library were contributed to different versions in the standard library: .. list-table:: :header-rows: 1 * - importlib_metadata - stdlib * - 7.0 - 3.13 * - 6.5 - 3.12 * - 4.13 - 3.11 * - 4.6 - 3.10 * - 1.4 - 3.8 Usage ===== See the `online documentation `_ for usage details. `Finder authors `_ can also add support for custom package installers. See the above documentation for details. Caveats ======= This project primarily supports third-party packages installed by PyPA tools (or other conforming packages). It does not support: - Packages in the stdlib. - Packages installed without metadata. Project details =============== * Project home: https://github.com/python/importlib_metadata * Report bugs at: https://github.com/python/importlib_metadata/issues * Code hosting: https://github.com/python/importlib_metadata * Documentation: https://importlib-metadata.readthedocs.io/ For Enterprise ============== Available as part of the Tidelift Subscription. This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. `Learn more `_. 
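The ``Usage`` section above defers to the online documentation; as a brief, unofficial orientation, the following sketch exercises the top-level helpers. It assumes ``importlib_metadata`` itself is installed, which holds whenever the package is importable::

    import importlib_metadata

    # Parsed core metadata (METADATA / PKG-INFO).
    md = importlib_metadata.metadata('importlib_metadata')
    print(md['Name'], md['Version'])

    # Declared dependencies (Requires-Dist), or None if none are declared.
    print(importlib_metadata.requires('importlib_metadata'))

    # Files recorded for the distribution (RECORD), or None if unavailable.
    for path in importlib_metadata.files('importlib_metadata') or []:
        print(path)

    # Importable top-level names mapped to the distributions providing them.
    print(importlib_metadata.packages_distributions().get('importlib_metadata'))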
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411686.0 importlib_metadata-8.6.1/importlib_metadata.egg-info/SOURCES.txt0000644000175100001660000000304114743546146024225 0ustar00runnerdocker.coveragerc .editorconfig .gitignore .pre-commit-config.yaml .readthedocs.yaml LICENSE NEWS.rst README.rst SECURITY.md conftest.py exercises.py mypy.ini pyproject.toml pytest.ini ruff.toml towncrier.toml tox.ini .github/FUNDING.yml .github/dependabot.yml .github/workflows/main.yml docs/__init__.py docs/api.rst docs/conf.py docs/history.rst docs/index.rst docs/migration.rst importlib_metadata/__init__.py importlib_metadata/_adapters.py importlib_metadata/_collections.py importlib_metadata/_compat.py importlib_metadata/_functools.py importlib_metadata/_itertools.py importlib_metadata/_meta.py importlib_metadata/_text.py importlib_metadata/diagnose.py importlib_metadata/py.typed importlib_metadata.egg-info/PKG-INFO importlib_metadata.egg-info/SOURCES.txt importlib_metadata.egg-info/dependency_links.txt importlib_metadata.egg-info/requires.txt importlib_metadata.egg-info/top_level.txt importlib_metadata/compat/__init__.py importlib_metadata/compat/py311.py importlib_metadata/compat/py39.py tests/__init__.py tests/_context.py tests/_path.py tests/fixtures.py tests/test_api.py tests/test_integration.py tests/test_main.py tests/test_zip.py tests/compat/__init__.py tests/compat/py312.py tests/compat/py39.py tests/compat/test_py39_compat.py tests/data/__init__.py tests/data/example-21.12-py3-none-any.whl tests/data/example-21.12-py3.6.egg tests/data/example2-1.0.0-py3-none-any.whl tests/data/sources/example/setup.py tests/data/sources/example/example/__init__.py tests/data/sources/example2/pyproject.toml tests/data/sources/example2/example2/__init__.py././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411686.0 importlib_metadata-8.6.1/importlib_metadata.egg-info/dependency_links.txt0000644000175100001660000000000114743546146026411 0ustar00runnerdocker ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411686.0 importlib_metadata-8.6.1/importlib_metadata.egg-info/requires.txt0000644000175100001660000000073714743546146024752 0ustar00runnerdockerzipp>=3.20 [:python_version < "3.8"] typing-extensions>=3.6.4 [check] pytest-checkdocs>=2.4 [check:sys_platform != "cygwin"] pytest-ruff>=0.2.1 [cover] pytest-cov [doc] sphinx>=3.5 jaraco.packaging>=9.3 rst.linker>=1.9 furo sphinx-lint jaraco.tidelift>=1.4 [enabler] pytest-enabler>=2.2 [perf] ipython [test] pytest!=8.1.*,>=6 packaging pyfakefs flufl.flake8 pytest-perf>=0.9.2 jaraco.test>=5.4 [test:python_version < "3.9"] importlib_resources>=1.3 [type] pytest-mypy ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411686.0 importlib_metadata-8.6.1/importlib_metadata.egg-info/top_level.txt0000644000175100001660000000002314743546146025070 0ustar00runnerdockerimportlib_metadata ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/mypy.ini0000644000175100001660000000111514743546124016501 0ustar00runnerdocker[mypy] # Is the project well-typed? 
strict = False # Early opt-in even when strict = False warn_unused_ignores = True warn_redundant_casts = True enable_error_code = ignore-without-code # Support namespace packages per https://github.com/python/mypy/issues/14057 explicit_package_bases = True disable_error_code = # Disable due to many false positives overload-overlap, # jaraco/pytest-perf#16 [mypy-pytest_perf.*] ignore_missing_imports = True # jaraco/zipp#123 [mypy-zipp.*] ignore_missing_imports = True # jaraco/jaraco.test#7 [mypy-jaraco.test.*] ignore_missing_imports = True ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/pyproject.toml0000644000175100001660000000260514743546124017723 0ustar00runnerdocker[build-system] requires = ["setuptools>=61.2", "setuptools_scm[toml]>=3.4.1"] build-backend = "setuptools.build_meta" [project] name = "importlib_metadata" authors = [ { name = "Jason R. Coombs", email = "jaraco@jaraco.com" }, ] description = "Read metadata from Python packages" readme = "README.rst" classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", ] requires-python = ">=3.9" dependencies = [ "zipp>=3.20", 'typing-extensions>=3.6.4; python_version < "3.8"', ] dynamic = ["version"] [project.urls] Source = "https://github.com/python/importlib_metadata" [project.optional-dependencies] test = [ # upstream "pytest >= 6, != 8.1.*", # local 'importlib_resources>=1.3; python_version < "3.9"', "packaging", "pyfakefs", "flufl.flake8", "pytest-perf >= 0.9.2", "jaraco.test >= 5.4", ] doc = [ # upstream "sphinx >= 3.5", "jaraco.packaging >= 9.3", "rst.linker >= 1.9", "furo", "sphinx-lint", # tidelift "jaraco.tidelift >= 1.4", # local ] perf = ["ipython"] check = [ "pytest-checkdocs >= 2.4", "pytest-ruff >= 0.2.1; sys_platform != 'cygwin'", ] cover = [ "pytest-cov", ] enabler = [ "pytest-enabler >= 2.2", ] type = [ # upstream "pytest-mypy", # local ] [tool.setuptools_scm] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/pytest.ini0000644000175100001660000000111014743546124017026 0ustar00runnerdocker[pytest] norecursedirs=dist build .tox .eggs addopts= --doctest-modules --import-mode importlib consider_namespace_packages=true filterwarnings= ## upstream # Ensure ResourceWarnings are emitted default::ResourceWarning # realpython/pytest-mypy#152 ignore:'encoding' argument not specified::pytest_mypy # python/cpython#100750 ignore:'encoding' argument not specified::platform # pypa/build#615 ignore:'encoding' argument not specified::build.env # dateutil/dateutil#1284 ignore:datetime.datetime.utcfromtimestamp:DeprecationWarning:dateutil.tz.tz ## end upstream ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/ruff.toml0000644000175100001660000000117114743546124016643 0ustar00runnerdocker# extend pyproject.toml for requires-python (workaround astral-sh/ruff#10299) extend = "pyproject.toml" [lint] extend-select = [ "C901", "PERF401", "W", ] ignore = [ # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules "W191", "E111", "E114", "E117", "D206", "D300", "Q000", "Q001", "Q002", "Q003", "COM812", "COM819", "ISC001", "ISC002", ] [format] # Enable preview to get hugged parenthesis unwrapping and other nice surprises # See 
https://github.com/jaraco/skeleton/pull/133#issuecomment-2239538373 preview = true # https://docs.astral.sh/ruff/settings/#format_quote-style quote-style = "preserve" ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1737411686.0663052 importlib_metadata-8.6.1/setup.cfg0000644000175100001660000000004614743546146016631 0ustar00runnerdocker[egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1737411686.060305 importlib_metadata-8.6.1/tests/0000755000175100001660000000000014743546146016152 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/__init__.py0000644000175100001660000000000014743546124020245 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/_context.py0000644000175100001660000000043214743546124020342 0ustar00runnerdockerimport contextlib # from jaraco.context 4.3 class suppress(contextlib.suppress, contextlib.ContextDecorator): """ A version of contextlib.suppress with decorator support. >>> @suppress(KeyError) ... def key_error(): ... {}[''] >>> key_error() """ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/_path.py0000644000175100001660000000605014743546124017614 0ustar00runnerdocker# from jaraco.path 3.7.2 from __future__ import annotations import functools import pathlib from typing import TYPE_CHECKING, Mapping, Protocol, Union, runtime_checkable if TYPE_CHECKING: from typing_extensions import Self class Symlink(str): """ A string indicating the target of a symlink. """ FilesSpec = Mapping[str, Union[str, bytes, Symlink, 'FilesSpec']] @runtime_checkable class TreeMaker(Protocol): def __truediv__(self, other, /) -> Self: ... def mkdir(self, *, exist_ok) -> object: ... def write_text(self, content, /, *, encoding) -> object: ... def write_bytes(self, content, /) -> object: ... def symlink_to(self, target, /) -> object: ... def _ensure_tree_maker(obj: str | TreeMaker) -> TreeMaker: return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) def build( spec: FilesSpec, prefix: str | TreeMaker = pathlib.Path(), ): """ Build a set of files/directories, as described by the spec. Each key represents a pathname, and the value represents the content. Content may be a nested directory. >>> spec = { ... 'README.txt': "A README file", ... "foo": { ... "__init__.py": "", ... "bar": { ... "__init__.py": "", ... }, ... "baz.py": "# Some code", ... "bar.py": Symlink("baz.py"), ... }, ... "bing": Symlink("foo"), ... } >>> target = getfixture('tmp_path') >>> build(spec, target) >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8') '# Some code' >>> target.joinpath('bing/bar.py').read_text(encoding='utf-8') '# Some code' """ for name, contents in spec.items(): create(contents, _ensure_tree_maker(prefix) / name) @functools.singledispatch def create(content: str | bytes | FilesSpec, path: TreeMaker) -> None: path.mkdir(exist_ok=True) # Mypy only looks at the signature of the main singledispatch method. 
So it must contain the complete Union build(content, prefix=path) # type: ignore[arg-type] # python/mypy#11727 @create.register def _(content: bytes, path: TreeMaker) -> None: path.write_bytes(content) @create.register def _(content: str, path: TreeMaker) -> None: path.write_text(content, encoding='utf-8') @create.register def _(content: Symlink, path: TreeMaker) -> None: path.symlink_to(content) class Recording: """ A TreeMaker object that records everything that would be written. >>> r = Recording() >>> build({'foo': {'foo1.txt': 'yes'}, 'bar.txt': 'abc'}, r) >>> r.record ['foo/foo1.txt', 'bar.txt'] """ def __init__(self, loc=pathlib.PurePosixPath(), record=None): self.loc = loc self.record = record if record is not None else [] def __truediv__(self, other): return Recording(self.loc / other, self.record) def write_text(self, content, **kwargs): self.record.append(str(self.loc)) write_bytes = write_text def mkdir(self, **kwargs): return def symlink_to(self, target): pass ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1737411686.060305 importlib_metadata-8.6.1/tests/compat/0000755000175100001660000000000014743546146017435 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/compat/__init__.py0000644000175100001660000000000014743546124021530 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/compat/py312.py0000644000175100001660000000055414743546124020665 0ustar00runnerdockerimport contextlib from .py39 import import_helper @contextlib.contextmanager def isolated_modules(): """ Save modules on entry and cleanup on exit. """ (saved,) = import_helper.modules_setup() try: yield finally: import_helper.modules_cleanup(saved) vars(import_helper).setdefault('isolated_modules', isolated_modules) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/compat/py39.py0000644000175100001660000000043314743546124020607 0ustar00runnerdockerfrom jaraco.test.cpython import from_test_support, try_import os_helper = try_import('os_helper') or from_test_support( 'FS_NONASCII', 'skip_unless_symlink', 'temp_dir' ) import_helper = try_import('import_helper') or from_test_support( 'modules_setup', 'modules_cleanup' ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/compat/test_py39_compat.py0000644000175100001660000000502214743546124023210 0ustar00runnerdockerimport pathlib import sys import unittest from importlib_metadata import ( distribution, distributions, entry_points, metadata, version, ) from .. 
import fixtures class OldStdlibFinderTests(fixtures.DistInfoPkgOffPath, unittest.TestCase): def setUp(self): if sys.version_info >= (3, 10): self.skipTest("Tests specific for Python 3.8/3.9") super().setUp() def _meta_path_finder(self): from importlib.metadata import ( Distribution, DistributionFinder, PathDistribution, ) from importlib.util import spec_from_file_location path = pathlib.Path(self.site_dir) class CustomDistribution(Distribution): def __init__(self, name, path): self.name = name self._path_distribution = PathDistribution(path) def read_text(self, filename): return self._path_distribution.read_text(filename) def locate_file(self, path): return self._path_distribution.locate_file(path) class CustomFinder: @classmethod def find_spec(cls, fullname, _path=None, _target=None): candidate = pathlib.Path(path, *fullname.split(".")).with_suffix(".py") if candidate.exists(): return spec_from_file_location(fullname, candidate) @classmethod def find_distributions(self, context=DistributionFinder.Context()): for dist_info in path.glob("*.dist-info"): yield PathDistribution(dist_info) name, _, _ = str(dist_info).partition("-") yield CustomDistribution(name + "_custom", dist_info) return CustomFinder def test_compatibility_with_old_stdlib_path_distribution(self): """ Given a custom finder that uses Python 3.8/3.9 importlib.metadata is installed, when importlib_metadata functions are called, there should be no exceptions. Ref python/importlib_metadata#396. """ self.fixtures.enter_context(fixtures.install_finder(self._meta_path_finder())) assert list(distributions()) assert distribution("distinfo_pkg") assert distribution("distinfo_pkg_custom") assert version("distinfo_pkg") > "0" assert version("distinfo_pkg_custom") > "0" assert list(metadata("distinfo_pkg")) assert list(metadata("distinfo_pkg_custom")) assert list(entry_points(group="entries")) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1737411686.0613053 importlib_metadata-8.6.1/tests/data/0000755000175100001660000000000014743546146017063 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/data/__init__.py0000644000175100001660000000000014743546124021156 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/data/example-21.12-py3-none-any.whl0000644000175100001660000000265714743546124024134 0ustar00runnerdockerPKz{N)x!!example/__init__.pyKIMSMдRԒҢ<Ԋ܂Tu.PK{NhX{ example-21.12.dist-info/METADATA] 0E1[7ABΗjyMHSԿA {7 Ū1 l?!)0UeR ޝvQX'ܹn+s_J~ed]8TȬeYjBAoPK{N&\\example-21.12.dist-info/WHEEL HM K-*ϳR03rOK-J,/RHJ,./Q0363 /, (-JLR()*M ILR(4KM̫PK{NM%+A(example-21.12.dist-info/entry_points.txtN+I/N.,()rH-IUUHr3R sPK{Nd R %example-21.12.dist-info/top_level.txtKH-IPK{N<".example-21.12.dist-info/RECORDu̹r@o#E Dpr Ȯ|}3ix>:]P6%iG oyʵw~.eev@lˎ6{ŦLbŻ JDLRbBM#Pu5P?r&Y5NB(v-j[Q"Ͼg0W|1q?֣3>iٝ?;5dQ_6Aʆ` ' ]M+T PsȎ)ܺQ8Ԃ:mp2`)A:VPk(ePKz{N)x!!example/__init__.pyPK{NhX{ Rexample-21.12.dist-info/METADATAPK{N&\\ example-21.12.dist-info/WHEELPK{NM%+A(example-21.12.dist-info/entry_points.txtPK{Nd R %example-21.12.dist-info/top_level.txtPK{N<".`example-21.12.dist-info/RECORDPK././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/data/example-21.12-py3.6.egg0000644000175100001660000000273114743546124022517 0ustar00runnerdockerPKCN3UĄEGG-INFO/PKG-INFO]A 0E9E.b̮BPS!Nm ӄdz{EԀG 
0lnVxJOѭ"#vkH\n)Jw\^YM(8 2ʏk,.oucTPKCNfEGG-INFO/SOURCES.txtuA @ FwqPKuS3!*'+(example2-1.0.0.dist-info/entrypoints.txtN+I/N.,()JH-IUUr3PKS$0gyexample2-1.0.0.dist-info/RECORD}нv0@g J A:@ G#X` h[ۡ.Kwf$E#t= (3, 9): from importlib import resources else: import importlib_resources as resources @contextlib.contextmanager def tmp_path(): """ Like os_helper.temp_dir, but yields a pathlib.Path. """ with os_helper.temp_dir() as path: yield pathlib.Path(path) @contextlib.contextmanager def install_finder(finder): sys.meta_path.append(finder) try: yield finally: sys.meta_path.remove(finder) class Fixtures: def setUp(self): self.fixtures = contextlib.ExitStack() self.addCleanup(self.fixtures.close) class SiteDir(Fixtures): def setUp(self): super().setUp() self.site_dir = self.fixtures.enter_context(tmp_path()) class OnSysPath(Fixtures): @staticmethod @contextlib.contextmanager def add_sys_path(dir): sys.path[:0] = [str(dir)] try: yield finally: sys.path.remove(str(dir)) def setUp(self): super().setUp() self.fixtures.enter_context(self.add_sys_path(self.site_dir)) self.fixtures.enter_context(import_helper.isolated_modules()) class SiteBuilder(SiteDir): def setUp(self): super().setUp() for cls in self.__class__.mro(): with contextlib.suppress(AttributeError): build_files(cls.files, prefix=self.site_dir) class DistInfoPkg(OnSysPath, SiteBuilder): files: FilesSpec = { "distinfo_pkg-1.0.0.dist-info": { "METADATA": """ Name: distinfo-pkg Author: Steven Ma Version: 1.0.0 Requires-Dist: wheel >= 1.0 Requires-Dist: pytest; extra == 'test' Keywords: sample package Once upon a time There was a distinfo pkg """, "RECORD": "mod.py,sha256=abc,20\n", "entry_points.txt": """ [entries] main = mod:main ns:sub = mod:main """, }, "mod.py": """ def main(): print("hello world") """, } def make_uppercase(self): """ Rewrite metadata with everything uppercase. """ shutil.rmtree(self.site_dir / "distinfo_pkg-1.0.0.dist-info") files = copy.deepcopy(DistInfoPkg.files) info = files["distinfo_pkg-1.0.0.dist-info"] info["METADATA"] = info["METADATA"].upper() build_files(files, self.site_dir) class DistInfoPkgEditable(DistInfoPkg): """ Package with a PEP 660 direct_url.json. 
""" some_hash = '524127ce937f7cb65665130c695abd18ca386f60bb29687efb976faa1596fdcc' files: FilesSpec = { 'distinfo_pkg-1.0.0.dist-info': { 'direct_url.json': json.dumps({ "archive_info": { "hash": f"sha256={some_hash}", "hashes": {"sha256": f"{some_hash}"}, }, "url": "file:///path/to/distinfo_pkg-1.0.0.editable-py3-none-any.whl", }) }, } class DistInfoPkgWithDot(OnSysPath, SiteBuilder): files: FilesSpec = { "pkg_dot-1.0.0.dist-info": { "METADATA": """ Name: pkg.dot Version: 1.0.0 """, }, } class DistInfoPkgWithDotLegacy(OnSysPath, SiteBuilder): files: FilesSpec = { "pkg.dot-1.0.0.dist-info": { "METADATA": """ Name: pkg.dot Version: 1.0.0 """, }, "pkg.lot.egg-info": { "METADATA": """ Name: pkg.lot Version: 1.0.0 """, }, } class DistInfoPkgOffPath(SiteBuilder): files = DistInfoPkg.files class EggInfoPkg(OnSysPath, SiteBuilder): files: FilesSpec = { "egginfo_pkg.egg-info": { "PKG-INFO": """ Name: egginfo-pkg Author: Steven Ma License: Unknown Version: 1.0.0 Classifier: Intended Audience :: Developers Classifier: Topic :: Software Development :: Libraries Keywords: sample package Description: Once upon a time There was an egginfo package """, "SOURCES.txt": """ mod.py egginfo_pkg.egg-info/top_level.txt """, "entry_points.txt": """ [entries] main = mod:main """, "requires.txt": """ wheel >= 1.0; python_version >= "2.7" [test] pytest """, "top_level.txt": "mod\n", }, "mod.py": """ def main(): print("hello world") """, } class EggInfoPkgPipInstalledNoToplevel(OnSysPath, SiteBuilder): files: FilesSpec = { "egg_with_module_pkg.egg-info": { "PKG-INFO": "Name: egg_with_module-pkg", # SOURCES.txt is made from the source archive, and contains files # (setup.py) that are not present after installation. "SOURCES.txt": """ egg_with_module.py setup.py egg_with_module_pkg.egg-info/PKG-INFO egg_with_module_pkg.egg-info/SOURCES.txt egg_with_module_pkg.egg-info/top_level.txt """, # installed-files.txt is written by pip, and is a strictly more # accurate source than SOURCES.txt as to the installed contents of # the package. "installed-files.txt": """ ../egg_with_module.py PKG-INFO SOURCES.txt top_level.txt """, # missing top_level.txt (to trigger fallback to installed-files.txt) }, "egg_with_module.py": """ def main(): print("hello world") """, } class EggInfoPkgPipInstalledExternalDataFiles(OnSysPath, SiteBuilder): files: FilesSpec = { "egg_with_module_pkg.egg-info": { "PKG-INFO": "Name: egg_with_module-pkg", # SOURCES.txt is made from the source archive, and contains files # (setup.py) that are not present after installation. "SOURCES.txt": """ egg_with_module.py setup.py egg_with_module.json egg_with_module_pkg.egg-info/PKG-INFO egg_with_module_pkg.egg-info/SOURCES.txt egg_with_module_pkg.egg-info/top_level.txt """, # installed-files.txt is written by pip, and is a strictly more # accurate source than SOURCES.txt as to the installed contents of # the package. "installed-files.txt": """ ../../../etc/jupyter/jupyter_notebook_config.d/relative.json /etc/jupyter/jupyter_notebook_config.d/absolute.json ../egg_with_module.py PKG-INFO SOURCES.txt top_level.txt """, # missing top_level.txt (to trigger fallback to installed-files.txt) }, "egg_with_module.py": """ def main(): print("hello world") """, } class EggInfoPkgPipInstalledNoModules(OnSysPath, SiteBuilder): files: FilesSpec = { "egg_with_no_modules_pkg.egg-info": { "PKG-INFO": "Name: egg_with_no_modules-pkg", # SOURCES.txt is made from the source archive, and contains files # (setup.py) that are not present after installation. 
"SOURCES.txt": """ setup.py egg_with_no_modules_pkg.egg-info/PKG-INFO egg_with_no_modules_pkg.egg-info/SOURCES.txt egg_with_no_modules_pkg.egg-info/top_level.txt """, # installed-files.txt is written by pip, and is a strictly more # accurate source than SOURCES.txt as to the installed contents of # the package. "installed-files.txt": """ PKG-INFO SOURCES.txt top_level.txt """, # top_level.txt correctly reflects that no modules are installed "top_level.txt": b"\n", }, } class EggInfoPkgSourcesFallback(OnSysPath, SiteBuilder): files: FilesSpec = { "sources_fallback_pkg.egg-info": { "PKG-INFO": "Name: sources_fallback-pkg", # SOURCES.txt is made from the source archive, and contains files # (setup.py) that are not present after installation. "SOURCES.txt": """ sources_fallback.py setup.py sources_fallback_pkg.egg-info/PKG-INFO sources_fallback_pkg.egg-info/SOURCES.txt """, # missing installed-files.txt (i.e. not installed by pip) and # missing top_level.txt (to trigger fallback to SOURCES.txt) }, "sources_fallback.py": """ def main(): print("hello world") """, } class EggInfoFile(OnSysPath, SiteBuilder): files: FilesSpec = { "egginfo_file.egg-info": """ Metadata-Version: 1.0 Name: egginfo_file Version: 0.1 Summary: An example package Home-page: www.example.com Author: Eric Haffa-Vee Author-email: eric@example.coms License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN """, } # dedent all text strings before writing orig = _path.create.registry[str] _path.create.register(str, lambda content, path: orig(DALS(content), path)) build_files = _path.build def build_record(file_defs): return ''.join(f'{name},,\n' for name in record_names(file_defs)) def record_names(file_defs): recording = _path.Recording() _path.build(file_defs, recording) return recording.record class FileBuilder: def unicode_filename(self): return os_helper.FS_NONASCII or self.skip( "File system does not support non-ascii." ) def DALS(str): "Dedent and left-strip" return textwrap.dedent(str).lstrip() class ZipFixtures: root = 'tests.data' def _fixture_on_path(self, filename): pkg_file = resources.files(self.root).joinpath(filename) file = self.resources.enter_context(resources.as_file(pkg_file)) assert file.name.startswith('example'), file.name sys.path.insert(0, str(file)) self.resources.callback(sys.path.pop, 0) def setUp(self): # Add self.zip_name to the front of sys.path. self.resources = contextlib.ExitStack() self.addCleanup(self.resources.close) def parameterize(*args_set): """Run test method with a series of parameters.""" def wrapper(func): @functools.wraps(func) def _inner(self): for args in args_set: with self.subTest(**args): func(self, **args) return _inner return wrapper ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/test_api.py0000644000175100001660000002525314743546124020337 0ustar00runnerdockerimport importlib import re import textwrap import unittest from importlib_metadata import ( Distribution, PackageNotFoundError, distribution, entry_points, files, metadata, requires, version, ) from . import fixtures class APITests( fixtures.EggInfoPkg, fixtures.EggInfoPkgPipInstalledNoToplevel, fixtures.EggInfoPkgPipInstalledNoModules, fixtures.EggInfoPkgPipInstalledExternalDataFiles, fixtures.EggInfoPkgSourcesFallback, fixtures.DistInfoPkg, fixtures.DistInfoPkgWithDot, fixtures.EggInfoFile, unittest.TestCase, ): version_pattern = r'\d+\.\d+(\.\d)?' 
def test_retrieves_version_of_self(self): pkg_version = version('egginfo-pkg') assert isinstance(pkg_version, str) assert re.match(self.version_pattern, pkg_version) def test_retrieves_version_of_distinfo_pkg(self): pkg_version = version('distinfo-pkg') assert isinstance(pkg_version, str) assert re.match(self.version_pattern, pkg_version) def test_for_name_does_not_exist(self): with self.assertRaises(PackageNotFoundError): distribution('does-not-exist') def test_name_normalization(self): names = 'pkg.dot', 'pkg_dot', 'pkg-dot', 'pkg..dot', 'Pkg.Dot' for name in names: with self.subTest(name): assert distribution(name).metadata['Name'] == 'pkg.dot' def test_prefix_not_matched(self): prefixes = 'p', 'pkg', 'pkg.' for prefix in prefixes: with self.subTest(prefix): with self.assertRaises(PackageNotFoundError): distribution(prefix) def test_for_top_level(self): tests = [ ('egginfo-pkg', 'mod'), ('egg_with_no_modules-pkg', ''), ] for pkg_name, expect_content in tests: with self.subTest(pkg_name): self.assertEqual( distribution(pkg_name).read_text('top_level.txt').strip(), expect_content, ) def test_read_text(self): tests = [ ('egginfo-pkg', 'mod\n'), ('egg_with_no_modules-pkg', '\n'), ] for pkg_name, expect_content in tests: with self.subTest(pkg_name): top_level = [ path for path in files(pkg_name) if path.name == 'top_level.txt' ][0] self.assertEqual(top_level.read_text(), expect_content) def test_entry_points(self): eps = entry_points() assert 'entries' in eps.groups entries = eps.select(group='entries') assert 'main' in entries.names ep = entries['main'] self.assertEqual(ep.value, 'mod:main') self.assertEqual(ep.extras, []) def test_entry_points_distribution(self): entries = entry_points(group='entries') for entry in ("main", "ns:sub"): ep = entries[entry] self.assertIn(ep.dist.name, ('distinfo-pkg', 'egginfo-pkg')) self.assertEqual(ep.dist.version, "1.0.0") def test_entry_points_unique_packages_normalized(self): """ Entry points should only be exposed for the first package on sys.path with a given name (even when normalized). """ alt_site_dir = self.fixtures.enter_context(fixtures.tmp_path()) self.fixtures.enter_context(self.add_sys_path(alt_site_dir)) alt_pkg = { "DistInfo_pkg-1.1.0.dist-info": { "METADATA": """ Name: distinfo-pkg Version: 1.1.0 """, "entry_points.txt": """ [entries] main = mod:altmain """, }, } fixtures.build_files(alt_pkg, alt_site_dir) entries = entry_points(group='entries') assert not any( ep.dist.name == 'distinfo-pkg' and ep.dist.version == '1.0.0' for ep in entries ) # ns:sub doesn't exist in alt_pkg assert 'ns:sub' not in entries.names def test_entry_points_missing_name(self): with self.assertRaises(KeyError): entry_points(group='entries')['missing'] def test_entry_points_missing_group(self): assert entry_points(group='missing') == () def test_entry_points_allows_no_attributes(self): ep = entry_points().select(group='entries', name='main') with self.assertRaises(AttributeError): ep.foo = 4 def test_metadata_for_this_package(self): md = metadata('egginfo-pkg') assert md['author'] == 'Steven Ma' assert md['LICENSE'] == 'Unknown' assert md['Name'] == 'egginfo-pkg' classifiers = md.get_all('Classifier') assert 'Topic :: Software Development :: Libraries' in classifiers def test_importlib_metadata_version(self): resolved = version('importlib-metadata') assert re.match(self.version_pattern, resolved) def test_missing_key(self): """ Requesting a missing key raises KeyError. 
""" md = metadata('distinfo-pkg') with self.assertRaises(KeyError): md['does-not-exist'] def test_get_key(self): """ Getting a key gets the key. """ md = metadata('egginfo-pkg') assert md.get('Name') == 'egginfo-pkg' def test_get_missing_key(self): """ Requesting a missing key will return None. """ md = metadata('distinfo-pkg') assert md.get('does-not-exist') is None @staticmethod def _test_files(files): root = files[0].root for file in files: assert file.root == root assert not file.hash or file.hash.value assert not file.hash or file.hash.mode == 'sha256' assert not file.size or file.size >= 0 assert file.locate().exists() assert isinstance(file.read_binary(), bytes) if file.name.endswith('.py'): file.read_text() def test_file_hash_repr(self): util = [p for p in files('distinfo-pkg') if p.name == 'mod.py'][0] self.assertRegex(repr(util.hash), '') def test_files_dist_info(self): self._test_files(files('distinfo-pkg')) def test_files_egg_info(self): self._test_files(files('egginfo-pkg')) self._test_files(files('egg_with_module-pkg')) self._test_files(files('egg_with_no_modules-pkg')) self._test_files(files('sources_fallback-pkg')) def test_version_egg_info_file(self): self.assertEqual(version('egginfo-file'), '0.1') def test_requires_egg_info_file(self): requirements = requires('egginfo-file') self.assertIsNone(requirements) def test_requires_egg_info(self): deps = requires('egginfo-pkg') assert len(deps) == 2 assert any(dep == 'wheel >= 1.0; python_version >= "2.7"' for dep in deps) def test_requires_egg_info_empty(self): fixtures.build_files( { 'requires.txt': '', }, self.site_dir.joinpath('egginfo_pkg.egg-info'), ) deps = requires('egginfo-pkg') assert deps == [] def test_requires_dist_info(self): deps = requires('distinfo-pkg') assert len(deps) == 2 assert all(deps) assert 'wheel >= 1.0' in deps assert "pytest; extra == 'test'" in deps def test_more_complex_deps_requires_text(self): requires = textwrap.dedent( """ dep1 dep2 [:python_version < "3"] dep3 [extra1] dep4 dep6@ git+https://example.com/python/dep.git@v1.0.0 [extra2:python_version < "3"] dep5 """ ) deps = sorted(Distribution._deps_from_requires_text(requires)) expected = [ 'dep1', 'dep2', 'dep3; python_version < "3"', 'dep4; extra == "extra1"', 'dep5; (python_version < "3") and extra == "extra2"', 'dep6@ git+https://example.com/python/dep.git@v1.0.0 ; extra == "extra1"', ] # It's important that the environment marker expression be # wrapped in parentheses to avoid the following 'and' binding more # tightly than some other part of the environment expression. 
assert deps == expected def test_as_json(self): md = metadata('distinfo-pkg').json assert 'name' in md assert md['keywords'] == ['sample', 'package'] desc = md['description'] assert desc.startswith('Once upon a time\nThere was') assert len(md['requires_dist']) == 2 def test_as_json_egg_info(self): md = metadata('egginfo-pkg').json assert 'name' in md assert md['keywords'] == ['sample', 'package'] desc = md['description'] assert desc.startswith('Once upon a time\nThere was') assert len(md['classifier']) == 2 def test_as_json_odd_case(self): self.make_uppercase() md = metadata('distinfo-pkg').json assert 'name' in md assert len(md['requires_dist']) == 2 assert md['keywords'] == ['SAMPLE', 'PACKAGE'] class LegacyDots(fixtures.DistInfoPkgWithDotLegacy, unittest.TestCase): def test_name_normalization(self): names = 'pkg.dot', 'pkg_dot', 'pkg-dot', 'pkg..dot', 'Pkg.Dot' for name in names: with self.subTest(name): assert distribution(name).metadata['Name'] == 'pkg.dot' def test_name_normalization_versionless_egg_info(self): names = 'pkg.lot', 'pkg_lot', 'pkg-lot', 'pkg..lot', 'Pkg.Lot' for name in names: with self.subTest(name): assert distribution(name).metadata['Name'] == 'pkg.lot' class OffSysPathTests(fixtures.DistInfoPkgOffPath, unittest.TestCase): def test_find_distributions_specified_path(self): dists = Distribution.discover(path=[str(self.site_dir)]) assert any(dist.metadata['Name'] == 'distinfo-pkg' for dist in dists) def test_distribution_at_pathlib(self): """Demonstrate how to load metadata direct from a directory.""" dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info' dist = Distribution.at(dist_info_path) assert dist.version == '1.0.0' def test_distribution_at_str(self): dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info' dist = Distribution.at(str(dist_info_path)) assert dist.version == '1.0.0' class InvalidateCache(unittest.TestCase): def test_invalidate_cache(self): # No externally observable behavior, but ensures test coverage... importlib.invalidate_caches() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/test_integration.py0000644000175100001660000000301414743546124022100 0ustar00runnerdocker""" Test behaviors specific to importlib_metadata. These tests are excluded downstream in CPython as they test functionality only in importlib_metadata or require behaviors ('packaging') that aren't available in the stdlib. """ import unittest import packaging.requirements import packaging.version from importlib_metadata import ( _compat, version, ) from . import fixtures class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase): def test_package_spec_installed(self): """ Illustrate the recommended procedure to determine if a specified version of a package is installed. 
""" def is_installed(package_spec): req = packaging.requirements.Requirement(package_spec) return version(req.name) in req.specifier assert is_installed('distinfo-pkg==1.0') assert is_installed('distinfo-pkg>=1.0,<2.0') assert not is_installed('distinfo-pkg<1.0') class FinderTests(fixtures.Fixtures, unittest.TestCase): def test_finder_without_module(self): class ModuleFreeFinder: """ A finder without an __module__ attribute """ def find_module(self, name): pass def __getattribute__(self, name): if name == '__module__': raise AttributeError(name) return super().__getattribute__(name) self.fixtures.enter_context(fixtures.install_finder(ModuleFreeFinder())) _compat.disable_stdlib_finder() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/test_main.py0000644000175100001660000003710014743546124020504 0ustar00runnerdockerimport importlib import pickle import re import unittest import pyfakefs.fake_filesystem_unittest as ffs import importlib_metadata from importlib_metadata import ( Distribution, EntryPoint, PackageNotFoundError, _unique, distributions, entry_points, metadata, packages_distributions, version, ) from . import fixtures from ._path import Symlink from .compat.py39 import os_helper class BasicTests(fixtures.DistInfoPkg, unittest.TestCase): version_pattern = r'\d+\.\d+(\.\d)?' def test_retrieves_version_of_self(self): dist = Distribution.from_name('distinfo-pkg') assert isinstance(dist.version, str) assert re.match(self.version_pattern, dist.version) def test_for_name_does_not_exist(self): with self.assertRaises(PackageNotFoundError): Distribution.from_name('does-not-exist') def test_package_not_found_mentions_metadata(self): """ When a package is not found, that could indicate that the package is not installed or that it is installed without metadata. Ensure the exception mentions metadata to help guide users toward the cause. See #124. """ with self.assertRaises(PackageNotFoundError) as ctx: Distribution.from_name('does-not-exist') assert "metadata" in str(ctx.exception) def test_abc_enforced(self): with self.assertRaises(TypeError): type('DistributionSubclass', (Distribution,), {})() @fixtures.parameterize( dict(name=None), dict(name=''), ) def test_invalid_inputs_to_from_name(self, name): with self.assertRaises(Exception): Distribution.from_name(name) class ImportTests(fixtures.DistInfoPkg, unittest.TestCase): def test_import_nonexistent_module(self): # Ensure that the MetadataPathFinder does not crash an import of a # non-existent module. with self.assertRaises(ImportError): importlib.import_module('does_not_exist') def test_resolve(self): ep = entry_points(group='entries')['main'] self.assertEqual(ep.load().__name__, "main") def test_entrypoint_with_colon_in_name(self): ep = entry_points(group='entries')['ns:sub'] self.assertEqual(ep.value, 'mod:main') def test_resolve_without_attr(self): ep = EntryPoint( name='ep', value='importlib_metadata', group='grp', ) assert ep.load() is importlib_metadata class NameNormalizationTests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): @staticmethod def make_pkg(name): """ Create minimal metadata for a dist-info package with the indicated name on the file system. """ return { f'{name}.dist-info': { 'METADATA': 'VERSION: 1.0\n', }, } def test_dashes_in_dist_name_found_as_underscores(self): """ For a package with a dash in the name, the dist-info metadata uses underscores in the name. Ensure the metadata loads. 
""" fixtures.build_files(self.make_pkg('my_pkg'), self.site_dir) assert version('my-pkg') == '1.0' def test_dist_name_found_as_any_case(self): """ Ensure the metadata loads when queried with any case. """ pkg_name = 'CherryPy' fixtures.build_files(self.make_pkg(pkg_name), self.site_dir) assert version(pkg_name) == '1.0' assert version(pkg_name.lower()) == '1.0' assert version(pkg_name.upper()) == '1.0' def test_unique_distributions(self): """ Two distributions varying only by non-normalized name on the file system should resolve as the same. """ fixtures.build_files(self.make_pkg('abc'), self.site_dir) before = list(_unique(distributions())) alt_site_dir = self.fixtures.enter_context(fixtures.tmp_path()) self.fixtures.enter_context(self.add_sys_path(alt_site_dir)) fixtures.build_files(self.make_pkg('ABC'), alt_site_dir) after = list(_unique(distributions())) assert len(after) == len(before) class InvalidMetadataTests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): @staticmethod def make_pkg(name, files=dict(METADATA="VERSION: 1.0")): """ Create metadata for a dist-info package with name and files. """ return { f'{name}.dist-info': files, } def test_valid_dists_preferred(self): """ Dists with metadata should be preferred when discovered by name. Ref python/importlib_metadata#489. """ # create three dists with the valid one in the middle (lexicographically) # such that on most file systems, the valid one is never naturally first. fixtures.build_files(self.make_pkg('foo-4.0', files={}), self.site_dir) fixtures.build_files(self.make_pkg('foo-4.1'), self.site_dir) fixtures.build_files(self.make_pkg('foo-4.2', files={}), self.site_dir) dist = Distribution.from_name('foo') assert dist.version == "1.0" class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): @staticmethod def pkg_with_non_ascii_description(site_dir): """ Create minimal metadata for a package with non-ASCII in the description. """ contents = { 'portend.dist-info': { 'METADATA': 'Description: pôrˈtend', }, } fixtures.build_files(contents, site_dir) return 'portend' @staticmethod def pkg_with_non_ascii_description_egg_info(site_dir): """ Create minimal metadata for an egg-info package with non-ASCII in the description. 
""" contents = { 'portend.dist-info': { 'METADATA': """ Name: portend pôrˈtend""", }, } fixtures.build_files(contents, site_dir) return 'portend' def test_metadata_loads(self): pkg_name = self.pkg_with_non_ascii_description(self.site_dir) meta = metadata(pkg_name) assert meta['Description'] == 'pôrˈtend' def test_metadata_loads_egg_info(self): pkg_name = self.pkg_with_non_ascii_description_egg_info(self.site_dir) meta = metadata(pkg_name) assert meta['Description'] == 'pôrˈtend' class DiscoveryTests( fixtures.EggInfoPkg, fixtures.EggInfoPkgPipInstalledNoToplevel, fixtures.EggInfoPkgPipInstalledNoModules, fixtures.EggInfoPkgSourcesFallback, fixtures.DistInfoPkg, unittest.TestCase, ): def test_package_discovery(self): dists = list(distributions()) assert all(isinstance(dist, Distribution) for dist in dists) assert any(dist.metadata['Name'] == 'egginfo-pkg' for dist in dists) assert any(dist.metadata['Name'] == 'egg_with_module-pkg' for dist in dists) assert any(dist.metadata['Name'] == 'egg_with_no_modules-pkg' for dist in dists) assert any(dist.metadata['Name'] == 'sources_fallback-pkg' for dist in dists) assert any(dist.metadata['Name'] == 'distinfo-pkg' for dist in dists) def test_invalid_usage(self): with self.assertRaises(ValueError): list(distributions(context='something', name='else')) def test_interleaved_discovery(self): """ Ensure interleaved searches are safe. When the search is cached, it is possible for searches to be interleaved, so make sure those use-cases are safe. Ref #293 """ dists = distributions() next(dists) version('egginfo-pkg') next(dists) class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): def test_egg_info(self): # make an `EGG-INFO` directory that's unrelated self.site_dir.joinpath('EGG-INFO').mkdir() # used to crash with `IsADirectoryError` with self.assertRaises(PackageNotFoundError): version('unknown-package') def test_egg(self): egg = self.site_dir.joinpath('foo-3.6.egg') egg.mkdir() with self.add_sys_path(egg): with self.assertRaises(PackageNotFoundError): version('foo') class MissingSysPath(fixtures.OnSysPath, unittest.TestCase): site_dir = '/does-not-exist' def test_discovery(self): """ Discovering distributions should succeed even if there is an invalid path on sys.path. """ importlib_metadata.distributions() class InaccessibleSysPath(fixtures.OnSysPath, ffs.TestCase): site_dir = '/access-denied' def setUp(self): super().setUp() self.setUpPyfakefs() self.fs.create_dir(self.site_dir, perm_bits=000) def test_discovery(self): """ Discovering distributions should succeed even if there is an invalid path on sys.path. """ list(importlib_metadata.distributions()) class TestEntryPoints(unittest.TestCase): def __init__(self, *args): super().__init__(*args) self.ep = importlib_metadata.EntryPoint( name='name', value='value', group='group' ) def test_entry_point_pickleable(self): revived = pickle.loads(pickle.dumps(self.ep)) assert revived == self.ep def test_positional_args(self): """ Capture legacy (namedtuple) construction, discouraged. 
""" EntryPoint('name', 'value', 'group') def test_immutable(self): """EntryPoints should be immutable""" with self.assertRaises(AttributeError): self.ep.name = 'badactor' def test_repr(self): assert 'EntryPoint' in repr(self.ep) assert 'name=' in repr(self.ep) assert "'name'" in repr(self.ep) def test_hashable(self): """EntryPoints should be hashable""" hash(self.ep) def test_module(self): assert self.ep.module == 'value' def test_attr(self): assert self.ep.attr is None def test_sortable(self): """ EntryPoint objects are sortable, but result is undefined. """ sorted([ EntryPoint(name='b', value='val', group='group'), EntryPoint(name='a', value='val', group='group'), ]) class FileSystem( fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder, unittest.TestCase ): def test_unicode_dir_on_sys_path(self): """ Ensure a Unicode subdirectory of a directory on sys.path does not crash. """ fixtures.build_files( {self.unicode_filename(): {}}, prefix=self.site_dir, ) list(distributions()) class PackagesDistributionsPrebuiltTest(fixtures.ZipFixtures, unittest.TestCase): def test_packages_distributions_example(self): self._fixture_on_path('example-21.12-py3-none-any.whl') assert packages_distributions()['example'] == ['example'] def test_packages_distributions_example2(self): """ Test packages_distributions on a wheel built by trampolim. """ self._fixture_on_path('example2-1.0.0-py3-none-any.whl') assert packages_distributions()['example2'] == ['example2'] class PackagesDistributionsTest( fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase ): def test_packages_distributions_neither_toplevel_nor_files(self): """ Test a package built without 'top-level.txt' or a file list. """ fixtures.build_files( { 'trim_example-1.0.0.dist-info': { 'METADATA': """ Name: trim_example Version: 1.0.0 """, } }, prefix=self.site_dir, ) packages_distributions() def test_packages_distributions_all_module_types(self): """ Test top-level modules detected on a package without 'top-level.txt'. """ suffixes = importlib.machinery.all_suffixes() metadata = dict( METADATA=""" Name: all_distributions Version: 1.0.0 """, ) files = { 'all_distributions-1.0.0.dist-info': metadata, } for i, suffix in enumerate(suffixes): files.update({ f'importable-name {i}{suffix}': '', f'in_namespace_{i}': { f'mod{suffix}': '', }, f'in_package_{i}': { '__init__.py': '', f'mod{suffix}': '', }, }) metadata.update(RECORD=fixtures.build_record(files)) fixtures.build_files(files, prefix=self.site_dir) distributions = packages_distributions() for i in range(len(suffixes)): assert distributions[f'importable-name {i}'] == ['all_distributions'] assert distributions[f'in_namespace_{i}'] == ['all_distributions'] assert distributions[f'in_package_{i}'] == ['all_distributions'] assert not any(name.endswith('.dist-info') for name in distributions) @os_helper.skip_unless_symlink def test_packages_distributions_symlinked_top_level(self) -> None: """ Distribution is resolvable from a simple top-level symlink in RECORD. See #452. 
""" files: fixtures.FilesSpec = { "symlinked_pkg-1.0.0.dist-info": { "METADATA": """ Name: symlinked-pkg Version: 1.0.0 """, "RECORD": "symlinked,,\n", }, ".symlink.target": {}, "symlinked": Symlink(".symlink.target"), } fixtures.build_files(files, self.site_dir) assert packages_distributions()['symlinked'] == ['symlinked-pkg'] class PackagesDistributionsEggTest( fixtures.EggInfoPkg, fixtures.EggInfoPkgPipInstalledNoToplevel, fixtures.EggInfoPkgPipInstalledNoModules, fixtures.EggInfoPkgSourcesFallback, unittest.TestCase, ): def test_packages_distributions_on_eggs(self): """ Test old-style egg packages with a variation of 'top_level.txt', 'SOURCES.txt', and 'installed-files.txt', available. """ distributions = packages_distributions() def import_names_from_package(package_name): return { import_name for import_name, package_names in distributions.items() if package_name in package_names } # egginfo-pkg declares one import ('mod') via top_level.txt assert import_names_from_package('egginfo-pkg') == {'mod'} # egg_with_module-pkg has one import ('egg_with_module') inferred from # installed-files.txt (top_level.txt is missing) assert import_names_from_package('egg_with_module-pkg') == {'egg_with_module'} # egg_with_no_modules-pkg should not be associated with any import names # (top_level.txt is empty, and installed-files.txt has no .py files) assert import_names_from_package('egg_with_no_modules-pkg') == set() # sources_fallback-pkg has one import ('sources_fallback') inferred from # SOURCES.txt (top_level.txt and installed-files.txt is missing) assert import_names_from_package('sources_fallback-pkg') == {'sources_fallback'} class EditableDistributionTest(fixtures.DistInfoPkgEditable, unittest.TestCase): def test_origin(self): dist = Distribution.from_name('distinfo-pkg') assert dist.origin.url.endswith('.whl') assert dist.origin.archive_info.hashes.sha256 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tests/test_zip.py0000644000175100001660000000340314743546124020361 0ustar00runnerdockerimport sys import unittest from importlib_metadata import ( PackageNotFoundError, distribution, distributions, entry_points, files, version, ) from . 
import fixtures class TestZip(fixtures.ZipFixtures, unittest.TestCase): def setUp(self): super().setUp() self._fixture_on_path('example-21.12-py3-none-any.whl') def test_zip_version(self): self.assertEqual(version('example'), '21.12') def test_zip_version_does_not_match(self): with self.assertRaises(PackageNotFoundError): version('definitely-not-installed') def test_zip_entry_points(self): scripts = entry_points(group='console_scripts') entry_point = scripts['example'] self.assertEqual(entry_point.value, 'example:main') entry_point = scripts['Example'] self.assertEqual(entry_point.value, 'example:main') def test_missing_metadata(self): self.assertIsNone(distribution('example').read_text('does not exist')) def test_case_insensitive(self): self.assertEqual(version('Example'), '21.12') def test_files(self): for file in files('example'): path = str(file.dist.locate_file(file)) assert '.whl/' in path, path def test_one_distribution(self): dists = list(distributions(path=sys.path[:1])) assert len(dists) == 1 class TestEgg(TestZip): def setUp(self): super().setUp() self._fixture_on_path('example-21.12-py3.6.egg') def test_files(self): for file in files('example'): path = str(file.dist.locate_file(file)) assert '.egg/' in path, path def test_normalized_name(self): dist = distribution('example') assert dist._normalized_name == 'example' ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/towncrier.toml0000644000175100001660000000005414743546124017714 0ustar00runnerdocker[tool.towncrier] title_format = "{version}" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1737411668.0 importlib_metadata-8.6.1/tox.ini0000644000175100001660000000247414743546124016326 0ustar00runnerdocker[testenv] description = perform primary checks (tests, style, types, coverage) deps = setenv = PYTHONWARNDEFAULTENCODING = 1 commands = pytest {posargs} passenv = HOME usedevelop = True extras = test check cover enabler type [testenv:diffcov] description = run tests and check that diff from main is covered deps = {[testenv]deps} diff-cover commands = pytest {posargs} --cov-report xml diff-cover coverage.xml --compare-branch=origin/main --html-report diffcov.html diff-cover coverage.xml --compare-branch=origin/main --fail-under=100 [testenv:docs] description = build the documentation extras = doc test changedir = docs commands = python -m sphinx -W --keep-going . {toxinidir}/build/html python -m sphinxlint [testenv:finalize] description = assemble changelog and tag a release skip_install = True deps = towncrier jaraco.develop >= 7.23 pass_env = * commands = python -m jaraco.develop.finalize [testenv:release] description = publish the package to PyPI and GitHub skip_install = True deps = build twine>=3 jaraco.develop>=7.1 pass_env = TWINE_PASSWORD GITHUB_TOKEN setenv = TWINE_USERNAME = {env:TWINE_USERNAME:__token__} commands = python -c "import shutil; shutil.rmtree('dist', ignore_errors=True)" python -m build python -m twine upload dist/* python -m jaraco.develop.create-github-release
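The test modules above double as usage documentation for the importlib_metadata API. A minimal consumer-side sketch, assuming importlib_metadata and packaging are installed (the is_installed helper mirrors the pattern in tests/test_integration.py and is not part of the package's public API):

import packaging.requirements
from importlib_metadata import entry_points, metadata, version

def is_installed(package_spec: str) -> bool:
    # True when the installed version of the named project satisfies the specifier.
    req = packaging.requirements.Requirement(package_spec)
    return version(req.name) in req.specifier

print(version("importlib_metadata"))            # e.g. '8.6.1'
print(metadata("importlib_metadata")["Name"])   # project name from the installed metadata
print(len(entry_points(group="console_scripts")))
print(is_installed("zipp>=3.20"))               # zipp>=3.20 is the dependency declared above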